Mirror of https://github.com/getredash/redash.git (synced 2025-12-25 01:03:20 -05:00)
Compare commits: v0.6.0+b74 ... v0.6.3-rc (161 commits)

Commit SHA1s:

05c2c21a85 3771af0a8c c32c2d43f7 4e2e3f9077 2a27422df9 f9e0ce8e9c a1d49f13d3
26aa199f9c 4c77f3f914 d6be792595 59c1ea7f16 4d24005eff 2dab35b614 0b61b88f5f
e5cb58207c fc17d1af81 e6650e1e2d 3aa1cd0133 e04833c327 b743cceb60 a0e134d3b5
d7fb2d7458 b913ce6022 1eb7945d16 37d0026ee4 9cdc2cb2f7 a9bff9063e 380126ee44
d8377375b8 98ff701f9a f5ea3e97d3 719e96dd2f 6c6c0256ba 723df51cdd a0f4e263b2
4706bf8060 f96a9f659a 63c273f896 622ac6d781 8dc564a8bc 3ae5baef22 8d819068b5
585e056265 1914ed7c7c bd216e93e7 5e351de896 de0e534c77 5fa1f9440d b3ddc5f8b9
8cde5f9673 1bb53ca497 0a3cd9267f 075d843354 b14e5e8c0e c9da4be422 276ee7c27a
334040532a 335a3a98b5 b17080a7f5 8441c12b01 3b4af1b6fa c3deb8e2fa a60b1686da
b56e87ceb2 fc89bcdaf3 15ec8321bb e6ba62485c 9077b01fb9 f45281be96 a1c8ef9037
f46e8af23f 30a89bfd2c 6312f8738d 9e3d5c10c5 59b87ec4fd 27ecf5f25c 105971c4c8
690f8323c3 20eb110ce3 571c9d0aee 0ee7292f16 8c28392dfd 671f1f4478 557d3748be
f00d080ed2 4e76c1305f 36ef388e92 2e1ee7f76c fc1e38772d 0e631a5121 d74175efca
bf5fe7d2c7 0f022aba92 0b6e55e55a e1c409366c 3b942118e9 7f1543db8f 74a5121be2
26fe136a1a 83fb189b05 5e8d0d36c0 4ae4cffa04 bc433e88fe 513ef501a4 f2bdcbedfb
fd056edb2a 0f0acfdd12 1e3b507b2b 84d95272f3 3b08e9e214 f4be83b06f 4918d0430c
e25b86b10d d3d305a843 825b93bfe9 8c98282200 768ac9eb04 71011d2fca 9683a8ed82
10a6ac9313 dba325e9a2 fcd9ab533c 68e3e8e1c5 7f8b738b9e 8a35dcedfa ef763b7157
498e1d4474 73de936c75 e32b709a41 60652f63c4 d0d4101f90 646875794f cdad4be0d5
8f4285be62 acfa55e2d0 0b7cd07db0 6297ffd523 368f4fdbef f52044a209 9fb33cf746
e3c5da5bc5 e675690cc6 edc1622cf5 5ab3d4a40d cb29d87b63 6ff6bdad9f e3cc3ef9a4
1fe4f291f2 a54119f4a2 c5b7fe5321 d487ec9153 fa19b1ddc8 267c32b390 aeff3f1494
e80e52f6c9 fe41a70602 976d9abe2d 041bc1100a 5d095ff6ab ef01b61b29 faad6b656b
Makefile (5 lines changed)

@@ -1,6 +1,7 @@
 NAME=redash
 VERSION=`python ./manage.py version`
 FULL_VERSION=$(VERSION)+b$(CIRCLE_BUILD_NUM)
+BASE_VERSION=$(shell python ./manage.py version | cut -d + -f 1)
 # VERSION gets evaluated every time it's referenced, therefore we need to use VERSION here instead of FULL_VERSION.
 FILENAME=$(CIRCLE_ARTIFACTS)/$(NAME).$(VERSION).tar.gz

@@ -15,8 +16,8 @@ pack:
 	tar -zcv -f $(FILENAME) --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *

 upload:
-	python bin/upload_version.py $(VERSION) $(FILENAME)
+	python bin/release_manager.py $(CIRCLE_SHA1) $(BASE_VERSION) $(FILENAME)

 test:
 	nosetests --with-coverage --cover-package=redash tests/*.py
-	cd rd_ui && grunt test
+	#cd rd_ui && grunt test
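The new BASE_VERSION variable strips the `+b<build>` metadata that `manage.py version` can append, mirroring `cut -d + -f 1`. A minimal sketch of the same split in Python (the sample `version` value is made up):

```python
# Everything before the first '+' is the base semantic version,
# which is what the RC tag name is built from.
version = "0.6.3+b74"                 # e.g. output of `python ./manage.py version`
base_version = version.split("+", 1)[0]
print(base_version)                   # -> "0.6.3"
```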
README.md

@@ -1,6 +1,5 @@
 <p align="center">
-  <img title="re:dash" src='https://raw.githubusercontent.com/EverythingMe/redash/screenshots/redash_logo.png' />
-
+  <img title="re:dash" src='http://redash.io/static/img/redash_logo.png' width="200px"/>
 </p>
 <p align="center">
   <img title="Build Status" src='https://circleci.com/gh/EverythingMe/redash.png?circle-token=8a695aa5ec2cbfa89b48c275aea298318016f040'/>
@@ -28,7 +27,7 @@ You can try out the demo instance: http://demo.redash.io/ (login with any Google

 ## Getting Started

-* [Setting up re:dash instance](https://github.com/EverythingMe/redash/wiki/Setting-up-re:dash-instance) (includes links to ready made AWS/GCE images).
+* [Setting up re:dash instance](http://redash.io/deployment/setup.html) (includes links to ready made AWS/GCE images).
 * Additional documentation in the [Wiki](https://github.com/everythingme/redash/wiki).
Deleted file (30 lines):

@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-import sys
-import requests
-
-
-if __name__ == '__main__':
-    response = requests.get('https://api.github.com/repos/EverythingMe/redash/releases')
-
-    if response.status_code != 200:
-        exit("Failed getting releases (status code: %s)." % response.status_code)
-
-    sorted_releases = sorted(response.json(), key=lambda release: release['id'], reverse=True)
-
-    latest_release = sorted_releases[0]
-    asset_url = latest_release['assets'][0]['url']
-    filename = latest_release['assets'][0]['name']
-
-    wget_command = 'wget --header="Accept: application/octet-stream" %s -O %s' % (asset_url, filename)
-
-    if '--url-only' in sys.argv:
-        print asset_url
-    elif '--wget' in sys.argv:
-        print wget_command
-    else:
-        print "Latest release: %s" % latest_release['tag_name']
-        print latest_release['body']
-
-        print "\nTarball URL: %s" % asset_url
-        print 'wget: %s' % (wget_command)
bin/release_manager.py (new file, 147 lines)

@@ -0,0 +1,147 @@
+import os
+import sys
+import json
+import re
+import subprocess
+import requests
+
+github_token = os.environ['GITHUB_TOKEN']
+auth = (github_token, 'x-oauth-basic')
+repo = 'EverythingMe/redash'
+
+
+def _github_request(method, path, params=None, headers={}):
+    if not path.startswith('https://api.github.com'):
+        url = "https://api.github.com/{}".format(path)
+    else:
+        url = path
+
+    if params is not None:
+        params = json.dumps(params)
+
+    response = requests.request(method, url, data=params, auth=auth)
+    return response
+
+
+def exception_from_error(message, response):
+    return Exception("({}) {}: {}".format(response.status_code, message, response.json().get('message', '?')))
+
+
+def rc_tag_name(version):
+    return "v{}-rc".format(version)
+
+
+def get_rc_release(version):
+    tag = rc_tag_name(version)
+    response = _github_request('get', 'repos/{}/releases/tags/{}'.format(repo, tag))
+
+    if response.status_code == 404:
+        return None
+    elif response.status_code == 200:
+        return response.json()
+
+    raise exception_from_error("Unknown error while looking RC release: ", response)
+
+
+def create_release(version, commit_sha):
+    tag = rc_tag_name(version)
+
+    params = {
+        'tag_name': tag,
+        'name': "{} - RC".format(version),
+        'target_commitish': commit_sha,
+        'prerelease': True
+    }
+
+    response = _github_request('post', 'repos/{}/releases'.format(repo), params)
+
+    if response.status_code != 201:
+        raise exception_from_error("Failed creating new release", response)
+
+    return response.json()
+
+
+def upload_asset(release, filepath):
+    upload_url = release['upload_url'].replace('{?name}', '')
+    filename = filepath.split('/')[-1]
+
+    with open(filepath) as file_content:
+        headers = {'Content-Type': 'application/gzip'}
+        response = requests.post(upload_url, file_content, params={'name': filename}, headers=headers, auth=auth, verify=False)
+
+    if response.status_code != 201:  # not 200/201/...
+        raise exception_from_error('Failed uploading asset', response)
+
+    return response
+
+
+def remove_previous_builds(release):
+    for asset in release['assets']:
+        response = _github_request('delete', asset['url'])
+        if response.status_code != 204:
+            raise exception_from_error("Failed deleting asset", response)
+
+
+def get_changelog(commit_sha):
+    latest_release = _github_request('get', 'repos/{}/releases/latest'.format(repo))
+    if latest_release.status_code != 200:
+        raise exception_from_error('Failed getting latest release', latest_release)
+
+    latest_release = latest_release.json()
+    previous_sha = latest_release['target_commitish']
+
+    args = ['git', '--no-pager', 'log', '--merges', '--grep', 'Merge pull request', '--pretty=format:"%h|%s|%b|%p"', '{}...{}'.format(previous_sha, commit_sha)]
+    log = subprocess.check_output(args)
+    changes = ["Changes since {}:".format(latest_release['name'])]
+
+    for line in log.split('\n'):
+        try:
+            sha, subject, body, parents = line[1:-1].split('|')
+        except ValueError:
+            continue
+
+        try:
+            pull_request = re.match("Merge pull request #(\d+)", subject).groups()[0]
+            pull_request = " #{}".format(pull_request)
+        except Exception, ex:
+            pull_request = ""
+
+        author = subprocess.check_output(['git', 'log', '-1', '--pretty=format:"%an"', parents.split(' ')[-1]])[1:-1]
+
+        changes.append("{}{}: {} ({})".format(sha, pull_request, body.strip(), author))
+
+    return "\n".join(changes)
+
+
+def update_release_commit_sha(release, commit_sha):
+    params = {
+        'target_commitish': commit_sha,
+    }
+
+    response = _github_request('patch', 'repos/{}/releases/{}'.format(repo, release['id']), params)
+
+    if response.status_code != 200:
+        raise exception_from_error("Failed updating commit sha for existing release", response)
+
+    return response.json()
+
+
+def update_release(version, build_filepath, commit_sha):
+    try:
+        release = get_rc_release(version)
+        if release:
+            release = update_release_commit_sha(release, commit_sha)
+        else:
+            release = create_release(version, commit_sha)
+
+        print "Using release id: {}".format(release['id'])
+
+        remove_previous_builds(release)
+        response = upload_asset(release, build_filepath)
+
+        changelog = get_changelog(commit_sha)
+
+        response = _github_request('patch', release['url'], {'body': changelog})
+        if response.status_code != 200:
+            raise exception_from_error("Failed updating release description", response)
+
+    except Exception, ex:
+        print ex
+
+
+if __name__ == '__main__':
+    commit_sha = sys.argv[1]
+    version = sys.argv[2]
+    filepath = sys.argv[3]
+
+    # TODO: make sure running from git directory & remote = repo
+    update_release(version, filepath, commit_sha)
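get_changelog above relies on git's `--pretty=format:"%h|%s|%b|%p"` output: one pipe-delimited record per merge commit, with the literal quotes from the format string stripped by the `line[1:-1]` slice. A standalone sketch of that parsing step, using a made-up log line:

```python
import re

# A made-up line in the shape git emits for --pretty=format:"%h|%s|%b|%p"
# (note the surrounding quotes, which the slice below removes).
line = '"1a2b3c4|Merge pull request #451 from user/branch|Fix query scheduling|9f8e7d6 5c4b3a2"'

sha, subject, body, parents = line[1:-1].split('|')

match = re.match(r"Merge pull request #(\d+)", subject)
pull_request = " #{}".format(match.group(1)) if match else ""

# The second parent of a merge commit is the merged branch's head,
# which is why the script looks up that commit's author.
merged_head = parents.split(' ')[-1]

print("{}{}: {} (merged head {})".format(sha, pull_request, body.strip(), merged_head))
```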
bin/upload_version.py (deleted, 46 lines)

@@ -1,46 +0,0 @@
-#!python
-import os
-import sys
-import json
-import requests
-import subprocess
-
-
-def capture_output(command):
-    proc = subprocess.Popen(command, stdout=subprocess.PIPE)
-    return proc.stdout.read()
-
-
-if __name__ == '__main__':
-    version = sys.argv[1]
-    filepath = sys.argv[2]
-    filename = filepath.split('/')[-1]
-    github_token = os.environ['GITHUB_TOKEN']
-    auth = (github_token, 'x-oauth-basic')
-    commit_sha = os.environ['CIRCLE_SHA1']
-
-    commit_body = capture_output(["git", "log", "--format=%b", "-n", "1", commit_sha])
-    file_md5_checksum = capture_output(["md5sum", filepath]).split()[0]
-    file_sha256_checksum = capture_output(["sha256sum", filepath]).split()[0]
-    version_body = "%s\n\nMD5: %s\nSHA256: %s" % (commit_body, file_md5_checksum, file_sha256_checksum)
-
-    params = json.dumps({
-        'tag_name': 'v{0}'.format(version),
-        'name': 're:dash v{0}'.format(version),
-        'body': version_body,
-        'target_commitish': commit_sha,
-        'prerelease': True
-    })
-
-    response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
-                             data=params,
-                             auth=auth)
-
-    upload_url = response.json()['upload_url']
-    upload_url = upload_url.replace('{?name}', '')
-
-    with open(filepath) as file_content:
-        headers = {'Content-Type': 'application/gzip'}
-        response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth,
-                                 headers=headers, verify=False)
circle.yml

@@ -7,6 +7,9 @@ machine:
     2.7.3
 dependencies:
   pre:
+    - wget http://downloads.sourceforge.net/project/optipng/OptiPNG/optipng-0.7.5/optipng-0.7.5.tar.gz
+    - tar xvf optipng-0.7.5.tar.gz
+    - cd optipng-0.7.5; ./configure; make; sudo checkinstall -y;
    - make deps
    - pip install -r dev_requirements.txt
    - pip install -r requirements.txt
manage.py

@@ -2,12 +2,15 @@
 """
 CLI to manage redash.
 """
+import json
+
 from flask.ext.script import Manager

 from redash import settings, models, __version__
 from redash.wsgi import app
 from redash.import_export import import_manager
 from redash.cli import users, database, data_sources
+from redash.monitor import get_status

 manager = Manager(app)
 manager.add_command("database", database.manager)
@@ -21,6 +24,9 @@ def version():
     """Displays re:dash version."""
     print __version__

+@manager.command
+def status():
+    print json.dumps(get_status(), indent=2)

 @manager.command
 def runworkers():
migrations/0007_add_schedule_to_queries.py (new file, 23 lines)

@@ -0,0 +1,23 @@
+from playhouse.migrate import PostgresqlMigrator, migrate
+
+from redash.models import db
+from redash import models
+
+
+if __name__ == '__main__':
+    db.connect_db()
+    migrator = PostgresqlMigrator(db.database)
+
+    with db.database.transaction():
+        migrate(
+            migrator.add_column('queries', 'schedule', models.Query.schedule),
+        )
+
+        db.database.execute_sql("UPDATE queries SET schedule = ttl WHERE ttl > 0;")
+
+        migrate(
+            migrator.drop_column('queries', 'ttl')
+        )
+
+    db.close_db(None)
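After this migration, `queries.schedule` replaces the old integer `ttl`: positive ttl values (refresh intervals in seconds) carry over as the schedule value, while unscheduled queries (`ttl <= 0`) end up with a NULL schedule. A rough sketch of that mapping, under the assumption that schedule is stored as a string:

```python
def ttl_to_schedule(ttl):
    # Mirrors "UPDATE queries SET schedule = ttl WHERE ttl > 0":
    # positive ttl values (seconds) carry over; everything else
    # stays unscheduled (NULL).
    return str(ttl) if ttl is not None and ttl > 0 else None

assert ttl_to_schedule(3600) == "3600"   # hourly refresh
assert ttl_to_schedule(-1) is None       # "No Refresh" in the old UI
```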
migrations/0008_make_ds_name_unique.py (new file, 20 lines)

@@ -0,0 +1,20 @@
+from redash.models import db
+
+
+if __name__ == '__main__':
+    db.connect_db()
+
+    with db.database.transaction():
+        # Make sure all data sources names are unique.
+        db.database.execute_sql("""
+        UPDATE data_sources
+        SET name = new_names.name
+        FROM (
+          SELECT id, name || ' ' || id as name
+          FROM (SELECT id, name, rank() OVER (PARTITION BY name ORDER BY created_at ASC) FROM data_sources) ds WHERE rank > 1
+        ) AS new_names
+        WHERE data_sources.id = new_names.id;
+        """)
+        # Add unique constraint on data_sources.name.
+        db.database.execute_sql("ALTER TABLE data_sources ADD CONSTRAINT unique_name UNIQUE (name);")
+
+    db.close_db(None)
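The window-function query above keeps the oldest data source that claimed each name and renames every later one by appending its row id, so the UNIQUE constraint can then be added safely. A small sketch of the same dedup rule in Python (the sample rows are made up):

```python
from collections import defaultdict

def dedup_names(rows):
    """rows: (id, name) tuples ordered by created_at, as in the
    rank() OVER (PARTITION BY name ORDER BY created_at) window."""
    seen = defaultdict(int)
    renamed = []
    for ds_id, name in rows:
        seen[name] += 1
        # rank > 1 gets "name || ' ' || id"; the first keeps its name.
        renamed.append((ds_id, name if seen[name] == 1 else "%s %s" % (name, ds_id)))
    return renamed

print(dedup_names([(1, "prod"), (2, "prod"), (3, "stats")]))
# -> [(1, 'prod'), (2, 'prod 2'), (3, 'stats')]
```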
Binary files (not shown):

rd_ui/app/images/favicon-16x16.png (new executable file, 1.3 KiB)
rd_ui/app/images/favicon-32x32.png (new executable file, 2.0 KiB)
rd_ui/app/images/favicon-96x96.png (new executable file, 3.8 KiB)
rd_ui/app/images/redash_icon_small.png (new file, 6.0 KiB)
rd_ui/app/index.html

@@ -18,8 +18,15 @@
     <link rel="stylesheet" href="/bower_components/angular-ui-select/dist/select.css">
     <link rel="stylesheet" href="/bower_components/pace/themes/pace-theme-minimal.css">
     <link rel="stylesheet" href="/bower_components/font-awesome/css/font-awesome.css">
+    <link rel="stylesheet" href="/bower_components/codemirror/addon/hint/show-hint.css">
+    <link rel="stylesheet" href="/bower_components/leaflet/dist/leaflet.css">
     <link rel="stylesheet" href="/styles/redash.css">
     <!-- endbuild -->

+    <link rel="icon" type="image/png" sizes="32x32" href="/images/favicon-32x32.png">
+    <link rel="icon" type="image/png" sizes="96x96" href="/images/favicon-96x96.png">
+    <link rel="icon" type="image/png" sizes="16x16" href="/images/favicon-16x16.png">
+
   </head>
   <body>
     <div growl></div>
@@ -33,15 +40,15 @@
           <span class="icon-bar"></span>
           <span class="icon-bar"></span>
         </button>
-        <a class="navbar-brand" href="/"><strong>{{name}}</strong></a>
+        <a class="navbar-brand" href="/"><img src="/images/redash_icon_small.png"/></a>
       </div>
       {% raw %}
       <div class="collapse navbar-collapse navbar-ex1-collapse">
         <ul class="nav navbar-nav">
           <li class="active" ng-show="pageTitle"><a class="page-title" ng-bind="pageTitle"></a></li>
-          <li class="dropdown" ng-show="groupedDashboards.length > 0 || otherDashboards.length > 0 || currentUser.hasPermission('create_dashboard')">
-            <a href="#" class="dropdown-toggle" data-toggle="dropdown"><span class="glyphicon glyphicon-th-large"></span> <b class="caret"></b></a>
-            <ul class="dropdown-menu">
+          <li class="dropdown" ng-show="groupedDashboards.length > 0 || otherDashboards.length > 0 || currentUser.hasPermission('create_dashboard')" dropdown>
+            <a href="#" class="dropdown-toggle" dropdown-toggle><span class="glyphicon glyphicon-th-large"></span> <b class="caret"></b></a>
+            <ul class="dropdown-menu" dropdown-menu>
              <span ng-repeat="(name, group) in groupedDashboards">
                <li class="dropdown-submenu">
                  <a href="#" ng-bind="name"></a>
@@ -59,9 +66,9 @@
               <li><a data-toggle="modal" href="#new_dashboard_dialog" ng-show="currentUser.hasPermission('create_dashboard')">New Dashboard</a></li>
             </ul>
           </li>
-          <li class="dropdown" ng-show="currentUser.hasPermission('view_query')">
-            <a href="#" class="dropdown-toggle" data-toggle="dropdown">Queries <b class="caret"></b></a>
-            <ul class="dropdown-menu">
+          <li class="dropdown" ng-show="currentUser.hasPermission('view_query')" dropdown>
+            <a href="#" class="dropdown-toggle" dropdown-toggle>Queries <b class="caret"></b></a>
+            <ul class="dropdown-menu" dropdown-menu>
               <li ng-show="currentUser.hasPermission('create_query')"><a href="/queries/new">New Query</a></li>
               <li><a href="/queries">Queries</a></li>
             </ul>
@@ -105,9 +112,11 @@
     <script src="/bower_components/codemirror/lib/codemirror.js"></script>
     <script src="/bower_components/codemirror/addon/edit/matchbrackets.js"></script>
     <script src="/bower_components/codemirror/addon/edit/closebrackets.js"></script>
+    <script src="/bower_components/codemirror/addon/hint/show-hint.js"></script>
+    <script src="/bower_components/codemirror/addon/hint/anyword-hint.js"></script>
     <script src="/bower_components/codemirror/mode/sql/sql.js"></script>
     <script src="/bower_components/codemirror/mode/python/python.js"></script>
     <script src="/bower_components/codemirror/mode/javascript/javascript.js"></script>
     <script src="/bower_components/angular-ui-codemirror/ui-codemirror.js"></script>
     <script src="/bower_components/highcharts/highcharts.js"></script>
     <script src="/bower_components/highcharts/modules/exporting.js"></script>
     <script src="/bower_components/gridster/dist/jquery.gridster.js"></script>
@@ -123,13 +132,14 @@
     <script src="/bower_components/marked/lib/marked.js"></script>
     <script src="/scripts/ng_highchart.js"></script>
     <script src="/scripts/ng_smart_table.js"></script>
-    <script src="/scripts/ui-bootstrap-tpls-0.5.0.min.js"></script>
+    <script src="/bower_components/angular-ui-bootstrap-bower/ui-bootstrap-tpls.js"></script>
     <script src="/bower_components/bucky/bucky.js"></script>
     <script src="/bower_components/pace/pace.js"></script>
     <script src="/bower_components/mustache/mustache.js"></script>
     <script src="/bower_components/canvg/rgbcolor.js"></script>
     <script src="/bower_components/canvg/StackBlur.js"></script>
     <script src="/bower_components/canvg/canvg.js"></script>
     <script src="/bower_components/canvg/canvg.js"></script>
+    <script src="/bower_components/leaflet/dist/leaflet.js"></script>
     <!-- endbuild -->

     <!-- build:js({.tmp,app}) /scripts/scripts.js -->
@@ -146,6 +156,7 @@
     <script src="/scripts/visualizations/base.js"></script>
     <script src="/scripts/visualizations/chart.js"></script>
     <script src="/scripts/visualizations/cohort.js"></script>
+    <script src="/scripts/visualizations/map.js"></script>
     <script src="/scripts/visualizations/counter.js"></script>
     <script src="/scripts/visualizations/table.js"></script>
     <script src="/scripts/visualizations/pivot.js"></script>
@@ -48,6 +48,19 @@

     {% endif %}

+    {% if show_saml_login %}
+    <div class="row">
+        <a href="/saml/login">SAML Login</a>
+    </div>
+
+    <div class="login-or">
+        <hr class="hr-or">
+        <span class="span-or">or</span>
+    </div>
+    {% endif %}
+
     <form role="form" method="post" name="login">
       <div class="form-group">
         <label for="inputUsernameEmail">Username or email</label>
@@ -6,7 +6,6 @@ angular.module('redash', [
   'redash.services',
   'redash.renderers',
   'redash.visualization',
-  'ui.codemirror',
   'highchart',
   'ui.select2',
   'angular-growl',
@@ -1,6 +1,8 @@
 (function () {
   var dateFormatter = function (value) {
-    if (!value) return "-";
+    if (!value) {
+      return "-";
+    }
     return value.toDate().toLocaleString();
   };
@@ -30,9 +32,9 @@
     },
     {
       'label': 'Update Schedule',
-      'map': 'ttl',
+      'map': 'schedule',
       'formatFunction': function (value) {
-        return $filter('refreshRateHumanize')(value);
+        return $filter('scheduleHumanize')(value);
       }
     }
   ];
@@ -127,9 +129,9 @@
     },
     {
       'label': 'Update Schedule',
-      'map': 'ttl',
+      'map': 'schedule',
       'formatFunction': function (value) {
-        return $filter('refreshRateHumanize')(value);
+        return $filter('scheduleHumanize')(value);
       }
     }
   ]
@@ -100,9 +100,13 @@
     Events.record(currentUser, "autorefresh", "dashboard", dashboard.id, {'enable': $scope.refreshEnabled});

     if ($scope.refreshEnabled) {
-      var refreshRate = _.min(_.flatten($scope.dashboard.widgets), function(widget) {
-        return widget.visualization.query.ttl;
-      }).visualization.query.ttl;
+      var refreshRate = _.min(_.map(_.flatten($scope.dashboard.widgets), function(widget) {
+        var schedule = widget.visualization.query.schedule;
+        if (schedule === null || schedule.match(/\d\d:\d\d/) !== null) {
+          return 60;
+        }
+        return widget.visualization.query.schedule;
+      }));

       $scope.refreshRate = _.max([120, refreshRate * 2]) * 1000;
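The dashboard now derives its auto-refresh interval from the widgets' schedules instead of ttl: daily ("HH:mm") or unscheduled queries count as 60 seconds, the minimum across widgets is doubled, floored at 120 seconds, and converted to milliseconds. The same arithmetic, sketched in Python:

```python
import re

def dashboard_refresh_ms(schedules):
    # Daily ("HH:mm") or unscheduled (None) queries are treated
    # as 60 seconds, matching the JavaScript above.
    def as_seconds(schedule):
        if schedule is None or re.match(r"\d\d:\d\d", schedule):
            return 60
        return int(schedule)

    fastest = min(as_seconds(s) for s in schedules)
    return max(120, fastest * 2) * 1000

print(dashboard_refresh_ms(["300", None, "00:15"]))  # -> 120000
```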
@@ -138,7 +142,6 @@
       var parameters = Query.collectParamsFromQueryString($location, $scope.query);
       var maxAge = $location.search()['maxAge'];
       $scope.queryResult = $scope.query.getQueryResult(maxAge, parameters);
-      $scope.nextUpdateTime = moment(new Date(($scope.query.updated_at + $scope.query.ttl + $scope.query.runtime + 300) * 1000)).fromNow();

       $scope.type = 'visualization';
     } else {
@@ -68,7 +68,7 @@
   $scope.duplicateQuery = function() {
     Events.record(currentUser, 'fork', 'query', $scope.query.id);
     $scope.query.id = null;
-    $scope.query.ttl = -1;
+    $scope.query.schedule = null;

     $scope.saveQuery({
       successMessage: 'Query forked',
@@ -1,19 +1,46 @@
 (function() {
   'use strict';

-  function QueryViewCtrl($scope, Events, $route, $location, notifications, growl, Query, DataSource) {
+  function QueryViewCtrl($scope, Events, $route, $location, notifications, growl, $modal, Query, DataSource) {
     var DEFAULT_TAB = 'table';

-    var getQueryResult = function(ttl) {
+    var getQueryResult = function(maxAge) {
       // Collect params, and getQueryResult with params; getQueryResult merges it into the query
       var parameters = Query.collectParamsFromQueryString($location, $scope.query);
-      if (ttl == undefined) {
-        ttl = $location.search()['maxAge'];
+      if (maxAge == undefined) {
+        maxAge = $location.search()['maxAge'];
       }
-      $scope.queryResult = $scope.query.getQueryResult(ttl, parameters);
+
+      if (maxAge == undefined) {
+        maxAge = -1;
+      }
+
+      $scope.queryResult = $scope.query.getQueryResult(maxAge, parameters);
     }

+    $scope.dataSource = {};
     $scope.query = $route.current.locals.query;

+    var updateSchema = function() {
+      $scope.hasSchema = false;
+      $scope.editorSize = "col-md-12";
+      var dataSourceId = $scope.query.data_source_id || $scope.dataSources[0].id;
+      DataSource.getSchema({id: dataSourceId}, function(data) {
+        if (data && data.length > 0) {
+          $scope.schema = data;
+          _.each(data, function(table) {
+            table.collapsed = true;
+          });
+
+          $scope.editorSize = "col-md-9";
+          $scope.hasSchema = true;
+        } else {
+          $scope.hasSchema = false;
+          $scope.editorSize = "col-md-12";
+        }
+      });
+    }
+
     Events.record(currentUser, 'view', 'query', $scope.query.id);
     getQueryResult();
     $scope.queryExecuting = false;
@@ -22,7 +49,9 @@
     $scope.canViewSource = currentUser.hasPermission('view_source');

     $scope.dataSources = DataSource.get(function(dataSources) {
+      updateSchema();
       $scope.query.data_source_id = $scope.query.data_source_id || dataSources[0].id;
+      $scope.dataSource = _.find(dataSources, function(ds) { return ds.id == $scope.query.data_source_id; });
     });

     // in view mode, latest dataset is always visible
@@ -98,7 +127,7 @@
       return Query.delete({id: data.id}, function() {
         $scope.query.is_archived = true;
-        $scope.query.ttl = -1;
+        $scope.query.schedule = null;
         growl.addSuccessMessage(options.successMessage);
         // This feels dirty.
         $('#archive-confirmation-modal').modal('hide');
@@ -121,6 +150,8 @@
       });
     }

+    updateSchema();
+    $scope.dataSource = _.find($scope.dataSources, function(ds) { return ds.id == $scope.query.data_source_id; });
     $scope.executeQuery();
   };
@@ -168,6 +199,28 @@
     }
   });

+  $scope.openScheduleForm = function() {
+    if (!$scope.isQueryOwner) {
+      return;
+    };
+
+    $modal.open({
+      templateUrl: '/views/schedule_form.html',
+      size: 'sm',
+      scope: $scope,
+      controller: ['$scope', '$modalInstance', function($scope, $modalInstance) {
+        $scope.close = function() {
+          $modalInstance.close();
+        }
+        if ($scope.query.hasDailySchedule()) {
+          $scope.refreshType = 'daily';
+        } else {
+          $scope.refreshType = 'periodic';
+        }
+      }]
+    });
+  };
+
   $scope.$watch(function() {
     return $location.hash()
   }, function(hash) {
@@ -180,5 +233,5 @@
   angular.module('redash.controllers')
     .controller('QueryViewCtrl',
-      ['$scope', 'Events', '$route', '$location', 'notifications', 'growl', 'Query', 'DataSource', QueryViewCtrl]);
+      ['$scope', 'Events', '$route', '$location', 'notifications', 'growl', '$modal', 'Query', 'DataSource', QueryViewCtrl]);
 })();
@@ -8,7 +8,7 @@
         'query': '=',
         'visualization': '=?'
       },
-      template: '<a ng-href="{{link}}" class="query-link">{{query.name}}</a>',
+      template: '<small><span class="glyphicon glyphicon-link"></span></small> <a ng-href="{{link}}" class="query-link">{{query.name}}</a>',
       link: function(scope, element) {
         scope.link = '/queries/' + scope.query.id;
         if (scope.visualization) {
@@ -29,7 +29,7 @@
       restrict: 'E',
       template: '<span ng-show="query.id && canViewSource">\
                   <a ng-show="!sourceMode"\
-                    ng-href="{{query.id}}/source#{{selectedTab}}">Show Source\
+                    ng-href="/queries/{{query.id}}/source#{{selectedTab}}">Show Source\
                   </a>\
                   <a ng-show="sourceMode"\
                     ng-href="/queries/{{query.id}}#{{selectedTab}}">Hide Source\
@@ -63,26 +63,97 @@
       restrict: 'E',
       scope: {
         'query': '=',
-        'lock': '='
+        'lock': '=',
+        'schema': '=',
+        'syntax': '='
       },
-      template: '<textarea\
-                  ui-codemirror="editorOptions"\
-                  ng-model="query.query">',
-      link: function($scope) {
-        $scope.editorOptions = {
-          mode: 'text/x-sql',
+      template: '<textarea></textarea>',
+      link: {
+        pre: function ($scope, element) {
+          $scope.syntax = $scope.syntax || 'sql';
+
+          var modes = {
+            'sql': 'text/x-sql',
+            'python': 'text/x-python',
+            'json': 'application/json'
+          };
+
+          var textarea = element.children()[0];
+          var editorOptions = {
+            mode: modes[$scope.syntax],
           lineWrapping: true,
           lineNumbers: true,
           readOnly: false,
           matchBrackets: true,
-          autoCloseBrackets: true
-        };
+          autoCloseBrackets: true,
+          extraKeys: {"Ctrl-Space": "autocomplete"}
+        };

-        $scope.$watch('lock', function(locked) {
-          $scope.editorOptions.readOnly = locked ? 'nocursor' : false;
-        });
+        var additionalHints = [];
+
+        CodeMirror.commands.autocomplete = function(cm) {
+          var hinter = function(editor, options) {
+            var hints = CodeMirror.hint.anyword(editor, options);
+            var cur = editor.getCursor(), token = editor.getTokenAt(cur).string;
+
+            hints.list = _.union(hints.list, _.filter(additionalHints, function (h) {
+              return h.search(token) === 0;
+            }));
+
+            return hints;
+          };
+
+          // CodeMirror.showHint(cm, CodeMirror.hint.anyword);
+          CodeMirror.showHint(cm, hinter);
+        };
+
+        var codemirror = CodeMirror.fromTextArea(textarea, editorOptions);
+
+        codemirror.on('change', function(instance) {
+          var newValue = instance.getValue();
+
+          if (newValue !== $scope.query.query) {
+            $scope.$evalAsync(function() {
+              $scope.query.query = newValue;
+            });
+          }
+
+          $('.schema-container').css('height', $('.CodeMirror').css('height'));
+        });
+
+        $scope.$watch('query.query', function () {
+          if ($scope.query.query !== codemirror.getValue()) {
+            codemirror.setValue($scope.query.query);
+          }
+        });
+
+        $scope.$watch('schema', function (schema) {
+          if (schema) {
+            var keywords = [];
+            _.each(schema, function (table) {
+              keywords.push(table.name);
+              _.each(table.columns, function (c) {
+                keywords.push(c);
+              });
+            });
+
+            additionalHints = _.unique(keywords);
+          }
+
+          codemirror.refresh();
+        });
+
+        $scope.$watch('syntax', function(syntax) {
+          codemirror.setOption('mode', modes[syntax]);
+        });
+
+        $scope.$watch('lock', function (locked) {
+          var readOnly = locked ? 'nocursor' : false;
+          codemirror.setOption('readOnly', readOnly);
+        });
+        }
       }
     };
   }
@@ -111,42 +182,98 @@
       }
     }

+  function queryTimePicker() {
+    return {
+      restrict: 'E',
+      template: '<select ng-disabled="refreshType != \'daily\'" ng-model="hour" ng-change="updateSchedule()" ng-options="c as c for c in hourOptions"></select> :\
+                 <select ng-disabled="refreshType != \'daily\'" ng-model="minute" ng-change="updateSchedule()" ng-options="c as c for c in minuteOptions"></select>',
+      link: function($scope) {
+        var padWithZeros = function(size, v) {
+          v = String(v);
+          if (v.length < size) {
+            v = "0" + v;
+          }
+          return v;
+        };
+
+        $scope.hourOptions = _.map(_.range(0, 24), _.partial(padWithZeros, 2));
+        $scope.minuteOptions = _.map(_.range(0, 60, 5), _.partial(padWithZeros, 2));
+
+        if ($scope.query.hasDailySchedule()) {
+          var parts = $scope.query.scheduleInLocalTime().split(':');
+          $scope.minute = parts[1];
+          $scope.hour = parts[0];
+        } else {
+          $scope.minute = "15";
+          $scope.hour = "00";
+        }
+
+        $scope.updateSchedule = function() {
+          var newSchedule = moment().hour($scope.hour).minute($scope.minute).utc().format('HH:mm');
+          if (newSchedule != $scope.query.schedule) {
+            $scope.query.schedule = newSchedule;
+            $scope.saveQuery();
+          }
+        };
+
+        $scope.$watch('refreshType', function() {
+          if ($scope.refreshType == 'daily') {
+            $scope.updateSchedule();
+          }
+        });
+      }
+    }
+  }
+
   function queryRefreshSelect() {
     return {
       restrict: 'E',
       template: '<select\
-                  ng-disabled="!isQueryOwner"\
-                  ng-model="query.ttl"\
+                  ng-disabled="refreshType != \'periodic\'"\
+                  ng-model="query.schedule"\
                   ng-change="saveQuery()"\
                   ng-options="c.value as c.name for c in refreshOptions">\
+                  <option value="">No Refresh</option>\
                   </select>',
       link: function($scope) {
         $scope.refreshOptions = [
-          {
-            value: -1,
-            name: 'No Refresh'
-          },
           {
-            value: 60,
+            value: "60",
             name: 'Every minute'
           },
-        ]
+        ];
+
+        _.each([5, 10, 15, 30], function(i) {
+          $scope.refreshOptions.push({
+            value: String(i*60),
+            name: "Every " + i + " minutes"
+          })
+        });

         _.each(_.range(1, 13), function (i) {
           $scope.refreshOptions.push({
-            value: i * 3600,
+            value: String(i * 3600),
             name: 'Every ' + i + 'h'
           });
         })

         $scope.refreshOptions.push({
-          value: 24 * 3600,
+          value: String(24 * 3600),
           name: 'Every 24h'
         });
         $scope.refreshOptions.push({
-          value: 7 * 24 * 3600,
+          value: String(7 * 24 * 3600),
           name: 'Once a week'
         });
+
+        $scope.$watch('refreshType', function() {
+          if ($scope.refreshType == 'periodic') {
+            if ($scope.query.hasDailySchedule()) {
+              $scope.query.schedule = null;
+              $scope.saveQuery();
+            }
+          }
+        });
       }
     }
   }
@@ -158,5 +285,6 @@
     .directive('queryResultLink', queryResultCSVLink)
     .directive('queryEditor', queryEditor)
     .directive('queryRefreshSelect', queryRefreshSelect)
+    .directive('queryTimePicker', queryTimePicker)
     .directive('queryFormatter', ['$http', queryFormatter]);
 })();
@@ -24,13 +24,17 @@ angular.module('redash.filters', []).
     return durationHumanize;
   })

-  .filter('refreshRateHumanize', function () {
-    return function (ttl) {
-      if (ttl == -1) {
+  .filter('scheduleHumanize', function() {
+    return function (schedule) {
+      if (schedule === null) {
         return "Never";
-      } else {
-        return "Every " + durationHumanize(ttl);
+      } else if (schedule.match(/\d\d:\d\d/) !== null) {
+        var parts = schedule.split(':');
+        var localTime = moment.utc().hour(parts[0]).minute(parts[1]).local().format('HH:mm');
+        return "Every day at " + localTime;
       }
+
+      return "Every " + durationHumanize(parseInt(schedule));
     }
   })
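scheduleHumanize reads the new schedule encoding: NULL means never, an "HH:mm" string is a daily run at that UTC time (displayed in the viewer's local time), and anything else is an interval in seconds. A rough Python rendering of the same three branches, with a crude stand-in for durationHumanize:

```python
import re
from datetime import datetime, timedelta, timezone

def schedule_humanize(schedule, utc_offset_hours=0):
    if schedule is None:
        return "Never"
    if re.match(r"\d\d:\d\d", schedule):
        # "HH:mm" is stored in UTC; shift to the viewer's local time.
        hour, minute = map(int, schedule.split(":"))
        local = (datetime(2000, 1, 1, hour, minute, tzinfo=timezone.utc)
                 + timedelta(hours=utc_offset_hours))
        return "Every day at " + local.strftime("%H:%M")
    # Crude durationHumanize stand-in for interval schedules.
    return "Every " + str(timedelta(seconds=int(schedule)))

print(schedule_humanize(None))         # -> Never
print(schedule_humanize("13:30", 2))   # -> Every day at 15:30
print(schedule_humanize("3600"))       # -> Every 1:00:00
```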
@@ -308,21 +308,22 @@
       // We check either for true or undefined for backward compatibility.
       var series = scope.series;

       if (chartOptions['sortX'] === true || chartOptions['sortX'] === undefined) {
-        var seriesCopy = [];
-
-        _.each(series, function (s) {
-          // make a copy of series data, so we don't override original.
-          var fieldName = 'x';
-          if (s.data.length > 0 && _.has(s.data[0], 'name')) {
-            fieldName = 'name';
-          };
+        // If this is a chart that has just one row for multiple columns, sort
+        // by the Y values. For example:
+        //
+        // A  | B  | C
+        // 20 | 30 | 15
+        //
+        // Will be sorted:
+        // C  | A  | B
+        // 15 | 20 | 30
+        var sortable = _.every(series, function(s) { return s.data.length == 1 });

-          var sorted = _.extend({}, s, {data: _.sortBy(s.data, fieldName)});
-          seriesCopy.push(sorted);
+        if (sortable) {
+          series = _.sortBy(series, function (s) {
+            return s.data[0].y
           });
-
-        series = seriesCopy;
+        }
       }

       if (!('xAxis' in chartOptions && 'type' in chartOptions['xAxis'])) {
@@ -359,6 +360,23 @@
         });
       }
     }

+    if (chartOptions['sortX'] === true || chartOptions['sortX'] === undefined) {
+      var seriesCopy = [];
+
+      _.each(series, function (s) {
+        // make a copy of series data, so we don't override original.
+        var fieldName = 'x';
+        if (s.data.length > 0 && _.has(s.data[0], 'name')) {
+          fieldName = 'name';
+        };
+
+        var sorted = _.extend({}, s, {data: _.sortBy(s.data, fieldName)});
+        seriesCopy.push(sorted);
+      });
+
+      series = seriesCopy;
+    }
+
     scope.chart.counters.color = 0;
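The new `sortable` branch handles the one-row-per-series case described in the comment: when every series has exactly one point, whole series are reordered by their Y value instead of sorting points within each series. The A/B/C example from the comment, worked in Python:

```python
# Each series has a single data point, as in the comment's example.
series = [{"name": "A", "data": [{"y": 20}]},
          {"name": "B", "data": [{"y": 30}]},
          {"name": "C", "data": [{"y": 15}]}]

if all(len(s["data"]) == 1 for s in series):       # the "sortable" test
    series = sorted(series, key=lambda s: s["data"][0]["y"])

print([s["name"] for s in series])  # -> ['C', 'A', 'B']
```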
@@ -381,7 +381,10 @@
     function sortDataRow(array, column) {
       var sortAlgo = (scope.sortAlgorithm && angular.isFunction(scope.sortAlgorithm)) === true ? scope.sortAlgorithm : filter('orderBy');
       if (column) {
-        return arrayUtility.sort(array, sortAlgo, column.sortPredicate, column.reverse);
+        var predicate = function(o) {
+          return o[column.sortPredicate];
+        };
+        return arrayUtility.sort(array, sortAlgo, predicate, column.reverse);
       } else {
         return array;
       }
@@ -12,6 +12,8 @@

     var columnTypes = {};

+    // TODO: we should stop manipulating incoming data, and switch to relaying on the column type set by the backend.
+    // This logic is prone to errors, and better be removed. Kept for now, for backward compatability.
     _.each(this.query_result.data.rows, function (row) {
       _.each(row, function (v, k) {
         if (angular.isNumber(v)) {
@@ -30,7 +32,9 @@

     _.each(this.query_result.data.columns, function(column) {
       if (columnTypes[column.name]) {
-        column.type = columnTypes[column.name];
+        if (column.type == null || column.type == 'string') {
+          column.type = columnTypes[column.name];
+        }
       }
     });
@@ -308,7 +312,7 @@
       this.filters = filters;
     }

-    var refreshStatus = function (queryResult, query, ttl) {
+    var refreshStatus = function (queryResult, query) {
       Job.get({'id': queryResult.job.id}, function (response) {
         queryResult.update(response);
@@ -318,7 +322,7 @@
           });
         } else if (queryResult.getStatus() != "failed") {
           $timeout(function () {
-            refreshStatus(queryResult, query, ttl);
+            refreshStatus(queryResult, query);
           }, 3000);
         }
       })
@@ -338,14 +342,19 @@
       return this.deferred.promise;
     }

-    QueryResult.get = function (data_source_id, query, ttl) {
+    QueryResult.get = function (data_source_id, query, maxAge, queryId) {
       var queryResult = new QueryResult();

-      QueryResultResource.post({'data_source_id': data_source_id, 'query': query, 'ttl': ttl}, function (response) {
+      var params = {'data_source_id': data_source_id, 'query': query, 'max_age': maxAge};
+      if (queryId !== undefined) {
+        params['query_id'] = queryId;
+      };
+
+      QueryResultResource.post(params, function (response) {
         queryResult.update(response);

         if ('job' in response) {
-          refreshStatus(queryResult, query, ttl);
+          refreshStatus(queryResult, query);
         }
       });
@@ -373,7 +382,7 @@
       return new Query({
         query: "",
         name: "New Query",
-        ttl: -1,
+        schedule: null,
         user: currentUser
       });
     };
@@ -397,10 +406,19 @@
       return '/queries/' + this.id + '/source';
     };

-    Query.prototype.getQueryResult = function (ttl, parameters) {
-      if (ttl == undefined) {
-        ttl = this.ttl;
-      }
+    Query.prototype.hasDailySchedule = function() {
+      return (this.schedule && this.schedule.match(/\d\d:\d\d/) !== null);
+    }
+
+    Query.prototype.scheduleInLocalTime = function() {
+      var parts = this.schedule.split(':');
+      return moment.utc().hour(parts[0]).minute(parts[1]).local().format('HH:mm');
+    }
+
+    Query.prototype.getQueryResult = function (maxAge, parameters) {
+      // if (ttl == undefined) {
+      //   ttl = this.ttl;
+      // }

       var queryText = this.query;
@@ -426,16 +444,16 @@
         this.latest_query_data_id = null;
       }

-      if (this.latest_query_data && ttl != 0) {
+      if (this.latest_query_data && maxAge != 0) {
         if (!this.queryResult) {
           this.queryResult = new QueryResult({'query_result': this.latest_query_data});
         }
-      } else if (this.latest_query_data_id && ttl != 0) {
+      } else if (this.latest_query_data_id && maxAge != 0) {
         if (!this.queryResult) {
           this.queryResult = QueryResult.getById(this.latest_query_data_id);
         }
       } else if (this.data_source_id) {
-        this.queryResult = QueryResult.get(this.data_source_id, queryText, ttl);
+        this.queryResult = QueryResult.get(this.data_source_id, queryText, maxAge, this.id);
       }

       return this.queryResult;
@@ -471,7 +489,12 @@

     var DataSource = function ($resource) {
-      var DataSourceResource = $resource('/api/data_sources/:id', {id: '@id'}, {'get': {'method': 'GET', 'cache': true, 'isArray': true}});
+      var actions = {
+        'get': {'method': 'GET', 'cache': true, 'isArray': true},
+        'getSchema': {'method': 'GET', 'cache': true, 'isArray': true, 'url': '/api/data_sources/:id/schema'}
+      };
+
+      var DataSourceResource = $resource('/api/data_sources/:id', {id: '@id'}, actions);

       return DataSourceResource;
     }
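Throughout this service, the former ttl argument becomes max_age with the convention visible in the branches above: 0 forces a fresh execution, while any non-zero value lets a cached result satisfy the request (the controller defaults to -1, meaning any cached result is acceptable). A sketch of that decision, with a hypothetical cached-result lookup:

```python
def get_query_result(max_age, cached_result=None):
    """max_age semantics inferred from the diff above:
    0  -> always execute; never serve from cache
    -1 -> any cached result will do
    """
    if cached_result is not None and max_age != 0:
        return cached_result          # serve cached data
    return "execute query"            # hypothetical: enqueue a new job

assert get_query_result(-1, cached_result="rows") == "rows"
assert get_query_result(0, cached_result="rows") == "execute query"
```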
@@ -55,6 +55,22 @@
       }];
     };

+    var VisualizationName = function(Visualization) {
+      return {
+        restrict: 'E',
+        scope: {
+          visualization: '='
+        },
+        template: '<small>{{name}}</small>',
+        replace: false,
+        link: function (scope) {
+          if (Visualization.visualizations[scope.visualization.type].name != scope.visualization.name) {
+            scope.name = scope.visualization.name;
+          }
+        }
+      }
+    }
+
     var VisualizationRenderer = function ($location, Visualization) {
       return {
         restrict: 'E',
@@ -72,42 +88,9 @@
           width: '50%'
         };

-        function readURL() {
-          var searchFilters = angular.fromJson($location.search().filters);
-          if (searchFilters) {
-            _.forEach(scope.filters, function(filter) {
-              var value = searchFilters[filter.friendlyName];
-              if (value) {
-                filter.current = value;
-              }
-            });
-          }
-        }
-
-        function updateURL(filters) {
-          var current = {};
-          _.each(filters, function(filter) {
-            if (filter.current) {
-              current[filter.friendlyName] = filter.current;
-            }
-          });
-
-          var newSearch = angular.extend($location.search(), {
-            filters: angular.toJson(current)
-          });
-          $location.search(newSearch);
-        }
-
         scope.$watch('queryResult && queryResult.getFilters()', function (filters) {
           if (filters) {
             scope.filters = filters;
-
-            if (filters.length && false) {
-              readURL();
-
-              // start watching for changes and update URL
-              scope.$watch('filters', updateURL, true);
-            }
           }
         });
       }
@@ -138,7 +121,7 @@
         query: '=',
         queryResult: '=',
         visualization: '=?',
-        openEditor: '=?',
+        openEditor: '@',
         onNewSuccess: '=?'
       },
       link: function (scope, element, attrs) {
@@ -167,9 +150,13 @@
         scope.$watch('visualization.type', function (type, oldType) {
           // if not edited by user, set name to match type
           if (type && oldType != type && scope.visualization && !scope.visForm.name.$dirty) {
-            // poor man's titlecase
-            scope.visualization.name = scope.visualization.type[0] + scope.visualization.type.slice(1).toLowerCase();
+            scope.visualization.name = _.string.titleize(scope.visualization.type);
           }
+
+          if (type && oldType != type && scope.visualization) {
+            scope.visualization.options = Visualization.visualizations[scope.visualization.type].defaultOptions;
+          }
         });

         scope.submit = function () {
@@ -208,6 +195,7 @@
     .provider('Visualization', VisualizationProvider)
     .directive('visualizationRenderer', ['$location', 'Visualization', VisualizationRenderer])
     .directive('visualizationOptionsEditor', ['Visualization', VisualizationOptionsEditor])
+    .directive('visualizationName', ['Visualization', VisualizationName])
     .directive('filters', Filters)
     .directive('editVisulatizationForm', ['Events', 'Visualization', 'growl', EditVisualizationForm])
   })();
@@ -112,9 +112,6 @@

         scope.columnTypes = {
           "X": "x",
-          // "X (Date time)": "x",
-          // "X (Linear)": "x-linear",
-          // "X (Category)": "x-category",
           "Y": "y",
           "Series": "series",
           "Unused": "unused"
@@ -166,7 +163,7 @@
             scope.visualization.options.seriesOptions[s] = {'type': scope.visualization.options.globalSeriesType, 'yAxis': 0};
           }
-          scope.visualization.options.seriesOptions[s].zIndex = scope.visualization.options.seriesOptions[s].zIndex === undefined ? i : scope.visualization.options.seriesOptions[s].zIndex;
+          scope.visualization.options.seriesOptions[s].index = scope.visualization.options.seriesOptions[s].index === undefined ? i : scope.visualization.options.seriesOptions[s].index;
         });
         scope.zIndexes = _.range(scope.series.length);
         scope.yAxes = [[0, 'left'], [1, 'right']];
@@ -227,6 +224,12 @@
           }
         });

+        scope.visualization.options.xAxis = scope.visualization.options.xAxis || {};
+        scope.visualization.options.xAxis.labels = scope.visualization.options.xAxis.labels || {};
+        if (scope.visualization.options.xAxis.labels.enabled === undefined) {
+          scope.visualization.options.xAxis.labels.enabled = true;
+        }
+
         scope.xAxisType = (scope.visualization.options.xAxis && scope.visualization.options.xAxis.type) || scope.xAxisType;

         xAxisUnwatch = scope.$watch("xAxisType", function (xAxisType) {
rd_ui/app/scripts/visualizations/map.js (new file, 238 lines)

@@ -0,0 +1,238 @@
+'use strict';
+
+(function() {
+  var module = angular.module('redash.visualization');
+
+  module.config(['VisualizationProvider', function(VisualizationProvider) {
+    var renderTemplate =
+      '<map-renderer ' +
+      'options="visualization.options" query-result="queryResult">' +
+      '</map-renderer>';
+
+    var editTemplate = '<map-editor></map-editor>';
+    var defaultOptions = {
+      'height': 500,
+      'draw': 'Marker',
+      'classify':'none'
+    };
+
+    VisualizationProvider.registerVisualization({
+      type: 'MAP',
+      name: 'Map',
+      renderTemplate: renderTemplate,
+      editorTemplate: editTemplate,
+      defaultOptions: defaultOptions
+    });
+  }
+  ]);
+
+  module.directive('mapRenderer', function() {
+    return {
+      restrict: 'E',
+      templateUrl: '/views/visualizations/map.html',
+      link: function($scope, elm, attrs) {
+
+        var setBounds = function(){
+          var b = $scope.visualization.options.bounds;
+
+          if(b){
+            $scope.map.fitBounds([[b._southWest.lat, b._southWest.lng],[b._northEast.lat, b._northEast.lng]]);
+          } else if ($scope.features.length > 0){
+            var group= new L.featureGroup($scope.features);
+            $scope.map.fitBounds(group.getBounds());
+          }
+        };
+
+        $scope.$watch('[queryResult && queryResult.getData(), visualization.options.draw,visualization.options.latColName,'+
+          'visualization.options.lonColName,visualization.options.classify,visualization.options.classify]',
+          function() {
+            var marker = function(lat,lon){
+              if (lat == null || lon == null) return;
+
+              return L.marker([lat, lon]);
+            };
+
+            var heatpoint = function(lat,lon,obj){
+              if (lat == null || lon == null) return;
+
+              var color = 'red';
+
+              if (obj &&
+                  obj[$scope.visualization.options.classify] &&
+                  $scope.visualization.options.classification){
+                var v = $.grep($scope.visualization.options.classification,function(e){
+                  return e.value == obj[$scope.visualization.options.classify];
+                });
+                if (v.length >0) color = v[0].color;
+              }
+
+              var style = {
+                fillColor:color,
+                fillOpacity:0.5,
+                stroke:false
+              };
+
+              return L.circleMarker([lat,lon],style)
+            };
+
+            var color = function(val){
+              // taken from http://jsfiddle.net/xgJ2e/2/
+
+              var h= Math.floor((100 - val) * 120 / 100);
+              var s = Math.abs(val - 50)/50;
+              var v = 1;
+
+              var rgb, i, data = [];
+              if (s === 0) {
+                rgb = [v,v,v];
+              } else {
+                h = h / 60;
+                i = Math.floor(h);
+                data = [v*(1-s), v*(1-s*(h-i)), v*(1-s*(1-(h-i)))];
+                switch(i) {
+                  case 0:
+                    rgb = [v, data[2], data[0]];
+                    break;
+                  case 1:
+                    rgb = [data[1], v, data[0]];
+                    break;
+                  case 2:
+                    rgb = [data[0], v, data[2]];
+                    break;
+                  case 3:
+                    rgb = [data[0], data[1], v];
+                    break;
+                  case 4:
+                    rgb = [data[2], data[0], v];
+                    break;
+                  default:
+                    rgb = [v, data[0], data[1]];
+                    break;
+                }
+              }
+              return '#' + rgb.map(function(x){
+                return ("0" + Math.round(x*255).toString(16)).slice(-2);
+              }).join('');
+            };
+
+            // Following line is used to avoid "Couldn't autodetect L.Icon.Default.imagePath" error
+            // https://github.com/Leaflet/Leaflet/issues/766#issuecomment-7741039
+            L.Icon.Default.imagePath = L.Icon.Default.imagePath || "//api.tiles.mapbox.com/mapbox.js/v2.2.1/images";
+
+            function getBounds(e) {
+              $scope.visualization.options.bounds = $scope.map.getBounds();
+            }
+
+            var queryData = $scope.queryResult.getData();
+            var classify = $scope.visualization.options.classify;
+
+            if (queryData) {
+              $scope.visualization.options.classification = [];
+
+              for (var row in queryData) {
+                if (queryData[row][classify] &&
+                    $.grep($scope.visualization.options.classification, function (e) {
+                      return e.value == queryData[row][classify]
+                    }).length == 0) {
+                  $scope.visualization.options.classification.push({value: queryData[row][classify], color: null});
+                }
+              }
+
+              $.each($scope.visualization.options.classification, function (i, c) {
+                c.color = color(parseInt((i / $scope.visualization.options.classification.length) * 100));
+              });
+
+              if (!$scope.map) {
+                $scope.map = L.map(elm[0].children[0].children[0])
+              }
+
+              L.tileLayer('//{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
+                attribution: '© <a href="http://osm.org/copyright">OpenStreetMap</a> contributors'
+              }).addTo($scope.map);
+
+              $scope.features = $scope.features || [];
+
+              var tmp_features = [];
+
+              var lat_col = $scope.visualization.options.latColName || 'lat';
+              var lon_col = $scope.visualization.options.lonColName || 'lon';
+
+              for (var row in queryData) {
+                var feature;
+
+                if ($scope.visualization.options.draw == 'Marker') {
+                  feature = marker(queryData[row][lat_col], queryData[row][lon_col])
+                } else if ($scope.visualization.options.draw == 'Color') {
+                  feature = heatpoint(queryData[row][lat_col], queryData[row][lon_col], queryData[row])
+                }
+
+                if (!feature) continue;
+
+                var obj_description = '<ul style="list-style-type: none;padding-left: 0">';
+                for (var k in queryData[row]){
+                  obj_description += "<li>" + k + ": " + queryData[row][k] + "</li>";
+                }
+                obj_description += '</ul>';
+                feature.bindPopup(obj_description);
+                tmp_features.push(feature);
+              }
+
+              $.each($scope.features, function (i, f) {
+                $scope.map.removeLayer(f);
+              });
+
+              $scope.features = tmp_features;
+
+              $.each($scope.features, function (i, f) {
+                f.addTo($scope.map)
+              });
+
+              setBounds();
+
+              $scope.map.on('focus',function(){
+                $scope.map.on('moveend', getBounds);
+              });
+
+              $scope.map.on('blur',function(){
+                $scope.map.off('moveend', getBounds);
+              });
+
+              // We redraw the map if it was loaded in a hidden tab
+              if ($('a[href="#'+$scope.visualization.id+'"]').length > 0) {
+
+                $('a[href="#'+$scope.visualization.id+'"]').on('click', function () {
+                  setTimeout(function() {
+                    $scope.map.invalidateSize(false);
+
+                    setBounds();
+                  },500);
+                });
+              }
+
+            }
+          }, true);
+
+        $scope.$watch('visualization.options.height', function() {
+
+          if (!$scope.map) return;
+          $scope.map.invalidateSize(false);
+          setBounds();
+
+        });
+      }
+    }
+  });
+
+  module.directive('mapEditor', function() {
+    return {
+      restrict: 'E',
+      templateUrl: '/views/visualizations/map_editor.html',
+      link: function($scope, elm, attrs) {
+        $scope.draw_options = ['Marker','Color'];
+        $scope.classify_columns = $scope.queryResult.columnNames.concat('none');
+      }
+    }
+  });
+
+})();
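The `color()` helper in map.js maps a 0-100 classification value onto a green-to-red hue ramp via HSV and converts the result to a hex RGB string. A direct Python port of the same jsfiddle-derived math:

```python
import math

def ramp_color(val):
    # Port of map.js color(): 0 -> green (hue 120), 100 -> red (hue 0).
    h = math.floor((100 - val) * 120 / 100)
    s = abs(val - 50) / 50.0
    v = 1.0

    if s == 0:
        rgb = [v, v, v]                      # mid-scale values wash out to white
    else:
        h /= 60.0
        i = int(math.floor(h))
        d = [v * (1 - s), v * (1 - s * (h - i)), v * (1 - s * (1 - (h - i)))]
        # Same case table as the JavaScript switch (default folded into index 5).
        rgb = [[v, d[2], d[0]], [d[1], v, d[0]], [d[0], v, d[2]],
               [d[0], d[1], v], [d[2], d[0], v], [v, d[0], d[1]]][min(i, 5)]

    return "#" + "".join("%02x" % round(x * 255) for x in rgb)

print(ramp_color(0), ramp_color(50), ramp_color(100))
# -> #00ff00 #ffffff #ff0000
```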
@@ -14,7 +14,12 @@ a.page-title {
 }

 a.navbar-brand {
   font-style: italic;
+  padding: 5px 5px 0px 0px;
+  margin-left: 0px !important;
 }

+a.navbar-brand img {
+  height: 40px;
+}
+
 .graph {
@@ -92,7 +97,7 @@ a.navbar-brand {
 }

 .panel-heading .query-link:hover {
-  text-decoration: none;
+  text-decoration: underline;
 }

 /* angular-growl */
@@ -308,6 +313,23 @@ counter-renderer counter-name {
   height: 100%;
 }

+.schema-container {
+  height: 300px;
+}
+
+.schema-browser {
+  height: 100%;
+  overflow-y: auto;
+  overflow-x: hidden;
+}
+
+div.table-name {
+  overflow: hidden;
+  text-overflow: ellipsis;
+  white-space: nowrap;
+  cursor: pointer;
+}
+
 /*
 bootstrap's hidden-xs class adds display:block when not hidden
 use this class when you need to keep the original display value
@@ -28,6 +28,7 @@
<p>
  <span ng-hide="currentUser.hasPermission('view_query')">{{query.name}}</span>
  <query-link query="query" visualization="widget.visualization" ng-show="currentUser.hasPermission('view_query')"></query-link>
  <visualization-name visualization="widget.visualization"/>
</p>
<div class="text-muted" ng-bind-html="query.description | markdown"></div>
</h3>
@@ -37,7 +38,7 @@

<div class="panel-footer">
  <span class="label label-default"
        tooltip="next update {{nextUpdateTime}} (query runtime: {{queryResult.getRuntime() | durationHumanize}})"
        tooltip="(query runtime: {{queryResult.getRuntime() | durationHumanize}})"
        tooltip-placement="bottom">Updated: <span am-time-ago="queryResult.getUpdatedAt()"></span></span>

  <span class="pull-right">

@@ -59,9 +59,9 @@

<hr>

<div class="row">
  <div class="col-lg-12">
    <div ng-show="sourceMode">
    <div class="row" ng-if="sourceMode">
      <div ng-class="editorSize">
        <div>
          <p>
            <button type="button" class="btn btn-primary btn-xs" ng-disabled="queryExecuting" ng-click="executeQuery()">
              <span class="glyphicon glyphicon-play"></span> Execute
@@ -77,19 +77,31 @@
            </button>
          </span>
        </p>
      </div>

      <!-- code editor -->
      <div ng-show="sourceMode">
        <p>
          <query-editor query="query" lock="queryFormatting"></query-editor>
          <query-editor query="query" schema="schema" syntax="dataSource.syntax" lock="queryFormatting"></query-editor>
        </p>
        <hr>
      </div>
    </div>
    <div class="col-md-3 schema-container" ng-show="hasSchema">
      <div ng-show="schema.length < 200">
        <input type="text" placeholder="Search schema..." class="form-control" ng-model="schemaFilter">
      </div>
      <div class="schema-browser">
        <div ng-repeat="table in schema | filter:schemaFilter track by table.name">
          <div class="table-name" ng-click="table.collapsed = !table.collapsed">
            <i class="fa fa-table"></i> <strong><span title="{{table.name}}">{{table.name}}</span></strong>
          </div>
          <div collapse="table.collapsed && !schemaFilter">
            <div ng-repeat="column in table.columns track by column" style="padding-left:16px;">{{column}}</div>
          </div>
        </div>
      </div>
    </div>

  </div>

</div>
<hr ng-if="sourceMode">
<div class="row">
  <div class="col-lg-3 rd-hidden-xs">
    <p>
@@ -97,7 +109,7 @@
      <span class="text-muted">Created By </span>
      <strong>{{query.user.name}}</strong>
    </p>
    <p ng-if="query.user.id != query.last_modified_by.id">
    <p ng-if="query.last_modified_by && query.user.id != query.last_modified_by.id">
      <span class="glyphicon glyphicon-user"></span>
      <span class="text-muted">Last Modified By </span>
      <strong>{{query.last_modified_by.name}}</strong>
@@ -121,8 +133,8 @@
    </p>
    <p>
      <span class="glyphicon glyphicon-refresh"></span>
      <span class="text-muted">Refresh Interval</span>
      <query-refresh-select></query-refresh-select>
      <span class="text-muted">Refresh Schedule</span>
      <a href="" ng-click="openScheduleForm()">{{query.schedule | scheduleHumanize}}</a>
    </p>

    <p>
@@ -190,7 +202,7 @@
<rd-tab tab-id="{{vis.id}}" name="{{vis.name}}" ng-if="vis.type!='TABLE'" ng-repeat="vis in query.visualizations">
  <span class="remove" ng-click="deleteVisualization($event, vis)" ng-show="canEdit"> ×</span>
</rd-tab>
<rd-tab tab-id="add" name="+ New" removeable="true" ng-show="canEdit"></rd-tab>
<rd-tab tab-id="add" name="+ New Visualization" removeable="true" ng-show="canEdit"></rd-tab>
<li ng-if="!sourceMode" class="rd-tab-btn"><button class="btn btn-sm btn-default" ng-click="executeQuery()" ng-disabled="queryExecuting" title="Refresh Dataset"><span class="glyphicon glyphicon-refresh"></span></button></li>
</ul>
</div>

rd_ui/app/views/schedule_form.html (new file, 18 lines)
@@ -0,0 +1,18 @@
<div class="modal-header">
  <button type="button" class="close" aria-label="Close" ng-click="close()"><span aria-hidden="true">×</span></button>
  <h4 class="modal-title">Refresh Schedule</h4>
</div>
<div class="modal-body">
  <div class="radio">
    <label>
      <input type="radio" value="periodic" ng-model="refreshType">
      <query-refresh-select ng-disabled="refreshType != 'periodic'"></query-refresh-select>
    </label>
  </div>
  <div class="radio">
    <label>
      <input type="radio" value="daily" ng-model="refreshType">
      <query-time-picker ng-disabled="refreshType != 'daily'"></query-time-picker>
    </label>
  </div>
</div>
@@ -54,6 +54,14 @@
         ng-model="visualization.options.sortX">
  </div>
</div>
<div class="form-group">
  <label class="control-label col-sm-2">Show X Axis Labels</label>

  <div class="col-sm-10">
    <input name="sortX" type="checkbox" class="form-control"
           ng-model="visualization.options.xAxis.labels.enabled">
  </div>
</div>
</div>
</div>

@@ -100,6 +108,15 @@
          class="form-control"></select>
  </div>
</div>
<div class="form-group">
  <label class="control-label col-sm-3">Index</label>

  <div class="col-sm-9">
    <select required ng-model="visualization.options.seriesOptions[seriesName].index"
            ng-options="o as o for o in zIndexes"
            class="form-control"></select>
  </div>
</div>
<div class="form-group">
  <label class="control-label col-sm-3">y Axis</label>

@@ -1,7 +1,7 @@
<div>
  <span ng-click="openEditor=!openEditor" class="details-toggle" ng-class="{open: openEditor}">Edit</span>

  <form ng-if="openEditor" role="form" name="visForm" ng-submit="submit()">
  <form ng-show="openEditor" role="form" name="visForm" ng-submit="submit()">
    <div class="form-group">
      <label class="control-label">Name</label>
      <input name="name" type="text" class="form-control" ng-model="visualization.name" placeholder="{{visualization.type | capitalize}}">

rd_ui/app/views/visualizations/map.html (new file, 3 lines)
@@ -0,0 +1,3 @@
<div style='margin:1%;width:98%;height:{{visualization.options.height}}px'>
  <div style="width:100%; height:100%;"></div>
</div>
rd_ui/app/views/visualizations/map_editor.html (new file, 55 lines)
@@ -0,0 +1,55 @@
<div class="form-horizontal">
  <div class="form-group">
    <label class="col-lg-2">Map height (px)</label>
    <div class="col-sm-4">
      <input class="form-control" type="number" ng-model="visualization.options.height" />
    </div>
  </div>

  <div class="form-group">
    <label class="col-lg-2">Draw option</label>
    <div class="col-sm-4">
      <select ng-options="opt for opt in draw_options" ng-model="visualization.options.draw" class="form-control"></select>
    </div>
  </div>
  <div class="form-group">
    <label class="col-lg-2">Latitude column name</label>
    <div class="col-sm-4">
      <select ng-options="name for name in queryResult.columnNames" ng-model="visualization.options.latColName" class="form-control"></select>
    </div>
  </div>
  <div class="form-group">
    <label class="col-lg-2">Longitude column name</label>
    <div class="col-sm-4">
      <select ng-options="name for name in queryResult.columnNames" ng-model="visualization.options.lonColName" class="form-control"></select>
    </div>
  </div>

  <div ng-show="visualization.options.draw == 'Color'">
    <div class="form-group">
      <label class="col-lg-2">Classify by column</label>
      <div class="col-sm-4">
        <select ng-options="name for name in classify_columns" ng-model="visualization.options.classify" class="form-control"></select>
      </div>
    </div>

    <div class="row">
      <div class="col-lg-6">
        <div ng-repeat="element in visualization.options.classification" class="list-group">
          <div class="list-group-item active">
            {{element.value}}
          </div>

          <div class="list-group-item">
            <div class="form-group">
              <label class="col-lg-4">Color</label>
              <div class="col-sm-4">
                <input class="form-control" style="background-color:{{element.color}};" type="text" ng-model="element.color" />
              </div>
            </div>
          </div>
        </div>
      </div>
    </div>
  </div>
</div>
@@ -12,8 +12,7 @@
    "es5-shim": "2.0.8",
    "angular-moment": "0.2.0",
    "moment": "2.1.0",
    "angular-ui-bootstrap": "0.5.0",
    "angular-ui-codemirror": "0.0.5",
    "codemirror": "4.8.0",
    "highcharts": "3.0.10",
    "underscore": "1.5.1",
    "pivottable": "~1.1.1",
@@ -29,7 +28,9 @@
    "angular-ui-select": "0.8.2",
    "font-awesome": "~4.2.0",
    "mustache": "~1.0.0",
    "canvg": "gabelerner/canvg"
    "canvg": "gabelerner/canvg",
    "angular-ui-bootstrap-bower": "~0.12.1",
    "leaflet": "~0.7.3"
  },
  "devDependencies": {
    "angular-mocks": "1.2.18",

rd_ui/favicon.ico (binary executable file, not shown; size: 1.1 KiB)
@@ -6,7 +6,7 @@ from statsd import StatsClient
from redash import settings
from redash.query_runner import import_query_runners

__version__ = '0.6.0'
__version__ = '0.6.3'


def setup_logging():

redash/admin.py (new file, 116 lines)
@@ -0,0 +1,116 @@
import json
from flask_admin.contrib.peewee import ModelView
from flask.ext.admin import Admin
from flask_admin.contrib.peewee.form import CustomModelConverter
from flask_admin.form.widgets import DateTimePickerWidget
from playhouse.postgres_ext import ArrayField, DateTimeTZField
from wtforms import fields
from wtforms.widgets import TextInput

from redash import models
from redash import query_runner
from redash.permissions import require_permission


class ArrayListField(fields.Field):
    widget = TextInput()

    def _value(self):
        if self.data:
            return u', '.join(self.data)
        else:
            return u''

    def process_formdata(self, valuelist):
        if valuelist:
            self.data = [x.strip() for x in valuelist[0].split(',')]
        else:
            self.data = []


class JSONTextAreaField(fields.TextAreaField):
    def process_formdata(self, valuelist):
        if valuelist:
            try:
                json.loads(valuelist[0])
            except ValueError:
                raise ValueError(self.gettext(u'Invalid JSON'))
            self.data = valuelist[0]
        else:
            self.data = ''

class PasswordHashField(fields.PasswordField):
    def _value(self):
        return u''

    def process_formdata(self, valuelist):
        if valuelist:
            self.data = models.pwd_context.encrypt(valuelist[0])
        else:
            self.data = u''


class PgModelConverter(CustomModelConverter):
    def __init__(self, view, additional=None):
        additional = {ArrayField: self.handle_array_field,
                      DateTimeTZField: self.handle_datetime_tz_field}
        super(PgModelConverter, self).__init__(view, additional)
        self.view = view

    def handle_array_field(self, model, field, **kwargs):
        return field.name, ArrayListField(**kwargs)

    def handle_datetime_tz_field(self, model, field, **kwargs):
        kwargs['widget'] = DateTimePickerWidget()
        return field.name, fields.DateTimeField(**kwargs)


class BaseModelView(ModelView):
    model_form_converter = PgModelConverter

    @require_permission('admin')
    def is_accessible(self):
        return True


class UserModelView(BaseModelView):
    column_searchable_list = ('name', 'email')
    form_excluded_columns = ('created_at', 'updated_at')
    column_exclude_list = ('password_hash',)

    form_overrides = dict(password_hash=PasswordHashField)
    form_args = {
        'password_hash': {'label': 'Password'}
    }


def query_runner_type_formatter(view, context, model, name):
    qr = query_runner.query_runners.get(model.type, None)
    if qr:
        return qr.name()

    return model.type


class DataSourceModelView(BaseModelView):
    form_overrides = dict(type=fields.SelectField, options=JSONTextAreaField)
    form_args = dict(type={
        'choices': [(k, r.name()) for k, r in query_runner.query_runners.iteritems()]
    })
    column_formatters = dict(type=query_runner_type_formatter)
    column_filters = ('type',)


def init_admin(app):
    admin = Admin(app, name='re:dash admin')

    views = {
        models.User: UserModelView(models.User),
        models.DataSource: DataSourceModelView(models.DataSource)
    }

    for m in models.all_models:
        if m in views:
            admin.add_view(views[m])
        else:
            admin.add_view(BaseModelView(m))
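
For reference, a minimal sketch of how this new admin module would be wired into a Flask application. The app-construction lines below are illustrative; only init_admin comes from redash/admin.py above:

# Hypothetical wiring sketch -- the Flask app setup here is an assumption,
# only init_admin() is defined in redash/admin.py above.
from flask import Flask
from redash.admin import init_admin

app = Flask(__name__)
app.secret_key = 'change-me'  # flask-admin forms need a session secret

# Mounts /admin with one ModelView per model; User and DataSource get the
# customized views (password hashing, JSON options editor) defined above.
init_admin(app)
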
@@ -7,7 +7,7 @@ import logging
from flask import request, make_response, redirect, url_for
from flask.ext.login import LoginManager, login_user, current_user, logout_user

from redash import models, settings, google_oauth
from redash import models, settings, google_oauth, saml_auth

login_manager = LoginManager()
logger = logging.getLogger('authentication')
@@ -86,6 +86,7 @@ def setup_authentication(app):
    login_manager.anonymous_user = models.AnonymousUser
    app.secret_key = settings.COOKIE_SECRET
    app.register_blueprint(google_oauth.blueprint)
    app.register_blueprint(saml_auth.blueprint)

    if settings.AUTH_TYPE == 'hmac':
        auth = HMACAuthentication()

@@ -1,6 +1,3 @@
from flask import make_response
from functools import update_wrapper

ONE_YEAR = 60 * 60 * 24 * 365.25

headers = {

@@ -12,7 +12,7 @@ import time
import logging

from flask import render_template, send_from_directory, make_response, request, jsonify, redirect, \
    session, url_for
    session, url_for, current_app
from flask.ext.restful import Resource, abort
from flask_login import current_user, login_user, logout_user
import sqlparse
@@ -23,6 +23,7 @@ from redash.tasks import QueryTask, record_event
from redash.cache import headers as cache_headers
from redash.permissions import require_permission
from redash.query_runner import query_runners, validate_configuration
from redash.monitor import get_status


@app.route('/ping', methods=['GET'])
@@ -66,7 +67,11 @@ def login():
        return redirect(request.args.get('next') or '/')

    if not settings.PASSWORD_LOGIN_ENABLED:
        return redirect(url_for("google_oauth.authorize", next=request.args.get('next')))
        if settings.SAML_LOGIN_ENABLED:
            return redirect(url_for("saml_auth.sp_initiated", next=request.args.get('next')))
        else:
            return redirect(url_for("google_oauth.authorize", next=request.args.get('next')))


    if request.method == 'POST':
        user = models.User.select().where(models.User.email == request.form['username']).first()
@@ -80,8 +85,8 @@ def login():
                           analytics=settings.ANALYTICS,
                           next=request.args.get('next'),
                           username=request.form.get('username', ''),
                           show_google_openid=settings.GOOGLE_OAUTH_ENABLED)

                           show_google_openid=settings.GOOGLE_OAUTH_ENABLED,
                           show_saml_login=settings.SAML_LOGIN_ENABLED)

@app.route('/logout')
def logout():
@@ -94,34 +99,7 @@ def logout():
@auth.required
@require_permission('admin')
def status_api():
    status = {}
    info = redis_connection.info()
    status['redis_used_memory'] = info['used_memory_human']
    status['version'] = __version__
    status['queries_count'] = models.Query.select().count()
    status['query_results_count'] = models.QueryResult.select().count()
    status['unused_query_results_count'] = models.QueryResult.unused().count()
    status['dashboards_count'] = models.Dashboard.select().count()
    status['widgets_count'] = models.Widget.select().count()

    status['workers'] = []

    manager_status = redis_connection.hgetall('redash:status')
    status['manager'] = manager_status
    status['manager']['outdated_queries_count'] = models.Query.outdated_queries().count()

    queues = {}
    for ds in models.DataSource.select():
        for queue in (ds.queue_name, ds.scheduled_queue_name):
            queues.setdefault(queue, set())
            queues[queue].add(ds.name)

    status['manager']['queues'] = {}
    for queue, sources in queues.iteritems():
        status['manager']['queues'][queue] = {
            'data_sources': ', '.join(sources),
            'size': redis_connection.llen(queue)
        }
    status = get_status()

    return jsonify(status)

@@ -148,7 +126,7 @@ def create_query_route():
                         query=query,
                         data_source=data_source_id,
                         user=current_user._get_current_object(),
                         ttl=-1)
                         schedule=None)

    return redirect('/queries/{}'.format(query.id), 303)

@@ -219,10 +197,18 @@ class DataSourceListAPI(BaseResource):

        return datasource.to_dict()


api.add_resource(DataSourceListAPI, '/api/data_sources', endpoint='data_sources')


class DataSourceSchemaAPI(BaseResource):
    def get(self, data_source_id):
        data_source = models.DataSource.get_by_id(data_source_id)
        schema = data_source.get_schema()

        return schema

api.add_resource(DataSourceSchemaAPI, '/api/data_sources/<data_source_id>/schema')

class DashboardRecentAPI(BaseResource):
    def get(self):
        return [d.to_dict() for d in models.Dashboard.recent(current_user.id).limit(20)]
@@ -442,7 +428,7 @@ api.add_resource(VisualizationAPI, '/api/visualizations/<visualization_id>', end
class QueryResultListAPI(BaseResource):
    @require_permission('execute_query')
    def post(self):
        params = request.json
        params = request.get_json(force=True)

        if settings.FEATURE_TABLES_PERMISSIONS:
            metadata = utils.SQLMetaData(params['query'])
@@ -468,16 +454,19 @@ class QueryResultListAPI(BaseResource):
            activity=params['query']
        ).save()

        if params['ttl'] == 0:
        max_age = int(params.get('max_age', -1))

        if max_age == 0:
            query_result = None
        else:
            query_result = models.QueryResult.get_latest(params['data_source_id'], params['query'], int(params['ttl']))
            query_result = models.QueryResult.get_latest(params['data_source_id'], params['query'], max_age)

        if query_result:
            return {'query_result': query_result.to_dict()}
        else:
            data_source = models.DataSource.get_by_id(params['data_source_id'])
            job = QueryTask.add_task(params['query'], data_source)
            query_id = params.get('query_id', 'adhoc')
            job = QueryTask.add_task(params['query'], data_source, metadata={"Username": self.current_user.name, "Query ID": query_id})
            return {'job': job.to_dict()}


@@ -558,7 +547,12 @@ api.add_resource(JobAPI, '/api/jobs/<job_id>', endpoint='job')

@app.route('/<path:filename>')
def send_static(filename):
    return send_from_directory(settings.STATIC_ASSETS_PATH, filename)
    if current_app.debug:
        cache_timeout = 0
    else:
        cache_timeout = None

    return send_from_directory(settings.STATIC_ASSETS_PATH, filename, cache_timeout=cache_timeout)


if __name__ == '__main__':

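
The ttl-to-max_age change above alters the caching contract of the query-results endpoint. A sketch of the client-side semantics follows; the host and route are assumptions for illustration, not shown in this hunk:

# Semantics of max_age in QueryResultListAPI.post, per the diff above:
#   max_age == 0  -> never use a cached result; always enqueue a new job
#   max_age == -1 -> accept the latest cached result, however old (the default)
#   max_age == N  -> reuse a cached result only if it is at most N seconds old
import requests

payload = {
    "query": "SELECT 1",   # illustrative query
    "data_source_id": 1,
    "max_age": 60,
    "query_id": "adhoc",
}
# Host and route below are assumptions for illustration:
r = requests.post("http://localhost:5000/api/query_results", json=payload)
# The response carries either {'query_result': ...} (cache hit)
# or {'job': ...} (a queued QueryTask to poll).
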
@@ -28,7 +28,7 @@ class Importer(object):
    def import_query(self, user, query):
        new_query = self._get_or_create(models.Query, query['id'], name=query['name'],
                                        user=user,
                                        ttl=-1,
                                        schedule=None,
                                        query=query['query'],
                                        query_hash=query['query_hash'],
                                        description=query['description'],

@@ -11,8 +11,10 @@ import peewee
from passlib.apps import custom_app_context as pwd_context
from playhouse.postgres_ext import ArrayField, DateTimeTZField, PostgresqlExtDatabase
from flask.ext.login import UserMixin, AnonymousUserMixin
import psycopg2

from redash import utils, settings
from redash import utils, settings, redis_connection
from redash.query_runner import get_query_runner


class Database(object):
@@ -223,11 +225,11 @@ class ActivityLog(BaseModel):

class DataSource(BaseModel):
    id = peewee.PrimaryKeyField()
    name = peewee.CharField()
    name = peewee.CharField(unique=True)
    type = peewee.CharField()
    options = peewee.TextField()
    queue_name = peewee.CharField(default="queries")
    scheduled_queue_name = peewee.CharField(default="queries")
    scheduled_queue_name = peewee.CharField(default="scheduled_queries")
    created_at = DateTimeTZField(default=datetime.datetime.now)

    class Meta:
@@ -237,9 +239,31 @@ class DataSource(BaseModel):
        return {
            'id': self.id,
            'name': self.name,
            'type': self.type
            'type': self.type,
            'syntax': self.query_runner.syntax
        }

    def get_schema(self, refresh=False):
        key = "data_source:schema:{}".format(self.id)

        cache = None
        if not refresh:
            cache = redis_connection.get(key)

        if cache is None:
            query_runner = self.query_runner
            schema = sorted(query_runner.get_schema(), key=lambda t: t['name'])

            redis_connection.set(key, json.dumps(schema))
        else:
            schema = json.loads(cache)

        return schema

    @property
    def query_runner(self):
        return get_query_runner(self.type, self.options)

    @classmethod
    def all(cls):
        return cls.select().order_by(cls.id.asc())
@@ -278,16 +302,16 @@ class QueryResult(BaseModel):
        return unused_results

    @classmethod
    def get_latest(cls, data_source, query, ttl=0):
    def get_latest(cls, data_source, query, max_age=0):
        query_hash = utils.gen_query_hash(query)

        if ttl == -1:
        if max_age == -1:
            query = cls.select().where(cls.query_hash == query_hash,
                                       cls.data_source == data_source).order_by(cls.retrieved_at.desc())
        else:
            query = cls.select().where(cls.query_hash == query_hash, cls.data_source == data_source,
                                       peewee.SQL("retrieved_at + interval '%s second' >= now() at time zone 'utc'",
                                                  ttl)).order_by(cls.retrieved_at.desc())
                                                  max_age)).order_by(cls.retrieved_at.desc())

        return query.first()

@@ -314,6 +338,27 @@ class QueryResult(BaseModel):
        return u"%d | %s | %s" % (self.id, self.query_hash, self.retrieved_at)


def should_schedule_next(previous_iteration, now, schedule):
    if schedule.isdigit():
        ttl = int(schedule)
        next_iteration = previous_iteration + datetime.timedelta(seconds=ttl)
    else:
        hour, minute = schedule.split(':')
        hour, minute = int(hour), int(minute)

        # The following logic is needed for cases like this one:
        # - The query is scheduled to run at 23:59.
        # - The scheduler wakes up at 00:01.
        # - With a naive comparison of timestamps, the execution would be skipped.
        normalized_previous_iteration = previous_iteration.replace(hour=hour, minute=minute)
        if normalized_previous_iteration > previous_iteration:
            previous_iteration = normalized_previous_iteration - datetime.timedelta(days=1)

        next_iteration = (previous_iteration + datetime.timedelta(days=1)).replace(hour=hour, minute=minute)

    return now > next_iteration


class Query(ModelTimestampsMixin, BaseModel):
    id = peewee.PrimaryKeyField()
    data_source = peewee.ForeignKeyField(DataSource)
@@ -323,11 +368,11 @@ class Query(ModelTimestampsMixin, BaseModel):
    query = peewee.TextField()
    query_hash = peewee.CharField(max_length=32)
    api_key = peewee.CharField(max_length=40)
    ttl = peewee.IntegerField()
    user_email = peewee.CharField(max_length=360, null=True)
    user = peewee.ForeignKeyField(User)
    last_modified_by = peewee.ForeignKeyField(User, null=True, related_name="modified_queries")
    is_archived = peewee.BooleanField(default=False, index=True)
    schedule = peewee.CharField(max_length=10, null=True)

    class Meta:
        db_table = 'queries'
@@ -340,7 +385,7 @@ class Query(ModelTimestampsMixin, BaseModel):
            'description': self.description,
            'query': self.query,
            'query_hash': self.query_hash,
            'ttl': self.ttl,
            'schedule': self.schedule,
            'api_key': self.api_key,
            'is_archived': self.is_archived,
            'updated_at': self.updated_at,
@@ -350,7 +395,7 @@ class Query(ModelTimestampsMixin, BaseModel):

        if with_user:
            d['user'] = self.user.to_dict()
            d['last_modified_by'] = self.last_modified_by.to_dict()
            d['last_modified_by'] = self.last_modified_by.to_dict() if self.last_modified_by is not None else None
        else:
            d['user_id'] = self._data['user']

@@ -366,7 +411,7 @@ class Query(ModelTimestampsMixin, BaseModel):

    def archive(self):
        self.is_archived = True
        self.ttl = -1
        self.schedule = None

        for vis in self.visualizations:
            for w in vis.widgets:
@@ -387,21 +432,19 @@ class Query(ModelTimestampsMixin, BaseModel):

    @classmethod
    def outdated_queries(cls):
        # TODO: this will only find scheduled queries that were executed before. I think this is
        # a reasonable assumption, but worth revisiting.
        outdated_queries_ids = cls.select(
            peewee.Func('first_value', cls.id).over(partition_by=[cls.query_hash, cls.data_source])) \
            .join(QueryResult) \
            .where(cls.ttl > 0,
                   cls.is_archived==False,
                   (QueryResult.retrieved_at +
                    (cls.ttl * peewee.SQL("interval '1 second'"))) <
                   peewee.SQL("(now() at time zone 'utc')"))
        queries = cls.select(cls, QueryResult.retrieved_at, DataSource)\
            .join(QueryResult)\
            .switch(Query).join(DataSource)\
            .where(cls.schedule != None)

        queries = cls.select(cls, DataSource).join(DataSource) \
            .where(cls.id << outdated_queries_ids)
        now = utils.utcnow()
        outdated_queries = {}
        for query in queries:
            if should_schedule_next(query.latest_query_data.retrieved_at, now, query.schedule):
                key = "{}:{}".format(query.query_hash, query.data_source.id)
                outdated_queries[key] = query

        return queries
        return outdated_queries.values()

    @classmethod
    def search(cls, term):

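
A worked example of the midnight wrap-around that the comment in should_schedule_next describes; the timestamps below are chosen for illustration:

import datetime

# Daily schedule "23:59"; the last execution happened shortly after
# midnight on Jan 1, and the scheduler wakes up just past midnight on Jan 2.
previous = datetime.datetime(2015, 1, 1, 0, 30)
now = datetime.datetime(2015, 1, 2, 0, 1)

# Naively, next_iteration would be (previous + 1 day) at 23:59, i.e.
# Jan 2 23:59, silently skipping the Jan 1 23:59 slot.  The normalization
# pulls previous back to Dec 31 23:59, so next_iteration is Jan 1 23:59:
should_schedule_next(previous, now, "23:59")  # True -- the run is not lost
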
redash/monitor.py (new file, 33 lines)
@@ -0,0 +1,33 @@
from redash import redis_connection, models, __version__

def get_status():
    status = {}
    info = redis_connection.info()
    status['redis_used_memory'] = info['used_memory_human']
    status['version'] = __version__
    status['queries_count'] = models.Query.select().count()
    status['query_results_count'] = models.QueryResult.select().count()
    status['unused_query_results_count'] = models.QueryResult.unused().count()
    status['dashboards_count'] = models.Dashboard.select().count()
    status['widgets_count'] = models.Widget.select().count()

    status['workers'] = []

    manager_status = redis_connection.hgetall('redash:status')
    status['manager'] = manager_status
    status['manager']['outdated_queries_count'] = len(models.Query.outdated_queries())

    queues = {}
    for ds in models.DataSource.select():
        for queue in (ds.queue_name, ds.scheduled_queue_name):
            queues.setdefault(queue, set())
            queues[queue].add(ds.name)

    status['manager']['queues'] = {}
    for queue, sources in queues.iteritems():
        status['manager']['queues'][queue] = {
            'data_sources': ', '.join(sources),
            'size': redis_connection.llen(queue)
        }

    return status
@@ -15,6 +15,7 @@ __all__ = [
    'TYPE_STRING',
    'TYPE_DATE',
    'TYPE_FLOAT',
    'SUPPORTED_COLUMN_TYPES',
    'register',
    'get_query_runner',
    'import_query_runners'
@@ -28,10 +29,19 @@ TYPE_STRING = 'string'
TYPE_DATETIME = 'datetime'
TYPE_DATE = 'date'

SUPPORTED_COLUMN_TYPES = set([
    TYPE_INTEGER,
    TYPE_FLOAT,
    TYPE_BOOLEAN,
    TYPE_STRING,
    TYPE_DATETIME,
    TYPE_DATE
])

class BaseQueryRunner(object):
    def __init__(self, configuration):
        jsonschema.validate(configuration, self.configuration_schema())
        self.syntax = 'sql'
        self.configuration = configuration

    @classmethod
@@ -57,6 +67,9 @@ class BaseQueryRunner(object):
    def run_query(self, query):
        raise NotImplementedError()

    def get_schema(self):
        return []

    @classmethod
    def to_dict(cls):
        return {
@@ -101,4 +114,4 @@ def validate_configuration(query_runner_type, configuration_json):

def import_query_runners(query_runner_imports):
    for runner_import in query_runner_imports:
        __import__(runner_import)
        __import__(runner_import)

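
A minimal sketch of a runner built on this base class. The class itself is hypothetical, and it assumes register keys runners by class-level name()/type() defaults the way the built-in runners do:

import json
from redash.query_runner import BaseQueryRunner, register, TYPE_INTEGER

class StaticAnswer(BaseQueryRunner):  # hypothetical example runner
    @classmethod
    def configuration_schema(cls):
        return {'type': 'object', 'properties': {}}

    def run_query(self, query):
        # Runners return a (json_data, error) pair; get_schema() now falls
        # back to the base-class default of [] unless overridden.
        data = {'columns': [{'name': 'answer', 'friendly_name': 'answer',
                             'type': TYPE_INTEGER}],
                'rows': [{'answer': 42}]}
        return json.dumps(data), None

register(StaticAnswer)
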
@@ -5,6 +5,8 @@ import logging
import sys
import time

import requests

from redash.query_runner import *
from redash.utils import JSONEncoder

@@ -15,6 +17,7 @@ try:
    from apiclient.discovery import build
    from apiclient.errors import HttpError
    from oauth2client.client import SignedJwtAssertionCredentials
    from oauth2client import gce

    enabled = True
except ImportError:
@@ -66,18 +69,6 @@ def _load_key(filename):
        f.close()


def _get_bigquery_service(service_account, private_key):
    scope = [
        "https://www.googleapis.com/auth/bigquery",
    ]

    credentials = SignedJwtAssertionCredentials(service_account, private_key, scope=scope)
    http = httplib2.Http()
    http = credentials.authorize(http)

    return build("bigquery", "v2", http=http)


def _get_query_results(jobs, project_id, job_id, start_index):
    query_reply = jobs.getQueryResults(projectId=project_id, jobId=job_id, startIndex=start_index).execute()
    logging.debug('query_reply %s', query_reply)
@@ -117,11 +108,23 @@ class BigQuery(BaseQueryRunner):
    def __init__(self, configuration_json):
        super(BigQuery, self).__init__(configuration_json)

        self.private_key = _load_key(self.configuration["privateKey"])
    def _get_bigquery_service(self):
        scope = [
            "https://www.googleapis.com/auth/bigquery",
        ]

        private_key = _load_key(self.configuration["privateKey"])
        credentials = SignedJwtAssertionCredentials(self.configuration['serviceAccount'], private_key, scope=scope)
        http = httplib2.Http()
        http = credentials.authorize(http)

        return build("bigquery", "v2", http=http)

    def _get_project_id(self):
        return self.configuration["projectId"]

    def run_query(self, query):
        bigquery_service = _get_bigquery_service(self.configuration["serviceAccount"],
                                                 self.private_key)
        bigquery_service = self._get_bigquery_service()

        jobs = bigquery_service.jobs()
        job_data = {
@@ -134,13 +137,13 @@ class BigQuery(BaseQueryRunner):

        logger.debug("BigQuery got query: %s", query)

        project_id = self.configuration["projectId"]
        project_id = self._get_project_id()

        try:
            insert_response = jobs.insert(projectId=project_id, body=job_data).execute()
            current_row = 0
            query_reply = _get_query_results(jobs, project_id=project_id,
                                             job_id=insert_response['jobReference']['jobId'], start_index=current_row)
                                             job_id=insert_response['jobReference']['jobId'], start_index=current_row)

            logger.debug("bigquery replied: %s", query_reply)

@@ -176,4 +179,26 @@ class BigQuery(BaseQueryRunner):

        return json_data, error

register(BigQuery)

class BigQueryGCE(BigQuery):
    @classmethod
    def type(cls):
        return "bigquery_gce"

    @classmethod
    def configuration_schema(cls):
        return {}

    def _get_project_id(self):
        return requests.get('http://metadata/computeMetadata/v1/project/project-id', headers={'Metadata-Flavor': 'Google'}).content

    def _get_bigquery_service(self):
        credentials = gce.AppAssertionCredentials(scope='https://www.googleapis.com/auth/bigquery')
        http = httplib2.Http()
        http = credentials.authorize(http)

        return build("bigquery", "v2", http=http)


register(BigQuery)
register(BigQueryGCE)
redash/query_runner/elasticsearch.py (new file, 258 lines)
@@ -0,0 +1,258 @@
import datetime
import json
import logging
import sys
import urllib

from redash.query_runner import *
from redash import models

import requests
import dateutil
from dateutil.parser import parse

try:
    import http.client as http_client
except ImportError:
    # Python 2
    import httplib as http_client

logger = logging.getLogger(__name__)

ELASTICSEARCH_TYPES_MAPPING = {
    "integer" : TYPE_INTEGER,
    "long" : TYPE_INTEGER,
    "float" : TYPE_FLOAT,
    "double" : TYPE_FLOAT,
    "boolean" : TYPE_BOOLEAN,
    "string" : TYPE_STRING,
    "date" : TYPE_DATE,
    # "geo_point" TODO: Need to split to 2 fields somehow
}

PYTHON_TYPES_MAPPING = {
    str: TYPE_STRING,
    unicode: TYPE_STRING,
    bool : TYPE_BOOLEAN,
    int : TYPE_INTEGER,
    long: TYPE_INTEGER,
    float: TYPE_FLOAT
}

#
# ElasticSearch currently supports only simple Lucene style queries (like Kibana
# but without the aggregation).
#
# Full blown JSON based ElasticSearch queries (including aggregations) will be
# added later
#
# Simple query example:
#
# - Query the index named "twitter"
# - Filter by "user:kimchy"
# - Return the fields: "@timestamp", "tweet" and "user"
# - Return up to 15 results
# - Sort by @timestamp ascending
#
# {
#     "index" : "twitter",
#     "query" : "user:kimchy",
#     "fields" : ["@timestamp", "tweet", "user"],
#     "size" : 15,
#     "sort" : "@timestamp:asc"
# }
#
#
# Simple query on a logstash ElasticSearch instance:
#
# - Query the index named "logstash-2015.04.*" (in this case it's all of April 2015)
# - Filter by type:events AND eventName:UserUpgrade AND channel:selfserve
# - Return fields: "@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"
# - Return up to 250 results
# - Sort by @timestamp ascending

# {
#     "index" : "logstash-2015.04.*",
#     "query" : "type:events AND eventName:UserUpgrade AND channel:selfserve",
#     "fields" : ["@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"],
#     "size" : 250,
#     "sort" : "@timestamp:asc"
# }
#
#

class ElasticSearch(BaseQueryRunner):
    DEBUG_ENABLED = False

    """
    ElasticSearch query runner for querying ElasticSearch servers.
    Queries can be written using the Lucene syntax (single line) or the more complex,
    full blown ElasticSearch JSON syntax
    """
    @classmethod
    def configuration_schema(cls):
        return {
            'type': 'object',
            'properties': {
                'server': {
                    'type': 'string'
                }
            },
            "required" : ["server"]
        }

    @classmethod
    def enabled(cls):
        return True

    @classmethod
    def annotate_query(cls):
        return False

    def __init__(self, configuration_json):
        super(ElasticSearch, self).__init__(configuration_json)

        self.syntax = "json"

        if self.DEBUG_ENABLED:
            http_client.HTTPConnection.debuglevel = 1

            # you need to initialize logging, otherwise you will not see anything from requests
            logging.basicConfig()
            logging.getLogger().setLevel(logging.DEBUG)
            requests_log = logging.getLogger("requests.packages.urllib3")
            requests_log.setLevel(logging.DEBUG)
            requests_log.propagate = True

    def get_mappings(self, url):
        mappings = {}

        r = requests.get(url)
        mappings_data = r.json()
        for index_name in mappings_data:
            index_mappings = mappings_data[index_name]
            for m in index_mappings.get("mappings", {}):
                for property_name in index_mappings["mappings"][m]["properties"]:
                    property_data = index_mappings["mappings"][m]["properties"][property_name]
                    if not property_name in mappings:
                        property_type = property_data.get("type", None)
                        if property_type:
                            if property_type in ELASTICSEARCH_TYPES_MAPPING:
                                mappings[property_name] = property_type
                            else:
                                raise Exception("Unknown property type: {0}".format(property_type))

        return mappings

    def parse_results(self, mappings, result_fields, raw_result, result_columns, result_rows):
        result_columns_index = {}
        for c in result_columns:
            result_columns_index[c["name"]] = c

        result_fields_index = {}
        if result_fields:
            for r in result_fields:
                result_fields_index[r] = None

        for h in raw_result["hits"]["hits"]:
            row = {}
            for column in h["_source"]:
                if result_fields and column not in result_fields_index:
                    continue

                if column not in result_columns_index:
                    result_columns.append({
                        "name" : column,
                        "friendly_name" : column,
                        "type" : mappings.get(column, "string")
                    })
                    result_columns_index[column] = result_columns[-1]

                row[column] = h["_source"][column]

            if row and len(row) > 0:
                result_rows.append(row)

    def execute_simple_query(self, url, _from, mappings, result_fields, result_columns, result_rows):
        url += "&from={0}".format(_from)
        r = requests.get(url)
        if r.status_code != 200:
            raise Exception("Failed to execute query. Return Code: {0} Reason: {1}".format(r.status_code, r.text))

        raw_result = r.json()

        self.parse_results(mappings, result_fields, raw_result, result_columns, result_rows)

        total = raw_result["hits"]["total"]
        result_size = len(raw_result["hits"]["hits"])
        logger.debug("Result Size: {0} Total: {1}".format(result_size, total))

        return raw_result["hits"]["total"]

    def run_query(self, query):
        try:
            error = None

            logger.debug(query)
            query_params = json.loads(query)

            index_name = query_params["index"]
            query_data = query_params["query"]
            size = int(query_params.get("size", 500))
            result_fields = query_params.get("fields", None)
            sort = query_params.get("sort", None)

            server_url = self.configuration["server"]
            if not server_url:
                error = "Missing configuration key 'server'"
                return None, error


            if server_url[-1] == "/":
                server_url = server_url[:-1]

            url = "{0}/{1}/_search?".format(server_url, index_name)
            mapping_url = "{0}/{1}/_mapping".format(server_url, index_name)

            mappings = self.get_mappings(mapping_url)

            logger.debug(json.dumps(mappings, indent=4))

            if size:
                url += "&size={0}".format(size)

            if sort:
                url += "&sort={0}".format(urllib.quote_plus(sort))

            url += "&q={0}".format(urllib.quote_plus(query_data))

            logger.debug("Using URL: {0}".format(url))
            logger.debug("Using Query: {0}".format(query_data))

            result_columns = []
            result_rows = []
            if isinstance(query_data, str) or isinstance(query_data, unicode):
                _from = 0
                while True:
                    total = self.execute_simple_query(url, _from, mappings, result_fields, result_columns, result_rows)
                    _from += size
                    if _from >= total:
                        break
            else:
                # TODO: Handle complete ElasticSearch queries (JSON based sent over HTTP POST)
                raise Exception("Advanced queries are not supported")

            json_data = json.dumps({
                "columns" : result_columns,
                "rows" : result_rows
            })
        except KeyboardInterrupt:
            error = "Query cancelled by user."
            json_data = None
        except Exception as e:
            raise sys.exc_info()[1], None, sys.exc_info()[2]

        return json_data, error


register(ElasticSearch)
@@ -12,6 +12,7 @@ logger = logging.getLogger(__name__)
try:
    import pymongo
    from bson.objectid import ObjectId
    from bson.son import SON
    enabled = True

except ImportError:
@@ -32,24 +33,74 @@ TYPES_MAP = {

date_regex = re.compile("ISODate\(\"(.*)\"\)", re.IGNORECASE)

class MongoDBJSONEncoder(JSONEncoder):
    def default(self, o):
        if isinstance(o, ObjectId):
            return str(o)

def _get_column_by_name(columns, column_name):
    for c in columns:
        if "name" in c and c["name"] == column_name:
            return c

    return None


def _convert_date(q, field_name):
    m = date_regex.findall(q[field_name])
    if len(m) > 0:
        if q[field_name].find(":") == -1:
            q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d")))
        else:
            q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d %H:%M")))
        return super(MongoDBJSONEncoder, self).default(o)


# Simple query example:
#
# {
#     "collection" : "my_collection",
#     "query" : {
#         "date" : {
#             "$gt" : "ISODate(\"2015-01-15 11:41\")",
#         },
#         "type" : 1
#     },
#     "fields" : {
#         "_id" : 1,
#         "name" : 2
#     },
#     "sort" : [
#         {
#             "name" : "date",
#             "direction" : -1
#         }
#     ]
#
# }
#
#
# Aggregation
# ===========
# Uses a syntax similar to the one used in PyMongo, however to support the
# correct order of sorting, it uses a regular list for the "$sort" operation
# that converts into a SON (sorted dictionary) object before execution.
#
# Aggregation query example:
#
# {
#     "collection" : "things",
#     "aggregate" : [
#         {
#             "$unwind" : "$tags"
#         },
#         {
#             "$group" : {
#                 "_id" : "$tags",
#                 "count" : { "$sum" : 1 }
#             }
#         },
#         {
#             "$sort" : [
#                 {
#                     "name" : "count",
#                     "direction" : -1
#                 },
#                 {
#                     "name" : "_id",
#                     "direction" : -1
#                 }
#             ]
#         }
#     ]
# }
#
#
class MongoDB(BaseQueryRunner):
    @classmethod
    def configuration_schema(cls):
@@ -68,8 +119,8 @@ class MongoDB(BaseQueryRunner):
                'type': 'string',
                'title': 'Replica Set Name'
            },
            'required': ['connectionString']
        }
            },
            'required': ['connectionString']
        }

    @classmethod
@@ -83,10 +134,28 @@ class MongoDB(BaseQueryRunner):
    def __init__(self, configuration_json):
        super(MongoDB, self).__init__(configuration_json)

        self.syntax = 'json'

        self.db_name = self.configuration["dbName"]

        self.is_replica_set = True if "replicaSetName" in self.configuration and self.configuration["replicaSetName"] else False

    def _get_column_by_name(self, columns, column_name):
        for c in columns:
            if "name" in c and c["name"] == column_name:
                return c

        return None


    def _convert_date(self, q, field_name):
        m = date_regex.findall(q[field_name])
        if len(m) > 0:
            if q[field_name].find(":") == -1:
                q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d")))
            else:
                q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d %H:%M")))

    def run_query(self, query):
        if self.is_replica_set:
            db_connection = pymongo.MongoReplicaSetClient(self.configuration["connectionString"], replicaSet=self.configuration["replicaSetName"])
@@ -117,21 +186,42 @@ class MongoDB(BaseQueryRunner):
        for k in q:
            if q[k] and type(q[k]) in [str, unicode]:
                logging.debug(q[k])
                _convert_date(q, k)
                self._convert_date(q, k)
            elif q[k] and type(q[k]) is dict:
                for k2 in q[k]:
                    if type(q[k][k2]) in [str, unicode]:
                        _convert_date(q[k], k2)
                        self._convert_date(q[k], k2)

        f = None

        aggregate = None
        if "aggregate" in query_data:
            aggregate = query_data["aggregate"]
            for step in aggregate:
                if "$sort" in step:
                    sort_list = []
                    for sort_item in step["$sort"]:
                        sort_list.append((sort_item["name"], sort_item["direction"]))

                    step["$sort"] = SON(sort_list)

        if aggregate:
            pass
        else:
            s = None
            if "sort" in query_data and query_data["sort"]:
                s = []
                for field in query_data["sort"]:
                    s.append((field["name"], field["direction"]))

        if "fields" in query_data:
            f = query_data["fields"]

        s = None
        if "sort" in query_data and query_data["sort"]:
            s = []
            for field_name in query_data["sort"]:
                s.append((field_name, query_data["sort"][field_name]))
            for field_data in query_data["sort"]:
                s.append((field_data["name"], field_data["direction"]))

        columns = []
        rows = []
@@ -139,30 +229,38 @@ class MongoDB(BaseQueryRunner):
        error = None
        json_data = None

        if s:
            cursor = db[collection].find(q, f).sort(s)
        else:
            cursor = db[collection].find(q, f)
        cursor = None
        if q or (not q and not aggregate):
            if s:
                cursor = db[collection].find(q, f).sort(s)
            else:
                cursor = db[collection].find(q, f)

            if "skip" in query_data:
                cursor = cursor.skip(query_data["skip"])

            if "limit" in query_data:
                cursor = cursor.limit(query_data["limit"])

        elif aggregate:
            r = db[collection].aggregate(aggregate)
            cursor = r["result"]

        for r in cursor:
            for k in r:
                if _get_column_by_name(columns, k) is None:
                if self._get_column_by_name(columns, k) is None:
                    columns.append({
                        "name": k,
                        "friendly_name": k,
                        "type": TYPES_MAP.get(type(r[k]), TYPE_STRING)
                    })

                # Convert ObjectId to string
                if type(r[k]) == ObjectId:
                    r[k] = str(r[k])

            rows.append(r)

        if f:
            ordered_columns = []
            for k in sorted(f, key=f.get):
                ordered_columns.append(_get_column_by_name(columns, k))
                ordered_columns.append(self._get_column_by_name(columns, k))

            columns = ordered_columns

@@ -171,8 +269,8 @@ class MongoDB(BaseQueryRunner):
            "rows": rows
        }
        error = None
        json_data = json.dumps(data, cls=JSONEncoder)
        json_data = json.dumps(data, cls=MongoDBJSONEncoder)

        return json_data, error

register(MongoDB)
register(MongoDB)
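
For reference, a short sketch of why the $sort stage above is converted from a list into a SON before aggregate runs; the collection and field names are illustrative:

from bson.son import SON

# JSON objects do not guarantee key order, so the query format expresses
# $sort as a list of {name, direction} pairs; SON is an ordered dict, so
# MongoDB sees the sort keys in the intended order.
pipeline = [
    {"$unwind": "$tags"},
    {"$group": {"_id": "$tags", "count": {"$sum": 1}}},
    {"$sort": SON([("count", -1), ("_id", -1)])},
]
# db["things"].aggregate(pipeline), as in run_query above.
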
@@ -7,6 +7,24 @@ from redash.query_runner import *

logger = logging.getLogger(__name__)

types_map = {
    0: TYPE_FLOAT,
    1: TYPE_INTEGER,
    2: TYPE_INTEGER,
    3: TYPE_INTEGER,
    4: TYPE_FLOAT,
    5: TYPE_FLOAT,
    7: TYPE_DATETIME,
    8: TYPE_INTEGER,
    9: TYPE_INTEGER,
    10: TYPE_DATE,
    12: TYPE_DATETIME,
    15: TYPE_STRING,
    16: TYPE_INTEGER,
    246: TYPE_FLOAT,
    253: TYPE_STRING,
    254: TYPE_STRING,
}

class Mysql(BaseQueryRunner):
    @classmethod
@@ -27,7 +45,10 @@ class Mysql(BaseQueryRunner):
                'db': {
                    'type': 'string',
                    'title': 'Database name'
                }
                },
                "port": {
                    "type": "number"
                },
            },
            'required': ['db']
        }
@@ -44,13 +65,49 @@ class Mysql(BaseQueryRunner):
    def __init__(self, configuration_json):
        super(Mysql, self).__init__(configuration_json)

    def get_schema(self):
        query = """
        SELECT col.table_schema,
               col.table_name,
               col.column_name
        FROM `information_schema`.`columns` col
        INNER JOIN
          (SELECT table_schema,
                  TABLE_NAME
           FROM information_schema.tables
           WHERE table_type <> 'SYSTEM VIEW' AND table_schema NOT IN ('performance_schema', 'mysql')) tables ON tables.table_schema = col.table_schema
        AND tables.TABLE_NAME = col.TABLE_NAME;
        """

        results, error = self.run_query(query)

        if error is not None:
            raise Exception("Failed getting schema.")

        results = json.loads(results)

        schema = {}
        for row in results['rows']:
            if row['table_schema'] != self.configuration['db']:
                table_name = '{}.{}'.format(row['table_schema'], row['table_name'])
            else:
                table_name = row['table_name']

            if table_name not in schema:
                schema[table_name] = {'name': table_name, 'columns': []}

            schema[table_name]['columns'].append(row['column_name'])

        return schema.values()

    def run_query(self, query):
        import MySQLdb

        connection = MySQLdb.connect(self.configuration.get('host', ''),
                                     self.configuration.get('user', ''),
                                     self.configuration.get('passwd', ''),
                                     self.configuration['db'],
        connection = MySQLdb.connect(host=self.configuration.get('host', ''),
                                     user=self.configuration.get('user', ''),
                                     passwd=self.configuration.get('passwd', ''),
                                     db=self.configuration['db'],
                                     port=self.configuration.get('port', 3306),
                                     charset='utf8', use_unicode=True)
        cursor = connection.cursor()

@@ -61,17 +118,15 @@ class Mysql(BaseQueryRunner):

        data = cursor.fetchall()

        cursor_desc = cursor.description
        if cursor_desc is not None:
            num_fields = len(cursor_desc)
            column_names = [i[0] for i in cursor.description]
        # TODO - very similar to pg.py
        if cursor.description is not None:
            columns_data = [(i[0], i[1]) for i in cursor.description]

            rows = [dict(zip(column_names, row)) for row in data]
            rows = [dict(zip((c[0] for c in columns_data), row)) for row in data]

            # TODO: add types support
            columns = [{'name': col_name,
                        'friendly_name': col_name,
                        'type': None} for col_name in column_names]
            columns = [{'name': col[0],
                        'friendly_name': col[0],
                        'type': types_map.get(col[1], None)} for col in columns_data]

            data = {'columns': columns, 'rows': rows}
            json_data = json.dumps(data, cls=JSONEncoder)

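
For reference, the new get_schema returns a list of per-table dicts shaped roughly as follows; the table and column names here are made up:

# Illustrative return value of Mysql.get_schema() -- names are hypothetical:
[
    {'name': 'users', 'columns': ['id', 'email', 'created_at']},
    {'name': 'stats.events', 'columns': ['id', 'user_id', 'ts']},
]
# Tables outside the configured 'db' keep a schema prefix; the schema
# browser added to the query page renders one entry per table.
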
@@ -83,6 +83,34 @@ class PostgreSQL(BaseQueryRunner):

        self.connection_string = " ".join(values)

    def get_schema(self):
        query = """
        SELECT table_schema, table_name, column_name
        FROM information_schema.columns
        WHERE table_schema NOT IN ('pg_catalog', 'information_schema');
        """

        results, error = self.run_query(query)

        if error is not None:
            raise Exception("Failed getting schema.")

        results = json.loads(results)

        schema = {}
        for row in results['rows']:
            if row['table_schema'] != 'public':
                table_name = '{}.{}'.format(row['table_schema'], row['table_name'])
            else:
                table_name = row['table_name']

            if table_name not in schema:
                schema[table_name] = {'name': table_name, 'columns': []}

            schema[table_name]['columns'].append(row['column_name'])

        return schema.values()

    def run_query(self, query):
        connection = psycopg2.connect(self.connection_string, async=True)
        _wait(connection)

@@ -1,9 +1,46 @@
import sys
import json
import logging

from redash.query_runner import *
from redash import models

import importlib

logger = logging.getLogger(__name__)

from RestrictedPython import compile_restricted
from RestrictedPython.Guards import safe_builtins

ALLOWED_MODULES = {}


def custom_write(obj):
"""
Custom hooks which control the way objects/lists/tuples/dicts behave in
RestrictedPython
"""
return obj


def custom_import(name, globals=None, locals=None, fromlist=(), level=0):
if name in ALLOWED_MODULES:
m = None
if ALLOWED_MODULES[name] is None:
m = importlib.import_module(name)
ALLOWED_MODULES[name] = m
else:
m = ALLOWED_MODULES[name]

return m

raise Exception("'{0}' is not configured as a supported import module".format(name))

def custom_get_item(obj, key):
return obj[key]

def custom_get_iter(obj):
return iter(obj)

def get_query_result(query_id):
try:
@@ -20,11 +57,14 @@ def get_query_result(query_id):
return json.loads(query.latest_query_data.data)


def execute_query(data_source_name, query):
def execute_query(data_source_name_or_id, query):
try:
data_source = models.DataSource.get(models.DataSource.name==data_source_name)
if type(data_source_name_or_id) == int:
data_source = models.DataSource.get_by_id(data_source_name_or_id)
else:
data_source = models.DataSource.get(models.DataSource.name==data_source_name_or_id)
except models.DataSource.DoesNotExist:
raise Exception("Wrong data source name: %s." % data_source_name)
raise Exception("Wrong data source name/id: %s." % data_source_name_or_id)

query_runner = get_query_runner(data_source.type, data_source.options)

@@ -36,6 +76,28 @@ def execute_query(data_source_name, query):
return json.loads(data)


def add_result_column(result, column_name, friendly_name, column_type):
""" Helper function to add columns inside a Python script running in re:dash in an easier way """
if column_type not in SUPPORTED_COLUMN_TYPES:
raise Exception("'{0}' is not a supported column type".format(column_type))

if not "columns" in result:
result["columns"] = []

result["columns"].append({
"name" : column_name,
"friendly_name" : friendly_name,
"type" : column_type
})


def add_result_row(result, values):
if not "rows" in result:
result["rows"] = []

result["rows"].append(values)

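Together with get_query_result and execute_query, these helpers let a script running in the Python data source assemble its result incrementally. A hypothetical user query, assuming TYPE_INTEGER is among SUPPORTED_COLUMN_TYPES (the runner pre-seeds 'result' in script_locals, see run_query below):

# Hypothetical script a user might save as a Python data source query.
add_result_column(result, "count", "Count", TYPE_INTEGER)
add_result_row(result, {"count": 42})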
class Python(BaseQueryRunner):
"""
This is very, very unsafe. Use at your own risk with people you really trust.
@@ -45,24 +107,67 @@ class Python(BaseQueryRunner):
return {
'type': 'object',
'properties': {
}
'allowedImportModules': {
'type': 'string',
'title': 'Modules to import prior to running the script'
}
},
}

@classmethod
def enabled(cls):
return True

@classmethod
def annotate_query(cls):
return False

def __init__(self, configuration_json):
global ALLOWED_MODULES

super(Python, self).__init__(configuration_json)

self.syntax = "python"

if self.configuration.get("allowedImportModules", None):
for item in self.configuration["allowedImportModules"].split(","):
ALLOWED_MODULES[item] = None

def run_query(self, query):
try:
error = None

script_globals = {'get_query_result': get_query_result, 'execute_query': execute_query}
script_locals = {'result': None}
# TODO: timeout, sandboxing
exec query in script_globals, script_locals
code = compile_restricted(query, '<string>', 'exec')

safe_builtins["_write_"] = custom_write
safe_builtins["__import__"] = custom_import
safe_builtins["_getattr_"] = getattr
safe_builtins["getattr"] = getattr
safe_builtins["_setattr_"] = setattr
safe_builtins["setattr"] = setattr
safe_builtins["_getitem_"] = custom_get_item
safe_builtins["_getiter_"] = custom_get_iter

script_locals = { "result" : { "rows" : [], "columns" : [] } }

restricted_globals = dict(__builtins__=safe_builtins)
restricted_globals["get_query_result"] = get_query_result
restricted_globals["execute_query"] = execute_query
restricted_globals["add_result_column"] = add_result_column
restricted_globals["add_result_row"] = add_result_row

restricted_globals["TYPE_DATETIME"] = TYPE_DATETIME
restricted_globals["TYPE_BOOLEAN"] = TYPE_BOOLEAN
restricted_globals["TYPE_INTEGER"] = TYPE_INTEGER
restricted_globals["TYPE_STRING"] = TYPE_STRING
restricted_globals["TYPE_DATE"] = TYPE_DATE
restricted_globals["TYPE_FLOAT"] = TYPE_FLOAT

# TODO: Figure out the best way to have a timeout on a script
# One option is to use ETA with Celery + timeouts on workers
# And replacement of worker process every X requests handled.

exec(code) in restricted_globals, script_locals

if script_locals['result'] is None:
raise Exception("result wasn't set to value.")
@@ -76,4 +181,5 @@ class Python(BaseQueryRunner):

return json_data, error


register(Python)

@@ -30,7 +30,7 @@ class Script(BaseQueryRunner):
def __init__(self, configuration_json):
super(Script, self).__init__(configuration_json)

# Poor man's protection against running scripts from output the scripts directory
# Poor man's protection against running scripts from outside the scripts directory
if self.configuration["path"].find("../") > -1:
raise ValidationError("Scripts can only be run from the configured scripts directory")

@@ -41,11 +41,13 @@ class Script(BaseQueryRunner):

query = query.strip()

script = os.path.join(self.configuration["path"], query)
script = os.path.join(self.configuration["path"], query.split(" ")[0])
if not os.path.exists(script):
return None, "Script '%s' not found in script directory" % query

output = subprocess.check_output(script, shell=False)
script = os.path.join(self.configuration["path"], query)

output = subprocess.check_output(script.split(" "), shell=False)
if output is not None:
output = output.strip()
if output != "":
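The change above validates only the first whitespace-separated token as the script path, then passes the full string, split on spaces, as the argv for subprocess. For example, assuming configuration["path"] is /opt/scripts and the query text is "report.sh --days 7":

# existence check:  /opt/scripts/report.sh
# subprocess argv:  ["/opt/scripts/report.sh", "--days", "7"]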
@@ -62,4 +64,4 @@ class Script(BaseQueryRunner):

return json_data, error

register(Script)
register(Script)

redash/saml_auth.py (new file, 145 lines)
@@ -0,0 +1,145 @@
# Copyright 2015 Okta, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from flask.ext.login import login_user
import requests
from flask import redirect, url_for, Blueprint, request
from flask_oauth import OAuth
from redash import models, settings
from saml2 import (
BINDING_HTTP_POST,
BINDING_HTTP_REDIRECT,
entity,
)
from saml2.client import Saml2Client
from saml2.config import Config as Saml2Config


logger = logging.getLogger('saml_auth')

blueprint = Blueprint('saml_auth', __name__)

def get_saml_client():
'''
Return saml configuration.
The configuration is a hash for use by saml2.config.Config
'''

if settings.SAML_CALLBACK_SERVER_NAME:
acs_url=settings.SAML_CALLBACK_SERVER_NAME + url_for("saml_auth.idp_initiated")
else:
acs_url = url_for("saml_auth.idp_initiated",_external=True)

# NOTE:
# Ideally, this should fetch the metadata and pass it to
# PySAML2 via the "inline" metadata type.
# However, this method doesn't seem to work on PySAML2 v2.4.0
#
# SAML metadata changes very rarely. On a production system,
# this data should be cached as appropriate for your production system.
rv = requests.get(settings.SAML_METADATA_URL)
import tempfile
tmp = tempfile.NamedTemporaryFile()
f = open(tmp.name, 'w')
f.write(rv.text)
f.close()

saml_settings = {
'metadata': {
# 'inline': metadata,
"local": [tmp.name]
},
'service': {
'sp': {
'endpoints': {
'assertion_consumer_service': [
(acs_url, BINDING_HTTP_REDIRECT),
(acs_url, BINDING_HTTP_POST)
],
},
# Don't verify that the incoming requests originate from us via
# the built-in cache for authn request ids in pysaml2
'allow_unsolicited': True,
# Don't sign authn requests, since signed requests only make
# sense in a situation where you control both the SP and IdP
'authn_requests_signed': False,
'logout_requests_signed': True,
'want_assertions_signed': True,
'want_response_signed': False,
},
},
}
spConfig = Saml2Config()
spConfig.load(saml_settings)
spConfig.allow_unknown_attributes = True
saml_client = Saml2Client(config=spConfig)
tmp.close()
return saml_client

@blueprint.route("/saml/callback", methods=['POST'])
def idp_initiated():
saml_client = get_saml_client()
authn_response = saml_client.parse_authn_request_response(
request.form['SAMLResponse'],
entity.BINDING_HTTP_POST)
authn_response.get_identity()
user_info = authn_response.get_subject()
email = user_info.text
name = "%s %s" % (authn_response.ava['FirstName'][0], authn_response.ava['LastName'][0])

# This is what is known as "Just In Time (JIT) provisioning".
# What that means is that, if a user in a SAML assertion
# isn't in the user store, we create that user first, then log them in
try:
user_object = models.User.get(models.User.email == email)
if user_object.name != name:
logger.debug("Updating user name (%r -> %r)", user_object.name, name)
user_object.name = name
user_object.save()
except models.User.DoesNotExist:
logger.debug("Creating user object (%r)", name)
user_object = models.User.create(name=name, email=email, groups=models.User.DEFAULT_GROUPS)

login_user(user_object, remember=True)
url = url_for('index')

return redirect(url)

@blueprint.route("/saml/login")
def sp_initiated():
if not settings.SAML_METADATA_URL:
logger.error("Cannot invoke saml endpoint without metadata url in settings.")
return redirect(url_for('index'))

saml_client = get_saml_client()
reqid, info = saml_client.prepare_for_authenticate()

redirect_url = None
# Select the IdP URL to send the AuthN request to
for key, value in info['headers']:
if key is 'Location':
redirect_url = value
response = redirect(redirect_url, code=302)
# NOTE:
# I realize I _technically_ don't need to set Cache-Control or Pragma:
# http://stackoverflow.com/a/5494469
# However, Section 3.2.3.2 of the SAML spec suggests they are set:
# http://docs.oasis-open.org/security/saml/v2.0/saml-bindings-2.0-os.pdf
# We set those headers here as a "belt and suspenders" approach,
# since enterprise environments don't always conform to RFCs
response.headers['Cache-Control'] = 'no-cache, no-store'
response.headers['Pragma'] = 'no-cache'
return response
@@ -66,6 +66,10 @@ GOOGLE_CLIENT_ID = os.environ.get("REDASH_GOOGLE_CLIENT_ID", "")
GOOGLE_CLIENT_SECRET = os.environ.get("REDASH_GOOGLE_CLIENT_SECRET", "")
GOOGLE_OAUTH_ENABLED = GOOGLE_CLIENT_ID and GOOGLE_CLIENT_SECRET

SAML_METADATA_URL = os.environ.get("REDASH_SAML_METADATA_URL", "")
SAML_LOGIN_ENABLED = SAML_METADATA_URL != ""
SAML_CALLBACK_SERVER_NAME = os.environ.get("REDASH_SAML_CALLBACK_SERVER_NAME", "")

STATIC_ASSETS_PATH = fix_assets_path(os.environ.get("REDASH_STATIC_ASSETS_PATH", "../rd_ui/app/"))
JOB_EXPIRY_TIME = int(os.environ.get("REDASH_JOB_EXPIRY_TIME", 3600 * 6))
COOKIE_SECRET = os.environ.get("REDASH_COOKIE_SECRET", "c292a0a3aa32397cdb050e233733900f")

@@ -1,11 +1,10 @@
import time
import datetime
import logging
import redis
from celery import Task
from celery.result import AsyncResult
from celery.utils.log import get_task_logger
from redash import redis_connection, models, statsd_client, settings
from redash import redis_connection, models, statsd_client, settings, utils
from redash.utils import gen_query_hash
from redash.worker import celery
from redash.query_runner import get_query_runner
@@ -47,12 +46,13 @@ class QueryTask(object):
return self._async_result.id

@classmethod
def add_task(cls, query, data_source, scheduled=False):
def add_task(cls, query, data_source, scheduled=False, metadata={}):
query_hash = gen_query_hash(query)
logging.info("[Manager][%s] Inserting job", query_hash)
logging.info("[Manager] Metadata: [%s]", metadata)
try_count = 0
job = None


while try_count < cls.MAX_RETRIES:
try_count += 1

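One caveat worth noting about the new signature: metadata={} is a mutable default argument, so callers that omit it share a single dict instance, and execute_query later writes task-specific keys into the dict it receives. The usual defensive idiom, should that ever matter here, is:

def add_task(cls, query, data_source, scheduled=False, metadata=None):
    # Create a fresh dict per call instead of sharing one default instance.
    if metadata is None:
        metadata = {}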
@@ -77,8 +77,9 @@ class QueryTask(object):
else:
queue_name = data_source.queue_name

result = execute_query.apply_async(args=(query, data_source.id), queue=queue_name)
result = execute_query.apply_async(args=(query, data_source.id, metadata), queue=queue_name)
job = cls(async_result=result)

logging.info("[Manager][%s] Created new job: %s", query_hash, job.id)
pipe.set(cls._job_lock_id(query_hash, data_source.id), job.id, settings.JOB_EXPIRY_TIME)
pipe.execute()
@@ -146,8 +147,8 @@ def refresh_queries():

outdated_queries_count = 0
for query in models.Query.outdated_queries():
# TODO: this should go into lower priority
QueryTask.add_task(query.query, query.data_source, scheduled=True)
QueryTask.add_task(query.query, query.data_source, scheduled=True,
metadata={'Query ID': query.id, 'Username': 'Scheduled'})
outdated_queries_count += 1

statsd_client.gauge('manager.outdated_queries', outdated_queries_count)
@@ -197,9 +198,9 @@ def cleanup_tasks():
logger.warning("%s is ready (%s), removing lock.", lock_keys[i], t.celery_status)
redis_connection.delete(lock_keys[i])

if t.celery_status == 'STARTED' and t.id not in all_tasks:
logger.warning("Couldn't find active job for: %s, removing lock.", lock_keys[i])
redis_connection.delete(lock_keys[i])
# if t.celery_status == 'STARTED' and t.id not in all_tasks:
# logger.warning("Couldn't find active job for: %s, removing lock.", lock_keys[i])
# redis_connection.delete(lock_keys[i])


@celery.task(base=BaseTask)
@@ -218,9 +219,19 @@ def cleanup_query_results():
logger.info("Deleted %d unused query results out of total of %d." % (deleted_count, total_unused_query_results))


@celery.task(base=BaseTask)
def refresh_schemas():
"""
Refreshes the schema of each data source.
"""

for ds in models.DataSource.all():
logger.info("Refreshing schema for: {}".format(ds.name))
ds.get_schema(refresh=True)


@celery.task(bind=True, base=BaseTask, track_started=True)
def execute_query(self, query, data_source_id):
# TODO: maybe this should be a class?
def execute_query(self, query, data_source_id, metadata):
start_time = time.time()

logger.info("Loading data source (%d)...", data_source_id)
@@ -236,9 +247,15 @@ def execute_query(self, query, data_source_id):
query_runner = get_query_runner(data_source.type, data_source.options)

if query_runner.annotate_query():
# TODO: annotate with queue name
annotated_query = "/* Task Id: %s, Query hash: %s */ %s" % \
(self.request.id, query_hash, query)
metadata['Task ID'] = self.request.id
metadata['Query Hash'] = query_hash
metadata['Queue'] = self.request.delivery_info['routing_key']

annotation = u", ".join([u"{}: {}".format(k, v) for k, v in metadata.iteritems()])

logging.debug(u"Annotation: %s", annotation)

annotated_query = u"/* {} */ {}".format(annotation, query)
else:
annotated_query = query

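With this change the annotation comment is built from the whole metadata dict rather than a fixed pair of fields, so a scheduled query ends up annotated roughly like this (identifiers illustrative):

# /* Query ID: 12, Username: Scheduled, Task ID: 4f6c2b..., Query Hash: 1b2c3d..., Queue: scheduled_queries */ SELECT ...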
@@ -254,7 +271,7 @@ def execute_query(self, query, data_source_id):
redis_connection.delete(QueryTask._job_lock_id(query_hash, data_source.id))

if not error:
query_result = models.QueryResult.store_result(data_source.id, query_hash, query, data, run_time, datetime.datetime.utcnow())
query_result = models.QueryResult.store_result(data_source.id, query_hash, query, data, run_time, utils.utcnow())
else:
raise Exception(error)


@@ -7,6 +7,7 @@ import json
import re
import hashlib
import sqlparse
import pytz

COMMENTS_REGEX = re.compile("/\*.*?\*/")

@@ -62,6 +63,14 @@ class SQLMetaData(object):
return False


def utcnow():
"""Return datetime.now value with timezone specified.

Without timezone data, when the timestamp is stored to the database it is assigned
the server's current timezone, which leads to errors in calculations.
"""
return datetime.datetime.now(pytz.utc)

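The distinction matters because naive and aware datetimes cannot be compared. A quick illustration:

import datetime
import pytz

naive = datetime.datetime.utcnow()       # tzinfo is None
aware = datetime.datetime.now(pytz.utc)  # tzinfo is UTC
naive < aware  # raises TypeError: can't compare offset-naive and offset-aware datetimes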
def slugify(s):
return re.sub('[^a-z0-9_\-]+', '-', s.lower())

@@ -86,9 +95,9 @@ class JSONEncoder(json.JSONEncoder):
if isinstance(o, decimal.Decimal):
return float(o)

if isinstance(o, datetime.date):
if isinstance(o, (datetime.date, datetime.time, datetime.timedelta)):
return o.isoformat()


super(JSONEncoder, self).default(o)


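With the broadened isinstance check, date, datetime, and time values all serialize via isoformat(). For example:

import datetime
import json

json.dumps({'d': datetime.date(2015, 6, 1)}, cls=JSONEncoder)  # '{"d": "2015-06-01"}'
json.dumps({'t': datetime.time(10, 30)}, cls=JSONEncoder)      # '{"t": "10:30:00"}'

(Note that datetime.timedelta has no isoformat() method, so timedelta values would still raise; in practice only date and time gain coverage from this change.)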
@@ -128,4 +137,4 @@ class UnicodeWriter:

def writerows(self, rows):
for row in rows:
self.writerow(row)
self.writerow(row)

@@ -15,6 +15,10 @@ celery_schedule = {
'cleanup_tasks': {
'task': 'redash.tasks.cleanup_tasks',
'schedule': timedelta(minutes=5)
},
'refresh_schemas': {
'task': 'redash.tasks.refresh_schemas',
'schedule': timedelta(minutes=30)
}
}

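These entries use Celery's celerybeat schedule format: a task name mapped to a periodic schedule. They take effect once the dict is applied to the Celery app configuration; a sketch of the wiring, using Celery 3.x setting names and assuming this is how redash.worker consumes celery_schedule:

celery.conf.update(CELERYBEAT_SCHEDULE=celery_schedule,
                   CELERY_TIMEZONE='UTC')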
@@ -4,6 +4,8 @@ from flask.ext.restful import Api

from redash import settings, utils
from redash.models import db
from redash.admin import init_admin


__version__ = '0.4.0'

@@ -14,6 +16,7 @@ app = Flask(__name__,


api = Api(app)
init_admin(app)

# configure our database
settings.DATABASE_CONFIG.update({'threadlocals': True})

@@ -1,4 +1,5 @@
Flask==0.10.1
Flask-Admin==1.1.0
Flask-RESTful==0.2.10
Flask-Login==0.2.9
Flask-OAuth==0.12
@@ -14,7 +15,7 @@ psycopg2==2.5.2
python-dateutil==2.1
pytz==2013.9
redis==2.7.5
requests==2.2.0
requests==2.3.0
six==1.5.2
sqlparse==0.1.8
wsgiref==0.1.2
@@ -25,3 +26,7 @@ gunicorn==18.0
celery==3.1.11
jsonschema==2.4.0
click==3.3
RestrictedPython==3.6.0
wtf-peewee==0.2.3
pysaml2==2.4.0
pycrypto==2.6.1
@@ -98,10 +98,8 @@ if [ ! -f "/opt/redash/.env" ]; then
fi

# Install latest version
# REDASH_VERSION=${REDASH_VERSION-0.4.0.b589}
# modified by @fedex1 3/15/2015 seems to be the latest version at this point in time.
REDASH_VERSION=${REDASH_VERSION-0.6.0.b722}
LATEST_URL="https://github.com/EverythingMe/redash/releases/download/v${REDASH_VERSION/.b/%2Bb}/redash.$REDASH_VERSION.tar.gz"
REDASH_VERSION=${REDASH_VERSION-0.6.2.b887}
LATEST_URL="https://github.com/EverythingMe/redash/releases/download/v${REDASH_VERSION}/redash.$REDASH_VERSION.tar.gz"
VERSION_DIR="/opt/redash/redash.$REDASH_VERSION"
REDASH_TARBALL=/tmp/redash.tar.gz
REDASH_TARBALL=/tmp/redash.tar.gz

@@ -20,8 +20,12 @@ autorestart=true
stdout_logfile=/opt/redash/logs/api.log
stderr_logfile=/opt/redash/logs/api_error.log

# There are two queue types here: one for ad-hoc queries, and one for the refresh of scheduled queries
# (note that "scheduled_queries" appears only in the queue list of "redash_celery_scheduled").
# The default concurrency level for each is 2 (-c2); you can increase it based on your machine's resources.

[program:redash_celery]
command=/opt/redash/current/bin/run celery worker --app=redash.worker --beat -Qqueries,celery,scheduled_queries
command=/opt/redash/current/bin/run celery worker --app=redash.worker --beat -c2 -Qqueries,celery
process_name=redash_celery
numprocs=1
priority=999
@@ -29,3 +33,13 @@ autostart=true
autorestart=true
stdout_logfile=/opt/redash/logs/celery.log
stderr_logfile=/opt/redash/logs/celery_error.log

[program:redash_celery_scheduled]
command=/opt/redash/current/bin/run celery worker --app=redash.worker -c2 -Qscheduled_queries
process_name=redash_celery_scheduled
numprocs=1
priority=999
autostart=true
autorestart=true
stdout_logfile=/opt/redash/logs/celery.log
stderr_logfile=/opt/redash/logs/celery_error.log

@@ -1,13 +1,17 @@
import os
os.environ['REDASH_REDIS_URL'] = "redis://localhost:6379/5"

import logging
from unittest import TestCase
import datetime
from redash import settings

settings.DATABASE_CONFIG = {
'name': 'circle_test',
'threadlocals': True
}

from redash import models
from redash import models, redis_connection

logging.getLogger('peewee').setLevel(logging.INFO)

@@ -20,6 +24,7 @@ class BaseTestCase(TestCase):
def tearDown(self):
models.db.close_db(None)
models.create_db(False, True)
redis_connection.flushdb()

def assertResponseEqual(self, expected, actual):
for k, v in expected.iteritems():

@@ -1,6 +1,5 @@
import datetime
import redash.models
from redash.utils import gen_query_hash
from redash.utils import gen_query_hash, utcnow


class ModelFactory(object):
@@ -45,9 +44,9 @@ user_factory = ModelFactory(redash.models.User,


data_source_factory = ModelFactory(redash.models.DataSource,
name='Test',
name=Sequence('Test {}'),
type='pg',
options='')
options='{"dbname": "test"}')


dashboard_factory = ModelFactory(redash.models.Dashboard,
@@ -58,15 +57,15 @@ query_factory = ModelFactory(redash.models.Query,
name='New Query',
description='',
query='SELECT 1',
ttl=-1,
user=user_factory.create,
is_archived=False,
schedule=None,
data_source=data_source_factory.create)

query_result_factory = ModelFactory(redash.models.QueryResult,
data='{"columns":{}, "rows":[]}',
runtime=1,
retrieved_at=datetime.datetime.utcnow,
retrieved_at=utcnow,
query="SELECT 1",
query_hash=gen_query_hash('SELECT 1'),
data_source=data_source_factory.create)
@@ -83,4 +82,4 @@ widget_factory = ModelFactory(redash.models.Widget,
width=1,
options='{}',
dashboard=dashboard_factory.create,
visualization=visualization_factory.create)
visualization=visualization_factory.create)

@@ -241,7 +241,7 @@ class QueryAPITest(BaseTestCase, AuthenticationTestMixin):
query_data = {
'name': 'Testing',
'query': 'SELECT 1',
'ttl': 3600,
'schedule': "3600",
'data_source_id': data_source.id
}

@@ -319,6 +319,17 @@ class QueryResultAPITest(BaseTestCase, AuthenticationTestMixin):
self.paths = []
super(QueryResultAPITest, self).setUp()

def test_post_result_list(self):
data_source = data_source_factory.create()
query_result = query_result_factory.create()
query = query_factory.create()

with app.test_client() as c, authenticated_user(c):
rv = json_request(c.post, '/api/query_results',
data={'data_source_id': data_source.id,
'query': query.query})
self.assertEquals(rv.status_code, 200)


class JobAPITest(BaseTestCase, AuthenticationTestMixin):
def setUp(self):

@@ -1,10 +1,12 @@
#encoding: utf8
import datetime
import json
from unittest import TestCase
import mock
from tests import BaseTestCase
from redash import models
from factories import dashboard_factory, query_factory, data_source_factory, query_result_factory, user_factory, widget_factory
from redash.utils import gen_query_hash
from redash.utils import gen_query_hash, utcnow


class DashboardTest(BaseTestCase):
@@ -79,19 +81,88 @@ class QueryTest(BaseTestCase):
self.assertNotEqual(q.updated_at, one_day_ago)


class ShouldScheduleNextTest(TestCase):
def test_interval_schedule_that_needs_reschedule(self):
now = datetime.datetime.now()
two_hours_ago = now - datetime.timedelta(hours=2)
self.assertTrue(models.should_schedule_next(two_hours_ago, now, "3600"))

def test_interval_schedule_that_doesnt_need_reschedule(self):
now = datetime.datetime.now()
half_an_hour_ago = now - datetime.timedelta(minutes=30)
self.assertFalse(models.should_schedule_next(half_an_hour_ago, now, "3600"))

def test_exact_time_that_needs_reschedule(self):
now = datetime.datetime.now()
yesterday = now - datetime.timedelta(days=1)
schedule = "{:02d}:00".format(now.hour - 3)
self.assertTrue(models.should_schedule_next(yesterday, now, schedule))

def test_exact_time_that_doesnt_need_reschedule(self):
now = datetime.datetime.now()
yesterday = (now - datetime.timedelta(days=1)).replace(hour=now.hour+3, minute=now.minute+1)
schedule = "{:02d}:00".format(now.hour + 3)
self.assertFalse(models.should_schedule_next(yesterday, now, schedule))

def test_exact_time_with_day_change(self):
now = datetime.datetime.now().replace(hour=0, minute=1)
previous = (now - datetime.timedelta(days=2)).replace(hour=23, minute=59)
schedule = "23:59".format(now.hour + 3)
self.assertTrue(models.should_schedule_next(previous, now, schedule))


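These tests pin down the two schedule formats: a plain interval in seconds ("3600") and a daily wall-clock time ("HH:MM"). A sketch of the predicate they exercise, assuming this is roughly what models.should_schedule_next does (the shipped implementation may differ):

def should_schedule_next(previous_iteration, now, schedule):
    if ":" not in schedule:
        # Interval schedule: a second count such as "3600".
        ttl = int(schedule)
        return previous_iteration + datetime.timedelta(seconds=ttl) <= now
    # "HH:MM" schedule: fires once a day at the given wall-clock time.
    hour, minute = (int(x) for x in schedule.split(":"))
    next_iteration = (previous_iteration + datetime.timedelta(days=1)).replace(hour=hour, minute=minute)
    return now > next_iteration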
class QueryOutdatedQueriesTest(BaseTestCase):
# TODO: this test can be refactored to use mock version of should_schedule_next to simplify it.
def test_outdated_queries_skips_unscheduled_queries(self):
query = query_factory.create(schedule=None)
queries = models.Query.outdated_queries()

self.assertNotIn(query, queries)

def test_outdated_queries_works_with_ttl_based_schedule(self):
two_hours_ago = datetime.datetime.now() - datetime.timedelta(hours=2)
query = query_factory.create(schedule="3600")
query_result = query_result_factory.create(query=query, retrieved_at=two_hours_ago)
query.latest_query_data = query_result
query.save()

queries = models.Query.outdated_queries()
self.assertIn(query, queries)

def test_skips_fresh_queries(self):
half_an_hour_ago = datetime.datetime.now() - datetime.timedelta(minutes=30)
query = query_factory.create(schedule="3600")
query_result = query_result_factory.create(query=query, retrieved_at=half_an_hour_ago)
query.latest_query_data = query_result
query.save()

queries = models.Query.outdated_queries()
self.assertNotIn(query, queries)

def test_outdated_queries_works_with_specific_time_schedule(self):
half_an_hour_ago = utcnow() - datetime.timedelta(minutes=30)
query = query_factory.create(schedule=half_an_hour_ago.strftime('%H:%M'))
query_result = query_result_factory.create(query=query, retrieved_at=half_an_hour_ago-datetime.timedelta(days=1))
query.latest_query_data = query_result
query.save()

queries = models.Query.outdated_queries()
self.assertIn(query, queries)


class QueryArchiveTest(BaseTestCase):
def setUp(self):
super(QueryArchiveTest, self).setUp()

def test_archive_query_sets_flag(self):
query = query_factory.create(ttl=1)
query = query_factory.create()
query.archive()
query = models.Query.get_by_id(query.id)

self.assertEquals(query.is_archived, True)

def test_archived_query_doesnt_return_in_all(self):
query = query_factory.create(ttl=1)
query = query_factory.create(schedule="1")
yesterday = datetime.datetime.now() - datetime.timedelta(days=1)
query_result = models.QueryResult.store_result(query.data_source.id, query.query_hash, query.query, "1",
123, yesterday)
@@ -116,15 +187,53 @@ class QueryArchiveTest(BaseTestCase):
self.assertRaises(models.Widget.DoesNotExist, models.Widget.get_by_id, widget.id)

def test_removes_scheduling(self):
query = query_factory.create(ttl=1)
query = query_factory.create(schedule="1")

query.archive()

query = models.Query.get_by_id(query.id)

self.assertEqual(-1, query.ttl)
self.assertEqual(None, query.schedule)


class DataSourceTest(BaseTestCase):
def test_get_schema(self):
return_value = [{'name': 'table', 'columns': []}]

with mock.patch('redash.query_runner.pg.PostgreSQL.get_schema') as patched_get_schema:
patched_get_schema.return_value = return_value

ds = data_source_factory.create()
schema = ds.get_schema()

self.assertEqual(return_value, schema)

def test_get_schema_uses_cache(self):
return_value = [{'name': 'table', 'columns': []}]
with mock.patch('redash.query_runner.pg.PostgreSQL.get_schema') as patched_get_schema:
patched_get_schema.return_value = return_value

ds = data_source_factory.create()
ds.get_schema()
schema = ds.get_schema()

self.assertEqual(return_value, schema)
self.assertEqual(patched_get_schema.call_count, 1)

def test_get_schema_skips_cache_with_refresh_true(self):
return_value = [{'name': 'table', 'columns': []}]
with mock.patch('redash.query_runner.pg.PostgreSQL.get_schema') as patched_get_schema:
patched_get_schema.return_value = return_value

ds = data_source_factory.create()
ds.get_schema()
new_return_value = [{'name': 'new_table', 'columns': []}]
patched_get_schema.return_value = new_return_value
schema = ds.get_schema(refresh=True)

self.assertEqual(new_return_value, schema)
self.assertEqual(patched_get_schema.call_count, 2)


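The three tests above fix the caching contract of DataSource.get_schema(): the first call hits the query runner, subsequent calls are served from cache, and refresh=True bypasses the cache. A sketch of an implementation that satisfies them, assuming the schema is cached in Redis keyed by data source id (the commit's actual storage may differ):

def get_schema(self, refresh=False):
    key = "data_source:schema:{}".format(self.id)
    cache = None if refresh else redis_connection.get(key)
    if cache is None:
        # Cache miss (or forced refresh): ask the query runner and store the result.
        query_runner = get_query_runner(self.type, self.options)
        schema = query_runner.get_schema()
        redis_connection.set(key, json.dumps(schema))
    else:
        schema = json.loads(cache)
    return schema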
class QueryResultTest(BaseTestCase):
def setUp(self):
@@ -158,7 +267,7 @@ class QueryResultTest(BaseTestCase):
yesterday = datetime.datetime.now() - datetime.timedelta(days=1)
qr = query_result_factory.create(retrieved_at=yesterday)

found_query_result = models.QueryResult.get_latest(qr.data_source, qr.query, ttl=60)
found_query_result = models.QueryResult.get_latest(qr.data_source, qr.query, max_age=60)

self.assertIsNone(found_query_result)

@@ -166,7 +275,7 @@ class QueryResultTest(BaseTestCase):
yesterday = datetime.datetime.now() - datetime.timedelta(seconds=30)
qr = query_result_factory.create(retrieved_at=yesterday)

found_query_result = models.QueryResult.get_latest(qr.data_source, qr.query, ttl=120)
found_query_result = models.QueryResult.get_latest(qr.data_source, qr.query, max_age=120)

self.assertEqual(found_query_result, qr)

@@ -216,7 +325,7 @@ class TestQueryResultStoreResult(BaseTestCase):
self.query = "SELECT 1"
self.query_hash = gen_query_hash(self.query)
self.runtime = 123
self.utcnow = datetime.datetime.utcnow()
self.utcnow = utcnow()
self.data = "data"

def test_stores_the_result(self):

@@ -1,7 +1,8 @@
import datetime
from mock import patch, call
from mock import patch, call, ANY
from tests import BaseTestCase
from tests.factories import query_factory, query_result_factory
from redash.utils import utcnow
from redash.tasks import refresh_queries


@@ -10,8 +11,8 @@ from redash.tasks import refresh_queries
# 2. test for the refresh_query task
class TestRefreshQueries(BaseTestCase):
def test_enqueues_outdated_queries(self):
query = query_factory.create(ttl=60)
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
query = query_factory.create(schedule="60")
retrieved_at = utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
query.latest_query_data = query_result
@@ -19,11 +20,11 @@ class TestRefreshQueries(BaseTestCase):

with patch('redash.tasks.QueryTask.add_task') as add_job_mock:
refresh_queries()
add_job_mock.assert_called_with(query.query, query.data_source, scheduled=True)
add_job_mock.assert_called_with(query.query, query.data_source, scheduled=True, metadata=ANY)

def test_skips_fresh_queries(self):
query = query_factory.create(ttl=1200)
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
query = query_factory.create(schedule="1200")
retrieved_at = utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)

@@ -32,8 +33,8 @@ class TestRefreshQueries(BaseTestCase):
self.assertFalse(add_job_mock.called)

def test_skips_queries_with_no_ttl(self):
query = query_factory.create(ttl=-1)
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
query = query_factory.create(schedule=None)
retrieved_at = utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)

@@ -42,10 +43,10 @@ class TestRefreshQueries(BaseTestCase):
self.assertFalse(add_job_mock.called)

def test_enqueues_query_only_once(self):
query = query_factory.create(ttl=60)
query2 = query_factory.create(ttl=60, query=query.query, query_hash=query.query_hash,
query = query_factory.create(schedule="60")
query2 = query_factory.create(schedule="60", query=query.query, query_hash=query.query_hash,
data_source=query.data_source)
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
retrieved_at = utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
query.latest_query_data = query_result
@@ -55,12 +56,12 @@ class TestRefreshQueries(BaseTestCase):

with patch('redash.tasks.QueryTask.add_task') as add_job_mock:
refresh_queries()
add_job_mock.assert_called_once_with(query.query, query.data_source, scheduled=True)
add_job_mock.assert_called_once_with(query.query, query.data_source, scheduled=True, metadata=ANY)#{'Query ID': query.id, 'Username': 'Scheduled'})

def test_enqueues_query_with_correct_data_source(self):
query = query_factory.create(ttl=60)
query2 = query_factory.create(ttl=60, query=query.query, query_hash=query.query_hash)
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
query = query_factory.create(schedule="60")
query2 = query_factory.create(schedule="60", query=query.query, query_hash=query.query_hash)
retrieved_at = utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
query.latest_query_data = query_result
@@ -70,13 +71,16 @@ class TestRefreshQueries(BaseTestCase):

with patch('redash.tasks.QueryTask.add_task') as add_job_mock:
refresh_queries()
add_job_mock.assert_has_calls([call(query2.query, query2.data_source, scheduled=True), call(query.query, query.data_source, scheduled=True)], any_order=True)
add_job_mock.assert_has_calls([call(query2.query, query2.data_source, scheduled=True, metadata=ANY),
call(query.query, query.data_source, scheduled=True, metadata=ANY)],
any_order=True)
self.assertEquals(2, add_job_mock.call_count)

def test_enqueues_only_for_relevant_data_source(self):
query = query_factory.create(ttl=60)
query2 = query_factory.create(ttl=3600, query=query.query, query_hash=query.query_hash)
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
query = query_factory.create(schedule="60")
query2 = query_factory.create(schedule="3600", query=query.query, query_hash=query.query_hash)
import psycopg2
retrieved_at = utcnow().replace(tzinfo=psycopg2.tz.FixedOffsetTimezone(offset=0, name=None)) - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
query.latest_query_data = query_result
@@ -86,4 +90,4 @@ class TestRefreshQueries(BaseTestCase):

with patch('redash.tasks.QueryTask.add_task') as add_job_mock:
refresh_queries()
add_job_mock.assert_called_once_with(query.query, query.data_source, scheduled=True)
add_job_mock.assert_called_once_with(query.query, query.data_source, scheduled=True, metadata=ANY)