Mirror of https://github.com/getredash/redash.git
Compare commits: v0.5.0+b67 ... v0.7.0-rc (323 commits)
Commits: c7d30c8b87 through e113642ae4 (323 total; only the SHA1 column of the commit table was captured, author/date/message are not shown).
.env.example (15 changed lines)
@@ -1,9 +1,6 @@
REDASH_CONNECTION_ADAPTER=pg
REDASH_CONNECTION_STRING="dbname=data"
REDASH_STATIC_ASSETS_PATH=../rd_ui/app/
REDASH_GOOGLE_APPS_DOMAIN=
REDASH_ADMINS=
REDASH_WORKERS_COUNT=2
REDASH_COOKIE_SECRET=
REDASH_DATABASE_URL='postgresql://rd'
REDASH_LOG_LEVEL = "INFO"
export REDASH_STATIC_ASSETS_PATH="../rd_ui/app/"
export REDASH_LOG_LEVEL="INFO"
export REDASH_REDIS_URL=redis://localhost:6379/1
export REDASH_DATABASE_URL="postgresql://redash"
export REDASH_COOKIE_SECRET=veryverysecret
export REDASH_GOOGLE_APPS_DOMAIN=
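
For orientation, a minimal sketch of how environment variables like the ones above are typically read on the Python side; the default values here are illustrative assumptions, not re:dash's actual defaults:

    import os

    # Defaults below are placeholders for illustration only.
    REDASH_REDIS_URL = os.environ.get("REDASH_REDIS_URL", "redis://localhost:6379/1")
    REDASH_DATABASE_URL = os.environ.get("REDASH_DATABASE_URL", "postgresql://redash")
    REDASH_LOG_LEVEL = os.environ.get("REDASH_LOG_LEVEL", "INFO")
    REDASH_STATIC_ASSETS_PATH = os.environ.get("REDASH_STATIC_ASSETS_PATH", "../rd_ui/app/")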
Makefile (7 changed lines)
@@ -1,6 +1,7 @@
NAME=redash
VERSION=`python ./manage.py version`
FULL_VERSION=$(VERSION)+b$(CIRCLE_BUILD_NUM)
BASE_VERSION=$(shell python ./manage.py version | cut -d + -f 1)
# VERSION gets evaluated every time it's referenced, therefore we need to use VERSION here instead of FULL_VERSION.
FILENAME=$(CIRCLE_ARTIFACTS)/$(NAME).$(VERSION).tar.gz

@@ -12,11 +13,11 @@ deps:

pack:
sed -ri "s/^__version__ = '([0-9.]*)'/__version__ = '$(FULL_VERSION)'/" redash/__init__.py
tar -zcv -f $(FILENAME) --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *
tar -zcv -f $(FILENAME) --exclude="optipng*" --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *

upload:
python bin/upload_version.py $(VERSION) $(FILENAME)
python bin/release_manager.py $(CIRCLE_SHA1) $(BASE_VERSION) $(FILENAME)

test:
nosetests --with-coverage --cover-package=redash tests/*.py
cd rd_ui && grunt test
#cd rd_ui && grunt test
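
As a side note, the version stamping that the pack target does with sed can be sketched in Python as follows; the version string is a made-up example and the file path is the one the Makefile touches:

    import re

    full_version = "0.7.0+b1234"  # stand-in for $(FULL_VERSION)

    with open("redash/__init__.py") as f:
        source = f.read()

    source = re.sub(r"^__version__ = '([0-9.]*)'",
                    "__version__ = '%s'" % full_version,
                    source,
                    flags=re.MULTILINE)

    with open("redash/__init__.py", "w") as f:
        f.write(source)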
@@ -1,6 +1,5 @@
<p align="center">
<img title="re:dash" src='https://raw.githubusercontent.com/EverythingMe/redash/screenshots/redash_logo.png' />
<img title="re:dash" src='http://redash.io/static/img/redash_logo.png' width="200px"/>
</p>
<p align="center">
<img title="Build Status" src='https://circleci.com/gh/EverythingMe/redash.png?circle-token=8a695aa5ec2cbfa89b48c275aea298318016f040'/>
@@ -11,7 +10,8 @@
Prior to **_re:dash_**, we tried to use traditional BI suites and discovered a set of bloated, technically challenged and slow tools/flows. What we were looking for was a more hacker'ish way to look at data, so we built one.

**_re:dash_** was built to allow fast and easy access to billions of records, that we process and collect using Amazon Redshift ("petabyte scale data warehouse" that "speaks" PostgreSQL).
Today **_re:dash_** has support for querying multiple databases, including: Redshift, Google BigQuery, PostgreSQL, MySQL, Graphite and custom scripts.
Today **_re:dash_** has support for querying multiple databases, including: Redshift, Google BigQuery, PostgreSQL, MySQL, Graphite,
Presto, Google Spreadsheets, Cloudera Impala and custom scripts.

**_re:dash_** consists of two parts:

@@ -28,7 +28,7 @@ You can try out the demo instance: http://demo.redash.io/ (login with any Google

## Getting Started

* [Setting up re:dash instance](https://github.com/EverythingMe/redash/wiki/Setting-up-re:dash-instance) (includes links to ready made AWS/GCE images).
* [Setting up re:dash instance](http://redash.io/deployment/setup.html) (includes links to ready made AWS/GCE images).
* Additional documentation in the [Wiki](https://github.com/everythingme/redash/wiki).
Deleted file (30 lines):
@@ -1,30 +0,0 @@
#!/usr/bin/env python
import sys
import requests

if __name__ == '__main__':
    response = requests.get('https://api.github.com/repos/EverythingMe/redash/releases')

    if response.status_code != 200:
        exit("Failed getting releases (status code: %s)." % response.status_code)

    sorted_releases = sorted(response.json(), key=lambda release: release['id'], reverse=True)

    latest_release = sorted_releases[0]
    asset_url = latest_release['assets'][0]['url']
    filename = latest_release['assets'][0]['name']

    wget_command = 'wget --header="Accept: application/octet-stream" %s -O %s' % (asset_url, filename)

    if '--url-only' in sys.argv:
        print asset_url
    elif '--wget' in sys.argv:
        print wget_command
    else:
        print "Latest release: %s" % latest_release['tag_name']
        print latest_release['body']

        print "\nTarball URL: %s" % asset_url
        print 'wget: %s' % (wget_command)
bin/release_manager.py (new file, 147 lines)
@@ -0,0 +1,147 @@
import os
import sys
import json
import re
import subprocess
import requests

github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
repo = 'EverythingMe/redash'

def _github_request(method, path, params=None, headers={}):
    if not path.startswith('https://api.github.com'):
        url = "https://api.github.com/{}".format(path)
    else:
        url = path

    if params is not None:
        params = json.dumps(params)

    response = requests.request(method, url, data=params, auth=auth)
    return response

def exception_from_error(message, response):
    return Exception("({}) {}: {}".format(response.status_code, message, response.json().get('message', '?')))

def rc_tag_name(version):
    return "v{}-rc".format(version)

def get_rc_release(version):
    tag = rc_tag_name(version)
    response = _github_request('get', 'repos/{}/releases/tags/{}'.format(repo, tag))

    if response.status_code == 404:
        return None
    elif response.status_code == 200:
        return response.json()

    raise exception_from_error("Unknown error while looking RC release: ", response)

def create_release(version, commit_sha):
    tag = rc_tag_name(version)

    params = {
        'tag_name': tag,
        'name': "{} - RC".format(version),
        'target_commitish': commit_sha,
        'prerelease': True
    }

    response = _github_request('post', 'repos/{}/releases'.format(repo), params)

    if response.status_code != 201:
        raise exception_from_error("Failed creating new release", response)

    return response.json()

def upload_asset(release, filepath):
    upload_url = release['upload_url'].replace('{?name}', '')
    filename = filepath.split('/')[-1]

    with open(filepath) as file_content:
        headers = {'Content-Type': 'application/gzip'}
        response = requests.post(upload_url, file_content, params={'name': filename}, headers=headers, auth=auth, verify=False)

    if response.status_code != 201:  # not 200/201/...
        raise exception_from_error('Failed uploading asset', response)

    return response

def remove_previous_builds(release):
    for asset in release['assets']:
        response = _github_request('delete', asset['url'])
        if response.status_code != 204:
            raise exception_from_error("Failed deleting asset", response)

def get_changelog(commit_sha):
    latest_release = _github_request('get', 'repos/{}/releases/latest'.format(repo))
    if latest_release.status_code != 200:
        raise exception_from_error('Failed getting latest release', latest_release)

    latest_release = latest_release.json()
    previous_sha = latest_release['target_commitish']

    args = ['git', '--no-pager', 'log', '--merges', '--grep', 'Merge pull request', '--pretty=format:"%h|%s|%b|%p"', '{}...{}'.format(previous_sha, commit_sha)]
    log = subprocess.check_output(args)
    changes = ["Changes since {}:".format(latest_release['name'])]

    for line in log.split('\n'):
        try:
            sha, subject, body, parents = line[1:-1].split('|')
        except ValueError:
            continue

        try:
            pull_request = re.match("Merge pull request #(\d+)", subject).groups()[0]
            pull_request = " #{}".format(pull_request)
        except Exception, ex:
            pull_request = ""

        author = subprocess.check_output(['git', 'log', '-1', '--pretty=format:"%an"', parents.split(' ')[-1]])[1:-1]

        changes.append("{}{}: {} ({})".format(sha, pull_request, body.strip(), author))

    return "\n".join(changes)

def update_release_commit_sha(release, commit_sha):
    params = {
        'target_commitish': commit_sha,
    }

    response = _github_request('patch', 'repos/{}/releases/{}'.format(repo, release['id']), params)

    if response.status_code != 200:
        raise exception_from_error("Failed updating commit sha for existing release", response)

    return response.json()

def update_release(version, build_filepath, commit_sha):
    try:
        release = get_rc_release(version)
        if release:
            release = update_release_commit_sha(release, commit_sha)
        else:
            release = create_release(version, commit_sha)

        print "Using release id: {}".format(release['id'])

        remove_previous_builds(release)
        response = upload_asset(release, build_filepath)

        changelog = get_changelog(commit_sha)

        response = _github_request('patch', release['url'], {'body': changelog})
        if response.status_code != 200:
            raise exception_from_error("Failed updating release description", response)

    except Exception, ex:
        print ex

if __name__ == '__main__':
    commit_sha = sys.argv[1]
    version = sys.argv[2]
    filepath = sys.argv[3]

    # TODO: make sure running from git directory & remote = repo
    update_release(version, filepath, commit_sha)
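
A usage sketch for the script above, mirroring the Makefile's upload target (python bin/release_manager.py $(CIRCLE_SHA1) $(BASE_VERSION) $(FILENAME)); all values below are placeholders. Note the command-line order is commit SHA, version, file path, while update_release() itself takes (version, filepath, commit_sha):

    import subprocess

    # Placeholder values; on CI these come from CircleCI environment variables.
    commit_sha = "0123456789abcdef0123456789abcdef01234567"
    base_version = "0.7.0"  # tagged as v0.7.0-rc by rc_tag_name()
    tarball = "/tmp/redash.0.7.0+b1234.tar.gz"

    subprocess.check_call(["python", "bin/release_manager.py", commit_sha, base_version, tarball])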
Deleted file (63 lines):
@@ -1,63 +0,0 @@
"""
Script to test concurrency (multithreading/multiprocess) issues with the workers. Use with caution.
"""
import json
import atfork
atfork.monkeypatch_os_fork_functions()
import atfork.stdlib_fixer
atfork.stdlib_fixer.fix_logging_module()

import time
from redash.data import worker
from redash import models, data_manager, redis_connection

if __name__ == '__main__':
    models.create_db(True, False)

    print "Creating data source..."
    data_source = models.DataSource.create(name="Concurrency", type="pg", options="dbname=postgres")

    print "Clear jobs/hashes:"
    redis_connection.delete("jobs")
    query_hashes = redis_connection.keys("query_hash_*")
    if query_hashes:
        redis_connection.delete(*query_hashes)

    starting_query_results_count = models.QueryResult.select().count()
    jobs_count = 5000
    workers_count = 10

    print "Creating jobs..."
    for i in xrange(jobs_count):
        query = "SELECT {}".format(i)
        print "Inserting: {}".format(query)
        data_manager.add_job(query=query, priority=worker.Job.LOW_PRIORITY,
                             data_source=data_source)

    print "Starting workers..."
    workers = data_manager.start_workers(workers_count)

    print "Waiting for jobs to be done..."
    keep_waiting = True
    while keep_waiting:
        results_count = models.QueryResult.select().count() - starting_query_results_count
        print "QueryResults: {}".format(results_count)
        time.sleep(5)
        if results_count == jobs_count:
            print "Yay done..."
            keep_waiting = False

    data_manager.stop_workers()

    qr_count = 0
    for qr in models.QueryResult.select():
        number = int(qr.query.split()[1])
        data_number = json.loads(qr.data)['rows'][0].values()[0]

        if number != data_number:
            print "Oops? {} != {} ({})".format(number, data_number, qr.id)
        qr_count += 1

    print "Verified {} query results.".format(qr_count)

    print "Done."
Deleted file (46 lines):
@@ -1,46 +0,0 @@
#!python
import os
import sys
import json
import requests
import subprocess


def capture_output(command):
    proc = subprocess.Popen(command, stdout=subprocess.PIPE)
    return proc.stdout.read()


if __name__ == '__main__':
    version = sys.argv[1]
    filepath = sys.argv[2]
    filename = filepath.split('/')[-1]
    github_token = os.environ['GITHUB_TOKEN']
    auth = (github_token, 'x-oauth-basic')
    commit_sha = os.environ['CIRCLE_SHA1']

    commit_body = capture_output(["git", "log", "--format=%b", "-n", "1", commit_sha])
    file_md5_checksum = capture_output(["md5sum", filepath]).split()[0]
    file_sha256_checksum = capture_output(["sha256sum", filepath]).split()[0]
    version_body = "%s\n\nMD5: %s\nSHA256: %s" % (commit_body, file_md5_checksum, file_sha256_checksum)

    params = json.dumps({
        'tag_name': 'v{0}'.format(version),
        'name': 're:dash v{0}'.format(version),
        'body': version_body,
        'target_commitish': commit_sha,
        'prerelease': True
    })

    response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
                             data=params,
                             auth=auth)

    upload_url = response.json()['upload_url']
    upload_url = upload_url.replace('{?name}', '')

    with open(filepath) as file_content:
        headers = {'Content-Type': 'application/gzip'}
        response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth,
                                 headers=headers, verify=False)
@@ -7,6 +7,9 @@ machine:
    2.7.3
dependencies:
  pre:
    - wget http://downloads.sourceforge.net/project/optipng/OptiPNG/optipng-0.7.5/optipng-0.7.5.tar.gz
    - tar xvf optipng-0.7.5.tar.gz
    - cd optipng-0.7.5; ./configure; make; sudo checkinstall -y;
    - make deps
    - pip install -r dev_requirements.txt
    - pip install -r requirements.txt
@@ -26,3 +29,7 @@ deployment:
notify:
  webhooks:
    - url: https://webhooks.gitter.im/e/895d09c3165a0913ac2f
general:
  branches:
    ignore:
      - gh-pages
manage.py (19 changed lines)
@@ -2,12 +2,15 @@
"""
CLI to manage redash.
"""
import json

from flask.ext.script import Manager

from redash import settings, models, __version__
from redash.wsgi import app
from redash.import_export import import_manager
from redash.cli import users, database, data_sources
from redash.monitor import get_status

manager = Manager(app)
manager.add_command("database", database.manager)
@@ -21,6 +24,9 @@ def version():
    """Displays re:dash version."""
    print __version__

@manager.command
def status():
    print json.dumps(get_status(), indent=2)

@manager.command
def runworkers():
@@ -37,12 +43,15 @@ def make_shell_context():
@manager.command
def check_settings():
    """Show the settings as re:dash sees them (useful for debugging)."""
    from types import ModuleType
    for name, item in settings.all_settings().iteritems():
        print "{} = {}".format(name, item)

    for name in dir(settings):
        item = getattr(settings, name)
        if not callable(item) and not name.startswith("__") and not isinstance(item, ModuleType):
            print "{} = {}".format(name, item)

@manager.command
def send_test_mail():
    from redash import mail
    from flask_mail import Message

    mail.send(Message(subject="Test Message from re:dash", recipients=[settings.MAIL_DEFAULT_SENDER], body="Test message."))


if __name__ == '__main__':
@@ -1,12 +1,15 @@
from playhouse.migrate import Migrator
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = Migrator(db.database)
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrator.add_column(models.Query, models.Query.is_archived, 'is_archived')
        migrate(
            migrator.add_column('queries', 'is_archived', models.Query.is_archived)
        )

    db.close_db(None)
migrations/0002_fix_timestamp_fields.py (new file, 21 lines)
@@ -0,0 +1,21 @@
from redash.models import db

if __name__ == '__main__':
    db.connect_db()
    columns = (
        ('activity_log', 'created_at'),
        ('dashboards', 'created_at'),
        ('data_sources', 'created_at'),
        ('events', 'created_at'),
        ('groups', 'created_at'),
        ('queries', 'created_at'),
        ('widgets', 'created_at'),
        ('query_results', 'retrieved_at')
    )

    with db.database.transaction():
        for column in columns:
            db.database.execute_sql("ALTER TABLE {} ALTER COLUMN {} TYPE timestamp with time zone;".format(*column))

    db.close_db(None)
migrations/0003_update_data_source_config.py (new file, 73 lines)
@@ -0,0 +1,73 @@
import json

from redash import query_runner
from redash.models import DataSource


def update(data_source):
    print "[%s] Old options: %s" % (data_source.name, data_source.options)

    if query_runner.validate_configuration(data_source.type, data_source.options):
        print "[%s] configuration already valid. skipping." % data_source.name
        return

    if data_source.type == 'pg':
        values = data_source.options.split(" ")
        configuration = {}
        for value in values:
            k, v = value.split("=", 1)
            configuration[k] = v
            if k == 'port':
                configuration[k] = int(v)

        data_source.options = json.dumps(configuration)

    elif data_source.type == 'mysql':
        mapping = {
            'Server': 'host',
            'User': 'user',
            'Pwd': 'passwd',
            'Database': 'db'
        }

        values = data_source.options.split(";")
        configuration = {}
        for value in values:
            k, v = value.split("=", 1)
            configuration[mapping[k]] = v
        data_source.options = json.dumps(configuration)

    elif data_source.type == 'graphite':
        old_config = json.loads(data_source.options)

        configuration = {
            "url": old_config["url"]
        }

        if "verify" in old_config:
            configuration['verify'] = old_config['verify']

        if "auth" in old_config:
            configuration['username'], configuration['password'] = old_config["auth"]

        data_source.options = json.dumps(configuration)

    elif data_source.type == 'url':
        data_source.options = json.dumps({"url": data_source.options})

    elif data_source.type == 'script':
        data_source.options = json.dumps({"path": data_source.options})

    elif data_source.type == 'mongo':
        data_source.type = 'mongodb'

    else:
        print "[%s] No need to convert type of: %s" % (data_source.name, data_source.type)

    print "[%s] New options: %s" % (data_source.name, data_source.options)
    data_source.save()


if __name__ == '__main__':
    for data_source in DataSource.all():
        update(data_source)
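
To illustrate the 'pg' branch of update() above as a standalone snippet (the connection values are invented):

    import json

    # Hypothetical legacy options string for a "pg" data source.
    options = "dbname=postgres host=localhost port=5432"

    configuration = {}
    for value in options.split(" "):
        k, v = value.split("=", 1)
        configuration[k] = int(v) if k == "port" else v

    print(json.dumps(configuration))
    # -> {"dbname": "postgres", "host": "localhost", "port": 5432} (key order may vary)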
migrations/0004_allow_null_in_event_user.py (new file, 12 lines)
@@ -0,0 +1,12 @@
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(
            migrator.drop_not_null('events', 'user_id')
        )
migrations/0005_add_updated_at.py (new file, 26 lines)
@@ -0,0 +1,26 @@
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(
            migrator.add_column('queries', 'updated_at', models.Query.updated_at),
            migrator.add_column('dashboards', 'updated_at', models.Dashboard.updated_at),
            migrator.add_column('widgets', 'updated_at', models.Widget.updated_at),
            migrator.add_column('users', 'created_at', models.User.created_at),
            migrator.add_column('users', 'updated_at', models.User.updated_at),
            migrator.add_column('visualizations', 'created_at', models.Visualization.created_at),
            migrator.add_column('visualizations', 'updated_at', models.Visualization.updated_at)
        )

        db.database.execute_sql("UPDATE queries SET updated_at = created_at;")
        db.database.execute_sql("UPDATE dashboards SET updated_at = created_at;")
        db.database.execute_sql("UPDATE widgets SET updated_at = created_at;")

    db.close_db(None)
migrations/0006_queries_last_edit_by.py (new file, 19 lines)
@@ -0,0 +1,19 @@
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(
            migrator.add_column('queries', 'last_modified_by_id', models.Query.last_modified_by)
        )

        db.database.execute_sql("UPDATE queries SET last_modified_by_id = user_id;")

    db.close_db(None)
migrations/0007_add_schedule_to_queries.py (new file, 23 lines)
@@ -0,0 +1,23 @@
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(
            migrator.add_column('queries', 'schedule', models.Query.schedule),
        )

        db.database.execute_sql("UPDATE queries SET schedule = ttl WHERE ttl > 0;")

        migrate(
            migrator.drop_column('queries', 'ttl')
        )

    db.close_db(None)
migrations/0008_make_ds_name_unique.py (new file, 20 lines)
@@ -0,0 +1,20 @@
from redash.models import db

if __name__ == '__main__':
    db.connect_db()

    with db.database.transaction():
        # Make sure all data sources names are unique.
        db.database.execute_sql("""
        UPDATE data_sources
        SET name = new_names.name
        FROM (
            SELECT id, name || ' ' || id as name
            FROM (SELECT id, name, rank() OVER (PARTITION BY name ORDER BY created_at ASC) FROM data_sources) ds WHERE rank > 1
        ) AS new_names
        WHERE data_sources.id = new_names.id;
        """)
        # Add unique constraint on data_sources.name.
        db.database.execute_sql("ALTER TABLE data_sources ADD CONSTRAINT unique_name UNIQUE (name);")

    db.close_db(None)
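
A small simulation of the rename rule that the UPDATE above applies (ids and names are invented): within each group of data sources sharing a name, every row after the oldest gets its row id appended to the name, so the UNIQUE constraint can then be added safely.

    # rows ordered by created_at; (id, name) pairs are made up for illustration.
    rows = [(3, "MySQL"), (7, "MySQL"), (9, "Redshift")]

    seen = set()
    renamed = []
    for ds_id, name in rows:
        if name in seen:
            name = "{} {}".format(name, ds_id)  # rank > 1 -> append the id
        else:
            seen.add(name)
        renamed.append((ds_id, name))

    print(renamed)  # [(3, 'MySQL'), (7, 'MySQL 7'), (9, 'Redshift')]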
migrations/0009_add_api_key_to_user.py (new file, 27 lines)
@@ -0,0 +1,27 @@
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        column = models.User.api_key
        column.null = True
        migrate(
            migrator.add_column('users', 'api_key', models.User.api_key),
        )

        for user in models.User.select():
            user.save()

        migrate(
            migrator.add_not_null('users', 'api_key')
        )

    db.close_db(None)
migrations/0010_allow_deleting_datasources.py (new file, 18 lines)
@@ -0,0 +1,18 @@
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(
            migrator.drop_not_null('queries', 'data_source_id'),
        )

    db.close_db(None)
migrations/0010_create_alerts.py (new file, 8 lines)
@@ -0,0 +1,8 @@
from redash.models import db, Alert, AlertSubscription

if __name__ == '__main__':
    with db.database.transaction():
        Alert.create_table()
        AlertSubscription.create_table()

    db.close_db(None)
migrations/0011_migrate_bigquery_to_json.py (new file, 44 lines)
@@ -0,0 +1,44 @@
from base64 import b64encode
import json
from redash.models import DataSource


def convert_p12_to_pem(p12file):
    from OpenSSL import crypto
    with open(p12file, 'rb') as f:
        p12 = crypto.load_pkcs12(f.read(), "notasecret")

    return crypto.dump_privatekey(crypto.FILETYPE_PEM, p12.get_privatekey())

if __name__ == '__main__':

    for ds in DataSource.all():

        if ds.type == 'bigquery':
            options = json.loads(ds.options)

            if 'jsonKeyFile' in options:
                continue

            new_options = {
                'projectId': options['projectId'],
                'jsonKeyFile': b64encode(json.dumps({
                    'client_email': options['serviceAccount'],
                    'private_key': convert_p12_to_pem(options['privateKey'])
                }))
            }

            ds.options = json.dumps(new_options)
            ds.save()
        elif ds.type == 'google_spreadsheets':
            options = json.loads(ds.options)
            if 'jsonKeyFile' in options:
                continue

            with open(options['credentialsFilePath']) as f:
                new_options = {
                    'jsonKeyFile': b64encode(f.read())
                }

            ds.options = json.dumps(new_options)
            ds.save()
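
For reference, the shape of the migrated options that the BigQuery branch builds, as a standalone snippet with made-up service-account values:

    from base64 import b64encode
    import json

    # Hypothetical values; a real private key would come from the converted .p12 file.
    key_json = json.dumps({
        "client_email": "service-account@example.iam.gserviceaccount.com",
        "private_key": "-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n",
    })

    new_options = {
        "projectId": "example-project",
        "jsonKeyFile": b64encode(key_json.encode("utf-8")).decode("ascii"),
    }
    print(json.dumps(new_options))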
@@ -19,6 +19,7 @@
  "trailing": true,
  "smarttabs": true,
  "globals": {
    "angular": false
    "angular": false,
    "_": false
  }
}
rd_ui/app/images/favicon-16x16.png (new binary file, 1.3 KiB)
rd_ui/app/images/favicon-32x32.png (new binary file, 2.0 KiB)
rd_ui/app/images/favicon-96x96.png (new binary file, 3.8 KiB)
rd_ui/app/images/redash_icon_small.png (new binary file, 6.0 KiB)
@@ -18,10 +18,15 @@
<link rel="stylesheet" href="/bower_components/angular-ui-select/dist/select.css">
<link rel="stylesheet" href="/bower_components/pace/themes/pace-theme-minimal.css">
<link rel="stylesheet" href="/bower_components/font-awesome/css/font-awesome.css">
<link rel="stylesheet" href="/bower_components/offline/themes/offline-theme-default.css">
<link rel="stylesheet" href="/bower_components/offline/themes/offline-language-english.css">
<link rel="stylesheet" href="/bower_components/codemirror/addon/hint/show-hint.css">
<link rel="stylesheet" href="/bower_components/leaflet/dist/leaflet.css">
<link rel="stylesheet" href="/styles/redash.css">
<!-- endbuild -->

<link rel="icon" type="image/png" sizes="32x32" href="/images/favicon-32x32.png">
<link rel="icon" type="image/png" sizes="96x96" href="/images/favicon-96x96.png">
<link rel="icon" type="image/png" sizes="16x16" href="/images/favicon-16x16.png">

</head>
<body>
<div growl></div>
@@ -35,15 +40,15 @@
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="/"><strong>{{name}}</strong></a>
<a class="navbar-brand" href="/"><img src="/images/redash_icon_small.png"/></a>
</div>
{% raw %}
<div class="collapse navbar-collapse navbar-ex1-collapse">
<ul class="nav navbar-nav">
<li class="active" ng-show="pageTitle"><a class="page-title" ng-bind="pageTitle"></a></li>
<li class="dropdown" ng-show="groupedDashboards.length > 0 || otherDashboards.length > 0 || currentUser.hasPermission('create_dashboard')">
<a href="#" class="dropdown-toggle" data-toggle="dropdown"><span class="glyphicon glyphicon-th-large"></span> <b class="caret"></b></a>
<ul class="dropdown-menu">
<li class="dropdown" ng-show="groupedDashboards.length > 0 || otherDashboards.length > 0 || currentUser.hasPermission('create_dashboard')" dropdown>
<a href="#" class="dropdown-toggle" dropdown-toggle><span class="glyphicon glyphicon-th-large"></span> <b class="caret"></b></a>
<ul class="dropdown-menu" dropdown-menu>
<span ng-repeat="(name, group) in groupedDashboards">
<li class="dropdown-submenu">
<a href="#" ng-bind="name"></a>
@@ -61,13 +66,19 @@
<li><a data-toggle="modal" href="#new_dashboard_dialog" ng-show="currentUser.hasPermission('create_dashboard')">New Dashboard</a></li>
</ul>
</li>
<li class="dropdown" ng-show="currentUser.hasPermission('view_query')">
<a href="#" class="dropdown-toggle" data-toggle="dropdown">Queries <b class="caret"></b></a>
<ul class="dropdown-menu">
<li class="dropdown" ng-show="currentUser.hasPermission('view_query')" dropdown>
<a href="#" class="dropdown-toggle" dropdown-toggle>Queries <b class="caret"></b></a>
<ul class="dropdown-menu" dropdown-menu>
<li ng-show="currentUser.hasPermission('create_query')"><a href="/queries/new">New Query</a></li>
<li><a href="/queries">Queries</a></li>
</ul>
</li>
<li>
<a href="/alerts">Alerts</a>
</li>
<li ng-show="currentUser.hasPermission('admin')">
<a href="/data_sources">Data Sources</a>
</li>
</ul>
<form class="navbar-form navbar-left" role="search" ng-submit="searchQueries()">
<div class="form-group">
@@ -107,9 +118,11 @@
<script src="/bower_components/codemirror/lib/codemirror.js"></script>
<script src="/bower_components/codemirror/addon/edit/matchbrackets.js"></script>
<script src="/bower_components/codemirror/addon/edit/closebrackets.js"></script>
<script src="/bower_components/codemirror/addon/hint/show-hint.js"></script>
<script src="/bower_components/codemirror/addon/hint/anyword-hint.js"></script>
<script src="/bower_components/codemirror/mode/sql/sql.js"></script>
<script src="/bower_components/codemirror/mode/python/python.js"></script>
<script src="/bower_components/codemirror/mode/javascript/javascript.js"></script>
<script src="/bower_components/angular-ui-codemirror/ui-codemirror.js"></script>
<script src="/bower_components/highcharts/highcharts.js"></script>
<script src="/bower_components/highcharts/modules/exporting.js"></script>
<script src="/bower_components/gridster/dist/jquery.gridster.js"></script>
@@ -123,13 +136,17 @@
<script src="/bower_components/angular-ui-select/dist/select.js"></script>
<script src="/bower_components/underscore.string/lib/underscore.string.js"></script>
<script src="/bower_components/marked/lib/marked.js"></script>
<script src="/bower_components/angular-base64-upload/dist/angular-base64-upload.js"></script>
<script src="/scripts/ng_highchart.js"></script>
<script src="/scripts/ng_smart_table.js"></script>
<script src="/scripts/ui-bootstrap-tpls-0.5.0.min.js"></script>
<script src="/bower_components/angular-ui-bootstrap-bower/ui-bootstrap-tpls.js"></script>
<script src="/bower_components/bucky/bucky.js"></script>
<script src="/bower_components/pace/pace.js"></script>
<script src="/bower_components/mustache/mustache.js"></script>
<script src="/bower_components/offline/offline.min.js"></script>
<script src="/bower_components/canvg/rgbcolor.js"></script>
<script src="/bower_components/canvg/StackBlur.js"></script>
<script src="/bower_components/canvg/canvg.js"></script>
<script src="/bower_components/leaflet/dist/leaflet.js"></script>
<!-- endbuild -->

<!-- build:js({.tmp,app}) /scripts/scripts.js -->
@@ -141,18 +158,22 @@
<script src="/scripts/controllers/controllers.js"></script>
<script src="/scripts/controllers/dashboard.js"></script>
<script src="/scripts/controllers/admin_controllers.js"></script>
<script src="/scripts/controllers/data_sources.js"></script>
<script src="/scripts/controllers/query_view.js"></script>
<script src="/scripts/controllers/query_source.js"></script>
<script src="/scripts/visualizations/base.js"></script>
<script src="/scripts/visualizations/chart.js"></script>
<script src="/scripts/visualizations/cohort.js"></script>
<script src="/scripts/visualizations/map.js"></script>
<script src="/scripts/visualizations/counter.js"></script>
<script src="/scripts/visualizations/table.js"></script>
<script src="/scripts/visualizations/pivot.js"></script>
<script src="/scripts/directives/directives.js"></script>
<script src="/scripts/directives/query_directives.js"></script>
<script src="/scripts/directives/data_source_directives.js"></script>
<script src="/scripts/directives/dashboard_directives.js"></script>
<script src="/scripts/filters.js"></script>
<script src="/scripts/controllers/alerts.js"></script>
<!-- endbuild -->

<script>
@@ -167,7 +188,7 @@

currentUser.hasPermission = function(permission) {
  return this.permissions.indexOf(permission) != -1;
}
};

{{ analytics|safe }}
</script>
@@ -13,6 +13,10 @@
<link rel="stylesheet" href="/styles/redash.css">
<link rel="stylesheet" href="/styles/login.css">
<!-- endbuild -->

<link rel="icon" type="image/png" sizes="32x32" href="/images/favicon-32x32.png">
<link rel="icon" type="image/png" sizes="96x96" href="/images/favicon-96x96.png">
<link rel="icon" type="image/png" sizes="16x16" href="/images/favicon-16x16.png">
</head>
<body>

@@ -26,13 +30,20 @@
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="/"><strong>{{name}}</strong></a>
<a class="navbar-brand" href="/"><img src="/images/redash_icon_small.png"/></a>
</div>
</div>
</nav>

<div class="container">
<div class="row">
{% with messages = get_flashed_messages() %}
{% if messages %}
{% for message in messages %}
<div class="alert alert-warning" role="alert">{{ message }}</div>
{% endfor %}
{% endif %}
{% endwith %}

<div class="main">
{% if show_google_openid %}
@@ -48,6 +59,19 @@

{% endif %}

{% if show_saml_login %}

<div class="row">
<a href="/saml/login">SAML Login</a>
</div>

<div class="login-or">
<hr class="hr-or">
<span class="span-or">or</span>
</div>

{% endif %}

<form role="form" method="post" name="login">
<div class="form-group">
<label for="inputUsernameEmail">Username or email</label>
@@ -6,7 +6,6 @@ angular.module('redash', [
'redash.services',
'redash.renderers',
'redash.visualization',
'ui.codemirror',
'highchart',
'ui.select2',
'angular-growl',
@@ -15,7 +14,8 @@ angular.module('redash', [
'smartTable.table',
'ngResource',
'ngRoute',
'ui.select'
'ui.select',
'naif.base64'
]).config(['$routeProvider', '$locationProvider', '$compileProvider', 'growlProvider',
function ($routeProvider, $locationProvider, $compileProvider, growlProvider) {
if (featureFlags.clientSideMetrics) {
@@ -81,9 +81,23 @@ angular.module('redash', [
templateUrl: '/views/admin_status.html',
controller: 'AdminStatusCtrl'
});
$routeProvider.when('/admin/workers', {
templateUrl: '/views/admin_workers.html',
controller: 'AdminWorkersCtrl'

$routeProvider.when('/alerts', {
templateUrl: '/views/alerts/list.html',
controller: 'AlertsCtrl'
});
$routeProvider.when('/alerts/:alertId', {
templateUrl: '/views/alerts/edit.html',
controller: 'AlertCtrl'
});

$routeProvider.when('/data_sources/:dataSourceId', {
templateUrl: '/views/data_sources/edit.html',
controller: 'DataSourceCtrl'
});
$routeProvider.when('/data_sources', {
templateUrl: '/views/data_sources/list.html',
controller: 'DataSourcesCtrl'
});

$routeProvider.when('/', {
||||
@@ -16,16 +16,9 @@
|
||||
$timeout(refresh, 59 * 1000);
|
||||
};
|
||||
|
||||
$scope.flowerUrl = featureFlags.flowerUrl;
|
||||
|
||||
refresh();
|
||||
}
|
||||
|
||||
var AdminWorkersCtrl = function ($scope, $sce) {
|
||||
$scope.flowerUrl = $sce.trustAsResourceUrl(featureFlags.flowerUrl);
|
||||
};
|
||||
|
||||
angular.module('redash.admin_controllers', [])
|
||||
.controller('AdminStatusCtrl', ['$scope', 'Events', '$http', '$timeout', AdminStatusCtrl])
|
||||
.controller('AdminWorkersCtrl', ['$scope', '$sce', AdminWorkersCtrl])
|
||||
})();
|
||||
|
||||
rd_ui/app/scripts/controllers/alerts.js (new file, 174 lines)
@@ -0,0 +1,174 @@
(function() {

  var AlertsCtrl = function($scope, Events, Alert) {
    Events.record(currentUser, "view", "page", "alerts");
    $scope.$parent.pageTitle = "Alerts";

    $scope.alerts = []
    Alert.query(function(alerts) {
      var stateClass = {
        'ok': 'label label-success',
        'triggered': 'label label-danger',
        'unknown': 'label label-warning'
      };
      _.each(alerts, function(alert) {
        alert.class = stateClass[alert.state];
      })
      $scope.alerts = alerts;

    });

    $scope.gridConfig = {
      isPaginationEnabled: true,
      itemsByPage: 50,
      maxSize: 8,
    };


    $scope.gridColumns = [
      {
        "label": "Name",
        "map": "name",
        "cellTemplate": '<a href="/alerts/{{dataRow.id}}">{{dataRow.name}}</a> (<a href="/queries/{{dataRow.query.id}}">query</a>)'
      },
      {
        'label': 'Created By',
        'map': 'user.name'
      },
      {
        'label': 'State',
        'cellTemplate': '<span ng-class="dataRow.class">{{dataRow.state | uppercase}}</span> since <span am-time-ago="dataRow.updated_at"></span>'
      },
      {
        'label': 'Created At',
        'cellTemplate': '<span am-time-ago="dataRow.created_at"></span>'
      }
    ];
  };

  var AlertCtrl = function($scope, $routeParams, $location, growl, Query, Events, Alert) {
    $scope.$parent.pageTitle = "Alerts";

    $scope.alertId = $routeParams.alertId;
    if ($scope.alertId === "new") {
      Events.record(currentUser, 'view', 'page', 'alerts/new');
    } else {
      Events.record(currentUser, 'view', 'alert', $scope.alertId);
    }

    $scope.onQuerySelected = function(item) {
      $scope.selectedQuery = item;
      item.getQueryResultPromise().then(function(result) {
        $scope.queryResult = result;
        $scope.alert.options.column = result.getColumnNames()[0];
      });
    };

    if ($scope.alertId === "new") {
      $scope.alert = new Alert({options: {}});
    } else {
      $scope.alert = Alert.get({id: $scope.alertId}, function(alert) {
        $scope.onQuerySelected(new Query($scope.alert.query));
      });
    }

    $scope.ops = ['greater than', 'less than', 'equals'];
    $scope.selectedQuery = null;

    $scope.getDefaultName = function() {
      if (!$scope.alert.query) {
        return undefined;
      }
      return _.template("<%= query.name %>: <%= options.column %> <%= options.op %> <%= options.value %>", $scope.alert);
    };

    $scope.searchQueries = function (term) {
      if (!term || term.length < 3) {
        return;
      }

      Query.search({q: term}, function(results) {
        $scope.queries = results;
      });
    };

    $scope.saveChanges = function() {
      if ($scope.alert.name === undefined || $scope.alert.name === '') {
        $scope.alert.name = $scope.getDefaultName();
      }

      $scope.alert.$save(function(alert) {
        growl.addSuccessMessage("Saved.");
        if ($scope.alertId === "new") {
          $location.path('/alerts/' + alert.id).replace();
        }
      }, function() {
        growl.addErrorMessage("Failed saving alert.");
      });
    };
  };

  angular.module('redash.directives').directive('alertSubscribers', ['AlertSubscription', function (AlertSubscription) {
    return {
      restrict: 'E',
      replace: true,
      templateUrl: '/views/alerts/subscribers.html',
      scope: {
        'alertId': '='
      },
      controller: function ($scope) {
        $scope.subscribers = AlertSubscription.query({alertId: $scope.alertId});
      }
    }
  }]);

  angular.module('redash.directives').directive('subscribeButton', ['AlertSubscription', 'growl', function (AlertSubscription, growl) {
    return {
      restrict: 'E',
      replace: true,
      template: '<button class="btn btn-default btn-xs" ng-click="toggleSubscription()"><i ng-class="class"></i></button>',
      controller: function ($scope) {
        var updateClass = function() {
          if ($scope.subscription) {
            $scope.class = "fa fa-eye-slash";
          } else {
            $scope.class = "fa fa-eye";
          }
        }

        $scope.subscribers.$promise.then(function() {
          $scope.subscription = _.find($scope.subscribers, function(subscription) {
            return (subscription.user.email == currentUser.email);
          });

          updateClass();
        });

        $scope.toggleSubscription = function() {
          if ($scope.subscription) {
            $scope.subscription.$delete(function() {
              $scope.subscribers = _.without($scope.subscribers, $scope.subscription);
              $scope.subscription = undefined;
              updateClass();
            }, function() {
              growl.addErrorMessage("Failed saving subscription.");
            });
          } else {
            $scope.subscription = new AlertSubscription({alert_id: $scope.alertId});
            $scope.subscription.$save(function() {
              $scope.subscribers.push($scope.subscription);
              updateClass();
            }, function() {
              growl.addErrorMessage("Unsubscription failed.");
            });
          }
        }
      }
    }
  }]);

  angular.module('redash.controllers')
    .controller('AlertsCtrl', ['$scope', 'Events', 'Alert', AlertsCtrl])
    .controller('AlertCtrl', ['$scope', '$routeParams', '$location', 'growl', 'Query', 'Events', 'Alert', AlertCtrl])

})();
@@ -1,4 +1,11 @@
(function () {
  var dateFormatter = function (value) {
    if (!value) {
      return "-";
    }
    return value.toDate().toLocaleString();
  };

  var QuerySearchCtrl = function($scope, $location, $filter, Events, Query) {
    $scope.$parent.pageTitle = "Queries Search";

@@ -8,11 +15,6 @@
    maxSize: 8,
  };

  var dateFormatter = function (value) {
    if (!value) return "-";
    return value.format("DD/MM/YY HH:mm");
  }

  $scope.gridColumns = [
    {
      "label": "Name",
@@ -30,9 +32,9 @@
    },
    {
      'label': 'Update Schedule',
      'map': 'ttl',
      'map': 'schedule',
      'formatFunction': function (value) {
        return $filter('refreshRateHumanize')(value);
        return $filter('scheduleHumanize')(value);
      }
    }
  ];
@@ -70,11 +72,6 @@
  $scope.allQueries = [];
  $scope.queries = [];

  var dateFormatter = function (value) {
    if (!value) return "-";
    return value.format("DD/MM/YY HH:mm");
  }

  var filterQueries = function () {
    $scope.queries = _.filter($scope.allQueries, function (query) {
      if (!$scope.selectedTab) {
@@ -130,9 +127,9 @@
    },
    {
      'label': 'Update Schedule',
      'map': 'ttl',
      'map': 'schedule',
      'formatFunction': function (value) {
        return $filter('refreshRateHumanize')(value);
        return $filter('scheduleHumanize')(value);
      }
    }
  ]
@@ -27,12 +27,19 @@
  var filters = {};
  _.each(queryResults, function(queryResult) {
    var queryFilters = queryResult.getFilters();
    _.each(queryFilters, function (filter) {
      if (!_.has(filters, filter.name)) {
    // TODO: first object should be a copy, otherwise one of the chart filters behaves different than the others.
    _.each(queryFilters, function (queryFilter) {
      var hasQueryStringValue = _.has($location.search(), queryFilter.name);

      if (!(hasQueryStringValue || dashboard.dashboard_filters_enabled)) {
        // If dashboard filters not enabled, or no query string value given, skip filters linking.
        return;
      }

      if (!_.has(filters, queryFilter.name)) {
        var filter = _.extend({}, queryFilter);
        filters[filter.name] = filter;
        filters[filter.name].originFilters = [];
        if (_.has($location.search(), filter.name)) {
        if (hasQueryStringValue) {
          filter.current = $location.search()[filter.name];
        }

@@ -44,7 +51,7 @@
      }

      // TODO: merge values.
      filters[filter.name].originFilters.push(filter);
      filters[queryFilter.name].originFilters.push(queryFilter);
    });
  });

@@ -93,9 +100,13 @@
  Events.record(currentUser, "autorefresh", "dashboard", dashboard.id, {'enable': $scope.refreshEnabled});

  if ($scope.refreshEnabled) {
    var refreshRate = _.min(_.flatten($scope.dashboard.widgets), function(widget) {
      return widget.visualization.query.ttl;
    }).visualization.query.ttl;
    var refreshRate = _.min(_.map(_.flatten($scope.dashboard.widgets), function(widget) {
      var schedule = widget.visualization.query.schedule;
      if (schedule === null || schedule.match(/\d\d:\d\d/) !== null) {
        return 60;
      }
      return widget.visualization.query.schedule;
    }));

    $scope.refreshRate = _.max([120, refreshRate * 2]) * 1000;

@@ -131,7 +142,6 @@
  var parameters = Query.collectParamsFromQueryString($location, $scope.query);
  var maxAge = $location.search()['maxAge'];
  $scope.queryResult = $scope.query.getQueryResult(maxAge, parameters);
  $scope.nextUpdateTime = moment(new Date(($scope.query.updated_at + $scope.query.ttl + $scope.query.runtime + 300) * 1000)).fromNow();

  $scope.type = 'visualization';
} else {
rd_ui/app/scripts/controllers/data_sources.js (new file, 47 lines)
@@ -0,0 +1,47 @@
(function () {
  var DataSourcesCtrl = function ($scope, $location, growl, Events, DataSource) {
    Events.record(currentUser, "view", "page", "admin/data_sources");
    $scope.$parent.pageTitle = "Data Sources";

    $scope.dataSources = DataSource.query();

    $scope.openDataSource = function(datasource) {
      $location.path('/data_sources/' + datasource.id);
    };

    $scope.deleteDataSource = function(event, datasource) {
      event.stopPropagation();
      Events.record(currentUser, "delete", "datasource", datasource.id);
      datasource.$delete(function(resource) {
        growl.addSuccessMessage("Data source deleted succesfully.");
        this.$parent.dataSources = _.without(this.dataSources, resource);
      }.bind(this), function(httpResponse) {
        console.log("Failed to delete data source: ", httpResponse.status, httpResponse.statusText, httpResponse.data);
        growl.addErrorMessage("Failed to delete data source.");
      });
    }
  };

  var DataSourceCtrl = function ($scope, $routeParams, $http, $location, Events, DataSource) {
    Events.record(currentUser, "view", "page", "admin/data_source");
    $scope.$parent.pageTitle = "Data Sources";

    $scope.dataSourceId = $routeParams.dataSourceId;

    if ($scope.dataSourceId == "new") {
      $scope.dataSource = new DataSource({options: {}});
    } else {
      $scope.dataSource = DataSource.get({id: $routeParams.dataSourceId});
    }

    $scope.$watch('dataSource.id', function(id) {
      if (id != $scope.dataSourceId && id !== undefined) {
        $location.path('/data_sources/' + id).replace();
      }
    });
  };

  angular.module('redash.controllers')
    .controller('DataSourcesCtrl', ['$scope', '$location', 'growl', 'Events', 'DataSource', DataSourcesCtrl])
    .controller('DataSourceCtrl', ['$scope', '$routeParams', '$http', '$location', 'Events', 'DataSource', DataSourceCtrl])
})();
@@ -17,7 +17,7 @@
|
||||
saveQuery = $scope.saveQuery;
|
||||
|
||||
$scope.sourceMode = true;
|
||||
$scope.canEdit = currentUser.canEdit($scope.query);
|
||||
$scope.canEdit = true;
|
||||
$scope.isDirty = false;
|
||||
|
||||
$scope.newVisualization = undefined;
|
||||
@@ -29,7 +29,7 @@
|
||||
}
|
||||
});
|
||||
|
||||
KeyboardShortcuts.bind({
|
||||
var shortcuts = {
|
||||
'meta+s': function () {
|
||||
if ($scope.canEdit) {
|
||||
$scope.saveQuery();
|
||||
@@ -44,7 +44,9 @@
|
||||
'meta+enter': $scope.executeQuery,
|
||||
// Ctrl+Enter for PC
|
||||
'ctrl+enter': $scope.executeQuery
|
||||
});
|
||||
};
|
||||
|
||||
KeyboardShortcuts.bind(shortcuts);
|
||||
|
||||
// @override
|
||||
$scope.saveQuery = function(options, data) {
|
||||
@@ -66,7 +68,7 @@
|
||||
$scope.duplicateQuery = function() {
|
||||
Events.record(currentUser, 'fork', 'query', $scope.query.id);
|
||||
$scope.query.id = null;
|
||||
$scope.query.ttl = -1;
|
||||
$scope.query.schedule = null;
|
||||
|
||||
$scope.saveQuery({
|
||||
successMessage: 'Query forked',
|
||||
|
||||
@@ -1,38 +1,76 @@
|
||||
(function() {
|
||||
'use strict';
|
||||
|
||||
function QueryViewCtrl($scope, Events, $route, $location, notifications, growl, Query, DataSource) {
|
||||
function QueryViewCtrl($scope, Events, $route, $location, notifications, growl, $modal, Query, DataSource) {
|
||||
var DEFAULT_TAB = 'table';
|
||||
|
||||
var getQueryResult = function(ttl) {
|
||||
var getQueryResult = function(maxAge) {
|
||||
// Collect params and call getQueryResult with them; getQueryResult merges them into the query
|
||||
var parameters = Query.collectParamsFromQueryString($location, $scope.query);
|
||||
if (ttl == undefined) {
|
||||
ttl = $location.search()['maxAge'];
|
||||
if (maxAge == undefined) {
|
||||
maxAge = $location.search()['maxAge'];
|
||||
}
|
||||
$scope.queryResult = $scope.query.getQueryResult(ttl, parameters);
|
||||
|
||||
if (maxAge == undefined) {
|
||||
maxAge = -1;
|
||||
}
|
||||
|
||||
$scope.showLog = false;
|
||||
$scope.queryResult = $scope.query.getQueryResult(maxAge, parameters);
|
||||
}
|
||||
|
||||
$scope.dataSource = {};
|
||||
$scope.query = $route.current.locals.query;
|
||||
|
||||
var updateSchema = function() {
|
||||
$scope.hasSchema = false;
|
||||
$scope.editorSize = "col-md-12";
|
||||
var dataSourceId = $scope.query.data_source_id || $scope.dataSources[0].id;
|
||||
DataSource.getSchema({id: dataSourceId}, function(data) {
|
||||
if (data && data.length > 0) {
|
||||
$scope.schema = data;
|
||||
_.each(data, function(table) {
|
||||
table.collapsed = true;
|
||||
});
|
||||
|
||||
$scope.editorSize = "col-md-9";
|
||||
$scope.hasSchema = true;
|
||||
} else {
|
||||
$scope.hasSchema = false;
|
||||
$scope.editorSize = "col-md-12";
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
Events.record(currentUser, 'view', 'query', $scope.query.id);
|
||||
getQueryResult();
|
||||
$scope.queryExecuting = false;
|
||||
|
||||
$scope.isQueryOwner = currentUser.id === $scope.query.user.id;
|
||||
$scope.isQueryOwner = (currentUser.id === $scope.query.user.id) || currentUser.hasPermission('admin');
|
||||
$scope.canViewSource = currentUser.hasPermission('view_source');
|
||||
|
||||
$scope.dataSources = DataSource.get(function(dataSources) {
|
||||
$scope.query.data_source_id = $scope.query.data_source_id || dataSources[0].id;
|
||||
$scope.dataSources = DataSource.query(function(dataSources) {
|
||||
updateSchema();
|
||||
|
||||
if ($scope.query.isNew()) {
|
||||
$scope.query.data_source_id = $scope.query.data_source_id || dataSources[0].id;
|
||||
$scope.dataSource = _.find(dataSources, function(ds) { return ds.id == $scope.query.data_source_id; });
|
||||
}
|
||||
});
|
||||
|
||||
// in view mode, latest dataset is always visible
|
||||
// source mode changes this behavior
|
||||
$scope.showDataset = true;
|
||||
$scope.showLog = false;
|
||||
|
||||
$scope.lockButton = function(lock) {
|
||||
$scope.queryExecuting = lock;
|
||||
};
|
||||
|
||||
$scope.showApiKey = function() {
|
||||
alert("API Key for this query:\n" + $scope.query.api_key);
|
||||
};
|
||||
|
||||
$scope.saveQuery = function(options, data) {
|
||||
if (data) {
|
||||
data.id = $scope.query.id;
|
||||
@@ -66,6 +104,9 @@
|
||||
};
|
||||
|
||||
$scope.executeQuery = function() {
|
||||
if (!$scope.query.query) {
|
||||
return;
|
||||
}
|
||||
getQueryResult(0);
|
||||
$scope.lockButton(true);
|
||||
$scope.cancelling = false;
|
||||
@@ -77,24 +118,24 @@
|
||||
$scope.queryResult.cancelExecution();
|
||||
Events.record(currentUser, 'cancel_execute', 'query', $scope.query.id);
|
||||
};
|
||||
|
||||
|
||||
$scope.archiveQuery = function(options, data) {
|
||||
if (data) {
|
||||
data.id = $scope.query.id;
|
||||
} else {
|
||||
data = $scope.query;
|
||||
}
|
||||
|
||||
|
||||
$scope.isDirty = false;
|
||||
|
||||
|
||||
options = _.extend({}, {
|
||||
successMessage: 'Query archived',
|
||||
errorMessage: 'Query could not be archived'
|
||||
}, options);
|
||||
|
||||
|
||||
return Query.delete({id: data.id}, function() {
|
||||
$scope.query.is_archived = true;
|
||||
$scope.query.ttl = -1;
|
||||
$scope.query.schedule = null;
|
||||
growl.addSuccessMessage(options.successMessage);
|
||||
// This feels dirty.
|
||||
$('#archive-confirmation-modal').modal('hide');
|
||||
@@ -117,6 +158,8 @@
|
||||
});
|
||||
}
|
||||
|
||||
updateSchema();
|
||||
$scope.dataSource = _.find($scope.dataSources, function(ds) { return ds.id == $scope.query.data_source_id; });
|
||||
$scope.executeQuery();
|
||||
};
|
||||
|
||||
@@ -155,13 +198,41 @@
|
||||
$scope.query.queryResult = $scope.queryResult;
|
||||
|
||||
notifications.showNotification("re:dash", $scope.query.name + " updated.");
|
||||
} else if (status == 'failed') {
|
||||
notifications.showNotification("re:dash", $scope.query.name + " failed to run: " + $scope.queryResult.getError());
|
||||
}
|
||||
|
||||
if (status === 'done' || status === 'failed') {
|
||||
$scope.lockButton(false);
|
||||
}
|
||||
|
||||
if ($scope.queryResult.getLog() != null) {
|
||||
$scope.showLog = true;
|
||||
}
|
||||
});
|
||||
|
||||
$scope.openScheduleForm = function() {
|
||||
if (!$scope.isQueryOwner) {
|
||||
return;
|
||||
};
|
||||
|
||||
$modal.open({
|
||||
templateUrl: '/views/schedule_form.html',
|
||||
size: 'sm',
|
||||
scope: $scope,
|
||||
controller: ['$scope', '$modalInstance', function($scope, $modalInstance) {
|
||||
$scope.close = function() {
|
||||
$modalInstance.close();
|
||||
}
|
||||
if ($scope.query.hasDailySchedule()) {
|
||||
$scope.refreshType = 'daily';
|
||||
} else {
|
||||
$scope.refreshType = 'periodic';
|
||||
}
|
||||
}]
|
||||
});
|
||||
};
|
||||
|
||||
$scope.$watch(function() {
|
||||
return $location.hash()
|
||||
}, function(hash) {
|
||||
@@ -174,5 +245,5 @@
|
||||
|
||||
angular.module('redash.controllers')
|
||||
.controller('QueryViewCtrl',
|
||||
['$scope', 'Events', '$route', '$location', 'notifications', 'growl', 'Query', 'DataSource', QueryViewCtrl]);
|
||||
['$scope', 'Events', '$route', '$location', 'notifications', 'growl', '$modal', 'Query', 'DataSource', QueryViewCtrl]);
|
||||
})();
|
||||
|
||||
76
rd_ui/app/scripts/directives/data_source_directives.js
Normal file
@@ -0,0 +1,76 @@
|
||||
(function () {
|
||||
'use strict';
|
||||
|
||||
var directives = angular.module('redash.directives');
|
||||
|
||||
// Angular strips data- from the directive, so data-source-form becomes sourceForm...
|
||||
directives.directive('sourceForm', ['$http', 'growl', function ($http, growl) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
replace: true,
|
||||
templateUrl: '/views/data_sources/form.html',
|
||||
scope: {
|
||||
'dataSource': '='
|
||||
},
|
||||
link: function ($scope) {
|
||||
var setType = function(types) {
|
||||
if ($scope.dataSource.type === undefined) {
|
||||
$scope.dataSource.type = types[0].type;
|
||||
return types[0];
|
||||
}
|
||||
|
||||
$scope.type = _.find(types, function (t) {
|
||||
return t.type == $scope.dataSource.type;
|
||||
});
|
||||
};
|
||||
|
||||
$scope.files = {};
|
||||
|
||||
$scope.$watchCollection('files', function() {
|
||||
_.each($scope.files, function(v, k) {
|
||||
if (v) {
|
||||
$scope.dataSource.options[k] = v.base64;
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
$http.get('/api/data_sources/types').success(function (types) {
|
||||
setType(types);
|
||||
|
||||
$scope.dataSourceTypes = types;
|
||||
|
||||
_.each(types, function (type) {
|
||||
_.each(type.configuration_schema.properties, function (prop, name) {
|
||||
if (name == 'password' || name == 'passwd') {
|
||||
prop.type = 'password';
|
||||
}
|
||||
|
||||
if (_.string.endsWith(name, "File")) {
|
||||
prop.type = 'file';
|
||||
}
|
||||
|
||||
prop.required = _.contains(type.configuration_schema.required, name);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
$scope.$watch('dataSource.type', function(current, prev) {
|
||||
if (prev !== current) {
|
||||
if (prev !== undefined) {
|
||||
$scope.dataSource.options = {};
|
||||
}
|
||||
setType($scope.dataSourceTypes);
|
||||
}
|
||||
});
|
||||
|
||||
$scope.saveChanges = function() {
|
||||
$scope.dataSource.$save(function() {
|
||||
growl.addSuccessMessage("Saved.");
|
||||
}, function() {
|
||||
growl.addErrorMessage("Failed saving.");
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}]);
|
||||
})();
|
||||
@@ -8,7 +8,7 @@
|
||||
'query': '=',
|
||||
'visualization': '=?'
|
||||
},
|
||||
template: '<a ng-href="{{link}}" class="query-link">{{query.name}}</a>',
|
||||
template: '<small><span class="glyphicon glyphicon-link"></span></small> <a ng-href="{{link}}" class="query-link">{{query.name}}</a>',
|
||||
link: function(scope, element) {
|
||||
scope.link = '/queries/' + scope.query.id;
|
||||
if (scope.visualization) {
|
||||
@@ -29,7 +29,7 @@
|
||||
restrict: 'E',
|
||||
template: '<span ng-show="query.id && canViewSource">\
|
||||
<a ng-show="!sourceMode"\
|
||||
ng-href="{{query.id}}/source#{{selectedTab}}">Show Source\
|
||||
ng-href="/queries/{{query.id}}/source#{{selectedTab}}">Show Source\
|
||||
</a>\
|
||||
<a ng-show="sourceMode"\
|
||||
ng-href="/queries/{{query.id}}#{{selectedTab}}">Hide Source\
|
||||
@@ -63,26 +63,97 @@
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
'query': '=',
|
||||
'lock': '='
|
||||
'lock': '=',
|
||||
'schema': '=',
|
||||
'syntax': '='
|
||||
},
|
||||
template: '<textarea\
|
||||
ui-codemirror="editorOptions"\
|
||||
ng-model="query.query">',
|
||||
link: function($scope) {
|
||||
$scope.editorOptions = {
|
||||
mode: 'text/x-sql',
|
||||
template: '<textarea></textarea>',
|
||||
link: {
|
||||
pre: function ($scope, element) {
|
||||
$scope.syntax = $scope.syntax || 'sql';
|
||||
|
||||
var modes = {
|
||||
'sql': 'text/x-sql',
|
||||
'python': 'text/x-python',
|
||||
'json': 'application/json'
|
||||
};
|
||||
|
||||
var textarea = element.children()[0];
|
||||
var editorOptions = {
|
||||
mode: modes[$scope.syntax],
|
||||
lineWrapping: true,
|
||||
lineNumbers: true,
|
||||
readOnly: false,
|
||||
matchBrackets: true,
|
||||
autoCloseBrackets: true
|
||||
};
|
||||
autoCloseBrackets: true,
|
||||
extraKeys: {"Ctrl-Space": "autocomplete"}
|
||||
};
|
||||
|
||||
$scope.$watch('lock', function(locked) {
|
||||
$scope.editorOptions.readOnly = locked ? 'nocursor' : false;
|
||||
});
|
||||
var additionalHints = [];
|
||||
|
||||
CodeMirror.commands.autocomplete = function(cm) {
|
||||
var hinter = function(editor, options) {
|
||||
var hints = CodeMirror.hint.anyword(editor, options);
|
||||
var cur = editor.getCursor(), token = editor.getTokenAt(cur).string;
|
||||
|
||||
hints.list = _.union(hints.list, _.filter(additionalHints, function (h) {
|
||||
return h.search(token) === 0;
|
||||
}));
|
||||
|
||||
return hints;
|
||||
};
|
||||
|
||||
// CodeMirror.showHint(cm, CodeMirror.hint.anyword);
|
||||
CodeMirror.showHint(cm, hinter);
|
||||
};
|
||||
|
||||
var codemirror = CodeMirror.fromTextArea(textarea, editorOptions);
|
||||
|
||||
codemirror.on('change', function(instance) {
|
||||
var newValue = instance.getValue();
|
||||
|
||||
if (newValue !== $scope.query.query) {
|
||||
$scope.$evalAsync(function() {
|
||||
$scope.query.query = newValue;
|
||||
});
|
||||
}
|
||||
|
||||
$('.schema-container').css('height', $('.CodeMirror').css('height'));
|
||||
});
|
||||
|
||||
$scope.$watch('query.query', function () {
|
||||
if ($scope.query.query !== codemirror.getValue()) {
|
||||
codemirror.setValue($scope.query.query);
|
||||
}
|
||||
});
|
||||
|
||||
$scope.$watch('schema', function (schema) {
|
||||
if (schema) {
|
||||
var keywords = [];
|
||||
_.each(schema, function (table) {
|
||||
keywords.push(table.name);
|
||||
_.each(table.columns, function (c) {
|
||||
keywords.push(c);
|
||||
});
|
||||
});
|
||||
|
||||
additionalHints = _.unique(keywords);
|
||||
}
|
||||
|
||||
codemirror.refresh();
|
||||
});
|
||||
|
||||
$scope.$watch('syntax', function(syntax) {
|
||||
codemirror.setOption('mode', modes[syntax]);
|
||||
});
|
||||
|
||||
$scope.$watch('lock', function (locked) {
|
||||
var readOnly = locked ? 'nocursor' : false;
|
||||
codemirror.setOption('readOnly', readOnly);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
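
A minimal sketch (assuming CodeMirror's show-hint and anyword-hint addons are loaded, as the hunk above relies on) of how extra schema keywords can be merged into the autocomplete list:

// Sketch: build a hint function that adds schema names on top of CodeMirror's anyword hints.
function schemaHinter(additionalHints) {
  return function (editor, options) {
    var hints = CodeMirror.hint.anyword(editor, options);
    var token = editor.getTokenAt(editor.getCursor()).string;
    var extra = additionalHints.filter(function (h) { return h.indexOf(token) === 0; });
    hints.list = _.union(hints.list, extra); // underscore is already a dependency of this code
    return hints;
  };
}

// Usage sketch, mirroring the Ctrl-Space binding above (table/column names are illustrative):
// CodeMirror.commands.autocomplete = function (cm) {
//   CodeMirror.showHint(cm, schemaHinter(['users', 'events', 'created_at']));
// };
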
|
||||
|
||||
function queryFormatter($http) {
|
||||
@@ -98,55 +169,111 @@
|
||||
</button>',
|
||||
link: function($scope) {
|
||||
$scope.formatQuery = function formatQuery() {
|
||||
$scope.queryExecuting = true;
|
||||
$scope.queryFormatting = true;
|
||||
$http.post('/api/queries/format', {
|
||||
'query': $scope.query.query
|
||||
}).success(function (response) {
|
||||
$scope.query.query = response;
|
||||
}).finally(function () {
|
||||
$scope.queryExecuting = false;
|
||||
$scope.queryFormatting = false;
|
||||
});
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function queryTimePicker() {
|
||||
return {
|
||||
restrict: 'E',
|
||||
template: '<select ng-disabled="refreshType != \'daily\'" ng-model="hour" ng-change="updateSchedule()" ng-options="c as c for c in hourOptions"></select> :\
|
||||
<select ng-disabled="refreshType != \'daily\'" ng-model="minute" ng-change="updateSchedule()" ng-options="c as c for c in minuteOptions"></select>',
|
||||
link: function($scope) {
|
||||
var padWithZeros = function(size, v) {
|
||||
v = String(v);
|
||||
if (v.length < size) {
|
||||
v = "0" + v;
|
||||
}
|
||||
return v;
|
||||
};
|
||||
|
||||
$scope.hourOptions = _.map(_.range(0, 24), _.partial(padWithZeros, 2));
|
||||
$scope.minuteOptions = _.map(_.range(0, 60, 5), _.partial(padWithZeros, 2));
|
||||
|
||||
if ($scope.query.hasDailySchedule()) {
|
||||
var parts = $scope.query.scheduleInLocalTime().split(':');
|
||||
$scope.minute = parts[1];
|
||||
$scope.hour = parts[0];
|
||||
} else {
|
||||
$scope.minute = "15";
|
||||
$scope.hour = "00";
|
||||
}
|
||||
|
||||
$scope.updateSchedule = function() {
|
||||
var newSchedule = moment().hour($scope.hour).minute($scope.minute).utc().format('HH:mm');
|
||||
if (newSchedule != $scope.query.schedule) {
|
||||
$scope.query.schedule = newSchedule;
|
||||
$scope.saveQuery();
|
||||
}
|
||||
};
|
||||
|
||||
$scope.$watch('refreshType', function() {
|
||||
if ($scope.refreshType == 'daily') {
|
||||
$scope.updateSchedule();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function queryRefreshSelect() {
|
||||
return {
|
||||
restrict: 'E',
|
||||
template: '<select\
|
||||
ng-disabled="!isQueryOwner"\
|
||||
ng-model="query.ttl"\
|
||||
ng-disabled="refreshType != \'periodic\'"\
|
||||
ng-model="query.schedule"\
|
||||
ng-change="saveQuery()"\
|
||||
ng-options="c.value as c.name for c in refreshOptions">\
|
||||
<option value="">No Refresh</option>\
|
||||
</select>',
|
||||
link: function($scope) {
|
||||
$scope.refreshOptions = [
|
||||
{
|
||||
value: -1,
|
||||
name: 'No Refresh'
|
||||
},
|
||||
{
|
||||
value: 60,
|
||||
value: "60",
|
||||
name: 'Every minute'
|
||||
},
|
||||
]
|
||||
}
|
||||
];
|
||||
|
||||
_.each([5, 10, 15, 30], function(i) {
|
||||
$scope.refreshOptions.push({
|
||||
value: String(i*60),
|
||||
name: "Every " + i + " minutes"
|
||||
})
|
||||
});
|
||||
|
||||
_.each(_.range(1, 13), function (i) {
|
||||
$scope.refreshOptions.push({
|
||||
value: i * 3600,
|
||||
value: String(i * 3600),
|
||||
name: 'Every ' + i + 'h'
|
||||
});
|
||||
})
|
||||
|
||||
$scope.refreshOptions.push({
|
||||
value: 24 * 3600,
|
||||
value: String(24 * 3600),
|
||||
name: 'Every 24h'
|
||||
});
|
||||
$scope.refreshOptions.push({
|
||||
value: 7 * 24 * 3600,
|
||||
value: String(7 * 24 * 3600),
|
||||
name: 'Once a week'
|
||||
});
|
||||
|
||||
$scope.$watch('refreshType', function() {
|
||||
if ($scope.refreshType == 'periodic') {
|
||||
if ($scope.query.hasDailySchedule()) {
|
||||
$scope.query.schedule = null;
|
||||
$scope.saveQuery();
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
@@ -158,5 +285,6 @@
|
||||
.directive('queryResultLink', queryResultCSVLink)
|
||||
.directive('queryEditor', queryEditor)
|
||||
.directive('queryRefreshSelect', queryRefreshSelect)
|
||||
.directive('queryTimePicker', queryTimePicker)
|
||||
.directive('queryFormatter', ['$http', queryFormatter]);
|
||||
})();
|
||||
@@ -24,13 +24,17 @@ angular.module('redash.filters', []).
|
||||
return durationHumanize;
|
||||
})
|
||||
|
||||
.filter('refreshRateHumanize', function () {
|
||||
return function (ttl) {
|
||||
if (ttl == -1) {
|
||||
.filter('scheduleHumanize', function() {
|
||||
return function (schedule) {
|
||||
if (schedule === null) {
|
||||
return "Never";
|
||||
} else {
|
||||
return "Every " + durationHumanize(ttl);
|
||||
} else if (schedule.match(/\d\d:\d\d/) !== null) {
|
||||
var parts = schedule.split(':');
|
||||
var localTime = moment.utc().hour(parts[0]).minute(parts[1]).local().format('HH:mm');
|
||||
return "Every day at " + localTime;
|
||||
}
|
||||
|
||||
return "Every " + durationHumanize(parseInt(schedule));
|
||||
}
|
||||
})
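
For reference, a brief sketch of the mapping the new scheduleHumanize filter implements (query.schedule holds null, a daily 'HH:mm' time stored in UTC, or an interval in seconds as a string; durationHumanize is the filter defined earlier in this module):

// Illustrative inputs and outputs only:
//   scheduleHumanize(null)    -> "Never"
//   scheduleHumanize("13:30") -> "Every day at 15:30"   (the stored UTC time shown in local time, here UTC+2)
//   scheduleHumanize("3600")  -> "Every " + durationHumanize(3600), e.g. "Every 1h"
// Template usage, as in the query view: {{query.schedule | scheduleHumanize}}
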
|
||||
|
||||
|
||||
@@ -50,7 +50,7 @@
|
||||
;
|
||||
|
||||
if (moment.isMoment(this.x)) {
|
||||
var s = '<b>' + moment(this.x).format("DD/MM/YY HH:mm") + '</b>',
|
||||
var s = '<b>' + this.x.toDate().toLocaleString() + '</b>',
|
||||
pointsCount = this.points.length;
|
||||
|
||||
$.each(this.points, function (i, point) {
|
||||
@@ -145,7 +145,7 @@
|
||||
|
||||
if (!hasTotalsAlready) {
|
||||
this.addSeries({
|
||||
data: _.values(data),
|
||||
data: _.sortBy(_.values(data), 'x'),
|
||||
type: 'line',
|
||||
name: 'Total'
|
||||
}, false)
|
||||
@@ -153,6 +153,24 @@
|
||||
|
||||
this.redraw();
|
||||
}
|
||||
},
|
||||
{
|
||||
text: 'Save Image',
|
||||
onclick: function () {
|
||||
var canvas = document.createElement('canvas');
|
||||
window.canvg(canvas, this.getSVG());
|
||||
var href = canvas.toDataURL('image/png');
|
||||
var a = document.createElement('a');
|
||||
a.href = href;
|
||||
var filenameSuffix = new Date().toISOString().replace(/:/g,'_').replace('Z', '');
|
||||
if (this.title) {
|
||||
filenameSuffix = this.title.text;
|
||||
}
|
||||
a.download = 'redash_charts_'+filenameSuffix+'.png';
|
||||
document.body.appendChild(a);
|
||||
a.click();
|
||||
a.remove();
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -287,8 +305,29 @@
|
||||
scope.chart.series[0].remove(false);
|
||||
};
|
||||
|
||||
// We check either for true or undefined for backward compatibility.
|
||||
var series = scope.series;
|
||||
|
||||
|
||||
// If this is a chart that has just one row for multiple columns, sort
|
||||
// by the Y values. For example:
|
||||
//
|
||||
// A | B | C
|
||||
// 20 | 30 | 15
|
||||
//
|
||||
// Will be sorted:
|
||||
// C | A | B
|
||||
// 15 | 20 | 30
|
||||
var sortable = _.every(series, function(s) { return s.data.length == 1 });
|
||||
|
||||
if (sortable) {
|
||||
series = _.sortBy(series, function (s) {
|
||||
return s.data[0].y
|
||||
});
|
||||
}
|
||||
|
||||
if (!('xAxis' in chartOptions && 'type' in chartOptions['xAxis'])) {
|
||||
if (scope.series.length > 0 && _.some(scope.series[0].data, function (p) {
|
||||
if (series.length > 0 && _.some(series[0].data, function (p) {
|
||||
return (angular.isString(p.x) || angular.isDefined(p.name));
|
||||
})) {
|
||||
chartOptions['xAxis'] = chartOptions['xAxis'] || {};
|
||||
@@ -300,13 +339,13 @@
|
||||
}
|
||||
|
||||
if (chartOptions['xAxis']['type'] == 'category' || chartOptions['series']['type']=='pie') {
|
||||
if (!angular.isDefined(scope.series[0].data[0].name)) {
|
||||
if (!angular.isDefined(series[0].data[0].name)) {
|
||||
// We need to make sure that for each category, each series has a value.
|
||||
var categories = _.union.apply(this, _.map(scope.series, function (s) {
|
||||
var categories = _.union.apply(this, _.map(series, function (s) {
|
||||
return _.pluck(s.data, 'x')
|
||||
}));
|
||||
|
||||
_.each(scope.series, function (s) {
|
||||
_.each(series, function (s) {
|
||||
// TODO: move this logic to Query#getChartData
|
||||
var yValues = _.groupBy(s.data, 'x');
|
||||
|
||||
@@ -317,19 +356,31 @@
|
||||
}
|
||||
});
|
||||
|
||||
if (categories.length == 1) {
|
||||
newData = _.sortBy(newData, 'y').reverse();
|
||||
}
|
||||
;
|
||||
|
||||
s.data = newData;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (chartOptions['sortX'] === true || chartOptions['sortX'] === undefined) {
|
||||
var seriesCopy = [];
|
||||
|
||||
_.each(series, function (s) {
|
||||
// make a copy of the series data, so we don't override the original.
|
||||
var fieldName = 'x';
|
||||
if (s.data.length > 0 && _.has(s.data[0], 'name')) {
|
||||
fieldName = 'name';
|
||||
};
|
||||
|
||||
var sorted = _.extend({}, s, {data: _.sortBy(s.data, fieldName)});
|
||||
seriesCopy.push(sorted);
|
||||
});
|
||||
|
||||
series = seriesCopy;
|
||||
}
|
||||
|
||||
scope.chart.counters.color = 0;
|
||||
|
||||
_.each(scope.series, function (s) {
|
||||
_.each(series, function (s) {
|
||||
// here we override the series with the visualization config
|
||||
s = _.extend(s, chartOptions['series']);
|
||||
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,4 +1,28 @@
|
||||
(function () {
|
||||
function QueryResultError(errorMessage) {
|
||||
this.errorMessage = errorMessage;
|
||||
}
|
||||
|
||||
QueryResultError.prototype.getError = function() {
|
||||
return this.errorMessage;
|
||||
};
|
||||
|
||||
QueryResultError.prototype.getStatus = function() {
|
||||
return 'failed';
|
||||
};
|
||||
|
||||
QueryResultError.prototype.getData = function() {
|
||||
return null;
|
||||
};
|
||||
|
||||
QueryResultError.prototype.getLog = function() {
|
||||
return null;
|
||||
};
|
||||
|
||||
QueryResultError.prototype.getChartData = function() {
|
||||
return null;
|
||||
};
|
||||
|
||||
var QueryResult = function ($resource, $timeout, $q) {
|
||||
var QueryResultResource = $resource('/api/query_results/:id', {id: '@id'}, {'post': {'method': 'POST'}});
|
||||
var Job = $resource('/api/jobs/:id', {id: '@id'});
|
||||
@@ -12,6 +36,8 @@
|
||||
|
||||
var columnTypes = {};
|
||||
|
||||
// TODO: we should stop manipulating incoming data, and switch to relying on the column type set by the backend.
|
||||
// This logic is prone to errors and is better removed. Kept for now for backward compatibility.
|
||||
_.each(this.query_result.data.rows, function (row) {
|
||||
_.each(row, function (v, k) {
|
||||
if (angular.isNumber(v)) {
|
||||
@@ -30,7 +56,9 @@
|
||||
|
||||
_.each(this.query_result.data.columns, function(column) {
|
||||
if (columnTypes[column.name]) {
|
||||
column.type = columnTypes[column.name];
|
||||
if (column.type == null || column.type == 'string') {
|
||||
column.type = columnTypes[column.name];
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
@@ -40,7 +68,7 @@
|
||||
} else {
|
||||
this.status = undefined;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
function QueryResult(props) {
|
||||
this.deferred = $q.defer();
|
||||
@@ -91,6 +119,14 @@
|
||||
return this.job.error;
|
||||
}
|
||||
|
||||
QueryResult.prototype.getLog = function() {
|
||||
if (!this.query_result.data || !this.query_result.data.log || this.query_result.data.log.length == 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return this.query_result.data.log;
|
||||
}
|
||||
|
||||
QueryResult.prototype.getUpdatedAt = function () {
|
||||
return this.query_result.retrieved_at || this.job.updated_at * 1000.0 || this.updatedAt;
|
||||
}
|
||||
@@ -214,10 +250,6 @@
|
||||
}
|
||||
});
|
||||
|
||||
_.each(series, function (series) {
|
||||
series.data = _.sortBy(series.data, 'x');
|
||||
});
|
||||
|
||||
return _.values(series);
|
||||
};
|
||||
|
||||
@@ -247,26 +279,9 @@
|
||||
return parts[0];
|
||||
};
|
||||
|
||||
var charConversionMap = {
|
||||
'__pct': /%/g,
|
||||
'_': / /g,
|
||||
'__qm': /\?/g,
|
||||
'__brkt': /[\(\)\[\]]/g,
|
||||
'__dash': /-/g,
|
||||
'__amp': /&/g,
|
||||
'__sl': /\//g,
|
||||
'__fsl': /\\/g,
|
||||
};
|
||||
|
||||
QueryResult.prototype.getColumnCleanName = function (column) {
|
||||
var name = this.getColumnNameWithoutType(column);
|
||||
|
||||
if (name != '') {
|
||||
_.each(charConversionMap, function(regex, replacement) {
|
||||
name = name.replace(regex, replacement);
|
||||
});
|
||||
}
|
||||
|
||||
return name;
|
||||
}
|
||||
|
||||
@@ -329,7 +344,7 @@
|
||||
this.filters = filters;
|
||||
}
|
||||
|
||||
var refreshStatus = function (queryResult, query, ttl) {
|
||||
var refreshStatus = function (queryResult, query) {
|
||||
Job.get({'id': queryResult.job.id}, function (response) {
|
||||
queryResult.update(response);
|
||||
|
||||
@@ -339,25 +354,9 @@
|
||||
});
|
||||
} else if (queryResult.getStatus() != "failed") {
|
||||
$timeout(function () {
|
||||
refreshStatus(queryResult, query, ttl);
|
||||
refreshStatus(queryResult, query);
|
||||
}, 3000);
|
||||
}
|
||||
}, function() {
|
||||
var upHandler = function() {
|
||||
Offline.off('up', upHandler);
|
||||
console.log('trying again');
|
||||
refreshStatus(queryResult, query, ttl);
|
||||
};
|
||||
|
||||
var downHandler = function() {
|
||||
console.log('2 handling down case');
|
||||
|
||||
Offline.on('up', upHandler);
|
||||
Offline.off('down', downHandler);
|
||||
};
|
||||
|
||||
Offline.on('down', downHandler);
|
||||
Offline.check();
|
||||
})
|
||||
}
|
||||
|
||||
@@ -375,14 +374,19 @@
|
||||
return this.deferred.promise;
|
||||
}
|
||||
|
||||
QueryResult.get = function (data_source_id, query, ttl) {
|
||||
QueryResult.get = function (data_source_id, query, maxAge, queryId) {
|
||||
var queryResult = new QueryResult();
|
||||
|
||||
QueryResultResource.post({'data_source_id': data_source_id, 'query': query, 'ttl': ttl}, function (response) {
|
||||
var params = {'data_source_id': data_source_id, 'query': query, 'max_age': maxAge};
|
||||
if (queryId !== undefined) {
|
||||
params['query_id'] = queryId;
|
||||
};
|
||||
|
||||
QueryResultResource.post(params, function (response) {
|
||||
queryResult.update(response);
|
||||
|
||||
if ('job' in response) {
|
||||
refreshStatus(queryResult, query, ttl);
|
||||
refreshStatus(queryResult, query);
|
||||
}
|
||||
});
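
A hedged sketch of the request this now issues (field names are exactly those in the params object above; the values are illustrative):

// POST /api/query_results -- max_age replaces the old ttl parameter.
// max_age = 0 is used by executeQuery() to force a fresh run; the default elsewhere is -1,
// which lets the client reuse a cached result when one is available.
var payload = {
  data_source_id: 3,                         // illustrative id
  query: 'SELECT count(*) FROM events',      // illustrative query text
  max_age: 0,
  query_id: 42                               // illustrative; omitted when the query is not saved yet
};
// QueryResultResource.post(payload, function (response) { /* update + poll job status as above */ });
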
|
||||
|
||||
@@ -410,7 +414,7 @@
|
||||
return new Query({
|
||||
query: "",
|
||||
name: "New Query",
|
||||
ttl: -1,
|
||||
schedule: null,
|
||||
user: currentUser
|
||||
});
|
||||
};
|
||||
@@ -434,11 +438,23 @@
|
||||
return '/queries/' + this.id + '/source';
|
||||
};
|
||||
|
||||
Query.prototype.getQueryResult = function (ttl, parameters) {
|
||||
if (ttl == undefined) {
|
||||
ttl = this.ttl;
|
||||
}
|
||||
Query.prototype.isNew = function() {
|
||||
return this.id === undefined;
|
||||
};
|
||||
|
||||
Query.prototype.hasDailySchedule = function() {
|
||||
return (this.schedule && this.schedule.match(/\d\d:\d\d/) !== null);
|
||||
};
|
||||
|
||||
Query.prototype.scheduleInLocalTime = function() {
|
||||
var parts = this.schedule.split(':');
|
||||
return moment.utc().hour(parts[0]).minute(parts[1]).local().format('HH:mm');
|
||||
};
|
||||
|
||||
Query.prototype.getQueryResult = function (maxAge, parameters) {
|
||||
if (!this.query) {
|
||||
return;
|
||||
}
|
||||
var queryText = this.query;
|
||||
|
||||
var queryParameters = this.getParameters();
|
||||
@@ -463,16 +479,18 @@
|
||||
this.latest_query_data_id = null;
|
||||
}
|
||||
|
||||
if (this.latest_query_data && ttl != 0) {
|
||||
if (this.latest_query_data && maxAge != 0) {
|
||||
if (!this.queryResult) {
|
||||
this.queryResult = new QueryResult({'query_result': this.latest_query_data});
|
||||
}
|
||||
} else if (this.latest_query_data_id && ttl != 0) {
|
||||
} else if (this.latest_query_data_id && maxAge != 0) {
|
||||
if (!this.queryResult) {
|
||||
this.queryResult = QueryResult.getById(this.latest_query_data_id);
|
||||
}
|
||||
} else if (this.data_source_id) {
|
||||
this.queryResult = QueryResult.get(this.data_source_id, queryText, ttl);
|
||||
this.queryResult = QueryResult.get(this.data_source_id, queryText, maxAge, this.id);
|
||||
} else {
|
||||
return new QueryResultError("Please select data source to run this query.");
|
||||
}
|
||||
|
||||
return this.queryResult;
|
||||
@@ -485,11 +503,19 @@
|
||||
Query.prototype.getParameters = function() {
|
||||
var parts = Mustache.parse(this.query);
|
||||
var parameters = [];
|
||||
_.each(parts, function(part) {
|
||||
if (part[0] == 'name') {
|
||||
parameters.push(part[1]);
|
||||
}
|
||||
});
|
||||
var collectParams = function(parts) {
|
||||
parameters = [];
|
||||
_.each(parts, function(part) {
|
||||
if (part[0] == 'name' || part[0] == '&') {
|
||||
parameters.push(part[1]);
|
||||
} else if (part[0] == '#') {
|
||||
parameters = _.union(parameters, collectParams(part[4]));
|
||||
}
|
||||
});
|
||||
return parameters;
|
||||
};
|
||||
|
||||
parameters = collectParams(parts);
|
||||
|
||||
return parameters;
|
||||
}
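
A self-contained sketch of the same token walk (assumes mustache.js and underscore are loaded globally, as in this app): Mustache.parse returns token arrays whose first element is the type; 'name' and '&' tokens are variables, and '#' section tokens keep their nested tokens at index 4.

// Sketch: collect parameter names from a query template, including names nested inside sections.
function collectTemplateParams(queryText) {
  function walk(tokens) {
    var names = [];
    _.each(tokens, function (token) {
      if (token[0] === 'name' || token[0] === '&') {
        names.push(token[1]);
      } else if (token[0] === '#') {
        names = _.union(names, walk(token[4])); // recurse into the section body
      }
    });
    return names;
  }
  return walk(Mustache.parse(queryText));
}

// collectTemplateParams("SELECT * FROM t WHERE a = {{a}} {{#extra}}AND b = {{b}}{{/extra}}")
// -> ["a", "b"]
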
|
||||
@@ -500,10 +526,42 @@
|
||||
|
||||
|
||||
var DataSource = function ($resource) {
|
||||
var DataSourceResource = $resource('/api/data_sources/:id', {id: '@id'}, {'get': {'method': 'GET', 'cache': true, 'isArray': true}});
|
||||
var actions = {
|
||||
'get': {'method': 'GET', 'cache': false, 'isArray': false},
|
||||
'query': {'method': 'GET', 'cache': false, 'isArray': true},
|
||||
'getSchema': {'method': 'GET', 'cache': true, 'isArray': true, 'url': '/api/data_sources/:id/schema'}
|
||||
};
|
||||
|
||||
var DataSourceResource = $resource('/api/data_sources/:id', {id: '@id'}, actions);
|
||||
|
||||
|
||||
return DataSourceResource;
|
||||
}
|
||||
};
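
A short usage sketch for the reworked resource (method names are exactly those in the actions map above; the id is illustrative):

// 'query' lists all data sources, 'get' fetches a single one, and 'getSchema' calls /api/data_sources/:id/schema.
var dataSources = DataSource.query(function (list) {
  console.log('loaded ' + list.length + ' data sources');
});

DataSource.getSchema({id: 1}, function (schema) {
  // Each entry looks like {name: 'table_name', columns: [...]}, as consumed by the schema browser.
  console.log(schema);
});
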
|
||||
|
||||
var AlertSubscription = function ($resource) {
|
||||
var resource = $resource('/api/alerts/:alertId/subscriptions/:userId', {alertId: '@alert_id', userId: '@user.id'});
|
||||
return resource;
|
||||
};
|
||||
|
||||
var Alert = function ($resource, $http) {
|
||||
var actions = {
|
||||
save: {
|
||||
method: 'POST',
|
||||
transformRequest: [function(data) {
|
||||
var newData = _.extend({}, data);
|
||||
if (newData.query_id === undefined) {
|
||||
newData.query_id = newData.query.id;
|
||||
delete newData.query;
|
||||
}
|
||||
|
||||
return newData;
|
||||
}].concat($http.defaults.transformRequest)
|
||||
}
|
||||
};
|
||||
var resource = $resource('/api/alerts/:id', {id: '@id'}, actions);
|
||||
|
||||
return resource;
|
||||
};
|
||||
|
||||
var Widget = function ($resource, Query) {
|
||||
var WidgetResource = $resource('/api/widgets/:id', {id: '@id'});
|
||||
@@ -530,5 +588,7 @@
|
||||
.factory('QueryResult', ['$resource', '$timeout', '$q', QueryResult])
|
||||
.factory('Query', ['$resource', 'QueryResult', 'DataSource', Query])
|
||||
.factory('DataSource', ['$resource', DataSource])
|
||||
.factory('Alert', ['$resource', '$http', Alert])
|
||||
.factory('AlertSubscription', ['$resource', AlertSubscription])
|
||||
.factory('Widget', ['$resource', 'Query', Widget]);
|
||||
})();
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -55,6 +55,22 @@
|
||||
}];
|
||||
};
|
||||
|
||||
var VisualizationName = function(Visualization) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
visualization: '='
|
||||
},
|
||||
template: '<small>{{name}}</small>',
|
||||
replace: false,
|
||||
link: function (scope) {
|
||||
if (Visualization.visualizations[scope.visualization.type].name != scope.visualization.name) {
|
||||
scope.name = scope.visualization.name;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var VisualizationRenderer = function ($location, Visualization) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
@@ -72,42 +88,9 @@
|
||||
width: '50%'
|
||||
};
|
||||
|
||||
function readURL() {
|
||||
var searchFilters = angular.fromJson($location.search().filters);
|
||||
if (searchFilters) {
|
||||
_.forEach(scope.filters, function(filter) {
|
||||
var value = searchFilters[filter.friendlyName];
|
||||
if (value) {
|
||||
filter.current = value;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function updateURL(filters) {
|
||||
var current = {};
|
||||
_.each(filters, function(filter) {
|
||||
if (filter.current) {
|
||||
current[filter.friendlyName] = filter.current;
|
||||
}
|
||||
});
|
||||
|
||||
var newSearch = angular.extend($location.search(), {
|
||||
filters: angular.toJson(current)
|
||||
});
|
||||
$location.search(newSearch);
|
||||
}
|
||||
|
||||
scope.$watch('queryResult && queryResult.getFilters()', function (filters) {
|
||||
if (filters) {
|
||||
scope.filters = filters;
|
||||
|
||||
if (filters.length && false) {
|
||||
readURL();
|
||||
|
||||
// start watching for changes and update URL
|
||||
scope.$watch('filters', updateURL, true);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -138,7 +121,7 @@
|
||||
query: '=',
|
||||
queryResult: '=',
|
||||
visualization: '=?',
|
||||
openEditor: '=?',
|
||||
openEditor: '@',
|
||||
onNewSuccess: '=?'
|
||||
},
|
||||
link: function (scope, element, attrs) {
|
||||
@@ -167,9 +150,13 @@
|
||||
scope.$watch('visualization.type', function (type, oldType) {
|
||||
// if not edited by user, set name to match type
|
||||
if (type && oldType != type && scope.visualization && !scope.visForm.name.$dirty) {
|
||||
// poor man's titlecase
|
||||
scope.visualization.name = scope.visualization.type[0] + scope.visualization.type.slice(1).toLowerCase();
|
||||
scope.visualization.name = _.string.titleize(scope.visualization.type);
|
||||
}
|
||||
|
||||
if (type && oldType != type && scope.visualization) {
|
||||
scope.visualization.options = Visualization.visualizations[scope.visualization.type].defaultOptions;
|
||||
}
|
||||
|
||||
});
|
||||
|
||||
scope.submit = function () {
|
||||
@@ -208,6 +195,7 @@
|
||||
.provider('Visualization', VisualizationProvider)
|
||||
.directive('visualizationRenderer', ['$location', 'Visualization', VisualizationRenderer])
|
||||
.directive('visualizationOptionsEditor', ['Visualization', VisualizationOptionsEditor])
|
||||
.directive('visualizationName', ['Visualization', VisualizationName])
|
||||
.directive('filters', Filters)
|
||||
.directive('editVisulatizationForm', ['Events', 'Visualization', 'growl', EditVisualizationForm])
|
||||
})();
|
||||
|
||||
@@ -112,9 +112,6 @@
|
||||
|
||||
scope.columnTypes = {
|
||||
"X": "x",
|
||||
// "X (Date time)": "x",
|
||||
// "X (Linear)": "x-linear",
|
||||
// "X (Category)": "x-category",
|
||||
"Y": "y",
|
||||
"Series": "series",
|
||||
"Unused": "unused"
|
||||
@@ -147,6 +144,10 @@
|
||||
scope.stacking = scope.visualization.options.series.stacking;
|
||||
}
|
||||
|
||||
if (scope.visualization.options.sortX === undefined) {
|
||||
scope.visualization.options.sortX = true;
|
||||
}
|
||||
|
||||
var refreshSeries = function() {
|
||||
scope.series = _.map(scope.queryResult.getChartData(scope.visualization.options.columnMapping), function (s) { return s.name; });
|
||||
|
||||
@@ -162,7 +163,7 @@
|
||||
scope.visualization.options.seriesOptions[s] = {'type': scope.visualization.options.globalSeriesType, 'yAxis': 0};
|
||||
}
|
||||
scope.visualization.options.seriesOptions[s].zIndex = scope.visualization.options.seriesOptions[s].zIndex === undefined ? i : scope.visualization.options.seriesOptions[s].zIndex;
|
||||
|
||||
scope.visualization.options.seriesOptions[s].index = scope.visualization.options.seriesOptions[s].index === undefined ? i : scope.visualization.options.seriesOptions[s].index;
|
||||
});
|
||||
scope.zIndexes = _.range(scope.series.length);
|
||||
scope.yAxes = [[0, 'left'], [1, 'right']];
|
||||
@@ -223,6 +224,12 @@
|
||||
}
|
||||
});
|
||||
|
||||
scope.visualization.options.xAxis = scope.visualization.options.xAxis || {};
|
||||
scope.visualization.options.xAxis.labels = scope.visualization.options.xAxis.labels || {};
|
||||
if (scope.visualization.options.xAxis.labels.enabled === undefined) {
|
||||
scope.visualization.options.xAxis.labels.enabled = true;
|
||||
}
|
||||
|
||||
scope.xAxisType = (scope.visualization.options.xAxis && scope.visualization.options.xAxis.type) || scope.xAxisType;
|
||||
|
||||
xAxisUnwatch = scope.$watch("xAxisType", function (xAxisType) {
|
||||
|
||||
@@ -26,7 +26,10 @@
|
||||
if ($scope.queryResult.getData() == null) {
|
||||
|
||||
} else {
|
||||
var sortedData = _.sortBy($scope.queryResult.getData(), "date");
|
||||
var sortedData = _.sortBy($scope.queryResult.getData(),function(r) {
|
||||
return r['date'] + r['day_number'] ;
|
||||
});
|
||||
|
||||
var grouped = _.groupBy(sortedData, "date");
|
||||
var maxColumns = _.reduce(grouped, function(memo, data){
|
||||
return (data.length > memo)? data.length : memo;
|
||||
|
||||
238
rd_ui/app/scripts/visualizations/map.js
Normal file
@@ -0,0 +1,238 @@
|
||||
'use strict';
|
||||
|
||||
(function() {
|
||||
var module = angular.module('redash.visualization');
|
||||
|
||||
module.config(['VisualizationProvider', function(VisualizationProvider) {
|
||||
var renderTemplate =
|
||||
'<map-renderer ' +
|
||||
'options="visualization.options" query-result="queryResult">' +
|
||||
'</map-renderer>';
|
||||
|
||||
var editTemplate = '<map-editor></map-editor>';
|
||||
var defaultOptions = {
|
||||
'height': 500,
|
||||
'draw': 'Marker',
|
||||
'classify':'none'
|
||||
};
|
||||
|
||||
VisualizationProvider.registerVisualization({
|
||||
type: 'MAP',
|
||||
name: 'Map',
|
||||
renderTemplate: renderTemplate,
|
||||
editorTemplate: editTemplate,
|
||||
defaultOptions: defaultOptions
|
||||
});
|
||||
}
|
||||
]);
|
||||
|
||||
module.directive('mapRenderer', function() {
|
||||
return {
|
||||
restrict: 'E',
|
||||
templateUrl: '/views/visualizations/map.html',
|
||||
link: function($scope, elm, attrs) {
|
||||
|
||||
var setBounds = function(){
|
||||
var b = $scope.visualization.options.bounds;
|
||||
|
||||
if(b){
|
||||
$scope.map.fitBounds([[b._southWest.lat, b._southWest.lng],[b._northEast.lat, b._northEast.lng]]);
|
||||
} else if ($scope.features.length > 0){
|
||||
var group= new L.featureGroup($scope.features);
|
||||
$scope.map.fitBounds(group.getBounds());
|
||||
}
|
||||
};
|
||||
|
||||
$scope.$watch('[queryResult && queryResult.getData(), visualization.options.draw,visualization.options.latColName,'+
|
||||
'visualization.options.lonColName,visualization.options.classify,visualization.options.classify]',
|
||||
function() {
|
||||
var marker = function(lat,lon){
|
||||
if (lat == null || lon == null) return;
|
||||
|
||||
return L.marker([lat, lon]);
|
||||
};
|
||||
|
||||
var heatpoint = function(lat,lon,obj){
|
||||
if (lat == null || lon == null) return;
|
||||
|
||||
var color = 'red';
|
||||
|
||||
if (obj &&
|
||||
obj[$scope.visualization.options.classify] &&
|
||||
$scope.visualization.options.classification){
|
||||
var v = $.grep($scope.visualization.options.classification,function(e){
|
||||
return e.value == obj[$scope.visualization.options.classify];
|
||||
});
|
||||
if (v.length >0) color = v[0].color;
|
||||
}
|
||||
|
||||
var style = {
|
||||
fillColor:color,
|
||||
fillOpacity:0.5,
|
||||
stroke:false
|
||||
};
|
||||
|
||||
return L.circleMarker([lat,lon],style)
|
||||
};
|
||||
|
||||
var color = function(val){
|
||||
// taken from http://jsfiddle.net/xgJ2e/2/
|
||||
|
||||
var h= Math.floor((100 - val) * 120 / 100);
|
||||
var s = Math.abs(val - 50)/50;
|
||||
var v = 1;
|
||||
|
||||
var rgb, i, data = [];
|
||||
if (s === 0) {
|
||||
rgb = [v,v,v];
|
||||
} else {
|
||||
h = h / 60;
|
||||
i = Math.floor(h);
|
||||
data = [v*(1-s), v*(1-s*(h-i)), v*(1-s*(1-(h-i)))];
|
||||
switch(i) {
|
||||
case 0:
|
||||
rgb = [v, data[2], data[0]];
|
||||
break;
|
||||
case 1:
|
||||
rgb = [data[1], v, data[0]];
|
||||
break;
|
||||
case 2:
|
||||
rgb = [data[0], v, data[2]];
|
||||
break;
|
||||
case 3:
|
||||
rgb = [data[0], data[1], v];
|
||||
break;
|
||||
case 4:
|
||||
rgb = [data[2], data[0], v];
|
||||
break;
|
||||
default:
|
||||
rgb = [v, data[0], data[1]];
|
||||
break;
|
||||
}
|
||||
}
|
||||
return '#' + rgb.map(function(x){
|
||||
return ("0" + Math.round(x*255).toString(16)).slice(-2);
|
||||
}).join('');
|
||||
};
|
||||
|
||||
// The following line is used to avoid the "Couldn't autodetect L.Icon.Default.imagePath" error
|
||||
// https://github.com/Leaflet/Leaflet/issues/766#issuecomment-7741039
|
||||
L.Icon.Default.imagePath = L.Icon.Default.imagePath || "//api.tiles.mapbox.com/mapbox.js/v2.2.1/images";
|
||||
|
||||
function getBounds(e) {
|
||||
$scope.visualization.options.bounds = $scope.map.getBounds();
|
||||
}
|
||||
|
||||
var queryData = $scope.queryResult.getData();
|
||||
var classify = $scope.visualization.options.classify;
|
||||
|
||||
if (queryData) {
|
||||
$scope.visualization.options.classification = [];
|
||||
|
||||
for (var row in queryData) {
|
||||
if (queryData[row][classify] &&
|
||||
$.grep($scope.visualization.options.classification, function (e) {
|
||||
return e.value == queryData[row][classify]
|
||||
}).length == 0) {
|
||||
$scope.visualization.options.classification.push({value: queryData[row][classify], color: null});
|
||||
}
|
||||
}
|
||||
|
||||
$.each($scope.visualization.options.classification, function (i, c) {
|
||||
c.color = color(parseInt((i / $scope.visualization.options.classification.length) * 100));
|
||||
});
|
||||
|
||||
if (!$scope.map) {
|
||||
$scope.map = L.map(elm[0].children[0].children[0])
|
||||
}
|
||||
|
||||
L.tileLayer('//{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
|
||||
attribution: '© <a href="http://osm.org/copyright">OpenStreetMap</a> contributors'
|
||||
}).addTo($scope.map);
|
||||
|
||||
$scope.features = $scope.features || [];
|
||||
|
||||
var tmp_features = [];
|
||||
|
||||
var lat_col = $scope.visualization.options.latColName || 'lat';
|
||||
var lon_col = $scope.visualization.options.lonColName || 'lon';
|
||||
|
||||
for (var row in queryData) {
|
||||
var feature;
|
||||
|
||||
if ($scope.visualization.options.draw == 'Marker') {
|
||||
feature = marker(queryData[row][lat_col], queryData[row][lon_col])
|
||||
} else if ($scope.visualization.options.draw == 'Color') {
|
||||
feature = heatpoint(queryData[row][lat_col], queryData[row][lon_col], queryData[row])
|
||||
}
|
||||
|
||||
if (!feature) continue;
|
||||
|
||||
var obj_description = '<ul style="list-style-type: none;padding-left: 0">';
|
||||
for (var k in queryData[row]){
|
||||
obj_description += "<li>" + k + ": " + queryData[row][k] + "</li>";
|
||||
}
|
||||
obj_description += '</ul>';
|
||||
feature.bindPopup(obj_description);
|
||||
tmp_features.push(feature);
|
||||
}
|
||||
|
||||
$.each($scope.features, function (i, f) {
|
||||
$scope.map.removeLayer(f);
|
||||
});
|
||||
|
||||
$scope.features = tmp_features;
|
||||
|
||||
$.each($scope.features, function (i, f) {
|
||||
f.addTo($scope.map)
|
||||
});
|
||||
|
||||
setBounds();
|
||||
|
||||
$scope.map.on('focus',function(){
|
||||
$scope.map.on('moveend', getBounds);
|
||||
});
|
||||
|
||||
$scope.map.on('blur',function(){
|
||||
$scope.map.off('moveend', getBounds);
|
||||
});
|
||||
|
||||
|
||||
// We redraw the map if it was loaded in a hidden tab
|
||||
if ($('a[href="#'+$scope.visualization.id+'"]').length > 0) {
|
||||
|
||||
$('a[href="#'+$scope.visualization.id+'"]').on('click', function () {
|
||||
setTimeout(function() {
|
||||
$scope.map.invalidateSize(false);
|
||||
|
||||
setBounds();
|
||||
},500);
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
}, true);
|
||||
|
||||
$scope.$watch('visualization.options.height', function() {
|
||||
|
||||
if (!$scope.map) return;
|
||||
$scope.map.invalidateSize(false);
|
||||
setBounds();
|
||||
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
module.directive('mapEditor', function() {
|
||||
return {
|
||||
restrict: 'E',
|
||||
templateUrl: '/views/visualizations/map_editor.html',
|
||||
link: function($scope, elm, attrs) {
|
||||
$scope.draw_options = ['Marker','Color'];
|
||||
$scope.classify_columns = $scope.queryResult.columnNames.concat('none');
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
})();
|
||||
@@ -78,15 +78,15 @@
|
||||
};
|
||||
} else if (columnType === 'date') {
|
||||
columnDefinition.formatFunction = function (value) {
|
||||
if (value) {
|
||||
return value.format("DD/MM/YY");
|
||||
if (value && moment.isMoment(value)) {
|
||||
return value.toDate().toLocaleDateString();
|
||||
}
|
||||
return value;
|
||||
};
|
||||
} else if (columnType === 'datetime') {
|
||||
columnDefinition.formatFunction = function (value) {
|
||||
if (value) {
|
||||
return value.format("DD/MM/YY HH:mm");
|
||||
if (value && moment.isMoment(value)) {
|
||||
return value.toDate().toLocaleString();
|
||||
}
|
||||
return value;
|
||||
};
|
||||
|
||||
@@ -14,7 +14,12 @@ a.page-title {
|
||||
}
|
||||
|
||||
a.navbar-brand {
|
||||
font-style: italic;
|
||||
padding: 5px 5px 0px 0px;
|
||||
margin-left: 0px !important;
|
||||
}
|
||||
|
||||
a.navbar-brand img {
|
||||
height: 40px;
|
||||
}
|
||||
|
||||
.graph {
|
||||
@@ -92,7 +97,16 @@ a.navbar-brand {
|
||||
}
|
||||
|
||||
.panel-heading .query-link:hover {
|
||||
text-decoration: none;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.list-group-item.clickable {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.list-group-item.clickable:focus,
|
||||
.list-group-item.clickable:hover {
|
||||
background-color: #f5f5f5;
|
||||
}
|
||||
|
||||
/* angular-growl */
|
||||
@@ -308,6 +322,28 @@ counter-renderer counter-name {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.schema-container {
|
||||
height: 300px;
|
||||
}
|
||||
|
||||
.schema-browser {
|
||||
height: 100%;
|
||||
overflow-y: auto;
|
||||
overflow-x: hidden;
|
||||
}
|
||||
|
||||
div.table-name {
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.blankslate {
|
||||
text-align: center;
|
||||
padding: 30px;
|
||||
}
|
||||
|
||||
/*
|
||||
bootstrap's hidden-xs class adds display:block when not hidden
|
||||
use this class when you need to keep the original display value
|
||||
@@ -317,3 +353,7 @@ use this class when you need to keep the original display value
|
||||
display: none !important;
|
||||
}
|
||||
}
|
||||
|
||||
.log-container {
|
||||
margin-bottom: 50px;
|
||||
}
|
||||
|
||||
@@ -24,10 +24,6 @@
|
||||
<span class="badge">{{manager.outdated_queries_count}}</span>
|
||||
Outdated Queries Count
|
||||
</li>
|
||||
|
||||
<li class="list-group-item" ng-if="flowerUrl">
|
||||
<a href="/admin/workers">Workers' Status</a>
|
||||
</li>
|
||||
</ul>
|
||||
<ul class="list-group col-lg-4">
|
||||
<li class="list-group-item active">Queues</li>
|
||||
|
||||
@@ -1,3 +0,0 @@
|
||||
<div class="container-fluid iframe-container">
|
||||
<iframe src="{{flowerUrl}}" style="width:100%; height:100%; background-color:transparent;"></iframe>
|
||||
</div>
|
||||
58
rd_ui/app/views/alerts/edit.html
Normal file
@@ -0,0 +1,58 @@
|
||||
<div class="container">
|
||||
<ol class="breadcrumb">
|
||||
<li><a href="/alerts">Alerts</a></li>
|
||||
<li class="active">{{alert.name || getDefaultName() || "New"}}</li>
|
||||
</ol>
|
||||
<div class="row">
|
||||
<div class="col-md-8">
|
||||
<form name="alertForm" ng-submit="saveChanges()" class="form">
|
||||
<div class="form-group">
|
||||
<label>Query</label>
|
||||
<ui-select ng-model="alert.query" theme="bootstrap" reset-search-input="false" on-select="onQuerySelected($item)">
|
||||
<ui-select-match placeholder="Search a query by name">{{$select.selected.name}}</ui-select-match>
|
||||
<ui-select-choices repeat="q in queries"
|
||||
refresh="searchQueries($select.search)"
|
||||
refresh-delay="0">
|
||||
<div ng-bind-html="q.name | highlight: $select.search | trustAsHtml"></div>
|
||||
</ui-select-choices>
|
||||
</ui-select>
|
||||
</div>
|
||||
|
||||
<div class="form-group" ng-show="selectedQuery">
|
||||
<label>Name</label>
|
||||
<input type="string" placeholder="{{getDefaultName()}}" class="form-control" ng-model="alert.name">
|
||||
</div>
|
||||
|
||||
<div ng-show="queryResult" class="form-horizontal">
|
||||
<div class="form-group">
|
||||
<label class="control-label col-md-2">Value column</label>
|
||||
<div class="col-md-4">
|
||||
<select ng-options="name for name in queryResult.getColumnNames()" ng-model="alert.options.column" class="form-control"></select>
|
||||
</div>
|
||||
<label class="control-label col-md-2">Value</label>
|
||||
<div class="col-md-4">
|
||||
<p class="form-control-static">{{queryResult.getData()[0][alert.options.column]}}</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label class="control-label col-md-2">Op</label>
|
||||
<div class="col-md-4">
|
||||
<select ng-options="name for name in ops" ng-model="alert.options.op" class="form-control"></select>
|
||||
</div>
|
||||
<label class="control-label col-md-2">Reference</label>
|
||||
<div class="col-md-4">
|
||||
<input type="number" class="form-control" ng-model="alert.options.value" placeholder="reference value" required/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<button class="btn btn-primary" ng-disabled="!alertForm.$valid">Save</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
<div class="col-md-4" ng-if="alert.id">
|
||||
<alert-subscribers alert-id="alert.id"></alert-subscribers>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
16
rd_ui/app/views/alerts/list.html
Normal file
@@ -0,0 +1,16 @@
|
||||
<div class="container">
|
||||
<ol class="breadcrumb">
|
||||
<li class="active">Alerts</li>
|
||||
</ol>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<p>
|
||||
<a href="/alerts/new" class="btn btn-default"><i class="fa fa-plus"></i> New Alert</a>
|
||||
</p>
|
||||
|
||||
<smart-table rows="alerts" columns="gridColumns"
|
||||
config="gridConfig"
|
||||
class="table table-condensed table-hover"></smart-table>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
4
rd_ui/app/views/alerts/subscribers.html
Normal file
@@ -0,0 +1,4 @@
|
||||
<div>
|
||||
<strong>Subscribers</strong> <subscribe-button alert-id="alertId" subscribers="subscribers"></subscribe-button><br/>
|
||||
<img ng-src="{{s.user.gravatar_url}}" class="img-circle" alt="{{s.user.name}}" ng-repeat="s in subscribers"/>
|
||||
</div>
|
||||
@@ -28,6 +28,7 @@
|
||||
<p>
|
||||
<span ng-hide="currentUser.hasPermission('view_query')">{{query.name}}</span>
|
||||
<query-link query="query" visualization="widget.visualization" ng-show="currentUser.hasPermission('view_query')"></query-link>
|
||||
<visualization-name visualization="widget.visualization"/>
|
||||
</p>
|
||||
<div class="text-muted" ng-bind-html="query.description | markdown"></div>
|
||||
</h3>
|
||||
@@ -37,7 +38,7 @@
|
||||
|
||||
<div class="panel-footer">
|
||||
<span class="label label-default"
|
||||
tooltip="next update {{nextUpdateTime}} (query runtime: {{queryResult.getRuntime() | durationHumanize}})"
|
||||
tooltip="(query runtime: {{queryResult.getRuntime() | durationHumanize}})"
|
||||
tooltip-placement="bottom">Updated: <span am-time-ago="queryResult.getUpdatedAt()"></span></span>
|
||||
|
||||
<span class="pull-right">
|
||||
|
||||
11
rd_ui/app/views/data_sources/edit.html
Normal file
@@ -0,0 +1,11 @@
|
||||
<div class="container">
|
||||
<ol class="breadcrumb">
|
||||
<li><a href="/data_sources">Data Sources</a></li>
|
||||
<li class="active">{{dataSource.name || "New"}}</li>
|
||||
</ol>
|
||||
<div class="row">
|
||||
<div class="col-md-8">
|
||||
<data-source-form data-data-source="dataSource" />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
20
rd_ui/app/views/data_sources/form.html
Normal file
@@ -0,0 +1,20 @@
|
||||
<form name="dataSourceForm" ng-submit="saveChanges()">
|
||||
<div class="form-group">
|
||||
<label for="dataSourceName">Name</label>
|
||||
<input type="string" class="form-control" name="dataSourceName" ng-model="dataSource.name" required>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="type">Type</label>
|
||||
<select name="type" class="form-control" ng-options="type.type as type.name for type in dataSourceTypes" ng-model="dataSource.type"></select>
|
||||
</div>
|
||||
<div class="form-group" ng-class='{"has-error": !inner.input.$valid}' ng-form="inner" ng-repeat="(name, input) in type.configuration_schema.properties">
|
||||
<label>{{input.title || name | capitalize}}</label>
|
||||
<input name="input" type="{{input.type}}" class="form-control" ng-model="dataSource.options[name]" ng-required="input.required"
|
||||
ng-if="input.type !== 'file'" accesskey="tab">
|
||||
|
||||
<input name="input" type="file" class="form-control" ng-model="files[name]" ng-required="input.required"
|
||||
base-sixty-four-input
|
||||
ng-if="input.type === 'file'">
|
||||
</div>
|
||||
<button class="btn btn-primary" ng-disabled="!dataSourceForm.$valid">Save</button>
|
||||
</form>
|
||||
18
rd_ui/app/views/data_sources/list.html
Normal file
@@ -0,0 +1,18 @@
|
||||
<div class="container">
|
||||
<ol class="breadcrumb">
|
||||
<li class="active">Data Sources</li>
|
||||
</ol>
|
||||
<div class="row">
|
||||
<div class="col-md-4">
|
||||
<div class="list-group">
|
||||
<div class="list-group-item clickable" ng-repeat="dataSource in dataSources" ng-click="openDataSource(dataSource)">
|
||||
<i class="fa fa-database"></i> {{dataSource.name}}
|
||||
<button class="btn btn-xs btn-danger pull-right" ng-click="deleteDataSource($event, dataSource)">Delete</button>
|
||||
</div>
|
||||
<a ng-href="/data_sources/new" class="list-group-item">
|
||||
<i class="fa fa-plus"></i> Add Data Source
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -59,9 +59,9 @@
|
||||
|
||||
<hr>
|
||||
|
||||
<div class="row">
|
||||
<div class="col-lg-12">
|
||||
<div ng-show="sourceMode">
|
||||
<div class="row" ng-if="sourceMode">
|
||||
<div ng-class="editorSize">
|
||||
<div>
|
||||
<p>
|
||||
<button type="button" class="btn btn-primary btn-xs" ng-disabled="queryExecuting" ng-click="executeQuery()">
|
||||
<span class="glyphicon glyphicon-play"></span> Execute
|
||||
@@ -77,21 +77,43 @@
|
||||
</button>
|
||||
</span>
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<!-- code editor -->
|
||||
<div ng-show="sourceMode">
|
||||
<p>
|
||||
<query-editor query="query" lock="queryExecuting"></query-editor>
|
||||
<query-editor query="query" schema="schema" syntax="dataSource.syntax" lock="queryFormatting"></query-editor>
|
||||
</p>
|
||||
<hr>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-3 schema-container" ng-show="hasSchema">
|
||||
<div ng-show="schema.length < 200">
|
||||
<input type="text" placeholder="Search schema..." class="form-control" ng-model="schemaFilter">
|
||||
</div>
|
||||
<div class="schema-browser">
|
||||
<div ng-repeat="table in schema | filter:schemaFilter track by table.name">
|
||||
<div class="table-name" ng-click="table.collapsed = !table.collapsed">
|
||||
<i class="fa fa-table"></i> <strong><span title="{{table.name}}">{{table.name}}</span></strong>
|
||||
</div>
|
||||
<div collapse="table.collapsed && !schemaFilter">
|
||||
<div ng-repeat="column in table.columns track by column" style="padding-left:16px;">{{column}}</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
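For reference, the schema browser above iterates over table.name and table.columns. A minimal sketch of the payload it expects is below; the exact shape comes from the /api/data_sources/<id>/schema endpoint added later in this diff, and the table and column names here are made up.

    schema = [
        {"name": "events", "columns": ["id", "user_id", "created_at"]},  # illustrative tables/columns
        {"name": "users",  "columns": ["id", "email", "name"]},
    ]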
|
||||
</div>
|
||||
|
||||
</div>
|
||||
<hr ng-if="sourceMode">
|
||||
<div class="row">
|
||||
<div class="col-lg-3 rd-hidden-xs">
|
||||
<p>
|
||||
<span class="glyphicon glyphicon-user"></span>
|
||||
<span class="text-muted">Created By </span>
|
||||
<strong>{{query.user.name}}</strong>
|
||||
</p>
|
||||
<p ng-if="query.last_modified_by && query.user.id != query.last_modified_by.id">
|
||||
<span class="glyphicon glyphicon-user"></span>
|
||||
<span class="text-muted">Last Modified By </span>
|
||||
<strong>{{query.last_modified_by.name}}</strong>
|
||||
</p>
|
||||
<p>
|
||||
<span class="glyphicon glyphicon-time"></span>
|
||||
<span class="text-muted">Last update </span>
|
||||
@@ -99,12 +121,6 @@
|
||||
<rd-time-ago value="queryResult.query_result.retrieved_at"></rd-time-ago>
|
||||
</strong>
|
||||
</p>
|
||||
<p>
|
||||
<span class="glyphicon glyphicon-user"></span>
|
||||
<span class="text-muted">Created By </span>
|
||||
<strong ng-hide="isQueryOwner">{{query.user.name}}</strong>
|
||||
<strong ng-show="isQueryOwner">You</strong>
|
||||
</p>
|
||||
<p>
|
||||
<span class="glyphicon glyphicon-play"></span>
|
||||
<span class="text-muted">Runtime </span>
|
||||
@@ -117,12 +133,12 @@
|
||||
</p>
|
||||
<p>
|
||||
<span class="glyphicon glyphicon-refresh"></span>
|
||||
<span class="text-muted">Refresh Interval</span>
|
||||
<query-refresh-select></query-refresh-select>
|
||||
<span class="text-muted">Refresh Schedule</span>
|
||||
<a href="" ng-click="openScheduleForm()">{{query.schedule | scheduleHumanize}}</a>
|
||||
</p>
|
||||
|
||||
<p>
|
||||
<span class="glyphicon glyphicon-hdd"></span>
|
||||
<i class="fa fa-database"></i>
|
||||
<span class="text-muted">Data Source</span>
|
||||
<select ng-disabled="!isQueryOwner" ng-model="query.data_source_id" ng-change="updateDataSource()" ng-options="ds.id as ds.name for ds in dataSources"></select>
|
||||
</p>
|
||||
@@ -139,7 +155,11 @@
|
||||
ng-show="!query.is_archived && query.id != undefined && (isQueryOwner || currentUser.hasPermission('admin'))">
|
||||
<i class="fa fa-archive" title="Archive Query"></i>
|
||||
</a>
|
||||
|
||||
|
||||
<button class="btn btn-default btn-sm" ng-show="query.id != undefined" ng-click="showApiKey()">
|
||||
<i class="fa fa-key" title="Show API Key"></i>
|
||||
</button>
|
||||
|
||||
<div class="modal fade" id="archive-confirmation-modal" tabindex="-1" role="dialog" aria-labelledby="archiveConfirmationModal" aria-hidden="true">
|
||||
<div class="modal-dialog">
|
||||
<div class="modal-content">
|
||||
@@ -172,6 +192,16 @@
|
||||
</div>
|
||||
<div class="alert alert-danger" ng-show="queryResult.getError()">Error running query: <strong>{{queryResult.getError()}}</strong></div>
|
||||
|
||||
<div class="row log-container" ng-show="showLog">
|
||||
<span ng-show="showLog">Log Information:</span>
|
||||
<table>
|
||||
<tbody>
|
||||
<tr ng-repeat="l in queryResult.getLog()">
|
||||
<td>{{l}}</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
<!-- tabs and data -->
|
||||
<div ng-show="showDataset">
|
||||
<div class="row">
|
||||
@@ -182,7 +212,7 @@
|
||||
<rd-tab tab-id="{{vis.id}}" name="{{vis.name}}" ng-if="vis.type!='TABLE'" ng-repeat="vis in query.visualizations">
|
||||
<span class="remove" ng-click="deleteVisualization($event, vis)" ng-show="canEdit"> ×</span>
|
||||
</rd-tab>
|
||||
<rd-tab tab-id="add" name="+ New" removeable="true" ng-show="canEdit"></rd-tab>
|
||||
<rd-tab tab-id="add" name="+ New Visualization" removeable="true" ng-show="canEdit"></rd-tab>
|
||||
<li ng-if="!sourceMode" class="rd-tab-btn"><button class="btn btn-sm btn-default" ng-click="executeQuery()" ng-disabled="queryExecuting" title="Refresh Dataset"><span class="glyphicon glyphicon-refresh"></span></button></li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
18  rd_ui/app/views/schedule_form.html  Normal file
@@ -0,0 +1,18 @@
<div class="modal-header">
  <button type="button" class="close" aria-label="Close" ng-click="close()"><span aria-hidden="true">×</span></button>
  <h4 class="modal-title">Refresh Schedule</h4>
</div>
<div class="modal-body">
  <div class="radio">
    <label>
      <input type="radio" value="periodic" ng-model="refreshType">
      <query-refresh-select ng-disabled="refreshType != 'periodic'"></query-refresh-select>
    </label>
  </div>
  <div class="radio">
    <label>
      <input type="radio" value="daily" ng-model="refreshType">
      <query-time-picker ng-disabled="refreshType != 'daily'"></query-time-picker>
    </label>
  </div>
</div>
@@ -46,6 +46,22 @@
|
||||
placeholder="Auto">
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label class="control-label col-sm-2">Sort X Values</label>
|
||||
|
||||
<div class="col-sm-10">
|
||||
<input name="sortX" type="checkbox" class="form-control"
|
||||
ng-model="visualization.options.sortX">
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label class="control-label col-sm-2">Show X Axis Labels</label>
|
||||
|
||||
<div class="col-sm-10">
|
||||
<input name="sortX" type="checkbox" class="form-control"
|
||||
ng-model="visualization.options.xAxis.labels.enabled">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -92,6 +108,15 @@
|
||||
class="form-control"></select>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label class="control-label col-sm-3">Index</label>
|
||||
|
||||
<div class="col-sm-9">
|
||||
<select required ng-model="visualization.options.seriesOptions[seriesName].index"
|
||||
ng-options="o as o for o in zIndexes"
|
||||
class="form-control"></select>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label class="control-label col-sm-3">y Axis</label>
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
<div>
|
||||
<span ng-click="openEditor=!openEditor" class="details-toggle" ng-class="{open: openEditor}">Edit</span>
|
||||
|
||||
<form ng-if="openEditor" role="form" name="visForm" ng-submit="submit()">
|
||||
<form ng-show="openEditor" role="form" name="visForm" ng-submit="submit()">
|
||||
<div class="form-group">
|
||||
<label class="control-label">Name</label>
|
||||
<input name="name" type="text" class="form-control" ng-model="visualization.name" placeholder="{{visualization.type | capitalize}}">
|
||||
|
||||
3  rd_ui/app/views/visualizations/map.html  Normal file
@@ -0,0 +1,3 @@
<div style='margin:1%;width:98%;height:{{visualization.options.height}}px'>
  <div style="width:100%; height:100%;"></div>
</div>
55  rd_ui/app/views/visualizations/map_editor.html  Normal file
@@ -0,0 +1,55 @@
|
||||
<div class="form-horizontal">
|
||||
<div class="form-group">
|
||||
<label class="col-lg-2">Map height (px)</label>
|
||||
<div class="col-sm-4">
|
||||
<input class="form-control" type="number" ng-model = "visualization.options.height" />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<label class="col-lg-2">Draw option</label>
|
||||
<div class="col-sm-4">
|
||||
<select ng-options="opt for opt in draw_options" ng-model="visualization.options.draw" class="form-control"></select>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label class="col-lg-2">Latitude column name</label>
|
||||
<div class="col-sm-4">
|
||||
<select ng-options="name for name in queryResult.columnNames" ng-model="visualization.options.latColName" class="form-control"></select>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label class="col-lg-2">Longitude column name</label>
|
||||
<div class="col-sm-4">
|
||||
<select ng-options="name for name in queryResult.columnNames" ng-model="visualization.options.lonColName" class="form-control"></select>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div ng-show = "visualization.options.draw == 'Color'">
|
||||
<div class="form-group">
|
||||
<label class="col-lg-2">Classify by column</label>
|
||||
<div class="col-sm-4">
|
||||
<select ng-options="name for name in classify_columns" ng-model="visualization.options.classify" class="form-control"></select>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="row" >
|
||||
<div class="col-lg-6">
|
||||
<div ng-repeat="element in visualization.options.classification" class="list-group">
|
||||
<div class="list-group-item active">
|
||||
{{element.value}}
|
||||
</div>
|
||||
|
||||
<div class="list-group-item">
|
||||
<div class="form-group">
|
||||
<label class="col-lg-4">Color</label>
|
||||
<div class="col-sm-4">
|
||||
<input class="form-control" style="background-color:{{element.color}};" type="text" ng-model = "element.color" />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -12,8 +12,7 @@
|
||||
"es5-shim": "2.0.8",
|
||||
"angular-moment": "0.2.0",
|
||||
"moment": "2.1.0",
|
||||
"angular-ui-bootstrap": "0.5.0",
|
||||
"angular-ui-codemirror": "0.0.5",
|
||||
"codemirror": "4.8.0",
|
||||
"highcharts": "3.0.10",
|
||||
"underscore": "1.5.1",
|
||||
"pivottable": "~1.1.1",
|
||||
@@ -26,10 +25,13 @@
|
||||
"marked": "~0.3.2",
|
||||
"bucky": "~0.2.6",
|
||||
"pace": "~0.5.1",
|
||||
"angular-ui-select": "0.8.2",
|
||||
"angular-ui-select": "~0.12.0",
|
||||
"font-awesome": "~4.2.0",
|
||||
"mustache": "~1.0.0",
|
||||
"offline": "~0.7.11"
|
||||
"canvg": "gabelerner/canvg",
|
||||
"angular-ui-bootstrap-bower": "~0.12.1",
|
||||
"leaflet": "~0.7.3",
|
||||
"angular-base64-upload": "~0.1.11"
|
||||
},
|
||||
"devDependencies": {
|
||||
"angular-mocks": "1.2.18",
|
||||
|
||||
BIN  rd_ui/favicon.ico  Executable file  (binary file not shown; after: 1.1 KiB)
@@ -36,6 +36,7 @@
|
||||
"node": ">=0.10.0"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "grunt test"
|
||||
"test": "grunt test",
|
||||
"bower": "bower"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,10 +2,12 @@ import logging
|
||||
import urlparse
|
||||
import redis
|
||||
from statsd import StatsClient
|
||||
from flask_mail import Mail
|
||||
|
||||
from redash import settings
|
||||
from redash.query_runner import import_query_runners
|
||||
|
||||
__version__ = '0.5.0'
|
||||
__version__ = '0.7.0'
|
||||
|
||||
|
||||
def setup_logging():
|
||||
@@ -31,4 +33,8 @@ def create_redis_connection():
|
||||
|
||||
setup_logging()
|
||||
redis_connection = create_redis_connection()
|
||||
statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX)
|
||||
mail = Mail()
|
||||
mail.init_mail(settings.all_settings())
|
||||
statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX)
|
||||
|
||||
import_query_runners(settings.QUERY_RUNNERS)
|
||||
|
||||
109  redash/admin.py  Normal file
@@ -0,0 +1,109 @@
|
||||
import json
|
||||
from flask_admin.contrib.peewee import ModelView
|
||||
from flask.ext.admin import Admin
|
||||
from flask_admin.contrib.peewee.form import CustomModelConverter
|
||||
from flask_admin.form.widgets import DateTimePickerWidget
|
||||
from playhouse.postgres_ext import ArrayField, DateTimeTZField
|
||||
from wtforms import fields
|
||||
from wtforms.widgets import TextInput
|
||||
|
||||
from redash import models
|
||||
from redash import query_runner
|
||||
from redash.permissions import require_permission
|
||||
|
||||
|
||||
class ArrayListField(fields.Field):
|
||||
widget = TextInput()
|
||||
|
||||
def _value(self):
|
||||
if self.data:
|
||||
return u', '.join(self.data)
|
||||
else:
|
||||
return u''
|
||||
|
||||
def process_formdata(self, valuelist):
|
||||
if valuelist:
|
||||
self.data = [x.strip() for x in valuelist[0].split(',')]
|
||||
else:
|
||||
self.data = []
|
||||
|
||||
|
||||
class JSONTextAreaField(fields.TextAreaField):
|
||||
def process_formdata(self, valuelist):
|
||||
if valuelist:
|
||||
try:
|
||||
json.loads(valuelist[0])
|
||||
except ValueError:
|
||||
raise ValueError(self.gettext(u'Invalid JSON'))
|
||||
self.data = valuelist[0]
|
||||
else:
|
||||
self.data = ''
|
||||
|
||||
class PasswordHashField(fields.PasswordField):
|
||||
def _value(self):
|
||||
return u''
|
||||
|
||||
def process_formdata(self, valuelist):
|
||||
if valuelist:
|
||||
self.data = models.pwd_context.encrypt(valuelist[0])
|
||||
else:
|
||||
self.data = u''
|
||||
|
||||
|
||||
class PgModelConverter(CustomModelConverter):
|
||||
def __init__(self, view, additional=None):
|
||||
additional = {ArrayField: self.handle_array_field,
|
||||
DateTimeTZField: self.handle_datetime_tz_field,
|
||||
}
|
||||
super(PgModelConverter, self).__init__(view, additional)
|
||||
self.view = view
|
||||
|
||||
def handle_array_field(self, model, field, **kwargs):
|
||||
return field.name, ArrayListField(**kwargs)
|
||||
|
||||
def handle_datetime_tz_field(self, model, field, **kwargs):
|
||||
kwargs['widget'] = DateTimePickerWidget()
|
||||
return field.name, fields.DateTimeField(**kwargs)
|
||||
|
||||
|
||||
class BaseModelView(ModelView):
|
||||
model_form_converter = PgModelConverter
|
||||
|
||||
@require_permission('admin')
|
||||
def is_accessible(self):
|
||||
return True
|
||||
|
||||
|
||||
class UserModelView(BaseModelView):
|
||||
column_searchable_list = ('name', 'email')
|
||||
form_excluded_columns = ('created_at', 'updated_at')
|
||||
column_exclude_list = ('password_hash',)
|
||||
|
||||
form_overrides = dict(password_hash=PasswordHashField)
|
||||
form_args = {
|
||||
'password_hash': {'label': 'Password'}
|
||||
}
|
||||
|
||||
|
||||
class QueryResultModelView(BaseModelView):
|
||||
column_exclude_list = ('data',)
|
||||
|
||||
|
||||
class QueryModelView(BaseModelView):
|
||||
column_exclude_list = ('latest_query_data',)
|
||||
|
||||
|
||||
class DashboardModelView(BaseModelView):
|
||||
column_searchable_list = ('name', 'slug')
|
||||
|
||||
|
||||
def init_admin(app):
|
||||
admin = Admin(app, name='re:dash admin', template_mode='bootstrap3')
|
||||
|
||||
admin.add_view(UserModelView(models.User))
|
||||
admin.add_view(QueryModelView(models.Query))
|
||||
admin.add_view(QueryResultModelView(models.QueryResult))
|
||||
admin.add_view(DashboardModelView(models.Dashboard))
|
||||
|
||||
for m in (models.Visualization, models.Widget, models.ActivityLog, models.Group, models.Event):
|
||||
admin.add_view(BaseModelView(m))
|
||||
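init_admin() above is all that is needed to expose the Flask-Admin views. A minimal sketch of wiring it into an application follows; the actual call site is not part of this diff, so treat this as an assumption about how it is hooked up.

    # Sketch only: assumes init_admin is invoked once during application setup.
    from flask import Flask
    from redash.admin import init_admin

    app = Flask(__name__)
    init_admin(app)  # registers /admin views for User, Query, QueryResult, Dashboard and the other models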
@@ -1,13 +1,13 @@
|
||||
import functools
|
||||
import hashlib
|
||||
import hmac
|
||||
import time
|
||||
import logging
|
||||
|
||||
from flask import request, make_response, redirect, url_for
|
||||
from flask.ext.login import LoginManager, login_user, current_user
|
||||
from flask.ext.login import LoginManager
|
||||
from flask.ext.login import user_logged_in
|
||||
|
||||
from redash import models, settings, google_oauth
|
||||
from redash import models, settings, google_oauth, saml_auth
|
||||
from redash.tasks import record_event
|
||||
|
||||
login_manager = LoginManager()
|
||||
logger = logging.getLogger('authentication')
|
||||
@@ -23,47 +23,85 @@ def sign(key, path, expires):
|
||||
return h.hexdigest()
|
||||
|
||||
|
||||
class HMACAuthentication(object):
|
||||
@staticmethod
|
||||
def api_key_authentication():
|
||||
signature = request.args.get('signature')
|
||||
expires = float(request.args.get('expires') or 0)
|
||||
query_id = request.view_args.get('query_id', None)
|
||||
@login_manager.user_loader
|
||||
def load_user(user_id):
|
||||
return models.User.get_by_id(user_id)
|
||||
|
||||
# TODO: 3600 should be a setting
|
||||
if signature and query_id and time.time() < expires <= time.time() + 3600:
|
||||
|
||||
def hmac_load_user_from_request(request):
|
||||
signature = request.args.get('signature')
|
||||
expires = float(request.args.get('expires') or 0)
|
||||
query_id = request.view_args.get('query_id', None)
|
||||
user_id = request.args.get('user_id', None)
|
||||
|
||||
# TODO: 3600 should be a setting
|
||||
if signature and time.time() < expires <= time.time() + 3600:
|
||||
if user_id:
|
||||
user = models.User.get_by_id(user_id)
|
||||
calculated_signature = sign(user.api_key, request.path, expires)
|
||||
|
||||
if user.api_key and signature == calculated_signature:
|
||||
return user
|
||||
|
||||
if query_id:
|
||||
query = models.Query.get(models.Query.id == query_id)
|
||||
calculated_signature = sign(query.api_key, request.path, expires)
|
||||
|
||||
if query.api_key and signature == calculated_signature:
|
||||
login_user(models.ApiUser(query.api_key), remember=False)
|
||||
return True
|
||||
return models.ApiUser(query.api_key)
|
||||
|
||||
return False
|
||||
return None
|
||||
|
||||
def required(self, fn):
|
||||
@functools.wraps(fn)
|
||||
def decorated(*args, **kwargs):
|
||||
if current_user.is_authenticated():
|
||||
return fn(*args, **kwargs)
|
||||
def get_user_from_api_key(api_key, query_id):
|
||||
if not api_key:
|
||||
return None
|
||||
|
||||
if self.api_key_authentication():
|
||||
return fn(*args, **kwargs)
|
||||
user = None
|
||||
try:
|
||||
user = models.User.get_by_api_key(api_key)
|
||||
except models.User.DoesNotExist:
|
||||
if query_id:
|
||||
query = models.Query.get_by_id(query_id)
|
||||
if query and query.api_key == api_key:
|
||||
user = models.ApiUser(api_key)
|
||||
|
||||
return make_response(redirect(url_for("login", next=request.url)))
|
||||
return user
|
||||
|
||||
return decorated
|
||||
def api_key_load_user_from_request(request):
|
||||
api_key = request.args.get('api_key', None)
|
||||
query_id = request.view_args.get('query_id', None)
|
||||
|
||||
user = get_user_from_api_key(api_key, query_id)
|
||||
return user
|
||||
|
||||
|
||||
@login_manager.user_loader
|
||||
def load_user(user_id):
|
||||
return models.User.select().where(models.User.id == user_id).first()
|
||||
def log_user_logged_in(app, user):
|
||||
event = {
|
||||
'user_id': user.id,
|
||||
'action': 'login',
|
||||
'object_type': 'redash',
|
||||
'timestamp': int(time.time()),
|
||||
}
|
||||
|
||||
record_event.delay(event)
|
||||
|
||||
|
||||
def setup_authentication(app):
|
||||
login_manager.init_app(app)
|
||||
login_manager.anonymous_user = models.AnonymousUser
|
||||
login_manager.login_view = 'login'
|
||||
app.secret_key = settings.COOKIE_SECRET
|
||||
app.register_blueprint(google_oauth.blueprint)
|
||||
app.register_blueprint(saml_auth.blueprint)
|
||||
|
||||
user_logged_in.connect(log_user_logged_in)
|
||||
|
||||
if settings.AUTH_TYPE == 'hmac':
|
||||
login_manager.request_loader(hmac_load_user_from_request)
|
||||
elif settings.AUTH_TYPE == 'api_key':
|
||||
login_manager.request_loader(api_key_load_user_from_request)
|
||||
else:
|
||||
logger.warning("Unknown authentication type ({}). Using default (HMAC).".format(settings.AUTH_TYPE))
|
||||
login_manager.request_loader(hmac_load_user_from_request)
|
||||
|
||||
|
||||
return HMACAuthentication()
|
||||
|
||||
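For context, the HMAC request loader above accepts a signature plus either a user_id or a query_id, and only within a one-hour expiry window. A minimal sketch of producing such a pre-signed URL is below; the body of sign() is an assumption (only its hexdigest return appears at the top of this hunk), so the exact message layout may differ.

    import hashlib
    import hmac
    import time

    def sign(key, path, expires):
        # Assumption: mirrors the helper referenced above -- an HMAC-SHA1 hex digest
        # over the request path and expiry, keyed with the API key.
        h = hmac.new(str(key), msg=path, digestmod=hashlib.sha1)
        h.update(str(expires))
        return h.hexdigest()

    def build_signed_url(base_url, path, api_key, user_id):
        expires = time.time() + 3600  # must satisfy time.time() < expires <= time.time() + 3600
        return "{0}{1}?signature={2}&expires={3}&user_id={4}".format(
            base_url, path, sign(api_key, path, expires), expires, user_id)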
@@ -1,6 +1,3 @@
|
||||
from flask import make_response
|
||||
from functools import update_wrapper
|
||||
|
||||
ONE_YEAR = 60 * 60 * 24 * 365.25
|
||||
|
||||
headers = {
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
import json
|
||||
import click
|
||||
from flask.ext.script import Manager
|
||||
from redash import models
|
||||
from redash.query_runner import query_runners, validate_configuration
|
||||
|
||||
manager = Manager(help="Data sources management commands.")
|
||||
|
||||
@@ -13,11 +16,70 @@ def list():
|
||||
print "Id: {}\nName: {}\nType: {}\nOptions: {}".format(ds.id, ds.name, ds.type, ds.options)
|
||||
|
||||
|
||||
def validate_data_source_type(type):
|
||||
if type not in query_runners.keys():
|
||||
print "Error: the type \"{}\" is not supported (supported types: {}).".format(type, ", ".join(query_runners.keys()))
|
||||
exit()
|
||||
|
||||
|
||||
def validate_data_source_options(type, options):
|
||||
if not validate_configuration(type, options):
|
||||
print "Error: invalid configuration."
|
||||
exit()
|
||||
|
||||
@manager.command
|
||||
def new(name, type, options):
|
||||
def new(name=None, type=None, options=None):
|
||||
"""Create new data source"""
|
||||
# TODO: validate it's a valid type and in the future, validate the options.
|
||||
if name is None:
|
||||
name = click.prompt("Name")
|
||||
|
||||
if type is None:
|
||||
print "Select type:"
|
||||
for i, query_runner_name in enumerate(query_runners.keys()):
|
||||
print "{}. {}".format(i+1, query_runner_name)
|
||||
|
||||
idx = 0
|
||||
while idx < 1 or idx > len(query_runners.keys()):
|
||||
idx = click.prompt("[{}-{}]".format(1, len(query_runners.keys())), type=int)
|
||||
|
||||
type = query_runners.keys()[idx-1]
|
||||
else:
|
||||
validate_data_source_type(type)
|
||||
|
||||
if options is None:
|
||||
query_runner = query_runners[type]
|
||||
schema = query_runner.configuration_schema()
|
||||
|
||||
types = {
|
||||
'string': unicode,
|
||||
'number': int,
|
||||
'boolean': bool
|
||||
}
|
||||
|
||||
options_obj = {}
|
||||
|
||||
for k, prop in schema['properties'].iteritems():
|
||||
required = k in schema.get('required', [])
|
||||
default_value = "<<DEFAULT_VALUE>>"
|
||||
if required:
|
||||
default_value = None
|
||||
|
||||
prompt = prop.get('title', k.capitalize())
|
||||
if required:
|
||||
prompt = "{} (required)".format(prompt)
|
||||
else:
|
||||
prompt = "{} (optional)".format(prompt)
|
||||
|
||||
value = click.prompt(prompt, default=default_value, type=types[prop['type']], show_default=False)
|
||||
if value != default_value:
|
||||
options_obj[k] = value
|
||||
|
||||
options = json.dumps(options_obj)
|
||||
|
||||
validate_data_source_options(type, options)
|
||||
|
||||
print "Creating {} data source ({}) with options:\n{}".format(type, name, options)
|
||||
|
||||
data_source = models.DataSource.create(name=name,
|
||||
type=type,
|
||||
options=options)
|
||||
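The prompt loop above only keeps answers that differ from the "<<DEFAULT_VALUE>>" sentinel, then serializes them to JSON before validation. A small sketch of the outcome for an illustrative schema (property names are made up):

    import json

    schema = {
        "properties": {
            "host":   {"type": "string"},
            "port":   {"type": "number"},
            "dbname": {"type": "string", "title": "Database name"},
        },
        "required": ["dbname"],
    }
    # Answering "localhost" for Host, skipping Port, and "metrics" for Database name yields:
    options = json.dumps({"host": "localhost", "dbname": "metrics"})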
@@ -49,7 +111,14 @@ def update_attr(obj, attr, new_value):
|
||||
def edit(name, new_name=None, options=None, type=None):
|
||||
"""Edit data source settings (name, options, type)"""
|
||||
try:
|
||||
if type is not None:
|
||||
validate_data_source_type(type)
|
||||
|
||||
data_source = models.DataSource.get(models.DataSource.name==name)
|
||||
|
||||
if options is not None:
|
||||
validate_data_source_options(data_source.type, options)
|
||||
|
||||
update_attr(data_source, "name", new_name)
|
||||
update_attr(data_source, "type", type)
|
||||
update_attr(data_source, "options", options)
|
||||
|
||||
@@ -7,22 +7,24 @@ but this is only due to configuration issues and temporary.
|
||||
import csv
|
||||
import hashlib
|
||||
import json
|
||||
import numbers
|
||||
import cStringIO
|
||||
import datetime
|
||||
import time
|
||||
import logging
|
||||
|
||||
from flask import render_template, send_from_directory, make_response, request, jsonify, redirect, \
|
||||
session, url_for
|
||||
from flask.ext.restful import Resource, abort
|
||||
from flask_login import current_user, login_user, logout_user
|
||||
session, url_for, current_app, flash
|
||||
from flask.ext.restful import Resource, abort, reqparse
|
||||
from flask_login import current_user, login_user, logout_user, login_required
|
||||
from funcy import project
|
||||
import sqlparse
|
||||
|
||||
from redash import redis_connection, statsd_client, models, settings, utils, __version__
|
||||
from redash.wsgi import app, auth, api
|
||||
from redash import statsd_client, models, settings, utils
|
||||
from redash.wsgi import app, api
|
||||
from redash.tasks import QueryTask, record_event
|
||||
from redash.cache import headers as cache_headers
|
||||
from redash.permissions import require_permission
|
||||
from redash.query_runner import query_runners, validate_configuration
|
||||
from redash.monitor import get_status
|
||||
|
||||
|
||||
@app.route('/ping', methods=['GET'])
|
||||
@@ -30,14 +32,19 @@ def ping():
|
||||
return 'PONG.'
|
||||
|
||||
|
||||
@app.route('/admin/<anything>/<whatever>')
|
||||
@app.route('/admin/<anything>')
|
||||
@app.route('/dashboard/<anything>')
|
||||
@app.route('/alerts')
|
||||
@app.route('/alerts/<pk>')
|
||||
@app.route('/queries')
|
||||
@app.route('/data_sources')
|
||||
@app.route('/data_sources/<pk>')
|
||||
@app.route('/queries/<query_id>')
|
||||
@app.route('/queries/<query_id>/<anything>')
|
||||
@app.route('/personal')
|
||||
@app.route('/')
|
||||
@auth.required
|
||||
@login_required
|
||||
def index(**kwargs):
|
||||
email_md5 = hashlib.md5(current_user.email.lower()).hexdigest()
|
||||
gravatar_url = "https://www.gravatar.com/avatar/%s?s=40" % email_md5
|
||||
@@ -52,8 +59,7 @@ def index(**kwargs):
|
||||
}
|
||||
|
||||
features = {
|
||||
'clientSideMetrics': settings.CLIENT_SIDE_METRICS,
|
||||
'flowerUrl': settings.CELERY_FLOWER_URL
|
||||
'clientSideMetrics': settings.CLIENT_SIDE_METRICS
|
||||
}
|
||||
|
||||
return render_template("index.html", user=json.dumps(user), name=settings.NAME,
|
||||
@@ -67,22 +73,30 @@ def login():
|
||||
return redirect(request.args.get('next') or '/')
|
||||
|
||||
if not settings.PASSWORD_LOGIN_ENABLED:
|
||||
return redirect(url_for("google_oauth.authorize", next=request.args.get('next')))
|
||||
if settings.SAML_LOGIN_ENABLED:
|
||||
return redirect(url_for("saml_auth.sp_initiated", next=request.args.get('next')))
|
||||
else:
|
||||
return redirect(url_for("google_oauth.authorize", next=request.args.get('next')))
|
||||
|
||||
if request.method == 'POST':
|
||||
user = models.User.select().where(models.User.email == request.form['username']).first()
|
||||
if user and user.verify_password(request.form['password']):
|
||||
remember = ('remember' in request.form)
|
||||
login_user(user, remember=remember)
|
||||
return redirect(request.args.get('next') or '/')
|
||||
try:
|
||||
user = models.User.get_by_email(request.form['username'])
|
||||
if user and user.verify_password(request.form['password']):
|
||||
remember = ('remember' in request.form)
|
||||
login_user(user, remember=remember)
|
||||
return redirect(request.args.get('next') or '/')
|
||||
else:
|
||||
flash("Wrong username or password.")
|
||||
except models.User.DoesNotExist:
|
||||
flash("Wrong username or password.")
|
||||
|
||||
return render_template("login.html",
|
||||
name=settings.NAME,
|
||||
analytics=settings.ANALYTICS,
|
||||
next=request.args.get('next'),
|
||||
username=request.form.get('username', ''),
|
||||
show_google_openid=settings.GOOGLE_OAUTH_ENABLED)
|
||||
|
||||
show_google_openid=settings.GOOGLE_OAUTH_ENABLED,
|
||||
show_saml_login=settings.SAML_LOGIN_ENABLED)
|
||||
|
||||
@app.route('/logout')
|
||||
def logout():
|
||||
@@ -92,43 +106,16 @@ def logout():
|
||||
return redirect('/login')
|
||||
|
||||
@app.route('/status.json')
|
||||
@auth.required
|
||||
@login_required
|
||||
@require_permission('admin')
|
||||
def status_api():
|
||||
status = {}
|
||||
info = redis_connection.info()
|
||||
status['redis_used_memory'] = info['used_memory_human']
|
||||
status['version'] = __version__
|
||||
status['queries_count'] = models.Query.select().count()
|
||||
status['query_results_count'] = models.QueryResult.select().count()
|
||||
status['unused_query_results_count'] = models.QueryResult.unused().count()
|
||||
status['dashboards_count'] = models.Dashboard.select().count()
|
||||
status['widgets_count'] = models.Widget.select().count()
|
||||
|
||||
status['workers'] = []
|
||||
|
||||
manager_status = redis_connection.hgetall('redash:status')
|
||||
status['manager'] = manager_status
|
||||
status['manager']['outdated_queries_count'] = models.Query.outdated_queries().count()
|
||||
|
||||
queues = {}
|
||||
for ds in models.DataSource.select():
|
||||
for queue in (ds.queue_name, ds.scheduled_queue_name):
|
||||
queues.setdefault(queue, set())
|
||||
queues[queue].add(ds.name)
|
||||
|
||||
status['manager']['queues'] = {}
|
||||
for queue, sources in queues.iteritems():
|
||||
status['manager']['queues'][queue] = {
|
||||
'data_sources': ', '.join(sources),
|
||||
'size': redis_connection.llen(queue)
|
||||
}
|
||||
status = get_status()
|
||||
|
||||
return jsonify(status)
|
||||
|
||||
|
||||
@app.route('/api/queries/format', methods=['POST'])
|
||||
@auth.required
|
||||
@login_required
|
||||
def format_sql_query():
|
||||
arguments = request.get_json(force=True)
|
||||
query = arguments.get("query", "")
|
||||
@@ -136,8 +123,26 @@ def format_sql_query():
|
||||
return sqlparse.format(query, reindent=True, keyword_case='upper')
|
||||
|
||||
|
||||
@app.route('/queries/new', methods=['POST'])
|
||||
@login_required
|
||||
def create_query_route():
|
||||
query = request.form.get('query', None)
|
||||
data_source_id = request.form.get('data_source_id', None)
|
||||
|
||||
if query is None or data_source_id is None:
|
||||
abort(400)
|
||||
|
||||
query = models.Query.create(name="New Query",
|
||||
query=query,
|
||||
data_source=data_source_id,
|
||||
user=current_user._get_current_object(),
|
||||
schedule=None)
|
||||
|
||||
return redirect('/queries/{}'.format(query.id), 303)
|
||||
|
||||
|
||||
class BaseResource(Resource):
|
||||
decorators = [auth.required]
|
||||
decorators = [login_required]
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(BaseResource, self).__init__(*args, **kwargs)
|
||||
@@ -174,13 +179,74 @@ class MetricsAPI(BaseResource):
|
||||
api.add_resource(MetricsAPI, '/api/metrics/v1/send', endpoint='metrics')
|
||||
|
||||
|
||||
class DataSourceTypeListAPI(BaseResource):
|
||||
@require_permission("admin")
|
||||
def get(self):
|
||||
return [q.to_dict() for q in query_runners.values()]
|
||||
|
||||
api.add_resource(DataSourceTypeListAPI, '/api/data_sources/types', endpoint='data_source_types')
|
||||
|
||||
|
||||
class DataSourceAPI(BaseResource):
|
||||
@require_permission('admin')
|
||||
def get(self, data_source_id):
|
||||
data_source = models.DataSource.get_by_id(data_source_id)
|
||||
return data_source.to_dict(all=True)
|
||||
|
||||
@require_permission('admin')
|
||||
def post(self, data_source_id):
|
||||
data_source = models.DataSource.get_by_id(data_source_id)
|
||||
req = request.get_json(True)
|
||||
if not validate_configuration(req['type'], req['options']):
|
||||
abort(400)
|
||||
|
||||
data_source.name = req['name']
|
||||
data_source.options = json.dumps(req['options'])
|
||||
|
||||
data_source.save()
|
||||
|
||||
return data_source.to_dict(all=True)
|
||||
|
||||
@require_permission('admin')
|
||||
def delete(self, data_source_id):
|
||||
data_source = models.DataSource.get_by_id(data_source_id)
|
||||
data_source.delete_instance(recursive=True)
|
||||
|
||||
return make_response('', 204)
|
||||
|
||||
|
||||
class DataSourceListAPI(BaseResource):
|
||||
def get(self):
|
||||
data_sources = [ds.to_dict() for ds in models.DataSource.all()]
|
||||
return data_sources
|
||||
|
||||
api.add_resource(DataSourceListAPI, '/api/data_sources', endpoint='data_sources')
|
||||
@require_permission("admin")
|
||||
def post(self):
|
||||
req = request.get_json(True)
|
||||
required_fields = ('options', 'name', 'type')
|
||||
for f in required_fields:
|
||||
if f not in req:
|
||||
abort(400)
|
||||
|
||||
if not validate_configuration(req['type'], req['options']):
|
||||
abort(400)
|
||||
|
||||
datasource = models.DataSource.create(name=req['name'], type=req['type'], options=json.dumps(req['options']))
|
||||
|
||||
return datasource.to_dict(all=True)
|
||||
|
||||
api.add_resource(DataSourceListAPI, '/api/data_sources', endpoint='data_sources')
|
||||
api.add_resource(DataSourceAPI, '/api/data_sources/<data_source_id>', endpoint='data_source')
|
||||
|
||||
|
||||
class DataSourceSchemaAPI(BaseResource):
|
||||
def get(self, data_source_id):
|
||||
data_source = models.DataSource.get_by_id(data_source_id)
|
||||
schema = data_source.get_schema()
|
||||
|
||||
return schema
|
||||
|
||||
api.add_resource(DataSourceSchemaAPI, '/api/data_sources/<data_source_id>/schema')
|
||||
|
||||
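A sketch of the JSON body an admin would POST to /api/data_sources with the endpoints above; the options keys must satisfy the chosen type's configuration_schema, so the values here are illustrative only.

    new_data_source = {
        "name": "Events DB",
        "type": "pg",                                          # must be one of the registered query runner types
        "options": {"host": "127.0.0.1", "dbname": "events"},  # illustrative; checked by validate_configuration()
    }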
class DashboardRecentAPI(BaseResource):
|
||||
def get(self):
|
||||
@@ -295,7 +361,7 @@ class QueryListAPI(BaseResource):
|
||||
@require_permission('create_query')
|
||||
def post(self):
|
||||
query_def = request.get_json(force=True)
|
||||
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data']:
|
||||
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'last_modified_by']:
|
||||
query_def.pop(field, None)
|
||||
|
||||
query_def['user'] = self.current_user
|
||||
@@ -303,8 +369,6 @@ class QueryListAPI(BaseResource):
|
||||
query = models.Query(**query_def)
|
||||
query.save()
|
||||
|
||||
query.create_default_visualizations()
|
||||
|
||||
return query.to_dict()
|
||||
|
||||
@require_permission('view_query')
|
||||
@@ -316,9 +380,9 @@ class QueryAPI(BaseResource):
|
||||
@require_permission('edit_query')
|
||||
def post(self, query_id):
|
||||
query = models.Query.get_by_id(query_id)
|
||||
|
||||
|
||||
query_def = request.get_json(force=True)
|
||||
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'user']:
|
||||
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'user', 'last_modified_by']:
|
||||
query_def.pop(field, None)
|
||||
|
||||
if 'latest_query_data_id' in query_def:
|
||||
@@ -327,6 +391,9 @@ class QueryAPI(BaseResource):
|
||||
if 'data_source_id' in query_def:
|
||||
query_def['data_source'] = query_def.pop('data_source_id')
|
||||
|
||||
query_def['last_modified_by'] = self.current_user
|
||||
|
||||
# TODO: use #save() with #dirty_fields.
|
||||
models.Query.update_instance(query_id, **query_def)
|
||||
|
||||
query = models.Query.get_by_id(query_id)
|
||||
@@ -365,7 +432,7 @@ class VisualizationListAPI(BaseResource):
|
||||
kwargs = request.get_json(force=True)
|
||||
kwargs['options'] = json.dumps(kwargs['options'])
|
||||
kwargs['query'] = kwargs.pop('query_id')
|
||||
|
||||
|
||||
vis = models.Visualization(**kwargs)
|
||||
vis.save()
|
||||
|
||||
@@ -400,7 +467,7 @@ api.add_resource(VisualizationAPI, '/api/visualizations/<visualization_id>', end
|
||||
class QueryResultListAPI(BaseResource):
|
||||
@require_permission('execute_query')
|
||||
def post(self):
|
||||
params = request.json
|
||||
params = request.get_json(force=True)
|
||||
|
||||
if settings.FEATURE_TABLES_PERMISSIONS:
|
||||
metadata = utils.SQLMetaData(params['query'])
|
||||
@@ -426,16 +493,19 @@ class QueryResultListAPI(BaseResource):
|
||||
activity=params['query']
|
||||
).save()
|
||||
|
||||
if params['ttl'] == 0:
|
||||
max_age = int(params.get('max_age', -1))
|
||||
|
||||
if max_age == 0:
|
||||
query_result = None
|
||||
else:
|
||||
query_result = models.QueryResult.get_latest(params['data_source_id'], params['query'], int(params['ttl']))
|
||||
query_result = models.QueryResult.get_latest(params['data_source_id'], params['query'], max_age)
|
||||
|
||||
if query_result:
|
||||
return {'query_result': query_result.to_dict()}
|
||||
else:
|
||||
data_source = models.DataSource.get_by_id(params['data_source_id'])
|
||||
job = QueryTask.add_task(params['query'], data_source)
|
||||
query_id = params.get('query_id', 'adhoc')
|
||||
job = QueryTask.add_task(params['query'], data_source, metadata={"Username": self.current_user.name, "Query ID": query_id})
|
||||
return {'job': job.to_dict()}
|
||||
|
||||
|
||||
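With the ttl parameter replaced by max_age above, a request body for QueryResultListAPI.post now looks roughly like the sketch below: max_age == 0 always enqueues a fresh QueryTask, while a positive value allows a cached result no older than that (the exact lookup lives in QueryResult.get_latest, which is not shown in this diff).

    payload = {
        "data_source_id": 1,                     # illustrative id
        "query": "SELECT count(*) FROM events",  # illustrative query
        "max_age": 0,                            # 0 -> always execute; >0 -> accept a recent cached result
        "query_id": 42,                          # optional; defaults to "adhoc" in the task metadata
    }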
@@ -449,16 +519,34 @@ class QueryResultAPI(BaseResource):
|
||||
writer.writer = utils.UnicodeWriter(s)
|
||||
writer.writeheader()
|
||||
for row in query_data['rows']:
|
||||
for k, v in row.iteritems():
|
||||
if isinstance(v, numbers.Number) and (v > 1000 * 1000 * 1000 * 100):
|
||||
row[k] = datetime.datetime.fromtimestamp(v/1000.0)
|
||||
|
||||
writer.writerow(row)
|
||||
|
||||
headers = {'Content-Type': "text/csv; charset=UTF-8"}
|
||||
headers.update(cache_headers)
|
||||
return make_response(s.getvalue(), 200, headers)
|
||||
|
||||
@staticmethod
|
||||
def add_cors_headers(headers):
|
||||
if 'Origin' in request.headers:
|
||||
origin = request.headers['Origin']
|
||||
|
||||
if origin in settings.ACCESS_CONTROL_ALLOW_ORIGIN:
|
||||
headers['Access-Control-Allow-Origin'] = origin
|
||||
headers['Access-Control-Allow-Credentials'] = str(settings.ACCESS_CONTROL_ALLOW_CREDENTIALS).lower()
|
||||
|
||||
@require_permission('view_query')
|
||||
def options(self, query_id=None, query_result_id=None, filetype='json'):
|
||||
headers = {}
|
||||
self.add_cors_headers(headers)
|
||||
|
||||
if settings.ACCESS_CONTROL_REQUEST_METHOD:
|
||||
headers['Access-Control-Request-Method'] = settings.ACCESS_CONTROL_REQUEST_METHOD
|
||||
|
||||
if settings.ACCESS_CONTROL_ALLOW_HEADERS:
|
||||
headers['Access-Control-Allow-Headers'] = settings.ACCESS_CONTROL_ALLOW_HEADERS
|
||||
|
||||
return make_response("", 200, headers)
|
||||
|
||||
@require_permission('view_query')
|
||||
def get(self, query_id=None, query_result_id=None, filetype='json'):
|
||||
if query_result_id is None and query_id is not None:
|
||||
@@ -470,9 +558,33 @@ class QueryResultAPI(BaseResource):
|
||||
query_result = models.QueryResult.get_by_id(query_result_id)
|
||||
|
||||
if query_result:
|
||||
if isinstance(self.current_user, models.ApiUser):
|
||||
event = {
|
||||
'user_id': None,
|
||||
'action': 'api_get',
|
||||
'timestamp': int(time.time()),
|
||||
'api_key': self.current_user.id,
|
||||
'file_type': filetype
|
||||
}
|
||||
|
||||
if query_id:
|
||||
event['object_type'] = 'query'
|
||||
event['object_id'] = query_id
|
||||
else:
|
||||
event['object_type'] = 'query_result'
|
||||
event['object_id'] = query_result_id
|
||||
|
||||
record_event.delay(event)
|
||||
|
||||
headers = {}
|
||||
|
||||
if len(settings.ACCESS_CONTROL_ALLOW_ORIGIN) > 0:
|
||||
self.add_cors_headers(headers)
|
||||
|
||||
if filetype == 'json':
|
||||
data = json.dumps({'query_result': query_result.to_dict()}, cls=utils.JSONEncoder)
|
||||
return make_response(data, 200, cache_headers)
|
||||
headers.update(cache_headers)
|
||||
return make_response(data, 200, headers)
|
||||
else:
|
||||
return self.csv_response(query_result)
|
||||
|
||||
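The CORS handling above is driven by a handful of settings; a sketch of plausible values follows, with the caveat that the environment variables behind these settings are not part of this diff.

    ACCESS_CONTROL_ALLOW_ORIGIN = ["https://dashboards.example.com"]  # origins allowed to fetch results cross-site
    ACCESS_CONTROL_ALLOW_CREDENTIALS = False                          # emitted as "false" in the response header
    ACCESS_CONTROL_REQUEST_METHOD = "GET, POST, OPTIONS"
    ACCESS_CONTROL_ALLOW_HEADERS = "Content-Type"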
@@ -500,13 +612,110 @@ class JobAPI(BaseResource):
|
||||
|
||||
api.add_resource(JobAPI, '/api/jobs/<job_id>', endpoint='job')
|
||||
|
||||
|
||||
class AlertAPI(BaseResource):
|
||||
def get(self, alert_id):
|
||||
alert = models.Alert.get_by_id(alert_id)
|
||||
return alert.to_dict()
|
||||
|
||||
def post(self, alert_id):
|
||||
req = request.get_json(True)
|
||||
params = project(req, ('options', 'name', 'query_id'))
|
||||
alert = models.Alert.get_by_id(alert_id)
|
||||
if 'query_id' in params:
|
||||
params['query'] = params.pop('query_id')
|
||||
|
||||
alert.update_instance(**params)
|
||||
|
||||
record_event.delay({
|
||||
'user_id': self.current_user.id,
|
||||
'action': 'edit',
|
||||
'timestamp': int(time.time()),
|
||||
'object_id': alert.id,
|
||||
'object_type': 'alert'
|
||||
})
|
||||
|
||||
return alert.to_dict()
|
||||
|
||||
|
||||
class AlertListAPI(BaseResource):
|
||||
def post(self):
|
||||
req = request.get_json(True)
|
||||
required_fields = ('options', 'name', 'query_id')
|
||||
for f in required_fields:
|
||||
if f not in req:
|
||||
abort(400)
|
||||
|
||||
alert = models.Alert.create(
|
||||
name=req['name'],
|
||||
query=req['query_id'],
|
||||
user=self.current_user,
|
||||
options=req['options']
|
||||
)
|
||||
|
||||
record_event.delay({
|
||||
'user_id': self.current_user.id,
|
||||
'action': 'create',
|
||||
'timestamp': int(time.time()),
|
||||
'object_id': alert.id,
|
||||
'object_type': 'alert'
|
||||
})
|
||||
|
||||
# TODO: should be in model?
|
||||
models.AlertSubscription.create(alert=alert, user=self.current_user)
|
||||
|
||||
record_event.delay({
|
||||
'user_id': self.current_user.id,
|
||||
'action': 'subscribe',
|
||||
'timestamp': int(time.time()),
|
||||
'object_id': alert.id,
|
||||
'object_type': 'alert'
|
||||
})
|
||||
|
||||
return alert.to_dict()
|
||||
|
||||
def get(self):
|
||||
return [alert.to_dict() for alert in models.Alert.all()]
|
||||
|
||||
|
||||
class AlertSubscriptionListResource(BaseResource):
|
||||
def post(self, alert_id):
|
||||
subscription = models.AlertSubscription.create(alert=alert_id, user=self.current_user)
|
||||
record_event.delay({
|
||||
'user_id': self.current_user.id,
|
||||
'action': 'subscribe',
|
||||
'timestamp': int(time.time()),
|
||||
'object_id': alert_id,
|
||||
'object_type': 'alert'
|
||||
})
|
||||
return subscription.to_dict()
|
||||
|
||||
def get(self, alert_id):
|
||||
subscriptions = models.AlertSubscription.all(alert_id)
|
||||
return [s.to_dict() for s in subscriptions]
|
||||
|
||||
|
||||
class AlertSubscriptionResource(BaseResource):
|
||||
def delete(self, alert_id, subscriber_id):
|
||||
models.AlertSubscription.unsubscribe(alert_id, subscriber_id)
|
||||
record_event.delay({
|
||||
'user_id': self.current_user.id,
|
||||
'action': 'unsubscribe',
|
||||
'timestamp': int(time.time()),
|
||||
'object_id': alert_id,
|
||||
'object_type': 'alert'
|
||||
})
|
||||
|
||||
api.add_resource(AlertAPI, '/api/alerts/<alert_id>', endpoint='alert')
|
||||
api.add_resource(AlertSubscriptionListResource, '/api/alerts/<alert_id>/subscriptions', endpoint='alert_subscriptions')
|
||||
api.add_resource(AlertSubscriptionResource, '/api/alerts/<alert_id>/subscriptions/<subscriber_id>', endpoint='alert_subscription')
|
||||
api.add_resource(AlertListAPI, '/api/alerts', endpoint='alerts')
|
||||
|
||||
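A sketch of the body POSTed to /api/alerts; the endpoint only checks that options, name and query_id are present and stores options as-is, so the keys inside options below are illustrative.

    alert_payload = {
        "name": "Signups dropped",
        "query_id": 17,                                                      # illustrative query id
        "options": {"column": "signups", "op": "less than", "value": 100},  # illustrative contents
    }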
@app.route('/<path:filename>')
|
||||
def send_static(filename):
|
||||
return send_from_directory(settings.STATIC_ASSETS_PATH, filename)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
app.run(debug=True)
|
||||
|
||||
|
||||
if current_app.debug:
|
||||
cache_timeout = 0
|
||||
else:
|
||||
cache_timeout = None
|
||||
|
||||
return send_from_directory(settings.STATIC_ASSETS_PATH, filename, cache_timeout=cache_timeout)
|
||||
|
||||
@@ -1,34 +0,0 @@
|
||||
import json
|
||||
|
||||
|
||||
def get_query_runner(connection_type, connection_string):
|
||||
if connection_type == 'mysql':
|
||||
from redash.data import query_runner_mysql
|
||||
runner = query_runner_mysql.mysql(connection_string)
|
||||
elif connection_type == 'graphite':
|
||||
from redash.data import query_runner_graphite
|
||||
connection_params = json.loads(connection_string)
|
||||
if connection_params['auth']:
|
||||
connection_params['auth'] = tuple(connection_params['auth'])
|
||||
else:
|
||||
connection_params['auth'] = None
|
||||
runner = query_runner_graphite.graphite(connection_params)
|
||||
elif connection_type == 'bigquery':
|
||||
from redash.data import query_runner_bigquery
|
||||
connection_params = json.loads(connection_string)
|
||||
runner = query_runner_bigquery.bigquery(connection_params)
|
||||
elif connection_type == 'script':
|
||||
from redash.data import query_runner_script
|
||||
runner = query_runner_script.script(connection_string)
|
||||
elif connection_type == 'url':
|
||||
from redash.data import query_runner_url
|
||||
runner = query_runner_url.url(connection_string)
|
||||
elif connection_type == "mongo":
|
||||
from redash.data import query_runner_mongodb
|
||||
connection_params = json.loads(connection_string)
|
||||
runner = query_runner_mongodb.mongodb(connection_params)
|
||||
else:
|
||||
from redash.data import query_runner_pg
|
||||
runner = query_runner_pg.pg(connection_string)
|
||||
|
||||
return runner
|
||||
@@ -1,138 +0,0 @@
|
||||
import datetime
|
||||
import httplib2
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
import time
|
||||
|
||||
try:
|
||||
import apiclient.errors
|
||||
from apiclient.discovery import build
|
||||
from apiclient.errors import HttpError
|
||||
from oauth2client.client import SignedJwtAssertionCredentials
|
||||
except ImportError:
|
||||
print "Missing dependencies. Please install google-api-python-client and oauth2client."
|
||||
print "You can use pip: pip install google-api-python-client oauth2client"
|
||||
|
||||
from redash.utils import JSONEncoder
|
||||
|
||||
types_map = {
|
||||
'INTEGER': 'integer',
|
||||
'FLOAT': 'float',
|
||||
'BOOLEAN': 'boolean',
|
||||
'STRING': 'string',
|
||||
'TIMESTAMP': 'datetime',
|
||||
}
|
||||
|
||||
def transform_row(row, fields):
|
||||
column_index = 0
|
||||
row_data = {}
|
||||
|
||||
for cell in row["f"]:
|
||||
field = fields[column_index]
|
||||
cell_value = cell['v']
|
||||
|
||||
if cell_value is None:
|
||||
pass
|
||||
# Otherwise just cast the value
|
||||
elif field['type'] == 'INTEGER':
|
||||
cell_value = int(cell_value)
|
||||
elif field['type'] == 'FLOAT':
|
||||
cell_value = float(cell_value)
|
||||
elif field['type'] == 'BOOLEAN':
|
||||
cell_value = cell_value.lower() == "true"
|
||||
elif field['type'] == 'TIMESTAMP':
|
||||
cell_value = datetime.datetime.fromtimestamp(float(cell_value))
|
||||
|
||||
row_data[field["name"]] = cell_value
|
||||
column_index += 1
|
||||
|
||||
return row_data
|
||||
|
||||
def bigquery(connection_string):
|
||||
def load_key(filename):
|
||||
f = file(filename, "rb")
|
||||
try:
|
||||
return f.read()
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
def get_bigquery_service():
|
||||
scope = [
|
||||
"https://www.googleapis.com/auth/bigquery",
|
||||
]
|
||||
|
||||
credentials = SignedJwtAssertionCredentials(connection_string["serviceAccount"],
|
||||
load_key(connection_string["privateKey"]), scope=scope)
|
||||
http = httplib2.Http()
|
||||
http = credentials.authorize(http)
|
||||
|
||||
return build("bigquery", "v2", http=http)
|
||||
|
||||
def get_query_results(jobs, project_id, job_id, start_index):
|
||||
query_reply = jobs.getQueryResults(projectId=project_id, jobId=job_id, startIndex=start_index).execute()
|
||||
logging.debug('query_reply %s', query_reply)
|
||||
if not query_reply['jobComplete']:
|
||||
time.sleep(10)
|
||||
return get_query_results(jobs, project_id, job_id, start_index)
|
||||
|
||||
return query_reply
|
||||
|
||||
def query_runner(query):
|
||||
bigquery_service = get_bigquery_service()
|
||||
|
||||
jobs = bigquery_service.jobs()
|
||||
job_data = {
|
||||
"configuration": {
|
||||
"query": {
|
||||
"query": query,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
logging.debug("bigquery got query: %s", query)
|
||||
|
||||
project_id = connection_string["projectId"]
|
||||
|
||||
try:
|
||||
insert_response = jobs.insert(projectId=project_id, body=job_data).execute()
|
||||
current_row = 0
|
||||
query_reply = get_query_results(jobs, project_id=project_id,
|
||||
job_id=insert_response['jobReference']['jobId'], start_index=current_row)
|
||||
|
||||
logging.debug("bigquery replied: %s", query_reply)
|
||||
|
||||
rows = []
|
||||
|
||||
while ("rows" in query_reply) and current_row < query_reply['totalRows']:
|
||||
for row in query_reply["rows"]:
|
||||
rows.append(transform_row(row, query_reply["schema"]["fields"]))
|
||||
|
||||
current_row += len(query_reply['rows'])
|
||||
query_reply = jobs.getQueryResults(projectId=project_id, jobId=query_reply['jobReference']['jobId'],
|
||||
startIndex=current_row).execute()
|
||||
|
||||
columns = [{'name': f["name"],
|
||||
'friendly_name': f["name"],
|
||||
'type': types_map.get(f['type'], "string")} for f in query_reply["schema"]["fields"]]
|
||||
|
||||
data = {
|
||||
"columns": columns,
|
||||
"rows": rows
|
||||
}
|
||||
error = None
|
||||
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
except apiclient.errors.HttpError, e:
|
||||
json_data = None
|
||||
error = e.content
|
||||
except KeyboardInterrupt:
|
||||
error = "Query cancelled by user."
|
||||
json_data = None
|
||||
except Exception:
|
||||
raise sys.exc_info()[1], None, sys.exc_info()[2]
|
||||
|
||||
return json_data, error
|
||||
|
||||
|
||||
return query_runner
|
||||
@@ -1,46 +0,0 @@
|
||||
"""
|
||||
QueryRunner for Graphite.
|
||||
"""
|
||||
import json
|
||||
import datetime
|
||||
import requests
|
||||
from redash.utils import JSONEncoder
|
||||
|
||||
|
||||
def graphite(connection_params):
|
||||
def transform_result(response):
|
||||
columns = [{'name': 'Time::x'}, {'name': 'value::y'}, {'name': 'name::series'}]
|
||||
rows = []
|
||||
|
||||
for series in response.json():
|
||||
for values in series['datapoints']:
|
||||
timestamp = datetime.datetime.fromtimestamp(int(values[1]))
|
||||
rows.append({'Time::x': timestamp, 'name::series': series['target'], 'value::y': values[0]})
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
return json.dumps(data, cls=JSONEncoder)
|
||||
|
||||
def query_runner(query):
|
||||
base_url = "%s/render?format=json&" % connection_params['url']
|
||||
url = "%s%s" % (base_url, "&".join(query.split("\n")))
|
||||
error = None
|
||||
data = None
|
||||
|
||||
try:
|
||||
response = requests.get(url, auth=connection_params['auth'],
|
||||
verify=connection_params['verify'])
|
||||
|
||||
if response.status_code == 200:
|
||||
data = transform_result(response)
|
||||
else:
|
||||
error = "Failed getting results (%d)" % response.status_code
|
||||
|
||||
except Exception, ex:
|
||||
data = None
|
||||
error = ex.message
|
||||
|
||||
return data, error
|
||||
|
||||
query_runner.annotate_query = False
|
||||
|
||||
return query_runner
|
||||
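For reference, the removed graphite runner joined the query's lines with "&" onto <url>/render?format=json&, so a query was simply one Graphite URL parameter per line; a sketch with a made-up metric name:

    query = "\n".join([
        "target=stats.counters.signups.count",  # illustrative target
        "from=-24hours",
        "until=now",
    ])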
@@ -1,242 +0,0 @@
|
||||
import datetime
|
||||
import logging
|
||||
import json
|
||||
import sys
|
||||
import re
|
||||
import time
|
||||
from redash.utils import JSONEncoder
|
||||
|
||||
try:
|
||||
import pymongo
|
||||
from bson.objectid import ObjectId
|
||||
from bson.son import SON
|
||||
except ImportError:
|
||||
print "Missing dependencies. Please install pymongo."
|
||||
print "You can use pip: pip install pymongo"
|
||||
raise
|
||||
|
||||
TYPES_MAP = {
|
||||
ObjectId : "string",
|
||||
str : "string",
|
||||
unicode : "string",
|
||||
int : "integer",
|
||||
long : "integer",
|
||||
float : "float",
|
||||
bool : "boolean",
|
||||
datetime.datetime: "datetime",
|
||||
}
|
||||
|
||||
date_regex = re.compile("ISODate\(\"(.*)\"\)", re.IGNORECASE)
|
||||
|
||||
# Simple query example:
|
||||
#
|
||||
# {
|
||||
# "collection" : "my_collection",
|
||||
# "query" : {
|
||||
# "date" : {
|
||||
# "$gt" : "ISODate(\"2015-01-15 11:41\")",
|
||||
# },
|
||||
# "type" : 1
|
||||
# },
|
||||
# "fields" : {
|
||||
# "_id" : 1,
|
||||
# "name" : 2
|
||||
# },
|
||||
# "sort" : [
|
||||
# {
|
||||
# "name" : "date",
|
||||
# "direction" : -1
|
||||
# }
|
||||
# ]
|
||||
#
|
||||
# }
|
||||
#
|
||||
#
|
||||
# Aggregation
|
||||
# ===========
|
||||
# Uses a syntax similar to the one used in PyMongo, however to support the
|
||||
# correct order of sorting, it uses a regular list for the "$sort" operation
|
||||
# that converts into a SON (sorted dictionary) object before execution.
|
||||
#
|
||||
# Aggregation query example:
|
||||
#
|
||||
# {
|
||||
# "collection" : "things",
|
||||
# "aggregate" : [
|
||||
# {
|
||||
# "$unwind" : "$tags"
|
||||
# },
|
||||
# {
|
||||
# "$group" : {
|
||||
# {
|
||||
# "_id" : "$tags",
|
||||
# "count" : { "$sum" : 1 }
|
||||
# }
|
||||
# }
|
||||
# },
|
||||
# {
|
||||
# "$sort" : [
|
||||
# {
|
||||
# "name" : "count",
|
||||
# "direction" : -1
|
||||
# },
|
||||
# {
|
||||
# "name" : "_id",
|
||||
# "direction" : -1
|
||||
# }
|
||||
# ]
|
||||
# }
|
||||
# ]
|
||||
# }
|
||||
#
|
||||
#
|
||||
def mongodb(connection_string):
|
||||
def _get_column_by_name(columns, column_name):
|
||||
for c in columns:
|
||||
if "name" in c and c["name"] == column_name:
|
||||
return c
|
||||
|
||||
return None
|
||||
|
||||
def _convert_date(q, field_name):
|
||||
m = date_regex.findall(q[field_name])
|
||||
if len(m) > 0:
|
||||
if q[field_name].find(":") == -1:
|
||||
q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d")))
|
||||
else:
|
||||
q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d %H:%M")))
|
||||
|
||||
def query_runner(query):
|
||||
if not "dbName" in connection_string or not connection_string["dbName"]:
|
||||
return None, "dbName is missing from connection string JSON or is empty"
|
||||
|
||||
db_name = connection_string["dbName"]
|
||||
|
||||
if not "connectionString" in connection_string or not connection_string["connectionString"]:
|
||||
return None, "connectionString is missing from connection string JSON or is empty"
|
||||
|
||||
        is_replica_set = True if "replicaSetName" in connection_string and connection_string["replicaSetName"] else False

        if is_replica_set:
            if not connection_string["replicaSetName"]:
                return None, "replicaSetName is set in the connection string JSON but is empty"

            db_connection = pymongo.MongoReplicaSetClient(connection_string["connectionString"], replicaSet=connection_string["replicaSetName"])
        else:
            db_connection = pymongo.MongoClient(connection_string["connectionString"])

        if db_name not in db_connection.database_names():
            return None, "Unknown database name '%s'" % db_name

        db = db_connection[db_name]

        logging.debug("mongodb connection string: %s", connection_string)
        logging.debug("mongodb got query: %s", query)

        try:
            query_data = json.loads(query)
        except:
            return None, "Invalid query format. The query is not a valid JSON."

        if "query" in query_data and "aggregate" in query_data:
            return None, "'query' and 'aggregate' sections cannot be used at the same time"

        collection = None
        if not "collection" in query_data:
            return None, "'collection' must be set"
        else:
            collection = query_data["collection"]

        q = None
        if "query" in query_data:
            q = query_data["query"]
            for k in q:
                if q[k] and type(q[k]) in [str, unicode]:
                    logging.debug(q[k])
                    _convert_date(q, k)
                elif q[k] and type(q[k]) is dict:
                    for k2 in q[k]:
                        if type(q[k][k2]) in [str, unicode]:
                            _convert_date(q[k], k2)

        f = None

        aggregate = None
        if "aggregate" in query_data:
            aggregate = query_data["aggregate"]
            for step in aggregate:
                if "$sort" in step:
                    sort_list = []
                    for sort_item in step["$sort"]:
                        sort_list.append((sort_item["name"], sort_item["direction"]))

                    step["$sort"] = SON(sort_list)

        if aggregate:
            pass
        else:
            s = None
            if "sort" in query_data and query_data["sort"]:
                s = []
                for field in query_data["sort"]:
                    s.append((field["name"], field["direction"]))

        if "fields" in query_data:
            f = query_data["fields"]

        columns = []
        rows = []

        error = None
        json_data = None

        cursor = None
        if q or (not q and not aggregate):
            if s:
                cursor = db[collection].find(q, f).sort(s)
            else:
                cursor = db[collection].find(q, f)

            if "skip" in query_data:
                cursor = cursor.skip(query_data["skip"])

            if "limit" in query_data:
                cursor = cursor.limit(query_data["limit"])

        elif aggregate:
            r = db[collection].aggregate(aggregate)
            cursor = r["result"]

        for r in cursor:
            for k in r:
                if _get_column_by_name(columns, k) is None:
                    columns.append({
                        "name": k,
                        "friendly_name": k,
                        "type": TYPES_MAP[type(r[k])] if type(r[k]) in TYPES_MAP else None
                    })

                # Convert ObjectId to string
                if type(r[k]) == ObjectId:
                    r[k] = str(r[k])

            rows.append(r)

        if f:
            ordered_columns = []
            for k in sorted(f, key=f.get):
                ordered_columns.append(_get_column_by_name(columns, k))

            columns = ordered_columns

        data = {
            "columns": columns,
            "rows": rows
        }
        error = None
        json_data = json.dumps(data, cls=JSONEncoder)

        return json_data, error

    query_runner.annotate_query = False
    return query_runner
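For reference, a sketch of the JSON query document this runner parses. The collection and field names below are hypothetical, but the keys mirror the checks above: "collection" is required, "query" and "aggregate" are mutually exclusive, and "fields" doubles as the column ordering.

# Hypothetical example of a query document accepted by the MongoDB runner above.
example_query = {
    "collection": "events",                       # required
    "query": {"type": "login"},                   # optional find() filter; use "aggregate" instead for pipelines
    "fields": {"_id": 1, "type": 2, "ts": 3},     # optional projection; the values also define column order
    "sort": [{"name": "ts", "direction": -1}],    # optional sort specification
    "limit": 100                                  # optional; "skip" works the same way
}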
@@ -1,64 +0,0 @@
"""
QueryRunner is the function that the workers use to execute queries. This is the MySQL
version, but we can easily write another to support additional databases.

Because the worker just passes the query through, this can be used with any data store that
has some sort of query language (for example: HiveQL).
"""
import logging
import json
import MySQLdb
import sys
from redash.utils import JSONEncoder

def mysql(connection_string):
    if connection_string.endswith(';'):
        connection_string = connection_string[0:-1]

    def query_runner(query):
        connections_params = [entry.split('=')[1] for entry in connection_string.split(';')]
        connection = MySQLdb.connect(*connections_params, charset="utf8", use_unicode=True)
        cursor = connection.cursor()

        logging.debug("mysql got query: %s", query)

        try:
            cursor.execute(query)

            data = cursor.fetchall()

            cursor_desc = cursor.description
            if cursor_desc is not None:
                num_fields = len(cursor_desc)
                column_names = [i[0] for i in cursor.description]

                rows = [dict(zip(column_names, row)) for row in data]

                columns = [{'name': col_name,
                            'friendly_name': col_name,
                            'type': None} for col_name in column_names]

                data = {'columns': columns, 'rows': rows}
                json_data = json.dumps(data, cls=JSONEncoder)
                error = None
            else:
                json_data = None
                error = "No data was returned."

            cursor.close()
        except MySQLdb.Error, e:
            json_data = None
            error = e.args[1]
        except KeyboardInterrupt:
            error = "Query cancelled by user."
            json_data = None
        except Exception as e:
            raise sys.exc_info()[1], None, sys.exc_info()[2]
        finally:
            connection.close()

        return json_data, error


    return query_runner
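A minimal usage sketch of the factory above, assuming hypothetical credentials. Only the values after '=' are used, and they are passed to MySQLdb.connect positionally (host, user, passwd, db), so the order of the entries matters.

# Hypothetical connection string and query.
runner = mysql("host=127.0.0.1;user=redash;passwd=secret;db=reports")
json_data, error = runner("SELECT 1 AS value")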
@@ -1,110 +0,0 @@
|
||||
"""
|
||||
QueryRunner is the function that the workers use to execute queries. This is the PostgreSQL
version, but we can easily write another to support additional databases (MySQL and others).

Because the worker just passes the query through, this can be used with any data store that
has some sort of query language (for example: HiveQL).
"""
|
||||
import json
|
||||
import sys
|
||||
import select
|
||||
import logging
|
||||
import psycopg2
|
||||
|
||||
from redash.utils import JSONEncoder
|
||||
|
||||
types_map = {
|
||||
20: 'integer',
|
||||
21: 'integer',
|
||||
23: 'integer',
|
||||
700: 'float',
|
||||
1700: 'float',
|
||||
701: 'float',
|
||||
16: 'boolean',
|
||||
1082: 'date',
|
||||
1114: 'datetime',
|
||||
1184: 'datetime',
|
||||
1014: 'string',
|
||||
1015: 'string',
|
||||
1008: 'string',
|
||||
1009: 'string',
|
||||
2951: 'string'
|
||||
}
|
||||
|
||||
|
||||
def pg(connection_string):
|
||||
def column_friendly_name(column_name):
|
||||
return column_name
|
||||
|
||||
def wait(conn):
|
||||
while 1:
|
||||
try:
|
||||
state = conn.poll()
|
||||
if state == psycopg2.extensions.POLL_OK:
|
||||
break
|
||||
elif state == psycopg2.extensions.POLL_WRITE:
|
||||
select.select([], [conn.fileno()], [])
|
||||
elif state == psycopg2.extensions.POLL_READ:
|
||||
select.select([conn.fileno()], [], [])
|
||||
else:
|
||||
raise psycopg2.OperationalError("poll() returned %s" % state)
|
||||
except select.error:
|
||||
raise psycopg2.OperationalError("select.error received")
|
||||
|
||||
def query_runner(query):
|
||||
connection = psycopg2.connect(connection_string, async=True)
|
||||
wait(connection)
|
||||
|
||||
cursor = connection.cursor()
|
||||
|
||||
try:
|
||||
cursor.execute(query)
|
||||
wait(connection)
|
||||
|
||||
# A set would be more efficient for the membership check here, but it would lose the column
# order we want to keep; the data is small enough that a list is fine.
|
||||
column_names = []
|
||||
columns = []
|
||||
duplicates_counter = 1
|
||||
|
||||
for column in cursor.description:
|
||||
# TODO: this deduplication needs to be generalized and reused in all query runners.
|
||||
column_name = column.name
|
||||
if column_name in column_names:
|
||||
column_name = column_name + str(duplicates_counter)
|
||||
duplicates_counter += 1
|
||||
|
||||
column_names.append(column_name)
|
||||
|
||||
columns.append({
|
||||
'name': column_name,
|
||||
'friendly_name': column_friendly_name(column_name),
|
||||
'type': types_map.get(column.type_code, None)
|
||||
})
|
||||
|
||||
rows = [dict(zip(column_names, row)) for row in cursor]
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
error = None
|
||||
cursor.close()
|
||||
except (select.error, OSError) as e:
|
||||
logging.exception(e)
|
||||
error = "Query interrupted. Please retry."
|
||||
json_data = None
|
||||
except psycopg2.DatabaseError as e:
|
||||
logging.exception(e)
|
||||
json_data = None
|
||||
error = e.message
|
||||
except KeyboardInterrupt:
|
||||
connection.cancel()
|
||||
error = "Query cancelled by user."
|
||||
json_data = None
|
||||
except Exception as e:
|
||||
raise sys.exc_info()[1], None, sys.exc_info()[2]
|
||||
finally:
|
||||
connection.close()
|
||||
|
||||
return json_data, error
|
||||
|
||||
return query_runner
|
||||
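The column de-duplication loop above (flagged with a TODO to generalize it across query runners) boils down to appending a running counter to repeated column names. A standalone sketch of that rule:

# Standalone sketch of the de-duplication rule used in the pg runner above.
def dedup(column_names_in):
    column_names, duplicates_counter = [], 1
    for column_name in column_names_in:
        if column_name in column_names:
            column_name = column_name + str(duplicates_counter)
            duplicates_counter += 1
        column_names.append(column_name)
    return column_names

print(dedup(["count", "count", "id"]))  # ['count', 'count1', 'id']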
@@ -1,51 +0,0 @@
import json
import logging
import sys
import os
import subprocess

# We use subprocess.check_output because we are lazy.
# If someone really wants to run this on Python < 2.7 they can easily update the code to use
# Popen, check the return codes and read the standard output into a variable.
if not "check_output" in subprocess.__dict__:
    print "ERROR: This runner uses the subprocess.check_output function, which exists only in Python 2.7+"

def script(connection_string):

    def query_runner(query):
        try:
            json_data = None
            error = None

            if connection_string is None:
                return None, "script execution path is not set. Please reconfigure the data source"

            # Poor man's protection against running scripts from outside the scripts directory
            if connection_string.find("../") > -1:
                return None, "Scripts can only be run from the configured scripts directory"

            query = query.strip()

            script = os.path.join(connection_string, query)
            if not os.path.exists(script):
                return None, "Script '%s' not found in script directory" % query

            output = subprocess.check_output(script, shell=False)
            if output is not None:
                output = output.strip()
                if output != "":
                    return output, None

            error = "Error reading output"
        except subprocess.CalledProcessError as e:
            return None, str(e)
        except KeyboardInterrupt:
            error = "Query cancelled by user."
            json_data = None
        except Exception as e:
            raise sys.exc_info()[1], None, sys.exc_info()[2]

        return json_data, error

    query_runner.annotate_query = False
    return query_runner
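A minimal usage sketch of the script runner above; the directory and script name are hypothetical. The "query" is simply the file name of a script inside the configured directory.

runner = script("/opt/redash/scripts")
output, error = runner("daily_report.sh")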
@@ -1,25 +1,25 @@
|
||||
import logging
|
||||
from flask.ext.login import login_user
|
||||
import requests
|
||||
from flask import redirect, url_for, Blueprint
|
||||
from flask import redirect, url_for, Blueprint, flash
|
||||
from flask_oauth import OAuth
|
||||
from redash import models, settings
|
||||
|
||||
logger = logging.getLogger('google_oauth')
|
||||
oauth = OAuth()
|
||||
|
||||
request_token_params = {'scope': 'https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile', 'response_type': 'code'}
|
||||
|
||||
if settings.GOOGLE_APPS_DOMAIN:
|
||||
request_token_params['hd'] = settings.GOOGLE_APPS_DOMAIN
|
||||
else:
|
||||
if not settings.GOOGLE_APPS_DOMAIN:
|
||||
logger.warning("No Google Apps domain defined, all Google accounts allowed.")
|
||||
|
||||
google = oauth.remote_app('google',
|
||||
base_url='https://www.google.com/accounts/',
|
||||
authorize_url='https://accounts.google.com/o/oauth2/auth',
|
||||
request_token_url=None,
|
||||
request_token_params=request_token_params,
|
||||
request_token_params={
|
||||
'scope': 'https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile',
|
||||
'response_type': 'code'
|
||||
},
|
||||
access_token_url='https://accounts.google.com/o/oauth2/token',
|
||||
access_token_method='POST',
|
||||
access_token_params={'grant_type': 'authorization_code'},
|
||||
@@ -31,7 +31,7 @@ blueprint = Blueprint('google_oauth', __name__)
|
||||
|
||||
|
||||
def get_user_profile(access_token):
|
||||
headers = {'Authorization': 'OAuth '+access_token}
|
||||
headers = {'Authorization': 'OAuth {}'.format(access_token)}
|
||||
response = requests.get('https://www.googleapis.com/oauth2/v1/userinfo', headers=headers)
|
||||
|
||||
if response.status_code == 401:
|
||||
@@ -41,9 +41,17 @@ def get_user_profile(access_token):
|
||||
return response.json()
|
||||
|
||||
|
||||
def verify_profile(profile):
|
||||
if not settings.GOOGLE_APPS_DOMAIN:
|
||||
return True
|
||||
|
||||
domain = profile['email'].split('@')[-1]
|
||||
return domain in settings.GOOGLE_APPS_DOMAIN
|
||||
|
||||
|
||||
def create_and_login_user(name, email):
|
||||
try:
|
||||
user_object = models.User.get(models.User.email == email)
|
||||
user_object = models.User.get_by_email(email)
|
||||
if user_object.name != name:
|
||||
logger.debug("Updating user name (%r -> %r)", user_object.name, name)
|
||||
user_object.name = name
|
||||
@@ -70,10 +78,17 @@ def authorized(resp):
|
||||
|
||||
if access_token is None:
|
||||
logger.warning("Access token missing in call back request.")
|
||||
flash("Validation error. Please retry.")
|
||||
return redirect(url_for('login'))
|
||||
|
||||
profile = get_user_profile(access_token)
|
||||
if profile is None:
|
||||
flash("Validation error. Please retry.")
|
||||
return redirect(url_for('login'))
|
||||
|
||||
if not verify_profile(profile):
|
||||
logger.warning("User tried to login with unauthorized domain name: %s", profile['email'])
|
||||
flash("Your Google Apps domain name isn't allowed.")
|
||||
return redirect(url_for('login'))
|
||||
|
||||
create_and_login_user(profile['name'], profile['email'])
|
||||
|
||||
@@ -28,7 +28,7 @@ class Importer(object):
|
||||
def import_query(self, user, query):
|
||||
new_query = self._get_or_create(models.Query, query['id'], name=query['name'],
|
||||
user=user,
|
||||
ttl=-1,
|
||||
schedule=None,
|
||||
query=query['query'],
|
||||
query_hash=query['query_hash'],
|
||||
description=query['description'],
|
||||
|
||||
redash/models.py (338 lines changed)
@@ -9,17 +9,21 @@ import itertools
|
||||
|
||||
import peewee
|
||||
from passlib.apps import custom_app_context as pwd_context
|
||||
from playhouse.postgres_ext import ArrayField
|
||||
from playhouse.postgres_ext import ArrayField, DateTimeTZField, PostgresqlExtDatabase
|
||||
from flask.ext.login import UserMixin, AnonymousUserMixin
|
||||
import psycopg2
|
||||
|
||||
from redash import utils, settings
|
||||
from redash import utils, settings, redis_connection
|
||||
from redash.query_runner import get_query_runner
|
||||
from utils import generate_token
|
||||
|
||||
|
||||
class Database(object):
|
||||
def __init__(self):
|
||||
self.database_config = dict(settings.DATABASE_CONFIG)
|
||||
self.database_config['register_hstore'] = False
|
||||
self.database_name = self.database_config.pop('name')
|
||||
self.database = peewee.PostgresqlDatabase(self.database_name, **self.database_config)
|
||||
self.database = PostgresqlExtDatabase(self.database_name, **self.database_config)
|
||||
self.app = None
|
||||
self.pid = os.getpid()
|
||||
|
||||
@@ -59,6 +63,41 @@ class BaseModel(peewee.Model):
|
||||
def get_by_id(cls, model_id):
|
||||
return cls.get(cls.id == model_id)
|
||||
|
||||
def pre_save(self, created):
|
||||
pass
|
||||
|
||||
def post_save(self, created):
|
||||
# Handler for post_save operations. Override if needed.
|
||||
pass
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
pk_value = self._get_pk_value()
|
||||
created = kwargs.get('force_insert', False) or not bool(pk_value)
|
||||
self.pre_save(created)
|
||||
super(BaseModel, self).save(*args, **kwargs)
|
||||
self.post_save(created)
|
||||
|
||||
def update_instance(self, **kwargs):
|
||||
for k, v in kwargs.items():
|
||||
# setattr(model_instance, field_name, field_obj.python_value(value))
|
||||
setattr(self, k, v)
|
||||
|
||||
dirty_fields = self.dirty_fields
|
||||
if hasattr(self, 'updated_at'):
|
||||
dirty_fields = dirty_fields + [self.__class__.updated_at]
|
||||
|
||||
self.save(only=dirty_fields)
|
||||
|
||||
|
||||
class ModelTimestampsMixin(BaseModel):
|
||||
updated_at = DateTimeTZField(default=datetime.datetime.now)
|
||||
created_at = DateTimeTZField(default=datetime.datetime.now)
|
||||
|
||||
def pre_save(self, created):
|
||||
super(ModelTimestampsMixin, self).pre_save(created)
|
||||
|
||||
self.updated_at = datetime.datetime.now()
|
||||
|
||||
|
||||
class PermissionsCheckMixin(object):
|
||||
def has_permission(self, permission):
|
||||
@@ -83,6 +122,9 @@ class ApiUser(UserMixin, PermissionsCheckMixin):
|
||||
def __init__(self, api_key):
|
||||
self.id = api_key
|
||||
|
||||
def __repr__(self):
|
||||
return u"<ApiUser: {}>".format(self.id)
|
||||
|
||||
@property
|
||||
def permissions(self):
|
||||
return ['view_query']
|
||||
@@ -96,7 +138,7 @@ class Group(BaseModel):
|
||||
name = peewee.CharField(max_length=100)
|
||||
permissions = ArrayField(peewee.CharField, default=DEFAULT_PERMISSIONS)
|
||||
tables = ArrayField(peewee.CharField)
|
||||
created_at = peewee.DateTimeField(default=datetime.datetime.now)
|
||||
created_at = DateTimeTZField(default=datetime.datetime.now)
|
||||
|
||||
class Meta:
|
||||
db_table = 'groups'
|
||||
@@ -114,7 +156,7 @@ class Group(BaseModel):
|
||||
return unicode(self.id)
|
||||
|
||||
|
||||
class User(BaseModel, UserMixin, PermissionsCheckMixin):
|
||||
class User(ModelTimestampsMixin, BaseModel, UserMixin, PermissionsCheckMixin):
|
||||
DEFAULT_GROUPS = ['default']
|
||||
|
||||
id = peewee.PrimaryKeyField()
|
||||
@@ -122,6 +164,7 @@ class User(BaseModel, UserMixin, PermissionsCheckMixin):
|
||||
email = peewee.CharField(max_length=320, index=True, unique=True)
|
||||
password_hash = peewee.CharField(max_length=128, null=True)
|
||||
groups = ArrayField(peewee.CharField, default=DEFAULT_GROUPS)
|
||||
api_key = peewee.CharField(max_length=40, unique=True)
|
||||
|
||||
class Meta:
|
||||
db_table = 'users'
|
||||
@@ -130,13 +173,27 @@ class User(BaseModel, UserMixin, PermissionsCheckMixin):
|
||||
return {
|
||||
'id': self.id,
|
||||
'name': self.name,
|
||||
'email': self.email
|
||||
'email': self.email,
|
||||
'gravatar_url': self.gravatar_url,
|
||||
'updated_at': self.updated_at,
|
||||
'created_at': self.created_at
|
||||
}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(User, self).__init__(*args, **kwargs)
|
||||
self._allowed_tables = None
|
||||
|
||||
def pre_save(self, created):
|
||||
super(User, self).pre_save(created)
|
||||
|
||||
if not self.api_key:
|
||||
self.api_key = generate_token(40)
|
||||
|
||||
@property
|
||||
def gravatar_url(self):
|
||||
email_md5 = hashlib.md5(self.email.lower()).hexdigest()
|
||||
return "https://www.gravatar.com/avatar/%s?s=40" % email_md5
|
||||
|
||||
@property
|
||||
def permissions(self):
|
||||
# TODO: this should be cached.
|
||||
@@ -156,8 +213,12 @@ class User(BaseModel, UserMixin, PermissionsCheckMixin):
|
||||
def get_by_email(cls, email):
|
||||
return cls.get(cls.email == email)
|
||||
|
||||
@classmethod
|
||||
def get_by_api_key(cls, api_key):
|
||||
return cls.get(cls.api_key == api_key)
|
||||
|
||||
def __unicode__(self):
|
||||
return '%r, %r' % (self.name, self.email)
|
||||
return u'%s (%s)' % (self.name, self.email)
|
||||
|
||||
def hash_password(self, password):
|
||||
self.password_hash = pwd_context.encrypt(password)
|
||||
@@ -173,7 +234,7 @@ class ActivityLog(BaseModel):
|
||||
user = peewee.ForeignKeyField(User)
|
||||
type = peewee.IntegerField()
|
||||
activity = peewee.TextField()
|
||||
created_at = peewee.DateTimeField(default=datetime.datetime.now)
|
||||
created_at = DateTimeTZField(default=datetime.datetime.now)
|
||||
|
||||
class Meta:
|
||||
db_table = 'activity_log'
|
||||
@@ -193,28 +254,68 @@ class ActivityLog(BaseModel):
|
||||
|
||||
class DataSource(BaseModel):
|
||||
id = peewee.PrimaryKeyField()
|
||||
name = peewee.CharField()
|
||||
name = peewee.CharField(unique=True)
|
||||
type = peewee.CharField()
|
||||
options = peewee.TextField()
|
||||
queue_name = peewee.CharField(default="queries")
|
||||
scheduled_queue_name = peewee.CharField(default="queries")
|
||||
created_at = peewee.DateTimeField(default=datetime.datetime.now)
|
||||
scheduled_queue_name = peewee.CharField(default="scheduled_queries")
|
||||
created_at = DateTimeTZField(default=datetime.datetime.now)
|
||||
|
||||
class Meta:
|
||||
db_table = 'data_sources'
|
||||
|
||||
def to_dict(self):
|
||||
return {
|
||||
def to_dict(self, all=False):
|
||||
d = {
|
||||
'id': self.id,
|
||||
'name': self.name,
|
||||
'type': self.type
|
||||
'type': self.type,
|
||||
'syntax': self.query_runner.syntax
|
||||
}
|
||||
|
||||
if all:
|
||||
d['options'] = json.loads(self.options)
|
||||
d['queue_name'] = self.queue_name
|
||||
d['scheduled_queue_name'] = self.scheduled_queue_name
|
||||
|
||||
return d
|
||||
|
||||
def __unicode__(self):
|
||||
return self.name
|
||||
|
||||
def get_schema(self, refresh=False):
|
||||
key = "data_source:schema:{}".format(self.id)
|
||||
|
||||
cache = None
|
||||
if not refresh:
|
||||
cache = redis_connection.get(key)
|
||||
|
||||
if cache is None:
|
||||
query_runner = self.query_runner
|
||||
schema = sorted(query_runner.get_schema(), key=lambda t: t['name'])
|
||||
|
||||
redis_connection.set(key, json.dumps(schema))
|
||||
else:
|
||||
schema = json.loads(cache)
|
||||
|
||||
return schema
|
||||
|
||||
@property
|
||||
def query_runner(self):
|
||||
return get_query_runner(self.type, self.options)
|
||||
|
||||
@classmethod
|
||||
def all(cls):
|
||||
return cls.select().order_by(cls.id.asc())
|
||||
|
||||
|
||||
class JSONField(peewee.TextField):
|
||||
def db_value(self, value):
|
||||
return json.dumps(value)
|
||||
|
||||
def python_value(self, value):
|
||||
return json.loads(value)
|
||||
|
||||
|
||||
class QueryResult(BaseModel):
|
||||
id = peewee.PrimaryKeyField()
|
||||
data_source = peewee.ForeignKeyField(DataSource)
|
||||
@@ -222,7 +323,7 @@ class QueryResult(BaseModel):
|
||||
query = peewee.TextField()
|
||||
data = peewee.TextField()
|
||||
runtime = peewee.FloatField()
|
||||
retrieved_at = peewee.DateTimeField()
|
||||
retrieved_at = DateTimeTZField()
|
||||
|
||||
class Meta:
|
||||
db_table = 'query_results'
|
||||
@@ -248,16 +349,16 @@ class QueryResult(BaseModel):
|
||||
return unused_results
|
||||
|
||||
@classmethod
|
||||
def get_latest(cls, data_source, query, ttl=0):
|
||||
def get_latest(cls, data_source, query, max_age=0):
|
||||
query_hash = utils.gen_query_hash(query)
|
||||
|
||||
if ttl == -1:
|
||||
if max_age == -1:
|
||||
query = cls.select().where(cls.query_hash == query_hash,
|
||||
cls.data_source == data_source).order_by(cls.retrieved_at.desc())
|
||||
else:
|
||||
query = cls.select().where(cls.query_hash == query_hash, cls.data_source == data_source,
|
||||
peewee.SQL("retrieved_at + interval '%s second' >= now() at time zone 'utc'",
|
||||
ttl)).order_by(cls.retrieved_at.desc())
|
||||
max_age)).order_by(cls.retrieved_at.desc())
|
||||
|
||||
return query.first()
|
||||
|
||||
@@ -272,42 +373,61 @@ class QueryResult(BaseModel):
|
||||
|
||||
logging.info("Inserted query (%s) data; id=%s", query_hash, query_result.id)
|
||||
|
||||
updated_count = Query.update(latest_query_data=query_result).\
|
||||
where(Query.query_hash==query_hash, Query.data_source==data_source_id).\
|
||||
execute()
|
||||
sql = "UPDATE queries SET latest_query_data_id = %s WHERE query_hash = %s AND data_source_id = %s RETURNING id"
|
||||
query_ids = [row[0] for row in db.database.execute_sql(sql, params=(query_result.id, query_hash, data_source_id))]
|
||||
|
||||
logging.info("Updated %s queries with result (%s).", updated_count, query_hash)
|
||||
# TODO: when peewee with update & returning support is released, we can get back to using this code:
|
||||
# updated_count = Query.update(latest_query_data=query_result).\
|
||||
# where(Query.query_hash==query_hash, Query.data_source==data_source_id).\
|
||||
# execute()
|
||||
|
||||
return query_result
|
||||
logging.info("Updated %s queries with result (%s).", len(query_ids), query_hash)
|
||||
|
||||
return query_result, query_ids
|
||||
|
||||
def __unicode__(self):
|
||||
return u"%d | %s | %s" % (self.id, self.query_hash, self.retrieved_at)
|
||||
|
||||
|
||||
class Query(BaseModel):
|
||||
def should_schedule_next(previous_iteration, now, schedule):
|
||||
if schedule.isdigit():
|
||||
ttl = int(schedule)
|
||||
next_iteration = previous_iteration + datetime.timedelta(seconds=ttl)
|
||||
else:
|
||||
hour, minute = schedule.split(':')
|
||||
hour, minute = int(hour), int(minute)
|
||||
|
||||
# The following logic is needed for cases like this one:
# - The query is scheduled to run at 23:59.
# - The scheduler wakes up at 00:01.
# - A naive comparison of the timestamps would skip that day's execution.
normalized_previous_iteration = previous_iteration.replace(hour=hour, minute=minute)
|
||||
if normalized_previous_iteration > previous_iteration:
|
||||
previous_iteration = normalized_previous_iteration - datetime.timedelta(days=1)
|
||||
|
||||
next_iteration = (previous_iteration + datetime.timedelta(days=1)).replace(hour=hour, minute=minute)
|
||||
|
||||
return now > next_iteration
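A worked example (with hypothetical dates) of the wrap-around case described in the comments above:

import datetime

# The query is scheduled for "23:59" but its last run happened a few minutes late, just
# after midnight. A naive next_iteration of previous_iteration + 1 day at 23:59 would land
# on April 3rd and the April 2nd run would be skipped; the replace()/roll-back above maps
# the previous iteration back to April 1st 23:59, keeping next_iteration on April 2nd.
previous_iteration = datetime.datetime(2015, 4, 2, 0, 5)
print(should_schedule_next(previous_iteration, datetime.datetime(2015, 4, 2, 12, 0), "23:59"))  # False
print(should_schedule_next(previous_iteration, datetime.datetime(2015, 4, 3, 0, 1), "23:59"))   # True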
|
||||
|
||||
|
||||
class Query(ModelTimestampsMixin, BaseModel):
|
||||
id = peewee.PrimaryKeyField()
|
||||
data_source = peewee.ForeignKeyField(DataSource)
|
||||
data_source = peewee.ForeignKeyField(DataSource, null=True)
|
||||
latest_query_data = peewee.ForeignKeyField(QueryResult, null=True)
|
||||
name = peewee.CharField(max_length=255)
|
||||
description = peewee.CharField(max_length=4096, null=True)
|
||||
query = peewee.TextField()
|
||||
query_hash = peewee.CharField(max_length=32)
|
||||
api_key = peewee.CharField(max_length=40)
|
||||
ttl = peewee.IntegerField()
|
||||
user_email = peewee.CharField(max_length=360, null=True)
|
||||
user = peewee.ForeignKeyField(User)
|
||||
last_modified_by = peewee.ForeignKeyField(User, null=True, related_name="modified_queries")
|
||||
is_archived = peewee.BooleanField(default=False, index=True)
|
||||
created_at = peewee.DateTimeField(default=datetime.datetime.now)
|
||||
schedule = peewee.CharField(max_length=10, null=True)
|
||||
|
||||
class Meta:
|
||||
db_table = 'queries'
|
||||
|
||||
def create_default_visualizations(self):
|
||||
table_visualization = Visualization(query=self, name="Table",
|
||||
description='',
|
||||
type="TABLE", options="{}")
|
||||
table_visualization.save()
|
||||
|
||||
def to_dict(self, with_stats=False, with_visualizations=False, with_user=True):
|
||||
d = {
|
||||
'id': self.id,
|
||||
@@ -316,15 +436,17 @@ class Query(BaseModel):
|
||||
'description': self.description,
|
||||
'query': self.query,
|
||||
'query_hash': self.query_hash,
|
||||
'ttl': self.ttl,
|
||||
'schedule': self.schedule,
|
||||
'api_key': self.api_key,
|
||||
'is_archived': self.is_archived,
|
||||
'updated_at': self.updated_at,
|
||||
'created_at': self.created_at,
|
||||
'data_source_id': self._data.get('data_source', None)
|
||||
}
|
||||
|
||||
if with_user:
|
||||
d['user'] = self.user.to_dict()
|
||||
d['last_modified_by'] = self.last_modified_by.to_dict() if self.last_modified_by is not None else None
|
||||
else:
|
||||
d['user_id'] = self._data['user']
|
||||
|
||||
@@ -340,7 +462,7 @@ class Query(BaseModel):
|
||||
|
||||
def archive(self):
|
||||
self.is_archived = True
|
||||
self.ttl = -1
|
||||
self.schedule = None
|
||||
|
||||
for vis in self.visualizations:
|
||||
for w in vis.widgets:
|
||||
@@ -361,27 +483,25 @@ class Query(BaseModel):
|
||||
|
||||
@classmethod
|
||||
def outdated_queries(cls):
|
||||
# TODO: this will only find scheduled queries that were executed before. I think this is
|
||||
# a reasonable assumption, but worth revisiting.
|
||||
outdated_queries_ids = cls.select(
|
||||
peewee.Func('first_value', cls.id).over(partition_by=[cls.query_hash, cls.data_source])) \
|
||||
.join(QueryResult) \
|
||||
.where(cls.ttl > 0,
|
||||
cls.is_archived==False,
|
||||
(QueryResult.retrieved_at +
|
||||
(cls.ttl * peewee.SQL("interval '1 second'"))) <
|
||||
peewee.SQL("(now() at time zone 'utc')"))
|
||||
queries = cls.select(cls, QueryResult.retrieved_at, DataSource)\
|
||||
.join(QueryResult)\
|
||||
.switch(Query).join(DataSource)\
|
||||
.where(cls.schedule != None)
|
||||
|
||||
queries = cls.select(cls, DataSource).join(DataSource) \
|
||||
.where(cls.id << outdated_queries_ids)
|
||||
now = utils.utcnow()
|
||||
outdated_queries = {}
|
||||
for query in queries:
|
||||
if should_schedule_next(query.latest_query_data.retrieved_at, now, query.schedule):
|
||||
key = "{}:{}".format(query.query_hash, query.data_source.id)
|
||||
outdated_queries[key] = query
|
||||
|
||||
return queries
|
||||
return outdated_queries.values()
|
||||
|
||||
@classmethod
|
||||
def search(cls, term):
|
||||
# This is very naive implementation of search, to be replaced with PostgreSQL full-text-search solution.
|
||||
|
||||
where = (cls.name**"%{}%".format(term)) | (cls.description**"%{}%".format(term))
|
||||
where = (cls.name**u"%{}%".format(term)) | (cls.description**u"%{}%".format(term))
|
||||
|
||||
if term.isdigit():
|
||||
where |= cls.id == term
|
||||
@@ -392,6 +512,7 @@ class Query(BaseModel):
|
||||
|
||||
@classmethod
|
||||
def recent(cls, user_id):
|
||||
# TODO: instead of t2 here, we should define table_alias for Query table
|
||||
return cls.select().where(Event.created_at > peewee.SQL("current_date - 7")).\
|
||||
join(Event, on=(Query.id == peewee.SQL("t2.object_id::integer"))).\
|
||||
where(Event.action << ('edit', 'execute', 'edit_name', 'edit_description', 'view_source')).\
|
||||
@@ -410,10 +531,23 @@ class Query(BaseModel):
|
||||
update = cls.update(**kwargs).where(cls.id == query_id)
|
||||
return update.execute()
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
def pre_save(self, created):
|
||||
super(Query, self).pre_save(created)
|
||||
self.query_hash = utils.gen_query_hash(self.query)
|
||||
self._set_api_key()
|
||||
super(Query, self).save(*args, **kwargs)
|
||||
|
||||
if self.last_modified_by is None:
|
||||
self.last_modified_by = self.user
|
||||
|
||||
def post_save(self, created):
|
||||
if created:
|
||||
self._create_default_visualizations()
|
||||
|
||||
def _create_default_visualizations(self):
|
||||
table_visualization = Visualization(query=self, name="Table",
|
||||
description='',
|
||||
type="TABLE", options="{}")
|
||||
table_visualization.save()
|
||||
|
||||
def _set_api_key(self):
|
||||
if not self.api_key:
|
||||
@@ -432,7 +566,84 @@ class Query(BaseModel):
|
||||
return unicode(self.id)
|
||||
|
||||
|
||||
class Dashboard(BaseModel):
|
||||
class Alert(ModelTimestampsMixin, BaseModel):
|
||||
UNKNOWN_STATE = 'unknown'
|
||||
OK_STATE = 'ok'
|
||||
TRIGGERED_STATE = 'triggered'
|
||||
|
||||
id = peewee.PrimaryKeyField()
|
||||
name = peewee.CharField()
|
||||
query = peewee.ForeignKeyField(Query, related_name='alerts')
|
||||
user = peewee.ForeignKeyField(User, related_name='alerts')
|
||||
options = JSONField()
|
||||
state = peewee.CharField(default=UNKNOWN_STATE)
|
||||
last_triggered_at = DateTimeTZField(null=True)
|
||||
|
||||
class Meta:
|
||||
db_table = 'alerts'
|
||||
|
||||
@classmethod
|
||||
def all(cls):
|
||||
return cls.select(Alert, User, Query).join(Query).switch(Alert).join(User)
|
||||
|
||||
def to_dict(self):
|
||||
return {
|
||||
'id': self.id,
|
||||
'name': self.name,
|
||||
'query': self.query.to_dict(),
|
||||
'user': self.user.to_dict(),
|
||||
'options': self.options,
|
||||
'state': self.state,
|
||||
'last_triggered_at': self.last_triggered_at,
|
||||
'updated_at': self.updated_at,
|
||||
'created_at': self.created_at
|
||||
}
|
||||
|
||||
def evaluate(self):
|
||||
data = json.loads(self.query.latest_query_data.data)
|
||||
# TODO: safeguard against empty results
|
||||
value = data['rows'][0][self.options['column']]
|
||||
op = self.options['op']
|
||||
|
||||
if op == 'greater than' and value > self.options['value']:
|
||||
new_state = self.TRIGGERED_STATE
|
||||
elif op == 'less than' and value < self.options['value']:
|
||||
new_state = self.TRIGGERED_STATE
|
||||
elif op == 'equals' and value == self.options['value']:
|
||||
new_state = self.TRIGGERED_STATE
|
||||
else:
|
||||
new_state = self.OK_STATE
|
||||
|
||||
return new_state
|
||||
|
||||
def subscribers(self):
|
||||
return User.select().join(AlertSubscription).where(AlertSubscription.alert==self)
|
||||
|
||||
|
||||
class AlertSubscription(ModelTimestampsMixin, BaseModel):
|
||||
user = peewee.ForeignKeyField(User)
|
||||
alert = peewee.ForeignKeyField(Alert)
|
||||
|
||||
class Meta:
|
||||
db_table = 'alert_subscriptions'
|
||||
|
||||
def to_dict(self):
|
||||
return {
|
||||
'user': self.user.to_dict(),
|
||||
'alert_id': self._data['alert']
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def all(cls, alert_id):
|
||||
return AlertSubscription.select(AlertSubscription, User).join(User).where(AlertSubscription.alert==alert_id)
|
||||
|
||||
@classmethod
|
||||
def unsubscribe(cls, alert_id, user_id):
|
||||
query = AlertSubscription.delete().where(AlertSubscription.alert==alert_id).where(AlertSubscription.user==user_id)
|
||||
return query.execute()
|
||||
|
||||
|
||||
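For reference, a sketch (with a hypothetical column and threshold) of the options object that Alert.evaluate() above reads:

# Alert.options compares one column of the first row of the latest result against a value.
options = {"column": "error_count", "op": "greater than", "value": 100}
# evaluate() returns TRIGGERED_STATE when rows[0]["error_count"] > 100, otherwise OK_STATE.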
class Dashboard(ModelTimestampsMixin, BaseModel):
|
||||
id = peewee.PrimaryKeyField()
|
||||
slug = peewee.CharField(max_length=140, index=True)
|
||||
name = peewee.CharField(max_length=100)
|
||||
@@ -441,7 +652,6 @@ class Dashboard(BaseModel):
|
||||
layout = peewee.TextField()
|
||||
dashboard_filters_enabled = peewee.BooleanField(default=False)
|
||||
is_archived = peewee.BooleanField(default=False, index=True)
|
||||
created_at = peewee.DateTimeField(default=datetime.datetime.now)
|
||||
|
||||
class Meta:
|
||||
db_table = 'dashboards'
|
||||
@@ -483,7 +693,9 @@ class Dashboard(BaseModel):
|
||||
'user_id': self._data['user'],
|
||||
'layout': layout,
|
||||
'dashboard_filters_enabled': self.dashboard_filters_enabled,
|
||||
'widgets': widgets_layout
|
||||
'widgets': widgets_layout,
|
||||
'updated_at': self.updated_at,
|
||||
'created_at': self.created_at
|
||||
}
|
||||
|
||||
@classmethod
|
||||
@@ -516,7 +728,7 @@ class Dashboard(BaseModel):
|
||||
return u"%s=%s" % (self.id, self.name)
|
||||
|
||||
|
||||
class Visualization(BaseModel):
|
||||
class Visualization(ModelTimestampsMixin, BaseModel):
|
||||
id = peewee.PrimaryKeyField()
|
||||
type = peewee.CharField(max_length=100)
|
||||
query = peewee.ForeignKeyField(Query, related_name='visualizations')
|
||||
@@ -534,6 +746,8 @@ class Visualization(BaseModel):
|
||||
'name': self.name,
|
||||
'description': self.description,
|
||||
'options': json.loads(self.options),
|
||||
'updated_at': self.updated_at,
|
||||
'created_at': self.created_at
|
||||
}
|
||||
|
||||
if with_query:
|
||||
@@ -545,14 +759,13 @@ class Visualization(BaseModel):
|
||||
return u"%s %s" % (self.id, self.type)
|
||||
|
||||
|
||||
class Widget(BaseModel):
|
||||
class Widget(ModelTimestampsMixin, BaseModel):
|
||||
id = peewee.PrimaryKeyField()
|
||||
visualization = peewee.ForeignKeyField(Visualization, related_name='widgets', null=True)
|
||||
text = peewee.TextField(null=True)
|
||||
width = peewee.IntegerField()
|
||||
options = peewee.TextField()
|
||||
dashboard = peewee.ForeignKeyField(Dashboard, related_name='widgets', index=True)
|
||||
created_at = peewee.DateTimeField(default=datetime.datetime.now)
|
||||
|
||||
# unused; kept for backward compatibility:
|
||||
type = peewee.CharField(max_length=100, null=True)
|
||||
@@ -567,7 +780,9 @@ class Widget(BaseModel):
|
||||
'width': self.width,
|
||||
'options': json.loads(self.options),
|
||||
'dashboard_id': self._data['dashboard'],
|
||||
'text': self.text
|
||||
'text': self.text,
|
||||
'updated_at': self.updated_at,
|
||||
'created_at': self.created_at
|
||||
}
|
||||
|
||||
if self.visualization and self.visualization.id:
|
||||
@@ -586,13 +801,14 @@ class Widget(BaseModel):
|
||||
self.dashboard.save()
|
||||
super(Widget, self).delete_instance(*args, **kwargs)
|
||||
|
||||
|
||||
class Event(BaseModel):
|
||||
user = peewee.ForeignKeyField(User, related_name="events")
|
||||
user = peewee.ForeignKeyField(User, related_name="events", null=True)
|
||||
action = peewee.CharField()
|
||||
object_type = peewee.CharField()
|
||||
object_id = peewee.CharField(null=True)
|
||||
additional_properties = peewee.TextField(null=True)
|
||||
created_at = peewee.DateTimeField(default=datetime.datetime.now)
|
||||
created_at = DateTimeTZField(default=datetime.datetime.now)
|
||||
|
||||
class Meta:
|
||||
db_table = 'events'
|
||||
@@ -616,7 +832,7 @@ class Event(BaseModel):
|
||||
return event
|
||||
|
||||
|
||||
all_models = (DataSource, User, QueryResult, Query, Dashboard, Visualization, Widget, ActivityLog, Group, Event)
|
||||
all_models = (DataSource, User, QueryResult, Query, Alert, Dashboard, Visualization, Widget, ActivityLog, Group, Event)
|
||||
|
||||
|
||||
def init_db():
|
||||
|
||||
redash/monitor.py (new file, 33 lines)
@@ -0,0 +1,33 @@
from redash import redis_connection, models, __version__

def get_status():
    status = {}
    info = redis_connection.info()
    status['redis_used_memory'] = info['used_memory_human']
    status['version'] = __version__
    status['queries_count'] = models.Query.select().count()
    status['query_results_count'] = models.QueryResult.select().count()
    status['unused_query_results_count'] = models.QueryResult.unused().count()
    status['dashboards_count'] = models.Dashboard.select().count()
    status['widgets_count'] = models.Widget.select().count()

    status['workers'] = []

    manager_status = redis_connection.hgetall('redash:status')
    status['manager'] = manager_status
    status['manager']['outdated_queries_count'] = len(models.Query.outdated_queries())

    queues = {}
    for ds in models.DataSource.select():
        for queue in (ds.queue_name, ds.scheduled_queue_name):
            queues.setdefault(queue, set())
            queues[queue].add(ds.name)

    status['manager']['queues'] = {}
    for queue, sources in queues.iteritems():
        status['manager']['queues'][queue] = {
            'data_sources': ', '.join(sources),
            'size': redis_connection.llen(queue)
        }

    return status
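A standalone sketch of the queue grouping above, with hypothetical data source names:

sources = [("pg_prod", "queries", "scheduled_queries"),
           ("bigquery", "bq_queries", "scheduled_queries")]

queues = {}
for name, queue_name, scheduled_queue_name in sources:
    for queue in (queue_name, scheduled_queue_name):
        queues.setdefault(queue, set())
        queues[queue].add(name)

# queues == {'queries': set(['pg_prod']),
#            'bq_queries': set(['bigquery']),
#            'scheduled_queries': set(['pg_prod', 'bigquery'])}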
redash/query_runner/__init__.py (new file, 121 lines)
@@ -0,0 +1,121 @@
|
||||
import logging
|
||||
import json
|
||||
|
||||
import jsonschema
|
||||
from jsonschema import ValidationError
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
__all__ = [
|
||||
'ValidationError',
|
||||
'BaseQueryRunner',
|
||||
'TYPE_DATETIME',
|
||||
'TYPE_BOOLEAN',
|
||||
'TYPE_INTEGER',
|
||||
'TYPE_STRING',
|
||||
'TYPE_DATE',
|
||||
'TYPE_FLOAT',
|
||||
'SUPPORTED_COLUMN_TYPES',
|
||||
'register',
|
||||
'get_query_runner',
|
||||
'import_query_runners'
|
||||
]
|
||||
|
||||
# Valid types of columns returned in results:
|
||||
TYPE_INTEGER = 'integer'
|
||||
TYPE_FLOAT = 'float'
|
||||
TYPE_BOOLEAN = 'boolean'
|
||||
TYPE_STRING = 'string'
|
||||
TYPE_DATETIME = 'datetime'
|
||||
TYPE_DATE = 'date'
|
||||
|
||||
SUPPORTED_COLUMN_TYPES = set([
|
||||
TYPE_INTEGER,
|
||||
TYPE_FLOAT,
|
||||
TYPE_BOOLEAN,
|
||||
TYPE_STRING,
|
||||
TYPE_DATETIME,
|
||||
TYPE_DATE
|
||||
])
|
||||
|
||||
class BaseQueryRunner(object):
|
||||
def __init__(self, configuration):
|
||||
jsonschema.validate(configuration, self.configuration_schema())
|
||||
self.syntax = 'sql'
|
||||
self.configuration = configuration
|
||||
|
||||
@classmethod
|
||||
def name(cls):
|
||||
return cls.__name__
|
||||
|
||||
@classmethod
|
||||
def type(cls):
|
||||
return cls.__name__.lower()
|
||||
|
||||
@classmethod
|
||||
def enabled(cls):
|
||||
return True
|
||||
|
||||
@classmethod
|
||||
def annotate_query(cls):
|
||||
return True
|
||||
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {}
|
||||
|
||||
def run_query(self, query):
|
||||
raise NotImplementedError()
|
||||
|
||||
def get_schema(self):
|
||||
return []
|
||||
|
||||
@classmethod
|
||||
def to_dict(cls):
|
||||
return {
|
||||
'name': cls.name(),
|
||||
'type': cls.type(),
|
||||
'configuration_schema': cls.configuration_schema()
|
||||
}
|
||||
|
||||
|
||||
query_runners = {}
|
||||
|
||||
|
||||
def register(query_runner_class):
|
||||
global query_runners
|
||||
if query_runner_class.enabled():
|
||||
logger.debug("Registering %s (%s) query runner.", query_runner_class.name(), query_runner_class.type())
|
||||
query_runners[query_runner_class.type()] = query_runner_class
|
||||
else:
|
||||
logger.warning("%s query runner enabled but not supported, not registering. Either disable or install missing dependencies.", query_runner_class.name())
|
||||
|
||||
|
||||
def get_query_runner(query_runner_type, configuration_json):
|
||||
query_runner_class = query_runners.get(query_runner_type, None)
|
||||
if query_runner_class is None:
|
||||
return None
|
||||
|
||||
return query_runner_class(json.loads(configuration_json))
|
||||
|
||||
|
||||
def validate_configuration(query_runner_type, configuration_json):
|
||||
query_runner_class = query_runners.get(query_runner_type, None)
|
||||
if query_runner_class is None:
|
||||
return False
|
||||
|
||||
try:
|
||||
if isinstance(configuration_json, basestring):
|
||||
configuration = json.loads(configuration_json)
|
||||
else:
|
||||
configuration = configuration_json
|
||||
jsonschema.validate(configuration, query_runner_class.configuration_schema())
|
||||
except (ValidationError, ValueError):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def import_query_runners(query_runner_imports):
|
||||
for runner_import in query_runner_imports:
|
||||
__import__(runner_import)
|
||||
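A minimal sketch of how the registry above is meant to be used; the Echo runner and its 'prefix' configuration key are made up for illustration:

import json
from redash.query_runner import BaseQueryRunner, register, TYPE_INTEGER

class Echo(BaseQueryRunner):
    @classmethod
    def configuration_schema(cls):
        return {
            'type': 'object',
            'properties': {'prefix': {'type': 'string'}},
            'required': ['prefix']
        }

    def run_query(self, query):
        # Return a single-row result in the same JSON shape the other runners produce.
        data = {'columns': [{'name': 'length', 'friendly_name': 'length', 'type': TYPE_INTEGER}],
                'rows': [{'length': len(query)}]}
        return json.dumps(data), None

register(Echo)
# get_query_runner('echo', '{"prefix": "x"}') now returns a configured Echo instance.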
redash/query_runner/big_query.py (new file, 202 lines)
@@ -0,0 +1,202 @@
|
||||
from base64 import b64decode
|
||||
import datetime
|
||||
import json
|
||||
import httplib2
|
||||
import logging
|
||||
import sys
|
||||
import time
|
||||
|
||||
import requests
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
import apiclient.errors
|
||||
from apiclient.discovery import build
|
||||
from apiclient.errors import HttpError
|
||||
from oauth2client.client import SignedJwtAssertionCredentials
|
||||
from oauth2client import gce
|
||||
|
||||
enabled = True
|
||||
except ImportError:
|
||||
logger.warning("Missing dependencies. Please install google-api-python-client and oauth2client.")
|
||||
logger.warning("You can use pip: pip install google-api-python-client oauth2client")
|
||||
|
||||
enabled = False
|
||||
|
||||
types_map = {
|
||||
'INTEGER': TYPE_INTEGER,
|
||||
'FLOAT': TYPE_FLOAT,
|
||||
'BOOLEAN': TYPE_BOOLEAN,
|
||||
'STRING': TYPE_STRING,
|
||||
'TIMESTAMP': TYPE_DATETIME,
|
||||
}
|
||||
|
||||
|
||||
def transform_row(row, fields):
|
||||
column_index = 0
|
||||
row_data = {}
|
||||
|
||||
for cell in row["f"]:
|
||||
field = fields[column_index]
|
||||
cell_value = cell['v']
|
||||
|
||||
if cell_value is None:
|
||||
pass
|
||||
# Otherwise just cast the value
|
||||
elif field['type'] == 'INTEGER':
|
||||
cell_value = int(cell_value)
|
||||
elif field['type'] == 'FLOAT':
|
||||
cell_value = float(cell_value)
|
||||
elif field['type'] == 'BOOLEAN':
|
||||
cell_value = cell_value.lower() == "true"
|
||||
elif field['type'] == 'TIMESTAMP':
|
||||
cell_value = datetime.datetime.fromtimestamp(float(cell_value))
|
||||
|
||||
row_data[field["name"]] = cell_value
|
||||
column_index += 1
|
||||
|
||||
return row_data
|
||||
|
||||
|
||||
def _load_key(filename):
|
||||
f = file(filename, "rb")
|
||||
try:
|
||||
return f.read()
|
||||
finally:
|
||||
f.close()
|
||||
|
||||
|
||||
def _get_query_results(jobs, project_id, job_id, start_index):
|
||||
query_reply = jobs.getQueryResults(projectId=project_id, jobId=job_id, startIndex=start_index).execute()
|
||||
logging.debug('query_reply %s', query_reply)
|
||||
if not query_reply['jobComplete']:
|
||||
time.sleep(10)
|
||||
return _get_query_results(jobs, project_id, job_id, start_index)
|
||||
|
||||
return query_reply
|
||||
|
||||
|
||||
class BigQuery(BaseQueryRunner):
|
||||
@classmethod
|
||||
def enabled(cls):
|
||||
return enabled
|
||||
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'projectId': {
|
||||
'type': 'string',
|
||||
'title': 'Project ID'
|
||||
},
|
||||
'jsonKeyFile': {
|
||||
"type": "string",
|
||||
'title': 'JSON Key File'
|
||||
}
|
||||
},
|
||||
'required': ['jsonKeyFile', 'projectId']
|
||||
}
|
||||
|
||||
def __init__(self, configuration_json):
|
||||
super(BigQuery, self).__init__(configuration_json)
|
||||
|
||||
def _get_bigquery_service(self):
|
||||
scope = [
|
||||
"https://www.googleapis.com/auth/bigquery",
|
||||
]
|
||||
|
||||
key = json.loads(b64decode(self.configuration['jsonKeyFile']))
|
||||
|
||||
credentials = SignedJwtAssertionCredentials(key['client_email'], key['private_key'], scope=scope)
|
||||
http = httplib2.Http()
|
||||
http = credentials.authorize(http)
|
||||
|
||||
return build("bigquery", "v2", http=http)
|
||||
|
||||
def _get_project_id(self):
|
||||
return self.configuration["projectId"]
|
||||
|
||||
def run_query(self, query):
|
||||
bigquery_service = self._get_bigquery_service()
|
||||
|
||||
jobs = bigquery_service.jobs()
|
||||
job_data = {
|
||||
"configuration": {
|
||||
"query": {
|
||||
"query": query,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
logger.debug("BigQuery got query: %s", query)
|
||||
|
||||
project_id = self._get_project_id()
|
||||
|
||||
try:
|
||||
insert_response = jobs.insert(projectId=project_id, body=job_data).execute()
|
||||
current_row = 0
|
||||
query_reply = _get_query_results(jobs, project_id=project_id,
|
||||
job_id=insert_response['jobReference']['jobId'], start_index=current_row)
|
||||
|
||||
logger.debug("bigquery replied: %s", query_reply)
|
||||
|
||||
rows = []
|
||||
|
||||
while ("rows" in query_reply) and current_row < query_reply['totalRows']:
|
||||
for row in query_reply["rows"]:
|
||||
rows.append(transform_row(row, query_reply["schema"]["fields"]))
|
||||
|
||||
current_row += len(query_reply['rows'])
|
||||
query_reply = jobs.getQueryResults(projectId=project_id, jobId=query_reply['jobReference']['jobId'],
|
||||
startIndex=current_row).execute()
|
||||
|
||||
columns = [{'name': f["name"],
|
||||
'friendly_name': f["name"],
|
||||
'type': types_map.get(f['type'], "string")} for f in query_reply["schema"]["fields"]]
|
||||
|
||||
data = {
|
||||
"columns": columns,
|
||||
"rows": rows
|
||||
}
|
||||
error = None
|
||||
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
except apiclient.errors.HttpError, e:
|
||||
json_data = None
|
||||
error = e.content
|
||||
except KeyboardInterrupt:
|
||||
error = "Query cancelled by user."
|
||||
json_data = None
|
||||
except Exception:
|
||||
raise sys.exc_info()[1], None, sys.exc_info()[2]
|
||||
|
||||
return json_data, error
|
||||
|
||||
|
||||
class BigQueryGCE(BigQuery):
|
||||
@classmethod
|
||||
def type(cls):
|
||||
return "bigquery_gce"
|
||||
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {}
|
||||
|
||||
def _get_project_id(self):
|
||||
return requests.get('http://metadata/computeMetadata/v1/project/project-id', headers={'Metadata-Flavor': 'Google'}).content
|
||||
|
||||
def _get_bigquery_service(self):
|
||||
credentials = gce.AppAssertionCredentials(scope='https://www.googleapis.com/auth/bigquery')
|
||||
http = httplib2.Http()
|
||||
http = credentials.authorize(http)
|
||||
|
||||
return build("bigquery", "v2", http=http)
|
||||
|
||||
|
||||
register(BigQuery)
|
||||
register(BigQueryGCE)
|
||||
redash/query_runner/elasticsearch.py (new file, 258 lines)
@@ -0,0 +1,258 @@
|
||||
import datetime
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
import urllib
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash import models
|
||||
|
||||
import requests
|
||||
import dateutil
|
||||
from dateutil.parser import parse
|
||||
|
||||
try:
|
||||
import http.client as http_client
|
||||
except ImportError:
|
||||
# Python 2
|
||||
import httplib as http_client
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
ELASTICSEARCH_TYPES_MAPPING = {
|
||||
"integer" : TYPE_INTEGER,
|
||||
"long" : TYPE_INTEGER,
|
||||
"float" : TYPE_FLOAT,
|
||||
"double" : TYPE_FLOAT,
|
||||
"boolean" : TYPE_BOOLEAN,
|
||||
"string" : TYPE_STRING,
|
||||
"date" : TYPE_DATE,
|
||||
# "geo_point" TODO: Need to split to 2 fields somehow
|
||||
}
|
||||
|
||||
PYTHON_TYPES_MAPPING = {
|
||||
str: TYPE_STRING,
|
||||
unicode: TYPE_STRING,
|
||||
bool : TYPE_BOOLEAN,
|
||||
int : TYPE_INTEGER,
|
||||
long: TYPE_INTEGER,
|
||||
float: TYPE_FLOAT
|
||||
}
|
||||
|
||||
#
|
||||
# ElasticSearch currently supports only simple Lucene style queries (like Kibana
|
||||
# but without the aggregation).
|
||||
#
|
||||
# Full blown JSON based ElasticSearch queries (including aggregations) will be
|
||||
# added later
|
||||
#
|
||||
# Simple query example:
|
||||
#
|
||||
# - Query the index named "twitter"
|
||||
# - Filter by "user:kimchy"
|
||||
# - Return the fields: "@timestamp", "tweet" and "user"
|
||||
# - Return up to 15 results
|
||||
# - Sort by @timestamp ascending
|
||||
#
|
||||
# {
|
||||
# "index" : "twitter",
|
||||
# "query" : "user:kimchy",
|
||||
# "fields" : ["@timestamp", "tweet", "user"],
|
||||
# "size" : 15,
|
||||
# "sort" : "@timestamp:asc"
|
||||
# }
|
||||
#
|
||||
#
|
||||
# Simple query on a logstash ElasticSearch instance:
|
||||
#
|
||||
# - Query the index named "logstash-2015.04.*" (in this case its all of April 2015)
|
||||
# - Filter by type:events AND eventName:UserUpgrade AND channel:selfserve
|
||||
# - Return fields: "@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"
|
||||
# - Return up to 250 results
|
||||
# - Sort by @timestamp ascending
|
||||
|
||||
# {
|
||||
# "index" : "logstash-2015.04.*",
|
||||
# "query" : "type:events AND eventName:UserUpgrade AND channel:selfserve",
|
||||
# "fields" : ["@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"],
|
||||
# "size" : 250,
|
||||
# "sort" : "@timestamp:asc"
|
||||
# }
|
||||
#
|
||||
#
|
||||
|
||||
class ElasticSearch(BaseQueryRunner):
|
||||
DEBUG_ENABLED = False
|
||||
|
||||
"""
|
||||
ElasticSearch query runner for querying ElasticSearch servers.
|
||||
Query can be done using the Lucene Syntax (single line) or the more complex,
|
||||
full blown ElasticSearch JSON syntax
|
||||
"""
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'server': {
|
||||
'type': 'string'
|
||||
}
|
||||
},
|
||||
"required" : ["server"]
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def enabled(cls):
|
||||
return True
|
||||
|
||||
@classmethod
|
||||
def annotate_query(cls):
|
||||
return False
|
||||
|
||||
def __init__(self, configuration_json):
|
||||
super(ElasticSearch, self).__init__(configuration_json)
|
||||
|
||||
self.syntax = "json"
|
||||
|
||||
if self.DEBUG_ENABLED:
|
||||
http_client.HTTPConnection.debuglevel = 1
|
||||
|
||||
# you need to initialize logging, otherwise you will not see anything from requests
|
||||
logging.basicConfig()
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
requests_log = logging.getLogger("requests.packages.urllib3")
|
||||
requests_log.setLevel(logging.DEBUG)
|
||||
requests_log.propagate = True
|
||||
|
||||
def get_mappings(self, url):
|
||||
mappings = {}
|
||||
|
||||
r = requests.get(url)
|
||||
mappings_data = r.json()
|
||||
for index_name in mappings_data:
|
||||
index_mappings = mappings_data[index_name]
|
||||
for m in index_mappings.get("mappings", {}):
|
||||
for property_name in index_mappings["mappings"][m]["properties"]:
|
||||
property_data = index_mappings["mappings"][m]["properties"][property_name]
|
||||
if not property_name in mappings:
|
||||
property_type = property_data.get("type", None)
|
||||
if property_type:
|
||||
if property_type in ELASTICSEARCH_TYPES_MAPPING:
|
||||
mappings[property_name] = property_type
|
||||
else:
|
||||
raise "Unknown property type: {0}".format(property_type)
|
||||
|
||||
return mappings
|
||||
|
||||
def parse_results(self, mappings, result_fields, raw_result, result_columns, result_rows):
|
||||
result_columns_index = {}
|
||||
for c in result_columns:
|
||||
result_columns_index[c["name"]] = c
|
||||
|
||||
result_fields_index = {}
|
||||
if result_fields:
|
||||
for r in result_fields:
|
||||
result_fields_index[r] = None
|
||||
|
||||
for h in raw_result["hits"]["hits"]:
|
||||
row = {}
|
||||
for column in h["_source"]:
|
||||
if result_fields and column not in result_fields_index:
|
||||
continue
|
||||
|
||||
if column not in result_columns_index:
|
||||
result_columns.append({
|
||||
"name" : column,
|
||||
"friendly_name" : column,
|
||||
"type" : mappings.get(column, "string")
|
||||
})
|
||||
result_columns_index[column] = result_columns[-1]
|
||||
|
||||
row[column] = h["_source"][column]
|
||||
|
||||
if row and len(row) > 0:
|
||||
result_rows.append(row)
|
||||
|
||||
def execute_simple_query(self, url, _from, mappings, result_fields, result_columns, result_rows):
|
||||
url += "&from={0}".format(_from)
|
||||
r = requests.get(url)
|
||||
if r.status_code != 200:
|
||||
raise Exception("Failed to execute query. Return Code: {0} Reason: {1}".format(r.status_code, r.text))
|
||||
|
||||
raw_result = r.json()
|
||||
|
||||
self.parse_results(mappings, result_fields, raw_result, result_columns, result_rows)
|
||||
|
||||
total = raw_result["hits"]["total"]
|
||||
result_size = len(raw_result["hits"]["hits"])
|
||||
logger.debug("Result Size: {0} Total: {1}".format(result_size, total))
|
||||
|
||||
return raw_result["hits"]["total"]
|
||||
|
||||
def run_query(self, query):
|
||||
try:
|
||||
error = None
|
||||
|
||||
logger.debug(query)
|
||||
query_params = json.loads(query)
|
||||
|
||||
index_name = query_params["index"]
|
||||
query_data = query_params["query"]
|
||||
size = int(query_params.get("size", 500))
|
||||
result_fields = query_params.get("fields", None)
|
||||
sort = query_params.get("sort", None)
|
||||
|
||||
server_url = self.configuration["server"]
|
||||
if not server_url:
|
||||
error = "Missing configuration key 'server'"
|
||||
return None, error
|
||||
|
||||
|
||||
if server_url[-1] == "/":
|
||||
server_url = server_url[:-1]
|
||||
|
||||
url = "{0}/{1}/_search?".format(server_url, index_name)
|
||||
mapping_url = "{0}/{1}/_mapping".format(server_url, index_name)
|
||||
|
||||
mappings = self.get_mappings(mapping_url)
|
||||
|
||||
logger.debug(json.dumps(mappings, indent=4))
|
||||
|
||||
if size:
|
||||
url += "&size={0}".format(size)
|
||||
|
||||
if sort:
|
||||
url += "&sort={0}".format(urllib.quote_plus(sort))
|
||||
|
||||
url += "&q={0}".format(urllib.quote_plus(query_data))
|
||||
|
||||
logger.debug("Using URL: {0}".format(url))
|
||||
logger.debug("Using Query: {0}".format(query_data))
|
||||
|
||||
result_columns = []
|
||||
result_rows = []
|
||||
if isinstance(query_data, str) or isinstance(query_data, unicode):
|
||||
_from = 0
|
||||
while True:
|
||||
total = self.execute_simple_query(url, _from, mappings, result_fields, result_columns, result_rows)
|
||||
_from += size
|
||||
if _from >= total:
|
||||
break
|
||||
else:
|
||||
# TODO: Handle complete ElasticSearch queries (JSON based sent over HTTP POST)
|
||||
raise Exception("Advanced queries are not supported")
|
||||
|
||||
json_data = json.dumps({
|
||||
"columns" : result_columns,
|
||||
"rows" : result_rows
|
||||
})
|
||||
except KeyboardInterrupt:
|
||||
error = "Query cancelled by user."
|
||||
json_data = None
|
||||
except Exception as e:
|
||||
raise sys.exc_info()[1], None, sys.exc_info()[2]
|
||||
|
||||
return json_data, error
|
||||
|
||||
|
||||
register(ElasticSearch)
|
||||
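For the first documented example above, the simple-query path builds a URL like the following (the server address is hypothetical):

import urllib

server, index = "http://localhost:9200", "twitter"
url = "{0}/{1}/_search?".format(server, index)
url += "&size={0}".format(15)
url += "&sort={0}".format(urllib.quote_plus("@timestamp:asc"))
url += "&q={0}".format(urllib.quote_plus("user:kimchy"))
# http://localhost:9200/twitter/_search?&size=15&sort=%40timestamp%3Aasc&q=user%3Akimchy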
redash/query_runner/google_spreadsheets.py (new file, 117 lines)
@@ -0,0 +1,117 @@
|
||||
from base64 import b64decode
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
import gspread
|
||||
from oauth2client.client import SignedJwtAssertionCredentials
|
||||
from dateutil import parser
|
||||
enabled = True
|
||||
except ImportError:
|
||||
logger.warning("Missing dependencies. Please install gspread, dateutil and oauth2client.")
|
||||
logger.warning("You can use pip: pip install gspread dateutil oauth2client")
|
||||
|
||||
enabled = False
|
||||
|
||||
|
||||
def _load_key(filename):
|
||||
with open(filename, "rb") as f:
|
||||
return json.loads(f.read())
|
||||
|
||||
|
||||
def _guess_type(value):
|
||||
try:
|
||||
val = int(value)
|
||||
return TYPE_INTEGER, val
|
||||
except ValueError:
|
||||
pass
|
||||
try:
|
||||
val = float(value)
|
||||
return TYPE_FLOAT, val
|
||||
except ValueError:
|
||||
pass
|
||||
if str(value).lower() in ('true', 'false'):
|
||||
return TYPE_BOOLEAN, bool(value)
|
||||
try:
|
||||
val = parser.parse(value)
|
||||
return TYPE_DATETIME, val
|
||||
except ValueError:
|
||||
pass
|
||||
return TYPE_STRING, value
|
||||
|
||||
|
||||
class GoogleSpreadsheet(BaseQueryRunner):
|
||||
HEADER_INDEX = 0
|
||||
|
||||
@classmethod
|
||||
def annotate_query(cls):
|
||||
return False
|
||||
|
||||
@classmethod
|
||||
def type(cls):
|
||||
return "google_spreadsheets"
|
||||
|
||||
@classmethod
|
||||
def enabled(cls):
|
||||
return enabled
|
||||
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'jsonKeyFile': {
|
||||
"type": "string",
|
||||
'title': 'JSON Key File'
|
||||
}
|
||||
},
|
||||
'required': ['jsonKeyFile']
|
||||
}
|
||||
|
||||
def __init__(self, configuration_json):
|
||||
super(GoogleSpreadsheet, self).__init__(configuration_json)
|
||||
|
||||
def _get_spreadsheet_service(self):
|
||||
scope = [
|
||||
'https://spreadsheets.google.com/feeds',
|
||||
]
|
||||
|
||||
key = json.loads(b64decode(self.configuration['jsonKeyFile']))
|
||||
credentials = SignedJwtAssertionCredentials(key['client_email'], key["private_key"], scope=scope)
|
||||
spreadsheetservice = gspread.authorize(credentials)
|
||||
return spreadsheetservice
|
||||
|
||||
def run_query(self, query):
|
||||
logger.debug("Spreadsheet is about to execute query: %s", query)
|
||||
values = query.split("|")
|
||||
key = values[0]  # the spreadsheet key
worksheet_num = 0 if len(values) != 2 else int(values[1])  # worksheet index when the spreadsheet contains more than one worksheet
|
||||
try:
|
||||
spreadsheet_service = self._get_spreadsheet_service()
|
||||
spreadsheet = spreadsheet_service.open_by_key(key)
|
||||
worksheets = spreadsheet.worksheets()
|
||||
all_data = worksheets[worksheet_num].get_all_values()
|
||||
column_names = []
|
||||
columns = []
|
||||
for j, column_name in enumerate(all_data[self.HEADER_INDEX]):
|
||||
column_names.append(column_name)
|
||||
columns.append({
|
||||
'name': column_name,
|
||||
'friendly_name': column_name,
|
||||
'type': _guess_type(all_data[self.HEADER_INDEX+1][j])
|
||||
})
|
||||
rows = [dict(zip(column_names, row)) for row in all_data[self.HEADER_INDEX+1:]]
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
error = None
|
||||
except Exception as e:
|
||||
raise sys.exc_info()[1], None, sys.exc_info()[2]
|
||||
|
||||
return json_data, error
|
||||
|
||||
register(GoogleSpreadsheet)
|
||||
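Editor's note: the "query" for this runner is not SQL. As run_query above shows, it is the spreadsheet key, optionally followed by "|" and a worksheet index. A minimal sketch of a saved query (the key below is an invented placeholder):

# hypothetical example query text for a google_spreadsheets data source
# format: <spreadsheet key>|<worksheet index>
1aBcD_placeholder_spreadsheet_key|0
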
83 redash/query_runner/graphite.py Normal file
@@ -0,0 +1,83 @@
import json
import datetime
import requests
import logging
from redash.query_runner import *
from redash.utils import JSONEncoder

logger = logging.getLogger(__name__)


def _transform_result(response):
    columns = ({'name': 'Time::x', 'type': TYPE_DATETIME},
               {'name': 'value::y', 'type': TYPE_FLOAT},
               {'name': 'name::series', 'type': TYPE_STRING})

    rows = []

    for series in response.json():
        for values in series['datapoints']:
            timestamp = datetime.datetime.fromtimestamp(int(values[1]))
            rows.append({'Time::x': timestamp, 'name::series': series['target'], 'value::y': values[0]})

    data = {'columns': columns, 'rows': rows}
    return json.dumps(data, cls=JSONEncoder)


class Graphite(BaseQueryRunner):
    @classmethod
    def configuration_schema(cls):
        return {
            'type': 'object',
            'properties': {
                'url': {
                    'type': 'string'
                },
                'username': {
                    'type': 'string'
                },
                'password': {
                    'type': 'string'
                },
                'verify': {
                    'type': 'boolean',
                    'title': 'Verify SSL certificate'
                }
            },
            'required': ['url']
        }

    @classmethod
    def annotate_query(cls):
        return False

    def __init__(self, configuration_json):
        super(Graphite, self).__init__(configuration_json)

        if "username" in self.configuration and self.configuration["username"]:
            self.auth = (self.configuration["username"], self.configuration["password"])
        else:
            self.auth = None

        self.verify = self.configuration["verify"]
        self.base_url = "%s/render?format=json&" % self.configuration['url']

    def run_query(self, query):
        url = "%s%s" % (self.base_url, "&".join(query.split("\n")))
        error = None
        data = None

        try:
            response = requests.get(url, auth=self.auth, verify=self.verify)

            if response.status_code == 200:
                data = _transform_result(response)
            else:
                error = "Failed getting results (%d)" % response.status_code
        except Exception, ex:
            data = None
            error = ex.message

        return data, error


register(Graphite)

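Editor's note: run_query joins the query's lines with "&" onto the configured /render endpoint, so a query is simply Graphite render parameters, one per line. A hedged sketch (the target name is invented):

# hypothetical example query for a graphite data source;
# each line becomes one parameter of the /render?format=json request
target=stats.web01.cpu.load
from=-12hours
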
159 redash/query_runner/impala_ds.py Normal file
@@ -0,0 +1,159 @@
import json
import logging
import sys

from redash.query_runner import *
from redash.utils import JSONEncoder

logger = logging.getLogger(__name__)

try:
    from impala.dbapi import connect
    from impala.error import DatabaseError, RPCError
    enabled = True
except ImportError, e:
    logger.exception(e)
    logger.warning("Missing dependencies. Please install impyla.")
    logger.warning("You can use pip: pip install impyla")
    enabled = False

COLUMN_NAME = 0
COLUMN_TYPE = 1

types_map = {
    'BIGINT': TYPE_INTEGER,
    'TINYINT': TYPE_INTEGER,
    'SMALLINT': TYPE_INTEGER,
    'INT': TYPE_INTEGER,
    'DOUBLE': TYPE_FLOAT,
    'DECIMAL': TYPE_FLOAT,
    'FLOAT': TYPE_FLOAT,
    'REAL': TYPE_FLOAT,
    'BOOLEAN': TYPE_BOOLEAN,
    'TIMESTAMP': TYPE_DATETIME,
    'CHAR': TYPE_STRING,
    'STRING': TYPE_STRING,
    'VARCHAR': TYPE_STRING
}


class Impala(BaseQueryRunner):
    @classmethod
    def configuration_schema(cls):
        return {
            "type": "object",
            "properties": {
                "host": {
                    "type": "string"
                },
                "port": {
                    "type": "number"
                },
                "protocol": {
                    "type": "string",
                    "title": "Please specify beeswax or hiveserver2"
                },
                "database": {
                    "type": "string"
                },
                "use_ldap": {
                    "type": "boolean"
                },
                "ldap_user": {
                    "type": "string"
                },
                "ldap_password": {
                    "type": "string"
                },
                "timeout": {
                    "type": "number"
                }
            },
            "required": ["host"]
        }

    @classmethod
    def type(cls):
        return "impala"

    def __init__(self, configuration_json):
        super(Impala, self).__init__(configuration_json)

    def _run_query_internal(self, query):
        results, error = self.run_query(query)

        if error is not None:
            raise Exception("Failed getting schema.")
        return json.loads(results)['rows']

    def get_schema(self):
        try:
            schemas_query = "show schemas;"

            tables_query = "show tables in %s;"

            columns_query = "show column stats %s;"

            schema = {}
            for schema_name in map(lambda a: a['name'], self._run_query_internal(schemas_query)):
                for table_name in map(lambda a: a['name'], self._run_query_internal(tables_query % schema_name)):
                    columns = map(lambda a: a['Column'], self._run_query_internal(columns_query % table_name))

                    if schema_name != 'default':
                        table_name = '{}.{}'.format(schema_name, table_name)

                    schema[table_name] = {'name': table_name, 'columns': columns}
        except Exception, e:
            raise sys.exc_info()[1], None, sys.exc_info()[2]
        return schema.values()

    def run_query(self, query):

        connection = None
        try:
            connection = connect(**self.configuration)

            cursor = connection.cursor()

            cursor.execute(query)

            column_names = []
            columns = []

            for column in cursor.description:
                column_name = column[COLUMN_NAME]
                column_names.append(column_name)

                columns.append({
                    'name': column_name,
                    'friendly_name': column_name,
                    'type': types_map.get(column[COLUMN_TYPE], None)
                })

            rows = [dict(zip(column_names, row)) for row in cursor]

            data = {'columns': columns, 'rows': rows}
            json_data = json.dumps(data, cls=JSONEncoder)
            error = None
            cursor.close()
        except DatabaseError as e:
            logging.exception(e)
            json_data = None
            error = e.message
        except RPCError as e:
            logging.exception(e)
            json_data = None
            error = "Metastore Error [%s]" % e.message
        except KeyboardInterrupt:
            connection.cancel()
            error = "Query cancelled by user."
            json_data = None
        except Exception as e:
            logging.exception(e)
            raise sys.exc_info()[1], None, sys.exc_info()[2]
        finally:
            connection.close()

        return json_data, error


register(Impala)

83 redash/query_runner/influx_db.py Normal file
@@ -0,0 +1,83 @@
import json
import logging

from redash.utils import JSONEncoder
from redash.query_runner import *

logger = logging.getLogger(__name__)

try:
    from influxdb import InfluxDBClusterClient
    enabled = True

except ImportError:
    logger.warning("Missing dependencies. Please install influxdb.")
    logger.warning("You can use pip: pip install influxdb")
    enabled = False

def _transform_result(results):
    result_columns = []
    result_rows = []

    for result in results:
        if not result_columns:
            for c in result.raw['series'][0]['columns']:
                result_columns.append({ "name": c })

        for point in result.get_points():
            result_rows.append(point)

    return json.dumps({
        "columns" : result_columns,
        "rows" : result_rows
    }, cls=JSONEncoder)

class InfluxDB(BaseQueryRunner):
    @classmethod
    def configuration_schema(cls):
        return {
            'type': 'object',
            'properties': {
                'url': {
                    'type': 'string'
                }
            },
            'required': ['url']
        }

    @classmethod
    def enabled(cls):
        return enabled

    @classmethod
    def annotate_query(cls):
        return False

    @classmethod
    def type(cls):
        return "influxdb"

    def __init__(self, configuration_json):
        super(InfluxDB, self).__init__(configuration_json)

    def run_query(self, query):
        client = InfluxDBClusterClient.from_DSN(self.configuration['url'])

        logger.debug("influxdb url: %s", self.configuration['url'])
        logger.debug("influxdb got query: %s", query)

        try:
            results = client.query(query)
            if not isinstance(results, list):
                results = [results]

            json_data = _transform_result(results)
            error = None
        except Exception, ex:
            json_data = None
            error = ex.message

        return json_data, error


register(InfluxDB)

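Editor's note: the runner builds its client with InfluxDBClusterClient.from_DSN, so the single "url" field is expected to be an InfluxDB DSN rather than a plain HTTP URL. A minimal sketch, with invented host, credentials and database name:

# hypothetical configuration for an influxdb data source
{
    "url": "influxdb://user:password@localhost:8086/mydatabase"
}
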
273 redash/query_runner/mongodb.py Normal file
@@ -0,0 +1,273 @@
import json
import datetime
import logging
import re
import time
from dateutil.parser import parse

from redash.utils import JSONEncoder
from redash.query_runner import *

logger = logging.getLogger(__name__)

try:
    import pymongo
    from bson.objectid import ObjectId
    from bson.son import SON
    enabled = True

except ImportError:
    logger.warning("Missing dependencies. Please install pymongo.")
    logger.warning("You can use pip: pip install pymongo")
    enabled = False


TYPES_MAP = {
    str: TYPE_STRING,
    unicode: TYPE_STRING,
    int: TYPE_INTEGER,
    long: TYPE_INTEGER,
    float: TYPE_FLOAT,
    bool: TYPE_BOOLEAN,
    datetime.datetime: TYPE_DATETIME,
}

date_regex = re.compile("ISODate\(\"(.*)\"\)", re.IGNORECASE)

class MongoDBJSONEncoder(JSONEncoder):
    def default(self, o):
        if isinstance(o, ObjectId):
            return str(o)

        return super(MongoDBJSONEncoder, self).default(o)

# Simple query example:
#
# {
#     "collection" : "my_collection",
#     "query" : {
#         "date" : {
#             "$gt" : "ISODate(\"2015-01-15 11:41\")",
#         },
#         "type" : 1
#     },
#     "fields" : {
#         "_id" : 1,
#         "name" : 2
#     },
#     "sort" : [
#         {
#             "name" : "date",
#             "direction" : -1
#         }
#     ]
#
# }
#
#
# Aggregation
# ===========
# Uses a syntax similar to the one used in PyMongo, however to support the
# correct order of sorting, it uses a regular list for the "$sort" operation
# that converts into a SON (sorted dictionary) object before execution.
#
# Aggregation query example:
#
# {
#     "collection" : "things",
#     "aggregate" : [
#         {
#             "$unwind" : "$tags"
#         },
#         {
#             "$group" : {
#                 "_id" : "$tags",
#                 "count" : { "$sum" : 1 }
#             }
#         },
#         {
#             "$sort" : [
#                 {
#                     "name" : "count",
#                     "direction" : -1
#                 },
#                 {
#                     "name" : "_id",
#                     "direction" : -1
#                 }
#             ]
#         }
#     ]
# }
#
#
class MongoDB(BaseQueryRunner):
    @classmethod
    def configuration_schema(cls):
        return {
            'type': 'object',
            'properties': {
                'connectionString': {
                    'type': 'string',
                    'title': 'Connection String'
                },
                'dbName': {
                    'type': 'string',
                    'title': "Database Name"
                },
                'replicaSetName': {
                    'type': 'string',
                    'title': 'Replica Set Name'
                },
            },
            'required': ['connectionString']
        }

    @classmethod
    def enabled(cls):
        return enabled

    @classmethod
    def annotate_query(cls):
        return False

    def __init__(self, configuration_json):
        super(MongoDB, self).__init__(configuration_json)

        self.syntax = 'json'

        self.db_name = self.configuration["dbName"]

        self.is_replica_set = True if "replicaSetName" in self.configuration and self.configuration["replicaSetName"] else False

    def _get_column_by_name(self, columns, column_name):
        for c in columns:
            if "name" in c and c["name"] == column_name:
                return c

        return None

    def _fix_dates(self, data):
        for k in data:
            if isinstance(data[k], list):
                for i in range(0, len(data[k])):
                    self._fix_dates(data[k][i])
            elif isinstance(data[k], dict):
                self._fix_dates(data[k])
            else:
                if isinstance(data[k], (str, unicode)):
                    self._convert_date(data, k)

    def _convert_date(self, q, field_name):
        m = date_regex.findall(q[field_name])
        if len(m) > 0:
            q[field_name] = parse(m[0], yearfirst=True)

    def run_query(self, query):
        if self.is_replica_set:
            db_connection = pymongo.MongoReplicaSetClient(self.configuration["connectionString"], replicaSet=self.configuration["replicaSetName"])
        else:
            db_connection = pymongo.MongoClient(self.configuration["connectionString"])

        db = db_connection[self.db_name]

        logger.debug("mongodb connection string: %s", self.configuration['connectionString'])
        logger.debug("mongodb got query: %s", query)

        try:
            query_data = json.loads(query)
            self._fix_dates(query_data)
        except ValueError:
            return None, "Invalid query format. The query is not a valid JSON."

        if "collection" not in query_data:
            return None, "'collection' must have a value to run a query"
        else:
            collection = query_data["collection"]

        q = query_data.get("query", None)
        f = None

        aggregate = query_data.get("aggregate", None)
        if aggregate:
            for step in aggregate:
                if "$sort" in step:
                    sort_list = []
                    for sort_item in step["$sort"]:
                        sort_list.append((sort_item["name"], sort_item["direction"]))

                    step["$sort"] = SON(sort_list)

        if not aggregate:
            s = None
            if "sort" in query_data and query_data["sort"]:
                s = []
                for field in query_data["sort"]:
                    s.append((field["name"], field["direction"]))

        if "fields" in query_data:
            f = query_data["fields"]

        s = None
        if "sort" in query_data and query_data["sort"]:
            s = []
            for field_data in query_data["sort"]:
                s.append((field_data["name"], field_data["direction"]))

        columns = []
        rows = []

        cursor = None
        if q or (not q and not aggregate):
            if s:
                cursor = db[collection].find(q, f).sort(s)
            else:
                cursor = db[collection].find(q, f)

            if "skip" in query_data:
                cursor = cursor.skip(query_data["skip"])

            if "limit" in query_data:
                cursor = cursor.limit(query_data["limit"])

        elif aggregate:
            r = db[collection].aggregate(aggregate)

            # Backwards compatibility with older pymongo versions.
            #
            # Older pymongo version would return a dictionary from an aggregate command.
            # The dict would contain a "result" key which would hold the cursor.
            # Newer ones return pymongo.command_cursor.CommandCursor.
            if isinstance(r, dict):
                cursor = r["result"]
            else:
                cursor = r

        for r in cursor:
            for k in r:
                if self._get_column_by_name(columns, k) is None:
                    columns.append({
                        "name": k,
                        "friendly_name": k,
                        "type": TYPES_MAP.get(type(r[k]), TYPE_STRING)
                    })

            rows.append(r)

        if f:
            ordered_columns = []
            for k in sorted(f, key=f.get):
                ordered_columns.append(self._get_column_by_name(columns, k))

            columns = ordered_columns

        data = {
            "columns": columns,
            "rows": rows
        }
        error = None
        json_data = json.dumps(data, cls=MongoDBJSONEncoder)

        return json_data, error


register(MongoDB)

152 redash/query_runner/mysql.py Normal file
@@ -0,0 +1,152 @@
import sys
import json
import logging

from redash.utils import JSONEncoder
from redash.query_runner import *

logger = logging.getLogger(__name__)

types_map = {
    0: TYPE_FLOAT,
    1: TYPE_INTEGER,
    2: TYPE_INTEGER,
    3: TYPE_INTEGER,
    4: TYPE_FLOAT,
    5: TYPE_FLOAT,
    7: TYPE_DATETIME,
    8: TYPE_INTEGER,
    9: TYPE_INTEGER,
    10: TYPE_DATE,
    12: TYPE_DATETIME,
    15: TYPE_STRING,
    16: TYPE_INTEGER,
    246: TYPE_FLOAT,
    253: TYPE_STRING,
    254: TYPE_STRING,
}

class Mysql(BaseQueryRunner):
    @classmethod
    def configuration_schema(cls):
        return {
            'type': 'object',
            'properties': {
                'host': {
                    'type': 'string'
                },
                'user': {
                    'type': 'string'
                },
                'passwd': {
                    'type': 'string',
                    'title': 'Password'
                },
                'db': {
                    'type': 'string',
                    'title': 'Database name'
                },
                "port": {
                    "type": "number"
                },
            },
            'required': ['db']
        }

    @classmethod
    def enabled(cls):
        try:
            import MySQLdb
        except ImportError:
            return False

        return True

    def __init__(self, configuration_json):
        super(Mysql, self).__init__(configuration_json)

    def get_schema(self):
        query = """
        SELECT col.table_schema,
               col.table_name,
               col.column_name
        FROM `information_schema`.`columns` col
        INNER JOIN
          (SELECT table_schema,
                  TABLE_NAME
           FROM information_schema.tables
           WHERE table_type <> 'SYSTEM VIEW' AND table_schema NOT IN ('performance_schema', 'mysql')) tables ON tables.table_schema = col.table_schema
        AND tables.TABLE_NAME = col.TABLE_NAME;
        """

        results, error = self.run_query(query)

        if error is not None:
            raise Exception("Failed getting schema.")

        results = json.loads(results)

        schema = {}
        for row in results['rows']:
            if row['table_schema'] != self.configuration['db']:
                table_name = '{}.{}'.format(row['table_schema'], row['table_name'])
            else:
                table_name = row['table_name']

            if table_name not in schema:
                schema[table_name] = {'name': table_name, 'columns': []}

            schema[table_name]['columns'].append(row['column_name'])

        return schema.values()

    def run_query(self, query):
        import MySQLdb

        connection = None
        try:
            connection = MySQLdb.connect(host=self.configuration.get('host', ''),
                                         user=self.configuration.get('user', ''),
                                         passwd=self.configuration.get('passwd', ''),
                                         db=self.configuration['db'],
                                         port=self.configuration.get('port', 3306),
                                         charset='utf8', use_unicode=True)
            cursor = connection.cursor()
            logger.debug("MySQL running query: %s", query)
            cursor.execute(query)

            data = cursor.fetchall()

            # TODO - very similar to pg.py
            if cursor.description is not None:
                columns_data = [(i[0], i[1]) for i in cursor.description]

                rows = [dict(zip((c[0] for c in columns_data), row)) for row in data]

                columns = [{'name': col[0],
                            'friendly_name': col[0],
                            'type': types_map.get(col[1], None)} for col in columns_data]

                data = {'columns': columns, 'rows': rows}
                json_data = json.dumps(data, cls=JSONEncoder)
                error = None
            else:
                json_data = None
                error = "No data was returned."

            cursor.close()
        except MySQLdb.Error, e:
            json_data = None
            error = e.args[1]
        except KeyboardInterrupt:
            error = "Query cancelled by user."
            json_data = None
        except Exception as e:
            raise sys.exc_info()[1], None, sys.exc_info()[2]
        finally:
            if connection:
                connection.close()

        return json_data, error


register(Mysql)

173 redash/query_runner/pg.py Normal file
@@ -0,0 +1,173 @@
import json
import logging
import psycopg2
import select
import sys

from redash.query_runner import *
from redash.utils import JSONEncoder

logger = logging.getLogger(__name__)

types_map = {
    20: TYPE_INTEGER,
    21: TYPE_INTEGER,
    23: TYPE_INTEGER,
    700: TYPE_FLOAT,
    1700: TYPE_FLOAT,
    701: TYPE_FLOAT,
    16: TYPE_BOOLEAN,
    1082: TYPE_DATE,
    1114: TYPE_DATETIME,
    1184: TYPE_DATETIME,
    1014: TYPE_STRING,
    1015: TYPE_STRING,
    1008: TYPE_STRING,
    1009: TYPE_STRING,
    2951: TYPE_STRING
}


def _wait(conn):
    while 1:
        try:
            state = conn.poll()
            if state == psycopg2.extensions.POLL_OK:
                break
            elif state == psycopg2.extensions.POLL_WRITE:
                select.select([], [conn.fileno()], [])
            elif state == psycopg2.extensions.POLL_READ:
                select.select([conn.fileno()], [], [])
            else:
                raise psycopg2.OperationalError("poll() returned %s" % state)
        except select.error:
            raise psycopg2.OperationalError("select.error received")


class PostgreSQL(BaseQueryRunner):
    @classmethod
    def configuration_schema(cls):
        return {
            "type": "object",
            "properties": {
                "user": {
                    "type": "string"
                },
                "password": {
                    "type": "string"
                },
                "host": {
                    "type": "string"
                },
                "port": {
                    "type": "number"
                },
                "dbname": {
                    "type": "string",
                    "title": "Database Name"
                }
            },
            "required": ["dbname"]
        }

    @classmethod
    def type(cls):
        return "pg"

    def __init__(self, configuration_json):
        super(PostgreSQL, self).__init__(configuration_json)

        values = []
        for k, v in self.configuration.iteritems():
            values.append("{}={}".format(k, v))

        self.connection_string = " ".join(values)

    def get_schema(self):
        query = """
        SELECT table_schema, table_name, column_name
        FROM information_schema.columns
        WHERE table_schema NOT IN ('pg_catalog', 'information_schema');
        """

        results, error = self.run_query(query)

        if error is not None:
            raise Exception("Failed getting schema.")

        results = json.loads(results)

        schema = {}
        for row in results['rows']:
            if row['table_schema'] != 'public':
                table_name = '{}.{}'.format(row['table_schema'], row['table_name'])
            else:
                table_name = row['table_name']

            if table_name not in schema:
                schema[table_name] = {'name': table_name, 'columns': []}

            schema[table_name]['columns'].append(row['column_name'])

        return schema.values()

    def run_query(self, query):
        connection = psycopg2.connect(self.connection_string, async=True)
        _wait(connection)

        cursor = connection.cursor()

        try:
            cursor.execute(query)
            _wait(connection)

            # While set would be more efficient here, it sorts the data which is not what we want, but due to the small
            # size of the data we can assume it's ok.
            column_names = []
            columns = []
            duplicates_counter = 1

            if cursor.description is not None:
                for column in cursor.description:
                    # TODO: this deduplication needs to be generalized and reused in all query runners.
                    column_name = column.name
                    if column_name in column_names:
                        column_name += str(duplicates_counter)
                        duplicates_counter += 1

                    column_names.append(column_name)

                    columns.append({
                        'name': column_name,
                        'friendly_name': column_name,
                        'type': types_map.get(column.type_code, None)
                    })

                rows = [dict(zip(column_names, row)) for row in cursor]

                data = {'columns': columns, 'rows': rows}
                error = None
                json_data = json.dumps(data, cls=JSONEncoder)
            else:
                error = 'Query completed but it returned no data.'
                json_data = None
        except (select.error, OSError) as e:
            logging.exception(e)
            error = "Query interrupted. Please retry."
            json_data = None
        except psycopg2.DatabaseError as e:
            logging.exception(e)
            error = e.message
            json_data = None
        except KeyboardInterrupt:
            connection.cancel()
            error = "Query cancelled by user."
            json_data = None
        except Exception as e:
            raise sys.exc_info()[1], None, sys.exc_info()[2]
        finally:
            connection.close()

        return json_data, error


register(PostgreSQL)

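Editor's note: __init__ above turns the configuration dict directly into a libpq keyword/value connection string. A hedged sketch with invented values (key order may vary, since the dict is iterated as-is):

# hypothetical configuration for a pg data source
{
    "user": "redash",
    "host": "localhost",
    "port": 5432,
    "dbname": "analytics"
}
# -> connection_string becomes something like: "user=redash host=localhost port=5432 dbname=analytics"
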
98 redash/query_runner/presto.py Normal file
@@ -0,0 +1,98 @@
import json

from redash.utils import JSONEncoder
from redash.query_runner import *

import logging
logger = logging.getLogger(__name__)

try:
    from pyhive import presto
    enabled = True

except ImportError:
    logger.warning("Missing dependencies. Please install PyHive.")
    logger.warning("You can use pip: pip install pyhive")
    enabled = False

PRESTO_TYPES_MAPPING = {
    "integer" : TYPE_INTEGER,
    "long" : TYPE_INTEGER,
    "bigint" : TYPE_INTEGER,
    "float" : TYPE_FLOAT,
    "double" : TYPE_FLOAT,
    "boolean" : TYPE_BOOLEAN,
    "string" : TYPE_STRING,
    "varchar": TYPE_STRING,
    "date" : TYPE_DATE,
}

class Presto(BaseQueryRunner):
    @classmethod
    def configuration_schema(cls):
        return {
            'type': 'object',
            'properties': {
                'host': {
                    'type': 'string'
                },
                'port': {
                    'type': 'number'
                },
                'schema': {
                    'type': 'string'
                },
                'catalog': {
                    'type': 'string'
                },
                'username': {
                    'type': 'string'
                }
            },
            'required': ['host']
        }

    @classmethod
    def enabled(cls):
        return enabled

    @classmethod
    def annotate_query(cls):
        return False

    @classmethod
    def type(cls):
        return "presto"

    def __init__(self, configuration_json):
        super(Presto, self).__init__(configuration_json)

    def run_query(self, query):
        connection = presto.connect(
            host=self.configuration.get('host', ''),
            port=self.configuration.get('port', 8080),
            username=self.configuration.get('username', 'redash'),
            catalog=self.configuration.get('catalog', 'hive'),
            schema=self.configuration.get('schema', 'default'))

        cursor = connection.cursor()

        try:
            cursor.execute(query)
            columns_data = [(row[0], row[1]) for row in cursor.description]

            columns = [{'name': col[0],
                        'friendly_name': col[0],
                        'type': PRESTO_TYPES_MAPPING.get(col[1], None)} for col in columns_data]

            rows = [dict(zip(([c[0] for c in columns_data]), r)) for i, r in enumerate(cursor.fetchall())]
            data = {'columns': columns, 'rows': rows}
            json_data = json.dumps(data, cls=JSONEncoder)
            error = None
        except Exception, ex:
            json_data = None
            error = ex.message

        return json_data, error


register(Presto)

203 redash/query_runner/python.py Normal file
@@ -0,0 +1,203 @@
import sys
import datetime
import json
import logging
import weakref

from redash.query_runner import *
from redash import models

import importlib

logger = logging.getLogger(__name__)

from RestrictedPython import compile_restricted
from RestrictedPython.Guards import safe_builtins


class CustomPrint(object):
    """ CustomPrint redirect "print" calls to be sent as "log" on the result object """
    def __init__(self):
        self.enabled = True
        self.lines = []

    def write(self, text):
        if self.enabled:
            if text and text.strip():
                log_line = "[{0}] {1}".format(datetime.datetime.utcnow().isoformat(), text)
                self.lines.append(log_line)

    def enable(self):
        self.enabled = True

    def disable(self):
        self.enabled = False

    def __call__(self):
        return self


class Python(BaseQueryRunner):
    @classmethod
    def configuration_schema(cls):
        return {
            'type': 'object',
            'properties': {
                'allowedImportModules': {
                    'type': 'string',
                    'title': 'Modules to import prior to running the script'
                }
            },
        }

    @classmethod
    def enabled(cls):
        return True

    @classmethod
    def annotate_query(cls):
        return False

    def __init__(self, configuration_json):
        super(Python, self).__init__(configuration_json)

        self.syntax = "python"

        self._allowed_modules = {}
        self._script_locals = { "result" : { "rows" : [], "columns" : [], "log" : [] } }
        self._enable_print_log = True
        self._custom_print = CustomPrint()

        if self.configuration.get("allowedImportModules", None):
            for item in self.configuration["allowedImportModules"].split(","):
                self._allowed_modules[item] = None

    def custom_import(self, name, globals=None, locals=None, fromlist=(), level=0):
        if name in self._allowed_modules:
            m = None
            if self._allowed_modules[name] is None:
                m = importlib.import_module(name)
                self._allowed_modules[name] = m
            else:
                m = self._allowed_modules[name]

            return m

        raise Exception("'{0}' is not configured as a supported import module".format(name))

    def custom_write(self, obj):
        """
        Custom hooks which controls the way objects/lists/tuples/dicts behave in
        RestrictedPython
        """
        return obj

    def custom_get_item(self, obj, key):
        return obj[key]

    def custom_get_iter(self, obj):
        return iter(obj)

    def add_result_column(self, result, column_name, friendly_name, column_type):
        """ Helper function to add columns inside a Python script running in re:dash in an easier way """
        if column_type not in SUPPORTED_COLUMN_TYPES:
            raise Exception("'{0}' is not a supported column type".format(column_type))

        if not "columns" in result:
            result["columns"] = []

        result["columns"].append({
            "name" : column_name,
            "friendly_name" : friendly_name,
            "type" : column_type
        })

    def add_result_row(self, result, values):
        if not "rows" in result:
            result["rows"] = []

        result["rows"].append(values)

    def execute_query(self, data_source_name_or_id, query):
        try:
            if type(data_source_name_or_id) == int:
                data_source = models.DataSource.get_by_id(data_source_name_or_id)
            else:
                data_source = models.DataSource.get(models.DataSource.name==data_source_name_or_id)
        except models.DataSource.DoesNotExist:
            raise Exception("Wrong data source name/id: %s." % data_source_name_or_id)

        query_runner = get_query_runner(data_source.type, data_source.options)

        data, error = query_runner.run_query(query)
        if error is not None:
            raise Exception(error)

        # TODO: allow avoiding the json.dumps/loads in same process
        return json.loads(data)

    def get_query_result(self, query_id):
        try:
            query = models.Query.get_by_id(query_id)
        except models.Query.DoesNotExist:
            raise Exception("Query id %s does not exist." % query_id)

        if query.latest_query_data is None:
            raise Exception("Query does not have results yet.")

        if query.latest_query_data.data is None:
            raise Exception("Query does not have results yet.")

        return json.loads(query.latest_query_data.data)

    def run_query(self, query):
        try:
            error = None

            code = compile_restricted(query, '<string>', 'exec')

            safe_builtins["_write_"] = self.custom_write
            safe_builtins["__import__"] = self.custom_import
            safe_builtins["_getattr_"] = getattr
            safe_builtins["getattr"] = getattr
            safe_builtins["_setattr_"] = setattr
            safe_builtins["setattr"] = setattr
            safe_builtins["_getitem_"] = self.custom_get_item
            safe_builtins["_getiter_"] = self.custom_get_iter
            safe_builtins["_print_"] = self._custom_print

            restricted_globals = dict(__builtins__=safe_builtins)
            restricted_globals["get_query_result"] = self.get_query_result
            restricted_globals["execute_query"] = self.execute_query
            restricted_globals["add_result_column"] = self.add_result_column
            restricted_globals["add_result_row"] = self.add_result_row
            restricted_globals["disable_print_log"] = self._custom_print.disable
            restricted_globals["enable_print_log"] = self._custom_print.enable

            restricted_globals["TYPE_DATETIME"] = TYPE_DATETIME
            restricted_globals["TYPE_BOOLEAN"] = TYPE_BOOLEAN
            restricted_globals["TYPE_INTEGER"] = TYPE_INTEGER
            restricted_globals["TYPE_STRING"] = TYPE_STRING
            restricted_globals["TYPE_DATE"] = TYPE_DATE
            restricted_globals["TYPE_FLOAT"] = TYPE_FLOAT

            # TODO: Figure out the best way to have a timeout on a script
            #       One option is to use ETA with Celery + timeouts on workers
            #       And replacement of worker process every X requests handled.

            exec(code) in restricted_globals, self._script_locals

            result = self._script_locals['result']
            result['log'] = self._custom_print.lines
            json_data = json.dumps(result)
        except KeyboardInterrupt:
            error = "Query cancelled by user."
            json_data = None
        except Exception as e:
            error = str(e)
            json_data = None

        return json_data, error


register(Python)

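Editor's note: a short sketch of the kind of script this runner executes inside RestrictedPython. It only uses the helpers injected into restricted_globals above (add_result_column, add_result_row, execute_query) plus the pre-seeded "result" local; the data source name is invented:

# hypothetical example script for a python data source
add_result_column(result, "name", "Name", TYPE_STRING)
add_result_column(result, "total", "Total", TYPE_INTEGER)

# run a query against another (invented) data source and reuse its rows
other = execute_query("my_postgres_source", "SELECT 1 AS x")
for row in other["rows"]:
    add_result_row(result, {"name": "x", "total": row["x"]})
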
67 redash/query_runner/script.py Normal file
@@ -0,0 +1,67 @@
import os
import sys
import subprocess

from redash.query_runner import *


class Script(BaseQueryRunner):
    @classmethod
    def enabled(cls):
        return "check_output" in subprocess.__dict__

    @classmethod
    def configuration_schema(cls):
        return {
            'type': 'object',
            'properties': {
                'path': {
                    'type': 'string',
                    'title': 'Scripts path'
                }
            },
            'required': ['path']
        }

    @classmethod
    def annotate_query(cls):
        return False

    def __init__(self, configuration_json):
        super(Script, self).__init__(configuration_json)

        # Poor man's protection against running scripts from outside the scripts directory
        if self.configuration["path"].find("../") > -1:
            raise ValidationError("Scripts can only be run from the configured scripts directory")

    def run_query(self, query):
        try:
            json_data = None
            error = None

            query = query.strip()

            script = os.path.join(self.configuration["path"], query.split(" ")[0])
            if not os.path.exists(script):
                return None, "Script '%s' not found in script directory" % query

            script = os.path.join(self.configuration["path"], query)

            output = subprocess.check_output(script.split(" "), shell=False)
            if output is not None:
                output = output.strip()
                if output != "":
                    return output, None

            error = "Error reading output"
        except subprocess.CalledProcessError as e:
            return None, str(e)
        except KeyboardInterrupt:
            error = "Query cancelled by user."
            json_data = None
        except Exception as e:
            raise sys.exc_info()[1], None, sys.exc_info()[2]

        return json_data, error


register(Script)

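Editor's note: a query for this runner is just a script name, resolved under the configured scripts path, plus optional arguments split on spaces. For example (script name invented):

# hypothetical example query for a script data source
count_events.sh --since yesterday
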
@@ -1,16 +1,30 @@
import json
import logging
import sys
import os
import urllib2

def url(connection_string):
from redash.query_runner import *

def query_runner(query):
base_url = connection_string

class Url(BaseQueryRunner):
    @classmethod
    def configuration_schema(cls):
        return {
            'type': 'object',
            'properties': {
                'url': {
                    'type': 'string',
                    'title': 'URL base path'
                }
            }
        }

    @classmethod
    def annotate_query(cls):
        return False

    def run_query(self, query):
        base_url = self.configuration["url"]

        try:
            json_data = None
            error = None

            query = query.strip()
@@ -41,5 +55,4 @@ def url(connection_string):

        return json_data, error

query_runner.annotate_query = False
return query_runner
register(Url)

145 redash/saml_auth.py Normal file
@@ -0,0 +1,145 @@
# Copyright 2015 Okta, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from flask.ext.login import login_user
import requests
from flask import redirect, url_for, Blueprint, request
from flask_oauth import OAuth
from redash import models, settings
from saml2 import (
    BINDING_HTTP_POST,
    BINDING_HTTP_REDIRECT,
    entity,
)
from saml2.client import Saml2Client
from saml2.config import Config as Saml2Config


logger = logging.getLogger('saml_auth')

blueprint = Blueprint('saml_auth', __name__)

def get_saml_client():
    '''
    Return saml configuration.
    The configuration is a hash for use by saml2.config.Config
    '''

    if settings.SAML_CALLBACK_SERVER_NAME:
        acs_url = settings.SAML_CALLBACK_SERVER_NAME + url_for("saml_auth.idp_initiated")
    else:
        acs_url = url_for("saml_auth.idp_initiated", _external=True)

    # NOTE:
    #   Ideally, this should fetch the metadata and pass it to
    #   PySAML2 via the "inline" metadata type.
    #   However, this method doesn't seem to work on PySAML2 v2.4.0
    #
    #   SAML metadata changes very rarely. On a production system,
    #   this data should be cached as appropriate for your production system.
    rv = requests.get(settings.SAML_METADATA_URL)
    import tempfile
    tmp = tempfile.NamedTemporaryFile()
    f = open(tmp.name, 'w')
    f.write(rv.text)
    f.close()

    saml_settings = {
        'metadata': {
            # 'inline': metadata,
            "local": [tmp.name]
        },
        'service': {
            'sp': {
                'endpoints': {
                    'assertion_consumer_service': [
                        (acs_url, BINDING_HTTP_REDIRECT),
                        (acs_url, BINDING_HTTP_POST)
                    ],
                },
                # Don't verify that the incoming requests originate from us via
                # the built-in cache for authn request ids in pysaml2
                'allow_unsolicited': True,
                # Don't sign authn requests, since signed requests only make
                # sense in a situation where you control both the SP and IdP
                'authn_requests_signed': False,
                'logout_requests_signed': True,
                'want_assertions_signed': True,
                'want_response_signed': False,
            },
        },
    }
    spConfig = Saml2Config()
    spConfig.load(saml_settings)
    spConfig.allow_unknown_attributes = True
    saml_client = Saml2Client(config=spConfig)
    tmp.close()
    return saml_client


@blueprint.route("/saml/callback", methods=['POST'])
def idp_initiated():
    saml_client = get_saml_client()
    authn_response = saml_client.parse_authn_request_response(
        request.form['SAMLResponse'],
        entity.BINDING_HTTP_POST)
    authn_response.get_identity()
    user_info = authn_response.get_subject()
    email = user_info.text
    name = "%s %s" % (authn_response.ava['FirstName'][0], authn_response.ava['LastName'][0])

    # This is what is known as "Just In Time (JIT) provisioning".
    # What that means is that, if a user in a SAML assertion
    # isn't in the user store, we create that user first, then log them in
    try:
        user_object = models.User.get(models.User.email == email)
        if user_object.name != name:
            logger.debug("Updating user name (%r -> %r)", user_object.name, name)
            user_object.name = name
            user_object.save()
    except models.User.DoesNotExist:
        logger.debug("Creating user object (%r)", name)
        user_object = models.User.create(name=name, email=email, groups=models.User.DEFAULT_GROUPS)

    login_user(user_object, remember=True)
    url = url_for('index')

    return redirect(url)

@blueprint.route("/saml/login")
def sp_initiated():
    if not settings.SAML_METADATA_URL:
        logger.error("Cannot invoke saml endpoint without metadata url in settings.")
        return redirect(url_for('index'))

    saml_client = get_saml_client()
    reqid, info = saml_client.prepare_for_authenticate()

    redirect_url = None
    # Select the IdP URL to send the AuthN request to
    for key, value in info['headers']:
        if key is 'Location':
            redirect_url = value
    response = redirect(redirect_url, code=302)
    # NOTE:
    #   I realize I _technically_ don't need to set Cache-Control or Pragma:
    #     http://stackoverflow.com/a/5494469
    #   However, Section 3.2.3.2 of the SAML spec suggests they are set:
    #     http://docs.oasis-open.org/security/saml/v2.0/saml-bindings-2.0-os.pdf
    #   We set those headers here as a "belt and suspenders" approach,
    #   since enterprise environments don't always conform to RFCs
    response.headers['Cache-Control'] = 'no-cache, no-store'
    response.headers['Pragma'] = 'no-cache'
    return response

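Editor's note: the blueprint relies on the two settings referenced above. A minimal sketch of enabling it (both values are placeholders, and the exact settings mechanics are an assumption):

# hypothetical settings for SAML login
SAML_METADATA_URL = "https://idp.example.com/app/metadata"      # placeholder IdP metadata URL
SAML_CALLBACK_SERVER_NAME = "https://redash.example.com"        # optional; used to build the ACS URL
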
Some files were not shown because too many files have changed in this diff.