Mirror of https://github.com/getredash/redash.git (synced 2025-12-25 01:03:20 -05:00)

Compare commits: 275 commits, v0.6.0+b72 ... 0.7.0
.env.example (15 lines changed)

@@ -1,9 +1,6 @@
REDASH_CONNECTION_ADAPTER=pg
REDASH_CONNECTION_STRING="dbname=data"
REDASH_STATIC_ASSETS_PATH=../rd_ui/app/
REDASH_GOOGLE_APPS_DOMAIN=
REDASH_ADMINS=
REDASH_WORKERS_COUNT=2
REDASH_COOKIE_SECRET=
REDASH_DATABASE_URL='postgresql://rd'
REDASH_LOG_LEVEL = "INFO"
export REDASH_STATIC_ASSETS_PATH="../rd_ui/app/"
export REDASH_LOG_LEVEL="INFO"
export REDASH_REDIS_URL=redis://localhost:6379/1
export REDASH_DATABASE_URL="postgresql://redash"
export REDASH_COOKIE_SECRET=veryverysecret
export REDASH_GOOGLE_APPS_DOMAIN=
.gitignore (vendored, 1 line changed)

@@ -8,6 +8,7 @@ celerybeat-schedule*
.#*
\#*#
*~
_build

# Vagrant related
.vagrant
Makefile (7 lines changed)

@@ -1,6 +1,7 @@
NAME=redash
VERSION=`python ./manage.py version`
FULL_VERSION=$(VERSION)+b$(CIRCLE_BUILD_NUM)
BASE_VERSION=$(shell python ./manage.py version | cut -d + -f 1)
# VERSION gets evaluated every time it's referenced, therefore we need to use VERSION here instead of FULL_VERSION.
FILENAME=$(CIRCLE_ARTIFACTS)/$(NAME).$(VERSION).tar.gz

@@ -12,11 +13,11 @@ deps:

pack:
sed -ri "s/^__version__ = '([0-9.]*)'/__version__ = '$(FULL_VERSION)'/" redash/__init__.py
tar -zcv -f $(FILENAME) --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *
tar -zcv -f $(FILENAME) --exclude="optipng*" --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *

upload:
python bin/upload_version.py $(VERSION) $(FILENAME)
python bin/release_manager.py $(CIRCLE_SHA1) $(BASE_VERSION) $(FILENAME)

test:
nosetests --with-coverage --cover-package=redash tests/*.py
cd rd_ui && grunt test
#cd rd_ui && grunt test
@@ -1,6 +1,5 @@
<p align="center">
<img title="re:dash" src='https://raw.githubusercontent.com/EverythingMe/redash/screenshots/redash_logo.png' />

<img title="re:dash" src='http://redash.io/static/img/redash_logo.png' width="200px"/>
</p>
<p align="center">
<img title="Build Status" src='https://circleci.com/gh/EverythingMe/redash.png?circle-token=8a695aa5ec2cbfa89b48c275aea298318016f040'/>
@@ -11,7 +10,8 @@
Prior to **_re:dash_**, we tried to use traditional BI suites and discovered a set of bloated, technically challenged and slow tools/flows. What we were looking for was a more hacker'ish way to look at data, so we built one.

**_re:dash_** was built to allow fast and easy access to billions of records, that we process and collect using Amazon Redshift ("petabyte scale data warehouse" that "speaks" PostgreSQL).
Today **_re:dash_** has support for querying multiple databases, including: Redshift, Google BigQuery, PostgreSQL, MySQL, Graphite and custom scripts.
Today **_re:dash_** has support for querying multiple databases, including: Redshift, Google BigQuery, PostgreSQL, MySQL, Graphite,
Presto, Google Spreadsheets, Cloudera Impala and custom scripts.

**_re:dash_** consists of two parts:

@@ -28,7 +28,7 @@ You can try out the demo instance: http://demo.redash.io/ (login with any Google

## Getting Started

* [Setting up re:dash instance](https://github.com/EverythingMe/redash/wiki/Setting-up-re:dash-instance) (includes links to ready made AWS/GCE images).
* [Setting up re:dash instance](http://redash.io/deployment/setup.html) (includes links to ready made AWS/GCE images).
* Additional documentation in the [Wiki](https://github.com/everythingme/redash/wiki).
@@ -1,30 +0,0 @@
|
||||
#!/usr/bin/env python
|
||||
import sys
|
||||
import requests
|
||||
|
||||
if __name__ == '__main__':
|
||||
response = requests.get('https://api.github.com/repos/EverythingMe/redash/releases')
|
||||
|
||||
if response.status_code != 200:
|
||||
exit("Failed getting releases (status code: %s)." % response.status_code)
|
||||
|
||||
sorted_releases = sorted(response.json(), key=lambda release: release['id'], reverse=True)
|
||||
|
||||
latest_release = sorted_releases[0]
|
||||
asset_url = latest_release['assets'][0]['url']
|
||||
filename = latest_release['assets'][0]['name']
|
||||
|
||||
wget_command = 'wget --header="Accept: application/octet-stream" %s -O %s' % (asset_url, filename)
|
||||
|
||||
if '--url-only' in sys.argv:
|
||||
print asset_url
|
||||
elif '--wget' in sys.argv:
|
||||
print wget_command
|
||||
else:
|
||||
print "Latest release: %s" % latest_release['tag_name']
|
||||
print latest_release['body']
|
||||
|
||||
print "\nTarball URL: %s" % asset_url
|
||||
print 'wget: %s' % (wget_command)
|
||||
|
||||
|
||||
bin/release_manager.py (new file, 147 lines)
@@ -0,0 +1,147 @@
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import re
|
||||
import subprocess
|
||||
import requests
|
||||
|
||||
github_token = os.environ['GITHUB_TOKEN']
|
||||
auth = (github_token, 'x-oauth-basic')
|
||||
repo = 'EverythingMe/redash'
|
||||
|
||||
def _github_request(method, path, params=None, headers={}):
|
||||
if not path.startswith('https://api.github.com'):
|
||||
url = "https://api.github.com/{}".format(path)
|
||||
else:
|
||||
url = path
|
||||
|
||||
if params is not None:
|
||||
params = json.dumps(params)
|
||||
|
||||
response = requests.request(method, url, data=params, auth=auth)
|
||||
return response
|
||||
|
||||
def exception_from_error(message, response):
|
||||
return Exception("({}) {}: {}".format(response.status_code, message, response.json().get('message', '?')))
|
||||
|
||||
def rc_tag_name(version):
|
||||
return "v{}-rc".format(version)
|
||||
|
||||
def get_rc_release(version):
|
||||
tag = rc_tag_name(version)
|
||||
response = _github_request('get', 'repos/{}/releases/tags/{}'.format(repo, tag))
|
||||
|
||||
if response.status_code == 404:
|
||||
return None
|
||||
elif response.status_code == 200:
|
||||
return response.json()
|
||||
|
||||
raise exception_from_error("Unknown error while looking RC release: ", response)
|
||||
|
||||
def create_release(version, commit_sha):
|
||||
tag = rc_tag_name(version)
|
||||
|
||||
params = {
|
||||
'tag_name': tag,
|
||||
'name': "{} - RC".format(version),
|
||||
'target_commitish': commit_sha,
|
||||
'prerelease': True
|
||||
}
|
||||
|
||||
response = _github_request('post', 'repos/{}/releases'.format(repo), params)
|
||||
|
||||
if response.status_code != 201:
|
||||
raise exception_from_error("Failed creating new release", response)
|
||||
|
||||
return response.json()
|
||||
|
||||
def upload_asset(release, filepath):
|
||||
upload_url = release['upload_url'].replace('{?name}', '')
|
||||
filename = filepath.split('/')[-1]
|
||||
|
||||
with open(filepath) as file_content:
|
||||
headers = {'Content-Type': 'application/gzip'}
|
||||
response = requests.post(upload_url, file_content, params={'name': filename}, headers=headers, auth=auth, verify=False)
|
||||
|
||||
if response.status_code != 201: # not 200/201/...
|
||||
raise exception_from_error('Failed uploading asset', response)
|
||||
|
||||
return response
|
||||
|
||||
def remove_previous_builds(release):
|
||||
for asset in release['assets']:
|
||||
response = _github_request('delete', asset['url'])
|
||||
if response.status_code != 204:
|
||||
raise exception_from_error("Failed deleting asset", response)
|
||||
|
||||
def get_changelog(commit_sha):
|
||||
latest_release = _github_request('get', 'repos/{}/releases/latest'.format(repo))
|
||||
if latest_release.status_code != 200:
|
||||
raise exception_from_error('Failed getting latest release', latest_release)
|
||||
|
||||
latest_release = latest_release.json()
|
||||
previous_sha = latest_release['target_commitish']
|
||||
|
||||
args = ['git', '--no-pager', 'log', '--merges', '--grep', 'Merge pull request', '--pretty=format:"%h|%s|%b|%p"', '{}...{}'.format(previous_sha, commit_sha)]
|
||||
log = subprocess.check_output(args)
|
||||
changes = ["Changes since {}:".format(latest_release['name'])]
|
||||
|
||||
for line in log.split('\n'):
|
||||
try:
|
||||
sha, subject, body, parents = line[1:-1].split('|')
|
||||
except ValueError:
|
||||
continue
|
||||
|
||||
try:
|
||||
pull_request = re.match("Merge pull request #(\d+)", subject).groups()[0]
|
||||
pull_request = " #{}".format(pull_request)
|
||||
except Exception, ex:
|
||||
pull_request = ""
|
||||
|
||||
author = subprocess.check_output(['git', 'log', '-1', '--pretty=format:"%an"', parents.split(' ')[-1]])[1:-1]
|
||||
|
||||
changes.append("{}{}: {} ({})".format(sha, pull_request, body.strip(), author))
|
||||
|
||||
return "\n".join(changes)
|
||||
|
||||
def update_release_commit_sha(release, commit_sha):
|
||||
params = {
|
||||
'target_commitish': commit_sha,
|
||||
}
|
||||
|
||||
response = _github_request('patch', 'repos/{}/releases/{}'.format(repo, release['id']), params)
|
||||
|
||||
if response.status_code != 200:
|
||||
raise exception_from_error("Failed updating commit sha for existing release", response)
|
||||
|
||||
return response.json()
|
||||
|
||||
def update_release(version, build_filepath, commit_sha):
|
||||
try:
|
||||
release = get_rc_release(version)
|
||||
if release:
|
||||
release = update_release_commit_sha(release, commit_sha)
|
||||
else:
|
||||
release = create_release(version, commit_sha)
|
||||
|
||||
print "Using release id: {}".format(release['id'])
|
||||
|
||||
remove_previous_builds(release)
|
||||
response = upload_asset(release, build_filepath)
|
||||
|
||||
changelog = get_changelog(commit_sha)
|
||||
|
||||
response = _github_request('patch', release['url'], {'body': changelog})
|
||||
if response.status_code != 200:
|
||||
raise exception_from_error("Failed updating release description", response)
|
||||
|
||||
except Exception, ex:
|
||||
print ex
|
||||
|
||||
if __name__ == '__main__':
|
||||
commit_sha = sys.argv[1]
|
||||
version = sys.argv[2]
|
||||
filepath = sys.argv[3]
|
||||
|
||||
# TODO: make sure running from git directory & remote = repo
|
||||
update_release(version, filepath, commit_sha)
|
||||
@@ -1,63 +0,0 @@
|
||||
"""
|
||||
Script to test concurrency (multithreading/multiprocess) issues with the workers. Use with caution.
|
||||
"""
|
||||
import json
|
||||
import atfork
|
||||
atfork.monkeypatch_os_fork_functions()
|
||||
import atfork.stdlib_fixer
|
||||
atfork.stdlib_fixer.fix_logging_module()
|
||||
|
||||
import time
|
||||
from redash.data import worker
|
||||
from redash import models, data_manager, redis_connection
|
||||
|
||||
if __name__ == '__main__':
|
||||
models.create_db(True, False)
|
||||
|
||||
print "Creating data source..."
|
||||
data_source = models.DataSource.create(name="Concurrency", type="pg", options="dbname=postgres")
|
||||
|
||||
print "Clear jobs/hashes:"
|
||||
redis_connection.delete("jobs")
|
||||
query_hashes = redis_connection.keys("query_hash_*")
|
||||
if query_hashes:
|
||||
redis_connection.delete(*query_hashes)
|
||||
|
||||
starting_query_results_count = models.QueryResult.select().count()
|
||||
jobs_count = 5000
|
||||
workers_count = 10
|
||||
|
||||
print "Creating jobs..."
|
||||
for i in xrange(jobs_count):
|
||||
query = "SELECT {}".format(i)
|
||||
print "Inserting: {}".format(query)
|
||||
data_manager.add_job(query=query, priority=worker.Job.LOW_PRIORITY,
|
||||
data_source=data_source)
|
||||
|
||||
print "Starting workers..."
|
||||
workers = data_manager.start_workers(workers_count)
|
||||
|
||||
print "Waiting for jobs to be done..."
|
||||
keep_waiting = True
|
||||
while keep_waiting:
|
||||
results_count = models.QueryResult.select().count() - starting_query_results_count
|
||||
print "QueryResults: {}".format(results_count)
|
||||
time.sleep(5)
|
||||
if results_count == jobs_count:
|
||||
print "Yay done..."
|
||||
keep_waiting = False
|
||||
|
||||
data_manager.stop_workers()
|
||||
|
||||
qr_count = 0
|
||||
for qr in models.QueryResult.select():
|
||||
number = int(qr.query.split()[1])
|
||||
data_number = json.loads(qr.data)['rows'][0].values()[0]
|
||||
|
||||
if number != data_number:
|
||||
print "Oops? {} != {} ({})".format(number, data_number, qr.id)
|
||||
qr_count += 1
|
||||
|
||||
print "Verified {} query results.".format(qr_count)
|
||||
|
||||
print "Done."
|
||||
@@ -1,46 +0,0 @@
|
||||
#!python
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import requests
|
||||
import subprocess
|
||||
|
||||
|
||||
def capture_output(command):
|
||||
proc = subprocess.Popen(command, stdout=subprocess.PIPE)
|
||||
return proc.stdout.read()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
version = sys.argv[1]
|
||||
filepath = sys.argv[2]
|
||||
filename = filepath.split('/')[-1]
|
||||
github_token = os.environ['GITHUB_TOKEN']
|
||||
auth = (github_token, 'x-oauth-basic')
|
||||
commit_sha = os.environ['CIRCLE_SHA1']
|
||||
|
||||
commit_body = capture_output(["git", "log", "--format=%b", "-n", "1", commit_sha])
|
||||
file_md5_checksum = capture_output(["md5sum", filepath]).split()[0]
|
||||
file_sha256_checksum = capture_output(["sha256sum", filepath]).split()[0]
|
||||
version_body = "%s\n\nMD5: %s\nSHA256: %s" % (commit_body, file_md5_checksum, file_sha256_checksum)
|
||||
|
||||
params = json.dumps({
|
||||
'tag_name': 'v{0}'.format(version),
|
||||
'name': 're:dash v{0}'.format(version),
|
||||
'body': version_body,
|
||||
'target_commitish': commit_sha,
|
||||
'prerelease': True
|
||||
})
|
||||
|
||||
response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
|
||||
data=params,
|
||||
auth=auth)
|
||||
|
||||
upload_url = response.json()['upload_url']
|
||||
upload_url = upload_url.replace('{?name}', '')
|
||||
|
||||
with open(filepath) as file_content:
|
||||
headers = {'Content-Type': 'application/gzip'}
|
||||
response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth,
|
||||
headers=headers, verify=False)
|
||||
|
||||
@@ -7,6 +7,9 @@ machine:
2.7.3
dependencies:
pre:
- wget http://downloads.sourceforge.net/project/optipng/OptiPNG/optipng-0.7.5/optipng-0.7.5.tar.gz
- tar xvf optipng-0.7.5.tar.gz
- cd optipng-0.7.5; ./configure; make; sudo checkinstall -y;
- make deps
- pip install -r dev_requirements.txt
- pip install -r requirements.txt
@@ -26,3 +29,7 @@ deployment:
notify:
webhooks:
- url: https://webhooks.gitter.im/e/895d09c3165a0913ac2f
general:
branches:
ignore:
- gh-pages
docs/Makefile (new file, 192 lines)
@@ -0,0 +1,192 @@
|
||||
# Makefile for Sphinx documentation
|
||||
#
|
||||
|
||||
# You can set these variables from the command line.
|
||||
SPHINXOPTS =
|
||||
SPHINXBUILD = sphinx-build
|
||||
PAPER =
|
||||
BUILDDIR = _build
|
||||
|
||||
# User-friendly check for sphinx-build
|
||||
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
|
||||
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
|
||||
endif
|
||||
|
||||
# Internal variables.
|
||||
PAPEROPT_a4 = -D latex_paper_size=a4
|
||||
PAPEROPT_letter = -D latex_paper_size=letter
|
||||
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
# the i18n builder cannot share the environment and doctrees with the others
|
||||
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
|
||||
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext
|
||||
|
||||
help:
|
||||
@echo "Please use \`make <target>' where <target> is one of"
|
||||
@echo " html to make standalone HTML files"
|
||||
@echo " dirhtml to make HTML files named index.html in directories"
|
||||
@echo " singlehtml to make a single large HTML file"
|
||||
@echo " pickle to make pickle files"
|
||||
@echo " json to make JSON files"
|
||||
@echo " htmlhelp to make HTML files and a HTML help project"
|
||||
@echo " qthelp to make HTML files and a qthelp project"
|
||||
@echo " applehelp to make an Apple Help Book"
|
||||
@echo " devhelp to make HTML files and a Devhelp project"
|
||||
@echo " epub to make an epub"
|
||||
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
|
||||
@echo " latexpdf to make LaTeX files and run them through pdflatex"
|
||||
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
|
||||
@echo " text to make text files"
|
||||
@echo " man to make manual pages"
|
||||
@echo " texinfo to make Texinfo files"
|
||||
@echo " info to make Texinfo files and run them through makeinfo"
|
||||
@echo " gettext to make PO message catalogs"
|
||||
@echo " changes to make an overview of all changed/added/deprecated items"
|
||||
@echo " xml to make Docutils-native XML files"
|
||||
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
|
||||
@echo " linkcheck to check all external links for integrity"
|
||||
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
|
||||
@echo " coverage to run coverage check of the documentation (if enabled)"
|
||||
|
||||
clean:
|
||||
rm -rf $(BUILDDIR)/*
|
||||
|
||||
html:
|
||||
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
||||
|
||||
dirhtml:
|
||||
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
|
||||
|
||||
singlehtml:
|
||||
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
|
||||
|
||||
pickle:
|
||||
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
|
||||
@echo
|
||||
@echo "Build finished; now you can process the pickle files."
|
||||
|
||||
json:
|
||||
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
|
||||
@echo
|
||||
@echo "Build finished; now you can process the JSON files."
|
||||
|
||||
htmlhelp:
|
||||
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run HTML Help Workshop with the" \
|
||||
".hhp project file in $(BUILDDIR)/htmlhelp."
|
||||
|
||||
qthelp:
|
||||
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
|
||||
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
|
||||
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/redash.qhcp"
|
||||
@echo "To view the help file:"
|
||||
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/redash.qhc"
|
||||
|
||||
applehelp:
|
||||
$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
|
||||
@echo
|
||||
@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
|
||||
@echo "N.B. You won't be able to view it unless you put it in" \
|
||||
"~/Library/Documentation/Help or install it in your application" \
|
||||
"bundle."
|
||||
|
||||
devhelp:
|
||||
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
|
||||
@echo
|
||||
@echo "Build finished."
|
||||
@echo "To view the help file:"
|
||||
@echo "# mkdir -p $$HOME/.local/share/devhelp/redash"
|
||||
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/redash"
|
||||
@echo "# devhelp"
|
||||
|
||||
epub:
|
||||
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
|
||||
@echo
|
||||
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
|
||||
|
||||
latex:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo
|
||||
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
|
||||
@echo "Run \`make' in that directory to run these through (pdf)latex" \
|
||||
"(use \`make latexpdf' here to do that automatically)."
|
||||
|
||||
latexpdf:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo "Running LaTeX files through pdflatex..."
|
||||
$(MAKE) -C $(BUILDDIR)/latex all-pdf
|
||||
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
latexpdfja:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo "Running LaTeX files through platex and dvipdfmx..."
|
||||
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
|
||||
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
text:
|
||||
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
|
||||
@echo
|
||||
@echo "Build finished. The text files are in $(BUILDDIR)/text."
|
||||
|
||||
man:
|
||||
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
|
||||
@echo
|
||||
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
|
||||
|
||||
texinfo:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo
|
||||
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
|
||||
@echo "Run \`make' in that directory to run these through makeinfo" \
|
||||
"(use \`make info' here to do that automatically)."
|
||||
|
||||
info:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo "Running Texinfo files through makeinfo..."
|
||||
make -C $(BUILDDIR)/texinfo info
|
||||
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
|
||||
|
||||
gettext:
|
||||
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
|
||||
@echo
|
||||
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
|
||||
|
||||
changes:
|
||||
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
|
||||
@echo
|
||||
@echo "The overview file is in $(BUILDDIR)/changes."
|
||||
|
||||
linkcheck:
|
||||
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
|
||||
@echo
|
||||
@echo "Link check complete; look for any errors in the above output " \
|
||||
"or in $(BUILDDIR)/linkcheck/output.txt."
|
||||
|
||||
doctest:
|
||||
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
|
||||
@echo "Testing of doctests in the sources finished, look at the " \
|
||||
"results in $(BUILDDIR)/doctest/output.txt."
|
||||
|
||||
coverage:
|
||||
$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
|
||||
@echo "Testing of coverage in the sources finished, look at the " \
|
||||
"results in $(BUILDDIR)/coverage/python.txt."
|
||||
|
||||
xml:
|
||||
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
|
||||
@echo
|
||||
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
|
||||
|
||||
pseudoxml:
|
||||
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
|
||||
@echo
|
||||
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
|
||||
docs/conf.py (new file, 111 lines)
@@ -0,0 +1,111 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# re:dash documentation build configuration file, created by
|
||||
# sphinx-quickstart on Mon Jul 20 22:40:24 2015.
|
||||
#
|
||||
# This file is execfile()d with the current directory set to its
|
||||
# containing dir.
|
||||
#
|
||||
# Note that not all possible configuration values are present in this
|
||||
# autogenerated file.
|
||||
#
|
||||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import shlex
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = []
|
||||
|
||||
# The suffix(es) of source filenames.
|
||||
# You can specify multiple suffix as a list of string:
|
||||
# source_suffix = ['.rst', '.md']
|
||||
source_suffix = '.rst'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u're:dash'
|
||||
copyright = u'2015, EverythingMe'
|
||||
author = u'EverythingMe'
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#
|
||||
# This is also used if you do content translation via gettext catalogs.
|
||||
# Usually you set "language" from the command line for these cases.
|
||||
language = None
|
||||
|
||||
exclude_patterns = ['_build']
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# If true, `todo` and `todoList` produce output, else they produce nothing.
|
||||
todo_include_todos = False
|
||||
|
||||
# -- Options for HTML output ----------------------------------------------
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
import sphinx_rtd_theme
|
||||
html_theme = "sphinx_rtd_theme"
|
||||
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
|
||||
|
||||
# The name for this set of Sphinx documents. If None, it defaults to
|
||||
# "<project> v<release> documentation".
|
||||
#html_title = None
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
#html_short_title = None
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
#html_logo = None
|
||||
|
||||
# The name of an image file (within the static path) to use as favicon of the
|
||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
# pixels large.
|
||||
#html_favicon = None
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
html_show_sourcelink = True
|
||||
|
||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||
html_show_sphinx = False
|
||||
|
||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||
html_show_copyright = False
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'redashdoc'
|
||||
|
||||
# -- Options for manual page output ---------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
(master_doc, 'redash', u're:dash Documentation',
|
||||
[author], 1)
|
||||
]
|
||||
|
||||
# -- Options for Texinfo output -------------------------------------------
|
||||
|
||||
# Grouping the document tree into Texinfo files. List of tuples
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
(master_doc, 'redash', u're:dash Documentation',
|
||||
author, 'redash', 'One line description of project.',
|
||||
'Miscellaneous'),
|
||||
]
|
||||
|
||||
docs/datasources.rst (new file, 245 lines)
@@ -0,0 +1,245 @@
|
||||
Supported Data Sources
|
||||
######################
|
||||
|
||||
re:dash supports several types of data sources (see the full list below)
|
||||
and their management is done with the CLI (``manage.py``):
|
||||
|
||||
Create new data source
|
||||
======================
|
||||
|
||||
.. code:: bash
|
||||
|
||||
$ cd /opt/redash/current
|
||||
$ sudo -u redash bin/run ./manage.py ds new -n {name} -t {type} -o {options}
|
||||
|
||||
If you omit any of the options (-n, -t, -o) it will show a prompt asking
|
||||
for it. Options is a JSON string with the connection parameters. Unless
|
||||
you're doing some sort of automation, it's probably easier to leave it
|
||||
empty and fill out the prompt.
|
||||
|
||||
See below for the different supported data source types and the
|
||||
relevant options string format.
|
||||
|
||||
Listing existing data sources
|
||||
=============================
|
||||
|
||||
.. code:: bash
|
||||
|
||||
$ sudo -u redash bin/run ./manage.py ds list
|
||||
|
||||
Supported data sources
|
||||
======================
|
||||
|
||||
PostgreSQL / Redshift
|
||||
---------------------
|
||||
|
||||
- **Type**: pg
|
||||
- **Options**:
|
||||
|
||||
- User (user)
|
||||
- Password (password)
|
||||
- Host (host)
|
||||
- Port (port)
|
||||
- Database name (dbname) (mandatory)
|
||||
|
||||
- **Options string format (for v0.5 and older)**: "user= password=
|
||||
host= port=5439 dbname="
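For newer versions, the ``-o`` value passed to ``manage.py ds new`` is the same
set of connection parameters encoded as a JSON string. A minimal sketch of
building such a string in Python (the keys follow the option names listed
above; the values are illustrative placeholders, not real credentials):

.. code:: python

    import json

    # Connection options for a PostgreSQL / Redshift data source.
    # Keys match the option names documented above; values are examples only.
    options = {
        "host": "localhost",
        "port": 5439,
        "user": "redash",
        "password": "change-me",
        "dbname": "mydb",
    }

    # Pass the printed string as the -o argument of "manage.py ds new".
    print(json.dumps(options))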
|
||||
|
||||
MySQL
|
||||
-----
|
||||
|
||||
- **Type**: mysql
|
||||
- **Options**:
|
||||
|
||||
- User (user)
|
||||
- Password (passwd)
|
||||
- Host (host)
|
||||
- Port (port)
|
||||
- Database name (db) (mandatory)
|
||||
|
||||
- **Options string format (for v0.5 and older)**:
|
||||
"Server=localhost;User=;Pwd=;Database="
|
||||
|
||||
Note that you need to install the MySQLDb package as it is not included
|
||||
in the ``requirements.txt`` file.
|
||||
|
||||
Graphite
|
||||
--------
|
||||
|
||||
- **Type**: graphite
|
||||
- **Options**:
|
||||
|
||||
- Url (url) (mandatory)
|
||||
- User (username)
|
||||
- Password (password)
|
||||
- Verify SSL certificate (verify)
|
||||
|
||||
- **Options string format**: '{"url":
|
||||
"https://graphite.yourcompany.com", "auth": ["user", "password"],
|
||||
"verify": true}'
|
||||
|
||||
Google BigQuery
|
||||
---------------
|
||||
|
||||
- **Type**: bigquery
|
||||
- **Options**:
|
||||
|
||||
- Service Account (serviceAccount) (mandatory)
|
||||
- Project ID (projectId) (mandatory)
|
||||
- Private Key filename (privateKey) (mandatory)
|
||||
|
||||
- **Options string format (for v0.5 and older)**: {"serviceAccount" :
|
||||
"43242343247-fjdfakljr3r2@developer.gserviceaccount.com",
|
||||
"privateKey" : "/somewhere/23fjkfjdsfj21312-privatekey.p12",
|
||||
"projectId" : "myproject-123" }
|
||||
|
||||
Notes:
|
||||
|
||||
1. To obtain BigQuery credentials follow the guidelines at:
|
||||
https://developers.google.com/bigquery/authorization#service-accounts
|
||||
2. You need to install the ``google-api-python-client``,
|
||||
``oauth2client`` and ``pyopenssl`` packages (PyOpenSSL requires
|
||||
``libffi-dev`` and ``libssl-dev`` packages), as they are not included
|
||||
in the ``requirements.txt`` file.
|
||||
|
||||
Google Spreadsheets
|
||||
-------------------
|
||||
|
||||
(supported from v0.6.4)
|
||||
|
||||
- **Type**: google\_spreadsheets
|
||||
- **Options**:
|
||||
|
||||
- Credentials filename (credentialsFilePath) (mandatory)
|
||||
|
||||
Notes:
|
||||
|
||||
1. To obtain Google ServiceAccount credentials follow the guidelines at:
|
||||
https://developers.google.com/console/help/new/#serviceaccounts (save
|
||||
the JSON version of the credentials file)
|
||||
2. To be able to load the spreadsheet in re:dash - share it with
|
||||
your ServiceAccount's email (it can be found in the credentials json
|
||||
file, for example
|
||||
43242343247-fjdfakljr3r2@developer.gserviceaccount.com) Note: all the
|
||||
service account details can be seen inside the json file you should
|
||||
obtain following step #1
|
||||
3. The query format is "DOC\_UUID\|SHEET\_NUM" (for example
|
||||
"kjsdfhkjh4rsEFSDFEWR232jkddsfh\|0")
|
||||
4. You (might) need to install the ``gspread``, ``oauth2client`` and
|
||||
``dateutil`` packages as they are not included in the
|
||||
``requirements.txt`` file.
|
||||
|
||||
MongoDB
|
||||
-------
|
||||
|
||||
- **Type**: mongo
|
||||
- **Options**:
|
||||
|
||||
- Connection String (connectionString) (mandatory)
|
||||
- Database name (dbName)
|
||||
- Replica set name (replicaSetName)
|
||||
|
||||
- **Options string format (for v0.5 and older)**: { "connectionString"
|
||||
: "mongodb://user:password@localhost:27017/mydb", "dbName" : "mydb" }
|
||||
|
||||
For ReplicaSet databases use the following connection string: \*
|
||||
**Options string format**: { "connectionString" :
|
||||
"mongodb://user:pasword@server1:27017,server2:27017/mydb", "dbName" :
|
||||
"mydb", "replicaSetName" : "myreplicaSet" }
|
||||
|
||||
Notes:
|
||||
|
||||
1. You need to install ``pymongo``, as it is not included in the
|
||||
``requirements.txt`` file.
|
||||
|
||||
URL
|
||||
---
|
||||
|
||||
A URL-based data source which requests URLs that conform to the
|
||||
supported :doc:`results JSON
|
||||
format </dev/results_format>`.
|
||||
|
||||
Very useful in situations where you want to expose the data without
|
||||
connecting directly to the database.
|
||||
|
||||
The query itself inside re:dash will simply contain the URL to be
|
||||
executed (e.g. http://myserver/path/myquery)
|
||||
|
||||
- **Type**: url
|
||||
- **Options**:
|
||||
|
||||
- Url (url)
|
||||
|
||||
- **Options string format (optional) (for v0.5 and older)**:
|
||||
http://myserver/path/
|
||||
|
||||
Notes:
|
||||
|
||||
1. All URLs must return the supported :doc:`results JSON
|
||||
format </dev/results_format>`.
|
||||
2. If the Options string is set, only URLs that are part of the supplied
|
||||
path can be executed using this data source. Not setting the options
|
||||
path allows any URL to be executed as long as it returns the
|
||||
supported :doc:`results JSON
|
||||
format </dev/results_format>`.
|
||||
|
||||
Script
|
||||
------
|
||||
|
||||
Allows executing any executable script residing on the server as long as
|
||||
its standard output conforms to the supported :doc:`results JSON
|
||||
format </dev/results_format>`.
|
||||
|
||||
This integration is useful in situations where you need more than just a
|
||||
query and some processing needs to happen.
|
||||
|
||||
Once the path to scripts is configured in the datasource the query needs
|
||||
to contain the file name of the script as well as any command line
|
||||
parameters the script requires (e.g. myscript.py param1 param2
|
||||
--param3=value)
|
||||
|
||||
- **Type**: script
|
||||
- **Options**:
|
||||
|
||||
- Scripts Path (path) (mandatory)
|
||||
|
||||
- **Options string format (for v0.5 and older)**: /path/to/scripts/
|
||||
|
||||
Notes:
|
||||
|
||||
1. You MUST set a path to execute the scripts, otherwise the data source
|
||||
will not work.
|
||||
2. All scripts must be executable, otherwise results won't return
|
||||
3. The script data source does not allow relative paths in the form of
|
||||
"../". You may use a relative sub path such as "./mydir/myscript".
|
||||
4. All scripts must output to the standard output the supported :doc:`results
|
||||
JSON format </dev/results_format>` and
|
||||
only that, otherwise the data source will not be able to load the
|
||||
data.
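As an illustration, a minimal sketch of such a script in Python follows. The
shape of the output follows the results JSON format referenced above; the
column names and values are made up for the example:

.. code:: python

    #!/usr/bin/env python
    # Example script for the Script data source: its stdout is a results payload.
    import json
    import sys

    result = {
        "columns": [
            {"name": "country", "friendly_name": "Country", "type": "string"},
            {"name": "signups", "friendly_name": "Signups", "type": "integer"},
        ],
        "rows": [
            {"country": "US", "signups": 1234},
            {"country": "DE", "signups": 567},
        ],
    }

    # Print only the JSON document; any extra output would break parsing.
    sys.stdout.write(json.dumps(result))

Remember to make the file executable (``chmod +x``) and to place it under the
configured scripts path, as required by the notes above.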
|
||||
|
||||
Python
|
||||
------
|
||||
|
||||
Execute other queries, manipulate and compute with Python code
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
The Python data source allows running Python code in a secure and safe
|
||||
environment. It won't allow writing files to disk, importing modules
|
||||
that were not pre-approved in the configuration etc.
|
||||
|
||||
One of the benefits of using the Python data source is its ability to
|
||||
execute queries (or saved queries) which you can store in a variable and
|
||||
then manipulate/transform/merge with other data and queries.
|
||||
|
||||
You can import data analysis libraries such as Pandas, NumPy and SciPy.
|
||||
|
||||
This saves the trouble of having outside scripts do the synthesis of
|
||||
data from multiple sources to create a single data set that can then be
|
||||
used in dashboards.
|
||||
|
||||
- **Type**: Python
|
||||
- **Options**:
|
||||
|
||||
- Allowed Modules in a comma separated list (optional). **NOTE:**
|
||||
You MUST make sure these modules are installed on the machine
|
||||
running the Celery workers
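To make the idea concrete, here is a small, self-contained sketch of the kind
of manipulation this enables, using pandas to join two result sets and compute
a derived metric. The row data is defined inline and is purely illustrative;
in a real Python data source the inputs would come from executing queries:

.. code:: python

    import pandas as pd

    # Rows from two (made up) query results, in the documented results format.
    signup_rows = [{"day": "2015-07-01", "signups": 120},
                   {"day": "2015-07-02", "signups": 90}]
    visit_rows = [{"day": "2015-07-01", "visits": 3400},
                  {"day": "2015-07-02", "visits": 2900}]

    # Merge on the shared column and compute a conversion rate.
    df = pd.merge(pd.DataFrame(signup_rows), pd.DataFrame(visit_rows), on="day")
    df["conversion"] = df["signups"] / df["visits"]

    # Shape the combined data back into the results format.
    result = {
        "columns": [
            {"name": "day", "friendly_name": "Day", "type": "string"},
            {"name": "signups", "friendly_name": "Signups", "type": "integer"},
            {"name": "visits", "friendly_name": "Visits", "type": "integer"},
            {"name": "conversion", "friendly_name": "Conversion", "type": "float"},
        ],
        "rows": df.to_dict(orient="records"),
    }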
|
||||
docs/dev.rst (new file, 11 lines)

@@ -0,0 +1,11 @@
Developer Information
=====================

.. toctree::
   :maxdepth: 2
   :glob:

   dev/vagrant
   dev/*
docs/dev/query_execution.rst (new file, 94 lines)
@@ -0,0 +1,94 @@
|
||||
Query Execution Model
|
||||
#####################
|
||||
|
||||
Introduction
|
||||
============
|
||||
|
||||
The first datasource which was used with re:dash was Redshift. Because
|
||||
we had billions of records in Redshift, and some queries were costly to
|
||||
re-run, from the get go there was the idea of caching query results in
|
||||
re:dash.
|
||||
|
||||
This was to relieve stress from the Redshift cluster and also to improve
|
||||
user experience.
|
||||
|
||||
How queries get executed and cached in re:dash?
|
||||
===============================================
|
||||
|
||||
Server
|
||||
------
|
||||
|
||||
To make sure each query is executed only once at any given time, we
|
||||
translate the query to a ``query hash``, using the following code:
|
||||
|
||||
.. code:: python
|
||||
|
||||
COMMENTS_REGEX = re.compile("/\*.*?\*/")
|
||||
|
||||
def gen_query_hash(sql):
|
||||
sql = COMMENTS_REGEX.sub("", sql)
|
||||
sql = "".join(sql.split()).lower()
|
||||
return hashlib.md5(sql.encode('utf-8')).hexdigest()
|
||||
|
||||
When query execution is done, the result gets stored in the
|
||||
``query_results`` table. We also check for all queries in the
|
||||
``queries`` table that have the same query hash and update their
|
||||
reference to the query result we just saved
|
||||
(`code <https://github.com/EverythingMe/redash/blob/master/redash/models.py#L235>`__).
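A quick illustration of this normalization, using nothing beyond the function
above: two query texts that differ only in comments, case and whitespace
produce the same hash, so they share the same cached result.

.. code:: python

    import hashlib
    import re

    COMMENTS_REGEX = re.compile(r"/\*.*?\*/")

    def gen_query_hash(sql):
        sql = COMMENTS_REGEX.sub("", sql)
        sql = "".join(sql.split()).lower()
        return hashlib.md5(sql.encode('utf-8')).hexdigest()

    a = gen_query_hash("SELECT count(*) FROM events /* daily check */")
    b = gen_query_hash("select COUNT(*)\nfrom   events")
    assert a == b  # comments, case and whitespace do not affect the hash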
|
||||
|
||||
Client
|
||||
------
|
||||
|
||||
The client (UI) will execute queries in two scenarios:
|
||||
|
||||
1. (automatically) When opening a query page of a query that doesn't
|
||||
have a result yet.
|
||||
2. (manually) When the user clicks on "Execute".
|
||||
|
||||
In each case the client does a POST request to ``/api/query_results``
|
||||
with the following parameters: ``query`` (the query text),
|
||||
``data_source_id`` (data source to execute the query with) and ``ttl``.
|
||||
|
||||
When loading a cached result, ``ttl`` will be the one set to the query
|
||||
(if it was set). This is a relic from previous versions, and I'm not
|
||||
sure if it's really used anymore, as usually we will fetch query result
|
||||
using its id.
|
||||
|
||||
When loading a non cached result, ``ttl`` will be 0 which will "force"
|
||||
the server to execute the query.
|
||||
|
||||
As a response to ``/api/query_results`` the server will send either the
|
||||
query results (in case of a cached query) or job id of the currently
|
||||
executing query. When a job id is received, the client will start polling on
|
||||
this id, until a query result is received (this is encapsulated in
|
||||
``Query`` and ``QueryResult`` services).
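A rough sketch of the same flow as seen by an external HTTP client is below.
Only ``/api/query_results`` is taken from the text above; the job-status
endpoint and the response field names are assumptions made for illustration,
and authentication is omitted, so treat this as a sketch rather than a
documented API:

.. code:: python

    import time
    import requests

    BASE_URL = "http://localhost:5000"  # adjust to your instance

    # ttl=0 "forces" execution, as described above.
    payload = {"query": "SELECT 1", "data_source_id": 1, "ttl": 0}
    response = requests.post(BASE_URL + "/api/query_results", json=payload).json()

    if "query_result" in response:
        result = response["query_result"]      # served from cache
    else:
        job_id = response["job"]["id"]         # assumed response shape
        while True:
            job = requests.get("{}/api/jobs/{}".format(BASE_URL, job_id)).json()["job"]
            if job.get("query_result_id"):
                break
            time.sleep(1)
        result = requests.get("{}/api/query_results/{}".format(
            BASE_URL, job["query_result_id"])).json()["query_result"]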
|
||||
|
||||
Ideas on how to implement query parameters
|
||||
==========================================
|
||||
|
||||
Client side only implementation
|
||||
-------------------------------
|
||||
|
||||
(This was actually implemented; see pull request `#363 <https://github.com/EverythingMe/redash/pull/363>`__ for details.)
|
||||
|
||||
The basic idea of how to implement parameterized queries is to treat the
|
||||
query as a template and merge it with parameters taken from query string
|
||||
or UI (or both).
|
||||
|
||||
When the caching facility isn't required (with queries that return in a
|
||||
reasonable time frame) the implementation can be completely client side
|
||||
and the backend can be "blind" to the parameters - it just receives the
|
||||
final query to execute and returns result.
|
||||
|
||||
As one improvement over this, we can let the UI/user specify the TTL
|
||||
value when making the request to ``/api/query_results``, in which case
|
||||
caching will be available too, while not having to make the server aware
|
||||
of the parameters.
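A minimal sketch of the merge step itself, shown here in Python for brevity
(in the client-side approach this substitution happens in the browser, and the
merged text is what gets sent to ``/api/query_results``):

.. code:: python

    from string import Template

    # The saved query is treated as a template with named parameters.
    template = Template(
        "SELECT count(*) FROM events "
        "WHERE created_at BETWEEN '$start_date' AND '$end_date'")

    # Parameter values taken from the query string and/or the UI.
    params = {"start_date": "2015-07-01", "end_date": "2015-07-31"}

    final_query = template.safe_substitute(params)
    # The backend stays "blind" to the parameters: it only ever sees final_query.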
|
||||
|
||||
Hybrid
|
||||
------
|
||||
|
||||
Another option would be to store the list of possible parameters for a
|
||||
query, with their default/optional values. In such case, the server can
|
||||
prefetch all the options and cache them to provide faster results to the
|
||||
client.
|
||||
docs/dev/results_format.rst (new file, 30 lines)
@@ -0,0 +1,30 @@
|
||||
Data Source Results Format
|
||||
==========================
|
||||
|
||||
All data sources in re:dash return the following results in JSON format:
|
||||
|
||||
.. code:: javascript
|
||||
|
||||
{
|
||||
"columns" : [
|
||||
{
|
||||
// Required: a unique identifier of the column name in this result
|
||||
"name" : "COLUMN_NAME",
|
||||
// Required: friendly name of the column that will appear in the results
|
||||
"friendly_name" : "FRIENDLY_NAME",
|
||||
// Optional: If not specified sort might not work well.
|
||||
// Supported types: integer, float, boolean, string (default), datetime (ISO-8601 text format)
|
||||
"type" : "VALUE_TYPE"
|
||||
},
|
||||
...
|
||||
],
|
||||
"rows" : [
|
||||
{
|
||||
// name is the column name as it appears in the columns above.
|
||||
// VALUE is a valid JSON value. For dates its an ISO-8601 string.
|
||||
"name" : VALUE,
|
||||
"name2" : VALUE2
|
||||
},
|
||||
...
|
||||
]
|
||||
}
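A small sketch of producing a payload in this format from Python, including a
datetime column serialized as ISO-8601 text as noted above (the column names
and values are made up for the example):

.. code:: python

    import datetime
    import json

    captured = [
        {"event": "signup", "occurred_at": datetime.datetime(2015, 7, 20, 22, 40, 24)},
    ]

    payload = {
        "columns": [
            {"name": "event", "friendly_name": "Event", "type": "string"},
            {"name": "occurred_at", "friendly_name": "Occurred At", "type": "datetime"},
        ],
        # Dates must be rendered as ISO-8601 strings inside the JSON document.
        "rows": [
            {"event": r["event"], "occurred_at": r["occurred_at"].isoformat()}
            for r in captured
        ],
    }

    print(json.dumps(payload))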
|
||||
docs/dev/vagrant.rst (new file, 49 lines)
@@ -0,0 +1,49 @@
|
||||
Setting up development environment (using Vagrant)
|
||||
==================================================
|
||||
|
||||
To simplify contribution there is a `Vagrant
|
||||
box <https://vagrantcloud.com/redash/boxes/dev>`__ available with all
|
||||
the needed software to run re:dash for development (use it only for
|
||||
development, for demo purposes there is
|
||||
`redash/demo <https://vagrantcloud.com/redash/boxes/demo>`__ box and the
|
||||
AWS/GCE images).
|
||||
|
||||
To get started with this box:
|
||||
|
||||
1. Make sure you have a recent version of
|
||||
`Vagrant <https://www.vagrantup.com/>`__ installed.
|
||||
2. Clone the re:dash repository:
|
||||
``git clone https://github.com/EverythingMe/redash.git``.
|
||||
3. Change dir into the repository (``cd redash``) and run
|
||||
``vagrant up``. This might take some time the first time you run it,
|
||||
as it downloads the Vagrant virtual box.
|
||||
4. Once Vagrant is ready, ssh into the instance (``vagrant ssh``), and
|
||||
change dir to ``/opt/redash/current`` -- this is where your local
|
||||
repository copy is synced to.
|
||||
5. Copy ``.env`` file into this directory (``cp ../.env ./``).
|
||||
6. From ``/opt/redash/current/rd_ui`` run ``bower install`` to install
|
||||
frontend packages. This can be done from your host machine as well,
|
||||
if you have bower installed.
|
||||
7. Go back to ``/opt/redash/current`` and install python dependencies
|
||||
``sudo pip install -r requirements.txt``
|
||||
8. Apply migrations
|
||||
|
||||
::
|
||||
|
||||
PYTHONPATH=. bin/run python migrations/0001_allow_delete_query.py
|
||||
PYTHONPATH=. bin/run python migrations/0002_fix_timestamp_fields.py
|
||||
PYTHONPATH=. bin/run python migrations/0003_update_data_source_config.py
|
||||
PYTHONPATH=. bin/run python migrations/0004_allow_null_in_event_user.py
|
||||
PYTHONPATH=. bin/run python migrations/0005_add_updated_at.py
|
||||
PYTHONPATH=. bin/run python migrations/0006_queries_last_edit_by.py
|
||||
PYTHONPATH=. bin/run python migrations/0007_add_schedule_to_queries.py
|
||||
PYTHONPATH=. bin/run python migrations/0008_make_ds_name_unique.py
|
||||
PYTHONPATH=. bin/run python migrations/0009_add_api_key_to_user.py
|
||||
PYTHONPATH=. bin/run python migrations/0010_create_alerts.py
|
||||
PYTHONPATH=. bin/run python migrations/0010_allow_deleting_datasources.py
|
||||
PYTHONPATH=. bin/run python migrations/0011_migrate_bigquery_to_json.py
|
||||
|
||||
9. Start the server and background workers with
|
||||
``bin/run honcho start -f Procfile.dev``.
|
||||
10. Now the server should be available on your host on port 9001 and you
|
||||
can login with username admin and password admin.
|
||||
docs/index.rst (new file, 57 lines)
@@ -0,0 +1,57 @@
|
||||
.. image:: http://redash.io/static/img/redash_logo.png
|
||||
:width: 200px
|
||||
|
||||
Open Source Data Collaboration and Visualization Platform
|
||||
============================================================
|
||||
|
||||
**re:dash** is our take on freeing the data within our company in a way that will better fit our culture and usage patterns.
|
||||
|
||||
Prior to **re:dash**, we tried to use traditional BI suites and discovered a set of bloated, technically challenged and slow tools/flows. What we were looking for was a more hacker'ish way to look at data, so we built one.
|
||||
|
||||
**re:dash** was built to allow fast and easy access to billions of records, that we process and collect using Amazon Redshift ("petabyte scale data warehouse" that "speaks" PostgreSQL).
|
||||
Today **re:dash** has support for querying multiple databases, including: Redshift, Google BigQuery, Google Spreadsheets, PostgreSQL, MySQL, Graphite and custom scripts.
|
||||
|
||||
Features
|
||||
########
|
||||
|
||||
1. **Query Editor**: think of `JS Fiddle`_ for SQL queries. It's your way to share data in the organization in an open way, by sharing both the dataset and the query that generated it. This way everyone can peer review not only the resulting dataset but also the process that generated it.
|
||||
2. **Visualizations**: once you have a dataset, you can create different visualizations out of it. Currently it supports charts, pivot table and cohorts.
|
||||
3. **Dashboards**: combine several visualizations into a single dashboard.
|
||||
|
||||
Demo
|
||||
####
|
||||
|
||||
.. figure:: https://raw.github.com/EverythingMe/redash/screenshots/screenshots.gif
|
||||
:alt: Screenshots
|
||||
|
||||
You can try out the demo instance: `http://demo.redash.io`_ (login with any Google account).
|
||||
|
||||
.. _http://demo.redash.io: http://demo.redash.io
|
||||
.. _JS Fiddle: http://jsfiddle.net
|
||||
|
||||
Getting Started
|
||||
###############
|
||||
|
||||
:doc:`Setting up re:dash instance </setup>` (includes links to ready made AWS/GCE images).
|
||||
|
||||
Getting Help
|
||||
############
|
||||
|
||||
* Source: https://github.com/everythingme/redash
|
||||
* Issues: https://github.com/everythingme/redash/issues
|
||||
* Mailing List: https://groups.google.com/forum/#!forum/redash-users
|
||||
* Gitter (chat): https://gitter.im/EverythingMe/redash
|
||||
* Contact Arik, the maintainer directly: arik@everything.me.
|
||||
|
||||
TOC
|
||||
###
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
setup
|
||||
upgrade
|
||||
datasources
|
||||
usage
|
||||
dev
|
||||
misc
|
||||
docs/misc.rst (new file, 10 lines)

@@ -0,0 +1,10 @@
Miscellaneous
=============

.. toctree::
   :maxdepth: 2
   :glob:

   misc/*
docs/misc/google_developers_project.rst (new file, 50 lines)
@@ -0,0 +1,50 @@
|
||||
How To: Create a Google Developers Project
|
||||
==========================================
|
||||
|
||||
1. Go to the `Google Developers
|
||||
Console <https://console.developers.google.com/>`__.
|
||||
2. Select a project, or create a new one by clicking Create Project:
|
||||
|
||||
1. In the Project name field, type in a name for your project.
|
||||
2. In the Project ID field, optionally type in a project ID for your
|
||||
project or use the one that the console has created for you. This
|
||||
ID must be unique world-wide.
|
||||
3. Click the **Create** button and wait for the project to be
|
||||
created.
|
||||
4. Click on the new project name in the list to start editing the
|
||||
project.
|
||||
|
||||
3. In the left sidebar, select the **APIs** item below "APIs & auth". A
|
||||
list of Google web services appears.
|
||||
4. Find the **Google+ API** service and set its status to **ON**—notice
|
||||
that this action moves the service to the top of the list.
|
||||
5. In the sidebar under "APIs & auth", select **Consent screen**.
|
||||
|
||||
- Choose an Email Address and specify a Product Name.
|
||||
|
||||
6. In the sidebar under "APIs & auth", select **Credentials**.
|
||||
7. Click **Create a new Client ID** — a dialog box appears.
|
||||
|
||||
- In the **Application type** section of the dialog, select **Web
|
||||
application**.
|
||||
- In the **Authorized JavaScript origins** field, enter the origin
|
||||
for your app. You can enter multiple origins to use with multiple
|
||||
re:dash instance. Wildcards are not allowed. In the example below,
|
||||
we assume your re:dash instance address is *redash.example.com*:
|
||||
|
||||
::
|
||||
|
||||
http://redash.example.com
|
||||
https://redash.example.com
|
||||
|
||||
- In the Authorized redirect URI field, enter the redirect URI
|
||||
callback:
|
||||
|
||||
::
|
||||
|
||||
http://redash.example.com/oauth/google_callback
|
||||
|
||||
- Click the ``Create Client ID`` button.
|
||||
|
||||
8. In the resulting **Client ID for web application** section, copy the
|
||||
**Client ID** and **Client secret** to your ``.env`` file.
|
||||
docs/misc/ssl.rst (new file, 59 lines)
@@ -0,0 +1,59 @@
|
||||
SSL (HTTPS) Setup
|
||||
=================
|
||||
|
||||
If you used the provided images or the bootstrap script, to start using
|
||||
SSL with your instance you need to:
|
||||
|
||||
1. Update the nginx config file (``/etc/nginx/sites-available/redash``)
|
||||
with SSL configuration (see below an example). Make sure to upload
|
||||
the certificate to the server, and set the paths correctly in the new
|
||||
config.
|
||||
|
||||
2. Open port 443 in your security group (if using AWS or GCE).
|
||||
|
||||
.. code:: nginx
|
||||
|
||||
upstream redash_servers {
|
||||
server 127.0.0.1:5000;
|
||||
}
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
|
||||
# Allow accessing /ping without https. Useful when placing behind load balancer.
|
||||
location /ping {
|
||||
proxy_set_header Host $http_host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_pass http://redash_servers;
|
||||
}
|
||||
|
||||
location / {
|
||||
# Enforce SSL.
|
||||
return 301 https://$host$request_uri;
|
||||
}
|
||||
}
|
||||
|
||||
server {
|
||||
listen 443 ssl;
|
||||
|
||||
# Make sure to set paths to your certificate .pem and .key files.
|
||||
ssl on;
|
||||
ssl_certificate /path-to/cert.pem; # or crt
|
||||
ssl_certificate_key /path-to/cert.key;
|
||||
|
||||
access_log /var/log/nginx/redash.access.log;
|
||||
|
||||
gzip on;
|
||||
gzip_types *;
|
||||
gzip_proxied any;
|
||||
|
||||
location / {
|
||||
proxy_set_header Host $http_host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_pass http://redash_servers;
|
||||
proxy_redirect off;
|
||||
}
|
||||
}
|
||||
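After editing the config and uploading the certificate, validate and reload nginx so the new server blocks take effect. A minimal sketch (on the provided Ubuntu/Debian images nginx runs as a regular system service; adjust the service command if your setup differs):

.. code:: bash

    # check the configuration for syntax errors, then reload without dropping connections
    sudo nginx -t && sudo service nginx reload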
3
docs/requirements.txt
Normal file
@@ -0,0 +1,3 @@
|
||||
sphinx
|
||||
sphinx-autobuild
|
||||
sphinx_rtd_theme
|
||||
159
docs/setup.rst
Normal file
@@ -0,0 +1,159 @@
|
||||
Setting up re:dash instance
|
||||
###########################
|
||||
|
||||
The `provisioning
|
||||
script <https://github.com/EverythingMe/redash/blob/master/setup/bootstrap.sh>`__
|
||||
works on Ubuntu 12.04, Ubuntu 14.04 and Debian Wheezy. This script
|
||||
installs all needed dependencies and creates a basic setup.
|
||||
|
||||
To ease the process, there are also images for AWS and Google Compute
|
||||
Engine. These images were created with the same provisioning script, using Packer.
|
||||
|
||||
Create an instance
|
||||
==================
|
||||
|
||||
Google Compute Engine
|
||||
---------------------
|
||||
|
||||
First, you need to add the images to your account:
|
||||
|
||||
.. code:: bash
|
||||
|
||||
$ gcloud compute images add redash-063-b906 gs://redash-images/redash.0.6.3.b906.tar.gz
|
||||
|
||||
Next you need to launch an instance using this image (n1-standard-1
|
||||
instance type is recommended). If you plan on using re:dash with BigQuery,
|
||||
you can use a dedicated image which comes with BigQuery preconfigured
|
||||
(using instance permissions):
|
||||
|
||||
.. code:: bash
|
||||
|
||||
$ gcloud compute images add redash-063-b906-bq gs://redash-images/redash.0.6.3.b906-bq.tar.gz
|
||||
|
||||
Note that you need to launch this instance with BigQuery access:
|
||||
|
||||
.. code:: bash
|
||||
|
||||
$ gcloud compute instances create <your_instance_name> --image redash-063-b906-bq --scopes storage-ro bigquery
|
||||
|
||||
(the same can be done from the web interface, just make sure to enable
|
||||
BigQuery access)
|
||||
|
||||
Now proceed to `"Setup" <#setup>`__.
|
||||
|
||||
AWS
|
||||
---
|
||||
|
||||
Launch the instance from the pre-baked AMI (for small deployments
|
||||
t2.micro should be enough):
|
||||
|
||||
- us-east-1:
|
||||
`ami-47b4612c <https://console.aws.amazon.com/ec2/home?region=us-east-1#LaunchInstanceWizard:ami=ami-47b4612c>`__
|
||||
- us-west-1:
|
||||
`ami-a72edde3 <https://console.aws.amazon.com/ec2/home?region=us-west-1#LaunchInstanceWizard:ami=ami-a72edde3>`__
|
||||
- us-west-2:
|
||||
`ami-f9d6d5c9 <https://console.aws.amazon.com/ec2/home?region=us-west-2#LaunchInstanceWizard:ami=ami-f9d6d5c9>`__
|
||||
- eu-central-1:
|
||||
`ami-72eed46f <https://console.aws.amazon.com/ec2/home?region=eu-central-1#LaunchInstanceWizard:ami=ami-72eed46f>`__
|
||||
- eu-west-1:
|
||||
`ami-5a135c2d <https://console.aws.amazon.com/ec2/home?region=eu-west-1#LaunchInstanceWizard:ami=ami-5a135c2d>`__
|
||||
- sa-east-1:
|
||||
`ami-2b78f436 <https://console.aws.amazon.com/ec2/home?region=sa-east-1#LaunchInstanceWizard:ami=ami-2b78f436>`__
|
||||
- ap-northeast-1:
|
||||
`ami-0a55fd0a <https://console.aws.amazon.com/ec2/home?region=ap-northeast-1#LaunchInstanceWizard:ami=ami-0a55fd0a>`__
|
||||
- ap-southeast-2:
|
||||
`ami-9f793ea5 <https://console.aws.amazon.com/ec2/home?region=ap-southeast-2#LaunchInstanceWizard:ami=ami-9f793ea5>`__
|
||||
- ap-southeast-1:
|
||||
`ami-12545740 <https://console.aws.amazon.com/ec2/home?region=ap-southeast-1#LaunchInstanceWizard:ami=ami-12545740>`__
|
||||
|
||||
Now proceed to `"Setup" <#setup>`__.
|
||||
|
||||
Other
|
||||
-----
|
||||
|
||||
Download the provisioning script and run it on your machine. Note that:
|
||||
|
||||
1. You need to run the script as root.
|
||||
2. It was tested only on Ubuntu 12.04, Ubuntu 14.04 and Debian Wheezy.
|
||||
|
||||
Setup
|
||||
=====
|
||||
|
||||
Once you have created the instance with either the image or the script, you
|
||||
should have a running re:dash instance with everything you need to get
|
||||
started. You can even log in to it with the user "admin" (password:
|
||||
"admin"). But to make it useful, there are a few more steps that you
|
||||
need to do manually to complete the setup:
|
||||
|
||||
First ssh to your instance and change directory to ``/opt/redash``. If
|
||||
you're using the GCE image, switch to root (``sudo su``).
|
||||
|
||||
Users & Google Authentication setup
|
||||
-----------------------------------
|
||||
|
||||
Most of the settings you need to edit are in the ``/opt/redash/.env``
|
||||
file.
|
||||
|
||||
1. Update the cookie secret (important! otherwise anyone can sign new
|
||||
cookies and impersonate users): change "veryverysecret" in the line:
|
||||
``export REDASH_COOKIE_SECRET=veryverysecret`` to something else (you
|
||||
can use ``pwgen 32 -1`` to generate a random string; see the sketch after this list).
|
||||
|
||||
2. By default we create an admin user with the password "admin". You
|
||||
need to change the password:
|
||||
|
||||
- ``cd /opt/redash/current``
|
||||
- ``sudo -u redash bin/run ./manage.py users password admin {new password}``
|
||||
|
||||
3. If you want to use Google OAuth to authenticate users, you need to
|
||||
create a Google Developers project (see :doc:`instructions </misc/google_developers_project>`)
|
||||
and then add the needed configuration in the ``.env`` file:
|
||||
|
||||
.. code::
|
||||
|
||||
export REDASH_GOOGLE_CLIENT_ID=""
|
||||
export REDASH_GOOGLE_CLIENT_SECRET=""
|
||||
export REDASH_GOOGLE_APPS_DOMAIN=""
|
||||
|
||||
|
||||
|
||||
``REDASH_GOOGLE_CLIENT_ID`` and ``REDASH_GOOGLE_CLIENT_SECRET`` are the values you get after registering with Google. ``REDASH_GOOGLE_APPS_DOMAIN`` is used in case you want to limit access to a single Google Apps domain (*if you leave it empty, anyone with a Google account can access your instance*).
|
||||
|
||||
4. Restart the web server to apply the configuration changes:
|
||||
``sudo supervisorctl restart redash_server``.
|
||||
|
||||
5. Once you have Google OAuth enabled, you can login using your Google
|
||||
Apps account. If you want to grant admin permissions to some users,
|
||||
you can do it with the ``users grant_admin`` command:
|
||||
``sudo -u redash bin/run ./manage.py users grant_admin {email}``.
|
||||
|
||||
6. If you don't use Google OAuth or just need username/password logins,
|
||||
you can create additional users using the CLI (see :doc:`documentation </usage/users>`).
|
||||
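As a minimal sketch of step 1 above (it assumes ``pwgen`` is installed and that ``.env`` still contains the default secret; any other random-string generator works just as well):

.. code:: bash

    # generate a 32-character secret and swap out the default value in /opt/redash/.env
    SECRET=$(pwgen 32 -1)
    sudo sed -i "s/REDASH_COOKIE_SECRET=veryverysecret/REDASH_COOKIE_SECRET=$SECRET/" /opt/redash/.env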
|
||||
Datasources
|
||||
-----------
|
||||
|
||||
To make re:dash truly useful, you need to set up your data sources in it.
|
||||
Currently, all data source management is done with the CLI.
|
||||
|
||||
See
|
||||
:doc:`documentation </datasources>`
|
||||
for the different options. Your instance comes ready with dependencies
|
||||
needed to set up supported sources.
|
||||
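For example, to see which data sources are currently defined you can use the same ``ds list`` command shown in the maintenance docs:

.. code:: bash

    cd /opt/redash/current
    sudo -u redash bin/run ./manage.py ds list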
|
||||
Follow issue
|
||||
`#193 <https://github.com/EverythingMe/redash/issues/193>`__ to know
|
||||
when a UI for managing data sources is implemented.
|
||||
|
||||
How to upgrade?
|
||||
---------------
|
||||
|
||||
It's recommended to upgrade your re:dash instance once in a while to
|
||||
benefit from bug fixes and new features. See :doc:`here </upgrade>` for full upgrade
|
||||
instructions (including Fabric script).
|
||||
|
||||
Notes
|
||||
=====
|
||||
|
||||
- If this is a production setup, you should enforce HTTPS and make sure
|
||||
you set the cookie secret (see :doc:`instructions </misc/ssl>`).
|
||||
34
docs/upgrade.rst
Normal file
@@ -0,0 +1,34 @@
|
||||
How to Upgrade
|
||||
##############
|
||||
|
||||
It's recommended to upgrade your re:dash instance when there are new
|
||||
releases, to benefit from new features and bug fixes. The upgrade
|
||||
process is relatively simple, and assuming you used one of the base
|
||||
images we provide, you can just use the
|
||||
`Fabric <http://www.fabfile.org/>`__ script provided here:
|
||||
https://gist.github.com/arikfr/440d1403b4aeb76ebaf8.
|
||||
|
||||
How to run the Fabric script
|
||||
============================
|
||||
|
||||
1. Install Fabric: ``pip install fabric requests`` (needed only once)
|
||||
2. Download the ``fabfile.py`` from the gist.
|
||||
3. Run the script:
|
||||
``fab -H{your re:dash host} -u{the ssh user for this host} deploy_latest_release``
|
||||
|
||||
What the Fabric script does
|
||||
===========================
|
||||
|
||||
Even if you didn't use the image, it's very likely you can reuse most of
|
||||
this script with small modifications. What this script does is (a rough manual equivalent is sketched after the list):
|
||||
|
||||
1. Find the URL of the latest release tarball (from `GitHub releases
|
||||
page <https://github.com/everythingme/redash/releases>`__).
|
||||
2. Download it.
|
||||
3. Create a new directory for this version (for example:
|
||||
``/opt/redash/redash.0.5.0.b685``).
|
||||
4. Unpack that (``tar -C {dir} -xvf {tarball path}``).
|
||||
5. Link ``/opt/redash/.env`` file into this directory.
|
||||
6. Apply any new migrations.
|
||||
7. Link ``/opt/redash/current`` to new version.
|
||||
8. Restart web server and celery workers.
|
||||
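A rough manual equivalent of those steps, sketched with a hypothetical version number (the Fabric script automates all of this; take the real tarball URL from the releases page and check the release notes for which migrations to apply):

.. code:: bash

    # hypothetical version -- substitute the real one from the releases page
    VERSION=redash.0.7.0.b1000
    wget -O /tmp/$VERSION.tar.gz "<tarball URL from the releases page>"
    sudo mkdir -p /opt/redash/$VERSION
    sudo tar -C /opt/redash/$VERSION -xvf /tmp/$VERSION.tar.gz
    sudo ln -s /opt/redash/.env /opt/redash/$VERSION/.env
    # apply any new migrations for this release here, then switch the symlink
    sudo ln -nfs /opt/redash/$VERSION /opt/redash/current
    sudo supervisorctl restart redash_server redash_celery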
12
docs/usage.rst
Normal file
@@ -0,0 +1,12 @@
|
||||
Usage
|
||||
=====
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:glob:
|
||||
|
||||
usage/maintenance.rst
|
||||
usage/users.rst
|
||||
usage/*
|
||||
|
||||
|
||||
48
docs/usage/elasticsearch_querying.rst
Normal file
@@ -0,0 +1,48 @@
|
||||
ElasticSearch: Querying
|
||||
#######################
|
||||
|
||||
ElasticSearch currently supports only simple Lucene style queries (like
|
||||
Kibana, but without aggregations).
|
||||
|
||||
Full blown JSON based ElasticSearch queries (including aggregations)
|
||||
will be added later.
|
||||
|
||||
Simple query example:
|
||||
=====================
|
||||
|
||||
- Query the index named "twitter"
|
||||
- Filter by "user:kimchy"
|
||||
- Return the fields: "@timestamp", "tweet" and "user"
|
||||
- Return up to 15 results
|
||||
- Sort by @timestamp ascending
|
||||
|
||||
.. code:: json
|
||||
|
||||
{
|
||||
"index" : "twitter",
|
||||
"query" : "user:kimchy",
|
||||
"fields" : ["@timestamp", "tweet", "user"],
|
||||
"size" : 15,
|
||||
"sort" : "@timestamp:asc"
|
||||
}
|
||||
|
||||
Simple query on a logstash ElasticSearch instance:
|
||||
==================================================
|
||||
|
||||
- Query the index named "logstash-2015.04.\*" (in this case it's all of
|
||||
April 2015)
|
||||
- Filter by type:events AND eventName:UserUpgrade AND channel:selfserve
|
||||
- Return fields: "@timestamp", "userId", "channel", "utm\_source",
|
||||
"utm\_medium", "utm\_campaign", "utm\_content"
|
||||
- Return up to 250 results
|
||||
- Sort by @timestamp ascending
|
||||
|
||||
.. code:: json
|
||||
|
||||
{
|
||||
"index" : "logstash-2015.04.*",
|
||||
"query" : "type:events AND eventName:UserUpgrade AND channel:selfserve",
|
||||
"fields" : ["@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"],
|
||||
"size" : 250,
|
||||
"sort" : "@timestamp:asc"
|
||||
}
|
||||
94
docs/usage/maintenance.rst
Normal file
@@ -0,0 +1,94 @@
|
||||
Ongoing Maintenance and Basic Operations
|
||||
########################################
|
||||
|
||||
Configuration and logs
|
||||
======================
|
||||
|
||||
The supervisor config can be found in
|
||||
``/opt/redash/supervisord/supervisord.conf``.
|
||||
|
||||
There you can see the names of its programs (``redash_celery``,
|
||||
``redash_server``) and the location of their logs.
|
||||
|
||||
Restart
|
||||
=======
|
||||
|
||||
Restarting the Web Server
|
||||
-------------------------
|
||||
|
||||
``sudo supervisorctl restart redash_server``
|
||||
|
||||
Restarting Celery Workers
|
||||
-------------------------
|
||||
|
||||
``sudo supervisorctl restart redash_celery``
|
||||
|
||||
Restarting Celery Workers & the Queries Queue
|
||||
---------------------------------------------
|
||||
|
||||
In case you are handling a problem, and you need to stop the currently
|
||||
running queries and reset the queue, follow the steps below.
|
||||
|
||||
1. Stop celery: ``sudo supervisorctl stop redash_celery`` (celery might
|
||||
take some time to stop, if it's in the middle of running a query)
|
||||
|
||||
2. Flush redis: ``redis-cli flushdb``
|
||||
|
||||
3. Start celery: ``sudo supervisorctl start redash_celery``
|
||||
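When you are sure you want to drop the queue, the three steps can be chained into one line (a sketch; note that ``redis-cli flushdb`` clears the entire Redis database re:dash uses):

.. code:: bash

    sudo supervisorctl stop redash_celery && redis-cli flushdb && sudo supervisorctl start redash_celery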
|
||||
Changing the Number of Workers
|
||||
==============================
|
||||
|
||||
By default, Celery will start a worker per CPU core. Because most of
|
||||
re:dash's tasks are IO bound, the real limit for the number of workers you
|
||||
can use depends on the amount of memory your machine has. It's
|
||||
recommended to increase the number of workers to support more concurrent
|
||||
queries.
|
||||
|
||||
1. Open the supervisord configuration file:
|
||||
``/opt/redash/supervisord/supervisord.conf``
|
||||
|
||||
2. Edit the ``[program:redash_celery]`` section and add the ``-c`` parameter,
with the number of concurrent workers you need, to the *command* value.
|
||||
|
||||
3. Restart supervisord to apply new configuration:
|
||||
``sudo /etc/init.d/redash_supervisord restart``.
|
||||
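To confirm the new worker count took effect after the restart, a quick check (sketch; exact process names can vary slightly between versions):

.. code:: bash

    sudo supervisorctl status redash_celery
    # count the running celery processes (the master process plus one per worker)
    ps -ef | grep "[c]elery" | wc -l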
|
||||
DB
|
||||
==
|
||||
|
||||
Show the Currently Configured Data Source
|
||||
-----------------------------------------
|
||||
|
||||
This varies based on the redash version and personal preferences. You
|
||||
can do one of the following:
|
||||
|
||||
Using the CLI
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
In ``/opt/redash/current``, run:
|
||||
``sudo -u redash bin/run ./manage.py ds list``
|
||||
|
||||
Using the Admin
|
||||
~~~~~~~~~~~~~~~
|
||||
|
||||
Available from version 0.6b797: browse to ``/admin/datasource``.
|
||||
|
||||
View the Definition Directly in the DB
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
1. Open psql: ``sudo -u redash psql``
|
||||
|
||||
2. Run the query: ``SELECT * from data_sources;``
|
||||
|
||||
Backup re:dash's DB:
|
||||
--------------------
|
||||
|
||||
``sudo -u redash pg_dump > backup_filename.sql``
|
||||
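A slightly more robust variant of the backup command (a sketch, assuming the ``redash`` PostgreSQL user and database created by the bootstrap script):

.. code:: bash

    # compressed, dated dump of the redash database
    sudo -u redash pg_dump redash | gzip > redash_backup_$(date +%Y%m%d).sql.gz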
|
||||
Version
|
||||
=======
|
||||
|
||||
See current version:
|
||||
|
||||
``bin/run ./manage.py version``
|
||||
74
docs/usage/mongodb_querying.rst
Normal file
@@ -0,0 +1,74 @@
|
||||
MongoDB: Querying
|
||||
#################
|
||||
|
||||
Simple query example:
|
||||
=====================
|
||||
|
||||
.. code:: json
|
||||
|
||||
{
|
||||
"collection" : "my_collection",
|
||||
"query" : {
|
||||
"date" : {
|
||||
"$gt" : "ISODate(\"2015-01-15 11:41\")",
|
||||
},
|
||||
"type" : 1
|
||||
},
|
||||
"fields" : {
|
||||
"_id" : 1,
|
||||
"name" : 2
|
||||
},
|
||||
"sort" : [
|
||||
{
|
||||
"name" : "date",
|
||||
"direction" : -1
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
Live example on the demo instance:
|
||||
http://demo.redash.io/queries/394/source.
|
||||
|
||||
Aggregation
|
||||
===========
|
||||
|
||||
Aggregation uses a syntax similar to the one used in PyMongo; however, to
support the correct order of sorting, it uses a regular list for the
"$sort" operation, which is converted into a SON (sorted dictionary)
object before execution.
|
||||
|
||||
Aggregation query example:
|
||||
|
||||
.. code:: json
|
||||
|
||||
{
|
||||
"collection" : "things",
|
||||
"aggregate" : [
|
||||
{
|
||||
"$unwind" : "$tags"
|
||||
},
|
||||
{
|
||||
"$group" : {
|
||||
"_id" : "$tags",
|
||||
"count" : { "$sum" : 1 }
|
||||
}
|
||||
},
|
||||
{
|
||||
"$sort" : [
|
||||
{
|
||||
"name" : "count",
|
||||
"direction" : -1
|
||||
},
|
||||
{
|
||||
"name" : "_id",
|
||||
"direction" : -1
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
Live examples on the demo instance:
|
||||
|
||||
1. http://demo.redash.io/queries/393/source
|
||||
2. http://demo.redash.io/queries/387/source
|
||||
39
docs/usage/users.rst
Normal file
@@ -0,0 +1,39 @@
|
||||
Users' Management
|
||||
#################
|
||||
|
||||
If you use Google OpenID authentication, then each user from the domains
|
||||
you allowed will automatically be logged in and have the default
|
||||
permissions.
|
||||
|
||||
If you want to give some users different permissions or you want to
create password-based users (make sure you enabled this option in the
settings first), you need to use the CLI (``manage.py``).
|
||||
|
||||
Create a new user
|
||||
=================
|
||||
|
||||
.. code:: bash
|
||||
|
||||
$ bin/run ./manage.py users create --help
|
||||
usage: users create [-h] [--permissions PERMISSIONS] [--password PASSWORD]
|
||||
[--google] [--admin]
|
||||
name email
|
||||
|
||||
positional arguments:
|
||||
name User's full name
|
||||
email User's email
|
||||
|
||||
optional arguments:
|
||||
-h, --help show this help message and exit
|
||||
--permissions PERMISSIONS
|
||||
Comma seperated list of permissions (leave blank for
|
||||
default).
|
||||
--password PASSWORD Password for users who don't use Google Auth (leave
|
||||
blank for prompt).
|
||||
--google user uses Google Auth to login
|
||||
--admin set user as admin
|
||||
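A concrete invocation based on the help output above (a sketch; run it from ``/opt/redash/current`` like the other examples, and omit ``--password`` to be prompted for one instead):

.. code:: bash

    sudo -u redash bin/run ./manage.py users create "John Doe" john@example.com --password "s3cret"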
|
||||
Grant admin permissions
|
||||
=======================
|
||||
|
||||
``sudo -u redash bin/run ./manage.py users grant_admin {email}``
|
||||
19
manage.py
@@ -2,12 +2,15 @@
|
||||
"""
|
||||
CLI to manage redash.
|
||||
"""
|
||||
import json
|
||||
|
||||
from flask.ext.script import Manager
|
||||
|
||||
from redash import settings, models, __version__
|
||||
from redash.wsgi import app
|
||||
from redash.import_export import import_manager
|
||||
from redash.cli import users, database, data_sources
|
||||
from redash.monitor import get_status
|
||||
|
||||
manager = Manager(app)
|
||||
manager.add_command("database", database.manager)
|
||||
@@ -21,6 +24,9 @@ def version():
|
||||
"""Displays re:dash version."""
|
||||
print __version__
|
||||
|
||||
@manager.command
|
||||
def status():
|
||||
print json.dumps(get_status(), indent=2)
|
||||
|
||||
@manager.command
|
||||
def runworkers():
|
||||
@@ -37,12 +43,15 @@ def make_shell_context():
|
||||
@manager.command
|
||||
def check_settings():
|
||||
"""Show the settings as re:dash sees them (useful for debugging)."""
|
||||
from types import ModuleType
|
||||
for name, item in settings.all_settings().iteritems():
|
||||
print "{} = {}".format(name, item)
|
||||
|
||||
for name in dir(settings):
|
||||
item = getattr(settings, name)
|
||||
if not callable(item) and not name.startswith("__") and not isinstance(item, ModuleType):
|
||||
print "{} = {}".format(name, item)
|
||||
@manager.command
|
||||
def send_test_mail():
|
||||
from redash import mail
|
||||
from flask_mail import Message
|
||||
|
||||
mail.send(Message(subject="Test Message from re:dash", recipients=[settings.MAIL_DEFAULT_SENDER], body="Test message."))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
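The new ``status`` command added in this diff is invoked the same way as the other management commands (sketch, assuming the ``/opt/redash`` layout used throughout the docs):

.. code:: bash

    cd /opt/redash/current
    sudo -u redash bin/run ./manage.py status   # prints the JSON returned by get_status()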
26
migrations/0005_add_updated_at.py
Normal file
@@ -0,0 +1,26 @@
|
||||
from playhouse.migrate import PostgresqlMigrator, migrate
|
||||
|
||||
from redash.models import db
|
||||
from redash import models
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = PostgresqlMigrator(db.database)
|
||||
|
||||
with db.database.transaction():
|
||||
migrate(
|
||||
migrator.add_column('queries', 'updated_at', models.Query.updated_at),
|
||||
migrator.add_column('dashboards', 'updated_at', models.Dashboard.updated_at),
|
||||
migrator.add_column('widgets', 'updated_at', models.Widget.updated_at),
|
||||
migrator.add_column('users', 'created_at', models.User.created_at),
|
||||
migrator.add_column('users', 'updated_at', models.User.updated_at),
|
||||
migrator.add_column('visualizations', 'created_at', models.Visualization.created_at),
|
||||
migrator.add_column('visualizations', 'updated_at', models.Visualization.updated_at)
|
||||
)
|
||||
|
||||
db.database.execute_sql("UPDATE queries SET updated_at = created_at;")
|
||||
db.database.execute_sql("UPDATE dashboards SET updated_at = created_at;")
|
||||
db.database.execute_sql("UPDATE widgets SET updated_at = created_at;")
|
||||
|
||||
db.close_db(None)
|
||||
|
||||
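These migration files are standalone scripts rather than part of a migration framework, so when upgrading by hand they are run one by one. A sketch of running the migration above (the Fabric upgrade script normally handles this, and the exact wrapper may differ between installs):

.. code:: bash

    cd /opt/redash/current
    sudo -u redash bin/run python migrations/0005_add_updated_at.py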
19
migrations/0006_queries_last_edit_by.py
Normal file
@@ -0,0 +1,19 @@
|
||||
from playhouse.migrate import PostgresqlMigrator, migrate
|
||||
|
||||
from redash.models import db
|
||||
from redash import models
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = PostgresqlMigrator(db.database)
|
||||
|
||||
with db.database.transaction():
|
||||
migrate(
|
||||
migrator.add_column('queries', 'last_modified_by_id', models.Query.last_modified_by)
|
||||
)
|
||||
|
||||
db.database.execute_sql("UPDATE queries SET last_modified_by_id = user_id;")
|
||||
|
||||
db.close_db(None)
|
||||
|
||||
|
||||
23
migrations/0007_add_schedule_to_queries.py
Normal file
@@ -0,0 +1,23 @@
|
||||
from playhouse.migrate import PostgresqlMigrator, migrate
|
||||
|
||||
from redash.models import db
|
||||
from redash import models
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = PostgresqlMigrator(db.database)
|
||||
|
||||
with db.database.transaction():
|
||||
migrate(
|
||||
migrator.add_column('queries', 'schedule', models.Query.schedule),
|
||||
)
|
||||
|
||||
db.database.execute_sql("UPDATE queries SET schedule = ttl WHERE ttl > 0;")
|
||||
|
||||
migrate(
|
||||
migrator.drop_column('queries', 'ttl')
|
||||
)
|
||||
|
||||
db.close_db(None)
|
||||
|
||||
|
||||
20
migrations/0008_make_ds_name_unique.py
Normal file
@@ -0,0 +1,20 @@
|
||||
from redash.models import db
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
|
||||
with db.database.transaction():
|
||||
# Make sure all data sources names are unique.
|
||||
db.database.execute_sql("""
|
||||
UPDATE data_sources
|
||||
SET name = new_names.name
|
||||
FROM (
|
||||
SELECT id, name || ' ' || id as name
|
||||
FROM (SELECT id, name, rank() OVER (PARTITION BY name ORDER BY created_at ASC) FROM data_sources) ds WHERE rank > 1
|
||||
) AS new_names
|
||||
WHERE data_sources.id = new_names.id;
|
||||
""")
|
||||
# Add unique constraint on data_sources.name.
|
||||
db.database.execute_sql("ALTER TABLE data_sources ADD CONSTRAINT unique_name UNIQUE (name);")
|
||||
|
||||
db.close_db(None)
|
||||
27
migrations/0009_add_api_key_to_user.py
Normal file
@@ -0,0 +1,27 @@
|
||||
from playhouse.migrate import PostgresqlMigrator, migrate
|
||||
|
||||
from redash.models import db
|
||||
from redash import models
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = PostgresqlMigrator(db.database)
|
||||
|
||||
with db.database.transaction():
|
||||
column = models.User.api_key
|
||||
column.null = True
|
||||
migrate(
|
||||
migrator.add_column('users', 'api_key', models.User.api_key),
|
||||
)
|
||||
|
||||
for user in models.User.select():
|
||||
user.save()
|
||||
|
||||
migrate(
|
||||
migrator.add_not_null('users', 'api_key')
|
||||
)
|
||||
|
||||
db.close_db(None)
|
||||
|
||||
|
||||
|
||||
18
migrations/0010_allow_deleting_datasources.py
Normal file
@@ -0,0 +1,18 @@
|
||||
from playhouse.migrate import PostgresqlMigrator, migrate
|
||||
|
||||
from redash.models import db
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = PostgresqlMigrator(db.database)
|
||||
|
||||
with db.database.transaction():
|
||||
migrate(
|
||||
migrator.drop_not_null('queries', 'data_source_id'),
|
||||
)
|
||||
|
||||
db.close_db(None)
|
||||
|
||||
|
||||
|
||||
|
||||
8
migrations/0010_create_alerts.py
Normal file
@@ -0,0 +1,8 @@
|
||||
from redash.models import db, Alert, AlertSubscription
|
||||
|
||||
if __name__ == '__main__':
|
||||
with db.database.transaction():
|
||||
Alert.create_table()
|
||||
AlertSubscription.create_table()
|
||||
|
||||
db.close_db(None)
|
||||
44
migrations/0011_migrate_bigquery_to_json.py
Normal file
@@ -0,0 +1,44 @@
|
||||
from base64 import b64encode
|
||||
import json
|
||||
from redash.models import DataSource
|
||||
|
||||
|
||||
def convert_p12_to_pem(p12file):
|
||||
from OpenSSL import crypto
|
||||
with open(p12file, 'rb') as f:
|
||||
p12 = crypto.load_pkcs12(f.read(), "notasecret")
|
||||
|
||||
return crypto.dump_privatekey(crypto.FILETYPE_PEM, p12.get_privatekey())
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
for ds in DataSource.all():
|
||||
|
||||
if ds.type == 'bigquery':
|
||||
options = json.loads(ds.options)
|
||||
|
||||
if 'jsonKeyFile' in options:
|
||||
continue
|
||||
|
||||
new_options = {
|
||||
'projectId': options['projectId'],
|
||||
'jsonKeyFile': b64encode(json.dumps({
|
||||
'client_email': options['serviceAccount'],
|
||||
'private_key': convert_p12_to_pem(options['privateKey'])
|
||||
}))
|
||||
}
|
||||
|
||||
ds.options = json.dumps(new_options)
|
||||
ds.save()
|
||||
elif ds.type == 'google_spreadsheets':
|
||||
options = json.loads(ds.options)
|
||||
if 'jsonKeyFile' in options:
|
||||
continue
|
||||
|
||||
with open(options['credentialsFilePath']) as f:
|
||||
new_options = {
|
||||
'jsonKeyFile': b64encode(f.read())
|
||||
}
|
||||
|
||||
ds.options = json.dumps(new_options)
|
||||
ds.save()
|
||||
@@ -19,6 +19,7 @@
|
||||
"trailing": true,
|
||||
"smarttabs": true,
|
||||
"globals": {
|
||||
"angular": false
|
||||
"angular": false,
|
||||
"_": false
|
||||
}
|
||||
}
|
||||
|
||||
BIN
rd_ui/app/images/favicon-16x16.png
Executable file
Binary file not shown.
|
After Width: | Height: | Size: 1.3 KiB |
BIN
rd_ui/app/images/favicon-32x32.png
Executable file
Binary file not shown.
|
After Width: | Height: | Size: 2.0 KiB |
BIN
rd_ui/app/images/favicon-96x96.png
Executable file
Binary file not shown.
|
After Width: | Height: | Size: 3.8 KiB |
BIN
rd_ui/app/images/redash_icon_small.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 6.0 KiB |
@@ -18,8 +18,15 @@
|
||||
<link rel="stylesheet" href="/bower_components/angular-ui-select/dist/select.css">
|
||||
<link rel="stylesheet" href="/bower_components/pace/themes/pace-theme-minimal.css">
|
||||
<link rel="stylesheet" href="/bower_components/font-awesome/css/font-awesome.css">
|
||||
<link rel="stylesheet" href="/bower_components/codemirror/addon/hint/show-hint.css">
|
||||
<link rel="stylesheet" href="/bower_components/leaflet/dist/leaflet.css">
|
||||
<link rel="stylesheet" href="/styles/redash.css">
|
||||
<!-- endbuild -->
|
||||
|
||||
<link rel="icon" type="image/png" sizes="32x32" href="/images/favicon-32x32.png">
|
||||
<link rel="icon" type="image/png" sizes="96x96" href="/images/favicon-96x96.png">
|
||||
<link rel="icon" type="image/png" sizes="16x16" href="/images/favicon-16x16.png">
|
||||
|
||||
</head>
|
||||
<body>
|
||||
<div growl></div>
|
||||
@@ -33,15 +40,15 @@
|
||||
<span class="icon-bar"></span>
|
||||
<span class="icon-bar"></span>
|
||||
</button>
|
||||
<a class="navbar-brand" href="/"><strong>{{name}}</strong></a>
|
||||
<a class="navbar-brand" href="/"><img src="/images/redash_icon_small.png"/></a>
|
||||
</div>
|
||||
{% raw %}
|
||||
<div class="collapse navbar-collapse navbar-ex1-collapse">
|
||||
<ul class="nav navbar-nav">
|
||||
<li class="active" ng-show="pageTitle"><a class="page-title" ng-bind="pageTitle"></a></li>
|
||||
<li class="dropdown" ng-show="groupedDashboards.length > 0 || otherDashboards.length > 0 || currentUser.hasPermission('create_dashboard')">
|
||||
<a href="#" class="dropdown-toggle" data-toggle="dropdown"><span class="glyphicon glyphicon-th-large"></span> <b class="caret"></b></a>
|
||||
<ul class="dropdown-menu">
|
||||
<li class="dropdown" ng-show="groupedDashboards.length > 0 || otherDashboards.length > 0 || currentUser.hasPermission('create_dashboard')" dropdown>
|
||||
<a href="#" class="dropdown-toggle" dropdown-toggle><span class="glyphicon glyphicon-th-large"></span> <b class="caret"></b></a>
|
||||
<ul class="dropdown-menu" dropdown-menu>
|
||||
<span ng-repeat="(name, group) in groupedDashboards">
|
||||
<li class="dropdown-submenu">
|
||||
<a href="#" ng-bind="name"></a>
|
||||
@@ -59,13 +66,19 @@
|
||||
<li><a data-toggle="modal" href="#new_dashboard_dialog" ng-show="currentUser.hasPermission('create_dashboard')">New Dashboard</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<li class="dropdown" ng-show="currentUser.hasPermission('view_query')">
|
||||
<a href="#" class="dropdown-toggle" data-toggle="dropdown">Queries <b class="caret"></b></a>
|
||||
<ul class="dropdown-menu">
|
||||
<li class="dropdown" ng-show="currentUser.hasPermission('view_query')" dropdown>
|
||||
<a href="#" class="dropdown-toggle" dropdown-toggle>Queries <b class="caret"></b></a>
|
||||
<ul class="dropdown-menu" dropdown-menu>
|
||||
<li ng-show="currentUser.hasPermission('create_query')"><a href="/queries/new">New Query</a></li>
|
||||
<li><a href="/queries">Queries</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<li>
|
||||
<a href="/alerts">Alerts</a>
|
||||
</li>
|
||||
<li ng-show="currentUser.hasPermission('admin')">
|
||||
<a href="/data_sources">Data Sources</a>
|
||||
</li>
|
||||
</ul>
|
||||
<form class="navbar-form navbar-left" role="search" ng-submit="searchQueries()">
|
||||
<div class="form-group">
|
||||
@@ -105,9 +118,11 @@
|
||||
<script src="/bower_components/codemirror/lib/codemirror.js"></script>
|
||||
<script src="/bower_components/codemirror/addon/edit/matchbrackets.js"></script>
|
||||
<script src="/bower_components/codemirror/addon/edit/closebrackets.js"></script>
|
||||
<script src="/bower_components/codemirror/addon/hint/show-hint.js"></script>
|
||||
<script src="/bower_components/codemirror/addon/hint/anyword-hint.js"></script>
|
||||
<script src="/bower_components/codemirror/mode/sql/sql.js"></script>
|
||||
<script src="/bower_components/codemirror/mode/python/python.js"></script>
|
||||
<script src="/bower_components/codemirror/mode/javascript/javascript.js"></script>
|
||||
<script src="/bower_components/angular-ui-codemirror/ui-codemirror.js"></script>
|
||||
<script src="/bower_components/highcharts/highcharts.js"></script>
|
||||
<script src="/bower_components/highcharts/modules/exporting.js"></script>
|
||||
<script src="/bower_components/gridster/dist/jquery.gridster.js"></script>
|
||||
@@ -121,15 +136,17 @@
|
||||
<script src="/bower_components/angular-ui-select/dist/select.js"></script>
|
||||
<script src="/bower_components/underscore.string/lib/underscore.string.js"></script>
|
||||
<script src="/bower_components/marked/lib/marked.js"></script>
|
||||
<script src="/bower_components/angular-base64-upload/dist/angular-base64-upload.js"></script>
|
||||
<script src="/scripts/ng_highchart.js"></script>
|
||||
<script src="/scripts/ng_smart_table.js"></script>
|
||||
<script src="/scripts/ui-bootstrap-tpls-0.5.0.min.js"></script>
|
||||
<script src="/bower_components/angular-ui-bootstrap-bower/ui-bootstrap-tpls.js"></script>
|
||||
<script src="/bower_components/bucky/bucky.js"></script>
|
||||
<script src="/bower_components/pace/pace.js"></script>
|
||||
<script src="/bower_components/mustache/mustache.js"></script>
|
||||
<script src="/bower_components/canvg/rgbcolor.js"></script>
|
||||
<script src="/bower_components/canvg/StackBlur.js"></script>
|
||||
<script src="/bower_components/canvg/canvg.js"></script>
|
||||
<script src="/bower_components/canvg/canvg.js"></script>
|
||||
<script src="/bower_components/leaflet/dist/leaflet.js"></script>
|
||||
<!-- endbuild -->
|
||||
|
||||
<!-- build:js({.tmp,app}) /scripts/scripts.js -->
|
||||
@@ -141,18 +158,22 @@
|
||||
<script src="/scripts/controllers/controllers.js"></script>
|
||||
<script src="/scripts/controllers/dashboard.js"></script>
|
||||
<script src="/scripts/controllers/admin_controllers.js"></script>
|
||||
<script src="/scripts/controllers/data_sources.js"></script>
|
||||
<script src="/scripts/controllers/query_view.js"></script>
|
||||
<script src="/scripts/controllers/query_source.js"></script>
|
||||
<script src="/scripts/visualizations/base.js"></script>
|
||||
<script src="/scripts/visualizations/chart.js"></script>
|
||||
<script src="/scripts/visualizations/cohort.js"></script>
|
||||
<script src="/scripts/visualizations/map.js"></script>
|
||||
<script src="/scripts/visualizations/counter.js"></script>
|
||||
<script src="/scripts/visualizations/table.js"></script>
|
||||
<script src="/scripts/visualizations/pivot.js"></script>
|
||||
<script src="/scripts/directives/directives.js"></script>
|
||||
<script src="/scripts/directives/query_directives.js"></script>
|
||||
<script src="/scripts/directives/data_source_directives.js"></script>
|
||||
<script src="/scripts/directives/dashboard_directives.js"></script>
|
||||
<script src="/scripts/filters.js"></script>
|
||||
<script src="/scripts/controllers/alerts.js"></script>
|
||||
<!-- endbuild -->
|
||||
|
||||
<script>
|
||||
@@ -167,7 +188,7 @@
|
||||
|
||||
currentUser.hasPermission = function(permission) {
|
||||
return this.permissions.indexOf(permission) != -1;
|
||||
}
|
||||
};
|
||||
|
||||
{{ analytics|safe }}
|
||||
</script>
|
||||
|
||||
@@ -13,6 +13,10 @@
|
||||
<link rel="stylesheet" href="/styles/redash.css">
|
||||
<link rel="stylesheet" href="/styles/login.css">
|
||||
<!-- endbuild -->
|
||||
|
||||
<link rel="icon" type="image/png" sizes="32x32" href="/images/favicon-32x32.png">
|
||||
<link rel="icon" type="image/png" sizes="96x96" href="/images/favicon-96x96.png">
|
||||
<link rel="icon" type="image/png" sizes="16x16" href="/images/favicon-16x16.png">
|
||||
</head>
|
||||
<body>
|
||||
|
||||
@@ -26,13 +30,20 @@
|
||||
<span class="icon-bar"></span>
|
||||
<span class="icon-bar"></span>
|
||||
</button>
|
||||
<a class="navbar-brand" href="/"><strong>{{name}}</strong></a>
|
||||
<a class="navbar-brand" href="/"><img src="/images/redash_icon_small.png"/></a>
|
||||
</div>
|
||||
</div>
|
||||
</nav>
|
||||
|
||||
<div class="container">
|
||||
<div class="row">
|
||||
{% with messages = get_flashed_messages() %}
|
||||
{% if messages %}
|
||||
{% for message in messages %}
|
||||
<div class="alert alert-warning" role="alert">{{ message }}</div>
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{% endwith %}
|
||||
|
||||
<div class="main">
|
||||
{% if show_google_openid %}
|
||||
@@ -48,6 +59,19 @@
|
||||
|
||||
{% endif %}
|
||||
|
||||
{% if show_saml_login %}
|
||||
|
||||
<div class="row">
|
||||
<a href="/saml/login">SAML Login</a>
|
||||
</div>
|
||||
|
||||
<div class="login-or">
|
||||
<hr class="hr-or">
|
||||
<span class="span-or">or</span>
|
||||
</div>
|
||||
|
||||
{% endif %}
|
||||
|
||||
<form role="form" method="post" name="login">
|
||||
<div class="form-group">
|
||||
<label for="inputUsernameEmail">Username or email</label>
|
||||
|
||||
@@ -6,7 +6,6 @@ angular.module('redash', [
|
||||
'redash.services',
|
||||
'redash.renderers',
|
||||
'redash.visualization',
|
||||
'ui.codemirror',
|
||||
'highchart',
|
||||
'ui.select2',
|
||||
'angular-growl',
|
||||
@@ -15,7 +14,8 @@ angular.module('redash', [
|
||||
'smartTable.table',
|
||||
'ngResource',
|
||||
'ngRoute',
|
||||
'ui.select'
|
||||
'ui.select',
|
||||
'naif.base64'
|
||||
]).config(['$routeProvider', '$locationProvider', '$compileProvider', 'growlProvider',
|
||||
function ($routeProvider, $locationProvider, $compileProvider, growlProvider) {
|
||||
if (featureFlags.clientSideMetrics) {
|
||||
@@ -81,9 +81,23 @@ angular.module('redash', [
|
||||
templateUrl: '/views/admin_status.html',
|
||||
controller: 'AdminStatusCtrl'
|
||||
});
|
||||
$routeProvider.when('/admin/workers', {
|
||||
templateUrl: '/views/admin_workers.html',
|
||||
controller: 'AdminWorkersCtrl'
|
||||
|
||||
$routeProvider.when('/alerts', {
|
||||
templateUrl: '/views/alerts/list.html',
|
||||
controller: 'AlertsCtrl'
|
||||
});
|
||||
$routeProvider.when('/alerts/:alertId', {
|
||||
templateUrl: '/views/alerts/edit.html',
|
||||
controller: 'AlertCtrl'
|
||||
});
|
||||
|
||||
$routeProvider.when('/data_sources/:dataSourceId', {
|
||||
templateUrl: '/views/data_sources/edit.html',
|
||||
controller: 'DataSourceCtrl'
|
||||
});
|
||||
$routeProvider.when('/data_sources', {
|
||||
templateUrl: '/views/data_sources/list.html',
|
||||
controller: 'DataSourcesCtrl'
|
||||
});
|
||||
|
||||
$routeProvider.when('/', {
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
};
|
||||
|
||||
refresh();
|
||||
}
|
||||
};
|
||||
|
||||
angular.module('redash.admin_controllers', [])
|
||||
.controller('AdminStatusCtrl', ['$scope', 'Events', '$http', '$timeout', AdminStatusCtrl])
|
||||
|
||||
174
rd_ui/app/scripts/controllers/alerts.js
Normal file
@@ -0,0 +1,174 @@
|
||||
(function() {
|
||||
|
||||
var AlertsCtrl = function($scope, Events, Alert) {
|
||||
Events.record(currentUser, "view", "page", "alerts");
|
||||
$scope.$parent.pageTitle = "Alerts";
|
||||
|
||||
$scope.alerts = []
|
||||
Alert.query(function(alerts) {
|
||||
var stateClass = {
|
||||
'ok': 'label label-success',
|
||||
'triggered': 'label label-danger',
|
||||
'unknown': 'label label-warning'
|
||||
};
|
||||
_.each(alerts, function(alert) {
|
||||
alert.class = stateClass[alert.state];
|
||||
})
|
||||
$scope.alerts = alerts;
|
||||
|
||||
});
|
||||
|
||||
$scope.gridConfig = {
|
||||
isPaginationEnabled: true,
|
||||
itemsByPage: 50,
|
||||
maxSize: 8,
|
||||
};
|
||||
|
||||
|
||||
$scope.gridColumns = [
|
||||
{
|
||||
"label": "Name",
|
||||
"map": "name",
|
||||
"cellTemplate": '<a href="/alerts/{{dataRow.id}}">{{dataRow.name}}</a> (<a href="/queries/{{dataRow.query.id}}">query</a>)'
|
||||
},
|
||||
{
|
||||
'label': 'Created By',
|
||||
'map': 'user.name'
|
||||
},
|
||||
{
|
||||
'label': 'State',
|
||||
'cellTemplate': '<span ng-class="dataRow.class">{{dataRow.state | uppercase}}</span> since <span am-time-ago="dataRow.updated_at"></span>'
|
||||
},
|
||||
{
|
||||
'label': 'Created At',
|
||||
'cellTemplate': '<span am-time-ago="dataRow.created_at"></span>'
|
||||
}
|
||||
];
|
||||
};
|
||||
|
||||
var AlertCtrl = function($scope, $routeParams, $location, growl, Query, Events, Alert) {
|
||||
$scope.$parent.pageTitle = "Alerts";
|
||||
|
||||
$scope.alertId = $routeParams.alertId;
|
||||
if ($scope.alertId === "new") {
|
||||
Events.record(currentUser, 'view', 'page', 'alerts/new');
|
||||
} else {
|
||||
Events.record(currentUser, 'view', 'alert', $scope.alertId);
|
||||
}
|
||||
|
||||
$scope.onQuerySelected = function(item) {
|
||||
$scope.selectedQuery = item;
|
||||
item.getQueryResultPromise().then(function(result) {
|
||||
$scope.queryResult = result;
|
||||
$scope.alert.options.column = $scope.alert.options.column || result.getColumnNames()[0];
|
||||
});
|
||||
};
|
||||
|
||||
if ($scope.alertId === "new") {
|
||||
$scope.alert = new Alert({options: {}});
|
||||
} else {
|
||||
$scope.alert = Alert.get({id: $scope.alertId}, function(alert) {
|
||||
$scope.onQuerySelected(new Query($scope.alert.query));
|
||||
});
|
||||
}
|
||||
|
||||
$scope.ops = ['greater than', 'less than', 'equals'];
|
||||
$scope.selectedQuery = null;
|
||||
|
||||
$scope.getDefaultName = function() {
|
||||
if (!$scope.alert.query) {
|
||||
return undefined;
|
||||
}
|
||||
return _.template("<%= query.name %>: <%= options.column %> <%= options.op %> <%= options.value %>", $scope.alert);
|
||||
};
|
||||
|
||||
$scope.searchQueries = function (term) {
|
||||
if (!term || term.length < 3) {
|
||||
return;
|
||||
}
|
||||
|
||||
Query.search({q: term}, function(results) {
|
||||
$scope.queries = results;
|
||||
});
|
||||
};
|
||||
|
||||
$scope.saveChanges = function() {
|
||||
if ($scope.alert.name === undefined || $scope.alert.name === '') {
|
||||
$scope.alert.name = $scope.getDefaultName();
|
||||
}
|
||||
|
||||
$scope.alert.$save(function(alert) {
|
||||
growl.addSuccessMessage("Saved.");
|
||||
if ($scope.alertId === "new") {
|
||||
$location.path('/alerts/' + alert.id).replace();
|
||||
}
|
||||
}, function() {
|
||||
growl.addErrorMessage("Failed saving alert.");
|
||||
});
|
||||
};
|
||||
};
|
||||
|
||||
angular.module('redash.directives').directive('alertSubscribers', ['AlertSubscription', function (AlertSubscription) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
replace: true,
|
||||
templateUrl: '/views/alerts/subscribers.html',
|
||||
scope: {
|
||||
'alertId': '='
|
||||
},
|
||||
controller: function ($scope) {
|
||||
$scope.subscribers = AlertSubscription.query({alertId: $scope.alertId});
|
||||
}
|
||||
}
|
||||
}]);
|
||||
|
||||
angular.module('redash.directives').directive('subscribeButton', ['AlertSubscription', 'growl', function (AlertSubscription, growl) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
replace: true,
|
||||
template: '<button class="btn btn-default btn-xs" ng-click="toggleSubscription()"><i ng-class="class"></i></button>',
|
||||
controller: function ($scope) {
|
||||
var updateClass = function() {
|
||||
if ($scope.subscription) {
|
||||
$scope.class = "fa fa-eye-slash";
|
||||
} else {
|
||||
$scope.class = "fa fa-eye";
|
||||
}
|
||||
}
|
||||
|
||||
$scope.subscribers.$promise.then(function() {
|
||||
$scope.subscription = _.find($scope.subscribers, function(subscription) {
|
||||
return (subscription.user.email == currentUser.email);
|
||||
});
|
||||
|
||||
updateClass();
|
||||
});
|
||||
|
||||
$scope.toggleSubscription = function() {
|
||||
if ($scope.subscription) {
|
||||
$scope.subscription.$delete(function() {
|
||||
$scope.subscribers = _.without($scope.subscribers, $scope.subscription);
|
||||
$scope.subscription = undefined;
|
||||
updateClass();
|
||||
}, function() {
|
||||
growl.addErrorMessage("Failed saving subscription.");
|
||||
});
|
||||
} else {
|
||||
$scope.subscription = new AlertSubscription({alert_id: $scope.alertId});
|
||||
$scope.subscription.$save(function() {
|
||||
$scope.subscribers.push($scope.subscription);
|
||||
updateClass();
|
||||
}, function() {
|
||||
growl.addErrorMessage("Unsubscription failed.");
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}]);
|
||||
|
||||
angular.module('redash.controllers')
|
||||
.controller('AlertsCtrl', ['$scope', 'Events', 'Alert', AlertsCtrl])
|
||||
.controller('AlertCtrl', ['$scope', '$routeParams', '$location', 'growl', 'Query', 'Events', 'Alert', AlertCtrl])
|
||||
|
||||
})();
|
||||
@@ -1,4 +1,11 @@
|
||||
(function () {
|
||||
var dateFormatter = function (value) {
|
||||
if (!value) {
|
||||
return "-";
|
||||
}
|
||||
return value.toDate().toLocaleString();
|
||||
};
|
||||
|
||||
var QuerySearchCtrl = function($scope, $location, $filter, Events, Query) {
|
||||
$scope.$parent.pageTitle = "Queries Search";
|
||||
|
||||
@@ -8,11 +15,6 @@
|
||||
maxSize: 8,
|
||||
};
|
||||
|
||||
var dateFormatter = function (value) {
|
||||
if (!value) return "-";
|
||||
return value.format("DD/MM/YY HH:mm");
|
||||
}
|
||||
|
||||
$scope.gridColumns = [
|
||||
{
|
||||
"label": "Name",
|
||||
@@ -30,9 +32,9 @@
|
||||
},
|
||||
{
|
||||
'label': 'Update Schedule',
|
||||
'map': 'ttl',
|
||||
'map': 'schedule',
|
||||
'formatFunction': function (value) {
|
||||
return $filter('refreshRateHumanize')(value);
|
||||
return $filter('scheduleHumanize')(value);
|
||||
}
|
||||
}
|
||||
];
|
||||
@@ -70,11 +72,6 @@
|
||||
$scope.allQueries = [];
|
||||
$scope.queries = [];
|
||||
|
||||
var dateFormatter = function (value) {
|
||||
if (!value) return "-";
|
||||
return value.format("DD/MM/YY HH:mm");
|
||||
}
|
||||
|
||||
var filterQueries = function () {
|
||||
$scope.queries = _.filter($scope.allQueries, function (query) {
|
||||
if (!$scope.selectedTab) {
|
||||
@@ -130,9 +127,9 @@
|
||||
},
|
||||
{
|
||||
'label': 'Update Schedule',
|
||||
'map': 'ttl',
|
||||
'map': 'schedule',
|
||||
'formatFunction': function (value) {
|
||||
return $filter('refreshRateHumanize')(value);
|
||||
return $filter('scheduleHumanize')(value);
|
||||
}
|
||||
}
|
||||
]
|
||||
|
||||
@@ -100,9 +100,13 @@
|
||||
Events.record(currentUser, "autorefresh", "dashboard", dashboard.id, {'enable': $scope.refreshEnabled});
|
||||
|
||||
if ($scope.refreshEnabled) {
|
||||
var refreshRate = _.min(_.flatten($scope.dashboard.widgets), function(widget) {
|
||||
return widget.visualization.query.ttl;
|
||||
}).visualization.query.ttl;
|
||||
var refreshRate = _.min(_.map(_.flatten($scope.dashboard.widgets), function(widget) {
|
||||
var schedule = widget.visualization.query.schedule;
|
||||
if (schedule === null || schedule.match(/\d\d:\d\d/) !== null) {
|
||||
return 60;
|
||||
}
|
||||
return widget.visualization.query.schedule;
|
||||
}));
|
||||
|
||||
$scope.refreshRate = _.max([120, refreshRate * 2]) * 1000;
|
||||
|
||||
@@ -138,7 +142,6 @@
|
||||
var parameters = Query.collectParamsFromQueryString($location, $scope.query);
|
||||
var maxAge = $location.search()['maxAge'];
|
||||
$scope.queryResult = $scope.query.getQueryResult(maxAge, parameters);
|
||||
$scope.nextUpdateTime = moment(new Date(($scope.query.updated_at + $scope.query.ttl + $scope.query.runtime + 300) * 1000)).fromNow();
|
||||
|
||||
$scope.type = 'visualization';
|
||||
} else {
|
||||
|
||||
47
rd_ui/app/scripts/controllers/data_sources.js
Normal file
@@ -0,0 +1,47 @@
|
||||
(function () {
|
||||
var DataSourcesCtrl = function ($scope, $location, growl, Events, DataSource) {
|
||||
Events.record(currentUser, "view", "page", "admin/data_sources");
|
||||
$scope.$parent.pageTitle = "Data Sources";
|
||||
|
||||
$scope.dataSources = DataSource.query();
|
||||
|
||||
$scope.openDataSource = function(datasource) {
|
||||
$location.path('/data_sources/' + datasource.id);
|
||||
};
|
||||
|
||||
$scope.deleteDataSource = function(event, datasource) {
|
||||
event.stopPropagation();
|
||||
Events.record(currentUser, "delete", "datasource", datasource.id);
|
||||
datasource.$delete(function(resource) {
|
||||
growl.addSuccessMessage("Data source deleted succesfully.");
|
||||
this.$parent.dataSources = _.without(this.dataSources, resource);
|
||||
}.bind(this), function(httpResponse) {
|
||||
console.log("Failed to delete data source: ", httpResponse.status, httpResponse.statusText, httpResponse.data);
|
||||
growl.addErrorMessage("Failed to delete data source.");
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
var DataSourceCtrl = function ($scope, $routeParams, $http, $location, Events, DataSource) {
|
||||
Events.record(currentUser, "view", "page", "admin/data_source");
|
||||
$scope.$parent.pageTitle = "Data Sources";
|
||||
|
||||
$scope.dataSourceId = $routeParams.dataSourceId;
|
||||
|
||||
if ($scope.dataSourceId == "new") {
|
||||
$scope.dataSource = new DataSource({options: {}});
|
||||
} else {
|
||||
$scope.dataSource = DataSource.get({id: $routeParams.dataSourceId});
|
||||
}
|
||||
|
||||
$scope.$watch('dataSource.id', function(id) {
|
||||
if (id != $scope.dataSourceId && id !== undefined) {
|
||||
$location.path('/data_sources/' + id).replace();
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
angular.module('redash.controllers')
|
||||
.controller('DataSourcesCtrl', ['$scope', '$location', 'growl', 'Events', 'DataSource', DataSourcesCtrl])
|
||||
.controller('DataSourceCtrl', ['$scope', '$routeParams', '$http', '$location', 'Events', 'DataSource', DataSourceCtrl])
|
||||
})();
|
||||
@@ -17,7 +17,7 @@
|
||||
saveQuery = $scope.saveQuery;
|
||||
|
||||
$scope.sourceMode = true;
|
||||
$scope.canEdit = currentUser.canEdit($scope.query);
|
||||
$scope.canEdit = true;
|
||||
$scope.isDirty = false;
|
||||
|
||||
$scope.newVisualization = undefined;
|
||||
@@ -68,7 +68,7 @@
|
||||
$scope.duplicateQuery = function() {
|
||||
Events.record(currentUser, 'fork', 'query', $scope.query.id);
|
||||
$scope.query.id = null;
|
||||
$scope.query.ttl = -1;
|
||||
$scope.query.schedule = null;
|
||||
|
||||
$scope.saveQuery({
|
||||
successMessage: 'Query forked',
|
||||
|
||||
@@ -1,33 +1,67 @@
|
||||
(function() {
|
||||
'use strict';
|
||||
|
||||
function QueryViewCtrl($scope, Events, $route, $location, notifications, growl, Query, DataSource) {
|
||||
function QueryViewCtrl($scope, Events, $route, $location, notifications, growl, $modal, Query, DataSource) {
|
||||
var DEFAULT_TAB = 'table';
|
||||
|
||||
var getQueryResult = function(ttl) {
|
||||
var getQueryResult = function(maxAge) {
|
||||
// Collect params, and getQueryResult with params; getQueryResult merges it into the query
|
||||
var parameters = Query.collectParamsFromQueryString($location, $scope.query);
|
||||
if (ttl == undefined) {
|
||||
ttl = $location.search()['maxAge'];
|
||||
if (maxAge == undefined) {
|
||||
maxAge = $location.search()['maxAge'];
|
||||
}
|
||||
$scope.queryResult = $scope.query.getQueryResult(ttl, parameters);
|
||||
|
||||
if (maxAge == undefined) {
|
||||
maxAge = -1;
|
||||
}
|
||||
|
||||
$scope.showLog = false;
|
||||
$scope.queryResult = $scope.query.getQueryResult(maxAge, parameters);
|
||||
}
|
||||
|
||||
$scope.dataSource = {};
|
||||
$scope.query = $route.current.locals.query;
|
||||
|
||||
var updateSchema = function() {
|
||||
$scope.hasSchema = false;
|
||||
$scope.editorSize = "col-md-12";
|
||||
var dataSourceId = $scope.query.data_source_id || $scope.dataSources[0].id;
|
||||
DataSource.getSchema({id: dataSourceId}, function(data) {
|
||||
if (data && data.length > 0) {
|
||||
$scope.schema = data;
|
||||
_.each(data, function(table) {
|
||||
table.collapsed = true;
|
||||
});
|
||||
|
||||
$scope.editorSize = "col-md-9";
|
||||
$scope.hasSchema = true;
|
||||
} else {
|
||||
$scope.hasSchema = false;
|
||||
$scope.editorSize = "col-md-12";
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
Events.record(currentUser, 'view', 'query', $scope.query.id);
|
||||
getQueryResult();
|
||||
$scope.queryExecuting = false;
|
||||
|
||||
$scope.isQueryOwner = currentUser.id === $scope.query.user.id;
|
||||
$scope.isQueryOwner = (currentUser.id === $scope.query.user.id) || currentUser.hasPermission('admin');
|
||||
$scope.canViewSource = currentUser.hasPermission('view_source');
|
||||
|
||||
$scope.dataSources = DataSource.get(function(dataSources) {
|
||||
$scope.query.data_source_id = $scope.query.data_source_id || dataSources[0].id;
|
||||
$scope.dataSources = DataSource.query(function(dataSources) {
|
||||
updateSchema();
|
||||
|
||||
if ($scope.query.isNew()) {
|
||||
$scope.query.data_source_id = $scope.query.data_source_id || dataSources[0].id;
|
||||
$scope.dataSource = _.find(dataSources, function(ds) { return ds.id == $scope.query.data_source_id; });
|
||||
}
|
||||
});
|
||||
|
||||
// in view mode, latest dataset is always visible
|
||||
// source mode changes this behavior
|
||||
$scope.showDataset = true;
|
||||
$scope.showLog = false;
|
||||
|
||||
$scope.lockButton = function(lock) {
|
||||
$scope.queryExecuting = lock;
|
||||
@@ -70,6 +104,9 @@
|
||||
};
|
||||
|
||||
$scope.executeQuery = function() {
|
||||
if (!$scope.query.query) {
|
||||
return;
|
||||
}
|
||||
getQueryResult(0);
|
||||
$scope.lockButton(true);
|
||||
$scope.cancelling = false;
|
||||
@@ -81,24 +118,24 @@
|
||||
$scope.queryResult.cancelExecution();
|
||||
Events.record(currentUser, 'cancel_execute', 'query', $scope.query.id);
|
||||
};
|
||||
|
||||
|
||||
$scope.archiveQuery = function(options, data) {
|
||||
if (data) {
|
||||
data.id = $scope.query.id;
|
||||
} else {
|
||||
data = $scope.query;
|
||||
}
|
||||
|
||||
|
||||
$scope.isDirty = false;
|
||||
|
||||
|
||||
options = _.extend({}, {
|
||||
successMessage: 'Query archived',
|
||||
errorMessage: 'Query could not be archived'
|
||||
}, options);
|
||||
|
||||
|
||||
return Query.delete({id: data.id}, function() {
|
||||
$scope.query.is_archived = true;
|
||||
$scope.query.ttl = -1;
|
||||
$scope.query.schedule = null;
|
||||
growl.addSuccessMessage(options.successMessage);
|
||||
// This feels dirty.
|
||||
$('#archive-confirmation-modal').modal('hide');
|
||||
@@ -121,6 +158,8 @@
|
||||
});
|
||||
}
|
||||
|
||||
updateSchema();
|
||||
$scope.dataSource = _.find($scope.dataSources, function(ds) { return ds.id == $scope.query.data_source_id; });
|
||||
$scope.executeQuery();
|
||||
};
|
||||
|
||||
@@ -166,8 +205,34 @@
|
||||
if (status === 'done' || status === 'failed') {
|
||||
$scope.lockButton(false);
|
||||
}
|
||||
|
||||
if ($scope.queryResult.getLog() != null) {
|
||||
$scope.showLog = true;
|
||||
}
|
||||
});
|
||||
|
||||
$scope.openScheduleForm = function() {
|
||||
if (!$scope.isQueryOwner) {
|
||||
return;
|
||||
};
|
||||
|
||||
$modal.open({
|
||||
templateUrl: '/views/schedule_form.html',
|
||||
size: 'sm',
|
||||
scope: $scope,
|
||||
controller: ['$scope', '$modalInstance', function($scope, $modalInstance) {
|
||||
$scope.close = function() {
|
||||
$modalInstance.close();
|
||||
}
|
||||
if ($scope.query.hasDailySchedule()) {
|
||||
$scope.refreshType = 'daily';
|
||||
} else {
|
||||
$scope.refreshType = 'periodic';
|
||||
}
|
||||
}]
|
||||
});
|
||||
};
|
||||
|
||||
$scope.$watch(function() {
|
||||
return $location.hash()
|
||||
}, function(hash) {
|
||||
@@ -180,5 +245,5 @@
|
||||
|
||||
angular.module('redash.controllers')
|
||||
.controller('QueryViewCtrl',
|
||||
['$scope', 'Events', '$route', '$location', 'notifications', 'growl', 'Query', 'DataSource', QueryViewCtrl]);
|
||||
['$scope', 'Events', '$route', '$location', 'notifications', 'growl', '$modal', 'Query', 'DataSource', QueryViewCtrl]);
|
||||
})();
|
||||
|
||||
76
rd_ui/app/scripts/directives/data_source_directives.js
Normal file
@@ -0,0 +1,76 @@
(function () {
  'use strict';

  var directives = angular.module('redash.directives');

  // Angular strips data- from the directive, so data-source-form becomes sourceForm...
  directives.directive('sourceForm', ['$http', 'growl', function ($http, growl) {
    return {
      restrict: 'E',
      replace: true,
      templateUrl: '/views/data_sources/form.html',
      scope: {
        'dataSource': '='
      },
      link: function ($scope) {
        var setType = function(types) {
          if ($scope.dataSource.type === undefined) {
            $scope.dataSource.type = types[0].type;
            return types[0];
          }

          $scope.type = _.find(types, function (t) {
            return t.type == $scope.dataSource.type;
          });
        };

        $scope.files = {};

        $scope.$watchCollection('files', function() {
          _.each($scope.files, function(v, k) {
            if (v) {
              $scope.dataSource.options[k] = v.base64;
            }
          });
        });

        $http.get('/api/data_sources/types').success(function (types) {
          setType(types);

          $scope.dataSourceTypes = types;

          _.each(types, function (type) {
            _.each(type.configuration_schema.properties, function (prop, name) {
              if (name == 'password' || name == 'passwd') {
                prop.type = 'password';
              }

              if (_.string.endsWith(name, "File")) {
                prop.type = 'file';
              }

              prop.required = _.contains(type.configuration_schema.required, name);
            });
          });
        });

        $scope.$watch('dataSource.type', function(current, prev) {
          if (prev !== current) {
            if (prev !== undefined) {
              $scope.dataSource.options = {};
            }
            setType($scope.dataSourceTypes);
          }
        });

        $scope.saveChanges = function() {
          $scope.dataSource.$save(function() {
            growl.addSuccessMessage("Saved.");
          }, function() {
            growl.addErrorMessage("Failed saving.");
          });
        }
      }
    }
  }]);
})();
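For orientation, a minimal sketch of how this new directive is consumed. The template element is taken from the data_sources/edit.html view added later in this change set; the host controller below is hypothetical (its name, the route parameter, and the use of the DataSource factory are assumptions, not part of the diff):

// In a view, Angular normalizes the data- prefix, so this element is matched
// by the 'sourceForm' directive above:
//
//   <data-source-form data-data-source="dataSource" />
//
// Hypothetical host controller that places a dataSource on scope:
angular.module('redash.controllers').controller('DataSourceCtrl',
  ['$scope', '$routeParams', 'DataSource', function ($scope, $routeParams, DataSource) {
    // DataSource.get returns a single data source by id (see the resource actions later in this diff)
    $scope.dataSource = DataSource.get({id: $routeParams.dataSourceId});
  }]);
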
@@ -8,7 +8,7 @@
      'query': '=',
      'visualization': '=?'
    },
    template: '<a ng-href="{{link}}" class="query-link">{{query.name}}</a>',
    template: '<small><span class="glyphicon glyphicon-link"></span></small> <a ng-href="{{link}}" class="query-link">{{query.name}}</a>',
    link: function(scope, element) {
      scope.link = '/queries/' + scope.query.id;
      if (scope.visualization) {
@@ -29,7 +29,7 @@
    restrict: 'E',
    template: '<span ng-show="query.id && canViewSource">\
      <a ng-show="!sourceMode"\
        ng-href="{{query.id}}/source#{{selectedTab}}">Show Source\
        ng-href="/queries/{{query.id}}/source#{{selectedTab}}">Show Source\
      </a>\
      <a ng-show="sourceMode"\
        ng-href="/queries/{{query.id}}#{{selectedTab}}">Hide Source\
@@ -63,26 +63,97 @@
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
'query': '=',
|
||||
'lock': '='
|
||||
'lock': '=',
|
||||
'schema': '=',
|
||||
'syntax': '='
|
||||
},
|
||||
template: '<textarea\
|
||||
ui-codemirror="editorOptions"\
|
||||
ng-model="query.query">',
|
||||
link: function($scope) {
|
||||
$scope.editorOptions = {
|
||||
mode: 'text/x-sql',
|
||||
template: '<textarea></textarea>',
|
||||
link: {
|
||||
pre: function ($scope, element) {
|
||||
$scope.syntax = $scope.syntax || 'sql';
|
||||
|
||||
var modes = {
|
||||
'sql': 'text/x-sql',
|
||||
'python': 'text/x-python',
|
||||
'json': 'application/json'
|
||||
};
|
||||
|
||||
var textarea = element.children()[0];
|
||||
var editorOptions = {
|
||||
mode: modes[$scope.syntax],
|
||||
lineWrapping: true,
|
||||
lineNumbers: true,
|
||||
readOnly: false,
|
||||
matchBrackets: true,
|
||||
autoCloseBrackets: true
|
||||
};
|
||||
autoCloseBrackets: true,
|
||||
extraKeys: {"Ctrl-Space": "autocomplete"}
|
||||
};
|
||||
|
||||
$scope.$watch('lock', function(locked) {
|
||||
$scope.editorOptions.readOnly = locked ? 'nocursor' : false;
|
||||
});
|
||||
var additionalHints = [];
|
||||
|
||||
CodeMirror.commands.autocomplete = function(cm) {
|
||||
var hinter = function(editor, options) {
|
||||
var hints = CodeMirror.hint.anyword(editor, options);
|
||||
var cur = editor.getCursor(), token = editor.getTokenAt(cur).string;
|
||||
|
||||
hints.list = _.union(hints.list, _.filter(additionalHints, function (h) {
|
||||
return h.search(token) === 0;
|
||||
}));
|
||||
|
||||
return hints;
|
||||
};
|
||||
|
||||
// CodeMirror.showHint(cm, CodeMirror.hint.anyword);
|
||||
CodeMirror.showHint(cm, hinter);
|
||||
};
|
||||
|
||||
var codemirror = CodeMirror.fromTextArea(textarea, editorOptions);
|
||||
|
||||
codemirror.on('change', function(instance) {
|
||||
var newValue = instance.getValue();
|
||||
|
||||
if (newValue !== $scope.query.query) {
|
||||
$scope.$evalAsync(function() {
|
||||
$scope.query.query = newValue;
|
||||
});
|
||||
}
|
||||
|
||||
$('.schema-container').css('height', $('.CodeMirror').css('height'));
|
||||
});
|
||||
|
||||
$scope.$watch('query.query', function () {
|
||||
if ($scope.query.query !== codemirror.getValue()) {
|
||||
codemirror.setValue($scope.query.query);
|
||||
}
|
||||
});
|
||||
|
||||
$scope.$watch('schema', function (schema) {
|
||||
if (schema) {
|
||||
var keywords = [];
|
||||
_.each(schema, function (table) {
|
||||
keywords.push(table.name);
|
||||
_.each(table.columns, function (c) {
|
||||
keywords.push(c);
|
||||
});
|
||||
});
|
||||
|
||||
additionalHints = _.unique(keywords);
|
||||
}
|
||||
|
||||
codemirror.refresh();
|
||||
});
|
||||
|
||||
$scope.$watch('syntax', function(syntax) {
|
||||
codemirror.setOption('mode', modes[syntax]);
|
||||
});
|
||||
|
||||
$scope.$watch('lock', function (locked) {
|
||||
var readOnly = locked ? 'nocursor' : false;
|
||||
codemirror.setOption('readOnly', readOnly);
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function queryFormatter($http) {
|
||||
@@ -111,42 +182,98 @@
|
||||
}
|
||||
}
|
||||
|
||||
function queryTimePicker() {
|
||||
return {
|
||||
restrict: 'E',
|
||||
template: '<select ng-disabled="refreshType != \'daily\'" ng-model="hour" ng-change="updateSchedule()" ng-options="c as c for c in hourOptions"></select> :\
|
||||
<select ng-disabled="refreshType != \'daily\'" ng-model="minute" ng-change="updateSchedule()" ng-options="c as c for c in minuteOptions"></select>',
|
||||
link: function($scope) {
|
||||
var padWithZeros = function(size, v) {
|
||||
v = String(v);
|
||||
if (v.length < size) {
|
||||
v = "0" + v;
|
||||
}
|
||||
return v;
|
||||
};
|
||||
|
||||
$scope.hourOptions = _.map(_.range(0, 24), _.partial(padWithZeros, 2));
|
||||
$scope.minuteOptions = _.map(_.range(0, 60, 5), _.partial(padWithZeros, 2));
|
||||
|
||||
if ($scope.query.hasDailySchedule()) {
|
||||
var parts = $scope.query.scheduleInLocalTime().split(':');
|
||||
$scope.minute = parts[1];
|
||||
$scope.hour = parts[0];
|
||||
} else {
|
||||
$scope.minute = "15";
|
||||
$scope.hour = "00";
|
||||
}
|
||||
|
||||
$scope.updateSchedule = function() {
|
||||
var newSchedule = moment().hour($scope.hour).minute($scope.minute).utc().format('HH:mm');
|
||||
if (newSchedule != $scope.query.schedule) {
|
||||
$scope.query.schedule = newSchedule;
|
||||
$scope.saveQuery();
|
||||
}
|
||||
};
|
||||
|
||||
$scope.$watch('refreshType', function() {
|
||||
if ($scope.refreshType == 'daily') {
|
||||
$scope.updateSchedule();
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function queryRefreshSelect() {
|
||||
return {
|
||||
restrict: 'E',
|
||||
template: '<select\
|
||||
ng-disabled="!isQueryOwner"\
|
||||
ng-model="query.ttl"\
|
||||
ng-disabled="refreshType != \'periodic\'"\
|
||||
ng-model="query.schedule"\
|
||||
ng-change="saveQuery()"\
|
||||
ng-options="c.value as c.name for c in refreshOptions">\
|
||||
<option value="">No Refresh</option>\
|
||||
</select>',
|
||||
link: function($scope) {
|
||||
$scope.refreshOptions = [
|
||||
{
|
||||
value: -1,
|
||||
name: 'No Refresh'
|
||||
},
|
||||
{
|
||||
value: 60,
|
||||
value: "60",
|
||||
name: 'Every minute'
|
||||
},
|
||||
]
|
||||
}
|
||||
];
|
||||
|
||||
_.each([5, 10, 15, 30], function(i) {
|
||||
$scope.refreshOptions.push({
|
||||
value: String(i*60),
|
||||
name: "Every " + i + " minutes"
|
||||
})
|
||||
});
|
||||
|
||||
_.each(_.range(1, 13), function (i) {
|
||||
$scope.refreshOptions.push({
|
||||
value: i * 3600,
|
||||
value: String(i * 3600),
|
||||
name: 'Every ' + i + 'h'
|
||||
});
|
||||
})
|
||||
|
||||
$scope.refreshOptions.push({
|
||||
value: 24 * 3600,
|
||||
value: String(24 * 3600),
|
||||
name: 'Every 24h'
|
||||
});
|
||||
$scope.refreshOptions.push({
|
||||
value: 7 * 24 * 3600,
|
||||
value: String(7 * 24 * 3600),
|
||||
name: 'Once a week'
|
||||
});
|
||||
|
||||
$scope.$watch('refreshType', function() {
|
||||
if ($scope.refreshType == 'periodic') {
|
||||
if ($scope.query.hasDailySchedule()) {
|
||||
$scope.query.schedule = null;
|
||||
$scope.saveQuery();
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
@@ -158,5 +285,6 @@
|
||||
.directive('queryResultLink', queryResultCSVLink)
|
||||
.directive('queryEditor', queryEditor)
|
||||
.directive('queryRefreshSelect', queryRefreshSelect)
|
||||
.directive('queryTimePicker', queryTimePicker)
|
||||
.directive('queryFormatter', ['$http', queryFormatter]);
|
||||
})();
|
||||
@@ -24,13 +24,17 @@ angular.module('redash.filters', []).
    return durationHumanize;
  })

  .filter('refreshRateHumanize', function () {
    return function (ttl) {
      if (ttl == -1) {
  .filter('scheduleHumanize', function() {
    return function (schedule) {
      if (schedule === null) {
        return "Never";
      } else {
        return "Every " + durationHumanize(ttl);
      } else if (schedule.match(/\d\d:\d\d/) !== null) {
        var parts = schedule.split(':');
        var localTime = moment.utc().hour(parts[0]).minute(parts[1]).local().format('HH:mm');
        return "Every day at " + localTime;
      }

      return "Every " + durationHumanize(parseInt(schedule));
    }
  })

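To make the new filter's behavior concrete, here is an illustrative sketch of typical inputs and outputs. The schedule values follow the formats used elsewhere in this change set (null for no refresh, "HH:MM" for a daily schedule stored in UTC, or a number of seconds as a string); the exact wording for the last case comes from durationHumanize, which is defined elsewhere in this file:

// Illustrative only, not part of the diff:
var examples = [
  {schedule: null,    humanized: 'Never'},
  {schedule: '13:30', humanized: 'Every day at ' + '(13:30 UTC shown in local time)'},
  {schedule: '3600',  humanized: 'Every ' + '(whatever durationHumanize(3600) returns)'}
];

The filter is applied in the query view template further down as {{query.schedule | scheduleHumanize}}.
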
@@ -50,7 +50,7 @@
|
||||
;
|
||||
|
||||
if (moment.isMoment(this.x)) {
|
||||
var s = '<b>' + moment(this.x).format("DD/MM/YY HH:mm") + '</b>',
|
||||
var s = '<b>' + this.x.toDate().toLocaleString() + '</b>',
|
||||
pointsCount = this.points.length;
|
||||
|
||||
$.each(this.points, function (i, point) {
|
||||
@@ -145,7 +145,7 @@
|
||||
|
||||
if (!hasTotalsAlready) {
|
||||
this.addSeries({
|
||||
data: _.values(data),
|
||||
data: _.sortBy(_.values(data), 'x'),
|
||||
type: 'line',
|
||||
name: 'Total'
|
||||
}, false)
|
||||
@@ -308,21 +308,22 @@
|
||||
// We check either for true or undefined for backward compatibility.
|
||||
var series = scope.series;
|
||||
|
||||
if (chartOptions['sortX'] === true || chartOptions['sortX'] === undefined) {
|
||||
var seriesCopy = [];
|
||||
|
||||
_.each(series, function (s) {
|
||||
// make a copy of series data, so we don't override original.
|
||||
var fieldName = 'x';
|
||||
if (s.data.length > 0 && _.has(s.data[0], 'name')) {
|
||||
fieldName = 'name';
|
||||
};
|
||||
// If this is a chart that has just one row for multiple columns, sort
|
||||
// by the Y values. For example:
|
||||
//
|
||||
// A | B | C
|
||||
// 20 | 30 | 15
|
||||
//
|
||||
// Will be sorted:
|
||||
// C | A | B
|
||||
// 15 | 20 | 30
|
||||
var sortable = _.every(series, function(s) { return s.data.length == 1 });
|
||||
|
||||
var sorted = _.extend({}, s, {data: _.sortBy(s.data, fieldName)});
|
||||
seriesCopy.push(sorted);
|
||||
if (sortable) {
|
||||
series = _.sortBy(series, function (s) {
|
||||
return s.data[0].y
|
||||
});
|
||||
|
||||
series = seriesCopy;
|
||||
}
|
||||
|
||||
if (!('xAxis' in chartOptions && 'type' in chartOptions['xAxis'])) {
|
||||
@@ -359,6 +360,23 @@
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
if (chartOptions['sortX'] === true || chartOptions['sortX'] === undefined) {
|
||||
var seriesCopy = [];
|
||||
|
||||
_.each(series, function (s) {
|
||||
// make a copy of series data, so we don't override original.
|
||||
var fieldName = 'x';
|
||||
if (s.data.length > 0 && _.has(s.data[0], 'name')) {
|
||||
fieldName = 'name';
|
||||
};
|
||||
|
||||
var sorted = _.extend({}, s, {data: _.sortBy(s.data, fieldName)});
|
||||
seriesCopy.push(sorted);
|
||||
});
|
||||
|
||||
series = seriesCopy;
|
||||
}
|
||||
|
||||
scope.chart.counters.color = 0;
|
||||
|
||||
|
||||
File diff suppressed because it is too large
@@ -1,4 +1,28 @@
(function () {
  function QueryResultError(errorMessage) {
    this.errorMessage = errorMessage;
  }

  QueryResultError.prototype.getError = function() {
    return this.errorMessage;
  };

  QueryResultError.prototype.getStatus = function() {
    return 'failed';
  };

  QueryResultError.prototype.getData = function() {
    return null;
  };

  QueryResultError.prototype.getLog = function() {
    return null;
  };

  QueryResultError.prototype.getChartData = function() {
    return null;
  };

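QueryResultError deliberately mirrors the read-only getters of QueryResult (getError, getStatus, getData, getLog, getChartData), so callers can treat it like any failed result. A hedged sketch of that calling pattern follows; the query variable and the logging are illustrative, not part of the diff:

// getQueryResult() returns a QueryResultError when no data source is selected
// (see the change to Query.prototype.getQueryResult later in this file).
var result = query.getQueryResult();
if (result && result.getStatus() === 'failed') {
  console.log('Query failed: ' + result.getError());  // e.g. "Please select data source to run this query."
}
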
  var QueryResult = function ($resource, $timeout, $q) {
    var QueryResultResource = $resource('/api/query_results/:id', {id: '@id'}, {'post': {'method': 'POST'}});
    var Job = $resource('/api/jobs/:id', {id: '@id'});
@@ -12,6 +36,8 @@
|
||||
|
||||
var columnTypes = {};
|
||||
|
||||
// TODO: we should stop manipulating incoming data, and switch to relaying on the column type set by the backend.
|
||||
// This logic is prone to errors, and better be removed. Kept for now, for backward compatability.
|
||||
_.each(this.query_result.data.rows, function (row) {
|
||||
_.each(row, function (v, k) {
|
||||
if (angular.isNumber(v)) {
|
||||
@@ -30,7 +56,9 @@
|
||||
|
||||
_.each(this.query_result.data.columns, function(column) {
|
||||
if (columnTypes[column.name]) {
|
||||
column.type = columnTypes[column.name];
|
||||
if (column.type == null || column.type == 'string') {
|
||||
column.type = columnTypes[column.name];
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
@@ -40,7 +68,7 @@
|
||||
} else {
|
||||
this.status = undefined;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
function QueryResult(props) {
|
||||
this.deferred = $q.defer();
|
||||
@@ -91,6 +119,14 @@
|
||||
return this.job.error;
|
||||
}
|
||||
|
||||
QueryResult.prototype.getLog = function() {
|
||||
if (!this.query_result.data || !this.query_result.data.log || this.query_result.data.log.length == 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return this.query_result.data.log;
|
||||
}
|
||||
|
||||
QueryResult.prototype.getUpdatedAt = function () {
|
||||
return this.query_result.retrieved_at || this.job.updated_at * 1000.0 || this.updatedAt;
|
||||
}
|
||||
@@ -243,26 +279,9 @@
|
||||
return parts[0];
|
||||
};
|
||||
|
||||
var charConversionMap = {
|
||||
'__pct': /%/g,
|
||||
'_': / /g,
|
||||
'__qm': /\?/g,
|
||||
'__brkt': /[\(\)\[\]]/g,
|
||||
'__dash': /-/g,
|
||||
'__amp': /&/g,
|
||||
'__sl': /\//g,
|
||||
'__fsl': /\\/g,
|
||||
};
|
||||
|
||||
QueryResult.prototype.getColumnCleanName = function (column) {
|
||||
var name = this.getColumnNameWithoutType(column);
|
||||
|
||||
if (name != '') {
|
||||
_.each(charConversionMap, function(regex, replacement) {
|
||||
name = name.replace(regex, replacement);
|
||||
});
|
||||
}
|
||||
|
||||
return name;
|
||||
}
|
||||
|
||||
@@ -325,7 +344,7 @@
|
||||
this.filters = filters;
|
||||
}
|
||||
|
||||
var refreshStatus = function (queryResult, query, ttl) {
|
||||
var refreshStatus = function (queryResult, query) {
|
||||
Job.get({'id': queryResult.job.id}, function (response) {
|
||||
queryResult.update(response);
|
||||
|
||||
@@ -335,7 +354,7 @@
|
||||
});
|
||||
} else if (queryResult.getStatus() != "failed") {
|
||||
$timeout(function () {
|
||||
refreshStatus(queryResult, query, ttl);
|
||||
refreshStatus(queryResult, query);
|
||||
}, 3000);
|
||||
}
|
||||
})
|
||||
@@ -355,14 +374,19 @@
      return this.deferred.promise;
    }

    QueryResult.get = function (data_source_id, query, ttl) {
    QueryResult.get = function (data_source_id, query, maxAge, queryId) {
      var queryResult = new QueryResult();

      QueryResultResource.post({'data_source_id': data_source_id, 'query': query, 'ttl': ttl}, function (response) {
      var params = {'data_source_id': data_source_id, 'query': query, 'max_age': maxAge};
      if (queryId !== undefined) {
        params['query_id'] = queryId;
      };

      QueryResultResource.post(params, function (response) {
        queryResult.update(response);

        if ('job' in response) {
          refreshStatus(queryResult, query, ttl);
          refreshStatus(queryResult, query);
        }
      });

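A brief sketch of the reworked call after this change: the old ttl argument becomes maxAge (posted as max_age), and the owning query's id is passed along when the query has already been saved. The variables below are illustrative, and the caching interpretation of max_age is an assumption based on how getQueryResult uses it later in this file:

// maxAge of 0 skips any cached result and forces a new execution;
// query_id is attached only when the query already has an id.
var queryResult = QueryResult.get(query.data_source_id, queryText, 0, query.id);
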
@@ -390,7 +414,7 @@
|
||||
return new Query({
|
||||
query: "",
|
||||
name: "New Query",
|
||||
ttl: -1,
|
||||
schedule: null,
|
||||
user: currentUser
|
||||
});
|
||||
};
|
||||
@@ -414,11 +438,23 @@
|
||||
return '/queries/' + this.id + '/source';
|
||||
};
|
||||
|
||||
Query.prototype.getQueryResult = function (ttl, parameters) {
|
||||
if (ttl == undefined) {
|
||||
ttl = this.ttl;
|
||||
}
|
||||
Query.prototype.isNew = function() {
|
||||
return this.id === undefined;
|
||||
};
|
||||
|
||||
Query.prototype.hasDailySchedule = function() {
|
||||
return (this.schedule && this.schedule.match(/\d\d:\d\d/) !== null);
|
||||
};
|
||||
|
||||
Query.prototype.scheduleInLocalTime = function() {
|
||||
var parts = this.schedule.split(':');
|
||||
return moment.utc().hour(parts[0]).minute(parts[1]).local().format('HH:mm');
|
||||
};
|
||||
|
||||
Query.prototype.getQueryResult = function (maxAge, parameters) {
|
||||
if (!this.query) {
|
||||
return;
|
||||
}
|
||||
var queryText = this.query;
|
||||
|
||||
var queryParameters = this.getParameters();
|
||||
@@ -443,16 +479,18 @@
|
||||
this.latest_query_data_id = null;
|
||||
}
|
||||
|
||||
if (this.latest_query_data && ttl != 0) {
|
||||
if (this.latest_query_data && maxAge != 0) {
|
||||
if (!this.queryResult) {
|
||||
this.queryResult = new QueryResult({'query_result': this.latest_query_data});
|
||||
}
|
||||
} else if (this.latest_query_data_id && ttl != 0) {
|
||||
} else if (this.latest_query_data_id && maxAge != 0) {
|
||||
if (!this.queryResult) {
|
||||
this.queryResult = QueryResult.getById(this.latest_query_data_id);
|
||||
}
|
||||
} else if (this.data_source_id) {
|
||||
this.queryResult = QueryResult.get(this.data_source_id, queryText, ttl);
|
||||
this.queryResult = QueryResult.get(this.data_source_id, queryText, maxAge, this.id);
|
||||
} else {
|
||||
return new QueryResultError("Please select data source to run this query.");
|
||||
}
|
||||
|
||||
return this.queryResult;
|
||||
@@ -488,10 +526,42 @@
|
||||
|
||||
|
||||
var DataSource = function ($resource) {
|
||||
var DataSourceResource = $resource('/api/data_sources/:id', {id: '@id'}, {'get': {'method': 'GET', 'cache': true, 'isArray': true}});
|
||||
var actions = {
|
||||
'get': {'method': 'GET', 'cache': false, 'isArray': false},
|
||||
'query': {'method': 'GET', 'cache': false, 'isArray': true},
|
||||
'getSchema': {'method': 'GET', 'cache': true, 'isArray': true, 'url': '/api/data_sources/:id/schema'}
|
||||
};
|
||||
|
||||
var DataSourceResource = $resource('/api/data_sources/:id', {id: '@id'}, actions);
|
||||
|
||||
|
||||
return DataSourceResource;
|
||||
}
|
||||
};
|
||||
|
||||
var AlertSubscription = function ($resource) {
|
||||
var resource = $resource('/api/alerts/:alertId/subscriptions/:userId', {alertId: '@alert_id', userId: '@user.id'});
|
||||
return resource;
|
||||
};
|
||||
|
||||
var Alert = function ($resource, $http) {
|
||||
var actions = {
|
||||
save: {
|
||||
method: 'POST',
|
||||
transformRequest: [function(data) {
|
||||
var newData = _.extend({}, data);
|
||||
if (newData.query_id === undefined) {
|
||||
newData.query_id = newData.query.id;
|
||||
delete newData.query;
|
||||
}
|
||||
|
||||
return newData;
|
||||
}].concat($http.defaults.transformRequest)
|
||||
}
|
||||
};
|
||||
var resource = $resource('/api/alerts/:id', {id: '@id'}, actions);
|
||||
|
||||
return resource;
|
||||
};
|
||||
|
||||
var Widget = function ($resource, Query) {
|
||||
var WidgetResource = $resource('/api/widgets/:id', {id: '@id'});
|
||||
@@ -518,5 +588,7 @@
|
||||
.factory('QueryResult', ['$resource', '$timeout', '$q', QueryResult])
|
||||
.factory('Query', ['$resource', 'QueryResult', 'DataSource', Query])
|
||||
.factory('DataSource', ['$resource', DataSource])
|
||||
.factory('Alert', ['$resource', '$http', Alert])
|
||||
.factory('AlertSubscription', ['$resource', AlertSubscription])
|
||||
.factory('Widget', ['$resource', 'Query', Widget]);
|
||||
})();
|
||||
|
||||
File diff suppressed because one or more lines are too long
@@ -55,6 +55,22 @@
|
||||
}];
|
||||
};
|
||||
|
||||
var VisualizationName = function(Visualization) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
visualization: '='
|
||||
},
|
||||
template: '<small>{{name}}</small>',
|
||||
replace: false,
|
||||
link: function (scope) {
|
||||
if (Visualization.visualizations[scope.visualization.type].name != scope.visualization.name) {
|
||||
scope.name = scope.visualization.name;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var VisualizationRenderer = function ($location, Visualization) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
@@ -72,42 +88,9 @@
|
||||
width: '50%'
|
||||
};
|
||||
|
||||
function readURL() {
|
||||
var searchFilters = angular.fromJson($location.search().filters);
|
||||
if (searchFilters) {
|
||||
_.forEach(scope.filters, function(filter) {
|
||||
var value = searchFilters[filter.friendlyName];
|
||||
if (value) {
|
||||
filter.current = value;
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function updateURL(filters) {
|
||||
var current = {};
|
||||
_.each(filters, function(filter) {
|
||||
if (filter.current) {
|
||||
current[filter.friendlyName] = filter.current;
|
||||
}
|
||||
});
|
||||
|
||||
var newSearch = angular.extend($location.search(), {
|
||||
filters: angular.toJson(current)
|
||||
});
|
||||
$location.search(newSearch);
|
||||
}
|
||||
|
||||
scope.$watch('queryResult && queryResult.getFilters()', function (filters) {
|
||||
if (filters) {
|
||||
scope.filters = filters;
|
||||
|
||||
if (filters.length && false) {
|
||||
readURL();
|
||||
|
||||
// start watching for changes and update URL
|
||||
scope.$watch('filters', updateURL, true);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -138,7 +121,7 @@
|
||||
query: '=',
|
||||
queryResult: '=',
|
||||
visualization: '=?',
|
||||
openEditor: '=?',
|
||||
openEditor: '@',
|
||||
onNewSuccess: '=?'
|
||||
},
|
||||
link: function (scope, element, attrs) {
|
||||
@@ -167,9 +150,13 @@
|
||||
scope.$watch('visualization.type', function (type, oldType) {
|
||||
// if not edited by user, set name to match type
|
||||
if (type && oldType != type && scope.visualization && !scope.visForm.name.$dirty) {
|
||||
// poor man's titlecase
|
||||
scope.visualization.name = scope.visualization.type[0] + scope.visualization.type.slice(1).toLowerCase();
|
||||
scope.visualization.name = _.string.titleize(scope.visualization.type);
|
||||
}
|
||||
|
||||
if (type && oldType != type && scope.visualization) {
|
||||
scope.visualization.options = Visualization.visualizations[scope.visualization.type].defaultOptions;
|
||||
}
|
||||
|
||||
});
|
||||
|
||||
scope.submit = function () {
|
||||
@@ -208,6 +195,7 @@
|
||||
.provider('Visualization', VisualizationProvider)
|
||||
.directive('visualizationRenderer', ['$location', 'Visualization', VisualizationRenderer])
|
||||
.directive('visualizationOptionsEditor', ['Visualization', VisualizationOptionsEditor])
|
||||
.directive('visualizationName', ['Visualization', VisualizationName])
|
||||
.directive('filters', Filters)
|
||||
.directive('editVisulatizationForm', ['Events', 'Visualization', 'growl', EditVisualizationForm])
|
||||
})();
|
||||
|
||||
@@ -112,9 +112,6 @@
|
||||
|
||||
scope.columnTypes = {
|
||||
"X": "x",
|
||||
// "X (Date time)": "x",
|
||||
// "X (Linear)": "x-linear",
|
||||
// "X (Category)": "x-category",
|
||||
"Y": "y",
|
||||
"Series": "series",
|
||||
"Unused": "unused"
|
||||
@@ -166,7 +163,7 @@
|
||||
scope.visualization.options.seriesOptions[s] = {'type': scope.visualization.options.globalSeriesType, 'yAxis': 0};
|
||||
}
|
||||
scope.visualization.options.seriesOptions[s].zIndex = scope.visualization.options.seriesOptions[s].zIndex === undefined ? i : scope.visualization.options.seriesOptions[s].zIndex;
|
||||
|
||||
scope.visualization.options.seriesOptions[s].index = scope.visualization.options.seriesOptions[s].index === undefined ? i : scope.visualization.options.seriesOptions[s].index;
|
||||
});
|
||||
scope.zIndexes = _.range(scope.series.length);
|
||||
scope.yAxes = [[0, 'left'], [1, 'right']];
|
||||
@@ -227,6 +224,12 @@
|
||||
}
|
||||
});
|
||||
|
||||
scope.visualization.options.xAxis = scope.visualization.options.xAxis || {};
|
||||
scope.visualization.options.xAxis.labels = scope.visualization.options.xAxis.labels || {};
|
||||
if (scope.visualization.options.xAxis.labels.enabled === undefined) {
|
||||
scope.visualization.options.xAxis.labels.enabled = true;
|
||||
}
|
||||
|
||||
scope.xAxisType = (scope.visualization.options.xAxis && scope.visualization.options.xAxis.type) || scope.xAxisType;
|
||||
|
||||
xAxisUnwatch = scope.$watch("xAxisType", function (xAxisType) {
|
||||
|
||||
@@ -26,7 +26,10 @@
|
||||
if ($scope.queryResult.getData() == null) {
|
||||
|
||||
} else {
|
||||
var sortedData = _.sortBy($scope.queryResult.getData(), "date");
|
||||
var sortedData = _.sortBy($scope.queryResult.getData(),function(r) {
|
||||
return r['date'] + r['day_number'] ;
|
||||
});
|
||||
|
||||
var grouped = _.groupBy(sortedData, "date");
|
||||
var maxColumns = _.reduce(grouped, function(memo, data){
|
||||
return (data.length > memo)? data.length : memo;
|
||||
|
||||
238  rd_ui/app/scripts/visualizations/map.js  Normal file
@@ -0,0 +1,238 @@
|
||||
'use strict';
|
||||
|
||||
(function() {
|
||||
var module = angular.module('redash.visualization');
|
||||
|
||||
module.config(['VisualizationProvider', function(VisualizationProvider) {
|
||||
var renderTemplate =
|
||||
'<map-renderer ' +
|
||||
'options="visualization.options" query-result="queryResult">' +
|
||||
'</map-renderer>';
|
||||
|
||||
var editTemplate = '<map-editor></map-editor>';
|
||||
var defaultOptions = {
|
||||
'height': 500,
|
||||
'draw': 'Marker',
|
||||
'classify':'none'
|
||||
};
|
||||
|
||||
VisualizationProvider.registerVisualization({
|
||||
type: 'MAP',
|
||||
name: 'Map',
|
||||
renderTemplate: renderTemplate,
|
||||
editorTemplate: editTemplate,
|
||||
defaultOptions: defaultOptions
|
||||
});
|
||||
}
|
||||
]);
|
||||
|
||||
module.directive('mapRenderer', function() {
|
||||
return {
|
||||
restrict: 'E',
|
||||
templateUrl: '/views/visualizations/map.html',
|
||||
link: function($scope, elm, attrs) {
|
||||
|
||||
var setBounds = function(){
|
||||
var b = $scope.visualization.options.bounds;
|
||||
|
||||
if(b){
|
||||
$scope.map.fitBounds([[b._southWest.lat, b._southWest.lng],[b._northEast.lat, b._northEast.lng]]);
|
||||
} else if ($scope.features.length > 0){
|
||||
var group= new L.featureGroup($scope.features);
|
||||
$scope.map.fitBounds(group.getBounds());
|
||||
}
|
||||
};
|
||||
|
||||
$scope.$watch('[queryResult && queryResult.getData(), visualization.options.draw,visualization.options.latColName,'+
|
||||
'visualization.options.lonColName,visualization.options.classify,visualization.options.classify]',
|
||||
function() {
|
||||
var marker = function(lat,lon){
|
||||
if (lat == null || lon == null) return;
|
||||
|
||||
return L.marker([lat, lon]);
|
||||
};
|
||||
|
||||
var heatpoint = function(lat,lon,obj){
|
||||
if (lat == null || lon == null) return;
|
||||
|
||||
var color = 'red';
|
||||
|
||||
if (obj &&
|
||||
obj[$scope.visualization.options.classify] &&
|
||||
$scope.visualization.options.classification){
|
||||
var v = $.grep($scope.visualization.options.classification,function(e){
|
||||
return e.value == obj[$scope.visualization.options.classify];
|
||||
});
|
||||
if (v.length >0) color = v[0].color;
|
||||
}
|
||||
|
||||
var style = {
|
||||
fillColor:color,
|
||||
fillOpacity:0.5,
|
||||
stroke:false
|
||||
};
|
||||
|
||||
return L.circleMarker([lat,lon],style)
|
||||
};
|
||||
|
||||
var color = function(val){
|
||||
// taken from http://jsfiddle.net/xgJ2e/2/
|
||||
|
||||
var h= Math.floor((100 - val) * 120 / 100);
|
||||
var s = Math.abs(val - 50)/50;
|
||||
var v = 1;
|
||||
|
||||
var rgb, i, data = [];
|
||||
if (s === 0) {
|
||||
rgb = [v,v,v];
|
||||
} else {
|
||||
h = h / 60;
|
||||
i = Math.floor(h);
|
||||
data = [v*(1-s), v*(1-s*(h-i)), v*(1-s*(1-(h-i)))];
|
||||
switch(i) {
|
||||
case 0:
|
||||
rgb = [v, data[2], data[0]];
|
||||
break;
|
||||
case 1:
|
||||
rgb = [data[1], v, data[0]];
|
||||
break;
|
||||
case 2:
|
||||
rgb = [data[0], v, data[2]];
|
||||
break;
|
||||
case 3:
|
||||
rgb = [data[0], data[1], v];
|
||||
break;
|
||||
case 4:
|
||||
rgb = [data[2], data[0], v];
|
||||
break;
|
||||
default:
|
||||
rgb = [v, data[0], data[1]];
|
||||
break;
|
||||
}
|
||||
}
|
||||
return '#' + rgb.map(function(x){
|
||||
return ("0" + Math.round(x*255).toString(16)).slice(-2);
|
||||
}).join('');
|
||||
};
|
||||
|
||||
// Following line is used to avoid "Couldn't autodetect L.Icon.Default.imagePath" error
|
||||
// https://github.com/Leaflet/Leaflet/issues/766#issuecomment-7741039
|
||||
L.Icon.Default.imagePath = L.Icon.Default.imagePath || "//api.tiles.mapbox.com/mapbox.js/v2.2.1/images";
|
||||
|
||||
function getBounds(e) {
|
||||
$scope.visualization.options.bounds = $scope.map.getBounds();
|
||||
}
|
||||
|
||||
var queryData = $scope.queryResult.getData();
|
||||
var classify = $scope.visualization.options.classify;
|
||||
|
||||
if (queryData) {
|
||||
$scope.visualization.options.classification = [];
|
||||
|
||||
for (var row in queryData) {
|
||||
if (queryData[row][classify] &&
|
||||
$.grep($scope.visualization.options.classification, function (e) {
|
||||
return e.value == queryData[row][classify]
|
||||
}).length == 0) {
|
||||
$scope.visualization.options.classification.push({value: queryData[row][classify], color: null});
|
||||
}
|
||||
}
|
||||
|
||||
$.each($scope.visualization.options.classification, function (i, c) {
|
||||
c.color = color(parseInt((i / $scope.visualization.options.classification.length) * 100));
|
||||
});
|
||||
|
||||
if (!$scope.map) {
|
||||
$scope.map = L.map(elm[0].children[0].children[0])
|
||||
}
|
||||
|
||||
L.tileLayer('//{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
|
||||
attribution: '© <a href="http://osm.org/copyright">OpenStreetMap</a> contributors'
|
||||
}).addTo($scope.map);
|
||||
|
||||
$scope.features = $scope.features || [];
|
||||
|
||||
var tmp_features = [];
|
||||
|
||||
var lat_col = $scope.visualization.options.latColName || 'lat';
|
||||
var lon_col = $scope.visualization.options.lonColName || 'lon';
|
||||
|
||||
for (var row in queryData) {
|
||||
var feature;
|
||||
|
||||
if ($scope.visualization.options.draw == 'Marker') {
|
||||
feature = marker(queryData[row][lat_col], queryData[row][lon_col])
|
||||
} else if ($scope.visualization.options.draw == 'Color') {
|
||||
feature = heatpoint(queryData[row][lat_col], queryData[row][lon_col], queryData[row])
|
||||
}
|
||||
|
||||
if (!feature) continue;
|
||||
|
||||
var obj_description = '<ul style="list-style-type: none;padding-left: 0">';
|
||||
for (var k in queryData[row]){
|
||||
obj_description += "<li>" + k + ": " + queryData[row][k] + "</li>";
|
||||
}
|
||||
obj_description += '</ul>';
|
||||
feature.bindPopup(obj_description);
|
||||
tmp_features.push(feature);
|
||||
}
|
||||
|
||||
$.each($scope.features, function (i, f) {
|
||||
$scope.map.removeLayer(f);
|
||||
});
|
||||
|
||||
$scope.features = tmp_features;
|
||||
|
||||
$.each($scope.features, function (i, f) {
|
||||
f.addTo($scope.map)
|
||||
});
|
||||
|
||||
setBounds();
|
||||
|
||||
$scope.map.on('focus',function(){
|
||||
$scope.map.on('moveend', getBounds);
|
||||
});
|
||||
|
||||
$scope.map.on('blur',function(){
|
||||
$scope.map.off('moveend', getBounds);
|
||||
});
|
||||
|
||||
|
||||
// We redraw the map if it was loaded in a hidden tab
|
||||
if ($('a[href="#'+$scope.visualization.id+'"]').length > 0) {
|
||||
|
||||
$('a[href="#'+$scope.visualization.id+'"]').on('click', function () {
|
||||
setTimeout(function() {
|
||||
$scope.map.invalidateSize(false);
|
||||
|
||||
setBounds();
|
||||
},500);
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
}, true);
|
||||
|
||||
$scope.$watch('visualization.options.height', function() {
|
||||
|
||||
if (!$scope.map) return;
|
||||
$scope.map.invalidateSize(false);
|
||||
setBounds();
|
||||
|
||||
});
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
module.directive('mapEditor', function() {
|
||||
return {
|
||||
restrict: 'E',
|
||||
templateUrl: '/views/visualizations/map_editor.html',
|
||||
link: function($scope, elm, attrs) {
|
||||
$scope.draw_options = ['Marker','Color'];
|
||||
$scope.classify_columns = $scope.queryResult.columnNames.concat('none');
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
})();
|
||||
@@ -78,14 +78,14 @@
|
||||
};
|
||||
} else if (columnType === 'date') {
|
||||
columnDefinition.formatFunction = function (value) {
|
||||
if (value) {
|
||||
if (value && moment.isMoment(value)) {
|
||||
return value.toDate().toLocaleDateString();
|
||||
}
|
||||
return value;
|
||||
};
|
||||
} else if (columnType === 'datetime') {
|
||||
columnDefinition.formatFunction = function (value) {
|
||||
if (value) {
|
||||
if (value && moment.isMoment(value)) {
|
||||
return value.toDate().toLocaleString();
|
||||
}
|
||||
return value;
|
||||
|
||||
@@ -14,7 +14,12 @@ a.page-title {
|
||||
}
|
||||
|
||||
a.navbar-brand {
|
||||
font-style: italic;
|
||||
padding: 5px 5px 0px 0px;
|
||||
margin-left: 0px !important;
|
||||
}
|
||||
|
||||
a.navbar-brand img {
|
||||
height: 40px;
|
||||
}
|
||||
|
||||
.graph {
|
||||
@@ -92,7 +97,16 @@ a.navbar-brand {
|
||||
}
|
||||
|
||||
.panel-heading .query-link:hover {
|
||||
text-decoration: none;
|
||||
text-decoration: underline;
|
||||
}
|
||||
|
||||
.list-group-item.clickable {
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.list-group-item.clickable:focus,
|
||||
.list-group-item.clickable:hover {
|
||||
background-color: #f5f5f5;
|
||||
}
|
||||
|
||||
/* angular-growl */
|
||||
@@ -308,6 +322,28 @@ counter-renderer counter-name {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
.schema-container {
|
||||
height: 300px;
|
||||
}
|
||||
|
||||
.schema-browser {
|
||||
height: 100%;
|
||||
overflow-y: auto;
|
||||
overflow-x: hidden;
|
||||
}
|
||||
|
||||
div.table-name {
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
white-space: nowrap;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.blankslate {
|
||||
text-align: center;
|
||||
padding: 30px;
|
||||
}
|
||||
|
||||
/*
|
||||
bootstrap's hidden-xs class adds display:block when not hidden
|
||||
use this class when you need to keep the original display value
|
||||
@@ -317,3 +353,7 @@ use this class when you need to keep the original display value
|
||||
display: none !important;
|
||||
}
|
||||
}
|
||||
|
||||
.log-container {
|
||||
margin-bottom: 50px;
|
||||
}
|
||||
|
||||
58  rd_ui/app/views/alerts/edit.html  Normal file
@@ -0,0 +1,58 @@
|
||||
<div class="container">
|
||||
<ol class="breadcrumb">
|
||||
<li><a href="/alerts">Alerts</a></li>
|
||||
<li class="active">{{alert.name || getDefaultName() || "New"}}</li>
|
||||
</ol>
|
||||
<div class="row">
|
||||
<div class="col-md-8">
|
||||
<form name="alertForm" ng-submit="saveChanges()" class="form">
|
||||
<div class="form-group">
|
||||
<label>Query</label>
|
||||
<ui-select ng-model="alert.query" theme="bootstrap" reset-search-input="false" on-select="onQuerySelected($item)">
|
||||
<ui-select-match placeholder="Search a query by name">{{$select.selected.name}}</ui-select-match>
|
||||
<ui-select-choices repeat="q in queries"
|
||||
refresh="searchQueries($select.search)"
|
||||
refresh-delay="0">
|
||||
<div ng-bind-html="q.name | highlight: $select.search | trustAsHtml"></div>
|
||||
</ui-select-choices>
|
||||
</ui-select>
|
||||
</div>
|
||||
|
||||
<div class="form-group" ng-show="selectedQuery">
|
||||
<label>Name</label>
|
||||
<input type="string" placeholder="{{getDefaultName()}}" class="form-control" ng-model="alert.name">
|
||||
</div>
|
||||
|
||||
<div ng-show="queryResult" class="form-horizontal">
|
||||
<div class="form-group">
|
||||
<label class="control-label col-md-2">Value column</label>
|
||||
<div class="col-md-4">
|
||||
<select ng-options="name for name in queryResult.getColumnNames()" ng-model="alert.options.column" class="form-control"></select>
|
||||
</div>
|
||||
<label class="control-label col-md-2">Value</label>
|
||||
<div class="col-md-4">
|
||||
<p class="form-control-static">{{queryResult.getData()[0][alert.options.column]}}</p>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label class="control-label col-md-2">Op</label>
|
||||
<div class="col-md-4">
|
||||
<select ng-options="name for name in ops" ng-model="alert.options.op" class="form-control"></select>
|
||||
</div>
|
||||
<label class="control-label col-md-2">Reference</label>
|
||||
<div class="col-md-4">
|
||||
<input type="number" class="form-control" ng-model="alert.options.value" placeholder="reference value" required/>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<button class="btn btn-primary" ng-disabled="!alertForm.$valid">Save</button>
|
||||
</div>
|
||||
</form>
|
||||
</div>
|
||||
<div class="col-md-4" ng-if="alert.id">
|
||||
<alert-subscribers alert-id="alert.id"></alert-subscribers>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
16  rd_ui/app/views/alerts/list.html  Normal file
@@ -0,0 +1,16 @@
|
||||
<div class="container">
|
||||
<ol class="breadcrumb">
|
||||
<li class="active">Alerts</li>
|
||||
</ol>
|
||||
<div class="row">
|
||||
<div class="col-md-12">
|
||||
<p>
|
||||
<a href="/alerts/new" class="btn btn-default"><i class="fa fa-plus"></i> New Alert</a>
|
||||
</p>
|
||||
|
||||
<smart-table rows="alerts" columns="gridColumns"
|
||||
config="gridConfig"
|
||||
class="table table-condensed table-hover"></smart-table>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
4  rd_ui/app/views/alerts/subscribers.html  Normal file
@@ -0,0 +1,4 @@
|
||||
<div>
|
||||
<strong>Subscribers</strong> <subscribe-button alert-id="alertId" subscribers="subscribers"></subscribe-button><br/>
|
||||
<img ng-src="{{s.user.gravatar_url}}" class="img-circle" alt="{{s.user.name}}" ng-repeat="s in subscribers"/>
|
||||
</div>
|
||||
@@ -28,6 +28,7 @@
|
||||
<p>
|
||||
<span ng-hide="currentUser.hasPermission('view_query')">{{query.name}}</span>
|
||||
<query-link query="query" visualization="widget.visualization" ng-show="currentUser.hasPermission('view_query')"></query-link>
|
||||
<visualization-name visualization="widget.visualization"/>
|
||||
</p>
|
||||
<div class="text-muted" ng-bind-html="query.description | markdown"></div>
|
||||
</h3>
|
||||
@@ -37,7 +38,7 @@
|
||||
|
||||
<div class="panel-footer">
|
||||
<span class="label label-default"
|
||||
tooltip="next update {{nextUpdateTime}} (query runtime: {{queryResult.getRuntime() | durationHumanize}})"
|
||||
tooltip="(query runtime: {{queryResult.getRuntime() | durationHumanize}})"
|
||||
tooltip-placement="bottom">Updated: <span am-time-ago="queryResult.getUpdatedAt()"></span></span>
|
||||
|
||||
<span class="pull-right">
|
||||
|
||||
11  rd_ui/app/views/data_sources/edit.html  Normal file
@@ -0,0 +1,11 @@
|
||||
<div class="container">
|
||||
<ol class="breadcrumb">
|
||||
<li><a href="/data_sources">Data Sources</a></li>
|
||||
<li class="active">{{dataSource.name || "New"}}</li>
|
||||
</ol>
|
||||
<div class="row">
|
||||
<div class="col-md-8">
|
||||
<data-source-form data-data-source="dataSource" />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
20  rd_ui/app/views/data_sources/form.html  Normal file
@@ -0,0 +1,20 @@
|
||||
<form name="dataSourceForm" ng-submit="saveChanges()">
|
||||
<div class="form-group">
|
||||
<label for="dataSourceName">Name</label>
|
||||
<input type="string" class="form-control" name="dataSourceName" ng-model="dataSource.name" required>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label for="type">Type</label>
|
||||
<select name="type" class="form-control" ng-options="type.type as type.name for type in dataSourceTypes" ng-model="dataSource.type"></select>
|
||||
</div>
|
||||
<div class="form-group" ng-class='{"has-error": !inner.input.$valid}' ng-form="inner" ng-repeat="(name, input) in type.configuration_schema.properties">
|
||||
<label>{{input.title || name | capitalize}}</label>
|
||||
<input name="input" type="{{input.type}}" class="form-control" ng-model="dataSource.options[name]" ng-required="input.required"
|
||||
ng-if="input.type !== 'file'" accesskey="tab">
|
||||
|
||||
<input name="input" type="file" class="form-control" ng-model="files[name]" ng-required="input.required"
|
||||
base-sixty-four-input
|
||||
ng-if="input.type === 'file'">
|
||||
</div>
|
||||
<button class="btn btn-primary" ng-disabled="!dataSourceForm.$valid">Save</button>
|
||||
</form>
|
||||
18  rd_ui/app/views/data_sources/list.html  Normal file
@@ -0,0 +1,18 @@
|
||||
<div class="container">
|
||||
<ol class="breadcrumb">
|
||||
<li class="active">Data Sources</li>
|
||||
</ol>
|
||||
<div class="row">
|
||||
<div class="col-md-4">
|
||||
<div class="list-group">
|
||||
<div class="list-group-item clickable" ng-repeat="dataSource in dataSources" ng-click="openDataSource(dataSource)">
|
||||
<i class="fa fa-database"></i> {{dataSource.name}}
|
||||
<button class="btn btn-xs btn-danger pull-right" ng-click="deleteDataSource($event, dataSource)">Delete</button>
|
||||
</div>
|
||||
<a ng-href="/data_sources/new" class="list-group-item">
|
||||
<i class="fa fa-plus"></i> Add Data Source
|
||||
</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -59,9 +59,9 @@
|
||||
|
||||
<hr>
|
||||
|
||||
<div class="row">
|
||||
<div class="col-lg-12">
|
||||
<div ng-show="sourceMode">
|
||||
<div class="row" ng-if="sourceMode">
|
||||
<div ng-class="editorSize">
|
||||
<div>
|
||||
<p>
|
||||
<button type="button" class="btn btn-primary btn-xs" ng-disabled="queryExecuting" ng-click="executeQuery()">
|
||||
<span class="glyphicon glyphicon-play"></span> Execute
|
||||
@@ -77,21 +77,43 @@
|
||||
</button>
|
||||
</span>
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<!-- code editor -->
|
||||
<div ng-show="sourceMode">
|
||||
<p>
|
||||
<query-editor query="query" lock="queryFormatting"></query-editor>
|
||||
<query-editor query="query" schema="schema" syntax="dataSource.syntax" lock="queryFormatting"></query-editor>
|
||||
</p>
|
||||
<hr>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-3 schema-container" ng-show="hasSchema">
|
||||
<div ng-show="schema.length < 200">
|
||||
<input type="text" placeholder="Search schema..." class="form-control" ng-model="schemaFilter">
|
||||
</div>
|
||||
<div class="schema-browser">
|
||||
<div ng-repeat="table in schema | filter:schemaFilter track by table.name">
|
||||
<div class="table-name" ng-click="table.collapsed = !table.collapsed">
|
||||
<i class="fa fa-table"></i> <strong><span title="{{table.name}}">{{table.name}}</span></strong>
|
||||
</div>
|
||||
<div collapse="table.collapsed && !schemaFilter">
|
||||
<div ng-repeat="column in table.columns track by column" style="padding-left:16px;">{{column}}</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</div>
|
||||
|
||||
</div>
|
||||
<hr ng-if="sourceMode">
|
||||
<div class="row">
|
||||
<div class="col-lg-3 rd-hidden-xs">
|
||||
<p>
|
||||
<span class="glyphicon glyphicon-user"></span>
|
||||
<span class="text-muted">Created By </span>
|
||||
<strong>{{query.user.name}}</strong>
|
||||
</p>
|
||||
<p ng-if="query.last_modified_by && query.user.id != query.last_modified_by.id">
|
||||
<span class="glyphicon glyphicon-user"></span>
|
||||
<span class="text-muted">Last Modified By </span>
|
||||
<strong>{{query.last_modified_by.name}}</strong>
|
||||
</p>
|
||||
<p>
|
||||
<span class="glyphicon glyphicon-time"></span>
|
||||
<span class="text-muted">Last update </span>
|
||||
@@ -99,12 +121,6 @@
|
||||
<rd-time-ago value="queryResult.query_result.retrieved_at"></rd-time-ago>
|
||||
</strong>
|
||||
</p>
|
||||
<p>
|
||||
<span class="glyphicon glyphicon-user"></span>
|
||||
<span class="text-muted">Created By </span>
|
||||
<strong ng-hide="isQueryOwner">{{query.user.name}}</strong>
|
||||
<strong ng-show="isQueryOwner">You</strong>
|
||||
</p>
|
||||
<p>
|
||||
<span class="glyphicon glyphicon-play"></span>
|
||||
<span class="text-muted">Runtime </span>
|
||||
@@ -117,12 +133,12 @@
|
||||
</p>
|
||||
<p>
|
||||
<span class="glyphicon glyphicon-refresh"></span>
|
||||
<span class="text-muted">Refresh Interval</span>
|
||||
<query-refresh-select></query-refresh-select>
|
||||
<span class="text-muted">Refresh Schedule</span>
|
||||
<a href="" ng-click="openScheduleForm()">{{query.schedule | scheduleHumanize}}</a>
|
||||
</p>
|
||||
|
||||
<p>
|
||||
<span class="glyphicon glyphicon-hdd"></span>
|
||||
<i class="fa fa-database"></i>
|
||||
<span class="text-muted">Data Source</span>
|
||||
<select ng-disabled="!isQueryOwner" ng-model="query.data_source_id" ng-change="updateDataSource()" ng-options="ds.id as ds.name for ds in dataSources"></select>
|
||||
</p>
|
||||
@@ -176,6 +192,16 @@
|
||||
</div>
|
||||
<div class="alert alert-danger" ng-show="queryResult.getError()">Error running query: <strong>{{queryResult.getError()}}</strong></div>
|
||||
|
||||
<div class="row log-container" ng-show="showLog">
|
||||
<span ng-show="showLog">Log Information:</span>
|
||||
<table>
|
||||
<tbody>
|
||||
<tr ng-repeat="l in queryResult.getLog()">
|
||||
<td>{{l}}</td>
|
||||
</tr>
|
||||
</tbody>
|
||||
</table>
|
||||
</div>
|
||||
<!-- tabs and data -->
|
||||
<div ng-show="showDataset">
|
||||
<div class="row">
|
||||
@@ -186,7 +212,7 @@
|
||||
<rd-tab tab-id="{{vis.id}}" name="{{vis.name}}" ng-if="vis.type!='TABLE'" ng-repeat="vis in query.visualizations">
|
||||
<span class="remove" ng-click="deleteVisualization($event, vis)" ng-show="canEdit"> ×</span>
|
||||
</rd-tab>
|
||||
<rd-tab tab-id="add" name="+ New" removeable="true" ng-show="canEdit"></rd-tab>
|
||||
<rd-tab tab-id="add" name="+ New Visualization" removeable="true" ng-show="canEdit"></rd-tab>
|
||||
<li ng-if="!sourceMode" class="rd-tab-btn"><button class="btn btn-sm btn-default" ng-click="executeQuery()" ng-disabled="queryExecuting" title="Refresh Dataset"><span class="glyphicon glyphicon-refresh"></span></button></li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
18  rd_ui/app/views/schedule_form.html  Normal file
@@ -0,0 +1,18 @@
|
||||
<div class="modal-header">
|
||||
<button type="button" class="close" aria-label="Close" ng-click="close()"><span aria-hidden="true">×</span></button>
|
||||
<h4 class="modal-title">Refresh Schedule</h4>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
<div class="radio">
|
||||
<label>
|
||||
<input type="radio" value="periodic" ng-model="refreshType">
|
||||
<query-refresh-select ng-disabled="refreshType != 'periodic'"></query-refresh-select>
|
||||
</label>
|
||||
</div>
|
||||
<div class="radio">
|
||||
<label>
|
||||
<input type="radio" value="daily" ng-model="refreshType">
|
||||
<query-time-picker ng-disabled="refreshType != 'daily'"></query-time-picker>
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
@@ -54,6 +54,14 @@
|
||||
ng-model="visualization.options.sortX">
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label class="control-label col-sm-2">Show X Axis Labels</label>
|
||||
|
||||
<div class="col-sm-10">
|
||||
<input name="sortX" type="checkbox" class="form-control"
|
||||
ng-model="visualization.options.xAxis.labels.enabled">
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@@ -100,6 +108,15 @@
|
||||
class="form-control"></select>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label class="control-label col-sm-3">Index</label>
|
||||
|
||||
<div class="col-sm-9">
|
||||
<select required ng-model="visualization.options.seriesOptions[seriesName].index"
|
||||
ng-options="o as o for o in zIndexes"
|
||||
class="form-control"></select>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label class="control-label col-sm-3">y Axis</label>
|
||||
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
<div>
|
||||
<span ng-click="openEditor=!openEditor" class="details-toggle" ng-class="{open: openEditor}">Edit</span>
|
||||
|
||||
<form ng-if="openEditor" role="form" name="visForm" ng-submit="submit()">
|
||||
<form ng-show="openEditor" role="form" name="visForm" ng-submit="submit()">
|
||||
<div class="form-group">
|
||||
<label class="control-label">Name</label>
|
||||
<input name="name" type="text" class="form-control" ng-model="visualization.name" placeholder="{{visualization.type | capitalize}}">
|
||||
|
||||
3  rd_ui/app/views/visualizations/map.html  Normal file
@@ -0,0 +1,3 @@
|
||||
<div style='margin:1%;width:98%;height:{{visualization.options.height}}px'>
|
||||
<div style="width:100%; height:100%;"></div>
|
||||
</div>
|
||||
55  rd_ui/app/views/visualizations/map_editor.html  Normal file
@@ -0,0 +1,55 @@
|
||||
<div class="form-horizontal">
|
||||
<div class="form-group">
|
||||
<label class="col-lg-2">Map height (px)</label>
|
||||
<div class="col-sm-4">
|
||||
<input class="form-control" type="number" ng-model = "visualization.options.height" />
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="form-group">
|
||||
<label class="col-lg-2">Draw option</label>
|
||||
<div class="col-sm-4">
|
||||
<select ng-options="opt for opt in draw_options" ng-model="visualization.options.draw" class="form-control"></select>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label class="col-lg-2">Latitude column name</label>
|
||||
<div class="col-sm-4">
|
||||
<select ng-options="name for name in queryResult.columnNames" ng-model="visualization.options.latColName" class="form-control"></select>
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<label class="col-lg-2">Longitude column name</label>
|
||||
<div class="col-sm-4">
|
||||
<select ng-options="name for name in queryResult.columnNames" ng-model="visualization.options.lonColName" class="form-control"></select>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div ng-show = "visualization.options.draw == 'Color'">
|
||||
<div class="form-group">
|
||||
<label class="col-lg-2">Classify by column</label>
|
||||
<div class="col-sm-4">
|
||||
<select ng-options="name for name in classify_columns" ng-model="visualization.options.classify" class="form-control"></select>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="row" >
|
||||
<div class="col-lg-6">
|
||||
<div ng-repeat="element in visualization.options.classification" class="list-group">
|
||||
<div class="list-group-item active">
|
||||
{{element.value}}
|
||||
</div>
|
||||
|
||||
<div class="list-group-item">
|
||||
<div class="form-group">
|
||||
<label class="col-lg-4">Color</label>
|
||||
<div class="col-sm-4">
|
||||
<input class="form-control" style="background-color:{{element.color}};" type="text" ng-model = "element.color" />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -12,8 +12,7 @@
|
||||
"es5-shim": "2.0.8",
|
||||
"angular-moment": "0.2.0",
|
||||
"moment": "2.1.0",
|
||||
"angular-ui-bootstrap": "0.5.0",
|
||||
"angular-ui-codemirror": "0.0.5",
|
||||
"codemirror": "4.8.0",
|
||||
"highcharts": "3.0.10",
|
||||
"underscore": "1.5.1",
|
||||
"pivottable": "~1.1.1",
|
||||
@@ -26,10 +25,13 @@
|
||||
"marked": "~0.3.2",
|
||||
"bucky": "~0.2.6",
|
||||
"pace": "~0.5.1",
|
||||
"angular-ui-select": "0.8.2",
|
||||
"angular-ui-select": "~0.12.0",
|
||||
"font-awesome": "~4.2.0",
|
||||
"mustache": "~1.0.0",
|
||||
"canvg": "gabelerner/canvg"
|
||||
"canvg": "gabelerner/canvg",
|
||||
"angular-ui-bootstrap-bower": "~0.12.1",
|
||||
"leaflet": "~0.7.3",
|
||||
"angular-base64-upload": "~0.1.11"
|
||||
},
|
||||
"devDependencies": {
|
||||
"angular-mocks": "1.2.18",
|
||||
|
||||
BIN  rd_ui/favicon.ico  Executable file (binary file not shown; 1.1 KiB)
@@ -36,6 +36,7 @@
|
||||
"node": ">=0.10.0"
|
||||
},
|
||||
"scripts": {
|
||||
"test": "grunt test"
|
||||
"test": "grunt test",
|
||||
"bower": "bower"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,11 +2,12 @@ import logging
|
||||
import urlparse
|
||||
import redis
|
||||
from statsd import StatsClient
|
||||
from flask_mail import Mail
|
||||
|
||||
from redash import settings
|
||||
from redash.query_runner import import_query_runners
|
||||
|
||||
__version__ = '0.6.0'
|
||||
__version__ = '0.7.0'
|
||||
|
||||
|
||||
def setup_logging():
|
||||
@@ -32,6 +33,8 @@ def create_redis_connection():
|
||||
|
||||
setup_logging()
|
||||
redis_connection = create_redis_connection()
|
||||
mail = Mail()
|
||||
mail.init_mail(settings.all_settings())
|
||||
statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX)
|
||||
|
||||
import_query_runners(settings.QUERY_RUNNERS)
|
||||
|
||||
109  redash/admin.py  Normal file
@@ -0,0 +1,109 @@
|
||||
import json
|
||||
from flask_admin.contrib.peewee import ModelView
|
||||
from flask.ext.admin import Admin
|
||||
from flask_admin.contrib.peewee.form import CustomModelConverter
|
||||
from flask_admin.form.widgets import DateTimePickerWidget
|
||||
from playhouse.postgres_ext import ArrayField, DateTimeTZField
|
||||
from wtforms import fields
|
||||
from wtforms.widgets import TextInput
|
||||
|
||||
from redash import models
|
||||
from redash import query_runner
|
||||
from redash.permissions import require_permission
|
||||
|
||||
|
||||
class ArrayListField(fields.Field):
|
||||
widget = TextInput()
|
||||
|
||||
def _value(self):
|
||||
if self.data:
|
||||
return u', '.join(self.data)
|
||||
else:
|
||||
return u''
|
||||
|
||||
def process_formdata(self, valuelist):
|
||||
if valuelist:
|
||||
self.data = [x.strip() for x in valuelist[0].split(',')]
|
||||
else:
|
||||
self.data = []
|
||||
|
||||
|
||||
class JSONTextAreaField(fields.TextAreaField):
|
||||
def process_formdata(self, valuelist):
|
||||
if valuelist:
|
||||
try:
|
||||
json.loads(valuelist[0])
|
||||
except ValueError:
|
||||
raise ValueError(self.gettext(u'Invalid JSON'))
|
||||
self.data = valuelist[0]
|
||||
else:
|
||||
self.data = ''
|
||||
|
||||
class PasswordHashField(fields.PasswordField):
|
||||
def _value(self):
|
||||
return u''
|
||||
|
||||
def process_formdata(self, valuelist):
|
||||
if valuelist:
|
||||
self.data = models.pwd_context.encrypt(valuelist[0])
|
||||
else:
|
||||
self.data = u''
|
||||
|
||||
|
||||
class PgModelConverter(CustomModelConverter):
|
||||
def __init__(self, view, additional=None):
|
||||
additional = {ArrayField: self.handle_array_field,
|
||||
DateTimeTZField: self.handle_datetime_tz_field,
|
||||
}
|
||||
super(PgModelConverter, self).__init__(view, additional)
|
||||
self.view = view
|
||||
|
||||
def handle_array_field(self, model, field, **kwargs):
|
||||
return field.name, ArrayListField(**kwargs)
|
||||
|
||||
def handle_datetime_tz_field(self, model, field, **kwargs):
|
||||
kwargs['widget'] = DateTimePickerWidget()
|
||||
return field.name, fields.DateTimeField(**kwargs)
|
||||
|
||||
|
||||
class BaseModelView(ModelView):
|
||||
model_form_converter = PgModelConverter
|
||||
|
||||
@require_permission('admin')
|
||||
def is_accessible(self):
|
||||
return True
|
||||
|
||||
|
||||
class UserModelView(BaseModelView):
|
||||
column_searchable_list = ('name', 'email')
|
||||
form_excluded_columns = ('created_at', 'updated_at')
|
||||
column_exclude_list = ('password_hash',)
|
||||
|
||||
form_overrides = dict(password_hash=PasswordHashField)
|
||||
form_args = {
|
||||
'password_hash': {'label': 'Password'}
|
||||
}
|
||||
|
||||
|
||||
class QueryResultModelView(BaseModelView):
|
||||
column_exclude_list = ('data',)
|
||||
|
||||
|
||||
class QueryModelView(BaseModelView):
|
||||
column_exclude_list = ('latest_query_data',)
|
||||
|
||||
|
||||
class DashboardModelView(BaseModelView):
|
||||
column_searchable_list = ('name', 'slug')
|
||||
|
||||
|
||||
def init_admin(app):
|
||||
admin = Admin(app, name='re:dash admin', template_mode='bootstrap3')
|
||||
|
||||
admin.add_view(UserModelView(models.User))
|
||||
admin.add_view(QueryModelView(models.Query))
|
||||
admin.add_view(QueryResultModelView(models.QueryResult))
|
||||
admin.add_view(DashboardModelView(models.Dashboard))
|
||||
|
||||
for m in (models.Visualization, models.Widget, models.ActivityLog, models.Group, models.Event):
|
||||
admin.add_view(BaseModelView(m))
|
||||
@@ -1,13 +1,13 @@
import functools
import hashlib
import hmac
import time
import logging

from flask import request, make_response, redirect, url_for
from flask.ext.login import LoginManager, login_user, current_user, logout_user
from flask.ext.login import LoginManager
from flask.ext.login import user_logged_in

from redash import models, settings, google_oauth
from redash import models, settings, google_oauth, saml_auth
from redash.tasks import record_event

login_manager = LoginManager()
logger = logging.getLogger('authentication')
@@ -23,77 +23,85 @@ def sign(key, path, expires):
    return h.hexdigest()


class Authentication(object):
    def verify_authentication(self):
        return False

    def required(self, fn):
        @functools.wraps(fn)
        def decorated(*args, **kwargs):
            if current_user.is_authenticated() or self.verify_authentication():
                return fn(*args, **kwargs)

            return make_response(redirect(url_for("login", next=request.url)))

        return decorated
@login_manager.user_loader
def load_user(user_id):
    return models.User.get_by_id(user_id)


class ApiKeyAuthentication(Authentication):
    def verify_authentication(self):
        api_key = request.args.get('api_key')
        query_id = request.view_args.get('query_id', None)
def hmac_load_user_from_request(request):
    signature = request.args.get('signature')
    expires = float(request.args.get('expires') or 0)
    query_id = request.view_args.get('query_id', None)
    user_id = request.args.get('user_id', None)

        if query_id and api_key:
            query = models.Query.get(models.Query.id == query_id)
    # TODO: 3600 should be a setting
    if signature and time.time() < expires <= time.time() + 3600:
        if user_id:
            user = models.User.get_by_id(user_id)
            calculated_signature = sign(user.api_key, request.path, expires)

            if query.api_key and api_key == query.api_key:
                login_user(models.ApiUser(query.api_key), remember=False)
                return True
            if user.api_key and signature == calculated_signature:
                return user

        return False


class HMACAuthentication(Authentication):
    def verify_authentication(self):
        signature = request.args.get('signature')
        expires = float(request.args.get('expires') or 0)
        query_id = request.view_args.get('query_id', None)

        # TODO: 3600 should be a setting
        if signature and query_id and time.time() < expires <= time.time() + 3600:
        if query_id:
            query = models.Query.get(models.Query.id == query_id)
            calculated_signature = sign(query.api_key, request.path, expires)

            if query.api_key and signature == calculated_signature:
                login_user(models.ApiUser(query.api_key), remember=False)
                return True
                return models.ApiUser(query.api_key)

        return False
    return None


@login_manager.user_loader
def load_user(user_id):
    # If the user was previously logged in as api user, the user_id will be the api key and will raise an exception as
    # it can't be casted to int.
    if isinstance(user_id, basestring) and not user_id.isdigit():
def get_user_from_api_key(api_key, query_id):
    if not api_key:
        return None

        return models.User.select().where(models.User.id == user_id).first()
    user = None
    try:
        user = models.User.get_by_api_key(api_key)
    except models.User.DoesNotExist:
        if query_id:
            query = models.Query.get_by_id(query_id)
            if query and query.api_key == api_key:
                user = models.ApiUser(api_key)

    return user


def api_key_load_user_from_request(request):
    api_key = request.args.get('api_key', None)
    query_id = request.view_args.get('query_id', None)

    user = get_user_from_api_key(api_key, query_id)
    return user


def log_user_logged_in(app, user):
    event = {
        'user_id': user.id,
        'action': 'login',
        'object_type': 'redash',
        'timestamp': int(time.time()),
    }

    record_event.delay(event)


def setup_authentication(app):
    login_manager.init_app(app)
    login_manager.anonymous_user = models.AnonymousUser
    login_manager.login_view = 'login'
    app.secret_key = settings.COOKIE_SECRET
    app.register_blueprint(google_oauth.blueprint)
    app.register_blueprint(saml_auth.blueprint)

    user_logged_in.connect(log_user_logged_in)

    if settings.AUTH_TYPE == 'hmac':
        auth = HMACAuthentication()
        login_manager.request_loader(hmac_load_user_from_request)
    elif settings.AUTH_TYPE == 'api_key':
        auth = ApiKeyAuthentication()
        login_manager.request_loader(api_key_load_user_from_request)
    else:
        logger.warning("Unknown authentication type ({}). Using default (HMAC).".format(settings.AUTH_TYPE))
        auth = HMACAuthentication()
        login_manager.request_loader(hmac_load_user_from_request)

    return auth
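For the HMAC flow above, the client is expected to append user_id, expires and signature query parameters produced with the same sign() helper. A minimal client-side sketch, assuming a local instance and reusing sign() from redash.authentication (host, import path and expiry choice are illustrative):

# Illustrative sketch; not part of the diff above.
import time
from redash.authentication import sign   # the helper shown in the hunk header

def signed_url(user, path, base_url='http://localhost:5000'):
    # The request loader accepts signatures whose expiry lies at most one hour ahead.
    expires = time.time() + 3600
    signature = sign(user.api_key, path, expires)
    # Note: the expires value must reach the server exactly as it was signed.
    return '{}{}?user_id={}&expires={}&signature={}'.format(
        base_url, path, user.id, expires, signature)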
@@ -1,6 +1,3 @@
from flask import make_response
from functools import update_wrapper

ONE_YEAR = 60 * 60 * 24 * 365.25

headers = {
@@ -12,17 +12,19 @@ import time
import logging

from flask import render_template, send_from_directory, make_response, request, jsonify, redirect, \
    session, url_for
from flask.ext.restful import Resource, abort
from flask_login import current_user, login_user, logout_user
    session, url_for, current_app, flash
from flask.ext.restful import Resource, abort, reqparse
from flask_login import current_user, login_user, logout_user, login_required
from funcy import project
import sqlparse

from redash import redis_connection, statsd_client, models, settings, utils, __version__
from redash.wsgi import app, auth, api
from redash import statsd_client, models, settings, utils
from redash.wsgi import app, api
from redash.tasks import QueryTask, record_event
from redash.cache import headers as cache_headers
from redash.permissions import require_permission
from redash.query_runner import query_runners, validate_configuration
from redash.monitor import get_status
@app.route('/ping', methods=['GET'])
@@ -30,14 +32,19 @@ def ping():
    return 'PONG.'


@app.route('/admin/<anything>/<whatever>')
@app.route('/admin/<anything>')
@app.route('/dashboard/<anything>')
@app.route('/alerts')
@app.route('/alerts/<pk>')
@app.route('/queries')
@app.route('/data_sources')
@app.route('/data_sources/<pk>')
@app.route('/queries/<query_id>')
@app.route('/queries/<query_id>/<anything>')
@app.route('/personal')
@app.route('/')
@auth.required
@login_required
def index(**kwargs):
    email_md5 = hashlib.md5(current_user.email.lower()).hexdigest()
    gravatar_url = "https://www.gravatar.com/avatar/%s?s=40" % email_md5
@@ -66,22 +73,30 @@ def login():
        return redirect(request.args.get('next') or '/')

    if not settings.PASSWORD_LOGIN_ENABLED:
        return redirect(url_for("google_oauth.authorize", next=request.args.get('next')))
        if settings.SAML_LOGIN_ENABLED:
            return redirect(url_for("saml_auth.sp_initiated", next=request.args.get('next')))
        else:
            return redirect(url_for("google_oauth.authorize", next=request.args.get('next')))

    if request.method == 'POST':
        user = models.User.select().where(models.User.email == request.form['username']).first()
        if user and user.verify_password(request.form['password']):
            remember = ('remember' in request.form)
            login_user(user, remember=remember)
            return redirect(request.args.get('next') or '/')
        try:
            user = models.User.get_by_email(request.form['username'])
            if user and user.verify_password(request.form['password']):
                remember = ('remember' in request.form)
                login_user(user, remember=remember)
                return redirect(request.args.get('next') or '/')
            else:
                flash("Wrong username or password.")
        except models.User.DoesNotExist:
            flash("Wrong username or password.")

    return render_template("login.html",
                           name=settings.NAME,
                           analytics=settings.ANALYTICS,
                           next=request.args.get('next'),
                           username=request.form.get('username', ''),
                           show_google_openid=settings.GOOGLE_OAUTH_ENABLED)

                           show_google_openid=settings.GOOGLE_OAUTH_ENABLED,
                           show_saml_login=settings.SAML_LOGIN_ENABLED)
@app.route('/logout')
def logout():
@@ -91,43 +106,16 @@ def logout():
    return redirect('/login')

@app.route('/status.json')
@auth.required
@login_required
@require_permission('admin')
def status_api():
    status = {}
    info = redis_connection.info()
    status['redis_used_memory'] = info['used_memory_human']
    status['version'] = __version__
    status['queries_count'] = models.Query.select().count()
    status['query_results_count'] = models.QueryResult.select().count()
    status['unused_query_results_count'] = models.QueryResult.unused().count()
    status['dashboards_count'] = models.Dashboard.select().count()
    status['widgets_count'] = models.Widget.select().count()

    status['workers'] = []

    manager_status = redis_connection.hgetall('redash:status')
    status['manager'] = manager_status
    status['manager']['outdated_queries_count'] = models.Query.outdated_queries().count()

    queues = {}
    for ds in models.DataSource.select():
        for queue in (ds.queue_name, ds.scheduled_queue_name):
            queues.setdefault(queue, set())
            queues[queue].add(ds.name)

    status['manager']['queues'] = {}
    for queue, sources in queues.iteritems():
        status['manager']['queues'][queue] = {
            'data_sources': ', '.join(sources),
            'size': redis_connection.llen(queue)
        }
    status = get_status()

    return jsonify(status)
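With the route now guarded by login_required and the admin permission, the endpoint is called like any other authenticated resource. A minimal sketch, assuming the instance runs with AUTH_TYPE='api_key' so the api_key request loader shown earlier is active (host and key are illustrative):

# Illustrative sketch; not part of the diff above.
import requests

status = requests.get('http://localhost:5000/status.json',
                      params={'api_key': 'ADMIN_USER_API_KEY'}).json()
print status['version'], status['manager']['outdated_queries_count']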
@app.route('/api/queries/format', methods=['POST'])
@auth.required
@login_required
def format_sql_query():
    arguments = request.get_json(force=True)
    query = arguments.get("query", "")
@@ -135,8 +123,26 @@ def format_sql_query():
    return sqlparse.format(query, reindent=True, keyword_case='upper')


@app.route('/queries/new', methods=['POST'])
@login_required
def create_query_route():
    query = request.form.get('query', None)
    data_source_id = request.form.get('data_source_id', None)

    if query is None or data_source_id is None:
        abort(400)

    query = models.Query.create(name="New Query",
                                query=query,
                                data_source=data_source_id,
                                user=current_user._get_current_object(),
                                schedule=None)

    return redirect('/queries/{}'.format(query.id), 303)


class BaseResource(Resource):
    decorators = [auth.required]
    decorators = [login_required]

    def __init__(self, *args, **kwargs):
        super(BaseResource, self).__init__(*args, **kwargs)
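A minimal sketch of driving the new /queries/new endpoint from a script, assuming password login is enabled (host and credentials are illustrative):

# Illustrative sketch; not part of the diff above.
import requests

s = requests.Session()
s.post('http://localhost:5000/login',
       data={'username': 'user@example.com', 'password': 'secret'})

# The endpoint expects form fields, not JSON, and answers with a 303 redirect
# to the newly created query's page.
r = s.post('http://localhost:5000/queries/new',
           data={'query': 'SELECT 1', 'data_source_id': 1},
           allow_redirects=False)
print r.status_code, r.headers['Location']   # 303, /queries/<id>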
@@ -181,6 +187,34 @@ class DataSourceTypeListAPI(BaseResource):
|
||||
api.add_resource(DataSourceTypeListAPI, '/api/data_sources/types', endpoint='data_source_types')
|
||||
|
||||
|
||||
class DataSourceAPI(BaseResource):
|
||||
@require_permission('admin')
|
||||
def get(self, data_source_id):
|
||||
data_source = models.DataSource.get_by_id(data_source_id)
|
||||
return data_source.to_dict(all=True)
|
||||
|
||||
@require_permission('admin')
|
||||
def post(self, data_source_id):
|
||||
data_source = models.DataSource.get_by_id(data_source_id)
|
||||
req = request.get_json(True)
|
||||
if not validate_configuration(req['type'], req['options']):
|
||||
abort(400)
|
||||
|
||||
data_source.name = req['name']
|
||||
data_source.options = json.dumps(req['options'])
|
||||
|
||||
data_source.save()
|
||||
|
||||
return data_source.to_dict(all=True)
|
||||
|
||||
@require_permission('admin')
|
||||
def delete(self, data_source_id):
|
||||
data_source = models.DataSource.get_by_id(data_source_id)
|
||||
data_source.delete_instance(recursive=True)
|
||||
|
||||
return make_response('', 204)
|
||||
|
||||
|
||||
class DataSourceListAPI(BaseResource):
|
||||
def get(self):
|
||||
data_sources = [ds.to_dict() for ds in models.DataSource.all()]
|
||||
@@ -197,14 +231,23 @@ class DataSourceListAPI(BaseResource):
|
||||
if not validate_configuration(req['type'], req['options']):
|
||||
abort(400)
|
||||
|
||||
datasource = models.DataSource.create(name=req['name'], type=req['type'], options=req['options'])
|
||||
|
||||
return datasource.to_dict()
|
||||
datasource = models.DataSource.create(name=req['name'], type=req['type'], options=json.dumps(req['options']))
|
||||
|
||||
return datasource.to_dict(all=True)
|
||||
|
||||
api.add_resource(DataSourceListAPI, '/api/data_sources', endpoint='data_sources')
|
||||
api.add_resource(DataSourceAPI, '/api/data_sources/<data_source_id>', endpoint='data_source')
|
||||
|
||||
|
||||
class DataSourceSchemaAPI(BaseResource):
|
||||
def get(self, data_source_id):
|
||||
data_source = models.DataSource.get_by_id(data_source_id)
|
||||
schema = data_source.get_schema()
|
||||
|
||||
return schema
|
||||
|
||||
api.add_resource(DataSourceSchemaAPI, '/api/data_sources/<data_source_id>/schema')
|
||||
|
||||
class DashboardRecentAPI(BaseResource):
|
||||
def get(self):
|
||||
return [d.to_dict() for d in models.Dashboard.recent(current_user.id).limit(20)]
|
||||
@@ -318,7 +361,7 @@ class QueryListAPI(BaseResource):
|
||||
@require_permission('create_query')
|
||||
def post(self):
|
||||
query_def = request.get_json(force=True)
|
||||
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data']:
|
||||
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'last_modified_by']:
|
||||
query_def.pop(field, None)
|
||||
|
||||
query_def['user'] = self.current_user
|
||||
@@ -326,8 +369,6 @@ class QueryListAPI(BaseResource):
|
||||
query = models.Query(**query_def)
|
||||
query.save()
|
||||
|
||||
query.create_default_visualizations()
|
||||
|
||||
return query.to_dict()
|
||||
|
||||
@require_permission('view_query')
|
||||
@@ -339,9 +380,9 @@ class QueryAPI(BaseResource):
|
||||
@require_permission('edit_query')
|
||||
def post(self, query_id):
|
||||
query = models.Query.get_by_id(query_id)
|
||||
|
||||
|
||||
query_def = request.get_json(force=True)
|
||||
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'user']:
|
||||
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'user', 'last_modified_by']:
|
||||
query_def.pop(field, None)
|
||||
|
||||
if 'latest_query_data_id' in query_def:
|
||||
@@ -350,6 +391,9 @@ class QueryAPI(BaseResource):
|
||||
if 'data_source_id' in query_def:
|
||||
query_def['data_source'] = query_def.pop('data_source_id')
|
||||
|
||||
query_def['last_modified_by'] = self.current_user
|
||||
|
||||
# TODO: use #save() with #dirty_fields.
|
||||
models.Query.update_instance(query_id, **query_def)
|
||||
|
||||
query = models.Query.get_by_id(query_id)
|
||||
@@ -388,7 +432,7 @@ class VisualizationListAPI(BaseResource):
|
||||
kwargs = request.get_json(force=True)
|
||||
kwargs['options'] = json.dumps(kwargs['options'])
|
||||
kwargs['query'] = kwargs.pop('query_id')
|
||||
|
||||
|
||||
vis = models.Visualization(**kwargs)
|
||||
vis.save()
|
||||
|
||||
@@ -423,7 +467,7 @@ api.add_resource(VisualizationAPI, '/api/visualizations/<visualization_id>', end
class QueryResultListAPI(BaseResource):
    @require_permission('execute_query')
    def post(self):
        params = request.json
        params = request.get_json(force=True)

        if settings.FEATURE_TABLES_PERMISSIONS:
            metadata = utils.SQLMetaData(params['query'])
@@ -449,16 +493,19 @@ class QueryResultListAPI(BaseResource):
                activity=params['query']
            ).save()

        if params['ttl'] == 0:
        max_age = int(params.get('max_age', -1))

        if max_age == 0:
            query_result = None
        else:
            query_result = models.QueryResult.get_latest(params['data_source_id'], params['query'], int(params['ttl']))
            query_result = models.QueryResult.get_latest(params['data_source_id'], params['query'], max_age)

        if query_result:
            return {'query_result': query_result.to_dict()}
        else:
            data_source = models.DataSource.get_by_id(params['data_source_id'])
            job = QueryTask.add_task(params['query'], data_source)
            query_id = params.get('query_id', 'adhoc')
            job = QueryTask.add_task(params['query'], data_source, metadata={"Username": self.current_user.name, "Query ID": query_id})
            return {'job': job.to_dict()}
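The hunk above replaces the old ttl request field with max_age: 0 forces a fresh execution, -1 (the default) accepts any cached result, and a positive value accepts a cached result no older than that many seconds. A minimal request sketch (host, query and ids are illustrative; authentication is omitted):

# Illustrative sketch; not part of the diff above.
import requests

payload = {
    'query': 'SELECT count(*) FROM events',
    'data_source_id': 1,
    'max_age': 1800,   # accept a cached result up to 30 minutes old;
                       # 0 -> always execute, -1 -> reuse any cached result (default)
    'query_id': 42,    # optional; 'adhoc' is assumed when omitted
}
response = requests.post('http://localhost:5000/api/query_results', json=payload)
result = response.json()   # either {'query_result': {...}} or {'job': {...}}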
@@ -478,6 +525,28 @@ class QueryResultAPI(BaseResource):
|
||||
headers.update(cache_headers)
|
||||
return make_response(s.getvalue(), 200, headers)
|
||||
|
||||
@staticmethod
|
||||
def add_cors_headers(headers):
|
||||
if 'Origin' in request.headers:
|
||||
origin = request.headers['Origin']
|
||||
|
||||
if origin in settings.ACCESS_CONTROL_ALLOW_ORIGIN:
|
||||
headers['Access-Control-Allow-Origin'] = origin
|
||||
headers['Access-Control-Allow-Credentials'] = str(settings.ACCESS_CONTROL_ALLOW_CREDENTIALS).lower()
|
||||
|
||||
@require_permission('view_query')
|
||||
def options(self, query_id=None, query_result_id=None, filetype='json'):
|
||||
headers = {}
|
||||
self.add_cors_headers(headers)
|
||||
|
||||
if settings.ACCESS_CONTROL_REQUEST_METHOD:
|
||||
headers['Access-Control-Request-Method'] = settings.ACCESS_CONTROL_REQUEST_METHOD
|
||||
|
||||
if settings.ACCESS_CONTROL_ALLOW_HEADERS:
|
||||
headers['Access-Control-Allow-Headers'] = settings.ACCESS_CONTROL_ALLOW_HEADERS
|
||||
|
||||
return make_response("", 200, headers)
|
||||
|
||||
@require_permission('view_query')
|
||||
def get(self, query_id=None, query_result_id=None, filetype='json'):
|
||||
if query_result_id is None and query_id is not None:
|
||||
@@ -507,9 +576,15 @@ class QueryResultAPI(BaseResource):
|
||||
|
||||
record_event.delay(event)
|
||||
|
||||
headers = {}
|
||||
|
||||
if len(settings.ACCESS_CONTROL_ALLOW_ORIGIN) > 0:
|
||||
self.add_cors_headers(headers)
|
||||
|
||||
if filetype == 'json':
|
||||
data = json.dumps({'query_result': query_result.to_dict()}, cls=utils.JSONEncoder)
|
||||
return make_response(data, 200, cache_headers)
|
||||
headers.update(cache_headers)
|
||||
return make_response(data, 200, headers)
|
||||
else:
|
||||
return self.csv_response(query_result)
|
||||
|
||||
@@ -537,13 +612,110 @@ class JobAPI(BaseResource):
|
||||
|
||||
api.add_resource(JobAPI, '/api/jobs/<job_id>', endpoint='job')
|
||||
|
||||
|
||||
class AlertAPI(BaseResource):
|
||||
def get(self, alert_id):
|
||||
alert = models.Alert.get_by_id(alert_id)
|
||||
return alert.to_dict()
|
||||
|
||||
def post(self, alert_id):
|
||||
req = request.get_json(True)
|
||||
params = project(req, ('options', 'name', 'query_id'))
|
||||
alert = models.Alert.get_by_id(alert_id)
|
||||
if 'query_id' in params:
|
||||
params['query'] = params.pop('query_id')
|
||||
|
||||
alert.update_instance(**params)
|
||||
|
||||
record_event.delay({
|
||||
'user_id': self.current_user.id,
|
||||
'action': 'edit',
|
||||
'timestamp': int(time.time()),
|
||||
'object_id': alert.id,
|
||||
'object_type': 'alert'
|
||||
})
|
||||
|
||||
return alert.to_dict()
|
||||
|
||||
|
||||
class AlertListAPI(BaseResource):
|
||||
def post(self):
|
||||
req = request.get_json(True)
|
||||
required_fields = ('options', 'name', 'query_id')
|
||||
for f in required_fields:
|
||||
if f not in req:
|
||||
abort(400)
|
||||
|
||||
alert = models.Alert.create(
|
||||
name=req['name'],
|
||||
query=req['query_id'],
|
||||
user=self.current_user,
|
||||
options=req['options']
|
||||
)
|
||||
|
||||
record_event.delay({
|
||||
'user_id': self.current_user.id,
|
||||
'action': 'create',
|
||||
'timestamp': int(time.time()),
|
||||
'object_id': alert.id,
|
||||
'object_type': 'alert'
|
||||
})
|
||||
|
||||
# TODO: should be in model?
|
||||
models.AlertSubscription.create(alert=alert, user=self.current_user)
|
||||
|
||||
record_event.delay({
|
||||
'user_id': self.current_user.id,
|
||||
'action': 'subscribe',
|
||||
'timestamp': int(time.time()),
|
||||
'object_id': alert.id,
|
||||
'object_type': 'alert'
|
||||
})
|
||||
|
||||
return alert.to_dict()
|
||||
|
||||
def get(self):
|
||||
return [alert.to_dict() for alert in models.Alert.all()]
|
||||
|
||||
|
||||
class AlertSubscriptionListResource(BaseResource):
|
||||
def post(self, alert_id):
|
||||
subscription = models.AlertSubscription.create(alert=alert_id, user=self.current_user)
|
||||
record_event.delay({
|
||||
'user_id': self.current_user.id,
|
||||
'action': 'subscribe',
|
||||
'timestamp': int(time.time()),
|
||||
'object_id': alert_id,
|
||||
'object_type': 'alert'
|
||||
})
|
||||
return subscription.to_dict()
|
||||
|
||||
def get(self, alert_id):
|
||||
subscriptions = models.AlertSubscription.all(alert_id)
|
||||
return [s.to_dict() for s in subscriptions]
|
||||
|
||||
|
||||
class AlertSubscriptionResource(BaseResource):
|
||||
def delete(self, alert_id, subscriber_id):
|
||||
models.AlertSubscription.unsubscribe(alert_id, subscriber_id)
|
||||
record_event.delay({
|
||||
'user_id': self.current_user.id,
|
||||
'action': 'unsubscribe',
|
||||
'timestamp': int(time.time()),
|
||||
'object_id': alert_id,
|
||||
'object_type': 'alert'
|
||||
})
|
||||
|
||||
api.add_resource(AlertAPI, '/api/alerts/<alert_id>', endpoint='alert')
|
||||
api.add_resource(AlertSubscriptionListResource, '/api/alerts/<alert_id>/subscriptions', endpoint='alert_subscriptions')
|
||||
api.add_resource(AlertSubscriptionResource, '/api/alerts/<alert_id>/subscriptions/<subscriber_id>', endpoint='alert_subscription')
|
||||
api.add_resource(AlertListAPI, '/api/alerts', endpoint='alerts')
|
||||
|
||||
@app.route('/<path:filename>')
|
||||
def send_static(filename):
|
||||
return send_from_directory(settings.STATIC_ASSETS_PATH, filename)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
app.run(debug=True)
|
||||
|
||||
|
||||
if current_app.debug:
|
||||
cache_timeout = 0
|
||||
else:
|
||||
cache_timeout = None
|
||||
|
||||
return send_from_directory(settings.STATIC_ASSETS_PATH, filename, cache_timeout=cache_timeout)
|
||||
|
||||
@@ -1,25 +1,25 @@
import logging
from flask.ext.login import login_user
import requests
from flask import redirect, url_for, Blueprint
from flask import redirect, url_for, Blueprint, flash
from flask_oauth import OAuth
from redash import models, settings

logger = logging.getLogger('google_oauth')
oauth = OAuth()

request_token_params = {'scope': 'https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile', 'response_type': 'code'}

if settings.GOOGLE_APPS_DOMAIN:
    request_token_params['hd'] = settings.GOOGLE_APPS_DOMAIN
else:
if not settings.GOOGLE_APPS_DOMAIN:
    logger.warning("No Google Apps domain defined, all Google accounts allowed.")

google = oauth.remote_app('google',
                          base_url='https://www.google.com/accounts/',
                          authorize_url='https://accounts.google.com/o/oauth2/auth',
                          request_token_url=None,
                          request_token_params=request_token_params,
                          request_token_params={
                              'scope': 'https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile',
                              'response_type': 'code'
                          },
                          access_token_url='https://accounts.google.com/o/oauth2/token',
                          access_token_method='POST',
                          access_token_params={'grant_type': 'authorization_code'},
@@ -31,7 +31,7 @@ blueprint = Blueprint('google_oauth', __name__)


def get_user_profile(access_token):
    headers = {'Authorization': 'OAuth '+access_token}
    headers = {'Authorization': 'OAuth {}'.format(access_token)}
    response = requests.get('https://www.googleapis.com/oauth2/v1/userinfo', headers=headers)

    if response.status_code == 401:
@@ -41,9 +41,17 @@ def get_user_profile(access_token):
    return response.json()


def verify_profile(profile):
    if not settings.GOOGLE_APPS_DOMAIN:
        return True

    domain = profile['email'].split('@')[-1]
    return domain in settings.GOOGLE_APPS_DOMAIN


def create_and_login_user(name, email):
    try:
        user_object = models.User.get(models.User.email == email)
        user_object = models.User.get_by_email(email)
        if user_object.name != name:
            logger.debug("Updating user name (%r -> %r)", user_object.name, name)
            user_object.name = name
@@ -70,10 +78,17 @@ def authorized(resp):

    if access_token is None:
        logger.warning("Access token missing in call back request.")
        flash("Validation error. Please retry.")
        return redirect(url_for('login'))

    profile = get_user_profile(access_token)
    if profile is None:
        flash("Validation error. Please retry.")
        return redirect(url_for('login'))

    if not verify_profile(profile):
        logger.warning("User tried to login with unauthorized domain name: %s", profile['email'])
        flash("Your Google Apps domain name isn't allowed.")
        return redirect(url_for('login'))

    create_and_login_user(profile['name'], profile['email'])
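The membership test in verify_profile() means GOOGLE_APPS_DOMAIN is expected to behave like a collection of allowed domains. A small sketch of the check, assuming the setting is parsed into a set (how the setting is actually parsed lives in redash.settings, not shown here):

# Illustrative sketch; not part of the diff above.
allowed_domains = set(['example.com', 'example.org'])   # illustrative stand-in for GOOGLE_APPS_DOMAIN

def is_allowed(email):
    domain = email.split('@')[-1]
    return domain in allowed_domains

print is_allowed('alice@example.com')   # True
print is_allowed('bob@gmail.com')       # False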
@@ -28,7 +28,7 @@ class Importer(object):
    def import_query(self, user, query):
        new_query = self._get_or_create(models.Query, query['id'], name=query['name'],
                                        user=user,
                                        ttl=-1,
                                        schedule=None,
                                        query=query['query'],
                                        query_hash=query['query_hash'],
                                        description=query['description'],
redash/models.py (315 changed lines)
@@ -11,8 +11,11 @@ import peewee
|
||||
from passlib.apps import custom_app_context as pwd_context
|
||||
from playhouse.postgres_ext import ArrayField, DateTimeTZField, PostgresqlExtDatabase
|
||||
from flask.ext.login import UserMixin, AnonymousUserMixin
|
||||
import psycopg2
|
||||
|
||||
from redash import utils, settings
|
||||
from redash import utils, settings, redis_connection
|
||||
from redash.query_runner import get_query_runner
|
||||
from utils import generate_token
|
||||
|
||||
|
||||
class Database(object):
|
||||
@@ -60,6 +63,41 @@ class BaseModel(peewee.Model):
|
||||
def get_by_id(cls, model_id):
|
||||
return cls.get(cls.id == model_id)
|
||||
|
||||
def pre_save(self, created):
|
||||
pass
|
||||
|
||||
def post_save(self, created):
|
||||
# Handler for post_save operations. Overriding if needed.
|
||||
pass
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
pk_value = self._get_pk_value()
|
||||
created = kwargs.get('force_insert', False) or not bool(pk_value)
|
||||
self.pre_save(created)
|
||||
super(BaseModel, self).save(*args, **kwargs)
|
||||
self.post_save(created)
|
||||
|
||||
def update_instance(self, **kwargs):
|
||||
for k, v in kwargs.items():
|
||||
# setattr(model_instance, field_name, field_obj.python_value(value))
|
||||
setattr(self, k, v)
|
||||
|
||||
dirty_fields = self.dirty_fields
|
||||
if hasattr(self, 'updated_at'):
|
||||
dirty_fields = dirty_fields + [self.__class__.updated_at]
|
||||
|
||||
self.save(only=dirty_fields)
|
||||
|
||||
|
||||
class ModelTimestampsMixin(BaseModel):
|
||||
updated_at = DateTimeTZField(default=datetime.datetime.now)
|
||||
created_at = DateTimeTZField(default=datetime.datetime.now)
|
||||
|
||||
def pre_save(self, created):
|
||||
super(ModelTimestampsMixin, self).pre_save(created)
|
||||
|
||||
self.updated_at = datetime.datetime.now()
|
||||
|
||||
|
||||
class PermissionsCheckMixin(object):
|
||||
def has_permission(self, permission):
|
||||
@@ -118,7 +156,7 @@ class Group(BaseModel):
|
||||
return unicode(self.id)
|
||||
|
||||
|
||||
class User(BaseModel, UserMixin, PermissionsCheckMixin):
|
||||
class User(ModelTimestampsMixin, BaseModel, UserMixin, PermissionsCheckMixin):
|
||||
DEFAULT_GROUPS = ['default']
|
||||
|
||||
id = peewee.PrimaryKeyField()
|
||||
@@ -126,6 +164,7 @@ class User(BaseModel, UserMixin, PermissionsCheckMixin):
|
||||
email = peewee.CharField(max_length=320, index=True, unique=True)
|
||||
password_hash = peewee.CharField(max_length=128, null=True)
|
||||
groups = ArrayField(peewee.CharField, default=DEFAULT_GROUPS)
|
||||
api_key = peewee.CharField(max_length=40, unique=True)
|
||||
|
||||
class Meta:
|
||||
db_table = 'users'
|
||||
@@ -134,13 +173,27 @@ class User(BaseModel, UserMixin, PermissionsCheckMixin):
|
||||
return {
|
||||
'id': self.id,
|
||||
'name': self.name,
|
||||
'email': self.email
|
||||
'email': self.email,
|
||||
'gravatar_url': self.gravatar_url,
|
||||
'updated_at': self.updated_at,
|
||||
'created_at': self.created_at
|
||||
}
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
super(User, self).__init__(*args, **kwargs)
|
||||
self._allowed_tables = None
|
||||
|
||||
def pre_save(self, created):
|
||||
super(User, self).pre_save(created)
|
||||
|
||||
if not self.api_key:
|
||||
self.api_key = generate_token(40)
|
||||
|
||||
@property
|
||||
def gravatar_url(self):
|
||||
email_md5 = hashlib.md5(self.email.lower()).hexdigest()
|
||||
return "https://www.gravatar.com/avatar/%s?s=40" % email_md5
|
||||
|
||||
@property
|
||||
def permissions(self):
|
||||
# TODO: this should be cached.
|
||||
@@ -160,8 +213,12 @@ class User(BaseModel, UserMixin, PermissionsCheckMixin):
|
||||
def get_by_email(cls, email):
|
||||
return cls.get(cls.email == email)
|
||||
|
||||
@classmethod
|
||||
def get_by_api_key(cls, api_key):
|
||||
return cls.get(cls.api_key == api_key)
|
||||
|
||||
def __unicode__(self):
|
||||
return '%r, %r' % (self.name, self.email)
|
||||
return u'%s (%s)' % (self.name, self.email)
|
||||
|
||||
def hash_password(self, password):
|
||||
self.password_hash = pwd_context.encrypt(password)
|
||||
@@ -197,28 +254,68 @@ class ActivityLog(BaseModel):
|
||||
|
||||
class DataSource(BaseModel):
|
||||
id = peewee.PrimaryKeyField()
|
||||
name = peewee.CharField()
|
||||
name = peewee.CharField(unique=True)
|
||||
type = peewee.CharField()
|
||||
options = peewee.TextField()
|
||||
queue_name = peewee.CharField(default="queries")
|
||||
scheduled_queue_name = peewee.CharField(default="queries")
|
||||
scheduled_queue_name = peewee.CharField(default="scheduled_queries")
|
||||
created_at = DateTimeTZField(default=datetime.datetime.now)
|
||||
|
||||
class Meta:
|
||||
db_table = 'data_sources'
|
||||
|
||||
def to_dict(self):
|
||||
return {
|
||||
def to_dict(self, all=False):
|
||||
d = {
|
||||
'id': self.id,
|
||||
'name': self.name,
|
||||
'type': self.type
|
||||
'type': self.type,
|
||||
'syntax': self.query_runner.syntax
|
||||
}
|
||||
|
||||
if all:
|
||||
d['options'] = json.loads(self.options)
|
||||
d['queue_name'] = self.queue_name
|
||||
d['scheduled_queue_name'] = self.scheduled_queue_name
|
||||
|
||||
return d
|
||||
|
||||
def __unicode__(self):
|
||||
return self.name
|
||||
|
||||
def get_schema(self, refresh=False):
|
||||
key = "data_source:schema:{}".format(self.id)
|
||||
|
||||
cache = None
|
||||
if not refresh:
|
||||
cache = redis_connection.get(key)
|
||||
|
||||
if cache is None:
|
||||
query_runner = self.query_runner
|
||||
schema = sorted(query_runner.get_schema(), key=lambda t: t['name'])
|
||||
|
||||
redis_connection.set(key, json.dumps(schema))
|
||||
else:
|
||||
schema = json.loads(cache)
|
||||
|
||||
return schema
|
||||
|
||||
@property
|
||||
def query_runner(self):
|
||||
return get_query_runner(self.type, self.options)
|
||||
|
||||
@classmethod
|
||||
def all(cls):
|
||||
return cls.select().order_by(cls.id.asc())
|
||||
|
||||
|
||||
class JSONField(peewee.TextField):
|
||||
def db_value(self, value):
|
||||
return json.dumps(value)
|
||||
|
||||
def python_value(self, value):
|
||||
return json.loads(value)
|
||||
|
||||
|
||||
class QueryResult(BaseModel):
|
||||
id = peewee.PrimaryKeyField()
|
||||
data_source = peewee.ForeignKeyField(DataSource)
|
||||
@@ -252,16 +349,16 @@ class QueryResult(BaseModel):
|
||||
return unused_results
|
||||
|
||||
@classmethod
|
||||
def get_latest(cls, data_source, query, ttl=0):
|
||||
def get_latest(cls, data_source, query, max_age=0):
|
||||
query_hash = utils.gen_query_hash(query)
|
||||
|
||||
if ttl == -1:
|
||||
if max_age == -1:
|
||||
query = cls.select().where(cls.query_hash == query_hash,
|
||||
cls.data_source == data_source).order_by(cls.retrieved_at.desc())
|
||||
else:
|
||||
query = cls.select().where(cls.query_hash == query_hash, cls.data_source == data_source,
|
||||
peewee.SQL("retrieved_at + interval '%s second' >= now() at time zone 'utc'",
|
||||
ttl)).order_by(cls.retrieved_at.desc())
|
||||
max_age)).order_by(cls.retrieved_at.desc())
|
||||
|
||||
return query.first()
|
||||
|
||||
@@ -276,42 +373,61 @@ class QueryResult(BaseModel):
|
||||
|
||||
logging.info("Inserted query (%s) data; id=%s", query_hash, query_result.id)
|
||||
|
||||
updated_count = Query.update(latest_query_data=query_result).\
|
||||
where(Query.query_hash==query_hash, Query.data_source==data_source_id).\
|
||||
execute()
|
||||
sql = "UPDATE queries SET latest_query_data_id = %s WHERE query_hash = %s AND data_source_id = %s RETURNING id"
|
||||
query_ids = [row[0] for row in db.database.execute_sql(sql, params=(query_result.id, query_hash, data_source_id))]
|
||||
|
||||
logging.info("Updated %s queries with result (%s).", updated_count, query_hash)
|
||||
# TODO: when peewee with update & returning support is released, we can get back to using this code:
|
||||
# updated_count = Query.update(latest_query_data=query_result).\
|
||||
# where(Query.query_hash==query_hash, Query.data_source==data_source_id).\
|
||||
# execute()
|
||||
|
||||
return query_result
|
||||
logging.info("Updated %s queries with result (%s).", len(query_ids), query_hash)
|
||||
|
||||
return query_result, query_ids
|
||||
|
||||
def __unicode__(self):
|
||||
return u"%d | %s | %s" % (self.id, self.query_hash, self.retrieved_at)
|
||||
|
||||
|
||||
class Query(BaseModel):
def should_schedule_next(previous_iteration, now, schedule):
    if schedule.isdigit():
        ttl = int(schedule)
        next_iteration = previous_iteration + datetime.timedelta(seconds=ttl)
    else:
        hour, minute = schedule.split(':')
        hour, minute = int(hour), int(minute)

        # The following logic is needed for cases like the following:
        # - The query scheduled to run at 23:59.
        # - The scheduler wakes up at 00:01.
        # - Using naive implementation of comparing timestamps, it will skip the execution.
        normalized_previous_iteration = previous_iteration.replace(hour=hour, minute=minute)
        if normalized_previous_iteration > previous_iteration:
            previous_iteration = normalized_previous_iteration - datetime.timedelta(days=1)

        next_iteration = (previous_iteration + datetime.timedelta(days=1)).replace(hour=hour, minute=minute)

    return now > next_iteration

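A worked example of the normalization above, assuming the query is scheduled for 23:59, its last run was recorded shortly after midnight, and the scheduler wakes up at 00:01 the following day (timestamps are illustrative; should_schedule_next is the helper defined above):

# Illustrative sketch; not part of the diff above.
import datetime

previous = datetime.datetime(2015, 6, 1, 0, 5)   # last execution recorded at 00:05
now = datetime.datetime(2015, 6, 2, 0, 1)        # scheduler wakes up at 00:01 the next day

# Without the normalization, next_iteration would be June 2nd 23:59 and the run
# would be skipped; with it, previous is shifted back to May 31st 23:59, so
# next_iteration becomes June 1st 23:59 and the query is scheduled again.
print should_schedule_next(previous, now, "23:59")   # True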
class Query(ModelTimestampsMixin, BaseModel):
|
||||
id = peewee.PrimaryKeyField()
|
||||
data_source = peewee.ForeignKeyField(DataSource)
|
||||
data_source = peewee.ForeignKeyField(DataSource, null=True)
|
||||
latest_query_data = peewee.ForeignKeyField(QueryResult, null=True)
|
||||
name = peewee.CharField(max_length=255)
|
||||
description = peewee.CharField(max_length=4096, null=True)
|
||||
query = peewee.TextField()
|
||||
query_hash = peewee.CharField(max_length=32)
|
||||
api_key = peewee.CharField(max_length=40)
|
||||
ttl = peewee.IntegerField()
|
||||
user_email = peewee.CharField(max_length=360, null=True)
|
||||
user = peewee.ForeignKeyField(User)
|
||||
last_modified_by = peewee.ForeignKeyField(User, null=True, related_name="modified_queries")
|
||||
is_archived = peewee.BooleanField(default=False, index=True)
|
||||
created_at = DateTimeTZField(default=datetime.datetime.now)
|
||||
schedule = peewee.CharField(max_length=10, null=True)
|
||||
|
||||
class Meta:
|
||||
db_table = 'queries'
|
||||
|
||||
def create_default_visualizations(self):
|
||||
table_visualization = Visualization(query=self, name="Table",
|
||||
description='',
|
||||
type="TABLE", options="{}")
|
||||
table_visualization.save()
|
||||
|
||||
def to_dict(self, with_stats=False, with_visualizations=False, with_user=True):
|
||||
d = {
|
||||
'id': self.id,
|
||||
@@ -320,15 +436,17 @@ class Query(BaseModel):
|
||||
'description': self.description,
|
||||
'query': self.query,
|
||||
'query_hash': self.query_hash,
|
||||
'ttl': self.ttl,
|
||||
'schedule': self.schedule,
|
||||
'api_key': self.api_key,
|
||||
'is_archived': self.is_archived,
|
||||
'updated_at': self.updated_at,
|
||||
'created_at': self.created_at,
|
||||
'data_source_id': self._data.get('data_source', None)
|
||||
}
|
||||
|
||||
if with_user:
|
||||
d['user'] = self.user.to_dict()
|
||||
d['last_modified_by'] = self.last_modified_by.to_dict() if self.last_modified_by is not None else None
|
||||
else:
|
||||
d['user_id'] = self._data['user']
|
||||
|
||||
@@ -344,7 +462,7 @@ class Query(BaseModel):
|
||||
|
||||
def archive(self):
|
||||
self.is_archived = True
|
||||
self.ttl = -1
|
||||
self.schedule = None
|
||||
|
||||
for vis in self.visualizations:
|
||||
for w in vis.widgets:
|
||||
@@ -365,21 +483,19 @@ class Query(BaseModel):
|
||||
|
||||
@classmethod
|
||||
def outdated_queries(cls):
|
||||
# TODO: this will only find scheduled queries that were executed before. I think this is
|
||||
# a reasonable assumption, but worth revisiting.
|
||||
outdated_queries_ids = cls.select(
|
||||
peewee.Func('first_value', cls.id).over(partition_by=[cls.query_hash, cls.data_source])) \
|
||||
.join(QueryResult) \
|
||||
.where(cls.ttl > 0,
|
||||
cls.is_archived==False,
|
||||
(QueryResult.retrieved_at +
|
||||
(cls.ttl * peewee.SQL("interval '1 second'"))) <
|
||||
peewee.SQL("(now() at time zone 'utc')"))
|
||||
queries = cls.select(cls, QueryResult.retrieved_at, DataSource)\
|
||||
.join(QueryResult)\
|
||||
.switch(Query).join(DataSource)\
|
||||
.where(cls.schedule != None)
|
||||
|
||||
queries = cls.select(cls, DataSource).join(DataSource) \
|
||||
.where(cls.id << outdated_queries_ids)
|
||||
now = utils.utcnow()
|
||||
outdated_queries = {}
|
||||
for query in queries:
|
||||
if should_schedule_next(query.latest_query_data.retrieved_at, now, query.schedule):
|
||||
key = "{}:{}".format(query.query_hash, query.data_source.id)
|
||||
outdated_queries[key] = query
|
||||
|
||||
return queries
|
||||
return outdated_queries.values()
|
||||
|
||||
@classmethod
|
||||
def search(cls, term):
|
||||
@@ -396,6 +512,7 @@ class Query(BaseModel):
|
||||
|
||||
@classmethod
|
||||
def recent(cls, user_id):
|
||||
# TODO: instead of t2 here, we should define table_alias for Query table
|
||||
return cls.select().where(Event.created_at > peewee.SQL("current_date - 7")).\
|
||||
join(Event, on=(Query.id == peewee.SQL("t2.object_id::integer"))).\
|
||||
where(Event.action << ('edit', 'execute', 'edit_name', 'edit_description', 'view_source')).\
|
||||
@@ -414,10 +531,23 @@ class Query(BaseModel):
|
||||
update = cls.update(**kwargs).where(cls.id == query_id)
|
||||
return update.execute()
|
||||
|
||||
def save(self, *args, **kwargs):
|
||||
def pre_save(self, created):
|
||||
super(Query, self).pre_save(created)
|
||||
self.query_hash = utils.gen_query_hash(self.query)
|
||||
self._set_api_key()
|
||||
super(Query, self).save(*args, **kwargs)
|
||||
|
||||
if self.last_modified_by is None:
|
||||
self.last_modified_by = self.user
|
||||
|
||||
def post_save(self, created):
|
||||
if created:
|
||||
self._create_default_visualizations()
|
||||
|
||||
def _create_default_visualizations(self):
|
||||
table_visualization = Visualization(query=self, name="Table",
|
||||
description='',
|
||||
type="TABLE", options="{}")
|
||||
table_visualization.save()
|
||||
|
||||
def _set_api_key(self):
|
||||
if not self.api_key:
|
||||
@@ -436,7 +566,84 @@ class Query(BaseModel):
|
||||
return unicode(self.id)
|
||||
|
||||
|
||||
class Dashboard(BaseModel):
|
||||
class Alert(ModelTimestampsMixin, BaseModel):
|
||||
UNKNOWN_STATE = 'unknown'
|
||||
OK_STATE = 'ok'
|
||||
TRIGGERED_STATE = 'triggered'
|
||||
|
||||
id = peewee.PrimaryKeyField()
|
||||
name = peewee.CharField()
|
||||
query = peewee.ForeignKeyField(Query, related_name='alerts')
|
||||
user = peewee.ForeignKeyField(User, related_name='alerts')
|
||||
options = JSONField()
|
||||
state = peewee.CharField(default=UNKNOWN_STATE)
|
||||
last_triggered_at = DateTimeTZField(null=True)
|
||||
|
||||
class Meta:
|
||||
db_table = 'alerts'
|
||||
|
||||
@classmethod
|
||||
def all(cls):
|
||||
return cls.select(Alert, User, Query).join(Query).switch(Alert).join(User)
|
||||
|
||||
def to_dict(self):
|
||||
return {
|
||||
'id': self.id,
|
||||
'name': self.name,
|
||||
'query': self.query.to_dict(),
|
||||
'user': self.user.to_dict(),
|
||||
'options': self.options,
|
||||
'state': self.state,
|
||||
'last_triggered_at': self.last_triggered_at,
|
||||
'updated_at': self.updated_at,
|
||||
'created_at': self.created_at
|
||||
}
|
||||
|
||||
def evaluate(self):
|
||||
data = json.loads(self.query.latest_query_data.data)
|
||||
# todo: safe guard for empty
|
||||
value = data['rows'][0][self.options['column']]
|
||||
op = self.options['op']
|
||||
|
||||
if op == 'greater than' and value > self.options['value']:
|
||||
new_state = self.TRIGGERED_STATE
|
||||
elif op == 'less than' and value < self.options['value']:
|
||||
new_state = self.TRIGGERED_STATE
|
||||
elif op == 'equals' and value == self.options['value']:
|
||||
new_state = self.TRIGGERED_STATE
|
||||
else:
|
||||
new_state = self.OK_STATE
|
||||
|
||||
return new_state
|
||||
|
||||
def subscribers(self):
|
||||
return User.select().join(AlertSubscription).where(AlertSubscription.alert==self)
|
||||
|
||||
|
||||
class AlertSubscription(ModelTimestampsMixin, BaseModel):
|
||||
user = peewee.ForeignKeyField(User)
|
||||
alert = peewee.ForeignKeyField(Alert)
|
||||
|
||||
class Meta:
|
||||
db_table = 'alert_subscriptions'
|
||||
|
||||
def to_dict(self):
|
||||
return {
|
||||
'user': self.user.to_dict(),
|
||||
'alert_id': self._data['alert']
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def all(cls, alert_id):
|
||||
return AlertSubscription.select(AlertSubscription, User).join(User).where(AlertSubscription.alert==alert_id)
|
||||
|
||||
@classmethod
|
||||
def unsubscribe(cls, alert_id, user_id):
|
||||
query = AlertSubscription.delete().where(AlertSubscription.alert==alert_id).where(AlertSubscription.user==user_id)
|
||||
return query.execute()
|
||||
|
||||
|
||||
class Dashboard(ModelTimestampsMixin, BaseModel):
|
||||
id = peewee.PrimaryKeyField()
|
||||
slug = peewee.CharField(max_length=140, index=True)
|
||||
name = peewee.CharField(max_length=100)
|
||||
@@ -445,7 +652,6 @@ class Dashboard(BaseModel):
|
||||
layout = peewee.TextField()
|
||||
dashboard_filters_enabled = peewee.BooleanField(default=False)
|
||||
is_archived = peewee.BooleanField(default=False, index=True)
|
||||
created_at = DateTimeTZField(default=datetime.datetime.now)
|
||||
|
||||
class Meta:
|
||||
db_table = 'dashboards'
|
||||
@@ -487,7 +693,9 @@ class Dashboard(BaseModel):
|
||||
'user_id': self._data['user'],
|
||||
'layout': layout,
|
||||
'dashboard_filters_enabled': self.dashboard_filters_enabled,
|
||||
'widgets': widgets_layout
|
||||
'widgets': widgets_layout,
|
||||
'updated_at': self.updated_at,
|
||||
'created_at': self.created_at
|
||||
}
|
||||
|
||||
@classmethod
|
||||
@@ -520,7 +728,7 @@ class Dashboard(BaseModel):
|
||||
return u"%s=%s" % (self.id, self.name)
|
||||
|
||||
|
||||
class Visualization(BaseModel):
|
||||
class Visualization(ModelTimestampsMixin, BaseModel):
|
||||
id = peewee.PrimaryKeyField()
|
||||
type = peewee.CharField(max_length=100)
|
||||
query = peewee.ForeignKeyField(Query, related_name='visualizations')
|
||||
@@ -538,6 +746,8 @@ class Visualization(BaseModel):
|
||||
'name': self.name,
|
||||
'description': self.description,
|
||||
'options': json.loads(self.options),
|
||||
'updated_at': self.updated_at,
|
||||
'created_at': self.created_at
|
||||
}
|
||||
|
||||
if with_query:
|
||||
@@ -549,14 +759,13 @@ class Visualization(BaseModel):
|
||||
return u"%s %s" % (self.id, self.type)
|
||||
|
||||
|
||||
class Widget(BaseModel):
|
||||
class Widget(ModelTimestampsMixin, BaseModel):
|
||||
id = peewee.PrimaryKeyField()
|
||||
visualization = peewee.ForeignKeyField(Visualization, related_name='widgets', null=True)
|
||||
text = peewee.TextField(null=True)
|
||||
width = peewee.IntegerField()
|
||||
options = peewee.TextField()
|
||||
dashboard = peewee.ForeignKeyField(Dashboard, related_name='widgets', index=True)
|
||||
created_at = DateTimeTZField(default=datetime.datetime.now)
|
||||
|
||||
# unused; kept for backward compatability:
|
||||
type = peewee.CharField(max_length=100, null=True)
|
||||
@@ -571,7 +780,9 @@ class Widget(BaseModel):
|
||||
'width': self.width,
|
||||
'options': json.loads(self.options),
|
||||
'dashboard_id': self._data['dashboard'],
|
||||
'text': self.text
|
||||
'text': self.text,
|
||||
'updated_at': self.updated_at,
|
||||
'created_at': self.created_at
|
||||
}
|
||||
|
||||
if self.visualization and self.visualization.id:
|
||||
@@ -621,7 +832,7 @@ class Event(BaseModel):
|
||||
return event
|
||||
|
||||
|
||||
all_models = (DataSource, User, QueryResult, Query, Dashboard, Visualization, Widget, ActivityLog, Group, Event)
|
||||
all_models = (DataSource, User, QueryResult, Query, Alert, Dashboard, Visualization, Widget, ActivityLog, Group, Event)
|
||||
|
||||
|
||||
def init_db():
|
||||
|
||||
redash/monitor.py (new file, 33 lines)
@@ -0,0 +1,33 @@
from redash import redis_connection, models, __version__

def get_status():
    status = {}
    info = redis_connection.info()
    status['redis_used_memory'] = info['used_memory_human']
    status['version'] = __version__
    status['queries_count'] = models.Query.select().count()
    status['query_results_count'] = models.QueryResult.select().count()
    status['unused_query_results_count'] = models.QueryResult.unused().count()
    status['dashboards_count'] = models.Dashboard.select().count()
    status['widgets_count'] = models.Widget.select().count()

    status['workers'] = []

    manager_status = redis_connection.hgetall('redash:status')
    status['manager'] = manager_status
    status['manager']['outdated_queries_count'] = len(models.Query.outdated_queries())

    queues = {}
    for ds in models.DataSource.select():
        for queue in (ds.queue_name, ds.scheduled_queue_name):
            queues.setdefault(queue, set())
            queues[queue].add(ds.name)

    status['manager']['queues'] = {}
    for queue, sources in queues.iteritems():
        status['manager']['queues'][queue] = {
            'data_sources': ', '.join(sources),
            'size': redis_connection.llen(queue)
        }

    return status
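The dictionary returned by get_status() (and served by /status.json) ends up shaped roughly like the sketch below; all values are made up, and the manager section also carries whatever fields the manager process wrote to the redash:status hash:

# Illustrative shape of the get_status() payload; values are made up.
example_status = {
    'version': '0.7.0',
    'redis_used_memory': '12.5M',
    'queries_count': 120,
    'query_results_count': 4300,
    'unused_query_results_count': 310,
    'dashboards_count': 15,
    'widgets_count': 64,
    'workers': [],
    'manager': {
        # ...plus any fields read from the redash:status hash
        'outdated_queries_count': 3,
        'queues': {
            'queries': {'data_sources': 'prod db', 'size': 0},
            'scheduled_queries': {'data_sources': 'prod db', 'size': 2},
        },
    },
}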
@@ -15,6 +15,7 @@ __all__ = [
    'TYPE_STRING',
    'TYPE_DATE',
    'TYPE_FLOAT',
    'SUPPORTED_COLUMN_TYPES',
    'register',
    'get_query_runner',
    'import_query_runners'
@@ -28,10 +29,19 @@ TYPE_STRING = 'string'
TYPE_DATETIME = 'datetime'
TYPE_DATE = 'date'

SUPPORTED_COLUMN_TYPES = set([
    TYPE_INTEGER,
    TYPE_FLOAT,
    TYPE_BOOLEAN,
    TYPE_STRING,
    TYPE_DATETIME,
    TYPE_DATE
])

class BaseQueryRunner(object):
    def __init__(self, configuration):
        jsonschema.validate(configuration, self.configuration_schema())
        self.syntax = 'sql'
        self.configuration = configuration

    @classmethod
@@ -57,6 +67,9 @@ class BaseQueryRunner(object):
    def run_query(self, query):
        raise NotImplementedError()

    def get_schema(self):
        return []

    @classmethod
    def to_dict(cls):
        return {
@@ -92,7 +105,11 @@ def validate_configuration(query_runner_type, configuration_json):
        return False

    try:
        jsonschema.validate(json.loads(configuration_json), query_runner_class.configuration_schema())
        if isinstance(configuration_json, basestring):
            configuration = json.loads(configuration_json)
        else:
            configuration = configuration_json
        jsonschema.validate(configuration, query_runner_class.configuration_schema())
    except (ValidationError, ValueError):
        return False

@@ -101,4 +118,4 @@ def validate_configuration(query_runner_type, configuration_json):

def import_query_runners(query_runner_imports):
    for runner_import in query_runner_imports:
        __import__(runner_import)
        __import__(runner_import)
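The pieces above (configuration_schema validation in __init__, run_query, get_schema, register) are essentially the contract a query runner has to satisfy. A minimal sketch of a custom runner, assuming the same "columns"/"rows" JSON layout the bundled runners produce; the runner name and configuration keys are illustrative:

# Illustrative sketch of a minimal query runner; not part of the diff above.
import json

from redash.query_runner import BaseQueryRunner, TYPE_STRING, register


class EchoRunner(BaseQueryRunner):
    @classmethod
    def type(cls):
        return "echo"

    @classmethod
    def configuration_schema(cls):
        return {
            'type': 'object',
            'properties': {
                'prefix': {'type': 'string', 'title': 'Prefix'}
            },
            'required': ['prefix']
        }

    def run_query(self, query):
        # run_query returns (json_data, error); error is None on success.
        rows = [{'echo': '{}{}'.format(self.configuration['prefix'], query)}]
        columns = [{'name': 'echo', 'friendly_name': 'echo', 'type': TYPE_STRING}]
        return json.dumps({'columns': columns, 'rows': rows}), None


register(EchoRunner)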
@@ -1,3 +1,4 @@
|
||||
from base64 import b64decode
|
||||
import datetime
|
||||
import json
|
||||
import httplib2
|
||||
@@ -5,6 +6,8 @@ import logging
|
||||
import sys
|
||||
import time
|
||||
|
||||
import requests
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
|
||||
@@ -15,6 +18,7 @@ try:
|
||||
from apiclient.discovery import build
|
||||
from apiclient.errors import HttpError
|
||||
from oauth2client.client import SignedJwtAssertionCredentials
|
||||
from oauth2client import gce
|
||||
|
||||
enabled = True
|
||||
except ImportError:
|
||||
@@ -66,18 +70,6 @@ def _load_key(filename):
|
||||
f.close()
|
||||
|
||||
|
||||
def _get_bigquery_service(service_account, private_key):
|
||||
scope = [
|
||||
"https://www.googleapis.com/auth/bigquery",
|
||||
]
|
||||
|
||||
credentials = SignedJwtAssertionCredentials(service_account, private_key, scope=scope)
|
||||
http = httplib2.Http()
|
||||
http = credentials.authorize(http)
|
||||
|
||||
return build("bigquery", "v2", http=http)
|
||||
|
||||
|
||||
def _get_query_results(jobs, project_id, job_id, start_index):
|
||||
query_reply = jobs.getQueryResults(projectId=project_id, jobId=job_id, startIndex=start_index).execute()
|
||||
logging.debug('query_reply %s', query_reply)
|
||||
@@ -98,30 +90,39 @@ class BigQuery(BaseQueryRunner):
|
||||
return {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'serviceAccount': {
|
||||
'type': 'string',
|
||||
'title': 'Service Account'
|
||||
},
|
||||
'projectId': {
|
||||
'type': 'string',
|
||||
'title': 'Project ID'
|
||||
},
|
||||
'privateKey': {
|
||||
'type': 'string',
|
||||
'title': 'Private Key Path'
|
||||
'jsonKeyFile': {
|
||||
"type": "string",
|
||||
'title': 'JSON Key File'
|
||||
}
|
||||
},
|
||||
'required': ['serviceAccount', 'projectId', 'privateKey']
|
||||
'required': ['jsonKeyFile', 'projectId']
|
||||
}
|
||||
|
||||
def __init__(self, configuration_json):
|
||||
super(BigQuery, self).__init__(configuration_json)
|
||||
|
||||
self.private_key = _load_key(self.configuration["privateKey"])
|
||||
def _get_bigquery_service(self):
|
||||
scope = [
|
||||
"https://www.googleapis.com/auth/bigquery",
|
||||
]
|
||||
|
||||
key = json.loads(b64decode(self.configuration['jsonKeyFile']))
|
||||
|
||||
credentials = SignedJwtAssertionCredentials(key['client_email'], key['private_key'], scope=scope)
|
||||
http = httplib2.Http()
|
||||
http = credentials.authorize(http)
|
||||
|
||||
return build("bigquery", "v2", http=http)
|
||||
|
||||
def _get_project_id(self):
|
||||
return self.configuration["projectId"]
|
||||
|
||||
def run_query(self, query):
|
||||
bigquery_service = _get_bigquery_service(self.configuration["serviceAccount"],
|
||||
self.private_key)
|
||||
bigquery_service = self._get_bigquery_service()
|
||||
|
||||
jobs = bigquery_service.jobs()
|
||||
job_data = {
|
||||
@@ -134,13 +135,13 @@ class BigQuery(BaseQueryRunner):
|
||||
|
||||
logger.debug("BigQuery got query: %s", query)
|
||||
|
||||
project_id = self.configuration["projectId"]
|
||||
project_id = self._get_project_id()
|
||||
|
||||
try:
|
||||
insert_response = jobs.insert(projectId=project_id, body=job_data).execute()
|
||||
current_row = 0
|
||||
query_reply = _get_query_results(jobs, project_id=project_id,
|
||||
job_id=insert_response['jobReference']['jobId'], start_index=current_row)
|
||||
job_id=insert_response['jobReference']['jobId'], start_index=current_row)
|
||||
|
||||
logger.debug("bigquery replied: %s", query_reply)
|
||||
|
||||
@@ -176,4 +177,26 @@ class BigQuery(BaseQueryRunner):

        return json_data, error

register(BigQuery)

class BigQueryGCE(BigQuery):
    @classmethod
    def type(cls):
        return "bigquery_gce"

    @classmethod
    def configuration_schema(cls):
        return {}

    def _get_project_id(self):
        return requests.get('http://metadata/computeMetadata/v1/project/project-id', headers={'Metadata-Flavor': 'Google'}).content

    def _get_bigquery_service(self):
        credentials = gce.AppAssertionCredentials(scope='https://www.googleapis.com/auth/bigquery')
        http = httplib2.Http()
        http = credentials.authorize(http)

        return build("bigquery", "v2", http=http)


register(BigQuery)
register(BigQueryGCE)
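The new jsonKeyFile option introduced earlier in this file stores the Google service account's JSON key file base64-encoded, since the runner calls b64decode() before json.loads(). A small sketch of preparing that configuration value (file path and project id are illustrative):

# Illustrative sketch; not part of the diff above.
from base64 import b64encode

with open('my-service-account.json') as f:       # path to the downloaded JSON key file
    encoded_key = b64encode(f.read())

options = {
    'projectId': 'my-bigquery-project',          # illustrative project id
    'jsonKeyFile': encoded_key,
}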
redash/query_runner/elasticsearch.py (new file, 259 lines)
@@ -0,0 +1,259 @@
|
||||
import datetime
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
import urllib
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash import models
|
||||
|
||||
import requests
|
||||
import dateutil
|
||||
from dateutil.parser import parse
|
||||
|
||||
try:
|
||||
import http.client as http_client
|
||||
except ImportError:
|
||||
# Python 2
|
||||
import httplib as http_client
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
ELASTICSEARCH_TYPES_MAPPING = {
|
||||
"integer" : TYPE_INTEGER,
|
||||
"long" : TYPE_INTEGER,
|
||||
"float" : TYPE_FLOAT,
|
||||
"double" : TYPE_FLOAT,
|
||||
"boolean" : TYPE_BOOLEAN,
|
||||
"string" : TYPE_STRING,
|
||||
"date" : TYPE_DATE,
|
||||
# "geo_point" TODO: Need to split to 2 fields somehow
|
||||
}
|
||||
|
||||
PYTHON_TYPES_MAPPING = {
|
||||
str: TYPE_STRING,
|
||||
unicode: TYPE_STRING,
|
||||
bool : TYPE_BOOLEAN,
|
||||
int : TYPE_INTEGER,
|
||||
long: TYPE_INTEGER,
|
||||
float: TYPE_FLOAT
|
||||
}
|
||||
|
||||
#
|
||||
# ElasticSearch currently supports only simple Lucene style queries (like Kibana
|
||||
# but without the aggregation).
|
||||
#
|
||||
# Full blown JSON based ElasticSearch queries (including aggregations) will be
|
||||
# added later
|
||||
#
|
||||
# Simple query example:
|
||||
#
|
||||
# - Query the index named "twitter"
|
||||
# - Filter by "user:kimchy"
|
||||
# - Return the fields: "@timestamp", "tweet" and "user"
|
||||
# - Return up to 15 results
|
||||
# - Sort by @timestamp ascending
|
||||
#
|
||||
# {
|
||||
# "index" : "twitter",
|
||||
# "query" : "user:kimchy",
|
||||
# "fields" : ["@timestamp", "tweet", "user"],
|
||||
# "size" : 15,
|
||||
# "sort" : "@timestamp:asc"
|
||||
# }
|
||||
#
|
||||
#
|
||||
# Simple query on a logstash ElasticSearch instance:
|
||||
#
|
||||
# - Query the index named "logstash-2015.04.*" (in this case its all of April 2015)
|
||||
# - Filter by type:events AND eventName:UserUpgrade AND channel:selfserve
|
||||
# - Return fields: "@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"
|
||||
# - Return up to 250 results
|
||||
# - Sort by @timestamp ascending
|
||||
|
||||
# {
|
||||
# "index" : "logstash-2015.04.*",
|
||||
# "query" : "type:events AND eventName:UserUpgrade AND channel:selfserve",
|
||||
# "fields" : ["@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"],
|
||||
# "size" : 250,
|
||||
# "sort" : "@timestamp:asc"
|
||||
# }
|
||||
#
|
||||
#
|
||||
|
||||
class ElasticSearch(BaseQueryRunner):
|
||||
DEBUG_ENABLED = False
|
||||
|
||||
"""
|
||||
ElastichSearch query runner for querying ElasticSearch servers.
|
||||
Query can be done using the Lucene Syntax (single line) or the more complex,
|
||||
full blown ElasticSearch JSON syntax
|
||||
"""
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'server': {
|
||||
'type': 'string',
|
||||
'title': 'Base URL'
|
||||
}
|
||||
},
|
||||
"required" : ["server"]
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def enabled(cls):
|
||||
return True
|
||||
|
||||
@classmethod
|
||||
def annotate_query(cls):
|
||||
return False
|
||||
|
||||
def __init__(self, configuration_json):
|
||||
super(ElasticSearch, self).__init__(configuration_json)
|
||||
|
||||
self.syntax = "json"
|
||||
|
||||
if self.DEBUG_ENABLED:
|
||||
http_client.HTTPConnection.debuglevel = 1
|
||||
|
||||
# you need to initialize logging, otherwise you will not see anything from requests
|
||||
logging.basicConfig()
|
||||
logging.getLogger().setLevel(logging.DEBUG)
|
||||
requests_log = logging.getLogger("requests.packages.urllib3")
|
||||
requests_log.setLevel(logging.DEBUG)
|
||||
requests_log.propagate = True
|
||||
|
||||
def get_mappings(self, url):
|
||||
mappings = {}
|
||||
|
||||
r = requests.get(url)
|
||||
mappings_data = r.json()
|
||||
for index_name in mappings_data:
|
||||
index_mappings = mappings_data[index_name]
|
||||
for m in index_mappings.get("mappings", {}):
|
||||
for property_name in index_mappings["mappings"][m]["properties"]:
|
||||
property_data = index_mappings["mappings"][m]["properties"][property_name]
|
||||
if not property_name in mappings:
|
||||
property_type = property_data.get("type", None)
|
||||
if property_type:
|
||||
if property_type in ELASTICSEARCH_TYPES_MAPPING:
|
||||
mappings[property_name] = property_type
|
||||
else:
|
||||
raise "Unknown property type: {0}".format(property_type)
|
||||
|
||||
return mappings
|
||||
|
||||
def parse_results(self, mappings, result_fields, raw_result, result_columns, result_rows):
|
||||
result_columns_index = {}
|
||||
for c in result_columns:
|
||||
result_columns_index[c["name"]] = c
|
||||
|
||||
result_fields_index = {}
|
||||
if result_fields:
|
||||
for r in result_fields:
|
||||
result_fields_index[r] = None
|
||||
|
||||
for h in raw_result["hits"]["hits"]:
|
||||
row = {}
|
||||
for column in h["_source"]:
|
||||
if result_fields and column not in result_fields_index:
|
||||
continue
|
||||
|
||||
if column not in result_columns_index:
|
||||
result_columns.append({
|
||||
"name" : column,
|
||||
"friendly_name" : column,
|
||||
"type" : mappings.get(column, "string")
|
||||
})
|
||||
result_columns_index[column] = result_columns[-1]
|
||||
|
||||
row[column] = h["_source"][column]
|
||||
|
||||
if row and len(row) > 0:
|
||||
result_rows.append(row)
|
||||
|
||||
def execute_simple_query(self, url, _from, mappings, result_fields, result_columns, result_rows):
|
||||
url += "&from={0}".format(_from)
|
||||
r = requests.get(url)
|
||||
if r.status_code != 200:
|
||||
raise Exception("Failed to execute query. Return Code: {0} Reason: {1}".format(r.status_code, r.text))
|
||||
|
||||
raw_result = r.json()
|
||||
|
||||
self.parse_results(mappings, result_fields, raw_result, result_columns, result_rows)
|
||||
|
||||
total = raw_result["hits"]["total"]
|
||||
result_size = len(raw_result["hits"]["hits"])
|
||||
logger.debug("Result Size: {0} Total: {1}".format(result_size, total))
|
||||
|
||||
return raw_result["hits"]["total"]
|
||||
|
||||
def run_query(self, query):
|
||||
try:
|
||||
error = None
|
||||
|
||||
logger.debug(query)
|
||||
query_params = json.loads(query)
|
||||
|
||||
index_name = query_params["index"]
|
||||
query_data = query_params["query"]
|
||||
size = int(query_params.get("size", 500))
|
||||
result_fields = query_params.get("fields", None)
|
||||
sort = query_params.get("sort", None)
|
||||
|
||||
server_url = self.configuration["server"]
|
||||
if not server_url:
|
||||
error = "Missing configuration key 'server'"
|
||||
return None, error
|
||||
|
||||
|
||||
if server_url[-1] == "/":
|
||||
server_url = server_url[:-1]
|
||||
|
||||
url = "{0}/{1}/_search?".format(server_url, index_name)
|
||||
mapping_url = "{0}/{1}/_mapping".format(server_url, index_name)
|
||||
|
||||
mappings = self.get_mappings(mapping_url)
|
||||
|
||||
logger.debug(json.dumps(mappings, indent=4))
|
||||
|
||||
if size:
|
||||
url += "&size={0}".format(size)
|
||||
|
||||
if sort:
|
||||
url += "&sort={0}".format(urllib.quote_plus(sort))
|
||||
|
||||
url += "&q={0}".format(urllib.quote_plus(query_data))
|
||||
|
||||
logger.debug("Using URL: {0}".format(url))
|
||||
logger.debug("Using Query: {0}".format(query_data))
|
||||
|
||||
result_columns = []
|
||||
result_rows = []
|
||||
if isinstance(query_data, str) or isinstance(query_data, unicode):
|
||||
_from = 0
|
||||
while True:
|
||||
total = self.execute_simple_query(url, _from, mappings, result_fields, result_columns, result_rows)
|
||||
_from += size
|
||||
if _from >= total:
|
||||
break
|
||||
else:
|
||||
# TODO: Handle complete ElasticSearch queries (JSON based sent over HTTP POST)
|
||||
raise Exception("Advanced queries are not supported")
|
||||
|
||||
json_data = json.dumps({
|
||||
"columns" : result_columns,
|
||||
"rows" : result_rows
|
||||
})
|
||||
except KeyboardInterrupt:
|
||||
error = "Query cancelled by user."
|
||||
json_data = None
|
||||
except Exception as e:
|
||||
raise sys.exc_info()[1], None, sys.exc_info()[2]
|
||||
|
||||
return json_data, error
|
||||
|
||||
|
||||
register(ElasticSearch)
|
||||
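A minimal usage sketch for the ElasticSearch runner above, assuming the configuration is passed as a JSON string (as the other runners in this changeset do); the server URL, index and field names are placeholders, not values from this diff:

import json
from redash.query_runner.elasticsearch import ElasticSearch

config = json.dumps({"server": "http://localhost:9200"})  # placeholder server URL
runner = ElasticSearch(config)

# A "simple" query, as described in the comment block at the top of the file.
query = json.dumps({
    "index": "twitter",                         # index to search (placeholder)
    "query": "user:kimchy",                     # Lucene-style filter
    "fields": ["@timestamp", "tweet", "user"],  # columns to return
    "size": 15,
    "sort": "@timestamp:asc"
})

results, error = runner.run_query(query)  # returns (json_data, error)
if error is None:
    print(json.loads(results)["rows"])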
redash/query_runner/google_spreadsheets.py (new file, 117 lines)
@@ -0,0 +1,117 @@
|
||||
from base64 import b64decode
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
import gspread
|
||||
from oauth2client.client import SignedJwtAssertionCredentials
|
||||
from dateutil import parser
|
||||
enabled = True
|
||||
except ImportError:
|
||||
logger.warning("Missing dependencies. Please install gspread, dateutil and oauth2client.")
|
||||
logger.warning("You can use pip: pip install gspread dateutil oauth2client")
|
||||
|
||||
enabled = False
|
||||
|
||||
|
||||
def _load_key(filename):
|
||||
with open(filename, "rb") as f:
|
||||
return json.loads(f.read())
|
||||
|
||||
|
||||
def _guess_type(value):
|
||||
try:
|
||||
val = int(value)
|
||||
return TYPE_INTEGER, val
|
||||
except ValueError:
|
||||
pass
|
||||
try:
|
||||
val = float(value)
|
||||
return TYPE_FLOAT, val
|
||||
except ValueError:
|
||||
pass
|
||||
if str(value).lower() in ('true', 'false'):
|
||||
return TYPE_BOOLEAN, str(value).lower() == 'true'  # bool("false") would be truthy, so compare explicitly
|
||||
try:
|
||||
val = parser.parse(value)
|
||||
return TYPE_DATETIME, val
|
||||
except ValueError:
|
||||
pass
|
||||
return TYPE_STRING, value
|
||||
|
||||
|
||||
class GoogleSpreadsheet(BaseQueryRunner):
|
||||
HEADER_INDEX = 0
|
||||
|
||||
@classmethod
|
||||
def annotate_query(cls):
|
||||
return False
|
||||
|
||||
@classmethod
|
||||
def type(cls):
|
||||
return "google_spreadsheets"
|
||||
|
||||
@classmethod
|
||||
def enabled(cls):
|
||||
return enabled
|
||||
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'jsonKeyFile': {
|
||||
"type": "string",
|
||||
'title': 'JSON Key File'
|
||||
}
|
||||
},
|
||||
'required': ['jsonKeyFile']
|
||||
}
|
||||
|
||||
def __init__(self, configuration_json):
|
||||
super(GoogleSpreadsheet, self).__init__(configuration_json)
|
||||
|
||||
def _get_spreadsheet_service(self):
|
||||
scope = [
|
||||
'https://spreadsheets.google.com/feeds',
|
||||
]
|
||||
|
||||
key = json.loads(b64decode(self.configuration['jsonKeyFile']))
|
||||
credentials = SignedJwtAssertionCredentials(key['client_email'], key["private_key"], scope=scope)
|
||||
spreadsheetservice = gspread.authorize(credentials)
|
||||
return spreadsheetservice
|
||||
|
||||
def run_query(self, query):
|
||||
logger.debug("Spreadsheet is about to execute query: %s", query)
|
||||
values = query.split("|")
|
||||
key = values[0]  # key of the spreadsheet
worksheet_num = 0 if len(values) != 2 else int(values[1])  # if the spreadsheet has more than one worksheet, this is the worksheet index
|
||||
try:
|
||||
spreadsheet_service = self._get_spreadsheet_service()
|
||||
spreadsheet = spreadsheet_service.open_by_key(key)
|
||||
worksheets = spreadsheet.worksheets()
|
||||
all_data = worksheets[worksheet_num].get_all_values()
|
||||
column_names = []
|
||||
columns = []
|
||||
for j, column_name in enumerate(all_data[self.HEADER_INDEX]):
|
||||
column_names.append(column_name)
|
||||
columns.append({
|
||||
'name': column_name,
|
||||
'friendly_name': column_name,
|
||||
'type': _guess_type(all_data[self.HEADER_INDEX+1][j])[0]  # _guess_type returns a (type, value) pair; keep only the type
|
||||
})
|
||||
rows = [dict(zip(column_names, row)) for row in all_data[self.HEADER_INDEX+1:]]
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
error = None
|
||||
except Exception as e:
|
||||
raise sys.exc_info()[1], None, sys.exc_info()[2]
|
||||
|
||||
return json_data, error
|
||||
|
||||
register(GoogleSpreadsheet)
|
||||
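A minimal usage sketch for the spreadsheet runner, assuming jsonKeyFile holds a base64-encoded service-account key (as the b64decode call above implies); the key file name and spreadsheet key are placeholders:

import json
from base64 import b64encode
from redash.query_runner.google_spreadsheets import GoogleSpreadsheet

with open("service_account.json") as f:  # placeholder service-account key file
    config = json.dumps({"jsonKeyFile": b64encode(f.read())})

runner = GoogleSpreadsheet(config)

# Query format: "<spreadsheet key>|<worksheet index>"; the index is optional
# and defaults to 0 (the first worksheet).
results, error = runner.run_query("PLACEHOLDER_SPREADSHEET_KEY|0")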
redash/query_runner/impala_ds.py (new file, 159 lines)
@@ -0,0 +1,159 @@
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
from impala.dbapi import connect
|
||||
from impala.error import DatabaseError, RPCError
|
||||
enabled = True
|
||||
except ImportError, e:
|
||||
logger.exception(e)
|
||||
logger.warning("Missing dependencies. Please install impyla.")
|
||||
logger.warning("You can use pip: pip install impyla")
|
||||
enabled = False
|
||||
|
||||
COLUMN_NAME = 0
|
||||
COLUMN_TYPE = 1
|
||||
|
||||
types_map = {
|
||||
'BIGINT': TYPE_INTEGER,
|
||||
'TINYINT': TYPE_INTEGER,
|
||||
'SMALLINT': TYPE_INTEGER,
|
||||
'INT': TYPE_INTEGER,
|
||||
'DOUBLE': TYPE_FLOAT,
|
||||
'DECIMAL': TYPE_FLOAT,
|
||||
'FLOAT': TYPE_FLOAT,
|
||||
'REAL': TYPE_FLOAT,
|
||||
'BOOLEAN': TYPE_BOOLEAN,
|
||||
'TIMESTAMP': TYPE_DATETIME,
|
||||
'CHAR': TYPE_STRING,
|
||||
'STRING': TYPE_STRING,
|
||||
'VARCHAR': TYPE_STRING
|
||||
}
|
||||
|
||||
|
||||
class Impala(BaseQueryRunner):
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"host": {
|
||||
"type": "string"
|
||||
},
|
||||
"port": {
|
||||
"type": "number"
|
||||
},
|
||||
"protocol": {
|
||||
"type": "string",
|
||||
"title": "Please specify beeswax or hiveserver2"
|
||||
},
|
||||
"database": {
|
||||
"type": "string"
|
||||
},
|
||||
"use_ldap": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"ldap_user": {
|
||||
"type": "string"
|
||||
},
|
||||
"ldap_password": {
|
||||
"type": "string"
|
||||
},
|
||||
"timeout": {
|
||||
"type": "number"
|
||||
}
|
||||
},
|
||||
"required": ["host"]
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def type(cls):
|
||||
return "impala"
|
||||
|
||||
def __init__(self, configuration_json):
|
||||
super(Impala, self).__init__(configuration_json)
|
||||
|
||||
def _run_query_internal(self, query):
|
||||
results, error = self.run_query(query)
|
||||
|
||||
if error is not None:
|
||||
raise Exception("Failed getting schema.")
|
||||
return json.loads(results)['rows']
|
||||
|
||||
def get_schema(self):
|
||||
try:
|
||||
schemas_query = "show schemas;"
|
||||
|
||||
tables_query = "show tables in %s;"
|
||||
|
||||
columns_query = "show column stats %s;"
|
||||
|
||||
schema = {}
|
||||
for schema_name in map(lambda a: a['name'], self._run_query_internal(schemas_query)):
|
||||
for table_name in map(lambda a: a['name'], self._run_query_internal(tables_query % schema_name)):
|
||||
columns = map(lambda a: a['Column'], self._run_query_internal(columns_query % table_name))
|
||||
|
||||
if schema_name != 'default':
|
||||
table_name = '{}.{}'.format(schema_name, table_name)
|
||||
|
||||
schema[table_name] = {'name': table_name, 'columns': columns}
|
||||
except Exception, e:
|
||||
raise sys.exc_info()[1], None, sys.exc_info()[2]
|
||||
return schema.values()
|
||||
|
||||
def run_query(self, query):
|
||||
|
||||
connection = None
|
||||
try:
|
||||
connection = connect(**self.configuration)
|
||||
|
||||
cursor = connection.cursor()
|
||||
|
||||
cursor.execute(query)
|
||||
|
||||
column_names = []
|
||||
columns = []
|
||||
|
||||
for column in cursor.description:
|
||||
column_name = column[COLUMN_NAME]
|
||||
column_names.append(column_name)
|
||||
|
||||
columns.append({
|
||||
'name': column_name,
|
||||
'friendly_name': column_name,
|
||||
'type': types_map.get(column[COLUMN_TYPE], None)
|
||||
})
|
||||
|
||||
rows = [dict(zip(column_names, row)) for row in cursor]
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
error = None
|
||||
cursor.close()
|
||||
except DatabaseError as e:
|
||||
logging.exception(e)
|
||||
json_data = None
|
||||
error = e.message
|
||||
except RPCError as e:
|
||||
logging.exception(e)
|
||||
json_data = None
|
||||
error = "Metastore Error [%s]" % e.message
|
||||
except KeyboardInterrupt:
|
||||
connection.cancel()
|
||||
error = "Query cancelled by user."
|
||||
json_data = None
|
||||
except Exception as e:
|
||||
logging.exception(e)
|
||||
raise sys.exc_info()[1], None, sys.exc_info()[2]
|
||||
finally:
|
||||
if connection:
    connection.close()
|
||||
|
||||
return json_data, error
|
||||
|
||||
register(Impala)
|
||||
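Because run_query() passes the configuration dict straight into impyla's connect(), the configuration keys double as connection keyword arguments. A minimal sketch with placeholder values:

import json
from redash.query_runner.impala_ds import Impala

config = json.dumps({
    "host": "impala.example.com",   # placeholder; the only required key
    "port": 21050,
    "protocol": "hiveserver2",
    "database": "default",
    "timeout": 60
})

runner = Impala(config)
results, error = runner.run_query("SELECT 1")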
redash/query_runner/influx_db.py (new file, 83 lines)
@@ -0,0 +1,83 @@
|
||||
import json
|
||||
import logging
|
||||
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.query_runner import *
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
from influxdb import InfluxDBClusterClient
|
||||
enabled = True
|
||||
|
||||
except ImportError:
|
||||
logger.warning("Missing dependencies. Please install influxdb.")
|
||||
logger.warning("You can use pip: pip install influxdb")
|
||||
enabled = False
|
||||
|
||||
def _transform_result(results):
|
||||
result_columns = []
|
||||
result_rows = []
|
||||
|
||||
for result in results:
|
||||
if not result_columns:
|
||||
for c in result.raw['series'][0]['columns']:
|
||||
result_columns.append({ "name": c })
|
||||
|
||||
for point in result.get_points():
|
||||
result_rows.append(point)
|
||||
|
||||
return json.dumps({
|
||||
"columns" : result_columns,
|
||||
"rows" : result_rows
|
||||
}, cls=JSONEncoder)
|
||||
|
||||
class InfluxDB(BaseQueryRunner):
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'url': {
|
||||
'type': 'string'
|
||||
}
|
||||
},
|
||||
'required': ['url']
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def enabled(cls):
|
||||
return enabled
|
||||
|
||||
@classmethod
|
||||
def annotate_query(cls):
|
||||
return False
|
||||
|
||||
@classmethod
|
||||
def type(cls):
|
||||
return "influxdb"
|
||||
|
||||
def __init__(self, configuration_json):
|
||||
super(InfluxDB, self).__init__(configuration_json)
|
||||
|
||||
def run_query(self, query):
|
||||
client = InfluxDBClusterClient.from_DSN(self.configuration['url'])
|
||||
|
||||
logger.debug("influxdb url: %s", self.configuration['url'])
|
||||
logger.debug("influxdb got query: %s", query)
|
||||
|
||||
try:
|
||||
results = client.query(query)
|
||||
if not isinstance(results, list):
|
||||
results = [results]
|
||||
|
||||
json_data = _transform_result(results)
|
||||
error = None
|
||||
except Exception, ex:
|
||||
json_data = None
|
||||
error = ex.message
|
||||
|
||||
return json_data, error
|
||||
|
||||
|
||||
register(InfluxDB)
|
||||
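A minimal usage sketch for the InfluxDB runner; the DSN is a placeholder in the influxdb://user:password@host:port/database form that InfluxDBClusterClient.from_DSN() accepts:

import json
from redash.query_runner.influx_db import InfluxDB

config = json.dumps({"url": "influxdb://user:password@localhost:8086/mydb"})  # placeholder DSN

runner = InfluxDB(config)
results, error = runner.run_query("SELECT * FROM cpu LIMIT 10")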
@@ -3,6 +3,7 @@ import datetime
|
||||
import logging
|
||||
import re
|
||||
import time
|
||||
from dateutil.parser import parse
|
||||
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.query_runner import *
|
||||
@@ -12,6 +13,7 @@ logger = logging.getLogger(__name__)
|
||||
try:
|
||||
import pymongo
|
||||
from bson.objectid import ObjectId
|
||||
from bson.son import SON
|
||||
enabled = True
|
||||
|
||||
except ImportError:
|
||||
@@ -32,24 +34,73 @@ TYPES_MAP = {
|
||||
|
||||
date_regex = re.compile("ISODate\(\"(.*)\"\)", re.IGNORECASE)
|
||||
|
||||
class MongoDBJSONEncoder(JSONEncoder):
|
||||
def default(self, o):
|
||||
if isinstance(o, ObjectId):
|
||||
return str(o)
|
||||
|
||||
def _get_column_by_name(columns, column_name):
|
||||
for c in columns:
|
||||
if "name" in c and c["name"] == column_name:
|
||||
return c
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _convert_date(q, field_name):
|
||||
m = date_regex.findall(q[field_name])
|
||||
if len(m) > 0:
|
||||
if q[field_name].find(":") == -1:
|
||||
q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d")))
|
||||
else:
|
||||
q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d %H:%M")))
|
||||
|
||||
return super(MongoDBJSONEncoder, self).default(o)
|
||||
|
||||
# Simple query example:
|
||||
#
|
||||
# {
|
||||
# "collection" : "my_collection",
|
||||
# "query" : {
|
||||
# "date" : {
|
||||
# "$gt" : "ISODate(\"2015-01-15 11:41\")",
|
||||
# },
|
||||
# "type" : 1
|
||||
# },
|
||||
# "fields" : {
|
||||
# "_id" : 1,
|
||||
# "name" : 2
|
||||
# },
|
||||
# "sort" : [
|
||||
# {
|
||||
# "name" : "date",
|
||||
# "direction" : -1
|
||||
# }
|
||||
# ]
|
||||
#
|
||||
# }
|
||||
#
|
||||
#
|
||||
# Aggregation
|
||||
# ===========
|
||||
# Uses a syntax similar to the one used in PyMongo; however, to preserve the
# sort order, it uses a regular list for the "$sort" operation, which is
# converted into a SON (ordered dictionary) object before execution.
|
||||
#
|
||||
# Aggregation query example:
|
||||
#
|
||||
# {
|
||||
# "collection" : "things",
|
||||
# "aggregate" : [
|
||||
# {
|
||||
# "$unwind" : "$tags"
|
||||
# },
|
||||
# {
|
||||
# "$group" : {
|
||||
# "_id" : "$tags",
|
||||
# "count" : { "$sum" : 1 }
|
||||
# }
|
||||
# },
|
||||
# {
|
||||
# "$sort" : [
|
||||
# {
|
||||
# "name" : "count",
|
||||
# "direction" : -1
|
||||
# },
|
||||
# {
|
||||
# "name" : "_id",
|
||||
# "direction" : -1
|
||||
# }
|
||||
# ]
|
||||
# }
|
||||
# ]
|
||||
# }
|
||||
#
|
||||
#
|
||||
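To make the ISODate(...) handling described above concrete, here is a standalone sketch of the rewrite the runner performs: string values matching the ISODate pattern are replaced with real datetime objects (via dateutil) before the query is sent to MongoDB. The query value below is illustrative:

import re
from dateutil.parser import parse

# Same pattern the runner uses to spot ISODate("...") literals in query values.
date_regex = re.compile(r"ISODate\(\"(.*)\"\)", re.IGNORECASE)

query = {"date": {"$gt": 'ISODate("2015-01-15 11:41")'}}

value = query["date"]["$gt"]
match = date_regex.findall(value)
if match:
    query["date"]["$gt"] = parse(match[0], yearfirst=True)  # now a datetime object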
class MongoDB(BaseQueryRunner):
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
@@ -68,8 +119,8 @@ class MongoDB(BaseQueryRunner):
|
||||
'type': 'string',
|
||||
'title': 'Replica Set Name'
|
||||
},
|
||||
'required': ['connectionString']
|
||||
}
|
||||
},
|
||||
'required': ['connectionString']
|
||||
}
|
||||
|
||||
@classmethod
|
||||
@@ -83,26 +134,49 @@ class MongoDB(BaseQueryRunner):
|
||||
def __init__(self, configuration_json):
|
||||
super(MongoDB, self).__init__(configuration_json)
|
||||
|
||||
self.syntax = 'json'
|
||||
|
||||
self.db_name = self.configuration["dbName"]
|
||||
|
||||
self.is_replica_set = True if "replicaSetName" in self.configuration and self.configuration["replicaSetName"] else False
|
||||
|
||||
def _get_column_by_name(self, columns, column_name):
|
||||
for c in columns:
|
||||
if "name" in c and c["name"] == column_name:
|
||||
return c
|
||||
|
||||
return None
|
||||
|
||||
def _fix_dates(self, data):
|
||||
for k in data:
|
||||
if isinstance(data[k], list):
|
||||
for i in range(0, len(data[k])):
|
||||
self._fix_dates(data[k][i])
|
||||
elif isinstance(data[k], dict):
|
||||
self._fix_dates(data[k])
|
||||
else:
|
||||
if isinstance(data[k], (str, unicode)):
|
||||
self._convert_date(data, k)
|
||||
|
||||
def _convert_date(self, q, field_name):
|
||||
m = date_regex.findall(q[field_name])
|
||||
if len(m) > 0:
|
||||
q[field_name] = parse(m[0], yearfirst=True)
|
||||
|
||||
def run_query(self, query):
|
||||
if self.is_replica_set:
|
||||
db_connection = pymongo.MongoReplicaSetClient(self.configuration["connectionString"], replicaSet=self.configuration["replicaSetName"])
|
||||
else:
|
||||
db_connection = pymongo.MongoClient(self.configuration["connectionString"])
|
||||
|
||||
if self.db_name not in db_connection.database_names():
|
||||
return None, "Unknown database name '%s'" % self.db_name
|
||||
|
||||
db = db_connection[self.db_name ]
|
||||
db = db_connection[self.db_name]
|
||||
|
||||
logger.debug("mongodb connection string: %s", self.configuration['connectionString'])
|
||||
logger.debug("mongodb got query: %s", query)
|
||||
|
||||
try:
|
||||
query_data = json.loads(query)
|
||||
self._fix_dates(query_data)
|
||||
except ValueError:
|
||||
return None, "Invalid query format. The query is not a valid JSON."
|
||||
|
||||
@@ -111,58 +185,79 @@ class MongoDB(BaseQueryRunner):
|
||||
else:
|
||||
collection = query_data["collection"]
|
||||
|
||||
q = None
|
||||
if "query" in query_data:
|
||||
q = query_data["query"]
|
||||
for k in q:
|
||||
if q[k] and type(q[k]) in [str, unicode]:
|
||||
logging.debug(q[k])
|
||||
_convert_date(q, k)
|
||||
elif q[k] and type(q[k]) is dict:
|
||||
for k2 in q[k]:
|
||||
if type(q[k][k2]) in [str, unicode]:
|
||||
_convert_date(q[k], k2)
|
||||
|
||||
q = query_data.get("query", None)
|
||||
f = None
|
||||
|
||||
aggregate = query_data.get("aggregate", None)
|
||||
if aggregate:
|
||||
for step in aggregate:
|
||||
if "$sort" in step:
|
||||
sort_list = []
|
||||
for sort_item in step["$sort"]:
|
||||
sort_list.append((sort_item["name"], sort_item["direction"]))
|
||||
|
||||
step["$sort"] = SON(sort_list)
|
||||
|
||||
if not aggregate:
|
||||
s = None
|
||||
if "sort" in query_data and query_data["sort"]:
|
||||
s = []
|
||||
for field in query_data["sort"]:
|
||||
s.append((field["name"], field["direction"]))
|
||||
|
||||
if "fields" in query_data:
|
||||
f = query_data["fields"]
|
||||
|
||||
s = None
|
||||
if "sort" in query_data and query_data["sort"]:
|
||||
s = []
|
||||
for field_name in query_data["sort"]:
|
||||
s.append((field_name, query_data["sort"][field_name]))
|
||||
for field_data in query_data["sort"]:
|
||||
s.append((field_data["name"], field_data["direction"]))
|
||||
|
||||
columns = []
|
||||
rows = []
|
||||
|
||||
error = None
|
||||
json_data = None
|
||||
cursor = None
|
||||
if q or (not q and not aggregate):
|
||||
if s:
|
||||
cursor = db[collection].find(q, f).sort(s)
|
||||
else:
|
||||
cursor = db[collection].find(q, f)
|
||||
|
||||
if s:
|
||||
cursor = db[collection].find(q, f).sort(s)
|
||||
else:
|
||||
cursor = db[collection].find(q, f)
|
||||
if "skip" in query_data:
|
||||
cursor = cursor.skip(query_data["skip"])
|
||||
|
||||
if "limit" in query_data:
|
||||
cursor = cursor.limit(query_data["limit"])
|
||||
|
||||
elif aggregate:
|
||||
r = db[collection].aggregate(aggregate)
|
||||
|
||||
# Backwards compatibility with older pymongo versions.
|
||||
#
|
||||
# Older pymongo version would return a dictionary from an aggregate command.
|
||||
# The dict would contain a "result" key which would hold the cursor.
|
||||
# Newer ones return pymongo.command_cursor.CommandCursor.
|
||||
if isinstance(r, dict):
|
||||
cursor = r["result"]
|
||||
else:
|
||||
cursor = r
|
||||
|
||||
for r in cursor:
|
||||
for k in r:
|
||||
if _get_column_by_name(columns, k) is None:
|
||||
if self._get_column_by_name(columns, k) is None:
|
||||
columns.append({
|
||||
"name": k,
|
||||
"friendly_name": k,
|
||||
"type": TYPES_MAP.get(type(r[k]), TYPE_STRING)
|
||||
})
|
||||
|
||||
# Convert ObjectId to string
|
||||
if type(r[k]) == ObjectId:
|
||||
r[k] = str(r[k])
|
||||
|
||||
rows.append(r)
|
||||
|
||||
if f:
|
||||
ordered_columns = []
|
||||
for k in sorted(f, key=f.get):
|
||||
ordered_columns.append(_get_column_by_name(columns, k))
|
||||
ordered_columns.append(self._get_column_by_name(columns, k))
|
||||
|
||||
columns = ordered_columns
|
||||
|
||||
@@ -171,8 +266,8 @@ class MongoDB(BaseQueryRunner):
|
||||
"rows": rows
|
||||
}
|
||||
error = None
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
json_data = json.dumps(data, cls=MongoDBJSONEncoder)
|
||||
|
||||
return json_data, error
|
||||
|
||||
register(MongoDB)
|
||||
register(MongoDB)
|
||||
|
||||
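The "$sort" handling in the aggregation path above can be illustrated in isolation: the list form keeps the sort order stable in JSON, and it is rewritten into a bson SON (ordered mapping) before db[collection].aggregate() is called. A small standalone sketch:

from bson.son import SON

# "$sort" arrives as an ordered list of {"name", "direction"} objects because
# plain JSON objects do not guarantee key order.
step = {"$sort": [{"name": "count", "direction": -1},
                  {"name": "_id", "direction": -1}]}

if "$sort" in step:
    step["$sort"] = SON((item["name"], item["direction"]) for item in step["$sort"])

# step["$sort"] is now an order-preserving mapping suitable for aggregate().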
@@ -7,6 +7,24 @@ from redash.query_runner import *
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
types_map = {
|
||||
0: TYPE_FLOAT,
|
||||
1: TYPE_INTEGER,
|
||||
2: TYPE_INTEGER,
|
||||
3: TYPE_INTEGER,
|
||||
4: TYPE_FLOAT,
|
||||
5: TYPE_FLOAT,
|
||||
7: TYPE_DATETIME,
|
||||
8: TYPE_INTEGER,
|
||||
9: TYPE_INTEGER,
|
||||
10: TYPE_DATE,
|
||||
12: TYPE_DATETIME,
|
||||
15: TYPE_STRING,
|
||||
16: TYPE_INTEGER,
|
||||
246: TYPE_FLOAT,
|
||||
253: TYPE_STRING,
|
||||
254: TYPE_STRING,
|
||||
}
|
||||
|
||||
class Mysql(BaseQueryRunner):
|
||||
@classmethod
|
||||
@@ -27,7 +45,10 @@ class Mysql(BaseQueryRunner):
|
||||
'db': {
|
||||
'type': 'string',
|
||||
'title': 'Database name'
|
||||
}
|
||||
},
|
||||
"port": {
|
||||
"type": "number"
|
||||
},
|
||||
},
|
||||
'required': ['db']
|
||||
}
|
||||
@@ -44,34 +65,67 @@ class Mysql(BaseQueryRunner):
|
||||
def __init__(self, configuration_json):
|
||||
super(Mysql, self).__init__(configuration_json)
|
||||
|
||||
def get_schema(self):
|
||||
query = """
|
||||
SELECT col.table_schema,
|
||||
col.table_name,
|
||||
col.column_name
|
||||
FROM `information_schema`.`columns` col
|
||||
INNER JOIN
|
||||
(SELECT table_schema,
|
||||
TABLE_NAME
|
||||
FROM information_schema.tables
|
||||
WHERE table_type <> 'SYSTEM VIEW' AND table_schema NOT IN ('performance_schema', 'mysql')) tables ON tables.table_schema = col.table_schema
|
||||
AND tables.TABLE_NAME = col.TABLE_NAME;
|
||||
"""
|
||||
|
||||
results, error = self.run_query(query)
|
||||
|
||||
if error is not None:
|
||||
raise Exception("Failed getting schema.")
|
||||
|
||||
results = json.loads(results)
|
||||
|
||||
schema = {}
|
||||
for row in results['rows']:
|
||||
if row['table_schema'] != self.configuration['db']:
|
||||
table_name = '{}.{}'.format(row['table_schema'], row['table_name'])
|
||||
else:
|
||||
table_name = row['table_name']
|
||||
|
||||
if table_name not in schema:
|
||||
schema[table_name] = {'name': table_name, 'columns': []}
|
||||
|
||||
schema[table_name]['columns'].append(row['column_name'])
|
||||
|
||||
return schema.values()
|
||||
|
||||
def run_query(self, query):
|
||||
import MySQLdb
|
||||
|
||||
connection = MySQLdb.connect(self.configuration.get('host', ''),
|
||||
self.configuration.get('user', ''),
|
||||
self.configuration.get('passwd', ''),
|
||||
self.configuration['db'],
|
||||
charset='utf8', use_unicode=True)
|
||||
cursor = connection.cursor()
|
||||
|
||||
logger.debug("MySQL running query: %s", query)
|
||||
|
||||
connection = None
|
||||
try:
|
||||
connection = MySQLdb.connect(host=self.configuration.get('host', ''),
|
||||
user=self.configuration.get('user', ''),
|
||||
passwd=self.configuration.get('passwd', ''),
|
||||
db=self.configuration['db'],
|
||||
port=self.configuration.get('port', 3306),
|
||||
charset='utf8', use_unicode=True)
|
||||
cursor = connection.cursor()
|
||||
logger.debug("MySQL running query: %s", query)
|
||||
cursor.execute(query)
|
||||
|
||||
data = cursor.fetchall()
|
||||
|
||||
cursor_desc = cursor.description
|
||||
if cursor_desc is not None:
|
||||
num_fields = len(cursor_desc)
|
||||
column_names = [i[0] for i in cursor.description]
|
||||
# TODO - very similar to pg.py
|
||||
if cursor.description is not None:
|
||||
columns_data = [(i[0], i[1]) for i in cursor.description]
|
||||
|
||||
rows = [dict(zip(column_names, row)) for row in data]
|
||||
rows = [dict(zip((c[0] for c in columns_data), row)) for row in data]
|
||||
|
||||
# TODO: add types support
|
||||
columns = [{'name': col_name,
|
||||
'friendly_name': col_name,
|
||||
'type': None} for col_name in column_names]
|
||||
columns = [{'name': col[0],
|
||||
'friendly_name': col[0],
|
||||
'type': types_map.get(col[1], None)} for col in columns_data]
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
@@ -90,7 +144,8 @@ class Mysql(BaseQueryRunner):
|
||||
except Exception as e:
|
||||
raise sys.exc_info()[1], None, sys.exc_info()[2]
|
||||
finally:
|
||||
connection.close()
|
||||
if connection:
|
||||
connection.close()
|
||||
|
||||
return json_data, error
|
||||
|
||||
|
||||
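For clarity, the value returned by get_schema() here (and by the PostgreSQL version added below) is a list of per-table dicts; the table and column names in this example are made up:

# Illustrative only: the shape of schema.values() as returned by get_schema().
expected_schema = [
    {"name": "users", "columns": ["id", "email", "created_at"]},
    # Tables outside the configured/default database are prefixed with their schema:
    {"name": "analytics.events", "columns": ["id", "user_id", "name"]},
]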
@@ -83,6 +83,34 @@ class PostgreSQL(BaseQueryRunner):
|
||||
|
||||
self.connection_string = " ".join(values)
|
||||
|
||||
def get_schema(self):
|
||||
query = """
|
||||
SELECT table_schema, table_name, column_name
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema NOT IN ('pg_catalog', 'information_schema');
|
||||
"""
|
||||
|
||||
results, error = self.run_query(query)
|
||||
|
||||
if error is not None:
|
||||
raise Exception("Failed getting schema.")
|
||||
|
||||
results = json.loads(results)
|
||||
|
||||
schema = {}
|
||||
for row in results['rows']:
|
||||
if row['table_schema'] != 'public':
|
||||
table_name = '{}.{}'.format(row['table_schema'], row['table_name'])
|
||||
else:
|
||||
table_name = row['table_name']
|
||||
|
||||
if table_name not in schema:
|
||||
schema[table_name] = {'name': table_name, 'columns': []}
|
||||
|
||||
schema[table_name]['columns'].append(row['column_name'])
|
||||
|
||||
return schema.values()
|
||||
|
||||
def run_query(self, query):
|
||||
connection = psycopg2.connect(self.connection_string, async=True)
|
||||
_wait(connection)
|
||||
@@ -99,35 +127,38 @@ class PostgreSQL(BaseQueryRunner):
|
||||
columns = []
|
||||
duplicates_counter = 1
|
||||
|
||||
for column in cursor.description:
|
||||
# TODO: this deduplication needs to be generalized and reused in all query runners.
|
||||
column_name = column.name
|
||||
if column_name in column_names:
|
||||
column_name += str(duplicates_counter)
|
||||
duplicates_counter += 1
|
||||
if cursor.description is not None:
|
||||
for column in cursor.description:
|
||||
# TODO: this deduplication needs to be generalized and reused in all query runners.
|
||||
column_name = column.name
|
||||
if column_name in column_names:
|
||||
column_name += str(duplicates_counter)
|
||||
duplicates_counter += 1
|
||||
|
||||
column_names.append(column_name)
|
||||
column_names.append(column_name)
|
||||
|
||||
columns.append({
|
||||
'name': column_name,
|
||||
'friendly_name': column_name,
|
||||
'type': types_map.get(column.type_code, None)
|
||||
})
|
||||
columns.append({
|
||||
'name': column_name,
|
||||
'friendly_name': column_name,
|
||||
'type': types_map.get(column.type_code, None)
|
||||
})
|
||||
|
||||
rows = [dict(zip(column_names, row)) for row in cursor]
|
||||
rows = [dict(zip(column_names, row)) for row in cursor]
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
error = None
|
||||
cursor.close()
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
error = None
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
else:
|
||||
error = 'Query completed but it returned no data.'
|
||||
json_data = None
|
||||
except (select.error, OSError) as e:
|
||||
logging.exception(e)
|
||||
error = "Query interrupted. Please retry."
|
||||
json_data = None
|
||||
except psycopg2.DatabaseError as e:
|
||||
logging.exception(e)
|
||||
json_data = None
|
||||
error = e.message
|
||||
json_data = None
|
||||
except KeyboardInterrupt:
|
||||
connection.cancel()
|
||||
error = "Query cancelled by user."
|
||||
|
||||
redash/query_runner/presto.py (new file, 98 lines)
@@ -0,0 +1,98 @@
|
||||
import json
|
||||
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.query_runner import *
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
from pyhive import presto
|
||||
enabled = True
|
||||
|
||||
except ImportError:
|
||||
logger.warning("Missing dependencies. Please install PyHive.")
|
||||
logger.warning("You can use pip: pip install pyhive")
|
||||
enabled = False
|
||||
|
||||
PRESTO_TYPES_MAPPING = {
|
||||
"integer" : TYPE_INTEGER,
|
||||
"long" : TYPE_INTEGER,
|
||||
"bigint" : TYPE_INTEGER,
|
||||
"float" : TYPE_FLOAT,
|
||||
"double" : TYPE_FLOAT,
|
||||
"boolean" : TYPE_BOOLEAN,
|
||||
"string" : TYPE_STRING,
|
||||
"varchar": TYPE_STRING,
|
||||
"date" : TYPE_DATE,
|
||||
}
|
||||
|
||||
class Presto(BaseQueryRunner):
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'host': {
|
||||
'type': 'string'
|
||||
},
|
||||
'port': {
|
||||
'type': 'number'
|
||||
},
|
||||
'schema': {
|
||||
'type': 'string'
|
||||
},
|
||||
'catalog': {
|
||||
'type': 'string'
|
||||
},
|
||||
'username': {
|
||||
'type': 'string'
|
||||
}
|
||||
},
|
||||
'required': ['host']
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def enabled(cls):
|
||||
return enabled
|
||||
|
||||
@classmethod
|
||||
def annotate_query(cls):
|
||||
return False
|
||||
|
||||
@classmethod
|
||||
def type(cls):
|
||||
return "presto"
|
||||
|
||||
def __init__(self, configuration_json):
|
||||
super(Presto, self).__init__(configuration_json)
|
||||
|
||||
def run_query(self, query):
|
||||
connection = presto.connect(
|
||||
host=self.configuration.get('host', ''),
|
||||
port=self.configuration.get('port', 8080),
|
||||
username=self.configuration.get('username', 'redash'),
|
||||
catalog=self.configuration.get('catalog', 'hive'),
|
||||
schema=self.configuration.get('schema', 'default'))
|
||||
|
||||
cursor = connection.cursor()
|
||||
|
||||
try:
|
||||
cursor.execute(query)
|
||||
columns_data = [(row[0], row[1]) for row in cursor.description]
|
||||
|
||||
columns = [{'name': col[0],
|
||||
'friendly_name': col[0],
|
||||
'type': PRESTO_TYPES_MAPPING.get(col[1], None)} for col in columns_data]
|
||||
|
||||
rows = [dict(zip(([c[0] for c in columns_data]), r)) for i, r in enumerate(cursor.fetchall())]
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
error = None
|
||||
except Exception, ex:
|
||||
json_data = None
|
||||
error = ex.message
|
||||
|
||||
return json_data, error
|
||||
|
||||
register(Presto)
|
||||
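A minimal usage sketch for the Presto runner; every value is a placeholder, and all keys except host fall back to the defaults run_query() supplies:

import json
from redash.query_runner.presto import Presto

config = json.dumps({
    "host": "presto.example.com",  # placeholder; the only required key
    "port": 8080,
    "catalog": "hive",
    "schema": "default",
    "username": "redash"
})

runner = Presto(config)
results, error = runner.run_query("SELECT 1")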
Some files were not shown because too many files have changed in this diff.