Compare commits

...

83 Commits

Author SHA1 Message Date
Arik Fraimovich
f00d080ed2 Install optipng in CircleCI. 2015-05-12 10:33:11 +03:00
Arik Fraimovich
4e76c1305f Merge pull request #425 from EverythingMe/new_logo
New logo
2015-05-12 10:27:01 +03:00
Arik Fraimovich
36ef388e92 Bump version 2015-05-12 10:26:16 +03:00
Arik Fraimovich
2e1ee7f76c New logo 2015-05-12 10:25:57 +03:00
Arik Fraimovich
fc1e38772d New logo! 2015-05-11 23:13:15 +03:00
Arik Fraimovich
0e631a5121 Merge pull request #422 from EverythingMe/feature/288_bq_instance_auth
Feature: BigQueryGCE query runner that uses instance auth (fixes #288)
2015-05-10 23:18:45 +03:00
Arik Fraimovich
d74175efca Feature: BigQueryGCE query runner that uses instance auth 2015-05-10 08:46:41 +03:00
Arik Fraimovich
bf5fe7d2c7 Merge pull request #421 from EverythingMe/fix/issue_417
Feature: show visualization name next to query name (#418)
2015-05-08 22:28:12 +03:00
Arik Fraimovich
0f022aba92 Feature: show visualization name next to query name. 2015-05-07 21:58:12 +03:00
Arik Fraimovich
0b6e55e55a Remove unused code 2015-05-07 21:58:08 +03:00
Arik Fraimovich
e1c409366c Merge pull request #420 from EverythingMe/fix/issue_417
Fix: Make query editor auto resize again to prevent scroll issues
2015-05-07 21:52:07 +03:00
Arik Fraimovich
3b942118e9 Make query editor auto resize again to prevent scroll issues 2015-05-07 21:39:25 +03:00
Arik Fraimovich
7f1543db8f Merge pull request #419 from EverythingMe/fix/issue_417
Fix #417: integer columns treated as floats
2015-05-07 21:38:54 +03:00
Arik Fraimovich
74a5121be2 Fix #417: integer columns treated as floats 2015-05-07 21:25:30 +03:00
Arik Fraimovich
26fe136a1a Merge pull request #416 from daamien/patch-1
Upgrade to requests 2.3.0
2015-05-07 09:30:43 +03:00
damien clochard
83fb189b05 Update requirements.txt
The bootstrap.sh script fails on Debian 7.8

I solved the problem with:

$ sudo pip install requests==2.3.0

Check this bug for more details:
https://github.com/kennethreitz/requests/issues/2028
2015-05-06 18:36:24 +02:00
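The merged fix pins the dependency in requirements.txt; a one-line sketch of the change (the surrounding entries are not shown in this diff):

# requirements.txt: pin requests to a version that installs cleanly on Debian 7.8
requests==2.3.0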
Arik Fraimovich
5e8d0d36c0 Merge pull request #409 from erans/master
Fix: minor fixes for MongoDB, script and Python query runners
2015-04-26 11:07:33 +03:00
Eran Sandler
4ae4cffa04 Removed a copy-paste duplication. Hmpf. 2015-04-26 11:05:40 +03:00
Eran Sandler
bc433e88fe Fix for __getitem__ error when accessing a dictionary directly. 2015-04-26 11:03:53 +03:00
Arik Fraimovich
513ef501a4 Merge pull request #410 from stanhu/sort-by-y-values
Feature: sort by Y values charts that have a single value per series
2015-04-26 10:23:06 +03:00
Stan Hu
f2bdcbedfb Simplify code and remove sortY option to avoid confusion 2015-04-26 00:18:03 -07:00
Stan Hu
fd056edb2a Support sort by y values for charts that have a single value per series 2015-04-21 22:52:14 -07:00
Eran Sandler
0f0acfdd12 Fix for an issue that prevented MongoDB connections from executing queries due to a faulty JSON schema configuration. 2015-04-22 00:18:28 +03:00
Eran Sandler
1e3b507b2b Fix for the script data source when command-line parameters are passed as part of the query. 2015-04-21 09:36:05 +03:00
Arik Fraimovich
84d95272f3 Comment out active tasks cleanup, as it sometimes fails. 2015-04-20 10:05:04 +03:00
Arik Fraimovich
3b08e9e214 Merge pull request #408 from alexanderlz/master
Feature: additional metadata in query annotation (username, query id, queue name)
2015-04-20 08:48:59 +03:00
Arik Fraimovich
f4be83b06f Use query id from UI & annotate scheduled queries 2015-04-20 08:46:01 +03:00
Alexander Leibzon
4918d0430c add redash username/query_id to query for easier backtracking 2015-04-20 02:16:12 +03:00
Arik Fraimovich
e25b86b10d Merge pull request #398 from lenguyenthedat/data_sources_name_unique
Fix: make the data_sources' name unique
2015-04-18 22:51:12 +03:00
Arik Fraimovich
d3d305a843 Make sure data sources have unique names in tests 2015-04-18 22:46:42 +03:00
Arik Fraimovich
825b93bfe9 Fix migration numbering (there is 0007 already) 2015-04-18 22:46:42 +03:00
Arik Fraimovich
8c98282200 Rename only data sources with duplicates 2015-04-18 22:46:42 +03:00
Dat Le
768ac9eb04 Fix: make the data_sources' name unique
Also added a migration script.
2015-04-18 22:46:42 +03:00
Arik Fraimovich
71011d2fca Merge pull request #407 from stanhu/add-flask-admin 2015-04-18 22:23:10 +03:00
Arik Fraimovich
9683a8ed82 Dedicated view for data source 2015-04-18 22:21:58 +03:00
Arik Fraimovich
10a6ac9313 Dedicated view for User model 2015-04-18 18:48:44 +03:00
Arik Fraimovich
dba325e9a2 Use ArrayListField for Array fields. 2015-04-18 18:47:54 +03:00
Arik Fraimovich
fcd9ab533c Fix: correctly call CustomModelConverter __init__. 2015-04-18 18:46:32 +03:00
Arik Fraimovich
68e3e8e1c5 Update name in admin screens 2015-04-18 18:00:52 +03:00
Arik Fraimovich
7f8b738b9e Fix requirements.txt (peewee was specified twice) 2015-04-18 16:58:05 +03:00
Arik Fraimovich
8a35dcedfa Merge pull request #406 from stanhu/add-mysql-port
Add support for configuring MySQL port
2015-04-18 16:14:26 +03:00
Stan Hu
ef763b7157 Use Flask-Admin to provide basic Web-based /admin page 2015-04-18 04:11:30 -07:00
Stan Hu
498e1d4474 Add support for configuring MySQL port 2015-04-17 22:57:34 -07:00
Arik Fraimovich
73de936c75 Merge pull request #405 from EverythingMe/feature/syntax_highglight
Feature: use correct syntax highlighting for Python/Mongo data sources
2015-04-14 17:53:46 +03:00
Arik Fraimovich
e32b709a41 Typo fix in the python query runner 2015-04-14 17:50:36 +03:00
Arik Fraimovich
60652f63c4 Use correct syntax highlighting for Python/Mongo sources 2015-04-14 17:48:36 +03:00
Arik Fraimovich
d0d4101f90 Merge pull request #404 from erans/master
Improvement: make Python datasource to use the RestrictedPython sandbox
2015-04-13 16:13:00 +03:00
Eran Sandler
646875794f Per request by Arik - the BDFL :-) 2015-04-13 15:27:28 +03:00
Eran Sandler
cdad4be0d5 Removed the try..catch block in the import of RestrictedPython since we are putting it in the requirements.txt file. 2015-04-13 15:23:49 +03:00
Eran Sandler
8f4285be62 Minor fixes from code review. 2015-04-13 15:21:43 +03:00
Eran Sandler
acfa55e2d0 Python datasource that uses RestrictedPython. Only modules listed in "allowedImportModules" (comma separated) will be allowed to be imported, and the code assumes they are installed on the server running the actual code. 2015-04-13 11:22:22 +03:00
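A minimal sketch of the whitelist idea described in this commit (hypothetical code; RestrictedPython's actual mechanism differs): only modules named in the comma-separated setting may be imported by user-supplied code.

# e.g. the value of "allowedImportModules"
allowed = set(m.strip() for m in "math,json".split(","))

def guarded_import(name, *args, **kwargs):
    # Reject any module whose top-level package is not whitelisted;
    # whitelisted modules are assumed to be installed on the server.
    if name.split(".")[0] not in allowed:
        raise ImportError("import of '%s' is not allowed" % name)
    return __import__(name, *args, **kwargs)

env = {"__builtins__": {"__import__": guarded_import}}
exec("import math; result = math.sqrt(2)", env)  # allowed
assert 1.41 < env["result"] < 1.42
# exec("import os", env) would raise ImportError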
Arik Fraimovich
0b7cd07db0 Merge pull request #403 from EverythingMe/chore/release_process
Fix: schema browser styles
2015-04-08 16:14:30 +03:00
Arik Fraimovich
6297ffd523 Fix: schema browser styles 2015-04-08 16:13:03 +03:00
Arik Fraimovich
368f4fdbef Merge pull request #402 from EverythingMe/chore/release_process
New release process.
2015-04-06 12:51:12 +03:00
Arik Fraimovich
f52044a209 New release process 2015-04-06 12:50:17 +03:00
Arik Fraimovich
9fb33cf746 Merge pull request #399 from EverythingMe/feature/schema
Feature: schema browser and simple autocomplete
2015-04-02 17:10:07 +03:00
Arik Fraimovich
e3c5da5bc5 Fix tests to use correct data 2015-04-02 17:05:16 +03:00
Arik Fraimovich
e675690cc6 Sort schema by name 2015-04-02 16:56:00 +03:00
Arik Fraimovich
edc1622cf5 Schema support for MySQL 2015-04-02 16:55:52 +03:00
Arik Fraimovich
5ab3d4a40d Basic autocomplete functionality 2015-04-02 16:12:33 +03:00
Arik Fraimovich
cb29d87b63 Improve formatting of schema browser 2015-04-02 15:40:43 +03:00
Arik Fraimovich
6ff6bdad9f Use the correct redis connection in tests 2015-04-02 11:25:42 +03:00
Arik Fraimovich
e3cc3ef9a4 Move schema fetching to DataSource + tests 2015-04-02 11:25:42 +03:00
Arik Fraimovich
1fe4f291f2 Flush test redis db after each test 2015-04-02 11:25:22 +03:00
Arik Fraimovich
a54119f4a2 Show schema along side the query 2015-04-02 11:25:22 +03:00
Arik Fraimovich
c5b7fe5321 Use codemirror directly without ui-codemirror 2015-04-02 11:24:47 +03:00
Arik Fraimovich
d487ec9153 Upgrade codemirror to latest version 2015-04-02 11:24:18 +03:00
Arik Fraimovich
fa19b1ddc8 Endpoint to return data source schema 2015-04-02 11:23:52 +03:00
Arik Fraimovich
267c32b390 Merge pull request #401 from EverythingMe/fix/wrong_time_zone
Fix: use correct date when converting to UTC to get correct timezone.
2015-04-02 07:40:27 +03:00
Arik Fraimovich
aeff3f1494 Fix: use correct date when converting to UTC to get correct timezone. 2015-04-02 07:39:37 +03:00
Arik Fraimovich
e80e52f6c9 Add annotations for the injector. 2015-04-01 20:23:18 +03:00
Arik Fraimovich
fe41a70602 Merge pull request #400 from EverythingMe/feature/better_scheduler
Improved query scheduling option
2015-04-01 17:28:02 +03:00
Arik Fraimovich
976d9abe2d Disable UI tests, as they are no longer maintained :-( 2015-04-01 17:23:08 +03:00
Arik Fraimovich
041bc1100a New UI for query schedule setting 2015-04-01 17:07:19 +03:00
Arik Fraimovich
5d095ff6ab Resolve #113: upgrade to latest ui-bootstrap 2015-04-01 12:48:24 +03:00
Arik Fraimovich
ef01b61b29 Fix: refresh selector had empty option 2015-04-01 12:11:14 +03:00
Arik Fraimovich
faad6b656b Change the query ttl field to be a string named schedule.
This is to allow other types of scheduling than just repeating every X seconds.
The first supported option will be: repeat every day at hour X.
2015-04-01 11:23:26 +03:00
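As the diffs below show, the new schedule string has two formats: a digits-only string is a refresh interval in seconds, and an "HH:mm" string is a daily run time stored in UTC. A minimal Python sketch of interpreting the field (a hypothetical helper mirroring the scheduleHumanize filter further down):

def describe_schedule(schedule):
    # None means the query is never refreshed automatically.
    if schedule is None:
        return "Never"
    # A digits-only string is an interval in seconds.
    if schedule.isdigit():
        return "Every %s seconds" % schedule
    # Otherwise "HH:mm": run once a day at that UTC time.
    return "Every day at %s UTC" % schedule

assert describe_schedule(None) == "Never"
assert describe_schedule("3600") == "Every 3600 seconds"
assert describe_schedule("23:30") == "Every day at 23:30 UTC"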
Arik Fraimovich
0bc775584b Merge pull request #397 from EverythingMe/feature/edit_others_queries
Fix: forking broken
2015-03-22 17:32:13 +02:00
Arik Fraimovich
f2d96d61a1 Fix: forking broken 2015-03-22 17:28:47 +02:00
Arik Fraimovich
09bf2dd608 Merge pull request #396 from EverythingMe/feature/edit_others_queries
Feature: allow editing others' queries
2015-03-22 14:53:02 +02:00
Arik Fraimovich
ad1b9b06cf Fix test. 2015-03-22 14:42:08 +02:00
Arik Fraimovich
a4bceae60b Allow anyone to edit any query & show who edited it 2015-03-22 13:22:11 +02:00
Arik Fraimovich
9385449feb Add updated_at timestamp to visualization, query, dashboard and users models 2015-03-22 12:58:26 +02:00
53 changed files with 1339 additions and 347 deletions


@@ -1,6 +1,7 @@
NAME=redash
VERSION=`python ./manage.py version`
FULL_VERSION=$(VERSION)+b$(CIRCLE_BUILD_NUM)
BASE_VERSION=$(shell python ./manage.py version | cut -d + -f 1)
# VERSION gets evaluated every time it's referenced, therefore we need to use VERSION here instead of FULL_VERSION.
FILENAME=$(CIRCLE_ARTIFACTS)/$(NAME).$(VERSION).tar.gz
@@ -15,8 +16,8 @@ pack:
tar -zcv -f $(FILENAME) --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *
upload:
python bin/upload_version.py $(VERSION) $(FILENAME)
python bin/release_manager.py $(CIRCLE_SHA1) $(BASE_VERSION) $(FILENAME)
test:
nosetests --with-coverage --cover-package=redash tests/*.py
cd rd_ui && grunt test
#cd rd_ui && grunt test
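The comment in this hunk refers to GNU make's expansion rules: a variable assigned with = is re-expanded every time it is referenced, while := expands once at parse time. A standalone sketch (hypothetical variables, not part of the Makefile above):

NOW  = $(shell date +%s)   # recursive assignment: re-expanded on every reference
ONCE := $(shell date +%s)  # simple assignment: expanded once at parse time

demo:
	@echo $(NOW)
	@sleep 1
	@echo $(NOW)    # can differ from the first echo
	@echo $(ONCE)
	@sleep 1
	@echo $(ONCE)   # always matches the earlier value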


@@ -1,6 +1,5 @@
<p align="center">
<img title="re:dash" src='https://raw.githubusercontent.com/EverythingMe/redash/screenshots/redash_logo.png' />
<img title="re:dash" src='http://redash.io/static/img/redash_logo.png' width="200px"/>
</p>
<p align="center">
<img title="Build Status" src='https://circleci.com/gh/EverythingMe/redash.png?circle-token=8a695aa5ec2cbfa89b48c275aea298318016f040'/>


@@ -1,30 +0,0 @@
#!/usr/bin/env python
import sys
import requests
if __name__ == '__main__':
response = requests.get('https://api.github.com/repos/EverythingMe/redash/releases')
if response.status_code != 200:
exit("Failed getting releases (status code: %s)." % response.status_code)
sorted_releases = sorted(response.json(), key=lambda release: release['id'], reverse=True)
latest_release = sorted_releases[0]
asset_url = latest_release['assets'][0]['url']
filename = latest_release['assets'][0]['name']
wget_command = 'wget --header="Accept: application/octet-stream" %s -O %s' % (asset_url, filename)
if '--url-only' in sys.argv:
print asset_url
elif '--wget' in sys.argv:
print wget_command
else:
print "Latest release: %s" % latest_release['tag_name']
print latest_release['body']
print "\nTarball URL: %s" % asset_url
print 'wget: %s' % (wget_command)

bin/release_manager.py (new file, 130 lines)

@@ -0,0 +1,130 @@
import os
import sys
import json
import re
import subprocess
import requests
github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
repo = 'EverythingMe/redash'
def _github_request(method, path, params=None, headers={}):
if not path.startswith('https://api.github.com'):
url = "https://api.github.com/{}".format(path)
else:
url = path
if params is not None:
params = json.dumps(params)
response = requests.request(method, url, data=params, auth=auth)
return response
def exception_from_error(message, response):
return Exception("({}) {}: {}".format(response.status_code, message, response.json().get('message', '?')))
def rc_tag_name(version):
return "v{}-rc".format(version)
def get_rc_release(version):
tag = rc_tag_name(version)
response = _github_request('get', 'repos/{}/releases/tags/{}'.format(repo, tag))
if response.status_code == 404:
return None
elif response.status_code == 200:
return response.json()
raise exception_from_error("Unknown error while looking RC release: ", response)
def create_release(version, commit_sha):
tag = rc_tag_name(version)
params = {
'tag_name': tag,
'name': "{} - RC".format(version),
'target_commitish': commit_sha,
'prerelease': True
}
response = _github_request('post', 'repos/{}/releases'.format(repo), params)
if response.status_code != 201:
raise exception_from_error("Failed creating new release", response)
return response.json()
def upload_asset(release, filepath):
upload_url = release['upload_url'].replace('{?name}', '')
filename = filepath.split('/')[-1]
with open(filepath) as file_content:
headers = {'Content-Type': 'application/gzip'}
response = requests.post(upload_url, file_content, params={'name': filename}, headers=headers, auth=auth, verify=False)
if response.status_code != 201: # not 200/201/...
raise exception_from_error('Failed uploading asset', response)
return response
def remove_previous_builds(release):
for asset in release['assets']:
response = _github_request('delete', asset['url'])
if response.status_code != 204:
raise exception_from_error("Failed deleting asset", response)
def get_changelog(commit_sha):
latest_release = _github_request('get', 'repos/{}/releases/latest'.format(repo))
if latest_release.status_code != 200:
raise exception_from_error('Failed getting latest release', latest_release)
latest_release = latest_release.json()
previous_sha = latest_release['target_commitish']
args = ['git', '--no-pager', 'log', '--merges', '--grep', 'Merge pull request', '--pretty=format:"%h|%s|%b|%p"', '{}...{}'.format(previous_sha, commit_sha)]
log = subprocess.check_output(args)
changes = ["Changes since {}:".format(latest_release['name'])]
for line in log.split('\n'):
try:
sha, subject, body, parents = line[1:-1].split('|')
except ValueError:
continue
try:
pull_request = re.match("Merge pull request #(\d+)", subject).groups()[0]
pull_request = " #{}".format(pull_request)
except Exception, ex:
pull_request = ""
author = subprocess.check_output(['git', 'log', '-1', '--pretty=format:"%an"', parents.split(' ')[-1]])[1:-1]
changes.append("{}{}: {} ({})".format(sha, pull_request, body.strip(), author))
return "\n".join(changes)
def update_release(version, build_filepath, commit_sha):
try:
release = get_rc_release(version) or create_release(version, commit_sha)
print "Using release id: {}".format(release['id'])
remove_previous_builds(release)
response = upload_asset(release, build_filepath)
changelog = get_changelog(commit_sha)
response = _github_request('patch', release['url'], {'body': changelog})
if response.status_code != 200:
raise exception_from_error("Failed updating release description", response)
except Exception, ex:
print ex
if __name__ == '__main__':
commit_sha = sys.argv[1]
version = sys.argv[2]
filepath = sys.argv[3]
# TODO: make sure running from git directory & remote = repo
update_release(version, filepath, commit_sha)


@@ -1,46 +0,0 @@
#!python
import os
import sys
import json
import requests
import subprocess
def capture_output(command):
proc = subprocess.Popen(command, stdout=subprocess.PIPE)
return proc.stdout.read()
if __name__ == '__main__':
version = sys.argv[1]
filepath = sys.argv[2]
filename = filepath.split('/')[-1]
github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
commit_sha = os.environ['CIRCLE_SHA1']
commit_body = capture_output(["git", "log", "--format=%b", "-n", "1", commit_sha])
file_md5_checksum = capture_output(["md5sum", filepath]).split()[0]
file_sha256_checksum = capture_output(["sha256sum", filepath]).split()[0]
version_body = "%s\n\nMD5: %s\nSHA256: %s" % (commit_body, file_md5_checksum, file_sha256_checksum)
params = json.dumps({
'tag_name': 'v{0}'.format(version),
'name': 're:dash v{0}'.format(version),
'body': version_body,
'target_commitish': commit_sha,
'prerelease': True
})
response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
data=params,
auth=auth)
upload_url = response.json()['upload_url']
upload_url = upload_url.replace('{?name}', '')
with open(filepath) as file_content:
headers = {'Content-Type': 'application/gzip'}
response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth,
headers=headers, verify=False)


@@ -7,6 +7,9 @@ machine:
2.7.3
dependencies:
pre:
- wget http://downloads.sourceforge.net/project/optipng/OptiPNG/optipng-0.7.5/optipng-0.7.5.tar.gz
- tar xvf optipng-0.7.5.tar.gz
- cd optipng-0.7.5; ./configure; make; sudo checkinstall -y;
- make deps
- pip install -r dev_requirements.txt
- pip install -r requirements.txt


@@ -0,0 +1,26 @@
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models
if __name__ == '__main__':
db.connect_db()
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
migrate(
migrator.add_column('queries', 'updated_at', models.Query.updated_at),
migrator.add_column('dashboards', 'updated_at', models.Dashboard.updated_at),
migrator.add_column('widgets', 'updated_at', models.Widget.updated_at),
migrator.add_column('users', 'created_at', models.User.created_at),
migrator.add_column('users', 'updated_at', models.User.updated_at),
migrator.add_column('visualizations', 'created_at', models.Visualization.created_at),
migrator.add_column('visualizations', 'updated_at', models.Visualization.updated_at)
)
db.database.execute_sql("UPDATE queries SET updated_at = created_at;")
db.database.execute_sql("UPDATE dashboards SET updated_at = created_at;")
db.database.execute_sql("UPDATE widgets SET updated_at = created_at;")
db.close_db(None)


@@ -0,0 +1,19 @@
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models
if __name__ == '__main__':
db.connect_db()
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
migrate(
migrator.add_column('queries', 'last_modified_by_id', models.Query.last_modified_by)
)
db.database.execute_sql("UPDATE queries SET last_modified_by_id = user_id;")
db.close_db(None)


@@ -0,0 +1,23 @@
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models
if __name__ == '__main__':
db.connect_db()
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
migrate(
migrator.add_column('queries', 'schedule', models.Query.schedule),
)
db.database.execute_sql("UPDATE queries SET schedule = ttl WHERE ttl > 0;")
migrate(
migrator.drop_column('queries', 'ttl')
)
db.close_db(None)


@@ -0,0 +1,20 @@
from redash.models import db
if __name__ == '__main__':
db.connect_db()
with db.database.transaction():
# Make sure all data source names are unique.
db.database.execute_sql("""
UPDATE data_sources
SET name = new_names.name
FROM (
SELECT id, name || ' ' || id as name
FROM (SELECT id, name, rank() OVER (PARTITION BY name ORDER BY created_at ASC) FROM data_sources) ds WHERE rank > 1
) AS new_names
WHERE data_sources.id = new_names.id;
""")
# Add unique constraint on data_sources.name.
db.database.execute_sql("ALTER TABLE data_sources ADD CONSTRAINT unique_name UNIQUE (name);")
db.close_db(None)
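A worked example of the rename above, with hypothetical rows: if two data sources are both named 'metrics', with ids 3 (created first) and 7, the rank() window gives the older row rank 1 and the newer one rank 2. Only the rank-2 row is renamed, via name || ' ' || id, to 'metrics 7', after which the UNIQUE constraint can be added safely.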

Four binary image files added (not shown): 1.3 KiB, 2.0 KiB, 3.8 KiB, 6.0 KiB.


@@ -18,8 +18,14 @@
<link rel="stylesheet" href="/bower_components/angular-ui-select/dist/select.css">
<link rel="stylesheet" href="/bower_components/pace/themes/pace-theme-minimal.css">
<link rel="stylesheet" href="/bower_components/font-awesome/css/font-awesome.css">
<link rel="stylesheet" href="/bower_components/codemirror/addon/hint/show-hint.css">
<link rel="stylesheet" href="/styles/redash.css">
<!-- endbuild -->
<link rel="icon" type="image/png" sizes="32x32" href="/images/favicon-32x32.png">
<link rel="icon" type="image/png" sizes="96x96" href="/images/favicon-96x96.png">
<link rel="icon" type="image/png" sizes="16x16" href="/images/favicon-16x16.png">
</head>
<body>
<div growl></div>
@@ -33,15 +39,15 @@
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="/"><strong>{{name}}</strong></a>
<a class="navbar-brand" href="/"><img src="/images/redash_icon_small.png"/></a>
</div>
{% raw %}
<div class="collapse navbar-collapse navbar-ex1-collapse">
<ul class="nav navbar-nav">
<li class="active" ng-show="pageTitle"><a class="page-title" ng-bind="pageTitle"></a></li>
<li class="dropdown" ng-show="groupedDashboards.length > 0 || otherDashboards.length > 0 || currentUser.hasPermission('create_dashboard')">
<a href="#" class="dropdown-toggle" data-toggle="dropdown"><span class="glyphicon glyphicon-th-large"></span> <b class="caret"></b></a>
<ul class="dropdown-menu">
<li class="dropdown" ng-show="groupedDashboards.length > 0 || otherDashboards.length > 0 || currentUser.hasPermission('create_dashboard')" dropdown>
<a href="#" class="dropdown-toggle" dropdown-toggle><span class="glyphicon glyphicon-th-large"></span> <b class="caret"></b></a>
<ul class="dropdown-menu" dropdown-menu>
<span ng-repeat="(name, group) in groupedDashboards">
<li class="dropdown-submenu">
<a href="#" ng-bind="name"></a>
@@ -59,9 +65,9 @@
<li><a data-toggle="modal" href="#new_dashboard_dialog" ng-show="currentUser.hasPermission('create_dashboard')">New Dashboard</a></li>
</ul>
</li>
<li class="dropdown" ng-show="currentUser.hasPermission('view_query')">
<a href="#" class="dropdown-toggle" data-toggle="dropdown">Queries <b class="caret"></b></a>
<ul class="dropdown-menu">
<li class="dropdown" ng-show="currentUser.hasPermission('view_query')" dropdown>
<a href="#" class="dropdown-toggle" dropdown-toggle>Queries <b class="caret"></b></a>
<ul class="dropdown-menu" dropdown-menu>
<li ng-show="currentUser.hasPermission('create_query')"><a href="/queries/new">New Query</a></li>
<li><a href="/queries">Queries</a></li>
</ul>
@@ -105,9 +111,11 @@
<script src="/bower_components/codemirror/lib/codemirror.js"></script>
<script src="/bower_components/codemirror/addon/edit/matchbrackets.js"></script>
<script src="/bower_components/codemirror/addon/edit/closebrackets.js"></script>
<script src="/bower_components/codemirror/addon/hint/show-hint.js"></script>
<script src="/bower_components/codemirror/addon/hint/anyword-hint.js"></script>
<script src="/bower_components/codemirror/mode/sql/sql.js"></script>
<script src="/bower_components/codemirror/mode/python/python.js"></script>
<script src="/bower_components/codemirror/mode/javascript/javascript.js"></script>
<script src="/bower_components/angular-ui-codemirror/ui-codemirror.js"></script>
<script src="/bower_components/highcharts/highcharts.js"></script>
<script src="/bower_components/highcharts/modules/exporting.js"></script>
<script src="/bower_components/gridster/dist/jquery.gridster.js"></script>
@@ -123,7 +131,7 @@
<script src="/bower_components/marked/lib/marked.js"></script>
<script src="/scripts/ng_highchart.js"></script>
<script src="/scripts/ng_smart_table.js"></script>
<script src="/scripts/ui-bootstrap-tpls-0.5.0.min.js"></script>
<script src="/bower_components/angular-ui-bootstrap-bower/ui-bootstrap-tpls.js"></script>
<script src="/bower_components/bucky/bucky.js"></script>
<script src="/bower_components/pace/pace.js"></script>
<script src="/bower_components/mustache/mustache.js"></script>


@@ -6,7 +6,6 @@ angular.module('redash', [
'redash.services',
'redash.renderers',
'redash.visualization',
'ui.codemirror',
'highchart',
'ui.select2',
'angular-growl',


@@ -1,6 +1,8 @@
(function () {
var dateFormatter = function (value) {
if (!value) return "-";
if (!value) {
return "-";
}
return value.toDate().toLocaleString();
};
@@ -30,9 +32,9 @@
},
{
'label': 'Update Schedule',
'map': 'ttl',
'map': 'schedule',
'formatFunction': function (value) {
return $filter('refreshRateHumanize')(value);
return $filter('scheduleHumanize')(value);
}
}
];
@@ -127,9 +129,9 @@
},
{
'label': 'Update Schedule',
'map': 'ttl',
'map': 'schedule',
'formatFunction': function (value) {
return $filter('refreshRateHumanize')(value);
return $filter('scheduleHumanize')(value);
}
}
]


@@ -100,9 +100,13 @@
Events.record(currentUser, "autorefresh", "dashboard", dashboard.id, {'enable': $scope.refreshEnabled});
if ($scope.refreshEnabled) {
var refreshRate = _.min(_.flatten($scope.dashboard.widgets), function(widget) {
return widget.visualization.query.ttl;
}).visualization.query.ttl;
var refreshRate = _.min(_.map(_.flatten($scope.dashboard.widgets), function(widget) {
var schedule = widget.visualization.query.schedule;
if (schedule === null || schedule.match(/\d\d:\d\d/) !== null) {
return 60;
}
return widget.visualization.query.schedule;
}));
$scope.refreshRate = _.max([120, refreshRate * 2]) * 1000;
@@ -138,7 +142,6 @@
var parameters = Query.collectParamsFromQueryString($location, $scope.query);
var maxAge = $location.search()['maxAge'];
$scope.queryResult = $scope.query.getQueryResult(maxAge, parameters);
$scope.nextUpdateTime = moment(new Date(($scope.query.updated_at + $scope.query.ttl + $scope.query.runtime + 300) * 1000)).fromNow();
$scope.type = 'visualization';
} else {


@@ -17,7 +17,7 @@
saveQuery = $scope.saveQuery;
$scope.sourceMode = true;
$scope.canEdit = currentUser.canEdit($scope.query);
$scope.canEdit = true;
$scope.isDirty = false;
$scope.newVisualization = undefined;
@@ -68,7 +68,7 @@
$scope.duplicateQuery = function() {
Events.record(currentUser, 'fork', 'query', $scope.query.id);
$scope.query.id = null;
$scope.query.ttl = -1;
$scope.query.schedule = null;
$scope.saveQuery({
successMessage: 'Query forked',


@@ -1,28 +1,57 @@
(function() {
'use strict';
function QueryViewCtrl($scope, Events, $route, $location, notifications, growl, Query, DataSource) {
function QueryViewCtrl($scope, Events, $route, $location, notifications, growl, $modal, Query, DataSource) {
var DEFAULT_TAB = 'table';
var getQueryResult = function(ttl) {
var getQueryResult = function(maxAge) {
// Collect params, and getQueryResult with params; getQueryResult merges it into the query
var parameters = Query.collectParamsFromQueryString($location, $scope.query);
if (ttl == undefined) {
ttl = $location.search()['maxAge'];
if (maxAge == undefined) {
maxAge = $location.search()['maxAge'];
}
$scope.queryResult = $scope.query.getQueryResult(ttl, parameters);
if (maxAge == undefined) {
maxAge = -1;
}
$scope.queryResult = $scope.query.getQueryResult(maxAge, parameters);
}
$scope.dataSource = {};
$scope.query = $route.current.locals.query;
var updateSchema = function() {
$scope.hasSchema = false;
$scope.editorSize = "col-md-12";
var dataSourceId = $scope.query.data_source_id || $scope.dataSources[0].id;
DataSource.getSchema({id: dataSourceId}, function(data) {
if (data && data.length > 0) {
$scope.schema = data;
_.each(data, function(table) {
table.collapsed = true;
});
$scope.editorSize = "col-md-9";
$scope.hasSchema = true;
} else {
$scope.hasSchema = false;
$scope.editorSize = "col-md-12";
}
});
}
Events.record(currentUser, 'view', 'query', $scope.query.id);
getQueryResult();
$scope.queryExecuting = false;
$scope.isQueryOwner = currentUser.id === $scope.query.user.id;
$scope.isQueryOwner = (currentUser.id === $scope.query.user.id) || currentUser.hasPermission('admin');
$scope.canViewSource = currentUser.hasPermission('view_source');
$scope.dataSources = DataSource.get(function(dataSources) {
updateSchema();
$scope.query.data_source_id = $scope.query.data_source_id || dataSources[0].id;
$scope.dataSource = _.find(dataSources, function(ds) { return ds.id == $scope.query.data_source_id; });
});
// in view mode, latest dataset is always visible
@@ -98,7 +127,7 @@
return Query.delete({id: data.id}, function() {
$scope.query.is_archived = true;
$scope.query.ttl = -1;
$scope.query.schedule = null;
growl.addSuccessMessage(options.successMessage);
// This feels dirty.
$('#archive-confirmation-modal').modal('hide');
@@ -121,6 +150,8 @@
});
}
updateSchema();
$scope.dataSource = _.find($scope.dataSources, function(ds) { return ds.id == $scope.query.data_source_id; });
$scope.executeQuery();
};
@@ -168,6 +199,28 @@
}
});
$scope.openScheduleForm = function() {
if (!$scope.isQueryOwner) {
return;
};
$modal.open({
templateUrl: '/views/schedule_form.html',
size: 'sm',
scope: $scope,
controller: ['$scope', '$modalInstance', function($scope, $modalInstance) {
$scope.close = function() {
$modalInstance.close();
}
if ($scope.query.hasDailySchedule()) {
$scope.refreshType = 'daily';
} else {
$scope.refreshType = 'periodic';
}
}]
});
};
$scope.$watch(function() {
return $location.hash()
}, function(hash) {
@@ -180,5 +233,5 @@
angular.module('redash.controllers')
.controller('QueryViewCtrl',
['$scope', 'Events', '$route', '$location', 'notifications', 'growl', 'Query', 'DataSource', QueryViewCtrl]);
['$scope', 'Events', '$route', '$location', 'notifications', 'growl', '$modal', 'Query', 'DataSource', QueryViewCtrl]);
})();


@@ -29,7 +29,7 @@
restrict: 'E',
template: '<span ng-show="query.id && canViewSource">\
<a ng-show="!sourceMode"\
ng-href="{{query.id}}/source#{{selectedTab}}">Show Source\
ng-href="/queries/{{query.id}}/source#{{selectedTab}}">Show Source\
</a>\
<a ng-show="sourceMode"\
ng-href="/queries/{{query.id}}#{{selectedTab}}">Hide Source\
@@ -63,26 +63,95 @@
restrict: 'E',
scope: {
'query': '=',
'lock': '='
'lock': '=',
'schema': '=',
'syntax': '='
},
template: '<textarea\
ui-codemirror="editorOptions"\
ng-model="query.query">',
link: function($scope) {
$scope.editorOptions = {
mode: 'text/x-sql',
template: '<textarea></textarea>',
link: {
pre: function ($scope, element) {
$scope.syntax = $scope.syntax || 'sql';
var modes = {
'sql': 'text/x-sql',
'python': 'text/x-python',
'json': 'application/json'
};
var textarea = element.children()[0];
var editorOptions = {
mode: modes[$scope.syntax],
lineWrapping: true,
lineNumbers: true,
readOnly: false,
matchBrackets: true,
autoCloseBrackets: true
};
autoCloseBrackets: true,
extraKeys: {"Ctrl-Space": "autocomplete"}
};
$scope.$watch('lock', function(locked) {
$scope.editorOptions.readOnly = locked ? 'nocursor' : false;
});
var additionalHints = [];
CodeMirror.commands.autocomplete = function(cm) {
var hinter = function(editor, options) {
var hints = CodeMirror.hint.anyword(editor, options);
var cur = editor.getCursor(), token = editor.getTokenAt(cur).string;
hints.list = _.union(hints.list, _.filter(additionalHints, function (h) {
return h.search(token) === 0;
}));
return hints;
};
// CodeMirror.showHint(cm, CodeMirror.hint.anyword);
CodeMirror.showHint(cm, hinter);
};
var codemirror = CodeMirror.fromTextArea(textarea, editorOptions);
codemirror.on('change', function(instance) {
var newValue = instance.getValue();
if (newValue !== $scope.query.query) {
$scope.$evalAsync(function() {
$scope.query.query = newValue;
});
}
$('.schema-container').css('height', $('.CodeMirror').css('height'));
});
$scope.$watch('query.query', function () {
if ($scope.query.query !== codemirror.getValue()) {
codemirror.setValue($scope.query.query);
}
});
$scope.$watch('schema', function (schema) {
if (schema) {
var keywords = [];
_.each(schema, function (table) {
keywords.push(table.name);
_.each(table.columns, function (c) {
keywords.push(c);
});
});
additionalHints = _.unique(keywords);
}
});
$scope.$watch('syntax', function(syntax) {
codemirror.setOption('mode', modes[syntax]);
});
$scope.$watch('lock', function (locked) {
var readOnly = locked ? 'nocursor' : false;
codemirror.setOption('readOnly', readOnly);
});
}
}
}
};
}
function queryFormatter($http) {
@@ -111,42 +180,91 @@
}
}
function queryTimePicker() {
return {
restrict: 'E',
template: '<select ng-disabled="refreshType != \'daily\'" ng-model="hour" ng-change="updateSchedule()" ng-options="c as c for c in hourOptions"></select> :\
<select ng-disabled="refreshType != \'daily\'" ng-model="minute" ng-change="updateSchedule()" ng-options="c as c for c in minuteOptions"></select>',
link: function($scope) {
var padWithZeros = function(size, v) {
v = String(v);
if (v.length < size) {
v = "0" + v;
}
return v;
};
$scope.hourOptions = _.map(_.range(0, 24), _.partial(padWithZeros, 2));
$scope.minuteOptions = _.map(_.range(0, 60, 5), _.partial(padWithZeros, 2));
if ($scope.query.hasDailySchedule()) {
var parts = $scope.query.scheduleInLocalTime().split(':');
$scope.minute = parts[1];
$scope.hour = parts[0];
} else {
$scope.minute = "15";
$scope.hour = "00";
}
$scope.updateSchedule = function() {
var newSchedule = moment().hour($scope.hour).minute($scope.minute).utc().format('HH:mm');
if (newSchedule != $scope.query.schedule) {
$scope.query.schedule = newSchedule;
$scope.saveQuery();
}
};
$scope.$watch('refreshType', function() {
if ($scope.refreshType == 'daily') {
$scope.updateSchedule();
}
});
}
}
}
function queryRefreshSelect() {
return {
restrict: 'E',
template: '<select\
ng-disabled="!isQueryOwner"\
ng-model="query.ttl"\
ng-disabled="refreshType != \'periodic\'"\
ng-model="query.schedule"\
ng-change="saveQuery()"\
ng-options="c.value as c.name for c in refreshOptions">\
<option value="">No Refresh</option>\
</select>',
link: function($scope) {
$scope.refreshOptions = [
{
value: -1,
name: 'No Refresh'
},
{
value: 60,
value: "60",
name: 'Every minute'
},
}
]
_.each(_.range(1, 13), function (i) {
$scope.refreshOptions.push({
value: i * 3600,
value: String(i * 3600),
name: 'Every ' + i + 'h'
});
})
$scope.refreshOptions.push({
value: 24 * 3600,
value: String(24 * 3600),
name: 'Every 24h'
});
$scope.refreshOptions.push({
value: 7 * 24 * 3600,
value: String(7 * 24 * 3600),
name: 'Once a week'
});
$scope.$watch('refreshType', function() {
if ($scope.refreshType == 'periodic') {
if ($scope.query.hasDailySchedule()) {
$scope.query.schedule = null;
$scope.saveQuery();
}
}
});
}
}
@@ -158,5 +276,6 @@
.directive('queryResultLink', queryResultCSVLink)
.directive('queryEditor', queryEditor)
.directive('queryRefreshSelect', queryRefreshSelect)
.directive('queryTimePicker', queryTimePicker)
.directive('queryFormatter', ['$http', queryFormatter]);
})();


@@ -24,13 +24,17 @@ angular.module('redash.filters', []).
return durationHumanize;
})
.filter('refreshRateHumanize', function () {
return function (ttl) {
if (ttl == -1) {
.filter('scheduleHumanize', function() {
return function (schedule) {
if (schedule === null) {
return "Never";
} else {
return "Every " + durationHumanize(ttl);
} else if (schedule.match(/\d\d:\d\d/) !== null) {
var parts = schedule.split(':');
var localTime = moment.utc().hour(parts[0]).minute(parts[1]).local().format('HH:mm');
return "Every day at " + localTime;
}
return "Every " + durationHumanize(parseInt(schedule));
}
})


@@ -325,6 +325,23 @@
series = seriesCopy;
}
// If this is a chart that has just one row for multiple columns, sort
// by the Y values. For example:
//
// A | B | C
// 20 | 30 | 15
//
// Will be sorted:
// C | A | B
// 15 | 20 | 30
var sortable = _.every(series, function(s) { return s.data.length == 1 });
if (sortable) {
series = _.sortBy(series, function (s) {
return s.data[0].y
});
}
if (!('xAxis' in chartOptions && 'type' in chartOptions['xAxis'])) {
if (series.length > 0 && _.some(series[0].data, function (p) {
return (angular.isString(p.x) || angular.isDefined(p.name));


@@ -30,7 +30,9 @@
_.each(this.query_result.data.columns, function(column) {
if (columnTypes[column.name]) {
column.type = columnTypes[column.name];
if (column.type == null) {
column.type = columnTypes[column.name];
}
}
});
@@ -308,7 +310,7 @@
this.filters = filters;
}
var refreshStatus = function (queryResult, query, ttl) {
var refreshStatus = function (queryResult, query) {
Job.get({'id': queryResult.job.id}, function (response) {
queryResult.update(response);
@@ -318,7 +320,7 @@
});
} else if (queryResult.getStatus() != "failed") {
$timeout(function () {
refreshStatus(queryResult, query, ttl);
refreshStatus(queryResult, query);
}, 3000);
}
})
@@ -338,14 +340,19 @@
return this.deferred.promise;
}
QueryResult.get = function (data_source_id, query, ttl) {
QueryResult.get = function (data_source_id, query, maxAge, queryId) {
var queryResult = new QueryResult();
QueryResultResource.post({'data_source_id': data_source_id, 'query': query, 'ttl': ttl}, function (response) {
var params = {'data_source_id': data_source_id, 'query': query, 'max_age': maxAge};
if (queryId !== undefined) {
params['query_id'] = queryId;
};
QueryResultResource.post(params, function (response) {
queryResult.update(response);
if ('job' in response) {
refreshStatus(queryResult, query, ttl);
refreshStatus(queryResult, query);
}
});
@@ -373,7 +380,7 @@
return new Query({
query: "",
name: "New Query",
ttl: -1,
schedule: null,
user: currentUser
});
};
@@ -397,10 +404,19 @@
return '/queries/' + this.id + '/source';
};
Query.prototype.getQueryResult = function (ttl, parameters) {
if (ttl == undefined) {
ttl = this.ttl;
}
Query.prototype.hasDailySchedule = function() {
return (this.schedule && this.schedule.match(/\d\d:\d\d/) !== null);
}
Query.prototype.scheduleInLocalTime = function() {
var parts = this.schedule.split(':');
return moment.utc().hour(parts[0]).minute(parts[1]).local().format('HH:mm');
}
Query.prototype.getQueryResult = function (maxAge, parameters) {
// if (ttl == undefined) {
// ttl = this.ttl;
// }
var queryText = this.query;
@@ -426,16 +442,16 @@
this.latest_query_data_id = null;
}
if (this.latest_query_data && ttl != 0) {
if (this.latest_query_data && maxAge != 0) {
if (!this.queryResult) {
this.queryResult = new QueryResult({'query_result': this.latest_query_data});
}
} else if (this.latest_query_data_id && ttl != 0) {
} else if (this.latest_query_data_id && maxAge != 0) {
if (!this.queryResult) {
this.queryResult = QueryResult.getById(this.latest_query_data_id);
}
} else if (this.data_source_id) {
this.queryResult = QueryResult.get(this.data_source_id, queryText, ttl);
this.queryResult = QueryResult.get(this.data_source_id, queryText, maxAge, this.id);
}
return this.queryResult;
@@ -471,7 +487,12 @@
var DataSource = function ($resource) {
var DataSourceResource = $resource('/api/data_sources/:id', {id: '@id'}, {'get': {'method': 'GET', 'cache': true, 'isArray': true}});
var actions = {
'get': {'method': 'GET', 'cache': true, 'isArray': true},
'getSchema': {'method': 'GET', 'cache': true, 'isArray': true, 'url': '/api/data_sources/:id/schema'}
};
var DataSourceResource = $resource('/api/data_sources/:id', {id: '@id'}, actions);
return DataSourceResource;
}


@@ -55,6 +55,22 @@
}];
};
var VisualizationName = function(Visualization) {
return {
restrict: 'E',
scope: {
visualization: '='
},
template: '<small>{{name}}</small>',
replace: false,
link: function (scope) {
if (Visualization.visualizations[scope.visualization.type].name != scope.visualization.name) {
scope.name = scope.visualization.name;
}
}
}
}
var VisualizationRenderer = function ($location, Visualization) {
return {
restrict: 'E',
@@ -72,42 +88,9 @@
width: '50%'
};
function readURL() {
var searchFilters = angular.fromJson($location.search().filters);
if (searchFilters) {
_.forEach(scope.filters, function(filter) {
var value = searchFilters[filter.friendlyName];
if (value) {
filter.current = value;
}
});
}
}
function updateURL(filters) {
var current = {};
_.each(filters, function(filter) {
if (filter.current) {
current[filter.friendlyName] = filter.current;
}
});
var newSearch = angular.extend($location.search(), {
filters: angular.toJson(current)
});
$location.search(newSearch);
}
scope.$watch('queryResult && queryResult.getFilters()', function (filters) {
if (filters) {
scope.filters = filters;
if (filters.length && false) {
readURL();
// start watching for changes and update URL
scope.$watch('filters', updateURL, true);
}
}
});
}
@@ -208,6 +191,7 @@
.provider('Visualization', VisualizationProvider)
.directive('visualizationRenderer', ['$location', 'Visualization', VisualizationRenderer])
.directive('visualizationOptionsEditor', ['Visualization', VisualizationOptionsEditor])
.directive('visualizationName', ['Visualization', VisualizationName])
.directive('filters', Filters)
.directive('editVisulatizationForm', ['Events', 'Visualization', 'growl', EditVisualizationForm])
})();


@@ -14,7 +14,12 @@ a.page-title {
}
a.navbar-brand {
font-style: italic;
padding: 5px 5px 0px 0px;
margin-left: 0px !important;
}
a.navbar-brand img {
height: 40px;
}
.graph {
@@ -308,6 +313,23 @@ counter-renderer counter-name {
height: 100%;
}
.schema-container {
height: 300px;
}
.schema-browser {
height: 100%;
overflow-y: auto;
overflow-x: hidden;
}
div.table-name {
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
cursor: pointer;
}
/*
bootstrap's hidden-xs class adds display:block when not hidden
use this class when you need to keep the original display value


@@ -28,6 +28,7 @@
<p>
<span ng-hide="currentUser.hasPermission('view_query')">{{query.name}}</span>
<query-link query="query" visualization="widget.visualization" ng-show="currentUser.hasPermission('view_query')"></query-link>
<visualization-name visualization="widget.visualization"/>
</p>
<div class="text-muted" ng-bind-html="query.description | markdown"></div>
</h3>
@@ -37,7 +38,7 @@
<div class="panel-footer">
<span class="label label-default"
tooltip="next update {{nextUpdateTime}} (query runtime: {{queryResult.getRuntime() | durationHumanize}})"
tooltip="(query runtime: {{queryResult.getRuntime() | durationHumanize}})"
tooltip-placement="bottom">Updated: <span am-time-ago="queryResult.getUpdatedAt()"></span></span>
<span class="pull-right">


@@ -59,9 +59,9 @@
<hr>
<div class="row">
<div class="col-lg-12">
<div ng-show="sourceMode">
<div class="row" ng-if="sourceMode">
<div ng-class="editorSize">
<div>
<p>
<button type="button" class="btn btn-primary btn-xs" ng-disabled="queryExecuting" ng-click="executeQuery()">
<span class="glyphicon glyphicon-play"></span> Execute
@@ -77,21 +77,43 @@
</button>
</span>
</p>
</div>
<!-- code editor -->
<div ng-show="sourceMode">
<p>
<query-editor query="query" lock="queryFormatting"></query-editor>
<query-editor query="query" schema="schema" syntax="dataSource.syntax" lock="queryFormatting"></query-editor>
</p>
<hr>
</div>
</div>
<div class="col-md-3 schema-container" ng-show="hasSchema">
<div>
<input type="text" placeholder="Search schema..." class="form-control" ng-model="schemaFilter">
</div>
<div class="schema-browser">
<div ng-repeat="table in schema | filter:schemaFilter">
<div class="table-name" ng-click="table.collapsed = !table.collapsed">
<i class="fa fa-table"></i> <strong><span title="{{table.name}}">{{table.name}}</span></strong>
</div>
<div collapse="table.collapsed">
<div ng-repeat="column in table.columns | filter:schemaFilter" style="padding-left:16px;">{{column}}</div>
</div>
</div>
</div>
</div>
</div>
</div>
<hr ng-if="sourceMode">
<div class="row">
<div class="col-lg-3 rd-hidden-xs">
<p>
<span class="glyphicon glyphicon-user"></span>
<span class="text-muted">Created By </span>
<strong>{{query.user.name}}</strong>
</p>
<p ng-if="query.last_modified_by && query.user.id != query.last_modified_by.id">
<span class="glyphicon glyphicon-user"></span>
<span class="text-muted">Last Modified By </span>
<strong>{{query.last_modified_by.name}}</strong>
</p>
<p>
<span class="glyphicon glyphicon-time"></span>
<span class="text-muted">Last update </span>
@@ -99,12 +121,6 @@
<rd-time-ago value="queryResult.query_result.retrieved_at"></rd-time-ago>
</strong>
</p>
<p>
<span class="glyphicon glyphicon-user"></span>
<span class="text-muted">Created By </span>
<strong ng-hide="isQueryOwner">{{query.user.name}}</strong>
<strong ng-show="isQueryOwner">You</strong>
</p>
<p>
<span class="glyphicon glyphicon-play"></span>
<span class="text-muted">Runtime </span>
@@ -117,8 +133,8 @@
</p>
<p>
<span class="glyphicon glyphicon-refresh"></span>
<span class="text-muted">Refresh Interval</span>
<query-refresh-select></query-refresh-select>
<span class="text-muted">Refresh Schedule</span>
<a href="" ng-click="openScheduleForm()">{{query.schedule | scheduleHumanize}}</a>
</p>
<p>
@@ -186,7 +202,7 @@
<rd-tab tab-id="{{vis.id}}" name="{{vis.name}}" ng-if="vis.type!='TABLE'" ng-repeat="vis in query.visualizations">
<span class="remove" ng-click="deleteVisualization($event, vis)" ng-show="canEdit"> &times;</span>
</rd-tab>
<rd-tab tab-id="add" name="&plus; New" removeable="true" ng-show="canEdit"></rd-tab>
<rd-tab tab-id="add" name="&plus; New Visualization" removeable="true" ng-show="canEdit"></rd-tab>
<li ng-if="!sourceMode" class="rd-tab-btn"><button class="btn btn-sm btn-default" ng-click="executeQuery()" ng-disabled="queryExecuting" title="Refresh Dataset"><span class="glyphicon glyphicon-refresh"></span></button></li>
</ul>
</div>


@@ -0,0 +1,18 @@
<div class="modal-header">
<button type="button" class="close" aria-label="Close" ng-click="close()"><span aria-hidden="true">&times;</span></button>
<h4 class="modal-title">Refresh Schedule</h4>
</div>
<div class="modal-body">
<div class="radio">
<label>
<input type="radio" value="periodic" ng-model="refreshType">
<query-refresh-select ng-disabled="refreshType != 'periodic'"></query-refresh-select>
</label>
</div>
<div class="radio">
<label>
<input type="radio" value="daily" ng-model="refreshType">
<query-time-picker ng-disabled="refreshType != 'daily'"></query-time-picker>
</label>
</div>
</div>


@@ -12,8 +12,7 @@
"es5-shim": "2.0.8",
"angular-moment": "0.2.0",
"moment": "2.1.0",
"angular-ui-bootstrap": "0.5.0",
"angular-ui-codemirror": "0.0.5",
"codemirror": "4.8.0",
"highcharts": "3.0.10",
"underscore": "1.5.1",
"pivottable": "~1.1.1",
@@ -29,7 +28,8 @@
"angular-ui-select": "0.8.2",
"font-awesome": "~4.2.0",
"mustache": "~1.0.0",
"canvg": "gabelerner/canvg"
"canvg": "gabelerner/canvg",
"angular-ui-bootstrap-bower": "~0.12.1"
},
"devDependencies": {
"angular-mocks": "1.2.18",

rd_ui/favicon.ico (new executable binary file, 1.1 KiB, not shown)


@@ -6,7 +6,7 @@ from statsd import StatsClient
from redash import settings
from redash.query_runner import import_query_runners
__version__ = '0.6.0'
__version__ = '0.6.1'
def setup_logging():

redash/admin.py (new file, 116 lines)

@@ -0,0 +1,116 @@
import json
from flask_admin.contrib.peewee import ModelView
from flask.ext.admin import Admin
from flask_admin.contrib.peewee.form import CustomModelConverter
from flask_admin.form.widgets import DateTimePickerWidget
from playhouse.postgres_ext import ArrayField, DateTimeTZField
from wtforms import fields
from wtforms.widgets import TextInput
from redash import models
from redash import query_runner
from redash.permissions import require_permission
class ArrayListField(fields.Field):
widget = TextInput()
def _value(self):
if self.data:
return u', '.join(self.data)
else:
return u''
def process_formdata(self, valuelist):
if valuelist:
self.data = [x.strip() for x in valuelist[0].split(',')]
else:
self.data = []
class JSONTextAreaField(fields.TextAreaField):
def process_formdata(self, valuelist):
if valuelist:
try:
json.loads(valuelist[0])
except ValueError:
raise ValueError(self.gettext(u'Invalid JSON'))
self.data = valuelist[0]
else:
self.data = ''
class PasswordHashField(fields.PasswordField):
def _value(self):
return u''
def process_formdata(self, valuelist):
if valuelist:
self.data = models.pwd_context.encrypt(valuelist[0])
else:
self.data = u''
class PgModelConverter(CustomModelConverter):
def __init__(self, view, additional=None):
additional = {ArrayField: self.handle_array_field,
DateTimeTZField: self.handle_datetime_tz_field}
super(PgModelConverter, self).__init__(view, additional)
self.view = view
def handle_array_field(self, model, field, **kwargs):
return field.name, ArrayListField(**kwargs)
def handle_datetime_tz_field(self, model, field, **kwargs):
kwargs['widget'] = DateTimePickerWidget()
return field.name, fields.DateTimeField(**kwargs)
class BaseModelView(ModelView):
model_form_converter = PgModelConverter
@require_permission('admin')
def is_accessible(self):
return True
class UserModelView(BaseModelView):
column_searchable_list = ('name', 'email')
form_excluded_columns = ('created_at', 'updated_at')
column_exclude_list = ('password_hash',)
form_overrides = dict(password_hash=PasswordHashField)
form_args = {
'password_hash': {'label': 'Password'}
}
def query_runner_type_formatter(view, context, model, name):
qr = query_runner.query_runners.get(model.type, None)
if qr:
return qr.name()
return model.type
class DataSourceModelView(BaseModelView):
form_overrides = dict(type=fields.SelectField, options=JSONTextAreaField)
form_args = dict(type={
'choices': [(k, r.name()) for k, r in query_runner.query_runners.iteritems()]
})
column_formatters = dict(type=query_runner_type_formatter)
column_filters = ('type',)
def init_admin(app):
admin = Admin(app, name='re:dash admin')
views = {
models.User: UserModelView(models.User),
models.DataSource: DataSourceModelView(models.DataSource)
}
for m in models.all_models:
if m in views:
admin.add_view(views[m])
else:
admin.add_view(BaseModelView(m))


@@ -1,6 +1,3 @@
from flask import make_response
from functools import update_wrapper
ONE_YEAR = 60 * 60 * 24 * 365.25
headers = {


@@ -108,7 +108,7 @@ def status_api():
manager_status = redis_connection.hgetall('redash:status')
status['manager'] = manager_status
status['manager']['outdated_queries_count'] = models.Query.outdated_queries().count()
status['manager']['outdated_queries_count'] = len(models.Query.outdated_queries())
queues = {}
for ds in models.DataSource.select():
@@ -148,7 +148,7 @@ def create_query_route():
query=query,
data_source=data_source_id,
user=current_user._get_current_object(),
ttl=-1)
schedule=None)
return redirect('/queries/{}'.format(query.id), 303)
@@ -219,10 +219,18 @@ class DataSourceListAPI(BaseResource):
return datasource.to_dict()
api.add_resource(DataSourceListAPI, '/api/data_sources', endpoint='data_sources')
class DataSourceSchemaAPI(BaseResource):
def get(self, data_source_id):
data_source = models.DataSource.get_by_id(data_source_id)
schema = data_source.get_schema()
return schema
api.add_resource(DataSourceSchemaAPI, '/api/data_sources/<data_source_id>/schema')
class DashboardRecentAPI(BaseResource):
def get(self):
return [d.to_dict() for d in models.Dashboard.recent(current_user.id).limit(20)]
@@ -336,7 +344,7 @@ class QueryListAPI(BaseResource):
@require_permission('create_query')
def post(self):
query_def = request.get_json(force=True)
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data']:
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'last_modified_by']:
query_def.pop(field, None)
query_def['user'] = self.current_user
@@ -357,7 +365,7 @@ class QueryAPI(BaseResource):
query = models.Query.get_by_id(query_id)
query_def = request.get_json(force=True)
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'user']:
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'user', 'last_modified_by']:
query_def.pop(field, None)
if 'latest_query_data_id' in query_def:
@@ -366,6 +374,8 @@ class QueryAPI(BaseResource):
if 'data_source_id' in query_def:
query_def['data_source'] = query_def.pop('data_source_id')
query_def['last_modified_by'] = self.current_user
# TODO: use #save() with #dirty_fields.
models.Query.update_instance(query_id, **query_def)
@@ -466,16 +476,19 @@ class QueryResultListAPI(BaseResource):
activity=params['query']
).save()
if params['ttl'] == 0:
max_age = int(params['max_age'])
if max_age == 0:
query_result = None
else:
query_result = models.QueryResult.get_latest(params['data_source_id'], params['query'], int(params['ttl']))
query_result = models.QueryResult.get_latest(params['data_source_id'], params['query'], max_age)
if query_result:
return {'query_result': query_result.to_dict()}
else:
data_source = models.DataSource.get_by_id(params['data_source_id'])
job = QueryTask.add_task(params['query'], data_source)
query_id = params.get('query_id', 'adhoc')
job = QueryTask.add_task(params['query'], data_source, metadata={"Username": self.current_user.name, "Query ID": query_id})
return {'job': job.to_dict()}


@@ -28,7 +28,7 @@ class Importer(object):
def import_query(self, user, query):
new_query = self._get_or_create(models.Query, query['id'], name=query['name'],
user=user,
ttl=-1,
schedule=None,
query=query['query'],
query_hash=query['query_hash'],
description=query['description'],


@@ -11,8 +11,10 @@ import peewee
from passlib.apps import custom_app_context as pwd_context
from playhouse.postgres_ext import ArrayField, DateTimeTZField, PostgresqlExtDatabase
from flask.ext.login import UserMixin, AnonymousUserMixin
import psycopg2
from redash import utils, settings
from redash import utils, settings, redis_connection
from redash.query_runner import get_query_runner
class Database(object):
@@ -61,7 +63,6 @@ class BaseModel(peewee.Model):
return cls.get(cls.id == model_id)
def pre_save(self, created):
# Handler for pre_save operations. Overriding if needed.
pass
def post_save(self, created):
@@ -76,6 +77,16 @@ class BaseModel(peewee.Model):
self.post_save(created)
class ModelTimestampsMixin(BaseModel):
updated_at = DateTimeTZField(default=datetime.datetime.now)
created_at = DateTimeTZField(default=datetime.datetime.now)
def pre_save(self, created):
super(ModelTimestampsMixin, self).pre_save(created)
self.updated_at = datetime.datetime.now()
class PermissionsCheckMixin(object):
def has_permission(self, permission):
return self.has_permissions((permission,))
@@ -133,7 +144,7 @@ class Group(BaseModel):
return unicode(self.id)
class User(BaseModel, UserMixin, PermissionsCheckMixin):
class User(ModelTimestampsMixin, BaseModel, UserMixin, PermissionsCheckMixin):
DEFAULT_GROUPS = ['default']
id = peewee.PrimaryKeyField()
@@ -149,7 +160,9 @@ class User(BaseModel, UserMixin, PermissionsCheckMixin):
return {
'id': self.id,
'name': self.name,
'email': self.email
'email': self.email,
'updated_at': self.updated_at,
'created_at': self.created_at
}
def __init__(self, *args, **kwargs):
@@ -212,7 +225,7 @@ class ActivityLog(BaseModel):
class DataSource(BaseModel):
id = peewee.PrimaryKeyField()
name = peewee.CharField()
name = peewee.CharField(unique=True)
type = peewee.CharField()
options = peewee.TextField()
queue_name = peewee.CharField(default="queries")
@@ -226,9 +239,31 @@ class DataSource(BaseModel):
return {
'id': self.id,
'name': self.name,
'type': self.type
'type': self.type,
'syntax': self.query_runner.syntax
}
def get_schema(self, refresh=False):
key = "data_source:schema:{}".format(self.id)
cache = None
if not refresh:
cache = redis_connection.get(key)
if cache is None:
query_runner = self.query_runner
schema = sorted(query_runner.get_schema(), key=lambda t: t['name'])
redis_connection.set(key, json.dumps(schema))
else:
schema = json.loads(cache)
return schema
@property
def query_runner(self):
return get_query_runner(self.type, self.options)
@classmethod
def all(cls):
return cls.select().order_by(cls.id.asc())
@@ -267,16 +302,16 @@ class QueryResult(BaseModel):
return unused_results
@classmethod
def get_latest(cls, data_source, query, ttl=0):
def get_latest(cls, data_source, query, max_age=0):
query_hash = utils.gen_query_hash(query)
if ttl == -1:
if max_age == -1:
query = cls.select().where(cls.query_hash == query_hash,
cls.data_source == data_source).order_by(cls.retrieved_at.desc())
else:
query = cls.select().where(cls.query_hash == query_hash, cls.data_source == data_source,
peewee.SQL("retrieved_at + interval '%s second' >= now() at time zone 'utc'",
ttl)).order_by(cls.retrieved_at.desc())
max_age)).order_by(cls.retrieved_at.desc())
return query.first()
@@ -303,7 +338,28 @@ class QueryResult(BaseModel):
return u"%d | %s | %s" % (self.id, self.query_hash, self.retrieved_at)
class Query(BaseModel):
def should_schedule_next(previous_iteration, now, schedule):
if schedule.isdigit():
ttl = int(schedule)
next_iteration = previous_iteration + datetime.timedelta(seconds=ttl)
else:
hour, minute = schedule.split(':')
hour, minute = int(hour), int(minute)
# The following logic is needed for cases like this one:
# - The query is scheduled to run at 23:59.
# - The scheduler wakes up at 00:01.
# - A naive timestamp comparison would skip the execution until the next day.
normalized_previous_iteration = previous_iteration.replace(hour=hour, minute=minute)
if normalized_previous_iteration > previous_iteration:
previous_iteration = normalized_previous_iteration - datetime.timedelta(days=1)
next_iteration = (previous_iteration + datetime.timedelta(days=1)).replace(hour=hour, minute=minute)
return now > next_iteration
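As a sanity check of the two schedule formats (an interval in seconds vs. an exact HH:MM), a small sketch assuming the function is importable from redash.models:

import datetime
from redash.models import should_schedule_next

now = datetime.datetime(2015, 5, 12, 0, 1)         # scheduler wakes at 00:01
last_run = datetime.datetime(2015, 5, 11, 23, 50)  # last ran just before its slot

should_schedule_next(last_run, now, "3600")   # False: only 11 minutes elapsed
should_schedule_next(last_run, now, "23:59")  # True: the missed 23:59 slot is
                                              # caught by the day-change normalization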
class Query(ModelTimestampsMixin, BaseModel):
id = peewee.PrimaryKeyField()
data_source = peewee.ForeignKeyField(DataSource)
latest_query_data = peewee.ForeignKeyField(QueryResult, null=True)
@@ -312,11 +368,11 @@ class Query(BaseModel):
query = peewee.TextField()
query_hash = peewee.CharField(max_length=32)
api_key = peewee.CharField(max_length=40)
ttl = peewee.IntegerField()
user_email = peewee.CharField(max_length=360, null=True)
user = peewee.ForeignKeyField(User)
last_modified_by = peewee.ForeignKeyField(User, null=True, related_name="modified_queries")
is_archived = peewee.BooleanField(default=False, index=True)
created_at = DateTimeTZField(default=datetime.datetime.now)
schedule = peewee.CharField(max_length=10, null=True)
class Meta:
db_table = 'queries'
@@ -329,15 +385,17 @@ class Query(BaseModel):
'description': self.description,
'query': self.query,
'query_hash': self.query_hash,
'ttl': self.ttl,
'schedule': self.schedule,
'api_key': self.api_key,
'is_archived': self.is_archived,
'updated_at': self.updated_at,
'created_at': self.created_at,
'data_source_id': self._data.get('data_source', None)
}
if with_user:
d['user'] = self.user.to_dict()
d['last_modified_by'] = self.last_modified_by.to_dict()
else:
d['user_id'] = self._data['user']
@@ -353,7 +411,7 @@ class Query(BaseModel):
def archive(self):
self.is_archived = True
self.ttl = -1
self.schedule = None
for vis in self.visualizations:
for w in vis.widgets:
@@ -374,21 +432,19 @@ class Query(BaseModel):
@classmethod
def outdated_queries(cls):
# TODO: this will only find scheduled queries that were executed before. I think this is
# a reasonable assumption, but worth revisiting.
outdated_queries_ids = cls.select(
peewee.Func('first_value', cls.id).over(partition_by=[cls.query_hash, cls.data_source])) \
.join(QueryResult) \
.where(cls.ttl > 0,
cls.is_archived==False,
(QueryResult.retrieved_at +
(cls.ttl * peewee.SQL("interval '1 second'"))) <
peewee.SQL("(now() at time zone 'utc')"))
queries = cls.select(cls, QueryResult.retrieved_at, DataSource)\
.join(QueryResult)\
.switch(Query).join(DataSource)\
.where(cls.schedule != None)
queries = cls.select(cls, DataSource).join(DataSource) \
.where(cls.id << outdated_queries_ids)
now = datetime.datetime.utcnow().replace(tzinfo=psycopg2.tz.FixedOffsetTimezone(offset=0, name=None))
outdated_queries = {}
for query in queries:
if should_schedule_next(query.latest_query_data.retrieved_at, now, query.schedule):
key = "{}:{}".format(query.query_hash, query.data_source.id)
outdated_queries[key] = query
return queries
return outdated_queries.values()
@classmethod
def search(cls, term):
@@ -425,9 +481,13 @@ class Query(BaseModel):
return update.execute()
def pre_save(self, created):
super(Query, self).pre_save(created)
self.query_hash = utils.gen_query_hash(self.query)
self._set_api_key()
if self.last_modified_by is None:
self.last_modified_by = self.user
def post_save(self, created):
if created:
self._create_default_visualizations()
@@ -455,7 +515,7 @@ class Query(BaseModel):
return unicode(self.id)
class Dashboard(BaseModel):
class Dashboard(ModelTimestampsMixin, BaseModel):
id = peewee.PrimaryKeyField()
slug = peewee.CharField(max_length=140, index=True)
name = peewee.CharField(max_length=100)
@@ -464,7 +524,6 @@ class Dashboard(BaseModel):
layout = peewee.TextField()
dashboard_filters_enabled = peewee.BooleanField(default=False)
is_archived = peewee.BooleanField(default=False, index=True)
created_at = DateTimeTZField(default=datetime.datetime.now)
class Meta:
db_table = 'dashboards'
@@ -506,7 +565,9 @@ class Dashboard(BaseModel):
'user_id': self._data['user'],
'layout': layout,
'dashboard_filters_enabled': self.dashboard_filters_enabled,
'widgets': widgets_layout
'widgets': widgets_layout,
'updated_at': self.updated_at,
'created_at': self.created_at
}
@classmethod
@@ -539,7 +600,7 @@ class Dashboard(BaseModel):
return u"%s=%s" % (self.id, self.name)
class Visualization(BaseModel):
class Visualization(ModelTimestampsMixin, BaseModel):
id = peewee.PrimaryKeyField()
type = peewee.CharField(max_length=100)
query = peewee.ForeignKeyField(Query, related_name='visualizations')
@@ -557,6 +618,8 @@ class Visualization(BaseModel):
'name': self.name,
'description': self.description,
'options': json.loads(self.options),
'updated_at': self.updated_at,
'created_at': self.created_at
}
if with_query:
@@ -568,14 +631,13 @@ class Visualization(BaseModel):
return u"%s %s" % (self.id, self.type)
class Widget(BaseModel):
class Widget(ModelTimestampsMixin, BaseModel):
id = peewee.PrimaryKeyField()
visualization = peewee.ForeignKeyField(Visualization, related_name='widgets', null=True)
text = peewee.TextField(null=True)
width = peewee.IntegerField()
options = peewee.TextField()
dashboard = peewee.ForeignKeyField(Dashboard, related_name='widgets', index=True)
created_at = DateTimeTZField(default=datetime.datetime.now)
# unused; kept for backward compatibility:
type = peewee.CharField(max_length=100, null=True)
@@ -590,7 +652,9 @@ class Widget(BaseModel):
'width': self.width,
'options': json.loads(self.options),
'dashboard_id': self._data['dashboard'],
'text': self.text
'text': self.text,
'updated_at': self.updated_at,
'created_at': self.created_at
}
if self.visualization and self.visualization.id:

View File

@@ -15,6 +15,7 @@ __all__ = [
'TYPE_STRING',
'TYPE_DATE',
'TYPE_FLOAT',
'SUPPORTED_COLUMN_TYPES',
'register',
'get_query_runner',
'import_query_runners'
@@ -28,10 +29,19 @@ TYPE_STRING = 'string'
TYPE_DATETIME = 'datetime'
TYPE_DATE = 'date'
SUPPORTED_COLUMN_TYPES = set([
TYPE_INTEGER,
TYPE_FLOAT,
TYPE_BOOLEAN,
TYPE_STRING,
TYPE_DATETIME,
TYPE_DATE
])
class BaseQueryRunner(object):
def __init__(self, configuration):
jsonschema.validate(configuration, self.configuration_schema())
self.syntax = 'sql'
self.configuration = configuration
@classmethod
@@ -57,6 +67,9 @@ class BaseQueryRunner(object):
def run_query(self, query):
raise NotImplementedError()
def get_schema(self):
return []
@classmethod
def to_dict(cls):
return {
@@ -101,4 +114,4 @@ def validate_configuration(query_runner_type, configuration_json):
def import_query_runners(query_runner_imports):
for runner_import in query_runner_imports:
__import__(runner_import)
__import__(runner_import)
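For context on this interface, a minimal, purely illustrative runner (the Echo class below is not part of this change) would subclass BaseQueryRunner and register itself like so:

import json
from redash.query_runner import BaseQueryRunner, TYPE_STRING, register

class Echo(BaseQueryRunner):
    # Illustrative runner: returns the query text as a single-cell result.
    @classmethod
    def type(cls):
        return "echo"

    @classmethod
    def configuration_schema(cls):
        return {'type': 'object', 'properties': {}}

    def run_query(self, query):
        data = {'columns': [{'name': 'echo', 'friendly_name': 'Echo', 'type': TYPE_STRING}],
                'rows': [{'echo': query}]}
        return json.dumps(data), None

register(Echo)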

View File

@@ -5,6 +5,8 @@ import logging
import sys
import time
import requests
from redash.query_runner import *
from redash.utils import JSONEncoder
@@ -15,6 +17,7 @@ try:
from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.client import SignedJwtAssertionCredentials
from oauth2client import gce
enabled = True
except ImportError:
@@ -66,18 +69,6 @@ def _load_key(filename):
f.close()
def _get_bigquery_service(service_account, private_key):
scope = [
"https://www.googleapis.com/auth/bigquery",
]
credentials = SignedJwtAssertionCredentials(service_account, private_key, scope=scope)
http = httplib2.Http()
http = credentials.authorize(http)
return build("bigquery", "v2", http=http)
def _get_query_results(jobs, project_id, job_id, start_index):
query_reply = jobs.getQueryResults(projectId=project_id, jobId=job_id, startIndex=start_index).execute()
logging.debug('query_reply %s', query_reply)
@@ -117,11 +108,23 @@ class BigQuery(BaseQueryRunner):
def __init__(self, configuration_json):
super(BigQuery, self).__init__(configuration_json)
self.private_key = _load_key(self.configuration["privateKey"])
def _get_bigquery_service(self):
scope = [
"https://www.googleapis.com/auth/bigquery",
]
private_key = _load_key(self.configuration["privateKey"])
credentials = SignedJwtAssertionCredentials(self.configuration['serviceAccount'], private_key, scope=scope)
http = httplib2.Http()
http = credentials.authorize(http)
return build("bigquery", "v2", http=http)
def _get_project_id(self):
return self.configuration["projectId"]
def run_query(self, query):
bigquery_service = _get_bigquery_service(self.configuration["serviceAccount"],
self.private_key)
bigquery_service = self._get_bigquery_service()
jobs = bigquery_service.jobs()
job_data = {
@@ -134,13 +137,13 @@ class BigQuery(BaseQueryRunner):
logger.debug("BigQuery got query: %s", query)
project_id = self.configuration["projectId"]
project_id = self._get_project_id()
try:
insert_response = jobs.insert(projectId=project_id, body=job_data).execute()
current_row = 0
query_reply = _get_query_results(jobs, project_id=project_id,
job_id=insert_response['jobReference']['jobId'], start_index=current_row)
job_id=insert_response['jobReference']['jobId'], start_index=current_row)
logger.debug("bigquery replied: %s", query_reply)
@@ -176,4 +179,26 @@ class BigQuery(BaseQueryRunner):
return json_data, error
register(BigQuery)
class BigQueryGCE(BigQuery):
@classmethod
def type(cls):
return "bigquery_gce"
@classmethod
def configuration_schema(cls):
return {}
def _get_project_id(self):
return requests.get('http://metadata/computeMetadata/v1/project/project-id', headers={'Metadata-Flavor': 'Google'}).content
def _get_bigquery_service(self):
credentials = gce.AppAssertionCredentials(scope='https://www.googleapis.com/auth/bigquery')
http = httplib2.Http()
http = credentials.authorize(http)
return build("bigquery", "v2", http=http)
register(BigQuery)
register(BigQueryGCE)
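Since BigQueryGCE takes no configuration, both credentials and the project id come from the instance environment. A quick, illustrative probe of the same metadata endpoint the runner hits (run on a GCE instance):

# Fetch the project id BigQueryGCE will use.
import requests

resp = requests.get('http://metadata/computeMetadata/v1/project/project-id',
                    headers={'Metadata-Flavor': 'Google'})
print resp.content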

View File

@@ -68,8 +68,8 @@ class MongoDB(BaseQueryRunner):
'type': 'string',
'title': 'Replica Set Name'
},
'required': ['connectionString']
}
},
'required': ['connectionString']
}
@classmethod
@@ -83,6 +83,8 @@ class MongoDB(BaseQueryRunner):
def __init__(self, configuration_json):
super(MongoDB, self).__init__(configuration_json)
self.syntax = 'json'
self.db_name = self.configuration["dbName"]
self.is_replica_set = True if "replicaSetName" in self.configuration and self.configuration["replicaSetName"] else False
@@ -130,8 +132,8 @@ class MongoDB(BaseQueryRunner):
s = None
if "sort" in query_data and query_data["sort"]:
s = []
for field_name in query_data["sort"]:
s.append((field_name, query_data["sort"][field_name]))
for field_data in query_data["sort"]:
s.append((field_data["name"], field_data["direction"]))
columns = []
rows = []
@@ -144,6 +146,9 @@ class MongoDB(BaseQueryRunner):
else:
cursor = db[collection].find(q, f)
if "limit" in query_data and query_data["limit"]:
cursor = cursor.limit(query_data["limit"])
for r in cursor:
for k in r:
if _get_column_by_name(columns, k) is None:
@@ -175,4 +180,4 @@ class MongoDB(BaseQueryRunner):
return json_data, error
register(MongoDB)
register(MongoDB)
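Note the sort format change: a name-to-direction mapping became an ordered list of {name, direction} pairs (JSON objects don't guarantee key order), and "limit" is now honored. A hypothetical query in the new format, with illustrative collection and field names:

query = {
    "collection": "events",
    "query": {"type": "login"},
    "sort": [
        {"name": "timestamp", "direction": -1},
        {"name": "user_id", "direction": 1},
    ],
    "limit": 100,
}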

View File

@@ -27,7 +27,10 @@ class Mysql(BaseQueryRunner):
'db': {
'type': 'string',
'title': 'Database name'
}
},
"port": {
"type": "number"
},
},
'required': ['db']
}
@@ -44,13 +47,49 @@ class Mysql(BaseQueryRunner):
def __init__(self, configuration_json):
super(Mysql, self).__init__(configuration_json)
def get_schema(self):
query = """
SELECT col.table_schema,
col.table_name,
col.column_name
FROM `information_schema`.`columns` col
INNER JOIN
(SELECT table_schema,
TABLE_NAME
FROM information_schema.tables
WHERE table_type <> 'SYSTEM VIEW' AND table_schema NOT IN ('performance_schema', 'mysql')) tables ON tables.table_schema = col.table_schema
AND tables.TABLE_NAME = col.TABLE_NAME;
"""
results, error = self.run_query(query)
if error is not None:
raise Exception("Failed getting schema.")
results = json.loads(results)
schema = {}
for row in results['rows']:
if row['table_schema'] != self.configuration['db']:
table_name = '{}.{}'.format(row['table_schema'], row['table_name'])
else:
table_name = row['table_name']
if table_name not in schema:
schema[table_name] = {'name': table_name, 'columns': []}
schema[table_name]['columns'].append(row['column_name'])
return schema.values()
def run_query(self, query):
import MySQLdb
connection = MySQLdb.connect(self.configuration.get('host', ''),
self.configuration.get('user', ''),
self.configuration.get('passwd', ''),
self.configuration['db'],
connection = MySQLdb.connect(host=self.configuration.get('host', ''),
user=self.configuration.get('user', ''),
passwd=self.configuration.get('passwd', ''),
db=self.configuration['db'],
port=self.configuration.get('port', 3306),
charset='utf8', use_unicode=True)
cursor = connection.cursor()
@@ -94,4 +133,4 @@ class Mysql(BaseQueryRunner):
return json_data, error
register(Mysql)
register(Mysql)
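This MySQL get_schema and the PostgreSQL one below return the same shape: a flat list of table descriptors, with tables outside the default database/schema prefix-qualified. Roughly, with illustrative names:

[
    {'name': 'users', 'columns': ['id', 'email', 'created_at']},
    {'name': 'analytics.events', 'columns': ['user_id', 'action']},
]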

View File

@@ -83,6 +83,34 @@ class PostgreSQL(BaseQueryRunner):
self.connection_string = " ".join(values)
def get_schema(self):
query = """
SELECT table_schema, table_name, column_name
FROM information_schema.columns
WHERE table_schema NOT IN ('pg_catalog', 'information_schema');
"""
results, error = self.run_query(query)
if error is not None:
raise Exception("Failed getting schema.")
results = json.loads(results)
schema = {}
for row in results['rows']:
if row['table_schema'] != 'public':
table_name = '{}.{}'.format(row['table_schema'], row['table_name'])
else:
table_name = row['table_name']
if table_name not in schema:
schema[table_name] = {'name': table_name, 'columns': []}
schema[table_name]['columns'].append(row['column_name'])
return schema.values()
def run_query(self, query):
connection = psycopg2.connect(self.connection_string, async=True)
_wait(connection)

View File

@@ -1,9 +1,43 @@
import sys
import json
import logging
from redash.query_runner import *
from redash import models
import importlib
logger = logging.getLogger(__name__)
from RestrictedPython import compile_restricted
from RestrictedPython.Guards import safe_builtins
ALLOWED_MODULES = {}
def custom_write(obj):
"""
Custom hook that controls how writes to objects/lists/tuples/dicts behave in
RestrictedPython
"""
return obj
def custom_import(name, globals=None, locals=None, fromlist=(), level=0):
if name in ALLOWED_MODULES:
m = None
if ALLOWED_MODULES[name] is None:
m = importlib.import_module(name)
ALLOWED_MODULES[name] = m
else:
m = ALLOWED_MODULES[name]
return m
raise Exception("'{0}' is not configured as a supported import module".format(name))
def custom_get_item(obj, key):
return obj[key]
def get_query_result(query_id):
try:
@@ -20,11 +54,14 @@ def get_query_result(query_id):
return json.loads(query.latest_query_data.data)
def execute_query(data_source_name, query):
def execute_query(data_source_name_or_id, query):
try:
data_source = models.DataSource.get(models.DataSource.name==data_source_name)
if type(data_source_name_or_id) == int:
data_source = models.DataSource.get_by_id(data_source_name_or_id)
else:
data_source = models.DataSource.get(models.DataSource.name==data_source_name_or_id)
except models.DataSource.DoesNotExist:
raise Exception("Wrong data source name: %s." % data_source_name)
raise Exception("Wrong data source name/id: %s." % data_source_name_or_id)
query_runner = get_query_runner(data_source.type, data_source.options)
@@ -36,6 +73,28 @@ def execute_query(data_source_name, query):
return json.loads(data)
def add_result_column(result, column_name, friendly_name, column_type):
""" Helper function to add columns inside a Python script running in re:dash in an easier way """
if column_type not in SUPPORTED_COLUMN_TYPES:
raise Exception("'{0}' is not a supported column type".format(column_type))
if not "columns" in result:
result["columns"] = []
result["columns"].append({
"name" : column_name,
"friendly_name" : friendly_name,
"type" : column_type
})
def add_result_row(result, values):
if not "rows" in result:
result["rows"] = []
result["rows"].append(values)
class Python(BaseQueryRunner):
"""
This is very, very unsafe. Use at your own risk with people you really trust.
@@ -45,24 +104,66 @@ class Python(BaseQueryRunner):
return {
'type': 'object',
'properties': {
}
'allowedImportModules': {
'type': 'string',
'title': 'Modules to import prior to running the script'
}
},
}
@classmethod
def enabled(cls):
return True
@classmethod
def annotate_query(cls):
return False
def __init__(self, configuration_json):
global ALLOWED_MODULES
super(Python, self).__init__(configuration_json)
self.syntax = "python"
if self.configuration.get("allowedImportModules", None):
for item in self.configuration["allowedImportModules"].split(","):
ALLOWED_MODULES[item] = None
def run_query(self, query):
try:
error = None
script_globals = {'get_query_result': get_query_result, 'execute_query': execute_query}
script_locals = {'result': None}
# TODO: timeout, sandboxing
exec query in script_globals, script_locals
code = compile_restricted(query, '<string>', 'exec')
safe_builtins["_write_"] = custom_write
safe_builtins["__import__"] = custom_import
safe_builtins["_getattr_"] = getattr
safe_builtins["getattr"] = getattr
safe_builtins["_setattr_"] = setattr
safe_builtins["setattr"] = setattr
safe_builtins["_getitem_"] = custom_get_item
script_locals = { "result" : { "rows" : [], "columns" : [] } }
restricted_globals = dict(__builtins__=safe_builtins)
restricted_globals["get_query_result"] = get_query_result
restricted_globals["execute_query"] = execute_query
restricted_globals["add_result_column"] = add_result_column
restricted_globals["add_result_row"] = add_result_row
restricted_globals["TYPE_DATETIME"] = TYPE_DATETIME
restricted_globals["TYPE_BOOLEAN"] = TYPE_BOOLEAN
restricted_globals["TYPE_INTEGER"] = TYPE_INTEGER
restricted_globals["TYPE_STRING"] = TYPE_STRING
restricted_globals["TYPE_DATE"] = TYPE_DATE
restricted_globals["TYPE_FLOAT"] = TYPE_FLOAT
# TODO: Figure out the best way to have a timeout on a script
# One option is to use ETA with Celery + timeouts on workers
# And replacement of worker process every X requests handled.
exec(code) in restricted_globals, script_locals
if script_locals['result'] is None:
raise Exception("result wasn't set to value.")
@@ -76,4 +177,5 @@ class Python(BaseQueryRunner):
return json_data, error
register(Python)
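A hypothetical user script for this runner, using the helpers and type constants injected into the sandbox above; the data source name and SQL are illustrative, and 'result' is pre-seeded by the runner and must end up populated:

data = execute_query("my_postgres", "SELECT count(*) AS c FROM users")

add_result_column(result, "total_users", "Total Users", TYPE_INTEGER)
add_result_row(result, {"total_users": data["rows"][0]["c"]})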

View File

@@ -30,7 +30,7 @@ class Script(BaseQueryRunner):
def __init__(self, configuration_json):
super(Script, self).__init__(configuration_json)
# Poor man's protection against running scripts from output the scripts directory
# Poor man's protection against running scripts from outside the scripts directory
if self.configuration["path"].find("../") > -1:
raise ValidationError("Scripts can only be run from the configured scripts directory")
@@ -41,11 +41,13 @@ class Script(BaseQueryRunner):
query = query.strip()
script = os.path.join(self.configuration["path"], query)
script = os.path.join(self.configuration["path"], query.split(" ")[0])
if not os.path.exists(script):
return None, "Script '%s' not found in script directory" % query
output = subprocess.check_output(script, shell=False)
script = os.path.join(self.configuration["path"], query)
output = subprocess.check_output(script.split(" "), shell=False)
if output is not None:
output = output.strip()
if output != "":
@@ -62,4 +64,4 @@ class Script(BaseQueryRunner):
return json_data, error
register(Script)
register(Script)
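With this fix the query text can carry arguments; only the first whitespace-separated token must be a file under the configured directory. A sketch of what the runner effectively does (the path, script name, and flags are illustrative):

import os
import subprocess

path = "/opt/redash/scripts"              # illustrative configured path
query = "nightly_report.sh --days 7"      # illustrative query text

script = os.path.join(path, query.split(" ")[0])
if not os.path.exists(script):
    raise Exception("Script not found in script directory")

# Equivalent to the runner's call: the full command is split, never shelled out.
output = subprocess.check_output(os.path.join(path, query).split(" "), shell=False)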

View File

@@ -47,12 +47,13 @@ class QueryTask(object):
return self._async_result.id
@classmethod
def add_task(cls, query, data_source, scheduled=False):
def add_task(cls, query, data_source, scheduled=False, metadata={}):
query_hash = gen_query_hash(query)
logging.info("[Manager][%s] Inserting job", query_hash)
logging.info("[Manager] Metadata: [%s]", metadata)
try_count = 0
job = None
while try_count < cls.MAX_RETRIES:
try_count += 1
@@ -77,8 +78,9 @@ class QueryTask(object):
else:
queue_name = data_source.queue_name
result = execute_query.apply_async(args=(query, data_source.id), queue=queue_name)
result = execute_query.apply_async(args=(query, data_source.id, metadata), queue=queue_name)
job = cls(async_result=result)
logging.info("[Manager][%s] Created new job: %s", query_hash, job.id)
pipe.set(cls._job_lock_id(query_hash, data_source.id), job.id, settings.JOB_EXPIRY_TIME)
pipe.execute()
@@ -146,8 +148,8 @@ def refresh_queries():
outdated_queries_count = 0
for query in models.Query.outdated_queries():
# TODO: this should go into lower priority
QueryTask.add_task(query.query, query.data_source, scheduled=True)
QueryTask.add_task(query.query, query.data_source, scheduled=True,
metadata={'Query ID': query.id, 'Username': 'Scheduled'})
outdated_queries_count += 1
statsd_client.gauge('manager.outdated_queries', outdated_queries_count)
@@ -197,9 +199,9 @@ def cleanup_tasks():
logger.warning("%s is ready (%s), removing lock.", lock_keys[i], t.celery_status)
redis_connection.delete(lock_keys[i])
if t.celery_status == 'STARTED' and t.id not in all_tasks:
logger.warning("Couldn't find active job for: %s, removing lock.", lock_keys[i])
redis_connection.delete(lock_keys[i])
# if t.celery_status == 'STARTED' and t.id not in all_tasks:
# logger.warning("Couldn't find active job for: %s, removing lock.", lock_keys[i])
# redis_connection.delete(lock_keys[i])
@celery.task(base=BaseTask)
@@ -218,9 +220,19 @@ def cleanup_query_results():
logger.info("Deleted %d unused query results out of total of %d." % (deleted_count, total_unused_query_results))
@celery.task(base=BaseTask)
def refresh_schemas():
"""
Refreshes the schema of all data sources.
"""
for ds in models.DataSource.all():
logger.info("Refreshing schema for: {}".format(ds.name))
ds.get_schema(refresh=True)
@celery.task(bind=True, base=BaseTask, track_started=True)
def execute_query(self, query, data_source_id):
# TODO: maybe this should be a class?
def execute_query(self, query, data_source_id, metadata):
start_time = time.time()
logger.info("Loading data source (%d)...", data_source_id)
@@ -236,9 +248,15 @@ def execute_query(self, query, data_source_id):
query_runner = get_query_runner(data_source.type, data_source.options)
if query_runner.annotate_query():
# TODO: annotate with queue name
annotated_query = "/* Task Id: %s, Query hash: %s */ %s" % \
(self.request.id, query_hash, query)
metadata['Task ID'] = self.request.id
metadata['Query Hash'] = query_hash
metadata['Queue'] = self.request.delivery_info['routing_key']
annotation = ", ".join(["{}: {}".format(k, v) for k, v in metadata.iteritems()])
logging.debug("Annotation: %s", annotation)
annotated_query = "/* {} */ {}".format(annotation, query)
else:
annotated_query = query
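Putting it together, the metadata becomes a single leading comment on the query. A sketch with illustrative values (dict iteration order isn't guaranteed in Python 2, so key order may vary):

metadata = {'Username': 'arik@example.com', 'Query ID': 42,
            'Task ID': 'celery-task-uuid', 'Query Hash': '7f3c0d', 'Queue': 'queries'}
annotation = ", ".join(["{}: {}".format(k, v) for k, v in metadata.iteritems()])
annotated_query = "/* {} */ {}".format(annotation, "SELECT count(*) FROM users")
# -> /* Username: arik@example.com, Query ID: 42, ... */ SELECT count(*) FROM users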

View File

@@ -15,6 +15,10 @@ celery_schedule = {
'cleanup_tasks': {
'task': 'redash.tasks.cleanup_tasks',
'schedule': timedelta(minutes=5)
},
'refresh_schemas': {
'task': 'redash.tasks.refresh_schemas',
'schedule': timedelta(minutes=30)
}
}

View File

@@ -4,6 +4,8 @@ from flask.ext.restful import Api
from redash import settings, utils
from redash.models import db
from redash.admin import init_admin
__version__ = '0.4.0'
@@ -14,6 +16,7 @@ app = Flask(__name__,
api = Api(app)
init_admin(app)
# configure our database
settings.DATABASE_CONFIG.update({'threadlocals': True})

View File

@@ -1,4 +1,5 @@
Flask==0.10.1
Flask-Admin==1.1.0
Flask-RESTful==0.2.10
Flask-Login==0.2.9
Flask-OAuth==0.12
@@ -14,7 +15,7 @@ psycopg2==2.5.2
python-dateutil==2.1
pytz==2013.9
redis==2.7.5
requests==2.2.0
requests==2.3.0
six==1.5.2
sqlparse==0.1.8
wsgiref==0.1.2
@@ -25,3 +26,5 @@ gunicorn==18.0
celery==3.1.11
jsonschema==2.4.0
click==3.3
RestrictedPython==3.6.0
wtf-peewee==0.2.3

View File

@@ -1,12 +1,17 @@
import os
os.environ['REDASH_REDIS_URL'] = "redis://localhost:6379/5"
import logging
from unittest import TestCase
import datetime
from redash import settings
settings.DATABASE_CONFIG = {
'name': 'circle_test',
'threadlocals': True
}
from redash import models
from redash import models, redis_connection
logging.getLogger('peewee').setLevel(logging.INFO)
@@ -18,4 +23,19 @@ class BaseTestCase(TestCase):
def tearDown(self):
models.db.close_db(None)
models.create_db(False, True)
models.create_db(False, True)
redis_connection.flushdb()
def assertResponseEqual(self, expected, actual):
for k, v in expected.iteritems():
if isinstance(v, datetime.datetime) or isinstance(actual[k], datetime.datetime):
continue
if isinstance(v, list):
continue
if isinstance(v, dict):
self.assertResponseEqual(v, actual[k])
continue
self.assertEqual(v, actual[k], "{} not equal (expected: {}, actual: {}).".format(k, v, actual[k]))

View File

@@ -45,9 +45,9 @@ user_factory = ModelFactory(redash.models.User,
data_source_factory = ModelFactory(redash.models.DataSource,
name='Test',
name=Sequence('Test {}'),
type='pg',
options='')
options='{"dbname": "test"}')
dashboard_factory = ModelFactory(redash.models.Dashboard,
@@ -58,9 +58,9 @@ query_factory = ModelFactory(redash.models.Query,
name='New Query',
description='',
query='SELECT 1',
ttl=-1,
user=user_factory.create,
is_archived=False,
schedule=None,
data_source=data_source_factory.create)
query_result_factory = ModelFactory(redash.models.QueryResult,
@@ -83,4 +83,4 @@ widget_factory = ModelFactory(redash.models.Widget,
width=1,
options='{}',
dashboard=dashboard_factory.create,
visualization=visualization_factory.create)
visualization=visualization_factory.create)

View File

@@ -1,6 +1,7 @@
from contextlib import contextmanager
import json
import time
import datetime
from unittest import TestCase
from flask import url_for
from flask.ext.login import current_user
@@ -104,7 +105,11 @@ class DashboardAPITest(BaseTestCase, AuthenticationTestMixin):
with app.test_client() as c, authenticated_user(c):
rv = c.get('/api/dashboards/{0}'.format(d1.slug))
self.assertEquals(rv.status_code, 200)
self.assertDictEqual(json.loads(rv.data), d1.to_dict(with_widgets=True))
expected = d1.to_dict(with_widgets=True)
actual = json.loads(rv.data)
self.assertResponseEqual(expected, actual)
def test_get_non_existint_dashbaord(self):
with app.test_client() as c, authenticated_user(c):
@@ -222,10 +227,13 @@ class QueryAPITest(BaseTestCase, AuthenticationTestMixin):
def test_update_query(self):
query = query_factory.create()
with app.test_client() as c, authenticated_user(c):
other_user = user_factory.create()
with app.test_client() as c, authenticated_user(c, user=other_user):
rv = json_request(c.post, '/api/queries/{0}'.format(query.id), data={'name': 'Testing'})
self.assertEqual(rv.status_code, 200)
self.assertEquals(rv.json['name'], 'Testing')
self.assertEqual(rv.json['name'], 'Testing')
self.assertEqual(rv.json['last_modified_by']['id'], other_user.id)
def test_create_query(self):
user = user_factory.create()
@@ -233,7 +241,7 @@ class QueryAPITest(BaseTestCase, AuthenticationTestMixin):
query_data = {
'name': 'Testing',
'query': 'SELECT 1',
'ttl': 3600,
'schedule': "3600",
'data_source_id': data_source.id
}
@@ -256,9 +264,7 @@ class QueryAPITest(BaseTestCase, AuthenticationTestMixin):
rv = json_request(c.get, '/api/queries/{0}'.format(query.id))
self.assertEquals(rv.status_code, 200)
d = query.to_dict(with_visualizations=True)
d.pop('created_at')
self.assertDictContainsSubset(d, rv.json)
self.assertResponseEqual(rv.json, query.to_dict(with_visualizations=True))
def test_get_all_queries(self):
queries = [query_factory.create() for _ in range(10)]

View File

@@ -1,10 +1,13 @@
#encoding: utf8
import datetime
import json
from unittest import TestCase
import mock
from tests import BaseTestCase
from redash import models
from factories import dashboard_factory, query_factory, data_source_factory, query_result_factory, user_factory, widget_factory
from redash.utils import gen_query_hash
from redash import query_runner
class DashboardTest(BaseTestCase):
@@ -69,20 +72,98 @@ class QueryTest(BaseTestCase):
q = query_factory.create()
self.assertEquals(q.visualizations.count(), 1)
def test_save_updates_updated_at_field(self):
# This should be a test of ModelTimestampsMixin, but it's easier to test in the context of an existing model... :-\
one_day_ago = datetime.datetime.today() - datetime.timedelta(days=1)
q = query_factory.create(created_at=one_day_ago, updated_at=one_day_ago)
q.save()
self.assertNotEqual(q.updated_at, one_day_ago)
class ShouldScheduleNextTest(TestCase):
def test_interval_schedule_that_needs_reschedule(self):
now = datetime.datetime.now()
two_hours_ago = now - datetime.timedelta(hours=2)
self.assertTrue(models.should_schedule_next(two_hours_ago, now, "3600"))
def test_interval_schedule_that_doesnt_need_reschedule(self):
now = datetime.datetime.now()
half_an_hour_ago = now - datetime.timedelta(minutes=30)
self.assertFalse(models.should_schedule_next(half_an_hour_ago, now, "3600"))
def test_exact_time_that_needs_reschedule(self):
now = datetime.datetime.now()
yesterday = now - datetime.timedelta(days=1)
schedule = "{:02d}:00".format(now.hour - 3)
self.assertTrue(models.should_schedule_next(yesterday, now, schedule))
def test_exact_time_that_doesnt_need_reschedule(self):
now = datetime.datetime.now()
yesterday = (now - datetime.timedelta(days=1)).replace(hour=now.hour+3, minute=now.minute+1)
schedule = "{:02d}:00".format(now.hour + 3)
self.assertFalse(models.should_schedule_next(yesterday, now, schedule))
def test_exact_time_with_day_change(self):
now = datetime.datetime.now().replace(hour=0, minute=1)
previous = (now - datetime.timedelta(days=2)).replace(hour=23, minute=59)
schedule = "23:59".format(now.hour + 3)
self.assertTrue(models.should_schedule_next(previous, now, schedule))
class QueryOutdatedQueriesTest(BaseTestCase):
# TODO: this test can be refactored to use a mocked should_schedule_next to simplify it.
def test_outdated_queries_skips_unscheduled_queries(self):
query = query_factory.create(schedule=None)
queries = models.Query.outdated_queries()
self.assertNotIn(query, queries)
def test_outdated_queries_works_with_ttl_based_schedule(self):
two_hours_ago = datetime.datetime.now() - datetime.timedelta(hours=2)
query = query_factory.create(schedule="3600")
query_result = query_result_factory.create(query=query, retrieved_at=two_hours_ago)
query.latest_query_data = query_result
query.save()
queries = models.Query.outdated_queries()
self.assertIn(query, queries)
def test_skips_fresh_queries(self):
half_an_hour_ago = datetime.datetime.now() - datetime.timedelta(minutes=30)
query = query_factory.create(schedule="3600")
query_result = query_result_factory.create(query=query, retrieved_at=half_an_hour_ago)
query.latest_query_data = query_result
query.save()
queries = models.Query.outdated_queries()
self.assertNotIn(query, queries)
def test_outdated_queries_works_with_specific_time_schedule(self):
half_an_hour_ago = datetime.datetime.utcnow() - datetime.timedelta(minutes=30)
query = query_factory.create(schedule=half_an_hour_ago.strftime('%H:%M'))
query_result = query_result_factory.create(query=query, retrieved_at=half_an_hour_ago-datetime.timedelta(days=1))
query.latest_query_data = query_result
query.save()
queries = models.Query.outdated_queries()
self.assertIn(query, queries)
class QueryArchiveTest(BaseTestCase):
def setUp(self):
super(QueryArchiveTest, self).setUp()
def test_archive_query_sets_flag(self):
query = query_factory.create(ttl=1)
query = query_factory.create()
query.archive()
query = models.Query.get_by_id(query.id)
self.assertEquals(query.is_archived, True)
def test_archived_query_doesnt_return_in_all(self):
query = query_factory.create(ttl=1)
query = query_factory.create(schedule="1")
yesterday = datetime.datetime.now() - datetime.timedelta(days=1)
query_result = models.QueryResult.store_result(query.data_source.id, query.query_hash, query.query, "1",
123, yesterday)
@@ -107,15 +188,53 @@ class QueryArchiveTest(BaseTestCase):
self.assertRaises(models.Widget.DoesNotExist, models.Widget.get_by_id, widget.id)
def test_removes_scheduling(self):
query = query_factory.create(ttl=1)
query = query_factory.create(schedule="1")
query.archive()
query = models.Query.get_by_id(query.id)
self.assertEqual(-1, query.ttl)
self.assertEqual(None, query.schedule)
class DataSourceTest(BaseTestCase):
def test_get_schema(self):
return_value = [{'name': 'table', 'columns': []}]
with mock.patch('redash.query_runner.pg.PostgreSQL.get_schema') as patched_get_schema:
patched_get_schema.return_value = return_value
ds = data_source_factory.create()
schema = ds.get_schema()
self.assertEqual(return_value, schema)
def test_get_schema_uses_cache(self):
return_value = [{'name': 'table', 'columns': []}]
with mock.patch('redash.query_runner.pg.PostgreSQL.get_schema') as patched_get_schema:
patched_get_schema.return_value = return_value
ds = data_source_factory.create()
ds.get_schema()
schema = ds.get_schema()
self.assertEqual(return_value, schema)
self.assertEqual(patched_get_schema.call_count, 1)
def test_get_schema_skips_cache_with_refresh_true(self):
return_value = [{'name': 'table', 'columns': []}]
with mock.patch('redash.query_runner.pg.PostgreSQL.get_schema') as patched_get_schema:
patched_get_schema.return_value = return_value
ds = data_source_factory.create()
ds.get_schema()
new_return_value = [{'name': 'new_table', 'columns': []}]
patched_get_schema.return_value = new_return_value
schema = ds.get_schema(refresh=True)
self.assertEqual(new_return_value, schema)
self.assertEqual(patched_get_schema.call_count, 2)
class QueryResultTest(BaseTestCase):
def setUp(self):
@@ -149,7 +268,7 @@ class QueryResultTest(BaseTestCase):
yesterday = datetime.datetime.now() - datetime.timedelta(days=1)
qr = query_result_factory.create(retrieved_at=yesterday)
found_query_result = models.QueryResult.get_latest(qr.data_source, qr.query, ttl=60)
found_query_result = models.QueryResult.get_latest(qr.data_source, qr.query, max_age=60)
self.assertIsNone(found_query_result)
@@ -157,7 +276,7 @@ class QueryResultTest(BaseTestCase):
yesterday = datetime.datetime.now() - datetime.timedelta(seconds=30)
qr = query_result_factory.create(retrieved_at=yesterday)
found_query_result = models.QueryResult.get_latest(qr.data_source, qr.query, ttl=120)
found_query_result = models.QueryResult.get_latest(qr.data_source, qr.query, max_age=120)
self.assertEqual(found_query_result, qr)

View File

@@ -1,5 +1,5 @@
import datetime
from mock import patch, call
from mock import patch, call, ANY
from tests import BaseTestCase
from tests.factories import query_factory, query_result_factory
from redash.tasks import refresh_queries
@@ -10,7 +10,7 @@ from redash.tasks import refresh_queries
# 2. test for the refresh_query task
class TestRefreshQueries(BaseTestCase):
def test_enqueues_outdated_queries(self):
query = query_factory.create(ttl=60)
query = query_factory.create(schedule="60")
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
@@ -19,10 +19,10 @@ class TestRefreshQueries(BaseTestCase):
with patch('redash.tasks.QueryTask.add_task') as add_job_mock:
refresh_queries()
add_job_mock.assert_called_with(query.query, query.data_source, scheduled=True)
add_job_mock.assert_called_with(query.query, query.data_source, scheduled=True, metadata=ANY)
def test_skips_fresh_queries(self):
query = query_factory.create(ttl=1200)
query = query_factory.create(schedule="1200")
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
@@ -32,7 +32,7 @@ class TestRefreshQueries(BaseTestCase):
self.assertFalse(add_job_mock.called)
def test_skips_queries_with_no_ttl(self):
query = query_factory.create(ttl=-1)
query = query_factory.create(schedule=None)
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
@@ -42,8 +42,8 @@ class TestRefreshQueries(BaseTestCase):
self.assertFalse(add_job_mock.called)
def test_enqueues_query_only_once(self):
query = query_factory.create(ttl=60)
query2 = query_factory.create(ttl=60, query=query.query, query_hash=query.query_hash,
query = query_factory.create(schedule="60")
query2 = query_factory.create(schedule="60", query=query.query, query_hash=query.query_hash,
data_source=query.data_source)
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
@@ -55,11 +55,11 @@ class TestRefreshQueries(BaseTestCase):
with patch('redash.tasks.QueryTask.add_task') as add_job_mock:
refresh_queries()
add_job_mock.assert_called_once_with(query.query, query.data_source, scheduled=True)
add_job_mock.assert_called_once_with(query.query, query.data_source, scheduled=True, metadata=ANY)
def test_enqueues_query_with_correct_data_source(self):
query = query_factory.create(ttl=60)
query2 = query_factory.create(ttl=60, query=query.query, query_hash=query.query_hash)
query = query_factory.create(schedule="60")
query2 = query_factory.create(schedule="60", query=query.query, query_hash=query.query_hash)
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
@@ -70,13 +70,16 @@ class TestRefreshQueries(BaseTestCase):
with patch('redash.tasks.QueryTask.add_task') as add_job_mock:
refresh_queries()
add_job_mock.assert_has_calls([call(query2.query, query2.data_source, scheduled=True), call(query.query, query.data_source, scheduled=True)], any_order=True)
add_job_mock.assert_has_calls([call(query2.query, query2.data_source, scheduled=True, metadata=ANY),
call(query.query, query.data_source, scheduled=True, metadata=ANY)],
any_order=True)
self.assertEquals(2, add_job_mock.call_count)
def test_enqueues_only_for_relevant_data_source(self):
query = query_factory.create(ttl=60)
query2 = query_factory.create(ttl=3600, query=query.query, query_hash=query.query_hash)
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
query = query_factory.create(schedule="60")
query2 = query_factory.create(schedule="3600", query=query.query, query_hash=query.query_hash)
import psycopg2
retrieved_at = datetime.datetime.utcnow().replace(tzinfo=psycopg2.tz.FixedOffsetTimezone(offset=0, name=None)) - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
query.latest_query_data = query_result
@@ -86,4 +89,4 @@ class TestRefreshQueries(BaseTestCase):
with patch('redash.tasks.QueryTask.add_task') as add_job_mock:
refresh_queries()
add_job_mock.assert_called_once_with(query.query, query.data_source, scheduled=True)
add_job_mock.assert_called_once_with(query.query, query.data_source, scheduled=True, metadata=ANY)