Compare commits

...

104 Commits

Author SHA1 Message Date
Arik Fraimovich
150fc6dbf0 Fix repo name 2015-04-06 12:39:30 +03:00
Arik Fraimovich
ccff9614d4 New release process 2015-04-06 12:17:47 +03:00
Arik Fraimovich
a7b881874f Remove unused script 2015-04-06 09:38:01 +03:00
Arik Fraimovich
9fb33cf746 Merge pull request #399 from EverythingMe/feature/schema
Feature: schema browser and simple autocomplete
2015-04-02 17:10:07 +03:00
Arik Fraimovich
e3c5da5bc5 Fix tests to use correct data 2015-04-02 17:05:16 +03:00
Arik Fraimovich
e675690cc6 Sort schema by name 2015-04-02 16:56:00 +03:00
Arik Fraimovich
edc1622cf5 Schema support for MySQL 2015-04-02 16:55:52 +03:00
Arik Fraimovich
5ab3d4a40d Basic autocomplete functionality 2015-04-02 16:12:33 +03:00
Arik Fraimovich
cb29d87b63 Improve formatting of schema browser 2015-04-02 15:40:43 +03:00
Arik Fraimovich
6ff6bdad9f Use the correct redis connection in tests 2015-04-02 11:25:42 +03:00
Arik Fraimovich
e3cc3ef9a4 Move schema fetching to DataSource + tests 2015-04-02 11:25:42 +03:00
Arik Fraimovich
1fe4f291f2 Flush test redis db after each test 2015-04-02 11:25:22 +03:00
Arik Fraimovich
a54119f4a2 Show schema along side the query 2015-04-02 11:25:22 +03:00
Arik Fraimovich
c5b7fe5321 Use codemirror directly without ui-codemirror 2015-04-02 11:24:47 +03:00
Arik Fraimovich
d487ec9153 Upgrade codemirror to latest version 2015-04-02 11:24:18 +03:00
Arik Fraimovich
fa19b1ddc8 Endpoint to return data source schema 2015-04-02 11:23:52 +03:00
Arik Fraimovich
267c32b390 Merge pull request #401 from EverythingMe/fix/wrong_time_zone
Fix: use correct date when converting to UTC to get correct timezone.
2015-04-02 07:40:27 +03:00
Arik Fraimovich
aeff3f1494 Fix: use correct date when converting to UTC to get correct timezone. 2015-04-02 07:39:37 +03:00
Arik Fraimovich
e80e52f6c9 Add annotations for the injector. 2015-04-01 20:23:18 +03:00
Arik Fraimovich
fe41a70602 Merge pull request #400 from EverythingMe/feature/better_scheduler
Improved query scheduling option
2015-04-01 17:28:02 +03:00
Arik Fraimovich
976d9abe2d Disable UI tests, as they are no longer maintained :-( 2015-04-01 17:23:08 +03:00
Arik Fraimovich
041bc1100a New UI for query schedule setting 2015-04-01 17:07:19 +03:00
Arik Fraimovich
5d095ff6ab Resolve #113: upgrade to latest ui-bootstrap 2015-04-01 12:48:24 +03:00
Arik Fraimovich
ef01b61b29 Fix: refresh selector had empty option 2015-04-01 12:11:14 +03:00
Arik Fraimovich
faad6b656b Change query ttl field to be a string and named schedule.
This to allow other types of scheduling than just repeat every X seconds.
The first supported option will be: repeat every day at hour X.
2015-04-01 11:23:26 +03:00
Arik Fraimovich
0bc775584b Merge pull request #397 from EverythingMe/feature/edit_others_queries
Fix: forking broken
2015-03-22 17:32:13 +02:00
Arik Fraimovich
f2d96d61a1 Fix: forking broken 2015-03-22 17:28:47 +02:00
Arik Fraimovich
09bf2dd608 Merge pull request #396 from EverythingMe/feature/edit_others_queries
Feature: allow editing others' queries
2015-03-22 14:53:02 +02:00
Arik Fraimovich
ad1b9b06cf Fix test. 2015-03-22 14:42:08 +02:00
Arik Fraimovich
a4bceae60b Allow anyone to edit any query & show who edited it 2015-03-22 13:22:11 +02:00
Arik Fraimovich
9385449feb Add updated_at timestamp to visualization, query, dashboard and users models 2015-03-22 12:58:26 +02:00
Arik Fraimovich
562e1bb8c9 Merge pull request #395 from EverythingMe/feature/post_to_create_a_query
Convert additional dates to user's formatting
2015-03-19 08:54:54 +02:00
Arik Fraimovich
082b718303 Convert additional dates to user's formatting 2015-03-19 08:54:04 +02:00
Arik Fraimovich
c0872899e9 Merge pull request #394 from EverythingMe/feature/post_to_create_a_query
Fix: column definitions weren't updated.
2015-03-19 08:43:17 +02:00
Arik Fraimovich
086bbf129d Fix: column definitions weren't udpated 2015-03-19 08:40:21 +02:00
Arik Fraimovich
4b7561e538 Merge pull request #393 from EverythingMe/feature/post_to_create_a_query
Fix: allow Unicode and other special chars in column names
2015-03-19 08:34:06 +02:00
Arik Fraimovich
407c5a839b Fix: allow Unicode and other special chars in column names
Stopped using Angular's $parse and just accessing the property directly.
2015-03-19 08:33:16 +02:00
Arik Fraimovich
b8aefd26b8 Merge pull request #392 from EverythingMe/feature/post_to_create_a_query
Support posting to /queries/new to create a new query.
2015-03-18 13:42:55 +02:00
Arik Fraimovich
85a762bcd2 Support posting to /queries/new to create a new query. 2015-03-18 13:28:23 +02:00
Arik Fraimovich
4f1b3d5beb Merge pull request #391 from EverythingMe/feature/api_key_auth
Fix: allow dots in column name
2015-03-16 15:08:00 +02:00
Arik Fraimovich
9218a7c437 Fix: allow dots in column name 2015-03-16 14:59:51 +02:00
Arik Fraimovich
71a3f066a5 Ignore gh-pages branch in CircleCI. 2015-03-16 09:03:52 +02:00
Arik Fraimovich
89436d779c Merge pull request #390 from fedex1/patch-1
Update bootstrap.sh
2015-03-16 05:37:33 +02:00
Ralph Yozzo
3631e938da Update bootstrap.sh
# modified by @fedex1 3/15/2015 seems to be the latest version at this point in time.
2015-03-15 23:27:45 -04:00
Arik Fraimovich
c0a9db68f0 Merge pull request #389 from EverythingMe/feature/api_key_auth
Fix: show date/time with respect to user's locale
2015-03-15 18:53:31 +02:00
Arik Fraimovich
bec9c9e14e Fix: show date/time in user's locale 2015-03-15 18:53:02 +02:00
Arik Fraimovich
47bbc25277 Merge pull request #388 from EverythingMe/feature/api_key_auth
Make it possible to set enabled query runners from env
2015-03-12 12:00:26 +02:00
Arik Fraimovich
f02c2588d2 Make it possible to set enabled query runners from env 2015-03-12 11:52:31 +02:00
Arik Fraimovich
7db5449dad Merge pull request #387 from EverythingMe/feature/api_key_auth
Record event when accessing query result from API
2015-03-12 11:46:35 +02:00
Arik Fraimovich
7f6c7f0634 Record event when accessing query result from API 2015-03-12 11:43:21 +02:00
Arik Fraimovich
73955c74f7 Merge pull request #386 from EverythingMe/feature/api_key_auth
Code cleanup (remove "worker's status" dead link & unused settings)
2015-03-11 11:30:15 +02:00
Arik Fraimovich
7de85da8ef Remove unused settings 2015-03-11 07:50:49 +02:00
Arik Fraimovich
0aab35252a Remove broken "Worker's Status" page 2015-03-11 07:47:10 +02:00
Arik Fraimovich
141dbc9e70 Merge pull request #385 from EverythingMe/feature/api_key_auth
Feature: optional API Key authentication instead of HMAC
2015-03-10 18:29:01 +02:00
Arik Fraimovich
2e513c347c Cleanup 2015-03-10 18:21:51 +02:00
Arik Fraimovich
335c136ec2 Show API Key button in query view 2015-03-10 18:08:02 +02:00
Arik Fraimovich
df1170eb9b Feature: optional api key only authentication 2015-03-10 17:51:17 +02:00
Arik Fraimovich
69bcaddbe0 Fix: migrations stopped working due to peewee upgrade 2015-03-09 16:55:55 +02:00
Arik Fraimovich
67958cc27b MySQL query runner: make configuration access safer 2015-03-09 10:16:06 +02:00
Arik Fraimovich
6c716f23d9 Fix migration & query runner for mysql 2015-03-09 08:58:03 +02:00
Arik Fraimovich
bea11b0ac2 Merge pull request #384 from EverythingMe/feature/python_query_runner
Experimental Python query runner
2015-03-08 15:03:59 +02:00
Arik Fraimovich
4927386299 Experimental Python query runner 2015-03-08 15:02:57 +02:00
Arik Fraimovich
30a8550f6b Merge pull request #383 from EverythingMe/fix/migration
Fix: make migration work with new peewee
2015-03-08 14:37:42 +02:00
Arik Fraimovich
0389a45be4 Fix: make migration work with new peewee 2015-03-08 13:28:18 +02:00
Arik Fraimovich
707c169867 Merge pull request #382 from EverythingMe/feature/datasources_v2
Fix: import should be global
2015-03-08 12:27:34 +02:00
Arik Fraimovich
fca034ac0d Fix: import should be global 2015-03-08 12:23:51 +02:00
Arik Fraimovich
97691ea5ee Merge pull request #380 from EverythingMe/feature/datasources_v2
Refactor datasources (query runners)
2015-03-08 11:50:09 +02:00
Arik Fraimovich
40335a0e21 Fix: add missing option flags 2015-03-08 11:00:56 +02:00
Arik Fraimovich
9344cbd078 Update bootstrap script to support new format 2015-03-08 10:38:50 +02:00
Arik Fraimovich
9442fd9465 Update logging messages 2015-03-02 09:49:17 +02:00
Arik Fraimovich
c816f1003d Bump version 2015-03-02 09:45:29 +02:00
Arik Fraimovich
2107b79a80 Use validation for data source editing 2015-03-02 09:44:55 +02:00
Arik Fraimovich
8fae6de8c7 Update datasource CLI to use new format 2015-03-02 09:40:15 +02:00
Arik Fraimovich
d798c77574 Support for already valid data source config 2015-03-02 07:34:06 +02:00
Arik Fraimovich
0abce27381 Set configuration in base ctor 2015-02-24 07:50:10 +02:00
Arik Fraimovich
8a171ba39a Use JSON Schema for data source configuration 2015-02-24 07:50:10 +02:00
Arik Fraimovich
20af276772 Updated configuration spec to include friendly name and more 2015-02-24 07:50:10 +02:00
Arik Fraimovich
4058342763 WIP: configuration object 2015-02-24 07:50:10 +02:00
Arik Fraimovich
af64657260 Migration to update all data source options 2015-02-24 07:50:09 +02:00
Arik Fraimovich
b6bd46e59e New query runners implementation 2015-02-24 07:50:09 +02:00
Arik Fraimovich
31fe547e03 Merge pull request #378 from EverythingMe/feature/variables
Fix #263: timestamp fields should be with time zone
2015-02-23 11:10:20 +02:00
Arik Fraimovich
aff324071e Update peewee version 2015-02-23 09:19:39 +02:00
Arik Fraimovich
131266e408 Fix #263: timestamp fields should be with time zone 2015-02-23 09:02:16 +02:00
Arik Fraimovich
b1f97e8c8d Merge pull request #377 from olgakogan/master
'Download Dataset' fix - error in case of big numeric values
2015-02-21 15:21:18 +02:00
Arik Fraimovich
9783d6e839 Merge pull request #374 from akariv/master
Support unicode queries in search API
2015-02-21 14:48:36 +02:00
akariv
8eea2fb367 Support unicode queries in search API
Modify query test case to use unicode strings
2015-02-20 23:49:37 +02:00
olgakogan
b585480c81 removed redundant handling of large numbers when generating a csv file (causes ValueError: timestamp out of range) 2015-02-20 22:33:02 +02:00
Arik Fraimovich
89e307daba Merge pull request #373 from EverythingMe/feature/variables
UI Fixes
2015-02-08 18:18:37 +02:00
Arik Fraimovich
a5eb0e293c Fix: don't lock query editing while executing 2015-02-08 18:17:08 +02:00
Arik Fraimovich
48d1113225 Fix #371: show notification when query fails. 2015-02-08 18:08:24 +02:00
Arik Fraimovich
d82d5c3bdc Merge pull request #372 from EverythingMe/feature/variables
Several UI fixes
2015-02-08 18:05:05 +02:00
Arik Fraimovich
dfe58b3953 Give the user the option to disable sorting of chart data 2015-02-08 18:02:36 +02:00
Arik Fraimovich
44019b8357 Variables: allow nesting variables 2015-02-08 17:07:20 +02:00
Arik Fraimovich
3c15a44faf Fix: keyboard shortcuts were not unbinded 2015-02-08 17:07:06 +02:00
Arik Fraimovich
8d113dadd2 Revert "Fix #242: handle the case there is no connection to the server"
This reverts commit 3960005002.

Conflicts:
	rd_ui/app/index.html
	rd_ui/bower.json
2015-02-02 18:02:42 +02:00
Arik Fraimovich
c1dd26aee7 Merge pull request #370 from alexanderlz/master
add ISO datetime to filename when saving chart as image
2015-02-02 10:52:00 +02:00
Alexander Leibzon
b2228c2a39 replace 'possibly dangerous for some OSs' characters 2015-02-01 15:29:46 +02:00
Alexander Leibzon
d9618cb09c add ISO datetime to filename when saving chart as image 2015-02-01 14:52:58 +02:00
Arik Fraimovich
c8ca683d3a Merge pull request #368 from alexanderlz/master
Issue #168. Visualization: save as image.
2015-02-01 13:22:40 +02:00
Alexander Leibzon
888963ffaa Merge branch 'master' of https://github.com/alexanderlz/redash
Conflicts:
	rd_ui/app/index.html
	rd_ui/bower.json
2015-02-01 13:20:26 +02:00
Alexander Leibzon
ae947a8310 removing unwanted commit 2015-02-01 13:18:57 +02:00
Alexander Leibzon
bee9cde347 removing unwanted commit 2015-02-01 12:00:23 +02:00
Arik Fraimovich
c131dab125 Merge pull request #369 from EverythingMe/fix/dashboard_filters
Fix: filters got linked when they shouldn't have.
2015-02-01 11:57:46 +02:00
Arik Fraimovich
e113642ae4 Fix: filters got linked when they shouldn't have.
- Make a copy of the first filter, to prevent it controlling the other filters.
- If no query string value given or dashboard filters enabled, don't link filters.
2015-02-01 11:51:07 +02:00
69 changed files with 2329 additions and 1028 deletions

View File

@@ -1,6 +1,7 @@
NAME=redash
VERSION=`python ./manage.py version`
FULL_VERSION=$(VERSION)+b$(CIRCLE_BUILD_NUM)
BASE_VERSION=$(shell python ./manage.py version | cut -d + -f 1)
# VERSION gets evaluated every time it's referenced, therefore we need to use VERSION here instead of FULL_VERSION.
FILENAME=$(CIRCLE_ARTIFACTS)/$(NAME).$(VERSION).tar.gz
@@ -15,8 +16,8 @@ pack:
tar -zcv -f $(FILENAME) --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *
upload:
python bin/upload_version.py $(VERSION) $(FILENAME)
python bin/release_manager.py $(CIRCLE_SHA1) $(BASE_VERSION) $(FILENAME)
test:
nosetests --with-coverage --cover-package=redash tests/*.py
cd rd_ui && grunt test
#cd rd_ui && grunt test

View File

@@ -1,30 +0,0 @@
#!/usr/bin/env python
import sys
import requests
if __name__ == '__main__':
response = requests.get('https://api.github.com/repos/EverythingMe/redash/releases')
if response.status_code != 200:
exit("Failed getting releases (status code: %s)." % response.status_code)
sorted_releases = sorted(response.json(), key=lambda release: release['id'], reverse=True)
latest_release = sorted_releases[0]
asset_url = latest_release['assets'][0]['url']
filename = latest_release['assets'][0]['name']
wget_command = 'wget --header="Accept: application/octet-stream" %s -O %s' % (asset_url, filename)
if '--url-only' in sys.argv:
print asset_url
elif '--wget' in sys.argv:
print wget_command
else:
print "Latest release: %s" % latest_release['tag_name']
print latest_release['body']
print "\nTarball URL: %s" % asset_url
print 'wget: %s' % (wget_command)

130
bin/release_manager.py Normal file
View File

@@ -0,0 +1,130 @@
import os
import sys
import json
import re
import subprocess
import requests
github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
repo = 'EverythingMe/redash'
def _github_request(method, path, params=None, headers=None):
    """Issue an authenticated request against the GitHub API.

    `path` may be either a path relative to the API root (e.g.
    "repos/owner/repo/releases") or an absolute API URL (asset URLs
    returned by the API are absolute). `params`, when given, is
    JSON-encoded and sent as the request body.

    Returns the `requests.Response`; callers inspect the status code.
    """
    if not path.startswith('https://api.github.com'):
        url = "https://api.github.com/{}".format(path)
    else:
        url = path

    if params is not None:
        params = json.dumps(params)

    # Bug fix: `headers` was a mutable default argument ({}) and was
    # silently ignored; use None as the default and actually forward it.
    return requests.request(method, url, data=params, headers=headers, auth=auth)
def exception_from_error(message, response):
    """Build an Exception describing a failed GitHub API response."""
    status = response.status_code
    detail = response.json().get('message', '?')
    return Exception("({}) {}: {}".format(status, message, detail))
def rc_tag_name(version):
    """Return the git tag name used for the release candidate of `version`."""
    return "v%s-rc" % (version,)
def get_rc_release(version):
    """Fetch the existing RC release for `version`, or None when absent."""
    response = _github_request(
        'get', 'repos/{}/releases/tags/{}'.format(repo, rc_tag_name(version)))

    status = response.status_code
    if status == 200:
        return response.json()
    if status == 404:
        return None
    raise exception_from_error("Unknown error while looking RC release: ", response)
def create_release(version, commit_sha):
    """Create a new prerelease on GitHub, tagged as the RC of `version`.

    The release points at `commit_sha`; returns the created release JSON.
    """
    payload = {
        'tag_name': rc_tag_name(version),
        'name': "{} - RC".format(version),
        'target_commitish': commit_sha,
        'prerelease': True,
    }

    response = _github_request('post', 'repos/{}/releases'.format(repo), payload)
    if response.status_code != 201:
        raise exception_from_error("Failed creating new release", response)
    return response.json()
def upload_asset(release, filepath):
    """Upload the build file at `filepath` as an asset of `release`.

    Raises (via exception_from_error) when GitHub does not answer
    201 Created; returns the upload response otherwise.
    """
    # The upload_url comes back as a URI template; strip the {?name} part.
    upload_url = release['upload_url'].replace('{?name}', '')
    filename = filepath.split('/')[-1]
    # Bug fix: open the tarball in binary mode — text mode can corrupt the
    # payload and fails outright on Python 3.
    with open(filepath, 'rb') as file_content:
        headers = {'Content-Type': 'application/gzip'}
        # NOTE(review): verify=False disables TLS certificate verification
        # for the upload host — confirm this is still needed and consider
        # removing it.
        response = requests.post(upload_url, file_content, params={'name': filename},
                                 headers=headers, auth=auth, verify=False)
    if response.status_code != 201:  # not 200/201/...
        raise exception_from_error('Failed uploading asset', response)
    return response
def remove_previous_builds(release):
    """Delete every asset currently attached to `release`."""
    for asset in release['assets']:
        delete_response = _github_request('delete', asset['url'])
        if delete_response.status_code != 204:
            raise exception_from_error("Failed deleting asset", delete_response)
def get_changelog(commit_sha):
    """Build a changelog listing PR merge commits since the latest release.

    Fetches the latest published GitHub release to find the previous
    release SHA, then walks `git log --merges` between it and `commit_sha`,
    emitting one "<sha>[ #<pr>]: <body> (<author>)" line per merge.
    """
    latest_release = _github_request('get', 'repos/{}/releases/latest'.format(repo))
    if latest_release.status_code != 200:
        raise exception_from_error('Failed getting latest release', latest_release)
    latest_release = latest_release.json()
    previous_sha = latest_release['target_commitish']

    args = ['git', '--no-pager', 'log', '--merges', '--grep', 'Merge pull request',
            '--pretty=format:"%h|%s|%b|%p"', '{}...{}'.format(previous_sha, commit_sha)]
    log = subprocess.check_output(args)

    changes = ["Changes since {}:".format(latest_release['name'])]
    for line in log.split('\n'):
        try:
            # Each pretty-format line is wrapped in quotes; strip them.
            sha, subject, body, parents = line[1:-1].split('|')
        except ValueError:
            continue  # empty or malformed line

        # Fix: replace Python-2-only `except Exception, ex` with an explicit
        # None check (re.match only fails by returning None here), and use a
        # raw string for the regex.
        match = re.match(r"Merge pull request #(\d+)", subject)
        pull_request = " #{}".format(match.groups()[0]) if match else ""

        # Author of the last parent — presumably the merged PR head; the
        # surrounding quotes from %an are stripped. TODO confirm parent order.
        author = subprocess.check_output(
            ['git', 'log', '-1', '--pretty=format:"%an"', parents.split(' ')[-1]])[1:-1]
        changes.append("{}{}: {} ({})".format(sha, pull_request, body.strip(), author))

    return "\n".join(changes)
def update_release(version, build_filepath, commit_sha):
    """Upload a new build to the RC release of `version`, creating it if needed.

    Also refreshes the release body with a generated changelog. Best-effort:
    any failure is printed rather than raised, so a release hiccup does not
    fail the surrounding CI build.
    """
    try:
        release = get_rc_release(version) or create_release(version, commit_sha)
        # Fix: parenthesized print calls — identical output on Python 2 for a
        # single argument, and valid on Python 3.
        print("Using release id: {}".format(release['id']))

        remove_previous_builds(release)
        response = upload_asset(release, build_filepath)

        changelog = get_changelog(commit_sha)
        response = _github_request('patch', release['url'], {'body': changelog})
        print(response.status_code)
        print(response.text)
    # Fix: `except Exception, ex` is Python-2-only; `as` works on 2.6+ and 3.
    except Exception as ex:
        print(ex)
if __name__ == '__main__':
    # Usage: release_manager.py <commit_sha> <version> <build_filepath>
    commit_sha, version, filepath = sys.argv[1], sys.argv[2], sys.argv[3]

    # TODO: make sure running from git directory & remote = repo
    update_release(version, filepath, commit_sha)

View File

@@ -1,46 +0,0 @@
#!python
import os
import sys
import json
import requests
import subprocess
def capture_output(command):
proc = subprocess.Popen(command, stdout=subprocess.PIPE)
return proc.stdout.read()
if __name__ == '__main__':
version = sys.argv[1]
filepath = sys.argv[2]
filename = filepath.split('/')[-1]
github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
commit_sha = os.environ['CIRCLE_SHA1']
commit_body = capture_output(["git", "log", "--format=%b", "-n", "1", commit_sha])
file_md5_checksum = capture_output(["md5sum", filepath]).split()[0]
file_sha256_checksum = capture_output(["sha256sum", filepath]).split()[0]
version_body = "%s\n\nMD5: %s\nSHA256: %s" % (commit_body, file_md5_checksum, file_sha256_checksum)
params = json.dumps({
'tag_name': 'v{0}'.format(version),
'name': 're:dash v{0}'.format(version),
'body': version_body,
'target_commitish': commit_sha,
'prerelease': True
})
response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
data=params,
auth=auth)
upload_url = response.json()['upload_url']
upload_url = upload_url.replace('{?name}', '')
with open(filepath) as file_content:
headers = {'Content-Type': 'application/gzip'}
response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth,
headers=headers, verify=False)

View File

@@ -19,6 +19,10 @@ test:
post:
- make pack
deployment:
test:
branch: chore/release_process
commands:
- make upload
github:
branch: master
commands:
@@ -26,3 +30,7 @@ deployment:
notify:
webhooks:
- url: https://webhooks.gitter.im/e/895d09c3165a0913ac2f
general:
branches:
ignore:
- gh-pages

View File

@@ -1,12 +1,15 @@
from playhouse.migrate import Migrator
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models
if __name__ == '__main__':
db.connect_db()
migrator = Migrator(db.database)
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
migrator.add_column(models.Query, models.Query.is_archived, 'is_archived')
migrate(
migrator.add_column('queries', 'is_archived', models.Query.is_archived)
)
db.close_db(None)

View File

@@ -0,0 +1,21 @@
from redash.models import db

if __name__ == '__main__':
    db.connect_db()

    # (table, column) pairs whose type is widened to be timezone-aware.
    columns = (
        ('activity_log', 'created_at'),
        ('dashboards', 'created_at'),
        ('data_sources', 'created_at'),
        ('events', 'created_at'),
        ('groups', 'created_at'),
        ('queries', 'created_at'),
        ('widgets', 'created_at'),
        ('query_results', 'retrieved_at'),
    )

    with db.database.transaction():
        for table_name, column_name in columns:
            db.database.execute_sql(
                "ALTER TABLE {} ALTER COLUMN {} TYPE timestamp with time zone;".format(
                    table_name, column_name))

    db.close_db(None)

View File

@@ -0,0 +1,73 @@
import json
from redash import query_runner
from redash.models import DataSource
def update(data_source):
    """Convert a data source's legacy options string to the new JSON format.

    Skips sources whose options already validate against the new JSON-schema
    based configuration; otherwise rewrites `options` according to the
    source type and saves the record.
    """
    # Fix: parenthesized print calls — identical output on Python 2 for a
    # single argument, and valid on Python 3.
    print("[%s] Old options: %s" % (data_source.name, data_source.options))

    if query_runner.validate_configuration(data_source.type, data_source.options):
        print("[%s] configuration already valid. skipping." % data_source.name)
        return

    if data_source.type == 'pg':
        # Legacy format: space-separated "key=value" pairs; port becomes int.
        configuration = {}
        for value in data_source.options.split(" "):
            k, v = value.split("=", 1)
            configuration[k] = int(v) if k == 'port' else v
        data_source.options = json.dumps(configuration)
    elif data_source.type == 'mysql':
        # Legacy format: semicolon-separated "Key=value" pairs; translate
        # the capitalized key names to the new configuration keys.
        mapping = {
            'Server': 'host',
            'User': 'user',
            'Pwd': 'passwd',
            'Database': 'db'
        }
        configuration = {}
        for value in data_source.options.split(";"):
            k, v = value.split("=", 1)
            configuration[mapping[k]] = v
        data_source.options = json.dumps(configuration)
    elif data_source.type == 'graphite':
        old_config = json.loads(data_source.options)
        configuration = {"url": old_config["url"]}
        if "verify" in old_config:
            configuration['verify'] = old_config['verify']
        if "auth" in old_config:
            configuration['username'], configuration['password'] = old_config["auth"]
        data_source.options = json.dumps(configuration)
    elif data_source.type == 'url':
        data_source.options = json.dumps({"url": data_source.options})
    elif data_source.type == 'script':
        data_source.options = json.dumps({"path": data_source.options})
    elif data_source.type == 'mongo':
        # Only the type name changed; options stay as-is.
        data_source.type = 'mongodb'
    else:
        print("[%s] No need to convert type of: %s" % (data_source.name, data_source.type))

    print("[%s] New options: %s" % (data_source.name, data_source.options))
    data_source.save()
if __name__ == '__main__':
    # Migrate every existing data source in place.
    for ds in DataSource.all():
        update(ds)

View File

@@ -0,0 +1,12 @@
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    # Allow anonymous events: drop the NOT NULL constraint on user_id.
    with db.database.transaction():
        migrate(
            migrator.drop_not_null('events', 'user_id')
        )

    # Consistency fix: the sibling migration scripts close the connection
    # when done; this one previously left it open.
    db.close_db(None)

View File

@@ -0,0 +1,26 @@
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models
if __name__ == '__main__':
db.connect_db()
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
migrate(
migrator.add_column('queries', 'updated_at', models.Query.updated_at),
migrator.add_column('dashboards', 'updated_at', models.Dashboard.updated_at),
migrator.add_column('widgets', 'updated_at', models.Widget.updated_at),
migrator.add_column('users', 'created_at', models.User.created_at),
migrator.add_column('users', 'updated_at', models.User.updated_at),
migrator.add_column('visualizations', 'created_at', models.Visualization.created_at),
migrator.add_column('visualizations', 'updated_at', models.Visualization.updated_at)
)
db.database.execute_sql("UPDATE queries SET updated_at = created_at;")
db.database.execute_sql("UPDATE dashboards SET updated_at = created_at;")
db.database.execute_sql("UPDATE widgets SET updated_at = created_at;")
db.close_db(None)

View File

@@ -0,0 +1,19 @@
from playhouse.migrate import PostgresqlMigrator, migrate

from redash import models
from redash.models import db

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(
            migrator.add_column('queries', 'last_modified_by_id', models.Query.last_modified_by)
        )
        # Backfill: the original author is the last modifier so far.
        db.database.execute_sql("UPDATE queries SET last_modified_by_id = user_id;")

    db.close_db(None)

View File

@@ -0,0 +1,23 @@
from playhouse.migrate import PostgresqlMigrator, migrate

from redash import models
from redash.models import db

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        # Add the new string-based schedule column...
        migrate(
            migrator.add_column('queries', 'schedule', models.Query.schedule),
        )
        # ...carry over positive ttl values as interval schedules...
        db.database.execute_sql("UPDATE queries SET schedule = ttl WHERE ttl > 0;")
        # ...and drop the old integer ttl column.
        migrate(
            migrator.drop_column('queries', 'ttl')
        )

    db.close_db(None)

View File

@@ -18,8 +18,7 @@
<link rel="stylesheet" href="/bower_components/angular-ui-select/dist/select.css">
<link rel="stylesheet" href="/bower_components/pace/themes/pace-theme-minimal.css">
<link rel="stylesheet" href="/bower_components/font-awesome/css/font-awesome.css">
<link rel="stylesheet" href="/bower_components/offline/themes/offline-theme-default.css">
<link rel="stylesheet" href="/bower_components/offline/themes/offline-language-english.css">
<link rel="stylesheet" href="/bower_components/codemirror/addon/hint/show-hint.css">
<link rel="stylesheet" href="/styles/redash.css">
<!-- endbuild -->
</head>
@@ -41,9 +40,9 @@
<div class="collapse navbar-collapse navbar-ex1-collapse">
<ul class="nav navbar-nav">
<li class="active" ng-show="pageTitle"><a class="page-title" ng-bind="pageTitle"></a></li>
<li class="dropdown" ng-show="groupedDashboards.length > 0 || otherDashboards.length > 0 || currentUser.hasPermission('create_dashboard')">
<a href="#" class="dropdown-toggle" data-toggle="dropdown"><span class="glyphicon glyphicon-th-large"></span> <b class="caret"></b></a>
<ul class="dropdown-menu">
<li class="dropdown" ng-show="groupedDashboards.length > 0 || otherDashboards.length > 0 || currentUser.hasPermission('create_dashboard')" dropdown>
<a href="#" class="dropdown-toggle" dropdown-toggle><span class="glyphicon glyphicon-th-large"></span> <b class="caret"></b></a>
<ul class="dropdown-menu" dropdown-menu>
<span ng-repeat="(name, group) in groupedDashboards">
<li class="dropdown-submenu">
<a href="#" ng-bind="name"></a>
@@ -61,9 +60,9 @@
<li><a data-toggle="modal" href="#new_dashboard_dialog" ng-show="currentUser.hasPermission('create_dashboard')">New Dashboard</a></li>
</ul>
</li>
<li class="dropdown" ng-show="currentUser.hasPermission('view_query')">
<a href="#" class="dropdown-toggle" data-toggle="dropdown">Queries <b class="caret"></b></a>
<ul class="dropdown-menu">
<li class="dropdown" ng-show="currentUser.hasPermission('view_query')" dropdown>
<a href="#" class="dropdown-toggle" dropdown-toggle>Queries <b class="caret"></b></a>
<ul class="dropdown-menu" dropdown-menu>
<li ng-show="currentUser.hasPermission('create_query')"><a href="/queries/new">New Query</a></li>
<li><a href="/queries">Queries</a></li>
</ul>
@@ -107,9 +106,10 @@
<script src="/bower_components/codemirror/lib/codemirror.js"></script>
<script src="/bower_components/codemirror/addon/edit/matchbrackets.js"></script>
<script src="/bower_components/codemirror/addon/edit/closebrackets.js"></script>
<script src="/bower_components/codemirror/addon/hint/show-hint.js"></script>
<script src="/bower_components/codemirror/addon/hint/anyword-hint.js"></script>
<script src="/bower_components/codemirror/mode/sql/sql.js"></script>
<script src="/bower_components/codemirror/mode/javascript/javascript.js"></script>
<script src="/bower_components/angular-ui-codemirror/ui-codemirror.js"></script>
<script src="/bower_components/highcharts/highcharts.js"></script>
<script src="/bower_components/highcharts/modules/exporting.js"></script>
<script src="/bower_components/gridster/dist/jquery.gridster.js"></script>
@@ -125,11 +125,13 @@
<script src="/bower_components/marked/lib/marked.js"></script>
<script src="/scripts/ng_highchart.js"></script>
<script src="/scripts/ng_smart_table.js"></script>
<script src="/scripts/ui-bootstrap-tpls-0.5.0.min.js"></script>
<script src="/bower_components/angular-ui-bootstrap-bower/ui-bootstrap-tpls.js"></script>
<script src="/bower_components/bucky/bucky.js"></script>
<script src="/bower_components/pace/pace.js"></script>
<script src="/bower_components/mustache/mustache.js"></script>
<script src="/bower_components/offline/offline.min.js"></script>
<script src="/bower_components/canvg/rgbcolor.js"></script>
<script src="/bower_components/canvg/StackBlur.js"></script>
<script src="/bower_components/canvg/canvg.js"></script>
<!-- endbuild -->
<!-- build:js({.tmp,app}) /scripts/scripts.js -->

View File

@@ -6,7 +6,6 @@ angular.module('redash', [
'redash.services',
'redash.renderers',
'redash.visualization',
'ui.codemirror',
'highchart',
'ui.select2',
'angular-growl',

View File

@@ -16,16 +16,9 @@
$timeout(refresh, 59 * 1000);
};
$scope.flowerUrl = featureFlags.flowerUrl;
refresh();
}
var AdminWorkersCtrl = function ($scope, $sce) {
$scope.flowerUrl = $sce.trustAsResourceUrl(featureFlags.flowerUrl);
};
angular.module('redash.admin_controllers', [])
.controller('AdminStatusCtrl', ['$scope', 'Events', '$http', '$timeout', AdminStatusCtrl])
.controller('AdminWorkersCtrl', ['$scope', '$sce', AdminWorkersCtrl])
})();

View File

@@ -1,4 +1,11 @@
(function () {
var dateFormatter = function (value) {
if (!value) {
return "-";
}
return value.toDate().toLocaleString();
};
var QuerySearchCtrl = function($scope, $location, $filter, Events, Query) {
$scope.$parent.pageTitle = "Queries Search";
@@ -8,11 +15,6 @@
maxSize: 8,
};
var dateFormatter = function (value) {
if (!value) return "-";
return value.format("DD/MM/YY HH:mm");
}
$scope.gridColumns = [
{
"label": "Name",
@@ -21,7 +23,7 @@
},
{
'label': 'Created By',
'map': 'user.name'
'map': 'user_name'
},
{
'label': 'Created At',
@@ -30,9 +32,9 @@
},
{
'label': 'Update Schedule',
'map': 'ttl',
'map': 'schedule',
'formatFunction': function (value) {
return $filter('refreshRateHumanize')(value);
return $filter('scheduleHumanize')(value);
}
}
];
@@ -43,6 +45,7 @@
Query.search({q: $scope.term }, function(results) {
$scope.queries = _.map(results, function(query) {
query.created_at = moment(query.created_at);
query.user_name = query.user.name;
return query;
});
});
@@ -70,11 +73,6 @@
$scope.allQueries = [];
$scope.queries = [];
var dateFormatter = function (value) {
if (!value) return "-";
return value.format("DD/MM/YY HH:mm");
}
var filterQueries = function () {
$scope.queries = _.filter($scope.allQueries, function (query) {
if (!$scope.selectedTab) {
@@ -95,6 +93,7 @@
$scope.allQueries = _.map(queries, function (query) {
query.created_at = moment(query.created_at);
query.retrieved_at = moment(query.retrieved_at);
query.user_name = query.user.name;
return query;
});
@@ -109,7 +108,7 @@
},
{
'label': 'Created By',
'map': 'user.name'
'map': 'user_name'
},
{
'label': 'Created At',
@@ -130,9 +129,9 @@
},
{
'label': 'Update Schedule',
'map': 'ttl',
'map': 'schedule',
'formatFunction': function (value) {
return $filter('refreshRateHumanize')(value);
return $filter('scheduleHumanize')(value);
}
}
]

View File

@@ -27,12 +27,19 @@
var filters = {};
_.each(queryResults, function(queryResult) {
var queryFilters = queryResult.getFilters();
_.each(queryFilters, function (filter) {
if (!_.has(filters, filter.name)) {
// TODO: first object should be a copy, otherwise one of the chart filters behaves different than the others.
_.each(queryFilters, function (queryFilter) {
var hasQueryStringValue = _.has($location.search(), queryFilter.name);
if (!(hasQueryStringValue || dashboard.dashboard_filters_enabled)) {
// If dashboard filters not enabled, or no query string value given, skip filters linking.
return;
}
if (!_.has(filters, queryFilter.name)) {
var filter = _.extend({}, queryFilter);
filters[filter.name] = filter;
filters[filter.name].originFilters = [];
if (_.has($location.search(), filter.name)) {
if (hasQueryStringValue) {
filter.current = $location.search()[filter.name];
}
@@ -44,7 +51,7 @@
}
// TODO: merge values.
filters[filter.name].originFilters.push(filter);
filters[queryFilter.name].originFilters.push(queryFilter);
});
});
@@ -93,9 +100,13 @@
Events.record(currentUser, "autorefresh", "dashboard", dashboard.id, {'enable': $scope.refreshEnabled});
if ($scope.refreshEnabled) {
var refreshRate = _.min(_.flatten($scope.dashboard.widgets), function(widget) {
return widget.visualization.query.ttl;
}).visualization.query.ttl;
var refreshRate = _.min(_.map(_.flatten($scope.dashboard.widgets), function(widget) {
var schedule = widget.visualization.query.schedule;
if (schedule === null || schedule.match(/\d\d:\d\d/) !== null) {
return 60;
}
return widget.visualization.query.schedule;
}));
$scope.refreshRate = _.max([120, refreshRate * 2]) * 1000;
@@ -131,7 +142,6 @@
var parameters = Query.collectParamsFromQueryString($location, $scope.query);
var maxAge = $location.search()['maxAge'];
$scope.queryResult = $scope.query.getQueryResult(maxAge, parameters);
$scope.nextUpdateTime = moment(new Date(($scope.query.updated_at + $scope.query.ttl + $scope.query.runtime + 300) * 1000)).fromNow();
$scope.type = 'visualization';
} else {

View File

@@ -17,7 +17,7 @@
saveQuery = $scope.saveQuery;
$scope.sourceMode = true;
$scope.canEdit = currentUser.canEdit($scope.query);
$scope.canEdit = true;
$scope.isDirty = false;
$scope.newVisualization = undefined;
@@ -29,7 +29,7 @@
}
});
KeyboardShortcuts.bind({
var shortcuts = {
'meta+s': function () {
if ($scope.canEdit) {
$scope.saveQuery();
@@ -44,7 +44,9 @@
'meta+enter': $scope.executeQuery,
// Ctrl+Enter for PC
'ctrl+enter': $scope.executeQuery
});
};
KeyboardShortcuts.bind(shortcuts);
// @override
$scope.saveQuery = function(options, data) {
@@ -66,7 +68,7 @@
$scope.duplicateQuery = function() {
Events.record(currentUser, 'fork', 'query', $scope.query.id);
$scope.query.id = null;
$scope.query.ttl = -1;
$scope.query.schedule = null;
$scope.saveQuery({
successMessage: 'Query forked',

View File

@@ -1,27 +1,54 @@
(function() {
'use strict';
function QueryViewCtrl($scope, Events, $route, $location, notifications, growl, Query, DataSource) {
function QueryViewCtrl($scope, Events, $route, $location, notifications, growl, $modal, Query, DataSource) {
var DEFAULT_TAB = 'table';
var getQueryResult = function(ttl) {
var getQueryResult = function(maxAge) {
// Collect params, and getQueryResult with params; getQueryResult merges it into the query
var parameters = Query.collectParamsFromQueryString($location, $scope.query);
if (ttl == undefined) {
ttl = $location.search()['maxAge'];
if (maxAge == undefined) {
maxAge = $location.search()['maxAge'];
}
$scope.queryResult = $scope.query.getQueryResult(ttl, parameters);
if (maxAge == undefined) {
maxAge = -1;
}
$scope.queryResult = $scope.query.getQueryResult(maxAge, parameters);
}
$scope.query = $route.current.locals.query;
var updateSchema = function() {
$scope.hasSchema = false;
$scope.editorSize = "col-md-12";
var dataSourceId = $scope.query.data_source_id || $scope.dataSources[0].id;
DataSource.getSchema({id: dataSourceId}, function(data) {
if (data && data.length > 0) {
$scope.schema = data;
_.each(data, function(table) {
table.collapsed = true;
});
$scope.editorSize = "col-md-9";
$scope.hasSchema = true;
} else {
$scope.hasSchema = false;
$scope.editorSize = "col-md-12";
}
});
}
Events.record(currentUser, 'view', 'query', $scope.query.id);
getQueryResult();
$scope.queryExecuting = false;
$scope.isQueryOwner = currentUser.id === $scope.query.user.id;
$scope.isQueryOwner = (currentUser.id === $scope.query.user.id) || currentUser.hasPermission('admin');
$scope.canViewSource = currentUser.hasPermission('view_source');
$scope.dataSources = DataSource.get(function(dataSources) {
updateSchema();
$scope.query.data_source_id = $scope.query.data_source_id || dataSources[0].id;
});
@@ -33,6 +60,10 @@
$scope.queryExecuting = lock;
};
$scope.showApiKey = function() {
alert("API Key for this query:\n" + $scope.query.api_key);
};
$scope.saveQuery = function(options, data) {
if (data) {
data.id = $scope.query.id;
@@ -94,7 +125,7 @@
return Query.delete({id: data.id}, function() {
$scope.query.is_archived = true;
$scope.query.ttl = -1;
$scope.query.schedule = null;
growl.addSuccessMessage(options.successMessage);
// This feels dirty.
$('#archive-confirmation-modal').modal('hide');
@@ -117,6 +148,7 @@
});
}
updateSchema();
$scope.executeQuery();
};
@@ -155,6 +187,8 @@
$scope.query.queryResult = $scope.queryResult;
notifications.showNotification("re:dash", $scope.query.name + " updated.");
} else if (status == 'failed') {
notifications.showNotification("re:dash", $scope.query.name + " failed to run: " + $scope.queryResult.getError());
}
if (status === 'done' || status === 'failed') {
@@ -162,6 +196,28 @@
}
});
$scope.openScheduleForm = function() {
if (!$scope.isQueryOwner) {
return;
};
$modal.open({
templateUrl: '/views/schedule_form.html',
size: 'sm',
scope: $scope,
controller: ['$scope', '$modalInstance', function($scope, $modalInstance) {
$scope.close = function() {
$modalInstance.close();
}
if ($scope.query.hasDailySchedule()) {
$scope.refreshType = 'daily';
} else {
$scope.refreshType = 'periodic';
}
}]
});
};
$scope.$watch(function() {
return $location.hash()
}, function(hash) {
@@ -174,5 +230,5 @@
angular.module('redash.controllers')
.controller('QueryViewCtrl',
['$scope', 'Events', '$route', '$location', 'notifications', 'growl', 'Query', 'DataSource', QueryViewCtrl]);
['$scope', 'Events', '$route', '$location', 'notifications', 'growl', '$modal', 'Query', 'DataSource', QueryViewCtrl]);
})();

View File

@@ -29,7 +29,7 @@
restrict: 'E',
template: '<span ng-show="query.id && canViewSource">\
<a ng-show="!sourceMode"\
ng-href="{{query.id}}/source#{{selectedTab}}">Show Source\
ng-href="/queries/{{query.id}}/source#{{selectedTab}}">Show Source\
</a>\
<a ng-show="sourceMode"\
ng-href="/queries/{{query.id}}#{{selectedTab}}">Hide Source\
@@ -63,26 +63,80 @@
restrict: 'E',
scope: {
'query': '=',
'lock': '='
'lock': '=',
'schema': '='
},
template: '<textarea\
ui-codemirror="editorOptions"\
ng-model="query.query">',
link: function($scope) {
$scope.editorOptions = {
template: '<textarea></textarea>',
link: {
pre: function ($scope, element) {
var textarea = element.children()[0];
var editorOptions = {
mode: 'text/x-sql',
lineWrapping: true,
lineNumbers: true,
readOnly: false,
matchBrackets: true,
autoCloseBrackets: true
};
autoCloseBrackets: true,
extraKeys: {"Ctrl-Space": "autocomplete"}
};
$scope.$watch('lock', function(locked) {
$scope.editorOptions.readOnly = locked ? 'nocursor' : false;
});
var additionalHints = [];
CodeMirror.commands.autocomplete = function(cm) {
var hinter = function(editor, options) {
var hints = CodeMirror.hint.anyword(editor, options);
var cur = editor.getCursor(), token = editor.getTokenAt(cur).string;
hints.list = _.union(hints.list, _.filter(additionalHints, function (h) {
return h.search(token) === 0;
}));
return hints;
};
// CodeMirror.showHint(cm, CodeMirror.hint.anyword);
CodeMirror.showHint(cm, hinter);
};
var codemirror = CodeMirror.fromTextArea(textarea, editorOptions);
codemirror.on('change', function(instance) {
var newValue = instance.getValue();
if (newValue !== $scope.query.query) {
$scope.$evalAsync(function() {
$scope.query.query = newValue;
});
}
});
$scope.$watch('query.query', function () {
if ($scope.query.query !== codemirror.getValue()) {
codemirror.setValue($scope.query.query);
}
});
$scope.$watch('schema', function (schema) {
if (schema) {
var keywords = [];
_.each(schema, function (table) {
keywords.push(table.name);
_.each(table.columns, function (c) {
keywords.push(c);
});
});
additionalHints = _.unique(keywords);
}
});
$scope.$watch('lock', function (locked) {
var readOnly = locked ? 'nocursor' : false;
codemirror.setOption('readOnly', readOnly);
});
}
}
}
};
}
function queryFormatter($http) {
@@ -98,55 +152,104 @@
</button>',
link: function($scope) {
$scope.formatQuery = function formatQuery() {
$scope.queryExecuting = true;
$scope.queryFormatting = true;
$http.post('/api/queries/format', {
'query': $scope.query.query
}).success(function (response) {
$scope.query.query = response;
}).finally(function () {
$scope.queryExecuting = false;
$scope.queryFormatting = false;
});
};
}
}
}
function queryTimePicker() {
return {
restrict: 'E',
template: '<select ng-disabled="refreshType != \'daily\'" ng-model="hour" ng-change="updateSchedule()" ng-options="c as c for c in hourOptions"></select> :\
<select ng-disabled="refreshType != \'daily\'" ng-model="minute" ng-change="updateSchedule()" ng-options="c as c for c in minuteOptions"></select>',
link: function($scope) {
var padWithZeros = function(size, v) {
v = String(v);
if (v.length < size) {
v = "0" + v;
}
return v;
};
$scope.hourOptions = _.map(_.range(0, 24), _.partial(padWithZeros, 2));
$scope.minuteOptions = _.map(_.range(0, 60, 5), _.partial(padWithZeros, 2));
if ($scope.query.hasDailySchedule()) {
var parts = $scope.query.scheduleInLocalTime().split(':');
$scope.minute = parts[1];
$scope.hour = parts[0];
} else {
$scope.minute = "15";
$scope.hour = "00";
}
$scope.updateSchedule = function() {
var newSchedule = moment().hour($scope.hour).minute($scope.minute).utc().format('HH:mm');
if (newSchedule != $scope.query.schedule) {
$scope.query.schedule = newSchedule;
$scope.saveQuery();
}
};
$scope.$watch('refreshType', function() {
if ($scope.refreshType == 'daily') {
$scope.updateSchedule();
}
});
}
}
}
function queryRefreshSelect() {
return {
restrict: 'E',
template: '<select\
ng-disabled="!isQueryOwner"\
ng-model="query.ttl"\
ng-disabled="refreshType != \'periodic\'"\
ng-model="query.schedule"\
ng-change="saveQuery()"\
ng-options="c.value as c.name for c in refreshOptions">\
<option value="">No Refresh</option>\
</select>',
link: function($scope) {
$scope.refreshOptions = [
{
value: -1,
name: 'No Refresh'
},
{
value: 60,
value: "60",
name: 'Every minute'
},
}
]
_.each(_.range(1, 13), function (i) {
$scope.refreshOptions.push({
value: i * 3600,
value: String(i * 3600),
name: 'Every ' + i + 'h'
});
})
$scope.refreshOptions.push({
value: 24 * 3600,
value: String(24 * 3600),
name: 'Every 24h'
});
$scope.refreshOptions.push({
value: 7 * 24 * 3600,
value: String(7 * 24 * 3600),
name: 'Once a week'
});
$scope.$watch('refreshType', function() {
if ($scope.refreshType == 'periodic') {
if ($scope.query.hasDailySchedule()) {
$scope.query.schedule = null;
$scope.saveQuery();
}
}
});
}
}
@@ -158,5 +261,6 @@
.directive('queryResultLink', queryResultCSVLink)
.directive('queryEditor', queryEditor)
.directive('queryRefreshSelect', queryRefreshSelect)
.directive('queryTimePicker', queryTimePicker)
.directive('queryFormatter', ['$http', queryFormatter]);
})();

View File

@@ -24,13 +24,17 @@ angular.module('redash.filters', []).
return durationHumanize;
})
.filter('refreshRateHumanize', function () {
return function (ttl) {
if (ttl == -1) {
.filter('scheduleHumanize', function() {
return function (schedule) {
if (schedule === null) {
return "Never";
} else {
return "Every " + durationHumanize(ttl);
} else if (schedule.match(/\d\d:\d\d/) !== null) {
var parts = schedule.split(':');
var localTime = moment.utc().hour(parts[0]).minute(parts[1]).local().format('HH:mm');
return "Every day at " + localTime;
}
return "Every " + durationHumanize(parseInt(schedule));
}
})

View File

@@ -50,7 +50,7 @@
;
if (moment.isMoment(this.x)) {
var s = '<b>' + moment(this.x).format("DD/MM/YY HH:mm") + '</b>',
var s = '<b>' + this.x.toDate().toLocaleString() + '</b>',
pointsCount = this.points.length;
$.each(this.points, function (i, point) {
@@ -153,6 +153,24 @@
this.redraw();
}
},
{
text: 'Save Image',
onclick: function () {
var canvas = document.createElement('canvas');
window.canvg(canvas, this.getSVG());
var href = canvas.toDataURL('image/png');
var a = document.createElement('a');
a.href = href;
var filenameSuffix = new Date().toISOString().replace(/:/g,'_').replace('Z', '');
if (this.title) {
filenameSuffix = this.title.text;
}
a.download = 'redash_charts_'+filenameSuffix+'.png';
document.body.appendChild(a);
a.click();
a.remove();
}
}
]
}
@@ -287,8 +305,28 @@
scope.chart.series[0].remove(false);
};
// We check either for true or undefined for backward compatibility.
var series = scope.series;
if (chartOptions['sortX'] === true || chartOptions['sortX'] === undefined) {
var seriesCopy = [];
_.each(series, function (s) {
// make a copy of series data, so we don't override original.
var fieldName = 'x';
if (s.data.length > 0 && _.has(s.data[0], 'name')) {
fieldName = 'name';
};
var sorted = _.extend({}, s, {data: _.sortBy(s.data, fieldName)});
seriesCopy.push(sorted);
});
series = seriesCopy;
}
if (!('xAxis' in chartOptions && 'type' in chartOptions['xAxis'])) {
if (scope.series.length > 0 && _.some(scope.series[0].data, function (p) {
if (series.length > 0 && _.some(series[0].data, function (p) {
return (angular.isString(p.x) || angular.isDefined(p.name));
})) {
chartOptions['xAxis'] = chartOptions['xAxis'] || {};
@@ -300,13 +338,13 @@
}
if (chartOptions['xAxis']['type'] == 'category' || chartOptions['series']['type']=='pie') {
if (!angular.isDefined(scope.series[0].data[0].name)) {
if (!angular.isDefined(series[0].data[0].name)) {
// We need to make sure that for each category, each series has a value.
var categories = _.union.apply(this, _.map(scope.series, function (s) {
var categories = _.union.apply(this, _.map(series, function (s) {
return _.pluck(s.data, 'x')
}));
_.each(scope.series, function (s) {
_.each(series, function (s) {
// TODO: move this logic to Query#getChartData
var yValues = _.groupBy(s.data, 'x');
@@ -317,11 +355,6 @@
}
});
if (categories.length == 1) {
newData = _.sortBy(newData, 'y').reverse();
}
;
s.data = newData;
});
}
@@ -329,7 +362,7 @@
scope.chart.counters.color = 0;
_.each(scope.series, function (s) {
_.each(series, function (s) {
// here we override the series with the visualization config
s = _.extend(s, chartOptions['series']);

View File

@@ -91,7 +91,7 @@
//insert columns from column config
//TODO add a way to clean all columns
scope.$watch('columnCollection', function (oldValue, newValue) {
scope.$watchCollection('columnCollection', function (oldValue, newValue) {
if (scope.columnCollection) {
scope.columns.length = 0;
for (var i = 0, l = scope.columnCollection.length; i < l; i++) {
@@ -205,11 +205,10 @@
column = scope.column,
row = scope.dataRow,
format = filter('format'),
getter = parse(column.map),
childScope;
//can be useful for child directives
scope.formatedValue = format(getter(row), column.formatFunction, column.formatParameter);
scope.formatedValue = format(row[column.map], column.formatFunction, column.formatParameter);
function defaultContent() {
//clear content
@@ -267,12 +266,11 @@
replace: true,
link: function (scope, element, attrs, ctrl) {
var form = angular.element(element.children()[1]),
input = angular.element(form.children()[0]),
getter = parse(scope.column.map);
input = angular.element(form.children()[0]);
//init values
scope.isEditMode = false;
scope.value = getter(scope.row);
scope.value = scope.row[scope.column.map];
scope.submit = function () {
@@ -285,7 +283,7 @@
};
scope.toggleEditMode = function () {
scope.value = getter(scope.row);
scope.value = scope.row[scope.column.map];
scope.isEditMode = scope.isEditMode !== true;
};
@@ -595,13 +593,11 @@
*/
this.updateDataRow = function (dataRow, propertyName, newValue) {
var index = scope.displayedCollection.indexOf(dataRow),
getter = parse(propertyName),
setter = getter.assign,
oldValue;
if (index !== -1) {
oldValue = getter(scope.displayedCollection[index]);
oldValue = scope.displayedCollection[index][propertyName];
if (oldValue !== newValue) {
setter(scope.displayedCollection[index], newValue);
scope.displayedCollection[index][propertyName] = newValue;
scope.$emit('updateDataRow', {item: scope.displayedCollection[index]});
}
}

View File

@@ -214,10 +214,6 @@
}
});
_.each(series, function (series) {
series.data = _.sortBy(series.data, 'x');
});
return _.values(series);
};
@@ -247,26 +243,9 @@
return parts[0];
};
var charConversionMap = {
'__pct': /%/g,
'_': / /g,
'__qm': /\?/g,
'__brkt': /[\(\)\[\]]/g,
'__dash': /-/g,
'__amp': /&/g,
'__sl': /\//g,
'__fsl': /\\/g,
};
QueryResult.prototype.getColumnCleanName = function (column) {
var name = this.getColumnNameWithoutType(column);
if (name != '') {
_.each(charConversionMap, function(regex, replacement) {
name = name.replace(regex, replacement);
});
}
return name;
}
@@ -329,7 +308,7 @@
this.filters = filters;
}
var refreshStatus = function (queryResult, query, ttl) {
var refreshStatus = function (queryResult, query) {
Job.get({'id': queryResult.job.id}, function (response) {
queryResult.update(response);
@@ -339,25 +318,9 @@
});
} else if (queryResult.getStatus() != "failed") {
$timeout(function () {
refreshStatus(queryResult, query, ttl);
refreshStatus(queryResult, query);
}, 3000);
}
}, function() {
var upHandler = function() {
Offline.off('up', upHandler);
console.log('trying again');
refreshStatus(queryResult, query, ttl);
};
var downHandler = function() {
console.log('2 handling down case');
Offline.on('up', upHandler);
Offline.off('down', downHandler);
};
Offline.on('down', downHandler);
Offline.check();
})
}
@@ -375,14 +338,14 @@
return this.deferred.promise;
}
QueryResult.get = function (data_source_id, query, ttl) {
QueryResult.get = function (data_source_id, query, maxAge) {
var queryResult = new QueryResult();
QueryResultResource.post({'data_source_id': data_source_id, 'query': query, 'ttl': ttl}, function (response) {
QueryResultResource.post({'data_source_id': data_source_id, 'query': query, 'max_age': maxAge}, function (response) {
queryResult.update(response);
if ('job' in response) {
refreshStatus(queryResult, query, ttl);
refreshStatus(queryResult, query);
}
});
@@ -410,7 +373,7 @@
return new Query({
query: "",
name: "New Query",
ttl: -1,
schedule: null,
user: currentUser
});
};
@@ -434,10 +397,19 @@
return '/queries/' + this.id + '/source';
};
Query.prototype.getQueryResult = function (ttl, parameters) {
if (ttl == undefined) {
ttl = this.ttl;
}
Query.prototype.hasDailySchedule = function() {
return (this.schedule && this.schedule.match(/\d\d:\d\d/) !== null);
}
Query.prototype.scheduleInLocalTime = function() {
var parts = this.schedule.split(':');
return moment.utc().hour(parts[0]).minute(parts[1]).local().format('HH:mm');
}
Query.prototype.getQueryResult = function (maxAge, parameters) {
// if (ttl == undefined) {
// ttl = this.ttl;
// }
var queryText = this.query;
@@ -463,16 +435,16 @@
this.latest_query_data_id = null;
}
if (this.latest_query_data && ttl != 0) {
if (this.latest_query_data && maxAge != 0) {
if (!this.queryResult) {
this.queryResult = new QueryResult({'query_result': this.latest_query_data});
}
} else if (this.latest_query_data_id && ttl != 0) {
} else if (this.latest_query_data_id && maxAge != 0) {
if (!this.queryResult) {
this.queryResult = QueryResult.getById(this.latest_query_data_id);
}
} else if (this.data_source_id) {
this.queryResult = QueryResult.get(this.data_source_id, queryText, ttl);
this.queryResult = QueryResult.get(this.data_source_id, queryText, maxAge);
}
return this.queryResult;
@@ -485,11 +457,19 @@
Query.prototype.getParameters = function() {
var parts = Mustache.parse(this.query);
var parameters = [];
_.each(parts, function(part) {
if (part[0] == 'name') {
parameters.push(part[1]);
}
});
var collectParams = function(parts) {
parameters = [];
_.each(parts, function(part) {
if (part[0] == 'name' || part[0] == '&') {
parameters.push(part[1]);
} else if (part[0] == '#') {
parameters = _.union(parameters, collectParams(part[4]));
}
});
return parameters;
};
parameters = collectParams(parts);
return parameters;
}
@@ -500,7 +480,12 @@
var DataSource = function ($resource) {
var DataSourceResource = $resource('/api/data_sources/:id', {id: '@id'}, {'get': {'method': 'GET', 'cache': true, 'isArray': true}});
var actions = {
'get': {'method': 'GET', 'cache': true, 'isArray': true},
'getSchema': {'method': 'GET', 'cache': true, 'isArray': true, 'url': '/api/data_sources/:id/schema'}
};
var DataSourceResource = $resource('/api/data_sources/:id', {id: '@id'}, actions);
return DataSourceResource;
}

View File

@@ -147,6 +147,10 @@
scope.stacking = scope.visualization.options.series.stacking;
}
if (scope.visualization.options.sortX === undefined) {
scope.visualization.options.sortX = true;
}
var refreshSeries = function() {
scope.series = _.map(scope.queryResult.getChartData(scope.visualization.options.columnMapping), function (s) { return s.name; });

View File

@@ -78,15 +78,15 @@
};
} else if (columnType === 'date') {
columnDefinition.formatFunction = function (value) {
if (value) {
return value.format("DD/MM/YY");
if (value && moment.isMoment(value)) {
return value.toDate().toLocaleDateString();
}
return value;
};
} else if (columnType === 'datetime') {
columnDefinition.formatFunction = function (value) {
if (value) {
return value.format("DD/MM/YY HH:mm");
if (value && moment.isMoment(value)) {
return value.toDate().toLocaleString();
}
return value;
};

View File

@@ -156,7 +156,7 @@ li.widget:hover {
/* CodeMirror */
.CodeMirror {
border: 1px solid #eee;
height: auto;
/*height: auto;*/
min-height: 300px;
margin-bottom: 10px;
}
@@ -308,6 +308,18 @@ counter-renderer counter-name {
height: 100%;
}
.schema-browser {
height: 300px;
overflow: scroll;
}
div.table-name {
overflow: scroll;
text-overflow: ellipsis;
white-space: nowrap;
cursor: pointer;
}
/*
bootstrap's hidden-xs class adds display:block when not hidden
use this class when you need to keep the original display value

View File

@@ -24,10 +24,6 @@
<span class="badge">{{manager.outdated_queries_count}}</span>
Outdated Queries Count
</li>
<li class="list-group-item" ng-if="flowerUrl">
<a href="/admin/workers">Workers' Status</a>
</li>
</ul>
<ul class="list-group col-lg-4">
<li class="list-group-item active">Queues</li>

View File

@@ -1,3 +0,0 @@
<div class="container-fluid iframe-container">
<iframe src="{{flowerUrl}}" style="width:100%; height:100%; background-color:transparent;"></iframe>
</div>

View File

@@ -37,7 +37,7 @@
<div class="panel-footer">
<span class="label label-default"
tooltip="next update {{nextUpdateTime}} (query runtime: {{queryResult.getRuntime() | durationHumanize}})"
tooltip="(query runtime: {{queryResult.getRuntime() | durationHumanize}})"
tooltip-placement="bottom">Updated: <span am-time-ago="queryResult.getUpdatedAt()"></span></span>
<span class="pull-right">

View File

@@ -59,9 +59,9 @@
<hr>
<div class="row">
<div class="col-lg-12">
<div ng-show="sourceMode">
<div class="row" ng-if="sourceMode">
<div ng-class="editorSize">
<div>
<p>
<button type="button" class="btn btn-primary btn-xs" ng-disabled="queryExecuting" ng-click="executeQuery()">
<span class="glyphicon glyphicon-play"></span> Execute
@@ -77,21 +77,43 @@
</button>
</span>
</p>
</div>
<!-- code editor -->
<div ng-show="sourceMode">
<p>
<query-editor query="query" lock="queryExecuting"></query-editor>
<query-editor query="query" schema="schema" lock="queryFormatting"></query-editor>
</p>
<hr>
</div>
</div>
<div class="col-md-3" ng-show="hasSchema">
<div>
<input type="text" placeholder="Search schema..." class="form-control" ng-model="schemaFilter">
</div>
<div class="schema-browser">
<div ng-repeat="table in schema | filter:schemaFilter">
<div class="table-name" ng-click="table.collapsed = !table.collapsed">
<i class="fa fa-table"></i> <strong><span title="{{table.name}}">{{table.name}}</span></strong>
</div>
<div collapse="table.collapsed">
<div ng-repeat="column in table.columns | filter:schemaFilter" style="padding-left:16px;">{{column}}</div>
</div>
</div>
</div>
</div>
</div>
</div>
<hr ng-if="sourceMode">
<div class="row">
<div class="col-lg-3 rd-hidden-xs">
<p>
<span class="glyphicon glyphicon-user"></span>
<span class="text-muted">Created By </span>
<strong>{{query.user.name}}</strong>
</p>
<p ng-if="query.last_modified_by && query.user.id != query.last_modified_by.id">
<span class="glyphicon glyphicon-user"></span>
<span class="text-muted">Last Modified By </span>
<strong>{{query.last_modified_by.name}}</strong>
</p>
<p>
<span class="glyphicon glyphicon-time"></span>
<span class="text-muted">Last update </span>
@@ -99,12 +121,6 @@
<rd-time-ago value="queryResult.query_result.retrieved_at"></rd-time-ago>
</strong>
</p>
<p>
<span class="glyphicon glyphicon-user"></span>
<span class="text-muted">Created By </span>
<strong ng-hide="isQueryOwner">{{query.user.name}}</strong>
<strong ng-show="isQueryOwner">You</strong>
</p>
<p>
<span class="glyphicon glyphicon-play"></span>
<span class="text-muted">Runtime </span>
@@ -117,8 +133,8 @@
</p>
<p>
<span class="glyphicon glyphicon-refresh"></span>
<span class="text-muted">Refresh Interval</span>
<query-refresh-select></query-refresh-select>
<span class="text-muted">Refresh Schedule</span>
<a href="" ng-click="openScheduleForm()">{{query.schedule | scheduleHumanize}}</a>
</p>
<p>
@@ -139,7 +155,11 @@
ng-show="!query.is_archived && query.id != undefined && (isQueryOwner || currentUser.hasPermission('admin'))">
<i class="fa fa-archive" title="Archive Query"></i>
</a>
<button class="btn btn-default btn-sm" ng-show="query.id != undefined" ng-click="showApiKey()">
<i class="fa fa-key" title="Show API Key"></i>
</button>
<div class="modal fade" id="archive-confirmation-modal" tabindex="-1" role="dialog" aria-labelledby="archiveConfirmationModal" aria-hidden="true">
<div class="modal-dialog">
<div class="modal-content">
@@ -182,7 +202,7 @@
<rd-tab tab-id="{{vis.id}}" name="{{vis.name}}" ng-if="vis.type!='TABLE'" ng-repeat="vis in query.visualizations">
<span class="remove" ng-click="deleteVisualization($event, vis)" ng-show="canEdit"> &times;</span>
</rd-tab>
<rd-tab tab-id="add" name="&plus; New" removeable="true" ng-show="canEdit"></rd-tab>
<rd-tab tab-id="add" name="&plus; New Visualization" removeable="true" ng-show="canEdit"></rd-tab>
<li ng-if="!sourceMode" class="rd-tab-btn"><button class="btn btn-sm btn-default" ng-click="executeQuery()" ng-disabled="queryExecuting" title="Refresh Dataset"><span class="glyphicon glyphicon-refresh"></span></button></li>
</ul>
</div>

View File

@@ -0,0 +1,18 @@
<div class="modal-header">
<button type="button" class="close" aria-label="Close" ng-click="close()"><span aria-hidden="true">&times;</span></button>
<h4 class="modal-title">Refresh Schedule</h4>
</div>
<div class="modal-body">
<div class="radio">
<label>
<input type="radio" value="periodic" ng-model="refreshType">
<query-refresh-select ng-disabled="refreshType != 'periodic'"></query-refresh-select>
</label>
</div>
<div class="radio">
<label>
<input type="radio" value="daily" ng-model="refreshType">
<query-time-picker ng-disabled="refreshType != 'daily'"></query-time-picker>
</label>
</div>
</div>

View File

@@ -46,6 +46,14 @@
placeholder="Auto">
</div>
</div>
<div class="form-group">
<label class="control-label col-sm-2">Sort X Values</label>
<div class="col-sm-10">
<input name="sortX" type="checkbox" class="form-control"
ng-model="visualization.options.sortX">
</div>
</div>
</div>
</div>

View File

@@ -12,8 +12,7 @@
"es5-shim": "2.0.8",
"angular-moment": "0.2.0",
"moment": "2.1.0",
"angular-ui-bootstrap": "0.5.0",
"angular-ui-codemirror": "0.0.5",
"codemirror": "4.8.0",
"highcharts": "3.0.10",
"underscore": "1.5.1",
"pivottable": "~1.1.1",
@@ -29,7 +28,8 @@
"angular-ui-select": "0.8.2",
"font-awesome": "~4.2.0",
"mustache": "~1.0.0",
"offline": "~0.7.11"
"canvg": "gabelerner/canvg",
"angular-ui-bootstrap-bower": "~0.12.1"
},
"devDependencies": {
"angular-mocks": "1.2.18",

View File

@@ -4,8 +4,9 @@ import redis
from statsd import StatsClient
from redash import settings
from redash.query_runner import import_query_runners
__version__ = '0.5.0'
__version__ = '0.6.0'
def setup_logging():
@@ -31,4 +32,6 @@ def create_redis_connection():
setup_logging()
redis_connection = create_redis_connection()
statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX)
statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX)
import_query_runners(settings.QUERY_RUNNERS)

View File

@@ -5,7 +5,7 @@ import time
import logging
from flask import request, make_response, redirect, url_for
from flask.ext.login import LoginManager, login_user, current_user
from flask.ext.login import LoginManager, login_user, current_user, logout_user
from redash import models, settings, google_oauth
@@ -23,9 +23,38 @@ def sign(key, path, expires):
return h.hexdigest()
class HMACAuthentication(object):
@staticmethod
def api_key_authentication():
class Authentication(object):
def verify_authentication(self):
return False
def required(self, fn):
@functools.wraps(fn)
def decorated(*args, **kwargs):
if current_user.is_authenticated() or self.verify_authentication():
return fn(*args, **kwargs)
return make_response(redirect(url_for("login", next=request.url)))
return decorated
class ApiKeyAuthentication(Authentication):
def verify_authentication(self):
api_key = request.args.get('api_key')
query_id = request.view_args.get('query_id', None)
if query_id and api_key:
query = models.Query.get(models.Query.id == query_id)
if query.api_key and api_key == query.api_key:
login_user(models.ApiUser(query.api_key), remember=False)
return True
return False
class HMACAuthentication(Authentication):
def verify_authentication(self):
signature = request.args.get('signature')
expires = float(request.args.get('expires') or 0)
query_id = request.view_args.get('query_id', None)
@@ -41,22 +70,14 @@ class HMACAuthentication(object):
return False
def required(self, fn):
@functools.wraps(fn)
def decorated(*args, **kwargs):
if current_user.is_authenticated():
return fn(*args, **kwargs)
if self.api_key_authentication():
return fn(*args, **kwargs)
return make_response(redirect(url_for("login", next=request.url)))
return decorated
@login_manager.user_loader
def load_user(user_id):
# If the user was previously logged in as api user, the user_id will be the api key and will raise an exception as
# it can't be casted to int.
if isinstance(user_id, basestring) and not user_id.isdigit():
return None
return models.User.select().where(models.User.id == user_id).first()
@@ -66,4 +87,13 @@ def setup_authentication(app):
app.secret_key = settings.COOKIE_SECRET
app.register_blueprint(google_oauth.blueprint)
return HMACAuthentication()
if settings.AUTH_TYPE == 'hmac':
auth = HMACAuthentication()
elif settings.AUTH_TYPE == 'api_key':
auth = ApiKeyAuthentication()
else:
logger.warning("Unknown authentication type ({}). Using default (HMAC).".format(settings.AUTH_TYPE))
auth = HMACAuthentication()
return auth

View File

@@ -1,6 +1,3 @@
from flask import make_response
from functools import update_wrapper
ONE_YEAR = 60 * 60 * 24 * 365.25
headers = {

View File

@@ -1,5 +1,8 @@
import json
import click
from flask.ext.script import Manager
from redash import models
from redash.query_runner import query_runners, validate_configuration
manager = Manager(help="Data sources management commands.")
@@ -13,11 +16,70 @@ def list():
print "Id: {}\nName: {}\nType: {}\nOptions: {}".format(ds.id, ds.name, ds.type, ds.options)
def validate_data_source_type(type):
if type not in query_runners.keys():
print "Error: the type \"{}\" is not supported (supported types: {}).".format(type, ", ".join(query_runners.keys()))
exit()
def validate_data_source_options(type, options):
if not validate_configuration(type, options):
print "Error: invalid configuration."
exit()
@manager.command
def new(name, type, options):
def new(name=None, type=None, options=None):
"""Create new data source"""
# TODO: validate it's a valid type and in the future, validate the options.
if name is None:
name = click.prompt("Name")
if type is None:
print "Select type:"
for i, query_runner_name in enumerate(query_runners.keys()):
print "{}. {}".format(i+1, query_runner_name)
idx = 0
while idx < 1 or idx > len(query_runners.keys()):
idx = click.prompt("[{}-{}]".format(1, len(query_runners.keys())), type=int)
type = query_runners.keys()[idx-1]
else:
validate_data_source_type(type)
if options is None:
query_runner = query_runners[type]
schema = query_runner.configuration_schema()
types = {
'string': unicode,
'number': int,
'boolean': bool
}
options_obj = {}
for k, prop in schema['properties'].iteritems():
required = k in schema.get('required', [])
default_value = "<<DEFAULT_VALUE>>"
if required:
default_value = None
prompt = prop.get('title', k.capitalize())
if required:
prompt = "{} (required)".format(prompt)
else:
prompt = "{} (optional)".format(prompt)
value = click.prompt(prompt, default=default_value, type=types[prop['type']], show_default=False)
if value != default_value:
options_obj[k] = value
options = json.dumps(options_obj)
validate_data_source_options(type, options)
print "Creating {} data source ({}) with options:\n{}".format(type, name, options)
data_source = models.DataSource.create(name=name,
type=type,
options=options)
@@ -49,7 +111,14 @@ def update_attr(obj, attr, new_value):
def edit(name, new_name=None, options=None, type=None):
"""Edit data source settings (name, options, type)"""
try:
if type is not None:
validate_data_source_type(type)
data_source = models.DataSource.get(models.DataSource.name==name)
if options is not None:
validate_data_source_options(data_source.type, options)
update_attr(data_source, "name", new_name)
update_attr(data_source, "type", type)
update_attr(data_source, "options", options)

View File

@@ -7,9 +7,8 @@ but this is only due to configuration issues and temporary.
import csv
import hashlib
import json
import numbers
import cStringIO
import datetime
import time
import logging
from flask import render_template, send_from_directory, make_response, request, jsonify, redirect, \
@@ -23,6 +22,7 @@ from redash.wsgi import app, auth, api
from redash.tasks import QueryTask, record_event
from redash.cache import headers as cache_headers
from redash.permissions import require_permission
from redash.query_runner import query_runners, validate_configuration
@app.route('/ping', methods=['GET'])
@@ -52,8 +52,7 @@ def index(**kwargs):
}
features = {
'clientSideMetrics': settings.CLIENT_SIDE_METRICS,
'flowerUrl': settings.CELERY_FLOWER_URL
'clientSideMetrics': settings.CLIENT_SIDE_METRICS
}
return render_template("index.html", user=json.dumps(user), name=settings.NAME,
@@ -109,7 +108,7 @@ def status_api():
manager_status = redis_connection.hgetall('redash:status')
status['manager'] = manager_status
status['manager']['outdated_queries_count'] = models.Query.outdated_queries().count()
status['manager']['outdated_queries_count'] = len(models.Query.outdated_queries())
queues = {}
for ds in models.DataSource.select():
@@ -136,6 +135,24 @@ def format_sql_query():
return sqlparse.format(query, reindent=True, keyword_case='upper')
@app.route('/queries/new', methods=['POST'])
@auth.required
def create_query_route():
    """Create a query from posted form data and redirect (303) to its page."""
    query_text = request.form.get('query', None)
    data_source_id = request.form.get('data_source_id', None)

    # Both form fields are mandatory.
    if query_text is None or data_source_id is None:
        abort(400)

    new_query = models.Query.create(name="New Query",
                                    query=query_text,
                                    data_source=data_source_id,
                                    user=current_user._get_current_object(),
                                    schedule=None)

    return redirect('/queries/{}'.format(new_query.id), 303)
class BaseResource(Resource):
decorators = [auth.required]
@@ -174,14 +191,46 @@ class MetricsAPI(BaseResource):
api.add_resource(MetricsAPI, '/api/metrics/v1/send', endpoint='metrics')
class DataSourceTypeListAPI(BaseResource):
    """Admin-only listing of the registered query runner types."""

    @require_permission("admin")
    def get(self):
        return [runner.to_dict() for runner in query_runners.values()]

api.add_resource(DataSourceTypeListAPI, '/api/data_sources/types', endpoint='data_source_types')
class DataSourceListAPI(BaseResource):
    """List existing data sources; creating one requires admin permission."""

    def get(self):
        return [ds.to_dict() for ds in models.DataSource.all()]

    @require_permission("admin")
    def post(self):
        req = request.get_json(True)

        # Reject requests missing any mandatory field.
        for field in ('options', 'name', 'type'):
            if field not in req:
                abort(400)

        # Reject configurations that fail the runner's schema validation.
        if not validate_configuration(req['type'], req['options']):
            abort(400)

        datasource = models.DataSource.create(name=req['name'], type=req['type'], options=req['options'])

        return datasource.to_dict()

api.add_resource(DataSourceListAPI, '/api/data_sources', endpoint='data_sources')
class DataSourceSchemaAPI(BaseResource):
    """Expose a data source's (cached) table/column schema."""

    def get(self, data_source_id):
        source = models.DataSource.get_by_id(data_source_id)
        return source.get_schema()

api.add_resource(DataSourceSchemaAPI, '/api/data_sources/<data_source_id>/schema')
class DashboardRecentAPI(BaseResource):
    def get(self):
        # Up to 20 dashboards the current user interacted with most recently.
        return [d.to_dict() for d in models.Dashboard.recent(current_user.id).limit(20)]
@@ -295,7 +344,7 @@ class QueryListAPI(BaseResource):
@require_permission('create_query')
def post(self):
query_def = request.get_json(force=True)
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data']:
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'last_modified_by']:
query_def.pop(field, None)
query_def['user'] = self.current_user
@@ -303,8 +352,6 @@ class QueryListAPI(BaseResource):
query = models.Query(**query_def)
query.save()
query.create_default_visualizations()
return query.to_dict()
@require_permission('view_query')
@@ -318,7 +365,7 @@ class QueryAPI(BaseResource):
query = models.Query.get_by_id(query_id)
query_def = request.get_json(force=True)
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'user']:
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'user', 'last_modified_by']:
query_def.pop(field, None)
if 'latest_query_data_id' in query_def:
@@ -327,6 +374,9 @@ class QueryAPI(BaseResource):
if 'data_source_id' in query_def:
query_def['data_source'] = query_def.pop('data_source_id')
query_def['last_modified_by'] = self.current_user
# TODO: use #save() with #dirty_fields.
models.Query.update_instance(query_id, **query_def)
query = models.Query.get_by_id(query_id)
@@ -426,10 +476,12 @@ class QueryResultListAPI(BaseResource):
activity=params['query']
).save()
if params['ttl'] == 0:
max_age = int(params['max_age'])
if max_age == 0:
query_result = None
else:
query_result = models.QueryResult.get_latest(params['data_source_id'], params['query'], int(params['ttl']))
query_result = models.QueryResult.get_latest(params['data_source_id'], params['query'], max_age)
if query_result:
return {'query_result': query_result.to_dict()}
@@ -449,10 +501,6 @@ class QueryResultAPI(BaseResource):
writer.writer = utils.UnicodeWriter(s)
writer.writeheader()
for row in query_data['rows']:
for k, v in row.iteritems():
if isinstance(v, numbers.Number) and (v > 1000 * 1000 * 1000 * 100):
row[k] = datetime.datetime.fromtimestamp(v/1000.0)
writer.writerow(row)
headers = {'Content-Type': "text/csv; charset=UTF-8"}
@@ -470,6 +518,24 @@ class QueryResultAPI(BaseResource):
query_result = models.QueryResult.get_by_id(query_result_id)
if query_result:
if isinstance(self.current_user, models.ApiUser):
event = {
'user_id': None,
'action': 'api_get',
'timestamp': int(time.time()),
'api_key': self.current_user.id,
'file_type': filetype
}
if query_id:
event['object_type'] = 'query'
event['object_id'] = query_id
else:
event['object_type'] = 'query_result'
event['object_id'] = query_result_id
record_event.delay(event)
if filetype == 'json':
data = json.dumps({'query_result': query_result.to_dict()}, cls=utils.JSONEncoder)
return make_response(data, 200, cache_headers)

View File

@@ -1,34 +0,0 @@
import json
def get_query_runner(connection_type, connection_string):
    """Build the query-runner callable for *connection_type*.

    Runner modules are imported lazily so a missing optional dependency only
    affects the data source that needs it.  Unrecognised types fall back to
    the PostgreSQL runner.
    """
    if connection_type == 'mysql':
        from redash.data import query_runner_mysql
        return query_runner_mysql.mysql(connection_string)

    if connection_type == 'graphite':
        from redash.data import query_runner_graphite
        connection_params = json.loads(connection_string)
        # Requests wants auth as a tuple (or None for no auth).
        if connection_params['auth']:
            connection_params['auth'] = tuple(connection_params['auth'])
        else:
            connection_params['auth'] = None
        return query_runner_graphite.graphite(connection_params)

    if connection_type == 'bigquery':
        from redash.data import query_runner_bigquery
        return query_runner_bigquery.bigquery(json.loads(connection_string))

    if connection_type == 'script':
        from redash.data import query_runner_script
        return query_runner_script.script(connection_string)

    if connection_type == 'url':
        from redash.data import query_runner_url
        return query_runner_url.url(connection_string)

    if connection_type == "mongo":
        from redash.data import query_runner_mongodb
        return query_runner_mongodb.mongodb(json.loads(connection_string))

    # Default: treat anything else as a PostgreSQL connection string.
    from redash.data import query_runner_pg
    return query_runner_pg.pg(connection_string)

View File

@@ -1,46 +0,0 @@
"""
QueryRunner for Graphite.
"""
import json
import datetime
import requests
from redash.utils import JSONEncoder
def graphite(connection_params):
    """Return a query runner bound to a Graphite server.

    *connection_params* provides 'url', 'auth' and 'verify'.  The query text
    is a newline-separated list of render parameters.
    """
    def transform_result(response):
        # Flatten Graphite's per-series datapoint lists into redash rows.
        columns = [{'name': 'Time::x'}, {'name': 'value::y'}, {'name': 'name::series'}]
        rows = []
        for series in response.json():
            for values in series['datapoints']:
                timestamp = datetime.datetime.fromtimestamp(int(values[1]))
                rows.append({'Time::x': timestamp, 'name::series': series['target'], 'value::y': values[0]})

        data = {'columns': columns, 'rows': rows}
        return json.dumps(data, cls=JSONEncoder)

    def query_runner(query):
        # Each line of the query becomes one &-joined render parameter.
        base_url = "%s/render?format=json&" % connection_params['url']
        url = "%s%s" % (base_url, "&".join(query.split("\n")))

        data = None
        error = None
        try:
            response = requests.get(url, auth=connection_params['auth'],
                                    verify=connection_params['verify'])

            if response.status_code == 200:
                data = transform_result(response)
            else:
                error = "Failed getting results (%d)" % response.status_code
        except Exception as ex:
            data = None
            error = ex.message

        return data, error

    query_runner.annotate_query = False

    return query_runner

View File

@@ -1,242 +0,0 @@
import datetime
import logging
import json
import sys
import re
import time
from redash.utils import JSONEncoder
try:
import pymongo
from bson.objectid import ObjectId
from bson.son import SON
except ImportError:
print "Missing dependencies. Please install pymongo."
print "You can use pip: pip install pymongo"
raise
# Map Python/BSON value types to redash column type names.
TYPES_MAP = {
    ObjectId : "string",
    str : "string",
    unicode : "string",
    int : "integer",
    long : "integer",
    float : "float",
    bool : "boolean",
    datetime.datetime: "datetime",
}

# Matches ISODate("...") literals embedded in the query JSON so they can be
# converted to real datetime objects before being sent to MongoDB.
date_regex = re.compile("ISODate\(\"(.*)\"\)", re.IGNORECASE)
# Simple query example:
#
# {
# "collection" : "my_collection",
# "query" : {
# "date" : {
# "$gt" : "ISODate(\"2015-01-15 11:41\")",
# },
# "type" : 1
# },
# "fields" : {
# "_id" : 1,
# "name" : 2
# },
# "sort" : [
# {
# "name" : "date",
# "direction" : -1
# }
# ]
#
# }
#
#
# Aggregation
# ===========
# Uses a syntax similar to the one used in PyMongo, however to support the
# correct order of sorting, it uses a regular list for the "$sort" operation
# that converts into a SON (sorted dictionary) object before execution.
#
# Aggregation query example:
#
# {
# "collection" : "things",
# "aggregate" : [
# {
# "$unwind" : "$tags"
# },
# {
# "$group" : {
# {
# "_id" : "$tags",
# "count" : { "$sum" : 1 }
# }
# }
# },
# {
# "$sort" : [
# {
# "name" : "count",
# "direction" : -1
# },
# {
# "name" : "_id",
# "direction" : -1
# }
# ]
# }
# ]
# }
#
#
def mongodb(connection_string):
    """Return a query runner bound to a MongoDB server.

    *connection_string* is a dict (parsed JSON) expected to contain
    "connectionString", "dbName" and optionally "replicaSetName".  The
    returned runner accepts a JSON query document (either a find-style
    "query" or an "aggregate" pipeline — see the module-level examples)
    and returns a ``(json_data, error)`` pair.
    """
    def _get_column_by_name(columns, column_name):
        # Linear scan; result sets are small so this is acceptable.
        for c in columns:
            if "name" in c and c["name"] == column_name:
                return c

        return None

    def _convert_date(q, field_name):
        # Replace an ISODate("...") string literal with a real datetime.
        # Two accepted layouts: "YYYY-MM-DD" and "YYYY-MM-DD HH:MM".
        m = date_regex.findall(q[field_name])
        if len(m) > 0:
            if q[field_name].find(":") == -1:
                q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d")))
            else:
                q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d %H:%M")))

    def query_runner(query):
        # -- Validate connection configuration ------------------------------
        if not "dbName" in connection_string or not connection_string["dbName"]:
            return None, "dbName is missing from connection string JSON or is empty"

        db_name = connection_string["dbName"]

        if not "connectionString" in connection_string or not connection_string["connectionString"]:
            return None, "connectionString is missing from connection string JSON or is empty"

        is_replica_set = True if "replicaSetName" in connection_string and connection_string["replicaSetName"] else False

        if is_replica_set:
            if not connection_string["replicaSetName"]:
                return None, "replicaSetName is set in the connection string JSON but is empty"

            db_connection = pymongo.MongoReplicaSetClient(connection_string["connectionString"], replicaSet=connection_string["replicaSetName"])
        else:
            db_connection = pymongo.MongoClient(connection_string["connectionString"])

        if db_name not in db_connection.database_names():
            return None, "Unknown database name '%s'" % db_name

        db = db_connection[db_name]

        logging.debug("mongodb connection string: %s", connection_string)
        logging.debug("mongodb got query: %s", query)

        # -- Parse and validate the query document --------------------------
        try:
            query_data = json.loads(query)
        except:
            # Any parse failure is reported the same way, hence the bare except.
            return None, "Invalid query format. The query is not a valid JSON."

        if "query" in query_data and "aggregate" in query_data:
            return None, "'query' and 'aggregate' sections cannot be used at the same time"

        collection = None
        if not "collection" in query_data:
            return None, "'collection' must be set"
        else:
            collection = query_data["collection"]

        # Convert ISODate() strings in the find-query (one nesting level deep).
        q = None
        if "query" in query_data:
            q = query_data["query"]
            for k in q:
                if q[k] and type(q[k]) in [str, unicode]:
                    logging.debug(q[k])
                    _convert_date(q, k)
                elif q[k] and type(q[k]) is dict:
                    for k2 in q[k]:
                        if type(q[k][k2]) in [str, unicode]:
                            _convert_date(q[k], k2)

        f = None

        aggregate = None
        if "aggregate" in query_data:
            aggregate = query_data["aggregate"]
            # Convert "$sort" lists into SON so the sort key order is preserved
            # (a plain dict would lose it).
            for step in aggregate:
                if "$sort" in step:
                    sort_list = []
                    for sort_item in step["$sort"]:
                        sort_list.append((sort_item["name"], sort_item["direction"]))

                    step["$sort"] = SON(sort_list)

        if aggregate:
            # Aggregation pipelines need no further preprocessing here.
            pass
        else:
            s = None
            if "sort" in query_data and query_data["sort"]:
                s = []
                for field in query_data["sort"]:
                    s.append((field["name"], field["direction"]))

        if "fields" in query_data:
            f = query_data["fields"]

        columns = []
        rows = []

        error = None
        json_data = None

        # -- Execute ---------------------------------------------------------
        cursor = None
        if q or (not q and not aggregate):
            if s:
                cursor = db[collection].find(q, f).sort(s)
            else:
                cursor = db[collection].find(q, f)

            if "skip" in query_data:
                cursor = cursor.skip(query_data["skip"])

            if "limit" in query_data:
                cursor = cursor.limit(query_data["limit"])
        elif aggregate:
            r = db[collection].aggregate(aggregate)
            cursor = r["result"]

        # -- Build the result set -------------------------------------------
        for r in cursor:
            for k in r:
                if _get_column_by_name(columns, k) is None:
                    columns.append({
                        "name": k,
                        "friendly_name": k,
                        "type": TYPES_MAP[type(r[k])] if type(r[k]) in TYPES_MAP else None
                    })

                # Convert ObjectId to string
                if type(r[k]) == ObjectId:
                    r[k] = str(r[k])

            rows.append(r)

        # Order the columns by the "fields" spec when one was given.
        if f:
            ordered_columns = []
            for k in sorted(f, key=f.get):
                ordered_columns.append(_get_column_by_name(columns, k))

            columns = ordered_columns

        data = {
            "columns": columns,
            "rows": rows
        }
        error = None
        json_data = json.dumps(data, cls=JSONEncoder)

        return json_data, error

    query_runner.annotate_query = False
    return query_runner

View File

@@ -1,64 +0,0 @@
"""
QueryRunner is the function that the workers use, to execute queries. This is the Redshift
(PostgreSQL in fact) version, but easily we can write another to support additional databases
(MySQL and others).
Because the worker just pass the query, this can be used with any data store that has some sort of
query language (for example: HiveQL).
"""
import logging
import json
import MySQLdb
import sys
from redash.utils import JSONEncoder
def mysql(connection_string):
    """Return a MySQL query runner bound to *connection_string*.

    The connection string is a ';'-separated list of key=value entries whose
    values are passed positionally to MySQLdb.connect().
    """
    # Drop a trailing ';' so the split below doesn't yield an empty entry.
    if connection_string.endswith(';'):
        connection_string = connection_string[0:-1]

    def query_runner(query):
        # NOTE(review): only the values of the key=value pairs are used, so the
        # entries must appear in the positional order MySQLdb.connect() expects
        # — confirm against the data source configuration format.
        connections_params = [entry.split('=')[1] for entry in connection_string.split(';')]
        connection = MySQLdb.connect(*connections_params, charset="utf8", use_unicode=True)
        cursor = connection.cursor()

        logging.debug("mysql got query: %s", query)

        try:
            cursor.execute(query)

            data = cursor.fetchall()

            # A None description means the statement returned no result set.
            cursor_desc = cursor.description
            if (cursor_desc != None):
                num_fields = len(cursor_desc)  # unused; kept as-is
                column_names = [i[0] for i in cursor.description]

                rows = [dict(zip(column_names, row)) for row in data]
                columns = [{'name': col_name,
                            'friendly_name': col_name,
                            'type': None} for col_name in column_names]

                data = {'columns': columns, 'rows': rows}
                json_data = json.dumps(data, cls=JSONEncoder)
                error = None
            else:
                json_data = None
                error = "No data was returned."

            cursor.close()
        except MySQLdb.Error, e:
            json_data = None
            error = e.args[1]
        except KeyboardInterrupt:
            error = "Query cancelled by user."
            json_data = None
        except Exception as e:
            # Re-raise preserving the original traceback (Python 2 idiom).
            raise sys.exc_info()[1], None, sys.exc_info()[2]
        finally:
            connection.close()

        return json_data, error

    return query_runner

View File

@@ -1,110 +0,0 @@
"""
QueryRunner is the function that the workers use, to execute queries. This is the PostgreSQL
version, but easily we can write another to support additional databases (MySQL and others).
Because the worker just pass the query, this can be used with any data store that has some sort of
query language (for example: HiveQL).
"""
import json
import sys
import select
import logging
import psycopg2
from redash.utils import JSONEncoder
# PostgreSQL type OIDs -> redash column type names (OIDs not listed map to None).
types_map = {
    20: 'integer',
    21: 'integer',
    23: 'integer',
    700: 'float',
    1700: 'float',
    701: 'float',
    16: 'boolean',
    1082: 'date',
    1114: 'datetime',
    1184: 'datetime',
    1014: 'string',
    1015: 'string',
    1008: 'string',
    1009: 'string',
    2951: 'string'
}
def pg(connection_string):
    """Return an asynchronous PostgreSQL query runner bound to *connection_string*."""
    def column_friendly_name(column_name):
        # Placeholder: the friendly name is currently just the raw column name.
        return column_name

    def wait(conn):
        # Drive psycopg2's async protocol: block on the connection's socket
        # until poll() reports POLL_OK.
        while 1:
            try:
                state = conn.poll()
                if state == psycopg2.extensions.POLL_OK:
                    break
                elif state == psycopg2.extensions.POLL_WRITE:
                    select.select([], [conn.fileno()], [])
                elif state == psycopg2.extensions.POLL_READ:
                    select.select([conn.fileno()], [], [])
                else:
                    raise psycopg2.OperationalError("poll() returned %s" % state)
            except select.error:
                raise psycopg2.OperationalError("select.error received")

    def query_runner(query):
        connection = psycopg2.connect(connection_string, async=True)
        wait(connection)

        cursor = connection.cursor()

        try:
            cursor.execute(query)
            wait(connection)

            # While set would be more efficient here, it sorts the data which is not what we want, but due to the small
            # size of the data we can assume it's ok.
            column_names = []
            columns = []
            duplicates_counter = 1

            for column in cursor.description:
                # TODO: this deduplication needs to be generalized and reused in all query runners.
                column_name = column.name
                if column_name in column_names:
                    column_name = column_name + str(duplicates_counter)
                    duplicates_counter += 1

                column_names.append(column_name)

                columns.append({
                    'name': column_name,
                    'friendly_name': column_friendly_name(column_name),
                    'type': types_map.get(column.type_code, None)
                })

            rows = [dict(zip(column_names, row)) for row in cursor]

            data = {'columns': columns, 'rows': rows}
            json_data = json.dumps(data, cls=JSONEncoder)
            error = None
            cursor.close()
        except (select.error, OSError) as e:
            logging.exception(e)
            error = "Query interrupted. Please retry."
            json_data = None
        except psycopg2.DatabaseError as e:
            logging.exception(e)
            json_data = None
            error = e.message
        except KeyboardInterrupt:
            connection.cancel()
            error = "Query cancelled by user."
            json_data = None
        except Exception as e:
            # Re-raise preserving the original traceback (Python 2 idiom).
            raise sys.exc_info()[1], None, sys.exc_info()[2]
        finally:
            connection.close()

        return json_data, error

    return query_runner

View File

@@ -1,51 +0,0 @@
import json
import logging
import sys
import os
import subprocess
# We use subprocess.check_output because we are lazy.
# If someone will really want to run this on Python < 2.7 they can easily update the code to run
# Popen, check the retcodes and other things and read the standard output to a variable.
if not "check_output" in subprocess.__dict__:
print "ERROR: This runner uses subprocess.check_output function which exists in Python 2.7"
def script(connection_string):
def query_runner(query):
try:
json_data = None
error = None
if connection_string is None:
return None, "script execution path is not set. Please reconfigure the data source"
# Poor man's protection against running scripts from output the scripts directory
if connection_string.find("../") > -1:
return None, "Scripts can only be run from the configured scripts directory"
query = query.strip()
script = os.path.join(connection_string, query)
if not os.path.exists(script):
return None, "Script '%s' not found in script directory" % query
output = subprocess.check_output(script, shell=False)
if output != None:
output = output.strip()
if output != "":
return output, None
error = "Error reading output"
except subprocess.CalledProcessError as e:
return None, str(e)
except KeyboardInterrupt:
error = "Query cancelled by user."
json_data = None
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
return json_data, error
query_runner.annotate_query = False
return query_runner

View File

@@ -28,7 +28,7 @@ class Importer(object):
def import_query(self, user, query):
new_query = self._get_or_create(models.Query, query['id'], name=query['name'],
user=user,
ttl=-1,
schedule=None,
query=query['query'],
query_hash=query['query_hash'],
description=query['description'],

View File

@@ -9,17 +9,20 @@ import itertools
import peewee
from passlib.apps import custom_app_context as pwd_context
from playhouse.postgres_ext import ArrayField
from playhouse.postgres_ext import ArrayField, DateTimeTZField, PostgresqlExtDatabase
from flask.ext.login import UserMixin, AnonymousUserMixin
import psycopg2
from redash import utils, settings
from redash import utils, settings, redis_connection
from redash.query_runner import get_query_runner
class Database(object):
def __init__(self):
self.database_config = dict(settings.DATABASE_CONFIG)
self.database_config['register_hstore'] = False
self.database_name = self.database_config.pop('name')
self.database = peewee.PostgresqlDatabase(self.database_name, **self.database_config)
self.database = PostgresqlExtDatabase(self.database_name, **self.database_config)
self.app = None
self.pid = os.getpid()
@@ -59,6 +62,30 @@ class BaseModel(peewee.Model):
def get_by_id(cls, model_id):
return cls.get(cls.id == model_id)
def pre_save(self, created):
    # Hook invoked just before the row is written; subclasses override it to
    # adjust fields (e.g. timestamps).  `created` is True for inserts.
    pass
def post_save(self, created):
    # Handler for post_save operations. Overriding if needed.
    # `created` is True when the save inserted a new row.
    pass
def save(self, *args, **kwargs):
    # A row counts as "created" when insertion is forced or there is no
    # primary key value yet; the flag is passed to the pre/post hooks.
    pk_value = self._get_pk_value()
    created = kwargs.get('force_insert', False) or not bool(pk_value)
    self.pre_save(created)
    super(BaseModel, self).save(*args, **kwargs)
    self.post_save(created)
class ModelTimestampsMixin(BaseModel):
    # Adds created_at/updated_at columns; updated_at is refreshed on every save
    # via the pre_save hook.
    updated_at = DateTimeTZField(default=datetime.datetime.now)
    created_at = DateTimeTZField(default=datetime.datetime.now)

    def pre_save(self, created):
        super(ModelTimestampsMixin, self).pre_save(created)

        self.updated_at = datetime.datetime.now()
class PermissionsCheckMixin(object):
def has_permission(self, permission):
@@ -83,6 +110,9 @@ class ApiUser(UserMixin, PermissionsCheckMixin):
def __init__(self, api_key):
self.id = api_key
def __repr__(self):
return u"<ApiUser: {}>".format(self.id)
@property
def permissions(self):
return ['view_query']
@@ -96,7 +126,7 @@ class Group(BaseModel):
name = peewee.CharField(max_length=100)
permissions = ArrayField(peewee.CharField, default=DEFAULT_PERMISSIONS)
tables = ArrayField(peewee.CharField)
created_at = peewee.DateTimeField(default=datetime.datetime.now)
created_at = DateTimeTZField(default=datetime.datetime.now)
class Meta:
db_table = 'groups'
@@ -114,7 +144,7 @@ class Group(BaseModel):
return unicode(self.id)
class User(BaseModel, UserMixin, PermissionsCheckMixin):
class User(ModelTimestampsMixin, BaseModel, UserMixin, PermissionsCheckMixin):
DEFAULT_GROUPS = ['default']
id = peewee.PrimaryKeyField()
@@ -130,7 +160,9 @@ class User(BaseModel, UserMixin, PermissionsCheckMixin):
return {
'id': self.id,
'name': self.name,
'email': self.email
'email': self.email,
'updated_at': self.updated_at,
'created_at': self.created_at
}
def __init__(self, *args, **kwargs):
@@ -173,7 +205,7 @@ class ActivityLog(BaseModel):
user = peewee.ForeignKeyField(User)
type = peewee.IntegerField()
activity = peewee.TextField()
created_at = peewee.DateTimeField(default=datetime.datetime.now)
created_at = DateTimeTZField(default=datetime.datetime.now)
class Meta:
db_table = 'activity_log'
@@ -198,7 +230,7 @@ class DataSource(BaseModel):
options = peewee.TextField()
queue_name = peewee.CharField(default="queries")
scheduled_queue_name = peewee.CharField(default="queries")
created_at = peewee.DateTimeField(default=datetime.datetime.now)
created_at = DateTimeTZField(default=datetime.datetime.now)
class Meta:
db_table = 'data_sources'
@@ -210,6 +242,23 @@ class DataSource(BaseModel):
'type': self.type
}
def get_schema(self, refresh=False):
    """Return this data source's table schema, cached in redis.

    With refresh=True the cache is bypassed and repopulated from the
    query runner.
    """
    cache_key = "data_source:schema:{}".format(self.id)

    cached = redis_connection.get(cache_key) if not refresh else None
    if cached is not None:
        return json.loads(cached)

    runner = get_query_runner(self.type, self.options)
    schema = sorted(runner.get_schema(), key=lambda table: table['name'])
    redis_connection.set(cache_key, json.dumps(schema))

    return schema
@classmethod
def all(cls):
    # All data sources, ordered by id for a stable listing.
    return cls.select().order_by(cls.id.asc())
@@ -222,7 +271,7 @@ class QueryResult(BaseModel):
query = peewee.TextField()
data = peewee.TextField()
runtime = peewee.FloatField()
retrieved_at = peewee.DateTimeField()
retrieved_at = DateTimeTZField()
class Meta:
db_table = 'query_results'
@@ -248,16 +297,16 @@ class QueryResult(BaseModel):
return unused_results
@classmethod
def get_latest(cls, data_source, query, max_age=0):
    """Return the most recent cached result for *query* on *data_source*.

    max_age == -1 accepts a result of any age; otherwise only results
    retrieved within the last *max_age* seconds qualify.  Returns None when
    nothing matches.
    """
    # NOTE: diff residue previously interleaved the old `ttl` parameter with
    # this `max_age` version; resolved to the renamed parameter.
    query_hash = utils.gen_query_hash(query)

    if max_age == -1:
        query = cls.select().where(cls.query_hash == query_hash,
                                   cls.data_source == data_source).order_by(cls.retrieved_at.desc())
    else:
        query = cls.select().where(cls.query_hash == query_hash, cls.data_source == data_source,
                                   peewee.SQL("retrieved_at + interval '%s second' >= now() at time zone 'utc'",
                                              max_age)).order_by(cls.retrieved_at.desc())

    return query.first()
@@ -284,7 +333,28 @@ class QueryResult(BaseModel):
return u"%d | %s | %s" % (self.id, self.query_hash, self.retrieved_at)
class Query(BaseModel):
def should_schedule_next(previous_iteration, now, schedule):
    """Decide whether a scheduled query is due for another run.

    *schedule* is either a number of seconds (interval schedule, e.g. "3600")
    or a daily "HH:MM" time.  Returns True when *now* is past the computed
    next-run time relative to *previous_iteration*.
    """
    if schedule.isdigit():
        # Interval schedule: due once the interval has fully elapsed.
        next_iteration = previous_iteration + datetime.timedelta(seconds=int(schedule))
    else:
        # Daily schedule at HH:MM.
        hour_str, minute_str = schedule.split(':')
        hour, minute = int(hour_str), int(minute_str)

        # The following logic is needed for cases like the following:
        # - The query scheduled to run at 23:59.
        # - The scheduler wakes up at 00:01.
        # - Using naive implementation of comparing timestamps, it will skip the execution.
        anchored = previous_iteration.replace(hour=hour, minute=minute)
        if anchored > previous_iteration:
            previous_iteration = anchored - datetime.timedelta(days=1)

        next_iteration = (previous_iteration + datetime.timedelta(days=1)).replace(hour=hour, minute=minute)

    return now > next_iteration
class Query(ModelTimestampsMixin, BaseModel):
id = peewee.PrimaryKeyField()
data_source = peewee.ForeignKeyField(DataSource)
latest_query_data = peewee.ForeignKeyField(QueryResult, null=True)
@@ -293,21 +363,15 @@ class Query(BaseModel):
query = peewee.TextField()
query_hash = peewee.CharField(max_length=32)
api_key = peewee.CharField(max_length=40)
ttl = peewee.IntegerField()
user_email = peewee.CharField(max_length=360, null=True)
user = peewee.ForeignKeyField(User)
last_modified_by = peewee.ForeignKeyField(User, null=True, related_name="modified_queries")
is_archived = peewee.BooleanField(default=False, index=True)
created_at = peewee.DateTimeField(default=datetime.datetime.now)
schedule = peewee.CharField(max_length=10, null=True)
class Meta:
db_table = 'queries'
def create_default_visualizations(self):
table_visualization = Visualization(query=self, name="Table",
description='',
type="TABLE", options="{}")
table_visualization.save()
def to_dict(self, with_stats=False, with_visualizations=False, with_user=True):
d = {
'id': self.id,
@@ -316,15 +380,17 @@ class Query(BaseModel):
'description': self.description,
'query': self.query,
'query_hash': self.query_hash,
'ttl': self.ttl,
'schedule': self.schedule,
'api_key': self.api_key,
'is_archived': self.is_archived,
'updated_at': self.updated_at,
'created_at': self.created_at,
'data_source_id': self._data.get('data_source', None)
}
if with_user:
d['user'] = self.user.to_dict()
d['last_modified_by'] = self.last_modified_by.to_dict()
else:
d['user_id'] = self._data['user']
@@ -340,7 +406,7 @@ class Query(BaseModel):
def archive(self):
self.is_archived = True
self.ttl = -1
self.schedule = None
for vis in self.visualizations:
for w in vis.widgets:
@@ -361,27 +427,25 @@ class Query(BaseModel):
@classmethod
def outdated_queries(cls):
    """Return scheduled queries whose cached results are due for refresh.

    Results are deduplicated by (query_hash, data_source) so identical
    queries against the same source are only refreshed once; returns the
    dict's values (a list of Query instances).
    """
    # NOTE: diff residue previously interleaved the superseded ttl-based SQL
    # query and its `return queries` with this schedule-based version;
    # resolved to the schedule-based implementation.
    # TODO: this will only find scheduled queries that were executed before. I think this is
    # a reasonable assumption, but worth revisiting.
    queries = cls.select(cls, QueryResult.retrieved_at, DataSource)\
        .join(QueryResult)\
        .switch(Query).join(DataSource)\
        .where(cls.schedule != None)

    # Compare against an aware UTC "now" since retrieved_at is tz-aware.
    now = datetime.datetime.utcnow().replace(tzinfo=psycopg2.tz.FixedOffsetTimezone(offset=0, name=None))

    outdated_queries = {}
    for query in queries:
        if should_schedule_next(query.latest_query_data.retrieved_at, now, query.schedule):
            key = "{}:{}".format(query.query_hash, query.data_source.id)
            outdated_queries[key] = query

    return outdated_queries.values()
@classmethod
def search(cls, term):
# This is very naive implementation of search, to be replaced with PostgreSQL full-text-search solution.
where = (cls.name**"%{}%".format(term)) | (cls.description**"%{}%".format(term))
where = (cls.name**u"%{}%".format(term)) | (cls.description**u"%{}%".format(term))
if term.isdigit():
where |= cls.id == term
@@ -392,6 +456,7 @@ class Query(BaseModel):
@classmethod
def recent(cls, user_id):
# TODO: instead of t2 here, we should define table_alias for Query table
return cls.select().where(Event.created_at > peewee.SQL("current_date - 7")).\
join(Event, on=(Query.id == peewee.SQL("t2.object_id::integer"))).\
where(Event.action << ('edit', 'execute', 'edit_name', 'edit_description', 'view_source')).\
@@ -410,10 +475,23 @@ class Query(BaseModel):
update = cls.update(**kwargs).where(cls.id == query_id)
return update.execute()
def pre_save(self, created):
    # NOTE: diff residue previously left the superseded `def save(...)` line
    # above this hook; resolved to the pre_save/post_save hook pair.
    super(Query, self).pre_save(created)
    # Keep the hash in sync with the query text on every save.
    self.query_hash = utils.gen_query_hash(self.query)
    self._set_api_key()

    if self.last_modified_by is None:
        self.last_modified_by = self.user

def post_save(self, created):
    # Newly created queries get their default visualization automatically.
    if created:
        self._create_default_visualizations()

def _create_default_visualizations(self):
    # Every query starts with a plain table visualization.
    table_visualization = Visualization(query=self, name="Table",
                                        description='',
                                        type="TABLE", options="{}")
    table_visualization.save()
def _set_api_key(self):
if not self.api_key:
@@ -432,7 +510,7 @@ class Query(BaseModel):
return unicode(self.id)
class Dashboard(BaseModel):
class Dashboard(ModelTimestampsMixin, BaseModel):
id = peewee.PrimaryKeyField()
slug = peewee.CharField(max_length=140, index=True)
name = peewee.CharField(max_length=100)
@@ -441,7 +519,6 @@ class Dashboard(BaseModel):
layout = peewee.TextField()
dashboard_filters_enabled = peewee.BooleanField(default=False)
is_archived = peewee.BooleanField(default=False, index=True)
created_at = peewee.DateTimeField(default=datetime.datetime.now)
class Meta:
db_table = 'dashboards'
@@ -483,7 +560,9 @@ class Dashboard(BaseModel):
'user_id': self._data['user'],
'layout': layout,
'dashboard_filters_enabled': self.dashboard_filters_enabled,
'widgets': widgets_layout
'widgets': widgets_layout,
'updated_at': self.updated_at,
'created_at': self.created_at
}
@classmethod
@@ -516,7 +595,7 @@ class Dashboard(BaseModel):
return u"%s=%s" % (self.id, self.name)
class Visualization(BaseModel):
class Visualization(ModelTimestampsMixin, BaseModel):
id = peewee.PrimaryKeyField()
type = peewee.CharField(max_length=100)
query = peewee.ForeignKeyField(Query, related_name='visualizations')
@@ -534,6 +613,8 @@ class Visualization(BaseModel):
'name': self.name,
'description': self.description,
'options': json.loads(self.options),
'updated_at': self.updated_at,
'created_at': self.created_at
}
if with_query:
@@ -545,14 +626,13 @@ class Visualization(BaseModel):
return u"%s %s" % (self.id, self.type)
class Widget(BaseModel):
class Widget(ModelTimestampsMixin, BaseModel):
id = peewee.PrimaryKeyField()
visualization = peewee.ForeignKeyField(Visualization, related_name='widgets', null=True)
text = peewee.TextField(null=True)
width = peewee.IntegerField()
options = peewee.TextField()
dashboard = peewee.ForeignKeyField(Dashboard, related_name='widgets', index=True)
created_at = peewee.DateTimeField(default=datetime.datetime.now)
# unused; kept for backward compatability:
type = peewee.CharField(max_length=100, null=True)
@@ -567,7 +647,9 @@ class Widget(BaseModel):
'width': self.width,
'options': json.loads(self.options),
'dashboard_id': self._data['dashboard'],
'text': self.text
'text': self.text,
'updated_at': self.updated_at,
'created_at': self.created_at
}
if self.visualization and self.visualization.id:
@@ -586,13 +668,14 @@ class Widget(BaseModel):
self.dashboard.save()
super(Widget, self).delete_instance(*args, **kwargs)
class Event(BaseModel):
user = peewee.ForeignKeyField(User, related_name="events")
user = peewee.ForeignKeyField(User, related_name="events", null=True)
action = peewee.CharField()
object_type = peewee.CharField()
object_id = peewee.CharField(null=True)
additional_properties = peewee.TextField(null=True)
created_at = peewee.DateTimeField(default=datetime.datetime.now)
created_at = DateTimeTZField(default=datetime.datetime.now)
class Meta:
db_table = 'events'

View File

@@ -0,0 +1,107 @@
import logging
import json
import jsonschema
from jsonschema import ValidationError
logger = logging.getLogger(__name__)
__all__ = [
'ValidationError',
'BaseQueryRunner',
'TYPE_DATETIME',
'TYPE_BOOLEAN',
'TYPE_INTEGER',
'TYPE_STRING',
'TYPE_DATE',
'TYPE_FLOAT',
'register',
'get_query_runner',
'import_query_runners'
]
# Valid types of columns returned in results:
TYPE_INTEGER = 'integer'
TYPE_FLOAT = 'float'
TYPE_BOOLEAN = 'boolean'
TYPE_STRING = 'string'
TYPE_DATETIME = 'datetime'
TYPE_DATE = 'date'
class BaseQueryRunner(object):
    """Common interface for all query runner implementations.

    Concrete runners override :meth:`run_query` (and usually
    :meth:`configuration_schema`) and make themselves available by calling
    ``register``.
    """

    def __init__(self, configuration):
        # Fail fast on malformed configuration before storing it.
        jsonschema.validate(configuration, self.configuration_schema())
        self.configuration = configuration

    @classmethod
    def name(cls):
        # Human readable name; defaults to the class name itself.
        return cls.__name__

    @classmethod
    def type(cls):
        # Identifier used as the registry key (see ``register``).
        return cls.__name__.lower()

    @classmethod
    def enabled(cls):
        # Subclasses return False when an optional dependency is missing.
        return True

    @classmethod
    def annotate_query(cls):
        # Whether the worker may prepend a SQL-style comment to the query.
        return True

    @classmethod
    def configuration_schema(cls):
        # JSON schema used both for validation and for UI form generation.
        return {}

    def run_query(self, query):
        raise NotImplementedError()

    def get_schema(self):
        # Runners that can introspect their data source override this.
        return []

    @classmethod
    def to_dict(cls):
        """Serializable description of this runner type for the API/UI."""
        description = {}
        description['name'] = cls.name()
        description['type'] = cls.type()
        description['configuration_schema'] = cls.configuration_schema()
        return description
query_runners = {}
def register(query_runner_class):
    """Add a query runner class to the global registry, keyed by its type.

    Classes that report themselves as disabled (usually because an optional
    dependency is missing) are skipped with a warning instead of registered.
    """
    global query_runners
    if not query_runner_class.enabled():
        logger.warning("%s query runner enabled but not supported, not registering. Either disable or install missing dependencies.", query_runner_class.name())
        return

    logger.debug("Registering %s (%s) query runner.", query_runner_class.name(), query_runner_class.type())
    query_runners[query_runner_class.type()] = query_runner_class
def get_query_runner(query_runner_type, configuration_json):
    """Instantiate the runner registered for *query_runner_type*.

    Returns None when no such type was registered; otherwise constructs the
    runner from the JSON-encoded configuration string.
    """
    try:
        query_runner_class = query_runners[query_runner_type]
    except KeyError:
        return None

    return query_runner_class(json.loads(configuration_json))
def validate_configuration(query_runner_type, configuration_json):
    """Return True when *configuration_json* parses as JSON and satisfies
    the schema of the registered runner type, False otherwise (including
    when the type is unknown)."""
    query_runner_class = query_runners.get(query_runner_type)
    if query_runner_class is None:
        return False

    try:
        # ValueError covers malformed JSON; ValidationError a schema breach.
        jsonschema.validate(json.loads(configuration_json), query_runner_class.configuration_schema())
        return True
    except (ValidationError, ValueError):
        return False
def import_query_runners(query_runner_imports):
    """Import each module named in *query_runner_imports*.

    Importing a runner module triggers its module-level ``register(...)``
    call, which is what actually makes the runner available.
    """
    for module_name in query_runner_imports:
        __import__(module_name)

View File

@@ -1,29 +1,37 @@
import datetime
import httplib2
import json
import httplib2
import logging
import sys
import time
from redash.query_runner import *
from redash.utils import JSONEncoder
logger = logging.getLogger(__name__)
try:
import apiclient.errors
from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.client import SignedJwtAssertionCredentials
except ImportError:
print "Missing dependencies. Please install google-api-python-client and oauth2client."
print "You can use pip: pip install google-api-python-client oauth2client"
from redash.utils import JSONEncoder
enabled = True
except ImportError:
logger.warning("Missing dependencies. Please install google-api-python-client and oauth2client.")
logger.warning("You can use pip: pip install google-api-python-client oauth2client")
enabled = False
types_map = {
'INTEGER': 'integer',
'FLOAT': 'float',
'BOOLEAN': 'boolean',
'STRING': 'string',
'TIMESTAMP': 'datetime',
'INTEGER': TYPE_INTEGER,
'FLOAT': TYPE_FLOAT,
'BOOLEAN': TYPE_BOOLEAN,
'STRING': TYPE_STRING,
'TIMESTAMP': TYPE_DATETIME,
}
def transform_row(row, fields):
column_index = 0
row_data = {}
@@ -49,37 +57,71 @@ def transform_row(row, fields):
return row_data
def bigquery(connection_string):
def load_key(filename):
f = file(filename, "rb")
try:
return f.read()
finally:
f.close()
def get_bigquery_service():
scope = [
"https://www.googleapis.com/auth/bigquery",
]
def _load_key(filename):
f = file(filename, "rb")
try:
return f.read()
finally:
f.close()
credentials = SignedJwtAssertionCredentials(connection_string["serviceAccount"],
load_key(connection_string["privateKey"]), scope=scope)
http = httplib2.Http()
http = credentials.authorize(http)
return build("bigquery", "v2", http=http)
def _get_bigquery_service(service_account, private_key):
scope = [
"https://www.googleapis.com/auth/bigquery",
]
def get_query_results(jobs, project_id, job_id, start_index):
query_reply = jobs.getQueryResults(projectId=project_id, jobId=job_id, startIndex=start_index).execute()
logging.debug('query_reply %s', query_reply)
if not query_reply['jobComplete']:
time.sleep(10)
return get_query_results(jobs, project_id, job_id, start_index)
credentials = SignedJwtAssertionCredentials(service_account, private_key, scope=scope)
http = httplib2.Http()
http = credentials.authorize(http)
return query_reply
return build("bigquery", "v2", http=http)
def query_runner(query):
bigquery_service = get_bigquery_service()
def _get_query_results(jobs, project_id, job_id, start_index):
query_reply = jobs.getQueryResults(projectId=project_id, jobId=job_id, startIndex=start_index).execute()
logging.debug('query_reply %s', query_reply)
if not query_reply['jobComplete']:
time.sleep(10)
return _get_query_results(jobs, project_id, job_id, start_index)
return query_reply
class BigQuery(BaseQueryRunner):
@classmethod
def enabled(cls):
return enabled
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'serviceAccount': {
'type': 'string',
'title': 'Service Account'
},
'projectId': {
'type': 'string',
'title': 'Project ID'
},
'privateKey': {
'type': 'string',
'title': 'Private Key Path'
}
},
'required': ['serviceAccount', 'projectId', 'privateKey']
}
def __init__(self, configuration_json):
super(BigQuery, self).__init__(configuration_json)
self.private_key = _load_key(self.configuration["privateKey"])
def run_query(self, query):
bigquery_service = _get_bigquery_service(self.configuration["serviceAccount"],
self.private_key)
jobs = bigquery_service.jobs()
job_data = {
@@ -90,17 +132,17 @@ def bigquery(connection_string):
}
}
logging.debug("bigquery got query: %s", query)
logger.debug("BigQuery got query: %s", query)
project_id = connection_string["projectId"]
project_id = self.configuration["projectId"]
try:
insert_response = jobs.insert(projectId=project_id, body=job_data).execute()
current_row = 0
query_reply = get_query_results(jobs, project_id=project_id,
query_reply = _get_query_results(jobs, project_id=project_id,
job_id=insert_response['jobReference']['jobId'], start_index=current_row)
logging.debug("bigquery replied: %s", query_reply)
logger.debug("bigquery replied: %s", query_reply)
rows = []
@@ -134,5 +176,4 @@ def bigquery(connection_string):
return json_data, error
return query_runner
register(BigQuery)

View File

@@ -0,0 +1,83 @@
import json
import datetime
import requests
import logging
from redash.query_runner import *
from redash.utils import JSONEncoder
logger = logging.getLogger(__name__)
def _transform_result(response):
    """Flatten a Graphite JSON render response into re:dash's columnar
    JSON format (Time::x / value::y / name::series)."""
    columns = ({'name': 'Time::x', 'type': TYPE_DATETIME},
               {'name': 'value::y', 'type': TYPE_FLOAT},
               {'name': 'name::series', 'type': TYPE_STRING})

    rows = []
    for series in response.json():
        target = series['target']
        # Graphite datapoints are [value, unix_timestamp] pairs.
        for point in series['datapoints']:
            rows.append({
                'Time::x': datetime.datetime.fromtimestamp(int(point[1])),
                'name::series': target,
                'value::y': point[0],
            })

    return json.dumps({'columns': columns, 'rows': rows}, cls=JSONEncoder)
class Graphite(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'url': {
'type': 'string'
},
'username': {
'type': 'string'
},
'password': {
'type': 'string'
},
'verify': {
'type': 'boolean',
'title': 'Verify SSL certificate'
}
},
'required': ['url']
}
@classmethod
def annotate_query(cls):
return False
def __init__(self, configuration_json):
super(Graphite, self).__init__(configuration_json)
if "username" in self.configuration and self.configuration["username"]:
self.auth = (self.configuration["username"], self.configuration["password"])
else:
self.auth = None
self.verify = self.configuration["verify"]
self.base_url = "%s/render?format=json&" % self.configuration['url']
def run_query(self, query):
url = "%s%s" % (self.base_url, "&".join(query.split("\n")))
error = None
data = None
try:
response = requests.get(url, auth=self.auth, verify=self.verify)
if response.status_code == 200:
data = _transform_result(response)
else:
error = "Failed getting results (%d)" % response.status_code
except Exception, ex:
data = None
error = ex.message
return data, error
register(Graphite)

View File

@@ -0,0 +1,178 @@
import json
import datetime
import logging
import re
import time
from redash.utils import JSONEncoder
from redash.query_runner import *
logger = logging.getLogger(__name__)
try:
import pymongo
from bson.objectid import ObjectId
enabled = True
except ImportError:
logger.warning("Missing dependencies. Please install pymongo.")
logger.warning("You can use pip: pip install pymongo")
enabled = False
TYPES_MAP = {
str: TYPE_STRING,
unicode: TYPE_STRING,
int: TYPE_INTEGER,
long: TYPE_INTEGER,
float: TYPE_FLOAT,
bool: TYPE_BOOLEAN,
datetime.datetime: TYPE_DATETIME,
}
date_regex = re.compile("ISODate\(\"(.*)\"\)", re.IGNORECASE)
def _get_column_by_name(columns, column_name):
for c in columns:
if "name" in c and c["name"] == column_name:
return c
return None
def _convert_date(q, field_name):
    """Replace an ISODate("...") literal stored at q[field_name] with a real
    datetime object; leaves the value untouched when no literal is found."""
    matches = date_regex.findall(q[field_name])
    if not matches:
        return

    # A ":" anywhere in the raw value signals a time component.
    if ":" in q[field_name]:
        fmt = "%Y-%m-%d %H:%M"
    else:
        fmt = "%Y-%m-%d"

    q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(matches[0], fmt)))
class MongoDB(BaseQueryRunner):
    """Query runner for MongoDB.

    Queries are JSON documents with a mandatory "collection" key and
    optional "query", "fields" and "sort" keys.
    """

    @classmethod
    def configuration_schema(cls):
        # BUG FIX: 'required' used to be nested inside 'properties', which
        # made it an (ignored) property definition instead of a validation
        # rule. It belongs at the object level. 'dbName' is included because
        # __init__ reads it unconditionally.
        return {
            'type': 'object',
            'properties': {
                'connectionString': {
                    'type': 'string',
                    'title': 'Connection String'
                },
                'dbName': {
                    'type': 'string',
                    'title': "Database Name"
                },
                'replicaSetName': {
                    'type': 'string',
                    'title': 'Replica Set Name'
                }
            },
            'required': ['connectionString', 'dbName']
        }

    @classmethod
    def enabled(cls):
        # False when pymongo failed to import (see module top).
        return enabled

    @classmethod
    def annotate_query(cls):
        # Queries are JSON documents; a comment prefix would break parsing.
        return False

    def __init__(self, configuration_json):
        super(MongoDB, self).__init__(configuration_json)

        self.db_name = self.configuration["dbName"]
        self.is_replica_set = True if "replicaSetName" in self.configuration and self.configuration["replicaSetName"] else False

    def run_query(self, query):
        """Run a JSON query document against the configured database.

        Returns (json_data, error); exactly one of the two is None.
        """
        if self.is_replica_set:
            db_connection = pymongo.MongoReplicaSetClient(self.configuration["connectionString"], replicaSet=self.configuration["replicaSetName"])
        else:
            db_connection = pymongo.MongoClient(self.configuration["connectionString"])

        if self.db_name not in db_connection.database_names():
            return None, "Unknown database name '%s'" % self.db_name

        db = db_connection[self.db_name]

        logger.debug("mongodb connection string: %s", self.configuration['connectionString'])
        logger.debug("mongodb got query: %s", query)

        try:
            query_data = json.loads(query)
        except ValueError:
            return None, "Invalid query format. The query is not a valid JSON."

        if "collection" not in query_data:
            return None, "'collection' must have a value to run a query"
        else:
            collection = query_data["collection"]

        q = None
        if "query" in query_data:
            q = query_data["query"]
            # Convert ISODate("...") string literals into datetime objects,
            # both at the top level and one level deep (e.g. inside
            # {"$gt": ISODate(...)}).
            for k in q:
                if q[k] and type(q[k]) in [str, unicode]:
                    logging.debug(q[k])
                    _convert_date(q, k)
                elif q[k] and type(q[k]) is dict:
                    for k2 in q[k]:
                        if type(q[k][k2]) in [str, unicode]:
                            _convert_date(q[k], k2)

        f = None
        if "fields" in query_data:
            f = query_data["fields"]

        s = None
        if "sort" in query_data and query_data["sort"]:
            s = []
            for field_name in query_data["sort"]:
                s.append((field_name, query_data["sort"][field_name]))

        columns = []
        rows = []

        error = None
        json_data = None

        if s:
            cursor = db[collection].find(q, f).sort(s)
        else:
            cursor = db[collection].find(q, f)

        for r in cursor:
            # Discover columns lazily from the documents themselves, since
            # MongoDB collections have no fixed schema.
            for k in r:
                if _get_column_by_name(columns, k) is None:
                    columns.append({
                        "name": k,
                        "friendly_name": k,
                        "type": TYPES_MAP.get(type(r[k]), TYPE_STRING)
                    })

                # Convert ObjectId to string
                if type(r[k]) == ObjectId:
                    r[k] = str(r[k])

            rows.append(r)

        if f:
            # Preserve the column order requested via "fields".
            ordered_columns = []
            for k in sorted(f, key=f.get):
                ordered_columns.append(_get_column_by_name(columns, k))

            columns = ordered_columns

        data = {
            "columns": columns,
            "rows": rows
        }
        error = None
        json_data = json.dumps(data, cls=JSONEncoder)

        return json_data, error
register(MongoDB)

View File

@@ -0,0 +1,132 @@
import sys
import json
import logging
from redash.utils import JSONEncoder
from redash.query_runner import *
logger = logging.getLogger(__name__)
class Mysql(BaseQueryRunner):
    """Query runner for MySQL, backed by the MySQLdb driver."""

    @classmethod
    def configuration_schema(cls):
        # Property names ('passwd', 'db') deliberately mirror the
        # MySQLdb.connect() keyword arguments used in run_query.
        return {
            'type': 'object',
            'properties': {
                'host': {
                    'type': 'string'
                },
                'user': {
                    'type': 'string'
                },
                'passwd': {
                    'type': 'string',
                    'title': 'Password'
                },
                'db': {
                    'type': 'string',
                    'title': 'Database name'
                }
            },
            'required': ['db']
        }

    @classmethod
    def enabled(cls):
        # Only offer this runner when the MySQLdb driver is importable.
        try:
            import MySQLdb
        except ImportError:
            return False

        return True

    def __init__(self, configuration_json):
        super(Mysql, self).__init__(configuration_json)

    def get_schema(self):
        """Return visible tables/columns as [{'name': ..., 'columns': [...]}].

        Tables outside the configured database are prefixed 'schema.table';
        system schemas (performance_schema, mysql) are excluded.
        Raises Exception when the introspection query fails.
        """
        query = """
        SELECT col.table_schema,
               col.table_name,
               col.column_name
        FROM `information_schema`.`columns` col
        INNER JOIN
          (SELECT table_schema,
                  TABLE_NAME
           FROM information_schema.tables
           WHERE table_type <> 'SYSTEM VIEW' AND table_schema NOT IN ('performance_schema', 'mysql')) tables ON tables.table_schema = col.table_schema
        AND tables.TABLE_NAME = col.TABLE_NAME;
        """

        results, error = self.run_query(query)

        if error is not None:
            raise Exception("Failed getting schema.")

        results = json.loads(results)

        schema = {}
        for row in results['rows']:
            if row['table_schema'] != self.configuration['db']:
                table_name = '{}.{}'.format(row['table_schema'], row['table_name'])
            else:
                table_name = row['table_name']

            if table_name not in schema:
                schema[table_name] = {'name': table_name, 'columns': []}

            schema[table_name]['columns'].append(row['column_name'])

        return schema.values()

    def run_query(self, query):
        """Execute *query*; returns (json_data, error), exactly one of which
        is None."""
        # Imported here (not module level) so the module loads even when the
        # driver is missing; enabled() gates actual use.
        import MySQLdb

        connection = MySQLdb.connect(self.configuration.get('host', ''),
                                     self.configuration.get('user', ''),
                                     self.configuration.get('passwd', ''),
                                     self.configuration['db'],
                                     charset='utf8', use_unicode=True)
        cursor = connection.cursor()

        logger.debug("MySQL running query: %s", query)

        try:
            cursor.execute(query)

            data = cursor.fetchall()

            # cursor.description is None for statements that return no
            # result set (e.g. DDL).
            cursor_desc = cursor.description
            if cursor_desc is not None:
                num_fields = len(cursor_desc)
                column_names = [i[0] for i in cursor.description]

                rows = [dict(zip(column_names, row)) for row in data]

                # TODO: add types support
                columns = [{'name': col_name,
                            'friendly_name': col_name,
                            'type': None} for col_name in column_names]

                data = {'columns': columns, 'rows': rows}
                json_data = json.dumps(data, cls=JSONEncoder)
                error = None
            else:
                json_data = None
                error = "No data was returned."

            cursor.close()
        except MySQLdb.Error, e:
            json_data = None
            # args[1] is the server-supplied error message.
            error = e.args[1]
        except KeyboardInterrupt:
            error = "Query cancelled by user."
            json_data = None
        except Exception as e:
            # Re-raise preserving the original traceback (Python 2 idiom).
            raise sys.exc_info()[1], None, sys.exc_info()[2]
        finally:
            connection.close()

        return json_data, error
register(Mysql)

170
redash/query_runner/pg.py Normal file
View File

@@ -0,0 +1,170 @@
import json
import logging
import psycopg2
import select
import sys
from redash.query_runner import *
from redash.utils import JSONEncoder
logger = logging.getLogger(__name__)
types_map = {
20: TYPE_INTEGER,
21: TYPE_INTEGER,
23: TYPE_INTEGER,
700: TYPE_FLOAT,
1700: TYPE_FLOAT,
701: TYPE_FLOAT,
16: TYPE_BOOLEAN,
1082: TYPE_DATE,
1114: TYPE_DATETIME,
1184: TYPE_DATETIME,
1014: TYPE_STRING,
1015: TYPE_STRING,
1008: TYPE_STRING,
1009: TYPE_STRING,
2951: TYPE_STRING
}
def _wait(conn):
    """Block until the asynchronous psycopg2 connection finishes its
    current operation, sleeping on select() while the socket is not ready.
    """
    while True:
        try:
            state = conn.poll()
            if state == psycopg2.extensions.POLL_OK:
                return
            if state == psycopg2.extensions.POLL_WRITE:
                select.select([], [conn.fileno()], [])
            elif state == psycopg2.extensions.POLL_READ:
                select.select([conn.fileno()], [], [])
            else:
                raise psycopg2.OperationalError("poll() returned %s" % state)
        except select.error:
            raise psycopg2.OperationalError("select.error received")
class PostgreSQL(BaseQueryRunner):
    """Query runner for PostgreSQL.

    Uses psycopg2 in asynchronous mode so an in-flight query can be
    cancelled (see the KeyboardInterrupt handler in run_query).
    """

    @classmethod
    def configuration_schema(cls):
        # Property names deliberately match libpq connection parameters;
        # __init__ joins them verbatim into a connection string.
        return {
            "type": "object",
            "properties": {
                "user": {
                    "type": "string"
                },
                "password": {
                    "type": "string"
                },
                "host": {
                    "type": "string"
                },
                "port": {
                    "type": "number"
                },
                "dbname": {
                    "type": "string",
                    "title": "Database Name"
                }
            },
            "required": ["dbname"]
        }

    @classmethod
    def type(cls):
        # Historical type name ("pg" rather than the class-name default).
        return "pg"

    def __init__(self, configuration_json):
        super(PostgreSQL, self).__init__(configuration_json)

        # Build a libpq "k=v k=v" connection string from the configuration.
        values = []
        for k, v in self.configuration.iteritems():
            values.append("{}={}".format(k, v))

        self.connection_string = " ".join(values)

    def get_schema(self):
        """Return visible tables/columns as [{'name': ..., 'columns': [...]}].

        Tables outside the 'public' schema are prefixed 'schema.table'.
        Raises Exception when the introspection query fails.
        """
        query = """
        SELECT table_schema, table_name, column_name
        FROM information_schema.columns
        WHERE table_schema NOT IN ('pg_catalog', 'information_schema');
        """

        results, error = self.run_query(query)

        if error is not None:
            raise Exception("Failed getting schema.")

        results = json.loads(results)

        schema = {}
        for row in results['rows']:
            if row['table_schema'] != 'public':
                table_name = '{}.{}'.format(row['table_schema'], row['table_name'])
            else:
                table_name = row['table_name']

            if table_name not in schema:
                schema[table_name] = {'name': table_name, 'columns': []}

            schema[table_name]['columns'].append(row['column_name'])

        return schema.values()

    def run_query(self, query):
        """Execute *query*; returns (json_data, error), exactly one of which
        is None."""
        connection = psycopg2.connect(self.connection_string, async=True)
        _wait(connection)

        cursor = connection.cursor()

        try:
            cursor.execute(query)
            _wait(connection)

            # While set would be more efficient here, it sorts the data which is not what we want, but due to the small
            # size of the data we can assume it's ok.
            column_names = []
            columns = []
            duplicates_counter = 1

            for column in cursor.description:
                # TODO: this deduplication needs to be generalized and reused in all query runners.
                column_name = column.name
                if column_name in column_names:
                    column_name += str(duplicates_counter)
                    duplicates_counter += 1

                column_names.append(column_name)

                columns.append({
                    'name': column_name,
                    'friendly_name': column_name,
                    'type': types_map.get(column.type_code, None)
                })

            rows = [dict(zip(column_names, row)) for row in cursor]

            data = {'columns': columns, 'rows': rows}
            json_data = json.dumps(data, cls=JSONEncoder)
            error = None
            cursor.close()
        except (select.error, OSError) as e:
            # The select() in _wait was interrupted (e.g. worker signal).
            logging.exception(e)
            error = "Query interrupted. Please retry."
            json_data = None
        except psycopg2.DatabaseError as e:
            logging.exception(e)
            json_data = None
            error = e.message
        except KeyboardInterrupt:
            # Raised by the worker on user-requested cancellation; cancel
            # the server-side query too.
            connection.cancel()
            error = "Query cancelled by user."
            json_data = None
        except Exception as e:
            # Re-raise preserving the original traceback (Python 2 idiom).
            raise sys.exc_info()[1], None, sys.exc_info()[2]
        finally:
            connection.close()

        return json_data, error
register(PostgreSQL)

View File

@@ -0,0 +1,79 @@
import sys
import json
from redash.query_runner import *
from redash import models
def get_query_result(query_id):
    """Load the latest stored result of query *query_id* as a dict.

    Raises Exception when the query does not exist or has no stored
    results yet.
    """
    try:
        query = models.Query.get_by_id(query_id)
    except models.Query.DoesNotExist:
        raise Exception("Query id %s does not exist." % query_id)

    latest = query.latest_query_data
    if latest is None or latest.data is None:
        raise Exception("Query does not have results yet.")

    return json.loads(latest.data)
def execute_query(data_source_name, query):
    """Run *query* against the named data source and return the parsed
    result dict. Raises Exception on unknown data source or query error."""
    try:
        data_source = models.DataSource.get(models.DataSource.name == data_source_name)
    except models.DataSource.DoesNotExist:
        raise Exception("Wrong data source name: %s." % data_source_name)

    runner = get_query_runner(data_source.type, data_source.options)
    data, error = runner.run_query(query)

    if error is not None:
        raise Exception(error)

    # TODO: allow avoiding the json.dumps/loads in same process
    return json.loads(data)
class Python(BaseQueryRunner):
    """
    This is very, very unsafe. Use at your own risk with people you really trust.
    """
    # SECURITY NOTE: run_query exec()s arbitrary code with no sandboxing and
    # no timeout; only enable this runner for fully trusted users.

    @classmethod
    def configuration_schema(cls):
        # No configuration needed.
        return {
            'type': 'object',
            'properties': {
            }
        }

    @classmethod
    def annotate_query(cls):
        # The "query" is Python source; a SQL-style comment prefix would be
        # invalid syntax.
        return False

    def __init__(self, configuration_json):
        super(Python, self).__init__(configuration_json)

    def run_query(self, query):
        """Execute *query* as Python code; returns (json_data, error).

        The script receives get_query_result/execute_query helpers and must
        assign its output to a local named 'result'.
        """
        try:
            error = None

            script_globals = {'get_query_result': get_query_result, 'execute_query': execute_query}
            script_locals = {'result': None}

            # TODO: timeout, sandboxing
            exec query in script_globals, script_locals

            if script_locals['result'] is None:
                raise Exception("result wasn't set to value.")

            json_data = json.dumps(script_locals['result'])
        except KeyboardInterrupt:
            error = "Query cancelled by user."
            json_data = None
        except Exception as e:
            # Re-raise preserving the original traceback (Python 2 idiom).
            raise sys.exc_info()[1], None, sys.exc_info()[2]

        return json_data, error
register(Python)

View File

@@ -0,0 +1,65 @@
import os
import sys
import subprocess
from redash.query_runner import *
class Script(BaseQueryRunner):
@classmethod
def enabled(cls):
return "check_output" in subprocess.__dict__
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'path': {
'type': 'string',
'title': 'Scripts path'
}
},
'required': ['path']
}
@classmethod
def annotate_query(cls):
return False
def __init__(self, configuration_json):
super(Script, self).__init__(configuration_json)
# Poor man's protection against running scripts from output the scripts directory
if self.configuration["path"].find("../") > -1:
raise ValidationError("Scripts can only be run from the configured scripts directory")
def run_query(self, query):
try:
json_data = None
error = None
query = query.strip()
script = os.path.join(self.configuration["path"], query)
if not os.path.exists(script):
return None, "Script '%s' not found in script directory" % query
output = subprocess.check_output(script, shell=False)
if output is not None:
output = output.strip()
if output != "":
return output, None
error = "Error reading output"
except subprocess.CalledProcessError as e:
return None, str(e)
except KeyboardInterrupt:
error = "Query cancelled by user."
json_data = None
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
return json_data, error
register(Script)

View File

@@ -1,16 +1,30 @@
import json
import logging
import sys
import os
import urllib2
def url(connection_string):
from redash.query_runner import *
def query_runner(query):
base_url = connection_string
class Url(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'url': {
'type': 'string',
'title': 'URL base path'
}
}
}
@classmethod
def annotate_query(cls):
return False
def run_query(self, query):
base_url = self.configuration["url"]
try:
json_data = None
error = None
query = query.strip()
@@ -41,5 +55,4 @@ def url(connection_string):
return json_data, error
query_runner.annotate_query = False
return query_runner
register(Url)

View File

@@ -44,22 +44,20 @@ STATSD_HOST = os.environ.get('REDASH_STATSD_HOST', "127.0.0.1")
STATSD_PORT = int(os.environ.get('REDASH_STATSD_PORT', "8125"))
STATSD_PREFIX = os.environ.get('REDASH_STATSD_PREFIX', "redash")
# The following is kept for backward compatability, and shouldn't be used any more.
CONNECTION_ADAPTER = os.environ.get("REDASH_CONNECTION_ADAPTER", "pg")
CONNECTION_STRING = os.environ.get("REDASH_CONNECTION_STRING", "user= password= host= port=5439 dbname=")
# Connection settings for re:dash's own database (where we store the queries, results, etc)
DATABASE_CONFIG = parse_db_url(os.environ.get("REDASH_DATABASE_URL", "postgresql://postgres"))
# Celery related settings
CELERY_BROKER = os.environ.get("REDASH_CELERY_BROKER", REDIS_URL)
CELERY_BACKEND = os.environ.get("REDASH_CELERY_BACKEND", REDIS_URL)
CELERY_FLOWER_URL = os.environ.get("REDASH_CELERY_FLOWER_URL", "/flower")
# The following enables periodic job (every 5 minutes) of removing unused query results. Behind this "feature flag" until
# proved to be "safe".
QUERY_RESULTS_CLEANUP_ENABLED = parse_boolean(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_ENABLED", "false"))
AUTH_TYPE = os.environ.get("REDASH_AUTH_TYPE", "hmac")
PASSWORD_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_PASSWORD_LOGIN_ENABLED", "true"))
# Google Apps domain to allow access from; any user with email in this Google Apps will be allowed
# access
GOOGLE_APPS_DOMAIN = os.environ.get("REDASH_GOOGLE_APPS_DOMAIN", "")
@@ -68,14 +66,23 @@ GOOGLE_CLIENT_ID = os.environ.get("REDASH_GOOGLE_CLIENT_ID", "")
GOOGLE_CLIENT_SECRET = os.environ.get("REDASH_GOOGLE_CLIENT_SECRET", "")
GOOGLE_OAUTH_ENABLED = GOOGLE_CLIENT_ID and GOOGLE_CLIENT_SECRET
PASSWORD_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_PASSWORD_LOGIN_ENABLED", "true"))
STATIC_ASSETS_PATH = fix_assets_path(os.environ.get("REDASH_STATIC_ASSETS_PATH", "../rd_ui/app/"))
WORKERS_COUNT = int(os.environ.get("REDASH_WORKERS_COUNT", "2"))
JOB_EXPIRY_TIME = int(os.environ.get("REDASH_JOB_EXPIRY_TIME", 3600*6))
JOB_EXPIRY_TIME = int(os.environ.get("REDASH_JOB_EXPIRY_TIME", 3600 * 6))
COOKIE_SECRET = os.environ.get("REDASH_COOKIE_SECRET", "c292a0a3aa32397cdb050e233733900f")
LOG_LEVEL = os.environ.get("REDASH_LOG_LEVEL", "INFO")
CLIENT_SIDE_METRICS = parse_boolean(os.environ.get("REDASH_CLIENT_SIDE_METRICS", "false"))
ANALYTICS = os.environ.get("REDASH_ANALYTICS", "")
# Query Runners
QUERY_RUNNERS = array_from_string(os.environ.get("REDASH_ENABLED_QUERY_RUNNERS", ",".join([
'redash.query_runner.big_query',
'redash.query_runner.graphite',
'redash.query_runner.mongodb',
'redash.query_runner.mysql',
'redash.query_runner.pg',
'redash.query_runner.script',
'redash.query_runner.url',
])))
# Features:
FEATURE_TABLES_PERMISSIONS = parse_boolean(os.environ.get("REDASH_FEATURE_TABLES_PERMISSIONS", "false"))

View File

@@ -8,7 +8,7 @@ from celery.utils.log import get_task_logger
from redash import redis_connection, models, statsd_client, settings
from redash.utils import gen_query_hash
from redash.worker import celery
from redash.data.query_runner import get_query_runner
from redash.query_runner import get_query_runner
logger = get_task_logger(__name__)
@@ -151,8 +151,6 @@ def refresh_queries():
outdated_queries_count += 1
statsd_client.gauge('manager.outdated_queries', outdated_queries_count)
# TODO: decide if we still need this
# statsd_client.gauge('manager.queue_size', self.redis_connection.zcard('jobs'))
logger.info("Done refreshing queries. Found %d outdated queries." % outdated_queries_count)
@@ -220,6 +218,17 @@ def cleanup_query_results():
logger.info("Deleted %d unused query results out of total of %d." % (deleted_count, total_unused_query_results))
@celery.task(base=BaseTask)
def refresh_schemas():
    """
    Refreshes the cached schema of every data source (used by the schema
    browser / autocomplete).
    """
    for ds in models.DataSource.all():
        logger.info("Refreshing schema for: {}".format(ds.name))
        # refresh=True forces a re-fetch instead of returning the cached copy.
        ds.get_schema(refresh=True)
@celery.task(bind=True, base=BaseTask, track_started=True)
def execute_query(self, query, data_source_id):
# TODO: maybe this should be a class?
@@ -237,15 +246,15 @@ def execute_query(self, query, data_source_id):
query_hash = gen_query_hash(query)
query_runner = get_query_runner(data_source.type, data_source.options)
if getattr(query_runner, 'annotate_query', True):
# TODO: anotate with queu ename
if query_runner.annotate_query():
# TODO: annotate with queue name
annotated_query = "/* Task Id: %s, Query hash: %s */ %s" % \
(self.request.id, query_hash, query)
else:
annotated_query = query
with statsd_client.timer('query_runner.{}.{}.run_time'.format(data_source.type, data_source.name)):
data, error = query_runner(annotated_query)
data, error = query_runner.run_query(annotated_query)
run_time = time.time() - start_time
logger.info("Query finished... data length=%s, error=%s", data and len(data), error)
@@ -255,8 +264,6 @@ def execute_query(self, query, data_source_id):
# Delete query_hash
redis_connection.delete(QueryTask._job_lock_id(query_hash, data_source.id))
# TODO: it is possible that storing the data will fail, and we will need to retry
# while we already marked the job as done
if not error:
query_result = models.QueryResult.store_result(data_source.id, query_hash, query, data, run_time, datetime.datetime.utcnow())
else:

View File

@@ -15,6 +15,10 @@ celery_schedule = {
'cleanup_tasks': {
'task': 'redash.tasks.cleanup_tasks',
'schedule': timedelta(minutes=5)
},
'refresh_schemas': {
'task': 'redash.tasks.refresh_schemas',
'schedule': timedelta(minutes=30)
}
}

View File

@@ -9,7 +9,7 @@ Werkzeug==0.9.4
aniso8601==0.82
blinker==1.3
itsdangerous==0.23
peewee==2.2.2
peewee==2.4.7
psycopg2==2.5.2
python-dateutil==2.1
pytz==2013.9
@@ -23,3 +23,5 @@ honcho==0.5.0
statsd==2.1.2
gunicorn==18.0
celery==3.1.11
jsonschema==2.4.0
click==3.3

View File

@@ -98,7 +98,9 @@ if [ ! -f "/opt/redash/.env" ]; then
fi
# Install latest version
REDASH_VERSION=${REDASH_VERSION-0.4.0.b589}
# REDASH_VERSION=${REDASH_VERSION-0.4.0.b589}
# modified by @fedex1 3/15/2015 seems to be the latest version at this point in time.
REDASH_VERSION=${REDASH_VERSION-0.6.0.b722}
LATEST_URL="https://github.com/EverythingMe/redash/releases/download/v${REDASH_VERSION/.b/%2Bb}/redash.$REDASH_VERSION.tar.gz"
VERSION_DIR="/opt/redash/redash.$REDASH_VERSION"
REDASH_TARBALL=/tmp/redash.tar.gz
@@ -146,7 +148,7 @@ if [ $pg_user_exists -ne 0 ]; then
sudo -u redash psql -c "grant select on activity_log, events, queries, dashboards, widgets, visualizations, query_results to redash_reader;" redash
cd /opt/redash/current
sudo -u redash bin/run ./manage.py ds new "re:dash metadata" "pg" "user=redash_reader password=$REDASH_READER_PASSWORD host=localhost dbname=redash"
sudo -u redash bin/run ./manage.py ds new -n "re:dash metadata" -t "pg" -o "{\"user\": \"redash_reader\", \"password\": \"$REDASH_READER_PASSWORD\", \"host\": \"localhost\", \"dbname\": \"redash\"}"
fi
# BigQuery dependencies:

View File

@@ -1,12 +1,17 @@
import os
os.environ['REDASH_REDIS_URL'] = "redis://localhost:6379/5"
import logging
from unittest import TestCase
import datetime
from redash import settings
settings.DATABASE_CONFIG = {
'name': 'circle_test',
'threadlocals': True
}
from redash import models
from redash import models, redis_connection
logging.getLogger('peewee').setLevel(logging.INFO)
@@ -18,4 +23,19 @@ class BaseTestCase(TestCase):
def tearDown(self):
models.db.close_db(None)
models.create_db(False, True)
models.create_db(False, True)
redis_connection.flushdb()
def assertResponseEqual(self, expected, actual):
for k, v in expected.iteritems():
if isinstance(v, datetime.datetime) or isinstance(actual[k], datetime.datetime):
continue
if isinstance(v, list):
continue
if isinstance(v, dict):
self.assertResponseEqual(v, actual[k])
continue
self.assertEqual(v, actual[k], "{} not equal (expected: {}, actual: {}).".format(k, v, actual[k]))

View File

@@ -47,7 +47,7 @@ user_factory = ModelFactory(redash.models.User,
data_source_factory = ModelFactory(redash.models.DataSource,
name='Test',
type='pg',
options='')
options='{"dbname": "test"}')
dashboard_factory = ModelFactory(redash.models.Dashboard,
@@ -58,9 +58,9 @@ query_factory = ModelFactory(redash.models.Query,
name='New Query',
description='',
query='SELECT 1',
ttl=-1,
user=user_factory.create,
is_archived=False,
schedule=None,
data_source=data_source_factory.create)
query_result_factory = ModelFactory(redash.models.QueryResult,
@@ -83,4 +83,4 @@ widget_factory = ModelFactory(redash.models.Widget,
width=1,
options='{}',
dashboard=dashboard_factory.create,
visualization=visualization_factory.create)
visualization=visualization_factory.create)

View File

@@ -1,8 +1,45 @@
from flask.ext.login import current_user
from mock import patch
from tests import BaseTestCase
from redash import models
from redash.google_oauth import create_and_login_user
from tests.factories import user_factory
from redash.authentication import ApiKeyAuthentication
from tests.factories import user_factory, query_factory
from redash.wsgi import app
class TestApiKeyAuthentication(BaseTestCase):
#
# This is a bad way to write these tests, but the way Flask works doesn't make it easy to write them properly...
#
def setUp(self):
super(TestApiKeyAuthentication, self).setUp()
self.api_key = 10
self.query = query_factory.create(api_key=self.api_key)
def test_no_api_key(self):
auth = ApiKeyAuthentication()
with app.test_client() as c:
rv = c.get('/api/queries/{0}'.format(self.query.id))
self.assertFalse(auth.verify_authentication())
def test_wrong_api_key(self):
auth = ApiKeyAuthentication()
with app.test_client() as c:
rv = c.get('/api/queries/{0}'.format(self.query.id), query_string={'api_key': 'whatever'})
self.assertFalse(auth.verify_authentication())
def test_correct_api_key(self):
auth = ApiKeyAuthentication()
with app.test_client() as c:
rv = c.get('/api/queries/{0}'.format(self.query.id), query_string={'api_key': self.api_key})
self.assertTrue(auth.verify_authentication())
def test_no_query_id(self):
auth = ApiKeyAuthentication()
with app.test_client() as c:
rv = c.get('/api/queries', query_string={'api_key': self.api_key})
self.assertFalse(auth.verify_authentication())
class TestCreateAndLoginUser(BaseTestCase):

View File

@@ -1,6 +1,7 @@
from contextlib import contextmanager
import json
import time
import datetime
from unittest import TestCase
from flask import url_for
from flask.ext.login import current_user
@@ -104,7 +105,11 @@ class DashboardAPITest(BaseTestCase, AuthenticationTestMixin):
with app.test_client() as c, authenticated_user(c):
rv = c.get('/api/dashboards/{0}'.format(d1.slug))
self.assertEquals(rv.status_code, 200)
self.assertDictEqual(json.loads(rv.data), d1.to_dict(with_widgets=True))
expected = d1.to_dict(with_widgets=True)
actual = json.loads(rv.data)
self.assertResponseEqual(expected, actual)
def test_get_non_existint_dashbaord(self):
with app.test_client() as c, authenticated_user(c):
@@ -222,10 +227,13 @@ class QueryAPITest(BaseTestCase, AuthenticationTestMixin):
def test_update_query(self):
query = query_factory.create()
with app.test_client() as c, authenticated_user(c):
other_user = user_factory.create()
with app.test_client() as c, authenticated_user(c, user=other_user):
rv = json_request(c.post, '/api/queries/{0}'.format(query.id), data={'name': 'Testing'})
self.assertEqual(rv.status_code, 200)
self.assertEquals(rv.json['name'], 'Testing')
self.assertEqual(rv.json['name'], 'Testing')
self.assertEqual(rv.json['last_modified_by']['id'], other_user.id)
def test_create_query(self):
user = user_factory.create()
@@ -233,7 +241,7 @@ class QueryAPITest(BaseTestCase, AuthenticationTestMixin):
query_data = {
'name': 'Testing',
'query': 'SELECT 1',
'ttl': 3600,
'schedule': "3600",
'data_source_id': data_source.id
}
@@ -256,9 +264,7 @@ class QueryAPITest(BaseTestCase, AuthenticationTestMixin):
rv = json_request(c.get, '/api/queries/{0}'.format(query.id))
self.assertEquals(rv.status_code, 200)
d = query.to_dict(with_visualizations=True)
d.pop('created_at')
self.assertDictContainsSubset(d, rv.json)
self.assertResponseEqual(rv.json, query.to_dict(with_visualizations=True))
def test_get_all_queries(self):
queries = [query_factory.create() for _ in range(10)]
@@ -294,7 +300,8 @@ class VisualizationAPITest(BaseTestCase):
rv = json_request(c.delete, '/api/visualizations/{0}'.format(visualization.id))
self.assertEquals(rv.status_code, 200)
self.assertEquals(models.Visualization.select().count(), 0)
# =1 because each query has a default table visualization.
self.assertEquals(models.Visualization.select().count(), 1)
def test_update_visualization(self):
visualization = visualization_factory.create()
@@ -472,4 +479,45 @@ class TestLogout(BaseTestCase):
self.assertTrue(current_user.is_authenticated())
rv = c.get('/logout')
self.assertEquals(rv.status_code, 302)
self.assertFalse(current_user.is_authenticated())
self.assertFalse(current_user.is_authenticated())
class DataSourceTypesTest(BaseTestCase):
def test_returns_data_for_admin(self):
admin = user_factory.create(groups=['admin', 'default'])
with app.test_client() as c, authenticated_user(c, user=admin):
rv = c.get("/api/data_sources/types")
self.assertEqual(rv.status_code, 200)
def test_returns_403_for_non_admin(self):
with app.test_client() as c, authenticated_user(c):
rv = c.get("/api/data_sources/types")
self.assertEqual(rv.status_code, 403)
class DataSourceTest(BaseTestCase):
def test_returns_400_when_missing_fields(self):
admin = user_factory.create(groups=['admin', 'default'])
with app.test_client() as c, authenticated_user(c, user=admin):
rv = c.post("/api/data_sources")
self.assertEqual(rv.status_code, 400)
rv = json_request(c.post, '/api/data_sources', data={'name': 'DS 1'})
self.assertEqual(rv.status_code, 400)
def test_returns_400_when_configuration_invalid(self):
admin = user_factory.create(groups=['admin', 'default'])
with app.test_client() as c, authenticated_user(c, user=admin):
rv = json_request(c.post, '/api/data_sources',
data={'name': 'DS 1', 'type': 'pg', 'options': '{}'})
self.assertEqual(rv.status_code, 400)
def test_creates_data_source(self):
admin = user_factory.create(groups=['admin', 'default'])
with app.test_client() as c, authenticated_user(c, user=admin):
rv = json_request(c.post, '/api/data_sources',
data={'name': 'DS 1', 'type': 'pg', 'options': '{"dbname": "redash"}'})
self.assertEqual(rv.status_code, 200)

View File

@@ -26,8 +26,10 @@ class ImportTest(BaseTestCase):
self.assertEqual(dashboard.widgets.count(),
reduce(lambda s, row: s + len(row), self.dashboard['widgets'], 0))
self.assertEqual(models.Visualization.select().count(), dashboard.widgets.count()-1)
self.assertEqual(models.Query.select().count(), dashboard.widgets.count()-2)
queries_count = models.Query.select().count()
self.assertEqual(models.Visualization.select().count(), dashboard.widgets.count()+queries_count-1)
self.assertEqual(queries_count, dashboard.widgets.count()-2)
def test_imports_updates_existing_models(self):
importer = import_export.Importer(data_source=data_source_factory.create())

View File

@@ -1,9 +1,13 @@
#encoding: utf8
import datetime
import json
from unittest import TestCase
import mock
from tests import BaseTestCase
from redash import models
from factories import dashboard_factory, query_factory, data_source_factory, query_result_factory, user_factory, widget_factory
from redash.utils import gen_query_hash
from redash import query_runner
class DashboardTest(BaseTestCase):
@@ -31,22 +35,22 @@ class QueryTest(BaseTestCase):
self.assertNotEquals(old_hash, q.query_hash)
def test_search_finds_in_name(self):
q1 = query_factory.create(name="Testing search")
q2 = query_factory.create(name="Testing searching")
q3 = query_factory.create(name="Testing sea rch")
q1 = query_factory.create(name=u"Testing seåřċħ")
q2 = query_factory.create(name=u"Testing seåřċħing")
q3 = query_factory.create(name=u"Testing seå řċħ")
queries = models.Query.search("search")
queries = models.Query.search(u"seåřċħ")
self.assertIn(q1, queries)
self.assertIn(q2, queries)
self.assertNotIn(q3, queries)
def test_search_finds_in_description(self):
q1 = query_factory.create(description="Testing search")
q2 = query_factory.create(description="Testing searching")
q3 = query_factory.create(description="Testing sea rch")
q1 = query_factory.create(description=u"Testing seåřċħ")
q2 = query_factory.create(description=u"Testing seåřċħing")
q3 = query_factory.create(description=u"Testing seå řċħ")
queries = models.Query.search("search")
queries = models.Query.search(u"seåřċħ")
self.assertIn(q1, queries)
self.assertIn(q2, queries)
@@ -64,20 +68,102 @@ class QueryTest(BaseTestCase):
self.assertNotIn(q1, queries)
self.assertNotIn(q2, queries)
def test_save_creates_default_visualization(self):
q = query_factory.create()
self.assertEquals(q.visualizations.count(), 1)
def test_save_updates_updated_at_field(self):
# This should be a test of ModelTimestampsMixin, but it's easier to test in context of existing model... :-\
one_day_ago = datetime.datetime.today() - datetime.timedelta(days=1)
q = query_factory.create(created_at=one_day_ago, updated_at=one_day_ago)
q.save()
self.assertNotEqual(q.updated_at, one_day_ago)
class ShouldScheduleNextTest(TestCase):
def test_interval_schedule_that_needs_reschedule(self):
now = datetime.datetime.now()
two_hours_ago = now - datetime.timedelta(hours=2)
self.assertTrue(models.should_schedule_next(two_hours_ago, now, "3600"))
def test_interval_schedule_that_doesnt_need_reschedule(self):
now = datetime.datetime.now()
half_an_hour_ago = now - datetime.timedelta(minutes=30)
self.assertFalse(models.should_schedule_next(half_an_hour_ago, now, "3600"))
def test_exact_time_that_needs_reschedule(self):
now = datetime.datetime.now()
yesterday = now - datetime.timedelta(days=1)
schedule = "{:02d}:00".format(now.hour - 3)
self.assertTrue(models.should_schedule_next(yesterday, now, schedule))
def test_exact_time_that_doesnt_need_reschedule(self):
now = datetime.datetime.now()
yesterday = (now - datetime.timedelta(days=1)).replace(hour=now.hour+3, minute=now.minute+1)
schedule = "{:02d}:00".format(now.hour + 3)
self.assertFalse(models.should_schedule_next(yesterday, now, schedule))
def test_exact_time_with_day_change(self):
now = datetime.datetime.now().replace(hour=0, minute=1)
previous = (now - datetime.timedelta(days=2)).replace(hour=23, minute=59)
schedule = "23:59".format(now.hour + 3)
self.assertTrue(models.should_schedule_next(previous, now, schedule))
class QueryOutdatedQueriesTest(BaseTestCase):
# TODO: this test can be refactored to use mock version of should_schedule_next to simplify it.
def test_outdated_queries_skips_unscheduled_queries(self):
query = query_factory.create(schedule=None)
queries = models.Query.outdated_queries()
self.assertNotIn(query, queries)
def test_outdated_queries_works_with_ttl_based_schedule(self):
two_hours_ago = datetime.datetime.now() - datetime.timedelta(hours=2)
query = query_factory.create(schedule="3600")
query_result = query_result_factory.create(query=query, retrieved_at=two_hours_ago)
query.latest_query_data = query_result
query.save()
queries = models.Query.outdated_queries()
self.assertIn(query, queries)
def test_skips_fresh_queries(self):
half_an_hour_ago = datetime.datetime.now() - datetime.timedelta(minutes=30)
query = query_factory.create(schedule="3600")
query_result = query_result_factory.create(query=query, retrieved_at=half_an_hour_ago)
query.latest_query_data = query_result
query.save()
queries = models.Query.outdated_queries()
self.assertNotIn(query, queries)
def test_outdated_queries_works_with_specific_time_schedule(self):
half_an_hour_ago = datetime.datetime.utcnow() - datetime.timedelta(minutes=30)
query = query_factory.create(schedule=half_an_hour_ago.strftime('%H:%M'))
query_result = query_result_factory.create(query=query, retrieved_at=half_an_hour_ago-datetime.timedelta(days=1))
query.latest_query_data = query_result
query.save()
queries = models.Query.outdated_queries()
self.assertIn(query, queries)
class QueryArchiveTest(BaseTestCase):
def setUp(self):
super(QueryArchiveTest, self).setUp()
def test_archive_query_sets_flag(self):
query = query_factory.create(ttl=1)
query = query_factory.create()
query.archive()
query = models.Query.get_by_id(query.id)
self.assertEquals(query.is_archived, True)
def test_archived_query_doesnt_return_in_all(self):
query = query_factory.create(ttl=1)
query = query_factory.create(schedule="1")
yesterday = datetime.datetime.now() - datetime.timedelta(days=1)
query_result = models.QueryResult.store_result(query.data_source.id, query.query_hash, query.query, "1",
123, yesterday)
@@ -102,15 +188,53 @@ class QueryArchiveTest(BaseTestCase):
self.assertRaises(models.Widget.DoesNotExist, models.Widget.get_by_id, widget.id)
def test_removes_scheduling(self):
query = query_factory.create(ttl=1)
query = query_factory.create(schedule="1")
query.archive()
query = models.Query.get_by_id(query.id)
self.assertEqual(-1, query.ttl)
self.assertEqual(None, query.schedule)
class DataSourceTest(BaseTestCase):
def test_get_schema(self):
return_value = [{'name': 'table', 'columns': []}]
with mock.patch('redash.query_runner.pg.PostgreSQL.get_schema') as patched_get_schema:
patched_get_schema.return_value = return_value
ds = data_source_factory.create()
schema = ds.get_schema()
self.assertEqual(return_value, schema)
def test_get_schema_uses_cache(self):
return_value = [{'name': 'table', 'columns': []}]
with mock.patch('redash.query_runner.pg.PostgreSQL.get_schema') as patched_get_schema:
patched_get_schema.return_value = return_value
ds = data_source_factory.create()
ds.get_schema()
schema = ds.get_schema()
self.assertEqual(return_value, schema)
self.assertEqual(patched_get_schema.call_count, 1)
def test_get_schema_skips_cache_with_refresh_true(self):
return_value = [{'name': 'table', 'columns': []}]
with mock.patch('redash.query_runner.pg.PostgreSQL.get_schema') as patched_get_schema:
patched_get_schema.return_value = return_value
ds = data_source_factory.create()
ds.get_schema()
new_return_value = [{'name': 'new_table', 'columns': []}]
patched_get_schema.return_value = new_return_value
schema = ds.get_schema(refresh=True)
self.assertEqual(new_return_value, schema)
self.assertEqual(patched_get_schema.call_count, 2)
class QueryResultTest(BaseTestCase):
def setUp(self):
@@ -144,7 +268,7 @@ class QueryResultTest(BaseTestCase):
yesterday = datetime.datetime.now() - datetime.timedelta(days=1)
qr = query_result_factory.create(retrieved_at=yesterday)
found_query_result = models.QueryResult.get_latest(qr.data_source, qr.query, ttl=60)
found_query_result = models.QueryResult.get_latest(qr.data_source, qr.query, max_age=60)
self.assertIsNone(found_query_result)
@@ -152,7 +276,7 @@ class QueryResultTest(BaseTestCase):
yesterday = datetime.datetime.now() - datetime.timedelta(seconds=30)
qr = query_result_factory.create(retrieved_at=yesterday)
found_query_result = models.QueryResult.get_latest(qr.data_source, qr.query, ttl=120)
found_query_result = models.QueryResult.get_latest(qr.data_source, qr.query, max_age=120)
self.assertEqual(found_query_result, qr)

View File

@@ -10,7 +10,7 @@ from redash.tasks import refresh_queries
# 2. test for the refresh_query task
class TestRefreshQueries(BaseTestCase):
def test_enqueues_outdated_queries(self):
query = query_factory.create(ttl=60)
query = query_factory.create(schedule="60")
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
@@ -22,7 +22,7 @@ class TestRefreshQueries(BaseTestCase):
add_job_mock.assert_called_with(query.query, query.data_source, scheduled=True)
def test_skips_fresh_queries(self):
query = query_factory.create(ttl=1200)
query = query_factory.create(schedule="1200")
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
@@ -32,7 +32,7 @@ class TestRefreshQueries(BaseTestCase):
self.assertFalse(add_job_mock.called)
def test_skips_queries_with_no_ttl(self):
query = query_factory.create(ttl=-1)
query = query_factory.create(schedule=None)
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
@@ -42,8 +42,8 @@ class TestRefreshQueries(BaseTestCase):
self.assertFalse(add_job_mock.called)
def test_enqueues_query_only_once(self):
query = query_factory.create(ttl=60)
query2 = query_factory.create(ttl=60, query=query.query, query_hash=query.query_hash,
query = query_factory.create(schedule="60")
query2 = query_factory.create(schedule="60", query=query.query, query_hash=query.query_hash,
data_source=query.data_source)
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
@@ -58,8 +58,8 @@ class TestRefreshQueries(BaseTestCase):
add_job_mock.assert_called_once_with(query.query, query.data_source, scheduled=True)
def test_enqueues_query_with_correct_data_source(self):
query = query_factory.create(ttl=60)
query2 = query_factory.create(ttl=60, query=query.query, query_hash=query.query_hash)
query = query_factory.create(schedule="60")
query2 = query_factory.create(schedule="60", query=query.query, query_hash=query.query_hash)
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
@@ -74,9 +74,10 @@ class TestRefreshQueries(BaseTestCase):
self.assertEquals(2, add_job_mock.call_count)
def test_enqueues_only_for_relevant_data_source(self):
query = query_factory.create(ttl=60)
query2 = query_factory.create(ttl=3600, query=query.query, query_hash=query.query_hash)
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
query = query_factory.create(schedule="60")
query2 = query_factory.create(schedule="3600", query=query.query, query_hash=query.query_hash)
import psycopg2
retrieved_at = datetime.datetime.utcnow().replace(tzinfo=psycopg2.tz.FixedOffsetTimezone(offset=0, name=None)) - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
query.latest_query_data = query_result