Compare commits


46 Commits

Author SHA1 Message Date
Arik Fraimovich
7db5449dad Merge pull request #387 from EverythingMe/feature/api_key_auth
Record event when accessing query result from API
2015-03-12 11:46:35 +02:00
Arik Fraimovich
7f6c7f0634 Record event when accessing query result from API 2015-03-12 11:43:21 +02:00
Arik Fraimovich
73955c74f7 Merge pull request #386 from EverythingMe/feature/api_key_auth
Code cleanup (remove "worker's status" dead link & unused settings)
2015-03-11 11:30:15 +02:00
Arik Fraimovich
7de85da8ef Remove unused settings 2015-03-11 07:50:49 +02:00
Arik Fraimovich
0aab35252a Remove broken "Worker's Status" page 2015-03-11 07:47:10 +02:00
Arik Fraimovich
141dbc9e70 Merge pull request #385 from EverythingMe/feature/api_key_auth
Feature: optional API Key authentication instead of HMAC
2015-03-10 18:29:01 +02:00
Arik Fraimovich
2e513c347c Cleanup 2015-03-10 18:21:51 +02:00
Arik Fraimovich
335c136ec2 Show API Key button in query view 2015-03-10 18:08:02 +02:00
Arik Fraimovich
df1170eb9b Feature: optional api key only authentication 2015-03-10 17:51:17 +02:00
Arik Fraimovich
69bcaddbe0 Fix: migrations stopped working due to peewee upgrade 2015-03-09 16:55:55 +02:00
Arik Fraimovich
67958cc27b MySQL query runner: make configuration access safer 2015-03-09 10:16:06 +02:00
Arik Fraimovich
6c716f23d9 Fix migration & query runner for mysql 2015-03-09 08:58:03 +02:00
Arik Fraimovich
bea11b0ac2 Merge pull request #384 from EverythingMe/feature/python_query_runner
Experimental Python query runner
2015-03-08 15:03:59 +02:00
Arik Fraimovich
4927386299 Experimental Python query runner 2015-03-08 15:02:57 +02:00
Arik Fraimovich
30a8550f6b Merge pull request #383 from EverythingMe/fix/migration
Fix: make migration work with new peewee
2015-03-08 14:37:42 +02:00
Arik Fraimovich
0389a45be4 Fix: make migration work with new peewee 2015-03-08 13:28:18 +02:00
Arik Fraimovich
707c169867 Merge pull request #382 from EverythingMe/feature/datasources_v2
Fix: import should be global
2015-03-08 12:27:34 +02:00
Arik Fraimovich
fca034ac0d Fix: import should be global 2015-03-08 12:23:51 +02:00
Arik Fraimovich
97691ea5ee Merge pull request #380 from EverythingMe/feature/datasources_v2
Refactor datasources (query runners)
2015-03-08 11:50:09 +02:00
Arik Fraimovich
40335a0e21 Fix: add missing option flags 2015-03-08 11:00:56 +02:00
Arik Fraimovich
9344cbd078 Update bootstrap script to support new format 2015-03-08 10:38:50 +02:00
Arik Fraimovich
9442fd9465 Update logging messages 2015-03-02 09:49:17 +02:00
Arik Fraimovich
c816f1003d Bump version 2015-03-02 09:45:29 +02:00
Arik Fraimovich
2107b79a80 Use validation for data source editing 2015-03-02 09:44:55 +02:00
Arik Fraimovich
8fae6de8c7 Update datasource CLI to use new format 2015-03-02 09:40:15 +02:00
Arik Fraimovich
d798c77574 Support for already valid data source config 2015-03-02 07:34:06 +02:00
Arik Fraimovich
0abce27381 Set configuration in base ctor 2015-02-24 07:50:10 +02:00
Arik Fraimovich
8a171ba39a Use JSON Schema for data source configuration 2015-02-24 07:50:10 +02:00
Arik Fraimovich
20af276772 Updated configuration spec to include friendly name and more 2015-02-24 07:50:10 +02:00
Arik Fraimovich
4058342763 WIP: configuration object 2015-02-24 07:50:10 +02:00
Arik Fraimovich
af64657260 Migration to update all data source options 2015-02-24 07:50:09 +02:00
Arik Fraimovich
b6bd46e59e New query runners implementation 2015-02-24 07:50:09 +02:00
Arik Fraimovich
31fe547e03 Merge pull request #378 from EverythingMe/feature/variables
Fix #263: timestamp fields should be with time zone
2015-02-23 11:10:20 +02:00
Arik Fraimovich
aff324071e Update peewee version 2015-02-23 09:19:39 +02:00
Arik Fraimovich
131266e408 Fix #263: timestamp fields should be with time zone 2015-02-23 09:02:16 +02:00
Arik Fraimovich
b1f97e8c8d Merge pull request #377 from olgakogan/master
'Download Dataset' fix - error in case of big numeric values
2015-02-21 15:21:18 +02:00
Arik Fraimovich
9783d6e839 Merge pull request #374 from akariv/master
Support unicode queries in search API
2015-02-21 14:48:36 +02:00
akariv
8eea2fb367 Support unicode queries in search API
Modify query test case to use unicode strings
2015-02-20 23:49:37 +02:00
olgakogan
b585480c81 removed redundant handling of large numbers when generating a csv file (causes ValueError: timestamp out of range) 2015-02-20 22:33:02 +02:00
Arik Fraimovich
89e307daba Merge pull request #373 from EverythingMe/feature/variables
UI Fixes
2015-02-08 18:18:37 +02:00
Arik Fraimovich
a5eb0e293c Fix: don't lock query editing while executing 2015-02-08 18:17:08 +02:00
Arik Fraimovich
48d1113225 Fix #371: show notification when query fails. 2015-02-08 18:08:24 +02:00
Arik Fraimovich
d82d5c3bdc Merge pull request #372 from EverythingMe/feature/variables
Several UI fixes
2015-02-08 18:05:05 +02:00
Arik Fraimovich
dfe58b3953 Give the user the option to disable sorting of chart data 2015-02-08 18:02:36 +02:00
Arik Fraimovich
44019b8357 Variables: allow nesting variables 2015-02-08 17:07:20 +02:00
Arik Fraimovich
3c15a44faf Fix: keyboard shortcuts were not unbinded 2015-02-08 17:07:06 +02:00
43 changed files with 1330 additions and 709 deletions

View File

@@ -1,12 +1,15 @@
from playhouse.migrate import Migrator
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models
if __name__ == '__main__':
db.connect_db()
migrator = Migrator(db.database)
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
migrator.add_column(models.Query, models.Query.is_archived, 'is_archived')
migrate(
migrator.add_column('queries', 'is_archived', models.Query.is_archived)
)
db.close_db(None)
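
Below is a minimal sketch of the playhouse.migrate API that this migration now targets (peewee 2.x), which is why the old `Migrator`-based calls stopped working. The database, table, and field names are illustrative, not taken from the migration above.

```python
# Sketch: adding a column with peewee 2.x's playhouse.migrate API.
import peewee
from playhouse.migrate import PostgresqlMigrator, migrate

db = peewee.PostgresqlDatabase('example_db')        # hypothetical database
migrator = PostgresqlMigrator(db)

# The field object is passed to add_column along with table and column names.
is_archived = peewee.BooleanField(default=False)

with db.transaction():
    migrate(
        migrator.add_column('queries', 'is_archived', is_archived)
    )
```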

View File

@@ -0,0 +1,21 @@
from redash.models import db
if __name__ == '__main__':
db.connect_db()
columns = (
('activity_log', 'created_at'),
('dashboards', 'created_at'),
('data_sources', 'created_at'),
('events', 'created_at'),
('groups', 'created_at'),
('queries', 'created_at'),
('widgets', 'created_at'),
('query_results', 'retrieved_at')
)
with db.database.transaction():
for column in columns:
db.database.execute_sql("ALTER TABLE {} ALTER COLUMN {} TYPE timestamp with time zone;".format(*column))
db.close_db(None)

View File

@@ -0,0 +1,73 @@
import json
from redash import query_runner
from redash.models import DataSource
def update(data_source):
print "[%s] Old options: %s" % (data_source.name, data_source.options)
if query_runner.validate_configuration(data_source.type, data_source.options):
print "[%s] configuration already valid. skipping." % data_source.name
return
if data_source.type == 'pg':
values = data_source.options.split(" ")
configuration = {}
for value in values:
k, v = value.split("=", 1)
configuration[k] = v
if k == 'port':
configuration[k] = int(v)
data_source.options = json.dumps(configuration)
elif data_source.type == 'mysql':
mapping = {
'Server': 'host',
'User': 'user',
'Pwd': 'passwd',
'Database': 'db'
}
values = data_source.options.split(";")
configuration = {}
for value in values:
k, v = value.split("=", 1)
configuration[mapping[k]] = v
data_source.options = json.dumps(configuration)
elif data_source.type == 'graphite':
old_config = json.loads(data_source.options)
configuration = {
"url": old_config["url"]
}
if "verify" in old_config:
configuration['verify'] = old_config['verify']
if "auth" in old_config:
configuration['username'], configuration['password'] = old_config["auth"]
data_source.options = json.dumps(configuration)
elif data_source.type == 'url':
data_source.options = json.dumps({"url": data_source.options})
elif data_source.type == 'script':
data_source.options = json.dumps({"path": data_source.options})
elif data_source.type == 'mongo':
data_source.type = 'mongodb'
else:
print "[%s] No need to convert type of: %s" % (data_source.name, data_source.type)
print "[%s] New options: %s" % (data_source.name, data_source.options)
data_source.save()
if __name__ == '__main__':
for data_source in DataSource.all():
update(data_source)
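
For reference, a before/after sketch of what this migration does to a "pg" data source's options. The connection values are made up; the shape of the conversion (space-separated `k=v` pairs becoming a JSON document, with `port` cast to an int) follows the code above.

```python
import json

# Illustrative (made-up) connection values for a "pg" data source.
old_options = "host=localhost port=5432 dbname=postgres user=redash"

# What the migration produces: the same keys as a JSON document,
# with "port" stored as an integer.
new_options = json.loads(
    '{"host": "localhost", "port": 5432, "dbname": "postgres", "user": "redash"}'
)
```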

View File

@@ -0,0 +1,12 @@
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
if __name__ == '__main__':
db.connect_db()
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
migrate(
migrator.drop_not_null('events', 'user_id')
)

View File

@@ -16,16 +16,9 @@
$timeout(refresh, 59 * 1000);
};
$scope.flowerUrl = featureFlags.flowerUrl;
refresh();
}
var AdminWorkersCtrl = function ($scope, $sce) {
$scope.flowerUrl = $sce.trustAsResourceUrl(featureFlags.flowerUrl);
};
angular.module('redash.admin_controllers', [])
.controller('AdminStatusCtrl', ['$scope', 'Events', '$http', '$timeout', AdminStatusCtrl])
.controller('AdminWorkersCtrl', ['$scope', '$sce', AdminWorkersCtrl])
})();

View File

@@ -29,7 +29,7 @@
}
});
KeyboardShortcuts.bind({
var shortcuts = {
'meta+s': function () {
if ($scope.canEdit) {
$scope.saveQuery();
@@ -44,7 +44,9 @@
'meta+enter': $scope.executeQuery,
// Ctrl+Enter for PC
'ctrl+enter': $scope.executeQuery
});
};
KeyboardShortcuts.bind(shortcuts);
// @override
$scope.saveQuery = function(options, data) {

View File

@@ -33,6 +33,10 @@
$scope.queryExecuting = lock;
};
$scope.showApiKey = function() {
alert("API Key for this query:\n" + $scope.query.api_key);
};
$scope.saveQuery = function(options, data) {
if (data) {
data.id = $scope.query.id;
@@ -155,6 +159,8 @@
$scope.query.queryResult = $scope.queryResult;
notifications.showNotification("re:dash", $scope.query.name + " updated.");
} else if (status == 'failed') {
notifications.showNotification("re:dash", $scope.query.name + " failed to run: " + $scope.queryResult.getError());
}
if (status === 'done' || status === 'failed') {

View File

@@ -98,13 +98,13 @@
</button>',
link: function($scope) {
$scope.formatQuery = function formatQuery() {
$scope.queryExecuting = true;
$scope.queryFormatting = true;
$http.post('/api/queries/format', {
'query': $scope.query.query
}).success(function (response) {
$scope.query.query = response;
}).finally(function () {
$scope.queryExecuting = false;
$scope.queryFormatting = false;
});
};
}

View File

@@ -305,8 +305,28 @@
scope.chart.series[0].remove(false);
};
// We check either for true or undefined for backward compatibility.
var series = scope.series;
if (chartOptions['sortX'] === true || chartOptions['sortX'] === undefined) {
var seriesCopy = [];
_.each(series, function (s) {
// make a copy of series data, so we don't override original.
var fieldName = 'x';
if (s.data.length > 0 && _.has(s.data[0], 'name')) {
fieldName = 'name';
};
var sorted = _.extend({}, s, {data: _.sortBy(s.data, fieldName)});
seriesCopy.push(sorted);
});
series = seriesCopy;
}
if (!('xAxis' in chartOptions && 'type' in chartOptions['xAxis'])) {
if (scope.series.length > 0 && _.some(scope.series[0].data, function (p) {
if (series.length > 0 && _.some(series[0].data, function (p) {
return (angular.isString(p.x) || angular.isDefined(p.name));
})) {
chartOptions['xAxis'] = chartOptions['xAxis'] || {};
@@ -318,13 +338,13 @@
}
if (chartOptions['xAxis']['type'] == 'category' || chartOptions['series']['type']=='pie') {
if (!angular.isDefined(scope.series[0].data[0].name)) {
if (!angular.isDefined(series[0].data[0].name)) {
// We need to make sure that for each category, each series has a value.
var categories = _.union.apply(this, _.map(scope.series, function (s) {
var categories = _.union.apply(this, _.map(series, function (s) {
return _.pluck(s.data, 'x')
}));
_.each(scope.series, function (s) {
_.each(series, function (s) {
// TODO: move this logic to Query#getChartData
var yValues = _.groupBy(s.data, 'x');
@@ -335,11 +355,6 @@
}
});
if (categories.length == 1) {
newData = _.sortBy(newData, 'y').reverse();
}
;
s.data = newData;
});
}
@@ -347,7 +362,7 @@
scope.chart.counters.color = 0;
_.each(scope.series, function (s) {
_.each(series, function (s) {
// here we override the series with the visualization config
s = _.extend(s, chartOptions['series']);

View File

@@ -214,10 +214,6 @@
}
});
_.each(series, function (series) {
series.data = _.sortBy(series.data, 'x');
});
return _.values(series);
};
@@ -469,11 +465,19 @@
Query.prototype.getParameters = function() {
var parts = Mustache.parse(this.query);
var parameters = [];
_.each(parts, function(part) {
if (part[0] == 'name') {
parameters.push(part[1]);
}
});
var collectParams = function(parts) {
parameters = [];
_.each(parts, function(part) {
if (part[0] == 'name' || part[0] == '&') {
parameters.push(part[1]);
} else if (part[0] == '#') {
parameters = _.union(parameters, collectParams(part[4]));
}
});
return parameters;
};
parameters = collectParams(parts);
return parameters;
}

View File

@@ -147,6 +147,10 @@
scope.stacking = scope.visualization.options.series.stacking;
}
if (scope.visualization.options.sortX === undefined) {
scope.visualization.options.sortX = true;
}
var refreshSeries = function() {
scope.series = _.map(scope.queryResult.getChartData(scope.visualization.options.columnMapping), function (s) { return s.name; });

View File

@@ -24,10 +24,6 @@
<span class="badge">{{manager.outdated_queries_count}}</span>
Outdated Queries Count
</li>
<li class="list-group-item" ng-if="flowerUrl">
<a href="/admin/workers">Workers' Status</a>
</li>
</ul>
<ul class="list-group col-lg-4">
<li class="list-group-item active">Queues</li>

View File

@@ -1,3 +0,0 @@
<div class="container-fluid iframe-container">
<iframe src="{{flowerUrl}}" style="width:100%; height:100%; background-color:transparent;"></iframe>
</div>

View File

@@ -82,7 +82,7 @@
<!-- code editor -->
<div ng-show="sourceMode">
<p>
<query-editor query="query" lock="queryExecuting"></query-editor>
<query-editor query="query" lock="queryFormatting"></query-editor>
</p>
<hr>
</div>
@@ -139,7 +139,11 @@
ng-show="!query.is_archived && query.id != undefined && (isQueryOwner || currentUser.hasPermission('admin'))">
<i class="fa fa-archive" title="Archive Query"></i>
</a>
<button class="btn btn-default btn-sm" ng-show="query.id != undefined" ng-click="showApiKey()">
<i class="fa fa-key" title="Show API Key"></i>
</button>
<div class="modal fade" id="archive-confirmation-modal" tabindex="-1" role="dialog" aria-labelledby="archiveConfirmationModal" aria-hidden="true">
<div class="modal-dialog">
<div class="modal-content">

View File

@@ -46,6 +46,14 @@
placeholder="Auto">
</div>
</div>
<div class="form-group">
<label class="control-label col-sm-2">Sort X Values</label>
<div class="col-sm-10">
<input name="sortX" type="checkbox" class="form-control"
ng-model="visualization.options.sortX">
</div>
</div>
</div>
</div>

View File

@@ -4,8 +4,9 @@ import redis
from statsd import StatsClient
from redash import settings
from redash.query_runner import import_query_runners
__version__ = '0.5.0'
__version__ = '0.6.0'
def setup_logging():
@@ -31,4 +32,6 @@ def create_redis_connection():
setup_logging()
redis_connection = create_redis_connection()
statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX)
statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX)
import_query_runners(settings.QUERY_RUNNERS)

View File

@@ -5,7 +5,7 @@ import time
import logging
from flask import request, make_response, redirect, url_for
from flask.ext.login import LoginManager, login_user, current_user
from flask.ext.login import LoginManager, login_user, current_user, logout_user
from redash import models, settings, google_oauth
@@ -23,9 +23,38 @@ def sign(key, path, expires):
return h.hexdigest()
class HMACAuthentication(object):
@staticmethod
def api_key_authentication():
class Authentication(object):
def verify_authentication(self):
return False
def required(self, fn):
@functools.wraps(fn)
def decorated(*args, **kwargs):
if current_user.is_authenticated() or self.verify_authentication():
return fn(*args, **kwargs)
return make_response(redirect(url_for("login", next=request.url)))
return decorated
class ApiKeyAuthentication(Authentication):
def verify_authentication(self):
api_key = request.args.get('api_key')
query_id = request.view_args.get('query_id', None)
if query_id and api_key:
query = models.Query.get(models.Query.id == query_id)
if query.api_key and api_key == query.api_key:
login_user(models.ApiUser(query.api_key), remember=False)
return True
return False
class HMACAuthentication(Authentication):
def verify_authentication(self):
signature = request.args.get('signature')
expires = float(request.args.get('expires') or 0)
query_id = request.view_args.get('query_id', None)
@@ -41,22 +70,14 @@ class HMACAuthentication(object):
return False
def required(self, fn):
@functools.wraps(fn)
def decorated(*args, **kwargs):
if current_user.is_authenticated():
return fn(*args, **kwargs)
if self.api_key_authentication():
return fn(*args, **kwargs)
return make_response(redirect(url_for("login", next=request.url)))
return decorated
@login_manager.user_loader
def load_user(user_id):
# If the user was previously logged in as api user, the user_id will be the api key and will raise an exception as
# it can't be casted to int.
if isinstance(user_id, basestring) and not user_id.isdigit():
return None
return models.User.select().where(models.User.id == user_id).first()
@@ -66,4 +87,13 @@ def setup_authentication(app):
app.secret_key = settings.COOKIE_SECRET
app.register_blueprint(google_oauth.blueprint)
return HMACAuthentication()
if settings.AUTH_TYPE == 'hmac':
auth = HMACAuthentication()
elif settings.AUTH_TYPE == 'api_key':
auth = ApiKeyAuthentication()
else:
logger.warning("Unknown authentication type ({}). Using default (HMAC).".format(settings.AUTH_TYPE))
auth = HMACAuthentication()
return auth
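
With `AUTH_TYPE = 'api_key'`, a client can fetch a query's results by passing `api_key` as a query-string parameter instead of an HMAC signature. A rough sketch follows; the exact results path varies between re:dash versions, so treat the URL, query id, and key below as assumptions.

```python
# Sketch: calling the query results endpoint with API-key authentication.
import requests

BASE_URL = "http://redash.example.com"   # hypothetical deployment
QUERY_ID = 42                            # hypothetical query id
API_KEY = "key shown by the 'Show API Key' button"  # per-query key

resp = requests.get(
    "{}/api/queries/{}/results.json".format(BASE_URL, QUERY_ID),  # assumed path
    params={"api_key": API_KEY},
)
resp.raise_for_status()
print(resp.json())
```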

View File

@@ -1,5 +1,8 @@
import json
import click
from flask.ext.script import Manager
from redash import models
from redash.query_runner import query_runners, validate_configuration
manager = Manager(help="Data sources management commands.")
@@ -13,11 +16,70 @@ def list():
print "Id: {}\nName: {}\nType: {}\nOptions: {}".format(ds.id, ds.name, ds.type, ds.options)
def validate_data_source_type(type):
if type not in query_runners.keys():
print "Error: the type \"{}\" is not supported (supported types: {}).".format(type, ", ".join(query_runners.keys()))
exit()
def validate_data_source_options(type, options):
if not validate_configuration(type, options):
print "Error: invalid configuration."
exit()
@manager.command
def new(name, type, options):
def new(name=None, type=None, options=None):
"""Create new data source"""
# TODO: validate it's a valid type and in the future, validate the options.
if name is None:
name = click.prompt("Name")
if type is None:
print "Select type:"
for i, query_runner_name in enumerate(query_runners.keys()):
print "{}. {}".format(i+1, query_runner_name)
idx = 0
while idx < 1 or idx > len(query_runners.keys()):
idx = click.prompt("[{}-{}]".format(1, len(query_runners.keys())), type=int)
type = query_runners.keys()[idx-1]
else:
validate_data_source_type(type)
if options is None:
query_runner = query_runners[type]
schema = query_runner.configuration_schema()
types = {
'string': unicode,
'number': int,
'boolean': bool
}
options_obj = {}
for k, prop in schema['properties'].iteritems():
required = k in schema.get('required', [])
default_value = "<<DEFAULT_VALUE>>"
if required:
default_value = None
prompt = prop.get('title', k.capitalize())
if required:
prompt = "{} (required)".format(prompt)
else:
prompt = "{} (optional)".format(prompt)
value = click.prompt(prompt, default=default_value, type=types[prop['type']], show_default=False)
if value != default_value:
options_obj[k] = value
options = json.dumps(options_obj)
validate_data_source_options(type, options)
print "Creating {} data source ({}) with options:\n{}".format(type, name, options)
data_source = models.DataSource.create(name=name,
type=type,
options=options)
@@ -49,7 +111,14 @@ def update_attr(obj, attr, new_value):
def edit(name, new_name=None, options=None, type=None):
"""Edit data source settings (name, options, type)"""
try:
if type is not None:
validate_data_source_type(type)
data_source = models.DataSource.get(models.DataSource.name==name)
if options is not None:
validate_data_source_options(data_source.type, options)
update_attr(data_source, "name", new_name)
update_attr(data_source, "type", type)
update_attr(data_source, "options", options)

View File

@@ -7,9 +7,8 @@ but this is only due to configuration issues and temporary.
import csv
import hashlib
import json
import numbers
import cStringIO
import datetime
import time
import logging
from flask import render_template, send_from_directory, make_response, request, jsonify, redirect, \
@@ -23,6 +22,7 @@ from redash.wsgi import app, auth, api
from redash.tasks import QueryTask, record_event
from redash.cache import headers as cache_headers
from redash.permissions import require_permission
from redash.query_runner import query_runners, validate_configuration
@app.route('/ping', methods=['GET'])
@@ -52,8 +52,7 @@ def index(**kwargs):
}
features = {
'clientSideMetrics': settings.CLIENT_SIDE_METRICS,
'flowerUrl': settings.CELERY_FLOWER_URL
'clientSideMetrics': settings.CLIENT_SIDE_METRICS
}
return render_template("index.html", user=json.dumps(user), name=settings.NAME,
@@ -174,11 +173,35 @@ class MetricsAPI(BaseResource):
api.add_resource(MetricsAPI, '/api/metrics/v1/send', endpoint='metrics')
class DataSourceTypeListAPI(BaseResource):
@require_permission("admin")
def get(self):
return [q.to_dict() for q in query_runners.values()]
api.add_resource(DataSourceTypeListAPI, '/api/data_sources/types', endpoint='data_source_types')
class DataSourceListAPI(BaseResource):
def get(self):
data_sources = [ds.to_dict() for ds in models.DataSource.all()]
return data_sources
@require_permission("admin")
def post(self):
req = request.get_json(True)
required_fields = ('options', 'name', 'type')
for f in required_fields:
if f not in req:
abort(400)
if not validate_configuration(req['type'], req['options']):
abort(400)
datasource = models.DataSource.create(name=req['name'], type=req['type'], options=req['options'])
return datasource.to_dict()
api.add_resource(DataSourceListAPI, '/api/data_sources', endpoint='data_sources')
@@ -449,10 +472,6 @@ class QueryResultAPI(BaseResource):
writer.writer = utils.UnicodeWriter(s)
writer.writeheader()
for row in query_data['rows']:
for k, v in row.iteritems():
if isinstance(v, numbers.Number) and (v > 1000 * 1000 * 1000 * 100):
row[k] = datetime.datetime.fromtimestamp(v/1000.0)
writer.writerow(row)
headers = {'Content-Type': "text/csv; charset=UTF-8"}
@@ -470,6 +489,24 @@ class QueryResultAPI(BaseResource):
query_result = models.QueryResult.get_by_id(query_result_id)
if query_result:
if isinstance(self.current_user, models.ApiUser):
event = {
'user_id': None,
'action': 'api_get',
'timestamp': int(time.time()),
'api_key': self.current_user.id,
'file_type': filetype
}
if query_id:
event['object_type'] = 'query'
event['object_id'] = query_id
else:
event['object_type'] = 'query_result'
event['object_id'] = query_result_id
record_event.delay(event)
if filetype == 'json':
data = json.dumps({'query_result': query_result.to_dict()}, cls=utils.JSONEncoder)
return make_response(data, 200, cache_headers)

View File

@@ -1,34 +0,0 @@
import json
def get_query_runner(connection_type, connection_string):
if connection_type == 'mysql':
from redash.data import query_runner_mysql
runner = query_runner_mysql.mysql(connection_string)
elif connection_type == 'graphite':
from redash.data import query_runner_graphite
connection_params = json.loads(connection_string)
if connection_params['auth']:
connection_params['auth'] = tuple(connection_params['auth'])
else:
connection_params['auth'] = None
runner = query_runner_graphite.graphite(connection_params)
elif connection_type == 'bigquery':
from redash.data import query_runner_bigquery
connection_params = json.loads(connection_string)
runner = query_runner_bigquery.bigquery(connection_params)
elif connection_type == 'script':
from redash.data import query_runner_script
runner = query_runner_script.script(connection_string)
elif connection_type == 'url':
from redash.data import query_runner_url
runner = query_runner_url.url(connection_string)
elif connection_type == "mongo":
from redash.data import query_runner_mongodb
connection_params = json.loads(connection_string)
runner = query_runner_mongodb.mongodb(connection_params)
else:
from redash.data import query_runner_pg
runner = query_runner_pg.pg(connection_string)
return runner

View File

@@ -1,46 +0,0 @@
"""
QueryRunner for Graphite.
"""
import json
import datetime
import requests
from redash.utils import JSONEncoder
def graphite(connection_params):
def transform_result(response):
columns = [{'name': 'Time::x'}, {'name': 'value::y'}, {'name': 'name::series'}]
rows = []
for series in response.json():
for values in series['datapoints']:
timestamp = datetime.datetime.fromtimestamp(int(values[1]))
rows.append({'Time::x': timestamp, 'name::series': series['target'], 'value::y': values[0]})
data = {'columns': columns, 'rows': rows}
return json.dumps(data, cls=JSONEncoder)
def query_runner(query):
base_url = "%s/render?format=json&" % connection_params['url']
url = "%s%s" % (base_url, "&".join(query.split("\n")))
error = None
data = None
try:
response = requests.get(url, auth=connection_params['auth'],
verify=connection_params['verify'])
if response.status_code == 200:
data = transform_result(response)
else:
error = "Failed getting results (%d)" % response.status_code
except Exception, ex:
data = None
error = ex.message
return data, error
query_runner.annotate_query = False
return query_runner

View File

@@ -1,242 +0,0 @@
import datetime
import logging
import json
import sys
import re
import time
from redash.utils import JSONEncoder
try:
import pymongo
from bson.objectid import ObjectId
from bson.son import SON
except ImportError:
print "Missing dependencies. Please install pymongo."
print "You can use pip: pip install pymongo"
raise
TYPES_MAP = {
ObjectId : "string",
str : "string",
unicode : "string",
int : "integer",
long : "integer",
float : "float",
bool : "boolean",
datetime.datetime: "datetime",
}
date_regex = re.compile("ISODate\(\"(.*)\"\)", re.IGNORECASE)
# Simple query example:
#
# {
# "collection" : "my_collection",
# "query" : {
# "date" : {
# "$gt" : "ISODate(\"2015-01-15 11:41\")",
# },
# "type" : 1
# },
# "fields" : {
# "_id" : 1,
# "name" : 2
# },
# "sort" : [
# {
# "name" : "date",
# "direction" : -1
# }
# ]
#
# }
#
#
# Aggregation
# ===========
# Uses a syntax similar to the one used in PyMongo, however to support the
# correct order of sorting, it uses a regular list for the "$sort" operation
# that converts into a SON (sorted dictionary) object before execution.
#
# Aggregation query example:
#
# {
# "collection" : "things",
# "aggregate" : [
# {
# "$unwind" : "$tags"
# },
# {
# "$group" : {
# {
# "_id" : "$tags",
# "count" : { "$sum" : 1 }
# }
# }
# },
# {
# "$sort" : [
# {
# "name" : "count",
# "direction" : -1
# },
# {
# "name" : "_id",
# "direction" : -1
# }
# ]
# }
# ]
# }
#
#
def mongodb(connection_string):
def _get_column_by_name(columns, column_name):
for c in columns:
if "name" in c and c["name"] == column_name:
return c
return None
def _convert_date(q, field_name):
m = date_regex.findall(q[field_name])
if len(m) > 0:
if q[field_name].find(":") == -1:
q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d")))
else:
q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d %H:%M")))
def query_runner(query):
if not "dbName" in connection_string or not connection_string["dbName"]:
return None, "dbName is missing from connection string JSON or is empty"
db_name = connection_string["dbName"]
if not "connectionString" in connection_string or not connection_string["connectionString"]:
return None, "connectionString is missing from connection string JSON or is empty"
is_replica_set = True if "replicaSetName" in connection_string and connection_string["replicaSetName"] else False
if is_replica_set:
if not connection_string["replicaSetName"]:
return None, "replicaSetName is set in the connection string JSON but is empty"
db_connection = pymongo.MongoReplicaSetClient(connection_string["connectionString"], replicaSet=connection_string["replicaSetName"])
else:
db_connection = pymongo.MongoClient(connection_string["connectionString"])
if db_name not in db_connection.database_names():
return None, "Unknown database name '%s'" % db_name
db = db_connection[db_name]
logging.debug("mongodb connection string: %s", connection_string)
logging.debug("mongodb got query: %s", query)
try:
query_data = json.loads(query)
except:
return None, "Invalid query format. The query is not a valid JSON."
if "query" in query_data and "aggregate" in query_data:
return None, "'query' and 'aggregate' sections cannot be used at the same time"
collection = None
if not "collection" in query_data:
return None, "'collection' must be set"
else:
collection = query_data["collection"]
q = None
if "query" in query_data:
q = query_data["query"]
for k in q:
if q[k] and type(q[k]) in [str, unicode]:
logging.debug(q[k])
_convert_date(q, k)
elif q[k] and type(q[k]) is dict:
for k2 in q[k]:
if type(q[k][k2]) in [str, unicode]:
_convert_date(q[k], k2)
f = None
aggregate = None
if "aggregate" in query_data:
aggregate = query_data["aggregate"]
for step in aggregate:
if "$sort" in step:
sort_list = []
for sort_item in step["$sort"]:
sort_list.append((sort_item["name"], sort_item["direction"]))
step["$sort"] = SON(sort_list)
if aggregate:
pass
else:
s = None
if "sort" in query_data and query_data["sort"]:
s = []
for field in query_data["sort"]:
s.append((field["name"], field["direction"]))
if "fields" in query_data:
f = query_data["fields"]
columns = []
rows = []
error = None
json_data = None
cursor = None
if q or (not q and not aggregate):
if s:
cursor = db[collection].find(q, f).sort(s)
else:
cursor = db[collection].find(q, f)
if "skip" in query_data:
cursor = cursor.skip(query_data["skip"])
if "limit" in query_data:
cursor = cursor.limit(query_data["limit"])
elif aggregate:
r = db[collection].aggregate(aggregate)
cursor = r["result"]
for r in cursor:
for k in r:
if _get_column_by_name(columns, k) is None:
columns.append({
"name": k,
"friendly_name": k,
"type": TYPES_MAP[type(r[k])] if type(r[k]) in TYPES_MAP else None
})
# Convert ObjectId to string
if type(r[k]) == ObjectId:
r[k] = str(r[k])
rows.append(r)
if f:
ordered_columns = []
for k in sorted(f, key=f.get):
ordered_columns.append(_get_column_by_name(columns, k))
columns = ordered_columns
data = {
"columns": columns,
"rows": rows
}
error = None
json_data = json.dumps(data, cls=JSONEncoder)
return json_data, error
query_runner.annotate_query = False
return query_runner

View File

@@ -1,64 +0,0 @@
"""
QueryRunner is the function that the workers use, to execute queries. This is the Redshift
(PostgreSQL in fact) version, but easily we can write another to support additional databases
(MySQL and others).
Because the worker just pass the query, this can be used with any data store that has some sort of
query language (for example: HiveQL).
"""
import logging
import json
import MySQLdb
import sys
from redash.utils import JSONEncoder
def mysql(connection_string):
if connection_string.endswith(';'):
connection_string = connection_string[0:-1]
def query_runner(query):
connections_params = [entry.split('=')[1] for entry in connection_string.split(';')]
connection = MySQLdb.connect(*connections_params, charset="utf8", use_unicode=True)
cursor = connection.cursor()
logging.debug("mysql got query: %s", query)
try:
cursor.execute(query)
data = cursor.fetchall()
cursor_desc = cursor.description
if (cursor_desc != None):
num_fields = len(cursor_desc)
column_names = [i[0] for i in cursor.description]
rows = [dict(zip(column_names, row)) for row in data]
columns = [{'name': col_name,
'friendly_name': col_name,
'type': None} for col_name in column_names]
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
error = None
else:
json_data = None
error = "No data was returned."
cursor.close()
except MySQLdb.Error, e:
json_data = None
error = e.args[1]
except KeyboardInterrupt:
error = "Query cancelled by user."
json_data = None
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
finally:
connection.close()
return json_data, error
return query_runner

View File

@@ -1,110 +0,0 @@
"""
QueryRunner is the function that the workers use, to execute queries. This is the PostgreSQL
version, but easily we can write another to support additional databases (MySQL and others).
Because the worker just pass the query, this can be used with any data store that has some sort of
query language (for example: HiveQL).
"""
import json
import sys
import select
import logging
import psycopg2
from redash.utils import JSONEncoder
types_map = {
20: 'integer',
21: 'integer',
23: 'integer',
700: 'float',
1700: 'float',
701: 'float',
16: 'boolean',
1082: 'date',
1114: 'datetime',
1184: 'datetime',
1014: 'string',
1015: 'string',
1008: 'string',
1009: 'string',
2951: 'string'
}
def pg(connection_string):
def column_friendly_name(column_name):
return column_name
def wait(conn):
while 1:
try:
state = conn.poll()
if state == psycopg2.extensions.POLL_OK:
break
elif state == psycopg2.extensions.POLL_WRITE:
select.select([], [conn.fileno()], [])
elif state == psycopg2.extensions.POLL_READ:
select.select([conn.fileno()], [], [])
else:
raise psycopg2.OperationalError("poll() returned %s" % state)
except select.error:
raise psycopg2.OperationalError("select.error received")
def query_runner(query):
connection = psycopg2.connect(connection_string, async=True)
wait(connection)
cursor = connection.cursor()
try:
cursor.execute(query)
wait(connection)
# While set would be more efficient here, it sorts the data which is not what we want, but due to the small
# size of the data we can assume it's ok.
column_names = []
columns = []
duplicates_counter = 1
for column in cursor.description:
# TODO: this deduplication needs to be generalized and reused in all query runners.
column_name = column.name
if column_name in column_names:
column_name = column_name + str(duplicates_counter)
duplicates_counter += 1
column_names.append(column_name)
columns.append({
'name': column_name,
'friendly_name': column_friendly_name(column_name),
'type': types_map.get(column.type_code, None)
})
rows = [dict(zip(column_names, row)) for row in cursor]
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
error = None
cursor.close()
except (select.error, OSError) as e:
logging.exception(e)
error = "Query interrupted. Please retry."
json_data = None
except psycopg2.DatabaseError as e:
logging.exception(e)
json_data = None
error = e.message
except KeyboardInterrupt:
connection.cancel()
error = "Query cancelled by user."
json_data = None
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
finally:
connection.close()
return json_data, error
return query_runner

View File

@@ -1,51 +0,0 @@
import json
import logging
import sys
import os
import subprocess
# We use subprocess.check_output because we are lazy.
# If someone will really want to run this on Python < 2.7 they can easily update the code to run
# Popen, check the retcodes and other things and read the standard output to a variable.
if not "check_output" in subprocess.__dict__:
print "ERROR: This runner uses subprocess.check_output function which exists in Python 2.7"
def script(connection_string):
def query_runner(query):
try:
json_data = None
error = None
if connection_string is None:
return None, "script execution path is not set. Please reconfigure the data source"
# Poor man's protection against running scripts from output the scripts directory
if connection_string.find("../") > -1:
return None, "Scripts can only be run from the configured scripts directory"
query = query.strip()
script = os.path.join(connection_string, query)
if not os.path.exists(script):
return None, "Script '%s' not found in script directory" % query
output = subprocess.check_output(script, shell=False)
if output != None:
output = output.strip()
if output != "":
return output, None
error = "Error reading output"
except subprocess.CalledProcessError as e:
return None, str(e)
except KeyboardInterrupt:
error = "Query cancelled by user."
json_data = None
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
return json_data, error
query_runner.annotate_query = False
return query_runner

View File

@@ -9,7 +9,7 @@ import itertools
import peewee
from passlib.apps import custom_app_context as pwd_context
from playhouse.postgres_ext import ArrayField
from playhouse.postgres_ext import ArrayField, DateTimeTZField, PostgresqlExtDatabase
from flask.ext.login import UserMixin, AnonymousUserMixin
from redash import utils, settings
@@ -18,8 +18,9 @@ from redash import utils, settings
class Database(object):
def __init__(self):
self.database_config = dict(settings.DATABASE_CONFIG)
self.database_config['register_hstore'] = False
self.database_name = self.database_config.pop('name')
self.database = peewee.PostgresqlDatabase(self.database_name, **self.database_config)
self.database = PostgresqlExtDatabase(self.database_name, **self.database_config)
self.app = None
self.pid = os.getpid()
@@ -83,6 +84,9 @@ class ApiUser(UserMixin, PermissionsCheckMixin):
def __init__(self, api_key):
self.id = api_key
def __repr__(self):
return u"<ApiUser: {}>".format(self.id)
@property
def permissions(self):
return ['view_query']
@@ -96,7 +100,7 @@ class Group(BaseModel):
name = peewee.CharField(max_length=100)
permissions = ArrayField(peewee.CharField, default=DEFAULT_PERMISSIONS)
tables = ArrayField(peewee.CharField)
created_at = peewee.DateTimeField(default=datetime.datetime.now)
created_at = DateTimeTZField(default=datetime.datetime.now)
class Meta:
db_table = 'groups'
@@ -173,7 +177,7 @@ class ActivityLog(BaseModel):
user = peewee.ForeignKeyField(User)
type = peewee.IntegerField()
activity = peewee.TextField()
created_at = peewee.DateTimeField(default=datetime.datetime.now)
created_at = DateTimeTZField(default=datetime.datetime.now)
class Meta:
db_table = 'activity_log'
@@ -198,7 +202,7 @@ class DataSource(BaseModel):
options = peewee.TextField()
queue_name = peewee.CharField(default="queries")
scheduled_queue_name = peewee.CharField(default="queries")
created_at = peewee.DateTimeField(default=datetime.datetime.now)
created_at = DateTimeTZField(default=datetime.datetime.now)
class Meta:
db_table = 'data_sources'
@@ -222,7 +226,7 @@ class QueryResult(BaseModel):
query = peewee.TextField()
data = peewee.TextField()
runtime = peewee.FloatField()
retrieved_at = peewee.DateTimeField()
retrieved_at = DateTimeTZField()
class Meta:
db_table = 'query_results'
@@ -297,7 +301,7 @@ class Query(BaseModel):
user_email = peewee.CharField(max_length=360, null=True)
user = peewee.ForeignKeyField(User)
is_archived = peewee.BooleanField(default=False, index=True)
created_at = peewee.DateTimeField(default=datetime.datetime.now)
created_at = DateTimeTZField(default=datetime.datetime.now)
class Meta:
db_table = 'queries'
@@ -381,7 +385,7 @@ class Query(BaseModel):
def search(cls, term):
# This is very naive implementation of search, to be replaced with PostgreSQL full-text-search solution.
where = (cls.name**"%{}%".format(term)) | (cls.description**"%{}%".format(term))
where = (cls.name**u"%{}%".format(term)) | (cls.description**u"%{}%".format(term))
if term.isdigit():
where |= cls.id == term
@@ -441,7 +445,7 @@ class Dashboard(BaseModel):
layout = peewee.TextField()
dashboard_filters_enabled = peewee.BooleanField(default=False)
is_archived = peewee.BooleanField(default=False, index=True)
created_at = peewee.DateTimeField(default=datetime.datetime.now)
created_at = DateTimeTZField(default=datetime.datetime.now)
class Meta:
db_table = 'dashboards'
@@ -552,7 +556,7 @@ class Widget(BaseModel):
width = peewee.IntegerField()
options = peewee.TextField()
dashboard = peewee.ForeignKeyField(Dashboard, related_name='widgets', index=True)
created_at = peewee.DateTimeField(default=datetime.datetime.now)
created_at = DateTimeTZField(default=datetime.datetime.now)
# unused; kept for backward compatability:
type = peewee.CharField(max_length=100, null=True)
@@ -586,13 +590,14 @@ class Widget(BaseModel):
self.dashboard.save()
super(Widget, self).delete_instance(*args, **kwargs)
class Event(BaseModel):
user = peewee.ForeignKeyField(User, related_name="events")
user = peewee.ForeignKeyField(User, related_name="events", null=True)
action = peewee.CharField()
object_type = peewee.CharField()
object_id = peewee.CharField(null=True)
additional_properties = peewee.TextField(null=True)
created_at = peewee.DateTimeField(default=datetime.datetime.now)
created_at = DateTimeTZField(default=datetime.datetime.now)
class Meta:
db_table = 'events'

View File

@@ -0,0 +1,104 @@
import logging
import json
import jsonschema
from jsonschema import ValidationError
logger = logging.getLogger(__name__)
__all__ = [
'ValidationError',
'BaseQueryRunner',
'TYPE_DATETIME',
'TYPE_BOOLEAN',
'TYPE_INTEGER',
'TYPE_STRING',
'TYPE_DATE',
'TYPE_FLOAT',
'register',
'get_query_runner',
'import_query_runners'
]
# Valid types of columns returned in results:
TYPE_INTEGER = 'integer'
TYPE_FLOAT = 'float'
TYPE_BOOLEAN = 'boolean'
TYPE_STRING = 'string'
TYPE_DATETIME = 'datetime'
TYPE_DATE = 'date'
class BaseQueryRunner(object):
def __init__(self, configuration):
jsonschema.validate(configuration, self.configuration_schema())
self.configuration = configuration
@classmethod
def name(cls):
return cls.__name__
@classmethod
def type(cls):
return cls.__name__.lower()
@classmethod
def enabled(cls):
return True
@classmethod
def annotate_query(cls):
return True
@classmethod
def configuration_schema(cls):
return {}
def run_query(self, query):
raise NotImplementedError()
@classmethod
def to_dict(cls):
return {
'name': cls.name(),
'type': cls.type(),
'configuration_schema': cls.configuration_schema()
}
query_runners = {}
def register(query_runner_class):
global query_runners
if query_runner_class.enabled():
logger.debug("Registering %s (%s) query runner.", query_runner_class.name(), query_runner_class.type())
query_runners[query_runner_class.type()] = query_runner_class
else:
logger.warning("%s query runner enabled but not supported, not registering. Either disable or install missing dependencies.", query_runner_class.name())
def get_query_runner(query_runner_type, configuration_json):
query_runner_class = query_runners.get(query_runner_type, None)
if query_runner_class is None:
return None
return query_runner_class(json.loads(configuration_json))
def validate_configuration(query_runner_type, configuration_json):
query_runner_class = query_runners.get(query_runner_type, None)
if query_runner_class is None:
return False
try:
jsonschema.validate(json.loads(configuration_json), query_runner_class.configuration_schema())
except (ValidationError, ValueError):
return False
return True
def import_query_runners(query_runner_imports):
for runner_import in query_runner_imports:
__import__(runner_import)
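
To show how the new interface is meant to be used, here is a toy runner built on `BaseQueryRunner`; "EchoRunner" is hypothetical and simply returns the query text as a single-cell result, but it exercises the `configuration_schema` / `run_query` / `register` contract defined above.

```python
# Sketch: a minimal query runner on top of the new BaseQueryRunner interface.
import json
from redash.query_runner import BaseQueryRunner, TYPE_STRING, register


class EchoRunner(BaseQueryRunner):
    @classmethod
    def configuration_schema(cls):
        # jsonschema document; this toy runner needs no options.
        return {'type': 'object', 'properties': {}}

    def run_query(self, query):
        data = {
            'columns': [{'name': 'echo', 'friendly_name': 'echo', 'type': TYPE_STRING}],
            'rows': [{'echo': query}],
        }
        # Runners return (json_data, error).
        return json.dumps(data), None


register(EchoRunner)
```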

View File

@@ -1,29 +1,37 @@
import datetime
import httplib2
import json
import httplib2
import logging
import sys
import time
from redash.query_runner import *
from redash.utils import JSONEncoder
logger = logging.getLogger(__name__)
try:
import apiclient.errors
from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.client import SignedJwtAssertionCredentials
except ImportError:
print "Missing dependencies. Please install google-api-python-client and oauth2client."
print "You can use pip: pip install google-api-python-client oauth2client"
from redash.utils import JSONEncoder
enabled = True
except ImportError:
logger.warning("Missing dependencies. Please install google-api-python-client and oauth2client.")
logger.warning("You can use pip: pip install google-api-python-client oauth2client")
enabled = False
types_map = {
'INTEGER': 'integer',
'FLOAT': 'float',
'BOOLEAN': 'boolean',
'STRING': 'string',
'TIMESTAMP': 'datetime',
'INTEGER': TYPE_INTEGER,
'FLOAT': TYPE_FLOAT,
'BOOLEAN': TYPE_BOOLEAN,
'STRING': TYPE_STRING,
'TIMESTAMP': TYPE_DATETIME,
}
def transform_row(row, fields):
column_index = 0
row_data = {}
@@ -49,37 +57,71 @@ def transform_row(row, fields):
return row_data
def bigquery(connection_string):
def load_key(filename):
f = file(filename, "rb")
try:
return f.read()
finally:
f.close()
def get_bigquery_service():
scope = [
"https://www.googleapis.com/auth/bigquery",
]
def _load_key(filename):
f = file(filename, "rb")
try:
return f.read()
finally:
f.close()
credentials = SignedJwtAssertionCredentials(connection_string["serviceAccount"],
load_key(connection_string["privateKey"]), scope=scope)
http = httplib2.Http()
http = credentials.authorize(http)
return build("bigquery", "v2", http=http)
def _get_bigquery_service(service_account, private_key):
scope = [
"https://www.googleapis.com/auth/bigquery",
]
def get_query_results(jobs, project_id, job_id, start_index):
query_reply = jobs.getQueryResults(projectId=project_id, jobId=job_id, startIndex=start_index).execute()
logging.debug('query_reply %s', query_reply)
if not query_reply['jobComplete']:
time.sleep(10)
return get_query_results(jobs, project_id, job_id, start_index)
credentials = SignedJwtAssertionCredentials(service_account, private_key, scope=scope)
http = httplib2.Http()
http = credentials.authorize(http)
return query_reply
return build("bigquery", "v2", http=http)
def query_runner(query):
bigquery_service = get_bigquery_service()
def _get_query_results(jobs, project_id, job_id, start_index):
query_reply = jobs.getQueryResults(projectId=project_id, jobId=job_id, startIndex=start_index).execute()
logging.debug('query_reply %s', query_reply)
if not query_reply['jobComplete']:
time.sleep(10)
return _get_query_results(jobs, project_id, job_id, start_index)
return query_reply
class BigQuery(BaseQueryRunner):
@classmethod
def enabled(cls):
return enabled
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'serviceAccount': {
'type': 'string',
'title': 'Service Account'
},
'projectId': {
'type': 'string',
'title': 'Project ID'
},
'privateKey': {
'type': 'string',
'title': 'Private Key Path'
}
},
'required': ['serviceAccount', 'projectId', 'privateKey']
}
def __init__(self, configuration_json):
super(BigQuery, self).__init__(configuration_json)
self.private_key = _load_key(self.configuration["privateKey"])
def run_query(self, query):
bigquery_service = _get_bigquery_service(self.configuration["serviceAccount"],
self.private_key)
jobs = bigquery_service.jobs()
job_data = {
@@ -90,17 +132,17 @@ def bigquery(connection_string):
}
}
logging.debug("bigquery got query: %s", query)
logger.debug("BigQuery got query: %s", query)
project_id = connection_string["projectId"]
project_id = self.configuration["projectId"]
try:
insert_response = jobs.insert(projectId=project_id, body=job_data).execute()
current_row = 0
query_reply = get_query_results(jobs, project_id=project_id,
query_reply = _get_query_results(jobs, project_id=project_id,
job_id=insert_response['jobReference']['jobId'], start_index=current_row)
logging.debug("bigquery replied: %s", query_reply)
logger.debug("bigquery replied: %s", query_reply)
rows = []
@@ -134,5 +176,4 @@ def bigquery(connection_string):
return json_data, error
return query_runner
register(BigQuery)

View File

@@ -0,0 +1,83 @@
import json
import datetime
import requests
import logging
from redash.query_runner import *
from redash.utils import JSONEncoder
logger = logging.getLogger(__name__)
def _transform_result(response):
columns = ({'name': 'Time::x', 'type': TYPE_DATETIME},
{'name': 'value::y', 'type': TYPE_FLOAT},
{'name': 'name::series', 'type': TYPE_STRING})
rows = []
for series in response.json():
for values in series['datapoints']:
timestamp = datetime.datetime.fromtimestamp(int(values[1]))
rows.append({'Time::x': timestamp, 'name::series': series['target'], 'value::y': values[0]})
data = {'columns': columns, 'rows': rows}
return json.dumps(data, cls=JSONEncoder)
class Graphite(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'url': {
'type': 'string'
},
'username': {
'type': 'string'
},
'password': {
'type': 'string'
},
'verify': {
'type': 'boolean',
'title': 'Verify SSL certificate'
}
},
'required': ['url']
}
@classmethod
def annotate_query(cls):
return False
def __init__(self, configuration_json):
super(Graphite, self).__init__(configuration_json)
if "username" in self.configuration and self.configuration["username"]:
self.auth = (self.configuration["username"], self.configuration["password"])
else:
self.auth = None
self.verify = self.configuration["verify"]
self.base_url = "%s/render?format=json&" % self.configuration['url']
def run_query(self, query):
url = "%s%s" % (self.base_url, "&".join(query.split("\n")))
error = None
data = None
try:
response = requests.get(url, auth=self.auth, verify=self.verify)
if response.status_code == 200:
data = _transform_result(response)
else:
error = "Failed getting results (%d)" % response.status_code
except Exception, ex:
data = None
error = ex.message
return data, error
register(Graphite)

View File

@@ -0,0 +1,178 @@
import json
import datetime
import logging
import re
import time
from redash.utils import JSONEncoder
from redash.query_runner import *
logger = logging.getLogger(__name__)
try:
import pymongo
from bson.objectid import ObjectId
enabled = True
except ImportError:
logger.warning("Missing dependencies. Please install pymongo.")
logger.warning("You can use pip: pip install pymongo")
enabled = False
TYPES_MAP = {
str: TYPE_STRING,
unicode: TYPE_STRING,
int: TYPE_INTEGER,
long: TYPE_INTEGER,
float: TYPE_FLOAT,
bool: TYPE_BOOLEAN,
datetime.datetime: TYPE_DATETIME,
}
date_regex = re.compile("ISODate\(\"(.*)\"\)", re.IGNORECASE)
def _get_column_by_name(columns, column_name):
for c in columns:
if "name" in c and c["name"] == column_name:
return c
return None
def _convert_date(q, field_name):
m = date_regex.findall(q[field_name])
if len(m) > 0:
if q[field_name].find(":") == -1:
q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d")))
else:
q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d %H:%M")))
class MongoDB(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'connectionString': {
'type': 'string',
'title': 'Connection String'
},
'dbName': {
'type': 'string',
'title': "Database Name"
},
'replicaSetName': {
'type': 'string',
'title': 'Replica Set Name'
},
'required': ['connectionString']
}
}
@classmethod
def enabled(cls):
return enabled
@classmethod
def annotate_query(cls):
return False
def __init__(self, configuration_json):
super(MongoDB, self).__init__(configuration_json)
self.db_name = self.configuration["dbName"]
self.is_replica_set = True if "replicaSetName" in self.configuration and self.configuration["replicaSetName"] else False
def run_query(self, query):
if self.is_replica_set:
db_connection = pymongo.MongoReplicaSetClient(self.configuration["connectionString"], replicaSet=self.configuration["replicaSetName"])
else:
db_connection = pymongo.MongoClient(self.configuration["connectionString"])
if self.db_name not in db_connection.database_names():
return None, "Unknown database name '%s'" % self.db_name
db = db_connection[self.db_name ]
logger.debug("mongodb connection string: %s", self.configuration['connectionString'])
logger.debug("mongodb got query: %s", query)
try:
query_data = json.loads(query)
except ValueError:
return None, "Invalid query format. The query is not a valid JSON."
if "collection" not in query_data:
return None, "'collection' must have a value to run a query"
else:
collection = query_data["collection"]
q = None
if "query" in query_data:
q = query_data["query"]
for k in q:
if q[k] and type(q[k]) in [str, unicode]:
logging.debug(q[k])
_convert_date(q, k)
elif q[k] and type(q[k]) is dict:
for k2 in q[k]:
if type(q[k][k2]) in [str, unicode]:
_convert_date(q[k], k2)
f = None
if "fields" in query_data:
f = query_data["fields"]
s = None
if "sort" in query_data and query_data["sort"]:
s = []
for field_name in query_data["sort"]:
s.append((field_name, query_data["sort"][field_name]))
columns = []
rows = []
error = None
json_data = None
if s:
cursor = db[collection].find(q, f).sort(s)
else:
cursor = db[collection].find(q, f)
for r in cursor:
for k in r:
if _get_column_by_name(columns, k) is None:
columns.append({
"name": k,
"friendly_name": k,
"type": TYPES_MAP.get(type(r[k]), TYPE_STRING)
})
# Convert ObjectId to string
if type(r[k]) == ObjectId:
r[k] = str(r[k])
rows.append(r)
if f:
ordered_columns = []
for k in sorted(f, key=f.get):
ordered_columns.append(_get_column_by_name(columns, k))
columns = ordered_columns
data = {
"columns": columns,
"rows": rows
}
error = None
json_data = json.dumps(data, cls=JSONEncoder)
return json_data, error
register(MongoDB)

View File

@@ -0,0 +1,97 @@
import sys
import json
import logging
from redash.utils import JSONEncoder
from redash.query_runner import *
logger = logging.getLogger(__name__)
class Mysql(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'host': {
'type': 'string'
},
'user': {
'type': 'string'
},
'passwd': {
'type': 'string',
'title': 'Password'
},
'db': {
'type': 'string',
'title': 'Database name'
}
},
'required': ['db']
}
@classmethod
def enabled(cls):
try:
import MySQLdb
except ImportError:
return False
return True
def __init__(self, configuration_json):
super(Mysql, self).__init__(configuration_json)
def run_query(self, query):
import MySQLdb
connection = MySQLdb.connect(self.configuration.get('host', ''),
self.configuration.get('user', ''),
self.configuration.get('passwd', ''),
self.configuration['db'],
charset='utf8', use_unicode=True)
cursor = connection.cursor()
logger.debug("MySQL running query: %s", query)
try:
cursor.execute(query)
data = cursor.fetchall()
cursor_desc = cursor.description
if cursor_desc is not None:
num_fields = len(cursor_desc)
column_names = [i[0] for i in cursor.description]
rows = [dict(zip(column_names, row)) for row in data]
# TODO: add types support
columns = [{'name': col_name,
'friendly_name': col_name,
'type': None} for col_name in column_names]
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
error = None
else:
json_data = None
error = "No data was returned."
cursor.close()
except MySQLdb.Error, e:
json_data = None
error = e.args[1]
except KeyboardInterrupt:
error = "Query cancelled by user."
json_data = None
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
finally:
connection.close()
return json_data, error
register(Mysql)

redash/query_runner/pg.py (new file, 142 lines)
View File

@@ -0,0 +1,142 @@
import json
import logging
import psycopg2
import select
import sys
from redash.query_runner import *
from redash.utils import JSONEncoder
logger = logging.getLogger(__name__)
types_map = {
20: TYPE_INTEGER,
21: TYPE_INTEGER,
23: TYPE_INTEGER,
700: TYPE_FLOAT,
1700: TYPE_FLOAT,
701: TYPE_FLOAT,
16: TYPE_BOOLEAN,
1082: TYPE_DATE,
1114: TYPE_DATETIME,
1184: TYPE_DATETIME,
1014: TYPE_STRING,
1015: TYPE_STRING,
1008: TYPE_STRING,
1009: TYPE_STRING,
2951: TYPE_STRING
}
def _wait(conn):
while 1:
try:
state = conn.poll()
if state == psycopg2.extensions.POLL_OK:
break
elif state == psycopg2.extensions.POLL_WRITE:
select.select([], [conn.fileno()], [])
elif state == psycopg2.extensions.POLL_READ:
select.select([conn.fileno()], [], [])
else:
raise psycopg2.OperationalError("poll() returned %s" % state)
except select.error:
raise psycopg2.OperationalError("select.error received")
class PostgreSQL(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
"type": "object",
"properties": {
"user": {
"type": "string"
},
"password": {
"type": "string"
},
"host": {
"type": "string"
},
"port": {
"type": "number"
},
"dbname": {
"type": "string",
"title": "Database Name"
}
},
"required": ["dbname"]
}
@classmethod
def type(cls):
return "pg"
def __init__(self, configuration_json):
super(PostgreSQL, self).__init__(configuration_json)
values = []
for k, v in self.configuration.iteritems():
values.append("{}={}".format(k, v))
self.connection_string = " ".join(values)
def run_query(self, query):
connection = psycopg2.connect(self.connection_string, async=True)
_wait(connection)
cursor = connection.cursor()
try:
cursor.execute(query)
_wait(connection)
# While set would be more efficient here, it sorts the data which is not what we want, but due to the small
# size of the data we can assume it's ok.
column_names = []
columns = []
duplicates_counter = 1
for column in cursor.description:
# TODO: this deduplication needs to be generalized and reused in all query runners.
column_name = column.name
if column_name in column_names:
column_name += str(duplicates_counter)
duplicates_counter += 1
column_names.append(column_name)
columns.append({
'name': column_name,
'friendly_name': column_name,
'type': types_map.get(column.type_code, None)
})
rows = [dict(zip(column_names, row)) for row in cursor]
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
error = None
cursor.close()
except (select.error, OSError) as e:
logging.exception(e)
error = "Query interrupted. Please retry."
json_data = None
except psycopg2.DatabaseError as e:
logging.exception(e)
json_data = None
error = e.message
except KeyboardInterrupt:
connection.cancel()
error = "Query cancelled by user."
json_data = None
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
finally:
connection.close()
return json_data, error
register(PostgreSQL)
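A rough usage sketch, assuming the base constructor parses the JSON options string (as the earlier configuration commits suggest); the values and the resulting key order are illustrative only.
runner = PostgreSQL('{"user": "redash_reader", "host": "localhost", "dbname": "redash"}')
# The constructor joins the options into a libpq-style string, e.g. "user=redash_reader host=localhost dbname=redash"
# (key order depends on dict iteration).
data, error = runner.run_query("SELECT 1 AS x")  # data is the columns/rows JSON string, error is None on success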


@@ -0,0 +1,79 @@
import sys
import json
from redash.query_runner import *
from redash import models
def get_query_result(query_id):
try:
query = models.Query.get_by_id(query_id)
except models.Query.DoesNotExist:
raise Exception("Query id %s does not exist." % query_id)
if query.latest_query_data is None:
raise Exception("Query does not have results yet.")
if query.latest_query_data.data is None:
raise Exception("Query does not have results yet.")
return json.loads(query.latest_query_data.data)
def execute_query(data_source_name, query):
try:
data_source = models.DataSource.get(models.DataSource.name==data_source_name)
except models.DataSource.DoesNotExist:
raise Exception("Wrong data source name: %s." % data_source_name)
query_runner = get_query_runner(data_source.type, data_source.options)
data, error = query_runner.run_query(query)
if error is not None:
raise Exception(error)
# TODO: allow avoiding the json.dumps/loads in same process
return json.loads(data)
class Python(BaseQueryRunner):
"""
This is very, very unsafe. Use at your own risk with people you really trust.
"""
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
}
}
@classmethod
def annotate_query(cls):
return False
def __init__(self, configuration_json):
super(Python, self).__init__(configuration_json)
def run_query(self, query):
try:
error = None
script_globals = {'get_query_result': get_query_result, 'execute_query': execute_query}
script_locals = {'result': None}
# TODO: timeout, sandboxing
exec query in script_globals, script_locals
if script_locals['result'] is None:
raise Exception("result wasn't set to value.")
json_data = json.dumps(script_locals['result'])
except KeyboardInterrupt:
error = "Query cancelled by user."
json_data = None
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
return json_data, error
register(Python)
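An illustrative "query" for this experimental runner: the script only sees get_query_result and execute_query in its globals and must leave a columns/rows dict in result. The data source name comes from the bootstrap script further down; the SQL is made up.
# Run another data source's query and republish its rows (illustrative only).
child = execute_query("re:dash metadata", "SELECT count(*) AS queries FROM queries")
result = {
    "columns": [{"name": "queries", "friendly_name": "queries", "type": None}],
    "rows": child["rows"],
}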


@@ -0,0 +1,65 @@
import os
import sys
import subprocess
from redash.query_runner import *
class Script(BaseQueryRunner):
@classmethod
def enabled(cls):
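# subprocess.check_output is only available on Python 2.7+.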
return "check_output" in subprocess.__dict__
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'path': {
'type': 'string',
'title': 'Scripts path'
}
},
'required': ['path']
}
@classmethod
def annotate_query(cls):
return False
def __init__(self, configuration_json):
super(Script, self).__init__(configuration_json)
# Poor man's protection against running scripts from outside the scripts directory
if self.configuration["path"].find("../") > -1:
raise ValidationError("Scripts can only be run from the configured scripts directory")
def run_query(self, query):
try:
json_data = None
error = None
query = query.strip()
script = os.path.join(self.configuration["path"], query)
if not os.path.exists(script):
return None, "Script '%s' not found in script directory" % query
output = subprocess.check_output(script, shell=False)
if output is not None:
output = output.strip()
if output != "":
return output, None
error = "Error reading output"
except subprocess.CalledProcessError as e:
return None, str(e)
except KeyboardInterrupt:
error = "Query cancelled by user."
json_data = None
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
return json_data, error
register(Script)
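A minimal sketch of how the Script runner is driven; the directory and script name are hypothetical. The query text is just the name of an executable inside the configured path, and its stdout is returned as-is rather than as the columns/rows JSON the other runners produce.
runner = Script('{"path": "/opt/redash/scripts"}')
output, error = runner.run_query("daily_report.sh")  # output is whatever the script printed, or error on failure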


@@ -1,16 +1,30 @@
import json
import logging
import sys
import os
import urllib2
def url(connection_string):
from redash.query_runner import *
def query_runner(query):
base_url = connection_string
class Url(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'url': {
'type': 'string',
'title': 'URL base path'
}
}
}
@classmethod
def annotate_query(cls):
return False
def run_query(self, query):
base_url = self.configuration["url"]
try:
json_data = None
error = None
query = query.strip()
@@ -41,5 +55,4 @@ def url(connection_string):
return json_data, error
query_runner.annotate_query = False
return query_runner
register(Url)
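The fetch logic itself sits in the elided part of this hunk, but judging by the pre-refactor function this class replaces, the endpoint at URL base path + query is presumably expected to return the same columns/rows structure the other runners produce, e.g.:
# Hypothetical response body expected from base_url + query:
expected_response = {
    "columns": [{"name": "value", "friendly_name": "value", "type": None}],
    "rows": [{"value": 42}],
}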


@@ -44,22 +44,20 @@ STATSD_HOST = os.environ.get('REDASH_STATSD_HOST', "127.0.0.1")
STATSD_PORT = int(os.environ.get('REDASH_STATSD_PORT', "8125"))
STATSD_PREFIX = os.environ.get('REDASH_STATSD_PREFIX', "redash")
# The following is kept for backward compatibility and shouldn't be used anymore.
CONNECTION_ADAPTER = os.environ.get("REDASH_CONNECTION_ADAPTER", "pg")
CONNECTION_STRING = os.environ.get("REDASH_CONNECTION_STRING", "user= password= host= port=5439 dbname=")
# Connection settings for re:dash's own database (where we store the queries, results, etc)
DATABASE_CONFIG = parse_db_url(os.environ.get("REDASH_DATABASE_URL", "postgresql://postgres"))
# Celery related settings
CELERY_BROKER = os.environ.get("REDASH_CELERY_BROKER", REDIS_URL)
CELERY_BACKEND = os.environ.get("REDASH_CELERY_BACKEND", REDIS_URL)
CELERY_FLOWER_URL = os.environ.get("REDASH_CELERY_FLOWER_URL", "/flower")
# The following enables a periodic job (every 5 minutes) that removes unused query results. It stays behind this
# "feature flag" until it's proven safe.
QUERY_RESULTS_CLEANUP_ENABLED = parse_boolean(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_ENABLED", "false"))
AUTH_TYPE = os.environ.get("REDASH_AUTH_TYPE", "hmac")
PASSWORD_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_PASSWORD_LOGIN_ENABLED", "true"))
# Google Apps domain to allow access from; any user with email in this Google Apps will be allowed
# access
GOOGLE_APPS_DOMAIN = os.environ.get("REDASH_GOOGLE_APPS_DOMAIN", "")
@@ -68,14 +66,23 @@ GOOGLE_CLIENT_ID = os.environ.get("REDASH_GOOGLE_CLIENT_ID", "")
GOOGLE_CLIENT_SECRET = os.environ.get("REDASH_GOOGLE_CLIENT_SECRET", "")
GOOGLE_OAUTH_ENABLED = GOOGLE_CLIENT_ID and GOOGLE_CLIENT_SECRET
PASSWORD_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_PASSWORD_LOGIN_ENABLED", "true"))
STATIC_ASSETS_PATH = fix_assets_path(os.environ.get("REDASH_STATIC_ASSETS_PATH", "../rd_ui/app/"))
WORKERS_COUNT = int(os.environ.get("REDASH_WORKERS_COUNT", "2"))
JOB_EXPIRY_TIME = int(os.environ.get("REDASH_JOB_EXPIRY_TIME", 3600*6))
COOKIE_SECRET = os.environ.get("REDASH_COOKIE_SECRET", "c292a0a3aa32397cdb050e233733900f")
LOG_LEVEL = os.environ.get("REDASH_LOG_LEVEL", "INFO")
CLIENT_SIDE_METRICS = parse_boolean(os.environ.get("REDASH_CLIENT_SIDE_METRICS", "false"))
ANALYTICS = os.environ.get("REDASH_ANALYTICS", "")
# Query Runners
QUERY_RUNNERS = [
'redash.query_runner.big_query',
'redash.query_runner.graphite',
'redash.query_runner.mongodb',
'redash.query_runner.mysql',
'redash.query_runner.pg',
'redash.query_runner.script',
'redash.query_runner.url',
]
# Features:
FEATURE_TABLES_PERMISSIONS = parse_boolean(os.environ.get("REDASH_FEATURE_TABLES_PERMISSIONS", "false"))
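For context, these settings are plain environment variables read when settings.py is imported, so overrides must be exported first. The "api_key" value for REDASH_AUTH_TYPE is assumed from the feature's name; this hunk only shows the "hmac" default.
import os
os.environ["REDASH_AUTH_TYPE"] = "api_key"                   # assumed alternative to the "hmac" default
os.environ["REDASH_QUERY_RESULTS_CLEANUP_ENABLED"] = "true"  # opt in to the new cleanup job
from redash import settings                                  # settings reads os.environ at import time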


@@ -8,7 +8,7 @@ from celery.utils.log import get_task_logger
from redash import redis_connection, models, statsd_client, settings
from redash.utils import gen_query_hash
from redash.worker import celery
from redash.data.query_runner import get_query_runner
from redash.query_runner import get_query_runner
logger = get_task_logger(__name__)
@@ -151,8 +151,6 @@ def refresh_queries():
outdated_queries_count += 1
statsd_client.gauge('manager.outdated_queries', outdated_queries_count)
# TODO: decide if we still need this
# statsd_client.gauge('manager.queue_size', self.redis_connection.zcard('jobs'))
logger.info("Done refreshing queries. Found %d outdated queries." % outdated_queries_count)
@@ -237,15 +235,15 @@ def execute_query(self, query, data_source_id):
query_hash = gen_query_hash(query)
query_runner = get_query_runner(data_source.type, data_source.options)
if getattr(query_runner, 'annotate_query', True):
# TODO: anotate with queu ename
if query_runner.annotate_query():
# TODO: annotate with queue name
annotated_query = "/* Task Id: %s, Query hash: %s */ %s" % \
(self.request.id, query_hash, query)
else:
annotated_query = query
with statsd_client.timer('query_runner.{}.{}.run_time'.format(data_source.type, data_source.name)):
data, error = query_runner(annotated_query)
data, error = query_runner.run_query(annotated_query)
run_time = time.time() - start_time
logger.info("Query finished... data length=%s, error=%s", data and len(data), error)
@@ -255,8 +253,6 @@ def execute_query(self, query, data_source_id):
# Delete query_hash
redis_connection.delete(QueryTask._job_lock_id(query_hash, data_source.id))
# TODO: it is possible that storing the data will fail, and we will need to retry
# while we already marked the job as done
if not error:
query_result = models.QueryResult.store_result(data_source.id, query_hash, query, data, run_time, datetime.datetime.utcnow())
else:
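Condensed, the task-side contract after this refactor looks roughly like the sketch below (the data_source fields and identifiers are illustrative): get_query_runner returns an instance, and the task calls its methods instead of treating the runner as a bare function.
runner = get_query_runner(data_source.type, data_source.options)  # e.g. ("pg", '{"dbname": "redash"}')
if runner.annotate_query():
    annotated_query = "/* Task Id: %s, Query hash: %s */ %s" % (task_id, query_hash, query)
else:
    annotated_query = query
data, error = runner.run_query(annotated_query)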


@@ -9,7 +9,7 @@ Werkzeug==0.9.4
aniso8601==0.82
blinker==1.3
itsdangerous==0.23
peewee==2.2.2
peewee==2.4.7
psycopg2==2.5.2
python-dateutil==2.1
pytz==2013.9
@@ -23,3 +23,5 @@ honcho==0.5.0
statsd==2.1.2
gunicorn==18.0
celery==3.1.11
jsonschema==2.4.0
click==3.3


@@ -146,7 +146,7 @@ if [ $pg_user_exists -ne 0 ]; then
sudo -u redash psql -c "grant select on activity_log, events, queries, dashboards, widgets, visualizations, query_results to redash_reader;" redash
cd /opt/redash/current
sudo -u redash bin/run ./manage.py ds new "re:dash metadata" "pg" "user=redash_reader password=$REDASH_READER_PASSWORD host=localhost dbname=redash"
sudo -u redash bin/run ./manage.py ds new -n "re:dash metadata" -t "pg" -o "{\"user\": \"redash_reader\", \"password\": \"$REDASH_READER_PASSWORD\", \"host\": \"localhost\", \"dbname\": \"redash\"}"
fi
# BigQuery dependencies:
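The new ds new format takes the options as a single JSON argument. A small Python sketch (hypothetical values) of building such a blob, matching what the line above does for the metadata source:
import json
options = json.dumps({"user": "redash_reader", "password": "secret", "host": "localhost", "dbname": "redash"})
# ./manage.py ds new -n "re:dash metadata" -t "pg" -o "$options"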


@@ -1,8 +1,45 @@
from flask.ext.login import current_user
from mock import patch
from tests import BaseTestCase
from redash import models
from redash.google_oauth import create_and_login_user
from tests.factories import user_factory
from redash.authentication import ApiKeyAuthentication
from tests.factories import user_factory, query_factory
from redash.wsgi import app
class TestApiKeyAuthentication(BaseTestCase):
#
# This is a bad way to write these tests, but the way Flask works doesn't make it easy to write them properly...
#
def setUp(self):
super(TestApiKeyAuthentication, self).setUp()
self.api_key = 10
self.query = query_factory.create(api_key=self.api_key)
def test_no_api_key(self):
auth = ApiKeyAuthentication()
with app.test_client() as c:
rv = c.get('/api/queries/{0}'.format(self.query.id))
self.assertFalse(auth.verify_authentication())
def test_wrong_api_key(self):
auth = ApiKeyAuthentication()
with app.test_client() as c:
rv = c.get('/api/queries/{0}'.format(self.query.id), query_string={'api_key': 'whatever'})
self.assertFalse(auth.verify_authentication())
def test_correct_api_key(self):
auth = ApiKeyAuthentication()
with app.test_client() as c:
rv = c.get('/api/queries/{0}'.format(self.query.id), query_string={'api_key': self.api_key})
self.assertTrue(auth.verify_authentication())
def test_no_query_id(self):
auth = ApiKeyAuthentication()
with app.test_client() as c:
rv = c.get('/api/queries', query_string={'api_key': self.api_key})
self.assertFalse(auth.verify_authentication())
class TestCreateAndLoginUser(BaseTestCase):


@@ -472,4 +472,45 @@ class TestLogout(BaseTestCase):
self.assertTrue(current_user.is_authenticated())
rv = c.get('/logout')
self.assertEquals(rv.status_code, 302)
self.assertFalse(current_user.is_authenticated())
self.assertFalse(current_user.is_authenticated())
class DataSourceTypesTest(BaseTestCase):
def test_returns_data_for_admin(self):
admin = user_factory.create(groups=['admin', 'default'])
with app.test_client() as c, authenticated_user(c, user=admin):
rv = c.get("/api/data_sources/types")
self.assertEqual(rv.status_code, 200)
def test_returns_403_for_non_admin(self):
with app.test_client() as c, authenticated_user(c):
rv = c.get("/api/data_sources/types")
self.assertEqual(rv.status_code, 403)
class DataSourceTest(BaseTestCase):
def test_returns_400_when_missing_fields(self):
admin = user_factory.create(groups=['admin', 'default'])
with app.test_client() as c, authenticated_user(c, user=admin):
rv = c.post("/api/data_sources")
self.assertEqual(rv.status_code, 400)
rv = json_request(c.post, '/api/data_sources', data={'name': 'DS 1'})
self.assertEqual(rv.status_code, 400)
def test_returns_400_when_configuration_invalid(self):
admin = user_factory.create(groups=['admin', 'default'])
with app.test_client() as c, authenticated_user(c, user=admin):
rv = json_request(c.post, '/api/data_sources',
data={'name': 'DS 1', 'type': 'pg', 'options': '{}'})
self.assertEqual(rv.status_code, 400)
def test_creates_data_source(self):
admin = user_factory.create(groups=['admin', 'default'])
with app.test_client() as c, authenticated_user(c, user=admin):
rv = json_request(c.post, '/api/data_sources',
data={'name': 'DS 1', 'type': 'pg', 'options': '{"dbname": "redash"}'})
self.assertEqual(rv.status_code, 200)


@@ -1,3 +1,4 @@
#encoding: utf8
import datetime
import json
from tests import BaseTestCase
@@ -31,22 +32,22 @@ class QueryTest(BaseTestCase):
self.assertNotEquals(old_hash, q.query_hash)
def test_search_finds_in_name(self):
q1 = query_factory.create(name="Testing search")
q2 = query_factory.create(name="Testing searching")
q3 = query_factory.create(name="Testing sea rch")
q1 = query_factory.create(name=u"Testing seåřċħ")
q2 = query_factory.create(name=u"Testing seåřċħing")
q3 = query_factory.create(name=u"Testing seå řċħ")
queries = models.Query.search("search")
queries = models.Query.search(u"seåřċħ")
self.assertIn(q1, queries)
self.assertIn(q2, queries)
self.assertNotIn(q3, queries)
def test_search_finds_in_description(self):
q1 = query_factory.create(description="Testing search")
q2 = query_factory.create(description="Testing searching")
q3 = query_factory.create(description="Testing sea rch")
q1 = query_factory.create(description=u"Testing seåřċħ")
q2 = query_factory.create(description=u"Testing seåřċħing")
q3 = query_factory.create(description=u"Testing seå řċħ")
queries = models.Query.search("search")
queries = models.Query.search(u"seåřċħ")
self.assertIn(q1, queries)
self.assertIn(q2, queries)