Compare commits


26 Commits

Author SHA1 Message Date
Arik Fraimovich
dc0f9a63cb Merge pull request #351 from joeysim/search_improvements
Search improvements
2015-01-18 09:22:41 +02:00
Arik Fraimovich
21c042996e Merge pull request #350 from joeysim/ctrl_enter_exec
Added support for Ctrl+Enter query execution for PCs
2015-01-18 09:21:32 +02:00
Joey Simhon
5f22adadf2 ordering all_queries by created_at desc for better relevancy with big lists 2015-01-17 21:19:22 +02:00
Joey Simhon
4e8888ce2f sort searched queries by creation time, assuming the newer queries are usually more relevant 2015-01-17 21:14:56 +02:00
Joey Simhon
0a69609d38 Added support for Ctrl+Enter query execution for PCs 2015-01-17 00:32:21 +02:00
Arik Fraimovich
e85d3c3c9f Merge pull request #348 from EverythingMe/feature/additional_manage_commands
Feature: new data source management commands in manage.py
2015-01-14 12:35:13 +02:00
Arik Fraimovich
e20f57bba8 Added edit & delete commands to data source cli 2015-01-14 12:23:53 +02:00
Arik Fraimovich
933ace2e38 Split CLI commands to several files for easier editing and naming. 2015-01-14 10:52:11 +02:00
Arik Fraimovich
4c1e5aed6b Remove import from settings command (obsolete). 2015-01-14 10:27:53 +02:00
Arik Fraimovich
77d982b4aa Merge pull request #347 from barnash/query-params-for-filters
Query params for filters
2015-01-13 22:35:32 +02:00
barnash
02c8163265 Changed the query param to something more url friendly 2015-01-12 18:56:44 +02:00
Arik Fraimovich
ef868dbb6e Merge pull request #346 from erans/master
Initial support for Mongo's aggregation framework.
2015-01-12 18:17:41 +02:00
Iftach Bar
b2bab33baa added support for deep links to dashboards with saved filters 2015-01-12 09:23:27 +02:00
Iftach Bar
149e0835f8 fixed jshint stuff - semicolon in different places 2015-01-12 09:22:53 +02:00
Eran Sandler
50bed1d8f2 Initial support for Mongo's aggregation framework. 2015-01-11 12:37:37 +02:00
Eran Sandler
d4b5d78743 Perform a JSON.stringify on values whose type is "object" 2015-01-11 12:28:21 +02:00
Arik Fraimovich
7fc82a2562 Merge pull request #345 from EverythingMe/vagrant_dev
Developer Vagrant box for easier contribution
2014-12-30 07:52:07 +02:00
Arik Fraimovich
92fb138c2c Vagrant file to use the redash/dev box 2014-12-30 07:45:30 +02:00
Arik Fraimovich
71b4b45a3c Merge pull request #344 from EverythingMe/feature/query_results_cleanup
Job to cleanup unused query results
2014-12-25 15:58:10 +02:00
Arik Fraimovich
07f4a1b227 Fix: wiredep failing after version upgrade 2014-12-25 15:52:52 +02:00
Arik Fraimovich
e116e88e98 Job to cleanup unused query results 2014-12-25 15:39:49 +02:00
Arik Fraimovich
2278a181ca Merge pull request #339 from EverythingMe/counter-vis
bugfix: Counter visualization font size issues
2014-11-11 18:21:29 +02:00
Amir Nissim
98dc75a404 bugfix: Counter visualization was not watching for filter changes 2014-11-11 13:04:45 +02:00
Amir Nissim
536918aab3 bugfix: Counter visualization font size issues 2014-11-10 15:21:03 +02:00
Arik Fraimovich
c75ac80c7a Merge pull request #333 from EverythingMe/fix/import
Fix: mixed number columns were wrongly detected as integer
2014-11-05 11:33:46 +02:00
Arik Fraimovich
522d8542e9 Fix: mixed number columns were wrongly detected as integer 2014-11-05 11:30:17 +02:00
22 changed files with 370 additions and 217 deletions

Procfile

@@ -1,2 +1,2 @@
web: ./manage.py runserver -p $PORT
web: ./manage.py runserver -p $PORT --host 0.0.0.0
worker: ./bin/run celery worker --app=redash.worker --beat -Qqueries,celery,scheduled_queries
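(The new --host 0.0.0.0 flag makes the development server listen on all interfaces instead of localhost only; without it, the port forwarding from the Vagrant box introduced below could not reach the server.)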

Vagrantfile (vendored, new file, 11 lines)

@@ -0,0 +1,11 @@
# -*- mode: ruby -*-
# vi: set ft=ruby :
# Vagrantfile API/syntax version. Don't touch unless you know what you're doing!
VAGRANTFILE_API_VERSION = "2"
Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
config.vm.box = "redash/dev"
config.vm.synced_folder "./", "/opt/redash/current"
config.vm.network "forwarded_port", guest: 5000, host: 9001
end
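With this Vagrantfile in place, a typical workflow (assuming Vagrant is installed and the redash/dev box is available) is simply:

vagrant up

after which the app is reachable at http://localhost:9001, the host port forwarded to port 5000 inside the box.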

manage.py (178 lines changed)

@@ -2,17 +2,19 @@
"""
CLI to manage redash.
"""
import datetime
from flask.ext.script import Manager, prompt_pass
from flask.ext.script import Manager
from redash import settings, models, __version__
from redash.wsgi import app
from redash.import_export import import_manager
from redash.cli import users, database, data_sources
manager = Manager(app)
database_manager = Manager(help="Manages the database (create/drop tables).")
users_manager = Manager(help="Users management commands.")
data_sources_manager = Manager(help="Data sources management commands.")
manager.add_command("database", database.manager)
manager.add_command("users", users.manager)
manager.add_command("import", import_manager)
manager.add_command("ds", data_sources.manager)
@manager.command
def version():
@@ -22,7 +24,7 @@ def version():
@manager.command
def runworkers():
"""Prints deprecation warning."""
"""Start workers (deprecated)."""
print "** This command is deprecated. Please use Celery's CLI to control the workers. **"
@@ -31,8 +33,10 @@ def make_shell_context():
from redash.models import db
return dict(app=app, db=db, models=models)
@manager.command
def check_settings():
"""Show the settings as re:dash sees them (useful for debugging)."""
from types import ModuleType
for name in dir(settings):
@@ -40,168 +44,6 @@ def check_settings():
if not callable(item) and not name.startswith("__") and not isinstance(item, ModuleType):
print "{} = {}".format(name, item)
@manager.command
def import_events(events_file):
# TODO: remove this code past 1/11/2014.
import json
from collections import Counter
count = Counter()
with open(events_file) as f:
for line in f:
try:
event = json.loads(line)
object_type = event.get('object_type', None)
object_id = event.get('object_id', None)
if object_id == 'dashboard' and object_type == 'dashboard':
count['bad dashboard id'] += 1
continue
models.Event.record(event)
count['imported'] += 1
except Exception as ex:
print "Failed importing line:"
print line
print ex.message
count[ex.message] += 1
count['failed'] += 1
models.db.close_db(None)
for k, v in count.iteritems():
print k
print v
@database_manager.command
def create_tables():
"""Creates the database tables."""
from redash.models import create_db, init_db
create_db(True, False)
init_db()
@database_manager.command
def drop_tables():
"""Drop the database tables."""
from redash.models import create_db
create_db(False, True)
@users_manager.option('email', help="User's email")
@users_manager.option('name', help="User's full name")
@users_manager.option('--admin', dest='is_admin', action="store_true", default=False, help="set user as admin")
@users_manager.option('--google', dest='google_auth', action="store_true", default=False, help="user uses Google Auth to login")
@users_manager.option('--password', dest='password', default=None, help="Password for users who don't use Google Auth (leave blank for prompt).")
@users_manager.option('--groups', dest='groups', default=models.User.DEFAULT_GROUPS, help="Comma separated list of groups (leave blank for default).")
def create(email, name, groups, is_admin=False, google_auth=False, password=None):
print "Creating user (%s, %s)..." % (email, name)
print "Admin: %r" % is_admin
print "Login with Google Auth: %r\n" % google_auth
if isinstance(groups, basestring):
groups = groups.split(',')
groups.remove('') # in case it was empty string
if is_admin:
groups += ['admin']
user = models.User(email=email, name=name, groups=groups)
if not google_auth:
password = password or prompt_pass("Password")
user.hash_password(password)
try:
user.save()
except Exception, e:
print "Failed creating user: %s" % e.message
@users_manager.option('email', help="email address of user to delete")
def delete(email):
deleted_count = models.User.delete().where(models.User.email == email).execute()
print "Deleted %d users." % deleted_count
@users_manager.option('password', help="new password for the user")
@users_manager.option('email', help="email address of the user to change password for")
def password(email, password):
try:
user = models.User.get_by_email(email)
user.hash_password(password)
user.save()
print "User updated."
except models.User.DoesNotExist:
print "User [%s] not found." % email
@users_manager.option('email', help="email address of the user to grant admin to")
def grant_admin(email):
try:
user = models.User.get_by_email(email)
user.groups.append('admin')
user.save()
print "User updated."
except models.User.DoesNotExist:
print "User [%s] not found." % email
# it should be named just "list", but then it would collide with the data sources "list" command.
# TODO: need to split to multiple files.
@users_manager.command
def list_users():
"""List all users"""
for i, user in enumerate(models.User.select()):
if i > 0:
print "-"*20
print "Id: {}\nName: {}\nEmail: {}".format(user.id, user.name.encode('utf-8'), user.email)
@data_sources_manager.command
def import_from_settings(name=None):
"""Import data source from settings (env variables)."""
name = name or "Default"
data_source = models.DataSource.create(name=name,
type=settings.CONNECTION_ADAPTER,
options=settings.CONNECTION_STRING)
print "Imported data source from settings (id={}).".format(data_source.id)
@data_sources_manager.command
def list():
"""List currently configured data sources"""
for i, ds in enumerate(models.DataSource.select()):
if i > 0:
print "-"*20
print "Id: {}\nName: {}\nType: {}\nOptions: {}".format(ds.id, ds.name, ds.type, ds.options)
@data_sources_manager.command
def new(name, type, options):
"""Create new data source"""
# TODO: validate it's a valid type and in the future, validate the options.
print "Creating {} data source ({}) with options:\n{}".format(type, name, options)
data_source = models.DataSource.create(name=name,
type=type,
options=options)
print "Id: {}".format(data_source.id)
manager.add_command("database", database_manager)
manager.add_command("users", users_manager)
manager.add_command("import", import_manager)
manager.add_command("ds", data_sources_manager)
if __name__ == '__main__':
manager.run()
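Note that the 168 lines removed here reappear below in the new redash/cli/database.py, redash/cli/users.py and redash/cli/data_sources.py files (the data source commands also gain delete and edit, and the obsolete import_from_settings command is dropped); manage.py keeps only the command registration.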

Gruntfile.js

@@ -163,7 +163,6 @@ module.exports = function (grunt) {
// Automatically inject Bower components into the app
wiredep: {
options: {
cwd: '<%= yeoman.app %>'
},
app: {
src: ['<%= yeoman.app %>/index.html'],

dashboard.js (dashboard controller)

@@ -1,5 +1,5 @@
(function() {
var DashboardCtrl = function($scope, Events, Widget, $routeParams, $http, $timeout, $q, Dashboard) {
var DashboardCtrl = function($scope, Events, Widget, $routeParams, $location, $http, $timeout, $q, Dashboard) {
$scope.refreshEnabled = false;
$scope.refreshRate = 60;
@@ -15,7 +15,7 @@
return _.map(row, function (widget) {
var w = new Widget(widget);
if (w.visualization && dashboard.dashboard_filters_enabled) {
if (w.visualization) {
promises.push(w.getQuery().getQueryResultPromise());
}
@@ -32,22 +32,23 @@
// TODO: first object should be a copy, otherwise one of the chart filters behaves differently from the others.
filters[filter.name] = filter;
filters[filter.name].originFilters = [];
if (_.has($location.search(), filter.name)) {
filter.current = $location.search()[filter.name];
}
$scope.$watch(function () { return filter.current }, function (value) {
_.each(filter.originFilters, function (originFilter) {
originFilter.current = value;
});
});
};
}
// TODO: merge values.
filters[filter.name].originFilters.push(filter);
});
});
if (dashboard.dashboard_filters_enabled) {
$scope.filters = _.values(filters);
}
$scope.filters = _.values(filters);
});
@@ -83,8 +84,8 @@
});
}, $scope.refreshRate);
};
}
}
};
$scope.triggerRefresh = function() {
$scope.refreshEnabled = !$scope.refreshEnabled;
@@ -137,7 +138,7 @@
};
angular.module('redash.controllers')
.controller('DashboardCtrl', ['$scope', 'Events', 'Widget', '$routeParams', '$http', '$timeout', '$q', 'Dashboard', DashboardCtrl])
.controller('DashboardCtrl', ['$scope', 'Events', 'Widget', '$routeParams', '$location', '$http', '$timeout', '$q', 'Dashboard', DashboardCtrl])
.controller('WidgetCtrl', ['$scope', 'Events', 'Query', WidgetCtrl])
})();
})();
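With $location injected, a dashboard filter can now be preset from the URL, which is what makes the deep-link commit above work. A hypothetical example, assuming a dashboard named sales with a filter named country:

/dashboard/sales?country=US

On load, the value from the query string is copied into filter.current, and the $watch then propagates it to all origin filters.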

Query editor controller (keyboard shortcuts)

@@ -21,8 +21,13 @@
$scope.saveQuery();
}
},
// Cmd+Enter for Mac
'meta+enter': function () {
$scope.executeQuery();
},
// Ctrl+Enter for PC
'ctrl+enter': function () {
$scope.executeQuery();
}
};

Query result column type detection

@@ -15,17 +15,15 @@
_.each(this.query_result.data.rows, function (row) {
_.each(row, function (v, k) {
if (angular.isNumber(v)) {
if (parseInt(v) === v) {
columnTypes[k] = 'integer';
} else {
columnTypes[k] = 'float';
}
columnTypes[k] = 'float';
} else if (_.isString(v) && v.match(/^\d{4}-\d{2}-\d{2}T/)) {
row[k] = moment(v);
columnTypes[k] = 'datetime';
} else if (_.isString(v) && v.match(/^\d{4}-\d{2}-\d{2}/)) {
row[k] = moment(v);
columnTypes[k] = 'date';
} else if (typeof(v) == 'object') {
row[k] = JSON.stringify(v);
}
}, this);
}, this);
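The removed parseInt(v) === v branch is what misclassified mixed columns: columnTypes[k] is overwritten on every row, so a column holding both 2 and 2.5 ended up typed by whichever row happened to come last. Typing every numeric column as 'float' is the conservative fix.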

Counter visualization controller

@@ -30,7 +30,7 @@
$scope.visualization.options.rowNumber =
$scope.visualization.options.rowNumber || 0;
$scope.$watch('queryResult && queryResult.getData() && visualization.options',
$scope.$watch('[queryResult && queryResult.getData(), visualization.options]',
function() {
var queryData = $scope.queryResult.getData();
if (queryData) {

Counter visualization styles

@@ -273,7 +273,6 @@ pivot-table-renderer > table, grid-renderer > div, visualization-renderer > div
counter-renderer {
display: block;
text-align: center;
padding: 30px 0;
}
counter-renderer counter {
margin: 0 auto;
@@ -283,7 +282,8 @@ counter-renderer counter {
}
counter-renderer value,
counter-renderer counter-target {
font-size: 130px;
font-size: 80px;
display: block;
}
counter-renderer counter-target {
color: #ccc;

Counter visualization template

@@ -1,5 +1,5 @@
<counter ng-class="{'positive': targetValue && trendPositive, 'negative': targetValue && !trendPositive}">
<value>{{counterValue}}</value>
<counter-target ng-if="targetValue">({{targetValue}})</counter-target>
<value>{{counterValue|number}}</value>
<counter-target ng-if="targetValue">({{targetValue|number}})</counter-target>
<counter-name>{{visualization.name}}</counter-name>
</counter>

redash/__init__.py

@@ -14,6 +14,7 @@ def setup_logging():
handler.setFormatter(formatter)
logging.getLogger().addHandler(handler)
logging.getLogger().setLevel(settings.LOG_LEVEL)
logging.getLogger("passlib").setLevel("ERROR")
def create_redis_connection():

redash/cli/__init__.py (new file, empty)

redash/cli/data_sources.py (new file, 60 lines)

@@ -0,0 +1,60 @@
from flask.ext.script import Manager
from redash import models
manager = Manager(help="Data sources management commands.")
@manager.command
def list():
"""List currently configured data sources"""
for i, ds in enumerate(models.DataSource.select()):
if i > 0:
print "-"*20
print "Id: {}\nName: {}\nType: {}\nOptions: {}".format(ds.id, ds.name, ds.type, ds.options)
@manager.command
def new(name, type, options):
"""Create new data source"""
# TODO: validate it's a valid type and in the future, validate the options.
print "Creating {} data source ({}) with options:\n{}".format(type, name, options)
data_source = models.DataSource.create(name=name,
type=type,
options=options)
print "Id: {}".format(data_source.id)
@manager.command
def delete(name):
"""Deletes data source by name"""
try:
data_source = models.DataSource.get(models.DataSource.name==name)
print "Deleting data source: {} (id={})".format(name, data_source.id)
data_source.delete_instance()
except models.DataSource.DoesNotExist:
print "Couldn't find data source named: {}".format(name)
def update_attr(obj, attr, new_value):
if new_value is not None:
old_value = getattr(obj, attr)
print "Updating {}: {} -> {}".format(attr, old_value, new_value)
setattr(obj, attr, new_value)
@manager.option('name', default=None, help="name of data source to edit")
@manager.option('--name', dest='new_name', default=None, help="new name for the data source")
@manager.option('--options', dest='options', default=None, help="updated options for the data source")
@manager.option('--type', dest='type', default=None, help="new type for the data source")
def edit(name, new_name=None, options=None, type=None):
"""Edit data source settings (name, options, type)"""
try:
data_source = models.DataSource.get(models.DataSource.name==name)
update_attr(data_source, "name", new_name)
update_attr(data_source, "type", type)
update_attr(data_source, "options", options)
data_source.save()
except models.DataSource.DoesNotExist:
print "Couldn't find data source named: {}".format(name)
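A hypothetical session with the new commands (the data source name and options are invented; the ds prefix comes from the registration in manage.py above):

./manage.py ds list
./manage.py ds new "My Postgres" pg "dbname=redash"
./manage.py ds edit "My Postgres" --name "Main Postgres"
./manage.py ds delete "Main Postgres"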

redash/cli/database.py (new file, 19 lines)

@@ -0,0 +1,19 @@
from flask.ext.script import Manager
manager = Manager(help="Manages the database (create/drop tables).")
@manager.command
def create_tables():
"""Creates the database tables."""
from redash.models import create_db, init_db
create_db(True, False)
init_db()
@manager.command
def drop_tables():
"""Drop the database tables."""
from redash.models import create_db
create_db(False, True)

redash/cli/users.py (new file, 74 lines)

@@ -0,0 +1,74 @@
from flask.ext.script import Manager, prompt_pass
from redash import models
manager = Manager(help="Users management commands.")
@manager.option('email', help="email address of the user to grant admin to")
def grant_admin(email):
try:
user = models.User.get_by_email(email)
user.groups.append('admin')
user.save()
print "User updated."
except models.User.DoesNotExist:
print "User [%s] not found." % email
@manager.option('email', help="User's email")
@manager.option('name', help="User's full name")
@manager.option('--admin', dest='is_admin', action="store_true", default=False, help="set user as admin")
@manager.option('--google', dest='google_auth', action="store_true", default=False, help="user uses Google Auth to login")
@manager.option('--password', dest='password', default=None, help="Password for users who don't use Google Auth (leave blank for prompt).")
@manager.option('--groups', dest='groups', default=models.User.DEFAULT_GROUPS, help="Comma separated list of groups (leave blank for default).")
def create(email, name, groups, is_admin=False, google_auth=False, password=None):
print "Creating user (%s, %s)..." % (email, name)
print "Admin: %r" % is_admin
print "Login with Google Auth: %r\n" % google_auth
if isinstance(groups, basestring):
groups = groups.split(',')
groups.remove('') # in case it was empty string
if is_admin:
groups += ['admin']
user = models.User(email=email, name=name, groups=groups)
if not google_auth:
password = password or prompt_pass("Password")
user.hash_password(password)
try:
user.save()
except Exception, e:
print "Failed creating user: %s" % e.message
@manager.option('email', help="email address of user to delete")
def delete(email):
deleted_count = models.User.delete().where(models.User.email == email).execute()
print "Deleted %d users." % deleted_count
@manager.option('password', help="new password for the user")
@manager.option('email', help="email address of the user to change password for")
def password(email, password):
try:
user = models.User.get_by_email(email)
user.hash_password(password)
user.save()
print "User updated."
except models.User.DoesNotExist:
print "User [%s] not found." % email
@manager.command
def list():
"""List all users"""
for i, user in enumerate(models.User.select()):
if i > 0:
print "-"*20
print "Id: {}\nName: {}\nEmail: {}".format(user.id, user.name.encode('utf-8'), user.email)

Status API handler

@@ -100,6 +100,7 @@ def status_api():
status['version'] = __version__
status['queries_count'] = models.Query.select().count()
status['query_results_count'] = models.QueryResult.select().count()
status['unused_query_results_count'] = models.QueryResult.unused().count()
status['dashboards_count'] = models.Dashboard.select().count()
status['widgets_count'] = models.Widget.select().count()

MongoDB query runner

@@ -9,9 +9,11 @@ from redash.utils import JSONEncoder
try:
import pymongo
from bson.objectid import ObjectId
from bson.son import SON
except ImportError:
print "Missing dependencies. Please install pymongo."
print "You can use pip: pip install pymongo"
raise
TYPES_MAP = {
ObjectId : "string",
@@ -26,6 +28,68 @@ TYPES_MAP = {
date_regex = re.compile("ISODate\(\"(.*)\"\)", re.IGNORECASE)
# Simple query example:
#
# {
# "collection" : "my_collection",
# "query" : {
# "date" : {
# "$gt" : "ISODate(\"2015-01-15 11:41\")",
# },
# "type" : 1
# },
# "fields" : {
# "_id" : 1,
# "name" : 2
# },
# "sort" : [
# {
# "name" : "date",
# "direction" : -1
# }
# ]
#
# }
#
#
# Aggregation
# ===========
# Uses a syntax similar to PyMongo's; however, to preserve the sort order,
# it takes a regular list for the "$sort" operation, which is converted into
# a SON (sorted dictionary) object before execution.
#
# Aggregation query example:
#
# {
# "collection" : "things",
# "aggregate" : [
# {
# "$unwind" : "$tags"
# },
# {
# "$group" : {
# "_id" : "$tags",
# "count" : { "$sum" : 1 }
# }
# },
# {
# "$sort" : [
# {
# "name" : "count",
# "direction" : -1
# },
# {
# "name" : "_id",
# "direction" : -1
# }
# ]
# }
# ]
# }
#
#
def mongodb(connection_string):
def _get_column_by_name(columns, column_name):
for c in columns:
@@ -56,7 +120,7 @@ def mongodb(connection_string):
if is_replica_set:
if not connection_string["replicaSetName"]:
return None, "replicaSetName is set in the connection string JSON but is empty"
db_connection = pymongo.MongoReplicaSetClient(connection_string["connectionString"], replicaSet=connection_string["replicaSetName"])
else:
db_connection = pymongo.MongoClient(connection_string["connectionString"])
@@ -74,9 +138,12 @@ def mongodb(connection_string):
except:
return None, "Invalid query format. The query is not a valid JSON."
if "query" in query_data and "aggregate" in query_data:
return None, "'query' and 'aggregate' sections cannot be used at the same time"
collection = None
if not "collection" in query_data:
return None, "'collection' must have a value to run a query"
return None, "'collection' must be set"
else:
collection = query_data["collection"]
@@ -93,15 +160,31 @@ def mongodb(connection_string):
_convert_date(q[k], k2)
f = None
aggregate = None
if "aggregate" in query_data:
aggregate = query_data["aggregate"]
for step in aggregate:
if "$sort" in step:
sort_list = []
for sort_item in step["$sort"]:
sort_list.append((sort_item["name"], sort_item["direction"]))
step["$sort"] = SON(sort_list)
if aggregate:
pass
else:
s = None
if "sort" in query_data and query_data["sort"]:
s = []
for field in query_data["sort"]:
for k in field:
s.append((k, field[k]))
if "fields" in query_data:
f = query_data["fields"]
s = None
if "sort" in query_data and query_data["sort"]:
s = []
for field_name in query_data["sort"]:
s.append((field_name, query_data["sort"][field_name]))
columns = []
rows = []
@@ -109,10 +192,14 @@ def mongodb(connection_string):
json_data = None
cursor = None
if s:
cursor = db[collection].find(q, f).sort(s)
else:
cursor = db[collection].find(q, f)
if q:
if s:
cursor = db[collection].find(q, f).sort(s)
else:
cursor = db[collection].find(q, f)
elif aggregate:
r = db[collection].aggregate(aggregate)
cursor = r["result"]
for r in cursor:
for k in r:
@@ -127,7 +214,6 @@ def mongodb(connection_string):
if type(r[k]) == ObjectId:
r[k] = str(r[k])
rows.append(r)
if f:
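The SON conversion in the aggregation branch matters because a parsed JSON object gives no guarantee about key order, while MongoDB evaluates "$sort" keys in order. A minimal sketch of what the loop above produces, using the values from the aggregation example (bson ships with pymongo):

from bson.son import SON

# list-of-pairs form taken from the "$sort" example above
son_sort = SON([("count", -1), ("_id", -1)])  # ordered mapping; key order survives BSON serialization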

redash/models.py

@@ -225,6 +225,15 @@ class QueryResult(BaseModel):
'retrieved_at': self.retrieved_at
}
@classmethod
def unused(cls):
week_ago = datetime.datetime.now() - datetime.timedelta(days=7)
unused_results = cls.select().where(Query.id == None, cls.retrieved_at < week_ago)\
.join(Query, join_type=peewee.JOIN_LEFT_OUTER)
return unused_results
@classmethod
def get_latest(cls, data_source, query, ttl=0):
query_hash = utils.gen_query_hash(query)
@@ -319,7 +328,8 @@ class Query(BaseModel):
q = Query.select(Query, User, QueryResult.retrieved_at, QueryResult.runtime)\
.join(QueryResult, join_type=peewee.JOIN_LEFT_OUTER)\
.switch(Query).join(User)\
.group_by(Query.id, User.id, QueryResult.id, QueryResult.retrieved_at, QueryResult.runtime)
.group_by(Query.id, User.id, QueryResult.id, QueryResult.retrieved_at, QueryResult.runtime)\
.order_by(cls.created_at.desc())
return q
@@ -349,7 +359,7 @@ class Query(BaseModel):
if term.isdigit():
where |= cls.id == term
return cls.select().where(where)
return cls.select().where(where).order_by(cls.created_at.desc())
@classmethod
def update_instance(cls, query_id, **kwargs):

redash/settings.py

@@ -56,6 +56,10 @@ CELERY_BROKER = os.environ.get("REDASH_CELERY_BROKER", REDIS_URL)
CELERY_BACKEND = os.environ.get("REDASH_CELERY_BACKEND", REDIS_URL)
CELERY_FLOWER_URL = os.environ.get("REDASH_CELERY_FLOWER_URL", "/flower")
# The following enables a periodic job (every 5 minutes) that removes unused query results. It stays behind this
# "feature flag" until proven safe.
QUERY_RESULTS_CLEANUP_ENABLED = parse_boolean(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_ENABLED", "false"))
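Since the flag is parsed from the environment, enabling the cleanup job is just a matter of exporting the variable before starting the server and the Celery workers:

export REDASH_QUERY_RESULTS_CLEANUP_ENABLED=true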
# Google Apps domain to allow access from; any user with an email address in this domain will be
# allowed access
GOOGLE_APPS_DOMAIN = os.environ.get("REDASH_GOOGLE_APPS_DOMAIN", "")

redash/tasks.py

@@ -202,6 +202,22 @@ def cleanup_tasks():
redis_connection.delete(lock_keys[i])
@celery.task(base=BaseTask)
def cleanup_query_results():
"""
Job to clean up unused query results -- results that no query links to anymore and that are older than a week (so they
are less likely to still be open in someone's browser).
Each run deletes only 100 query results, so it won't choke the database when there are many of them.
"""
unused_query_results = models.QueryResult.unused().limit(100)
total_unused_query_results = models.QueryResult.unused().count()
deleted_count = models.QueryResult.delete().where(models.QueryResult.id << unused_query_results).execute()
logger.info("Deleted %d unused query results out of total of %d." % (deleted_count, total_unused_query_results))
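(In peewee, the << operator translates to an SQL IN clause, so the DELETE touches only the ids returned by the limited query above.)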
@celery.task(bind=True, base=BaseTask, track_started=True)
def execute_query(self, query, data_source_id):
# TODO: maybe this should be a class?

redash/worker.py

@@ -7,19 +7,26 @@ celery = Celery('redash',
broker=settings.CELERY_BROKER,
include='redash.tasks')
celery.conf.update(CELERY_RESULT_BACKEND=settings.CELERY_BACKEND,
CELERYBEAT_SCHEDULE={
'refresh_queries': {
'task': 'redash.tasks.refresh_queries',
'schedule': timedelta(seconds=30)
},
'cleanup_tasks': {
'task': 'redash.tasks.cleanup_tasks',
'schedule': timedelta(minutes=5)
},
},
CELERY_TIMEZONE='UTC')
celery_schedule = {
'refresh_queries': {
'task': 'redash.tasks.refresh_queries',
'schedule': timedelta(seconds=30)
},
'cleanup_tasks': {
'task': 'redash.tasks.cleanup_tasks',
'schedule': timedelta(minutes=5)
}
}
if settings.QUERY_RESULTS_CLEANUP_ENABLED:
celery_schedule['cleanup_query_results'] = {
'task': 'redash.tasks.cleanup_query_results',
'schedule': timedelta(minutes=5)
}
celery.conf.update(CELERY_RESULT_BACKEND=settings.CELERY_BACKEND,
CELERYBEAT_SCHEDULE=celery_schedule,
CELERY_TIMEZONE='UTC')
if __name__ == '__main__':
celery.start()

tests/test_models.py

@@ -129,6 +129,25 @@ class QueryResultTest(BaseTestCase):
self.assertEqual(found_query_result.id, qr.id)
class TestUnusedQueryResults(BaseTestCase):
def test_returns_only_unused_query_results(self):
two_weeks_ago = datetime.datetime.now() - datetime.timedelta(days=14)
qr = query_result_factory.create()
query = query_factory.create(latest_query_data=qr)
unused_qr = query_result_factory.create(retrieved_at=two_weeks_ago)
self.assertIn(unused_qr, models.QueryResult.unused())
self.assertNotIn(qr, models.QueryResult.unused())
def test_returns_only_over_a_week_old_results(self):
two_weeks_ago = datetime.datetime.now() - datetime.timedelta(days=14)
unused_qr = query_result_factory.create(retrieved_at=two_weeks_ago)
new_unused_qr = query_result_factory.create()
self.assertIn(unused_qr, models.QueryResult.unused())
self.assertNotIn(new_unused_qr, models.QueryResult.unused())
class TestQueryResultStoreResult(BaseTestCase):
def setUp(self):
super(TestQueryResultStoreResult, self).setUp()