Compare commits


73 Commits

Author SHA1 Message Date
Arik Fraimovich
a4486c56b9 Merge pull request #289 from EverythingMe/feature_google_oauth
Fix: add necessary scope to get user's name
2014-09-26 00:40:11 +03:00
Arik Fraimovich
3da0ecf36c Fix: add necessary scope to get user's name 2014-09-25 17:55:43 +03:00
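For context on the scope fix: Google's OAuth2 userinfo endpoint only includes the user's name when the profile scope is requested in addition to email. Below is a minimal sketch of a Google remote app using the old Flask-OAuth extension added in the commits further down; the endpoint URLs follow Google's documented OAuth2 flow, and the client id/secret are placeholders rather than redash's actual configuration.

```python
# Illustrative sketch only (old Flask-OAuth API); credentials are placeholders.
from flask_oauth import OAuth

oauth = OAuth()
google = oauth.remote_app(
    'google',
    base_url='https://www.googleapis.com/oauth2/v1/',
    authorize_url='https://accounts.google.com/o/oauth2/auth',
    request_token_url=None,
    request_token_params={
        # userinfo.email alone does not return a "name" field;
        # userinfo.profile is the scope the fix above adds.
        'scope': ('https://www.googleapis.com/auth/userinfo.email '
                  'https://www.googleapis.com/auth/userinfo.profile'),
        'response_type': 'code',
    },
    access_token_url='https://accounts.google.com/o/oauth2/token',
    access_token_method='POST',
    access_token_params={'grant_type': 'authorization_code'},
    consumer_key='GOOGLE_CLIENT_ID',
    consumer_secret='GOOGLE_CLIENT_SECRET',
)
```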
Arik Fraimovich
11a1095b18 Merge pull request #284 from EverythingMe/feature_google_oauth
Feature: Google OAuth support (instead of deprecated OpenID)
2014-09-24 18:13:45 +03:00
Arik Fraimovich
b43485f322 Update tests 2014-09-21 10:11:03 +03:00
Arik Fraimovich
d83675326b Only enable google oauth if client id & secret provided 2014-09-21 09:07:52 +03:00
Arik Fraimovich
8d7b9a552e Google OAuth support (fixes #223) 2014-09-21 08:53:41 +03:00
Arik Fraimovich
e1eb75b786 Add to requirements flask-oauth and remove flask-googleopenid 2014-09-21 08:48:15 +03:00
Arik Fraimovich
34a3c9e91c Link to wiki in readme 2014-09-17 16:14:49 +03:00
Arik Fraimovich
e007a2891d Fix build status image in readme 2014-09-17 16:06:15 +03:00
Arik Fraimovich
febe6e4aa7 Update readme 2014-09-17 16:04:30 +03:00
Arik Fraimovich
8099dafc68 Merge pull request #283 from EverythingMe/fix_stuck_jobs
Update psycopg2 to 2.5.2.
2014-09-15 09:28:47 +03:00
Arik Fraimovich
ce3d5e637f Update psycopg2 to 2.5.2.
In 2.5.1 there was an issue where the OperationalError exception caused a SEGFAULT
when being pickled. This crashed the Celery worker, causing the jobs to be lost.
2014-09-15 07:25:35 +03:00
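Background for the psycopg2 bump: Celery serializes task state, including exceptions, when moving it between worker processes and the result backend (pickle was the default serializer at the time), so an exception type that cannot survive pickling can take the worker process down and lose the job with it. A tiny, purely illustrative check for whether an exception round-trips through pickle:

```python
import pickle

def survives_pickle(exc):
    """Return True if the exception makes a clean pickle round trip."""
    try:
        return isinstance(pickle.loads(pickle.dumps(exc)), type(exc))
    except Exception:
        return False

class FakeOperationalError(Exception):
    """Stand-in for psycopg2.OperationalError, for illustration only."""

# With psycopg2 2.5.1 a real OperationalError could crash the process instead
# of failing gracefully here; 2.5.2 restored safe pickling.
print(survives_pickle(FakeOperationalError('server closed the connection')))
```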
Arik Fraimovich
4a52ccd4fa Gitter integration for CircleCI. 2014-09-14 18:23:02 +03:00
Arik Fraimovich
a0c81f8a31 Merge pull request #281 from EverythingMe/fix_stuck_jobs
Several fixes to reduce cases of stuck jobs
2014-09-11 07:50:35 +03:00
Arik Fraimovich
ce13b79bdc Use correct logging level 2014-09-11 07:47:30 +03:00
Arik Fraimovich
c580db277d Add cleanup_tasks job.
Enumerates all locks and removes those of non-existing jobs. Useful for the
case where the worker is cold restarted and jobs aren't finished properly.
2014-09-11 07:42:36 +03:00
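A rough sketch of what such a cleanup pass can look like; the Redis lock-key prefix, the broker/backend URL and the exact decision rule are assumptions for illustration, not the redash implementation. It also folds in the rule from the commit just below: locks whose job is already in a ready state (revoked, finished or failed) are safe to drop.

```python
import redis
from celery import Celery, states

# Broker/backend URL and lock-key layout are assumptions for this sketch.
celery_app = Celery('worker', broker='redis://localhost:6379/0',
                    backend='redis://localhost:6379/0')
r = redis.StrictRedis(decode_responses=True)

def cleanup_stale_locks(prefix='query_hash_'):
    """Drop locks whose Celery job is gone or already in a ready state."""
    for key in r.keys(prefix + '*'):  # fine for a periodic job; use SCAN on huge keyspaces
        task_id = r.get(key)
        if not task_id:
            continue
        status = celery_app.AsyncResult(task_id).status
        # READY_STATES = SUCCESS / FAILURE / REVOKED. PENDING is what Celery
        # reports for ids it has never heard of, e.g. jobs lost in a cold restart.
        if status == states.PENDING or status in states.READY_STATES:
            r.delete(key)
```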
Arik Fraimovich
5e944e9a8f If a found lock is for a ready job, ignore it.
Ready means revoked, finished or failed.
2014-09-11 07:41:43 +03:00
Arik Fraimovich
4b94cf706a Set default locks expiry time to 12 hours 2014-09-11 07:41:23 +03:00
Arik Fraimovich
364c51456d Set expiry time on locks, just in case they get stuck for some reason. 2014-09-11 07:40:20 +03:00
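The expiry change is the usual safety net for Redis-backed locks: give every lock key a TTL so that a lock orphaned by a crashed worker eventually disappears on its own. A minimal sketch; the 12-hour default mirrors the neighbouring commit, while the key prefix and helper name are illustrative:

```python
import redis

LOCK_EXPIRY = 12 * 60 * 60  # seconds; the 12-hour default from the commit above

r = redis.StrictRedis()

def acquire_lock(name, owner, expiry=LOCK_EXPIRY):
    """Take the lock only if it is free, and always attach a TTL."""
    # SET key value NX EX <ttl> is atomic: create-if-missing plus expiry,
    # so a crashed owner can never leave the key behind forever.
    return bool(r.set('lock:' + name, owner, nx=True, ex=expiry))
```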
Arik Fraimovich
1274d36abc Merge pull request #280 from EverythingMe/fix_stuck_jobs
Fix #261: cancelling jobs sends them to limbo
2014-09-06 18:12:03 +03:00
Arik Fraimovich
f6bd562dd2 Remove cleanup_tasks, as it's not stable 2014-09-06 18:09:04 +03:00
Arik Fraimovich
065d2bc2c6 Schedule removal of dead tasks 2014-09-06 14:18:35 +03:00
Arik Fraimovich
653ed1c57a Add cleanup task to remove locks of dead jobs 2014-09-06 14:18:15 +03:00
Arik Fraimovich
7dc1176628 Fix #261: cancelling jobs sends them to limbo 2014-09-06 13:56:36 +03:00
Arik Fraimovich
365b8a8c93 Merge pull request #279 from EverythingMe/json-results
API - query results in JSON format. fixes #278
2014-09-03 12:07:36 +03:00
Arik Fraimovich
6e1e0a9967 Merge QueryResultAPI with CSVQueryResultAPI 2014-09-03 11:55:17 +03:00
Amir Nissim
170640a63f API - query results in JSON format. fixes #278 2014-09-02 17:52:04 +03:00
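Merging the two result endpoints essentially means one view that picks its serializer from the requested filetype. A skeletal Flask illustration; the route shape, field names and sample rows are assumptions, not the exact redash API:

```python
import csv
import io

from flask import Flask, jsonify, make_response

app = Flask(__name__)

FAKE_ROWS = [{'name': 'Amir Nissim'}, {'name': 'Arik Fraimovich'}]  # stand-in data

@app.route('/api/query_results/<int:query_result_id>.<filetype>')
def query_result(query_result_id, filetype):
    if filetype == 'csv':
        buf = io.StringIO()
        writer = csv.DictWriter(buf, fieldnames=sorted(FAKE_ROWS[0]))
        writer.writeheader()
        writer.writerows(FAKE_ROWS)
        response = make_response(buf.getvalue())
        response.headers['Content-Type'] = 'text/csv'
        return response
    # Anything else falls back to the JSON shape the UI consumes.
    return jsonify(query_result={'id': query_result_id,
                                 'data': {'rows': FAKE_ROWS}})
```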
Arik Fraimovich
5e970b73d5 Merge pull request #270 from olgakogan/master
added handling for querying strings with non standard characters
2014-08-25 12:00:02 +03:00
olgakogan
a4643472a5 added handling for querying strings with non standard characters 2014-08-24 19:08:10 +03:00
Arik Fraimovich
7aa01f2bd2 Comment out filters url sync tests. 2014-08-20 09:07:08 +03:00
Arik Fraimovich
cb4b0e0296 Merge pull request #269 from EverythingMe/257-chart-editor
Disable filters url syncing
2014-08-20 08:59:22 +03:00
Arik Fraimovich
2c05e921c4 Disable filters url syncing 2014-08-20 08:58:56 +03:00
Arik Fraimovich
c4877f254e Merge pull request #268 from EverythingMe/257-chart-editor
[#257] chart editor: global series type
2014-08-19 19:51:57 +03:00
Arik Fraimovich
9fc59de35f remove throttling of redrawData 2014-08-19 18:37:32 +03:00
Amir Nissim
eb50f3fc94 [#257] chart editor: use globalSeriesType when creating new series 2014-08-19 14:44:53 +03:00
Arik Fraimovich
12fe59827f Merge pull request #267 from EverythingMe/257-chart-editor
[#257] chart editor: global series type
2014-08-19 14:04:44 +03:00
Arik Fraimovich
d32caff31d Merge pull request #266 from EverythingMe/265-db-reloads
disable reloadOnSearch for /dashboard. fixes #265
2014-08-19 13:17:17 +03:00
Amir Nissim
ba540ff380 [#257] chart editor: global series type 2014-08-19 13:14:24 +03:00
Amir Nissim
2112faab02 disable reloadOnSearch for /dashboard. fixes #265 2014-08-19 12:01:23 +03:00
Arik Fraimovich
34c6be398a Merge pull request #264 from EverythingMe/fix_data_error
Treat all psycopg2.DatabaseError the same.
2014-08-19 09:53:38 +03:00
Arik Fraimovich
3f9c2a5592 Treat all psycopg2.DatabaseError the same.
Sometimes division-by-zero errors are reported as OperationalError rather than
DataError.
2014-08-19 09:47:31 +03:00
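The fix relies on psycopg2's DB-API exception hierarchy: DataError, OperationalError, IntegrityError and the rest all derive from psycopg2.DatabaseError, so catching the base class treats a division-by-zero error the same way no matter which subclass the server maps it to. A short sketch of the pattern (connection setup omitted):

```python
import psycopg2

def run_query(connection, sql):
    cursor = connection.cursor()
    try:
        cursor.execute(sql)
        return cursor.fetchall(), None
    except psycopg2.DatabaseError as e:
        # OperationalError, DataError, IntegrityError, ... all inherit from
        # DatabaseError, so "division by zero" is handled identically however
        # the server chooses to report it.
        return None, str(e)
    finally:
        cursor.close()
```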
Arik Fraimovich
8076b7f0b7 Gruntfile.js: add login.html back to minified files. 2014-08-12 13:34:39 +03:00
Arik Fraimovich
8940d66b0b Merge pull request #253 from EverythingMe/146-filter-sync
rd_ui: sync filters with location.search [closes #146]
2014-08-07 14:28:06 +03:00
Amir Nissim
948e2247e4 rd_ui: sync filters with location.search [closes #146] 2014-08-07 14:11:43 +03:00
Arik Fraimovich
eba2ba1918 Merge pull request #260 from EverythingMe/fix_queue_name
Fix: dashboard filters broken after #252
2014-08-07 08:20:01 +03:00
Arik Fraimovich
59d5ba9273 Use promises to create dashboard filters. 2014-08-06 23:39:30 +03:00
Arik Fraimovich
4aba24a976 Add promise support to QueryResult. 2014-08-06 23:39:09 +03:00
Arik Fraimovich
762c331ddf Merge pull request #259 from EverythingMe/fix_queue_name
Fix events import code
2014-08-06 17:58:28 +03:00
Amir Nissim
9592610f8b update .gitignore 2014-08-06 16:19:09 +03:00
Arik Fraimovich
8b7399ddc9 Fix events import code 2014-08-06 09:31:19 +03:00
Arik Fraimovich
f6221da9dc Merge pull request #256 from EverythingMe/fix_queue_name
Fix: series options not showing up when first running the query.
2014-08-05 12:42:43 +03:00
Arik Fraimovich
10c84d2cd0 Fix: series options not showing up when first running the query. 2014-08-05 12:39:35 +03:00
Arik Fraimovich
60d784d7bc Cleanup Query.prototype.getQueryResult and make sure it caches result by id. 2014-08-05 12:38:53 +03:00
Arik Fraimovich
b28e4be8d7 Sort data sources by id. 2014-08-05 12:30:51 +03:00
Arik Fraimovich
e74b36996f Merge pull request #255 from EverythingMe/fix_queue_name
Fix: use correct queue name for scheduled queries
2014-08-04 22:40:16 +03:00
Arik Fraimovich
4c28d11259 Fix: use correct queue name for scheduled queries 2014-08-04 22:31:13 +03:00
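For context on the queue-name fix: a Celery task only reaches workers that consume the queue it was published to (the worker in the Procfile diff further down is started with -Q queries,celery,scheduled_queries), so enqueueing scheduled runs under the wrong name strands them in the broker. A hedged sketch of the routing pattern; the task signature and call site are illustrative, not redash's actual code:

```python
from celery import Celery

celery = Celery('worker', broker='redis://localhost:6379/0')

@celery.task(name='execute_query')  # task name is illustrative
def execute_query(query, data_source_id):
    pass  # the real task runs the query and stores the result

def enqueue_query(query, data_source_id, scheduled=False):
    # The worker consumes -Q queries,celery,scheduled_queries (see the
    # Procfile diff below); any other queue name strands the task.
    queue = 'scheduled_queries' if scheduled else 'queries'
    return execute_query.apply_async(args=(query, data_source_id), queue=queue)
```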
Arik Fraimovich
b1e1a32f37 Merge pull request #252 from EverythingMe/perf
perf: HTTP caching headers for /api/query_results [fixes #228]
2014-08-04 16:55:39 +03:00
Amir Nissim
a12b43265d perf: HTTP caching headers for /api/query_results [fixes #228] 2014-08-04 16:50:56 +03:00
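The caching change lets browsers reuse a query result they have already fetched instead of re-requesting it on every dashboard load. A minimal Flask illustration of attaching such headers; the max-age value and exact header set are assumptions, not necessarily what redash sends:

```python
from flask import Flask, jsonify, make_response

app = Flask(__name__)

@app.route('/api/query_results/<int:query_result_id>')
def cached_query_result(query_result_id):
    response = make_response(jsonify(query_result={'id': query_result_id}))
    # A stored result set never changes, so the client may cache it;
    # the one-day max-age here is illustrative, not redash's exact value.
    response.headers['Cache-Control'] = 'private, max-age=86400'
    return response
```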
Arik Fraimovich
c2d621ae0f Merge pull request #247 from EverythingMe/245-refresh-btn
[#245] Add refresh button to query view page
2014-08-03 14:51:06 +03:00
Amir Nissim
d93e07061b [#245] Add refresh button to query view page 2014-08-03 13:14:17 +03:00
Arik Fraimovich
cb59973b9a Merge pull request #251 from EverythingMe/tests
setup Karma unit tests
2014-08-03 11:27:38 +03:00
Amir Nissim
72e41a94e4 update ci config 2014-08-03 11:15:02 +03:00
Amir Nissim
9013497fc7 rd_ui: fix failing unit test 2014-08-03 11:15:00 +03:00
Amir Nissim
a74ae32122 testing infra: basic QueryViewCtrl tests 2014-07-31 16:11:37 +03:00
Amir Nissim
9cfae349da testing infra: updated Karma and Grunt 2014-07-30 14:28:00 +03:00
Arik Fraimovich
a16718917b Merge pull request #248 from EverythingMe/243-db-requests
#243 dashboards api will not return query results by default
2014-07-29 16:14:08 +03:00
Amir Nissim
e2e365d9ff Query.to_dict never with results 2014-07-29 11:11:40 +03:00
Amir Nissim
5310498d0f [#241] fix textbox widget layout 2014-07-28 17:17:20 +03:00
Amir Nissim
bb1d2f8805 [#243] dashboards api will not return query results by default 2014-07-28 16:52:19 +03:00
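This change trims the dashboard payload: widget queries are serialized without their potentially large latest result, and the client fetches results separately per widget. A hypothetical serializer shape to illustrate the idea; the class, method and field names are made up, and redash's actual peewee models differ:

```python
class Query(object):
    """Hypothetical serializer shape; redash's real model differs."""

    def __init__(self, id, name, query, latest_query_data=None):
        self.id = id
        self.name = name
        self.query = query
        self.latest_query_data = latest_query_data

    def to_dict(self, with_result=False):
        d = {'id': self.id, 'name': self.name, 'query': self.query}
        if with_result and self.latest_query_data is not None:
            # Only inline the (potentially large) result when explicitly asked;
            # the dashboards API now always leaves it out.
            d['latest_query_data'] = self.latest_query_data
        return d
```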
Amir Nissim
0d5f001d38 fix migration add_text_to_widgets 2014-07-28 16:27:23 +03:00
Amir Nissim
236f7f9c04 fix add_global_filters_to_dashboard migration script 2014-07-28 12:15:08 +03:00
Amir Nissim
74bf8e5239 ignore celery files 2014-07-28 12:08:59 +03:00
Arik Fraimovich
71e125b4b0 Update Procfile.dev to use celery. 2014-07-20 12:08:08 +03:00
47 changed files with 1116 additions and 629 deletions

.gitignore

@@ -4,7 +4,7 @@
.coverage
rd_ui/dist
.DS_Store
celerybeat-schedule
celerybeat-schedule*
.#*
\#*#
*~
@@ -17,3 +17,4 @@ redash/dump.rdb
.ruby-version
venv
dump.rdb


@@ -6,7 +6,7 @@ FILENAME=$(CIRCLE_ARTIFACTS)/$(NAME).$(VERSION).tar.gz
deps:
cd rd_ui && npm install
cd rd_ui && npm install grunt-cli bower
cd rd_ui && npm install -g bower grunt-cli
cd rd_ui && bower install
cd rd_ui && grunt build
@@ -19,3 +19,4 @@ upload:
test:
nosetests --with-coverage --cover-package=redash tests/*.py
cd rd_ui && grunt test


@@ -1,2 +1,2 @@
web: ./manage.py runserver -p $PORT
worker: ./manage.py runworkers
worker: ./bin/run celery worker --app=redash.worker --beat -Qqueries,celery,scheduled_queries


@@ -1,18 +1,24 @@
# [_re:dash_](https://github.com/everythingme/redash)
![Build Status](https://circleci.com/gh/EverythingMe/redash.png?circle-token=8a695aa5ec2cbfa89b48c275aea298318016f040 "Build Status")
<p align="center">
<img title="re:dash" src='https://raw.githubusercontent.com/EverythingMe/redash/screenshots/redash_logo.png' />
</p>
<p align="center">
<img title="Build Status" src='https://circleci.com/gh/EverythingMe/redash.png?circle-token=8a695aa5ec2cbfa89b48c275aea298318016f040'/>
</p>
**_re:dash_** is our take on freeing the data within our company in a way that will better fit our culture and usage patterns.
Prior to **_re:dash_**, we tried to use tranditional BI suites and discovered a set of bloated, technically challenged and slow tools/flows. What we were looking for was a more hacker'ish way to look at data, so we built one.
Prior to **_re:dash_**, we tried to use traditional BI suites and discovered a set of bloated, technically challenged and slow tools/flows. What we were looking for was a more hacker'ish way to look at data, so we built one.
**_re:dash_** was built to allow fast and easy access to billions of records, that we process and collect using Amazon Redshift ("petabyte scale data warehouse" that "speaks" PostgreSQL).
Today **_re:dash_** has support for querying multiple databases, including: Redshift, Google BigQuery, PostgreSQL, MySQL, Graphite and custom scripts.
**_re:dash_** consists of two parts:
1. **Query Editor**: think of [JS Fiddle](http://jsfiddle.net) for SQL queries. It's your way to share data in the organization in an open way, by sharing both the dataset and the query that generated it. This way everyone can peer review not only the resulting dataset but also the process that generated it. Also it's possible to fork it and generate new datasets and reach new insights.
2. **Dashboards/Visualizations**: once you have a dataset, you can create different visualizations out of it, and then combine several visualizations into a single dashboard. Currently it supports bar charts, pivot table and cohorts.
1. **Query Editor**: think of [JS Fiddle](http://jsfiddle.net) for SQL queries. It's your way to share data in the organization in an open way, by sharing both the dataset and the query that generated it. This way everyone can peer review not only the resulting dataset but also the process that generated it. Also it's possible to fork it and generate new datasets and reach new insights.
2. **Dashboards/Visualizations**: once you have a dataset, you can create different visualizations out of it, and then combine several visualizations into a single dashboard. Currently it supports charts, pivot table and cohorts.
This is the first release, which is more than usable but still has its rough edges and way to go to fulfill its full potential. The Query Editor part is quite solid, but the visualizations need more work to enrich them and to make them more user friendly.
**_re:dash_** is a work in progress and has its rough edges and way to go to fulfill its full potential. The Query Editor part is quite solid, but the visualizations need more work to enrich them and to make them more user friendly.
## Demo
@@ -22,51 +28,21 @@ You can try out the demo instance: http://rd-demo.herokuapp.com/ (login with any
Due to Heroku dev plan limits, it has a small database of flights (see schema [here](http://rd-demo.herokuapp.com/dashboard/schema)). Also due to another Heroku limitation, it is running with the regular user, hence you can DELETE or INSERT data/tables. Please be nice and don't do this.
## Getting Started
* [Setting re:dash on your own server (Ubuntu)](https://github.com/EverythingMe/redash/wiki/Setting-re:dash-on-your-own-server-(for-version-0.4))
* Additional documentation in the [Wiki](https://github.com/everythingme/redash/wiki).
## Getting help
* [Google Group (mailing list)](https://groups.google.com/forum/#!forum/redash-users): the best place to get updates about new releases or ask general questions.
* #redash IRC channel on [Freenode](http://www.freenode.net/).
## Technology
* Python
* [AngularJS](http://angularjs.org/)
* [PostgreSQL](http://www.postgresql.org/) / [AWS Redshift](http://aws.amazon.com/redshift/)
* [Redis](http://redis.io)
PostgreSQL is used both as the operatinal database for the system, but also as the data store that is being queried. To be exact, we built this system to use on top of Amazon's Redshift, which supports the PG driver. But it's quite simple to add support for other datastores, and we do plan to do so.
This is our first large scale AngularJS project, and we learned a lot during the development of it. There are still things we need to iron out, and comments on the way we use AngularJS are more than welcome (and pull requests just as well).
### HighCharts
HighCharts is really great, but it's not free for commercial use. Please refer to their [licensing options](http://shop.highsoft.com/highcharts.html), to see what applies for your use.
It's very likely that in the future we will switch to [D3.js](http://d3js.org/) instead.
## Getting Started
* [Setting up re:dash on Heroku in 5 minutes](https://github.com/EverythingMe/redash/wiki/Setting-up-re:dash-on-Heroku-in-5-minutes)
* [Setting re:dash on your own server (Ubuntu)](https://github.com/EverythingMe/redash/wiki/Setting-re:dash-on-your-own-server-(Ubuntu))
**Need help setting re:dash or one of the dependencies up?** Ping @arikfr on the IRC #redash channel or send a message to the [mailing list](https://groups.google.com/forum/#!forum/redash-users), and he will gladly help.
* Find us [on gitter](https://gitter.im/EverythingMe/redash#) (chat).
* Contact Arik, the maintainer directly: arik@everything.me.
## Roadmap
Below you can see the "big" features of the next 3 releases (for full list, click on the link):
### [v0.3](https://github.com/EverythingMe/redash/issues?milestone=2&state=open)
- Dashboard filters: ability to filter/slice the data you see in a single dashboard using filters (date or selectors).
- Multiple databases support (including other database type than PostgreSQL).
- Scheduled reports by email.
- Comments on queries.
### [v0.4](https://github.com/EverythingMe/redash/issues?milestone=3&state=open)
- Query versioning.
- More "realtime" UI (using websockets).
- More visualizations.
TBD.
## Reporting Bugs and Contributing Code


@@ -1,7 +1,7 @@
machine:
node:
version:
0.10.22
0.10.24
python:
version:
2.7.3
@@ -17,9 +17,12 @@ test:
override:
- make test
post:
- make pack
- make pack
deployment:
github:
branch: master
commands:
- make upload
notify:
webhooks:
- url: https://webhooks.gitter.im/e/895d09c3165a0913ac2f


@@ -43,8 +43,10 @@ def check_settings():
@manager.command
def import_events(events_file):
import json
from collections import Counter
count = Counter()
count = 0
with open(events_file) as f:
for line in f:
try:
@@ -53,21 +55,32 @@ def import_events(events_file):
user = event.pop('user_id')
action = event.pop('action')
object_type = event.pop('object_type')
object_id = event.pop('object_id')
object_id = event.pop('object_id', None)
if object_id == 'dashboard' and object_type == 'dashboard':
count['bad dashboard id'] += 1
continue
created_at = datetime.datetime.utcfromtimestamp(event.pop('timestamp'))
additional_properties = json.dumps(event)
models.Event.create(user=user, action=action, object_type=object_type, object_id=object_id,
additional_properties=additional_properties, created_at=created_at)
count += 1
count['imported'] += 1
except Exception as ex:
print "Failed importing line:"
print line
print ex.message
count[ex.message] += 1
count['failed'] += 1
print "Importe %d rows" % count
models.db.close_db(None)
for k, v in count.iteritems():
print k
print v
@database_manager.command


@@ -1,6 +1,6 @@
from playhouse.migrate import Migrator
from redash import db
from redash import models
from redash.models import db
if __name__ == '__main__':
@@ -9,4 +9,4 @@ if __name__ == '__main__':
with db.database.transaction():
migrator.add_column(models.Dashboard, models.Dashboard.dashboard_filters_enabled, 'dashboard_filters_enabled')
db.close_db(None)
db.close_db(None)


@@ -1,5 +1,5 @@
from playhouse.migrate import Migrator
from redash import db
from redash.models import db
from redash import models
@@ -10,4 +10,4 @@ if __name__ == '__main__':
migrator.add_column(models.Widget, models.Widget.text, 'text')
migrator.set_nullable(models.Widget, models.Widget.visualization, True)
db.close_db(None)
db.close_db(None)


@@ -1,6 +1,5 @@
language: node_js
node_js:
- '0.8'
- '0.10'
before_script:
- 'npm install -g bower grunt-cli'


@@ -1,10 +1,5 @@
// Generated on 2013-08-25 using generator-angular 0.4.0
// Generated on 2014-07-30 using generator-angular 0.9.2
'use strict';
var LIVERELOAD_PORT = 35729;
var lrSnippet = require('connect-livereload')({ port: LIVERELOAD_PORT });
var mountFolder = function (connect, dir) {
return connect.static(require('path').resolve(dir));
};
// # Globbing
// for performance reasons we're only matching one level down:
@@ -13,48 +8,148 @@ var mountFolder = function (connect, dir) {
// 'test/spec/**/*.js'
module.exports = function (grunt) {
// Load grunt tasks automatically
require('load-grunt-tasks')(grunt);
// Time how long tasks take. Can help when optimizing build times
require('time-grunt')(grunt);
// configurable paths
var yeomanConfig = {
app: 'app',
// Configurable paths for the application
var appConfig = {
app: require('./bower.json').appPath || 'app',
dist: 'dist'
};
try {
yeomanConfig.app = require('./bower.json').appPath || yeomanConfig.app;
} catch (e) {}
// Define the configuration for all the tasks
grunt.initConfig({
yeoman: yeomanConfig,
// Project settings
yeoman: appConfig,
// Watches files for changes and runs tasks based on the changed files
watch: {
coffee: {
files: ['<%= yeoman.app %>/scripts/{,*/}*.coffee'],
tasks: ['coffee:dist']
bower: {
files: ['bower.json'],
tasks: ['wiredep']
},
coffeeTest: {
files: ['test/spec/{,*/}*.coffee'],
tasks: ['coffee:test']
js: {
files: ['<%= yeoman.app %>/scripts/{,*/}*.js'],
tasks: ['newer:jshint:all'],
options: {
livereload: '<%= connect.options.livereload %>'
}
},
jsTest: {
files: ['test/spec/{,*/}*.js'],
tasks: ['newer:jshint:test', 'karma']
},
styles: {
files: ['<%= yeoman.app %>/styles/{,*/}*.css'],
tasks: ['copy:styles', 'autoprefixer']
tasks: ['newer:copy:styles', 'autoprefixer']
},
gruntfile: {
files: ['Gruntfile.js']
},
livereload: {
options: {
livereload: LIVERELOAD_PORT
livereload: '<%= connect.options.livereload %>'
},
files: [
'<%= yeoman.app %>/{,*/}*.html',
'.tmp/styles/{,*/}*.css',
'{.tmp,<%= yeoman.app %>}/scripts/{,*/}*.js',
'<%= yeoman.app %>/images/{,*/}*.{png,jpg,jpeg,gif,webp,svg}'
]
}
},
// The actual grunt server settings
connect: {
options: {
port: 9000,
// Change this to '0.0.0.0' to access the server from outside.
hostname: 'localhost',
livereload: 35729
},
livereload: {
options: {
open: true,
middleware: function (connect) {
return [
connect.static('.tmp'),
connect().use(
'/bower_components',
connect.static('./bower_components')
),
connect.static(appConfig.app)
];
}
}
},
test: {
options: {
port: 9001,
middleware: function (connect) {
return [
connect.static('.tmp'),
connect.static('test'),
connect().use(
'/bower_components',
connect.static('./bower_components')
),
connect.static(appConfig.app)
];
}
}
},
dist: {
options: {
open: true,
base: '<%= yeoman.dist %>'
}
}
},
// Make sure code styles are up to par and there are no obvious mistakes
jshint: {
options: {
jshintrc: '.jshintrc',
reporter: require('jshint-stylish')
},
all: {
src: [
'Gruntfile.js',
'<%= yeoman.app %>/scripts/{,*/}*.js'
]
},
test: {
options: {
jshintrc: 'test/.jshintrc'
},
src: ['test/spec/{,*/}*.js']
}
},
// Empties folders to start fresh
clean: {
dist: {
files: [{
dot: true,
src: [
'.tmp',
'<%= yeoman.dist %>/{,*/}*',
'!<%= yeoman.dist %>/.git*'
]
}]
},
server: '.tmp'
},
// Add vendor prefixed styles
autoprefixer: {
options: ['last 1 version'],
options: {
browsers: ['last 1 version']
},
dist: {
files: [{
expand: true,
@@ -64,134 +159,95 @@ module.exports = function (grunt) {
}]
}
},
connect: {
// Automatically inject Bower components into the app
wiredep: {
options: {
port: 9000,
// Change this to '0.0.0.0' to access the server from outside.
hostname: 'localhost'
cwd: '<%= yeoman.app %>'
},
livereload: {
options: {
middleware: function (connect) {
return [
lrSnippet,
mountFolder(connect, '.tmp'),
mountFolder(connect, yeomanConfig.app)
];
}
}
},
test: {
options: {
middleware: function (connect) {
return [
mountFolder(connect, '.tmp'),
mountFolder(connect, 'test')
];
}
}
},
dist: {
options: {
middleware: function (connect) {
return [
mountFolder(connect, yeomanConfig.dist)
];
}
}
app: {
src: ['<%= yeoman.app %>/index.html'],
ignorePath: /\.\.\//
}
},
open: {
server: {
url: 'http://localhost:<%= connect.options.port %>'
}
},
clean: {
dist: {
files: [{
dot: true,
src: [
'.tmp',
'<%= yeoman.dist %>/*',
'!<%= yeoman.dist %>/.git*'
]
}]
},
server: '.tmp'
},
jshint: {
options: {
jshintrc: '.jshintrc'
},
all: [
'Gruntfile.js',
'<%= yeoman.app %>/scripts/{,*/}*.js'
]
},
coffee: {
options: {
sourceMap: true,
sourceRoot: ''
},
dist: {
files: [{
expand: true,
cwd: '<%= yeoman.app %>/scripts',
src: '{,*/}*.coffee',
dest: '.tmp/scripts',
ext: '.js'
}]
},
test: {
files: [{
expand: true,
cwd: 'test/spec',
src: '{,*/}*.coffee',
dest: '.tmp/spec',
ext: '.js'
}]
}
},
// not used since Uglify task does concat,
// but still available if needed
/*concat: {
dist: {}
},*/
rev: {
dist: {
files: {
src: [
'<%= yeoman.dist %>/scripts/{,*/}*.js',
'<%= yeoman.dist %>/styles/{,*/}*.css',
'<%= yeoman.dist %>/images/{,*/}*.{png,jpg,jpeg,gif,webp,svg}',
'<%= yeoman.dist %>/styles/fonts/*'
]
}
// Renames files for browser caching purposes
filerev: {
dist: {
src: [
'<%= yeoman.dist %>/scripts/{,*/}*.js',
'<%= yeoman.dist %>/styles/{,*/}*.css',
'<%= yeoman.dist %>/images/{,*/}*.{png,jpg,jpeg,gif,webp,svg}',
'<%= yeoman.dist %>/styles/fonts/*'
]
}
},
// Reads HTML for usemin blocks to enable smart builds that automatically
// concat, minify and revision files. Creates configurations in memory so
// additional tasks can operate on them
useminPrepare: {
html: ['<%= yeoman.app %>/index.html', '<%= yeoman.app %>/login.html'],
options: {
dest: '<%= yeoman.dist %>'
dest: '<%= yeoman.dist %>',
flow: {
html: {
steps: {
js: ['concat', 'uglifyjs'],
css: ['cssmin']
},
post: {}
}
}
}
},
// Performs rewrites based on filerev and the useminPrepare configuration
usemin: {
html: ['<%= yeoman.dist %>/{,*/}*.html'],
css: ['<%= yeoman.dist %>/styles/{,*/}*.css'],
options: {
dirs: ['<%= yeoman.dist %>']
assetsDirs: ['<%= yeoman.dist %>','<%= yeoman.dist %>/images']
}
},
// The following *-min tasks will produce minified files in the dist folder
// By default, your `index.html`'s <!-- Usemin block --> will take care of
// minification. These next options are pre-configured if you do not wish
// to use the Usemin blocks.
// cssmin: {
// dist: {
// files: {
// '<%= yeoman.dist %>/styles/main.css': [
// '.tmp/styles/{,*/}*.css'
// ]
// }
// }
// },
// uglify: {
// dist: {
// files: {
// '<%= yeoman.dist %>/scripts/scripts.js': [
// '<%= yeoman.dist %>/scripts/scripts.js'
// ]
// }
// }
// },
// concat: {
// dist: {}
// },
imagemin: {
dist: {
files: [{
expand: true,
cwd: '<%= yeoman.app %>/images',
src: '{,*/}*.{png,jpg,jpeg}',
src: '{,*/}*.{png,jpg,jpeg,gif}',
dest: '<%= yeoman.dist %>/images'
}]
}
},
svgmin: {
dist: {
files: [{
@@ -202,41 +258,47 @@ module.exports = function (grunt) {
}]
}
},
cssmin: {
// By default, your `index.html` <!-- Usemin Block --> will take care of
// minification. This option is pre-configured if you do not wish to use
// Usemin blocks.
// dist: {
// files: {
// '<%= yeoman.dist %>/styles/main.css': [
// '.tmp/styles/{,*/}*.css',
// '<%= yeoman.app %>/styles/{,*/}*.css'
// ]
// }
// }
},
htmlmin: {
dist: {
options: {
/*removeCommentsFromCDATA: true,
// https://github.com/yeoman/grunt-usemin/issues/44
//collapseWhitespace: true,
collapseWhitespace: true,
conservativeCollapse: true,
collapseBooleanAttributes: true,
removeAttributeQuotes: true,
removeRedundantAttributes: true,
useShortDoctype: true,
removeEmptyAttributes: true,
removeOptionalTags: true*/
removeCommentsFromCDATA: true,
removeOptionalTags: true
},
files: [{
expand: true,
cwd: '<%= yeoman.app %>',
src: ['*.html', 'views/**/*.html'],
cwd: '<%= yeoman.dist %>',
src: ['*.html', 'views/{,*/}*.html'],
dest: '<%= yeoman.dist %>'
}]
}
},
// Put files not handled in other tasks here
// ngmin tries to make the code safe for minification automatically by
// using the Angular long form for dependency injection. It doesn't work on
// things like resolve or inject so those have to be done manually.
ngmin: {
dist: {
files: [{
expand: true,
cwd: '.tmp/concat/scripts',
src: '*.js',
dest: '.tmp/concat/scripts'
}]
}
},
// Replace Google CDN references
cdnify: {
dist: {
html: ['<%= yeoman.dist %>/*.html']
}
},
// Copies remaining files to places other tasks can use
copy: {
dist: {
files: [{
@@ -247,18 +309,21 @@ module.exports = function (grunt) {
src: [
'*.{ico,png,txt}',
'.htaccess',
'bower_components/**/*',
'images/{,*/}*.{gif,webp}',
'styles/{,*/}*.{png,gif}',
'*.html',
'views/{,*/}*.html',
'images/{,*/}*.{webp}',
'fonts/*'
]
}, {
expand: true,
cwd: '.tmp/images',
dest: '<%= yeoman.dist %>/images',
src: [
'generated/*'
]
src: ['generated/*']
}, {
expand: true,
cwd: 'bower_components/bootstrap/dist',
src: 'fonts/*',
dest: '<%= yeoman.dist %>'
}]
},
styles: {
@@ -268,70 +333,52 @@ module.exports = function (grunt) {
src: '{,*/}*.css'
}
},
// Run some tasks in parallel to speed up the build process
concurrent: {
server: [
'coffee:dist',
'copy:styles'
],
test: [
'coffee',
'copy:styles'
],
dist: [
'coffee',
'copy:styles',
'imagemin',
'svgmin',
'htmlmin'
'svgmin'
]
},
// Test settings
karma: {
unit: {
configFile: 'karma.conf.js',
configFile: 'test/karma.conf.js',
singleRun: true
}
},
cdnify: {
dist: {
html: ['<%= yeoman.dist %>/*.html']
}
},
ngmin: {
dist: {
files: [{
expand: true,
cwd: '<%= yeoman.dist %>/scripts',
src: '*.js',
dest: '<%= yeoman.dist %>/scripts'
}]
}
},
uglify: {
dist: {
files: {
'<%= yeoman.dist %>/scripts/scripts.js': [
'<%= yeoman.dist %>/scripts/scripts.js'
]
}
}
}
});
grunt.registerTask('server', function (target) {
grunt.registerTask('serve', 'Compile then start a connect web server', function (target) {
if (target === 'dist') {
return grunt.task.run(['build', 'open', 'connect:dist:keepalive']);
return grunt.task.run(['build', 'connect:dist:keepalive']);
}
grunt.task.run([
'clean:server',
'wiredep',
'concurrent:server',
'autoprefixer',
'connect:livereload',
'open',
'watch'
]);
});
grunt.registerTask('server', 'DEPRECATED TASK. Use the "serve" task instead', function (target) {
grunt.log.warn('The `server` task has been deprecated. Use `grunt serve` to start a server.');
grunt.task.run(['serve:' + target]);
});
grunt.registerTask('test', [
'clean:server',
'concurrent:test',
@@ -342,21 +389,23 @@ module.exports = function (grunt) {
grunt.registerTask('build', [
'clean:dist',
'wiredep',
'useminPrepare',
'concurrent:dist',
'autoprefixer',
'concat',
'ngmin',
'copy:dist',
'cdnify',
'ngmin',
'cssmin',
'uglify',
'rev',
'usemin'
'filerev',
'usemin',
'htmlmin'
]);
grunt.registerTask('default', [
'jshint',
'newer:jshint',
'test',
'build'
]);


@@ -159,4 +159,4 @@
</script>
</body>
</html>
</html>


@@ -65,7 +65,7 @@
<div class="row">
<div class="col-xs-6 col-sm-6 col-md-6">
<a href="/google_auth/login?next={{next}}" class="btn btn-lg btn-info btn-block">Google</a>
<a href="/oauth/google?next={{next}}" class="btn btn-lg btn-info btn-block">Google</a>
</div>
</div>


@@ -37,7 +37,8 @@ angular.module('redash', [
$routeProvider.when('/dashboard/:dashboardSlug', {
templateUrl: '/views/dashboard.html',
controller: 'DashboardCtrl'
controller: 'DashboardCtrl',
reloadOnSearch: false
});
$routeProvider.when('/queries', {
templateUrl: '/views/queries.html',
@@ -89,4 +90,4 @@ angular.module('redash', [
}
]);
]);


@@ -1,5 +1,5 @@
(function() {
var DashboardCtrl = function($scope, Events, Widget, $routeParams, $http, $timeout, Dashboard) {
var DashboardCtrl = function($scope, Events, Widget, $routeParams, $http, $timeout, $q, Dashboard) {
$scope.refreshEnabled = false;
$scope.refreshRate = 60;
@@ -8,43 +8,49 @@
Events.record(currentUser, "view", "dashboard", dashboard.id);
$scope.$parent.pageTitle = dashboard.name;
var filters = {};
var promises = [];
$scope.dashboard.widgets = _.map($scope.dashboard.widgets, function (row) {
return _.map(row, function (widget) {
var w = new Widget(widget);
if (w.visualization && dashboard.dashboard_filters_enabled) {
var queryFilters = w.getQuery().getQueryResult().getFilters();
_.each(queryFilters, function (filter) {
if (!_.has(filters, filter.name)) {
// TODO: first object should be a copy, otherwise one of the chart filters behaves different than the others.
filters[filter.name] = filter;
filters[filter.name].originFilters = [];
$scope.$watch(function () {
return filter.current
}, function (value) {
_.each(filter.originFilters, function (originFilter) {
originFilter.current = value;
})
});
}
;
// TODO: merge values.
filters[filter.name].originFilters.push(filter);
});
promises.push(w.getQuery().getQueryResultPromise());
}
return w;
});
});
if (dashboard.dashboard_filters_enabled) {
$scope.filters = _.values(filters);
}
$q.all(promises).then(function(queryResults) {
var filters = {};
_.each(queryResults, function(queryResult) {
var queryFilters = queryResult.getFilters();
_.each(queryFilters, function (filter) {
if (!_.has(filters, filter.name)) {
// TODO: first object should be a copy, otherwise one of the chart filters behaves different than the others.
filters[filter.name] = filter;
filters[filter.name].originFilters = [];
$scope.$watch(function () { return filter.current }, function (value) {
_.each(filter.originFilters, function (originFilter) {
originFilter.current = value;
});
});
};
// TODO: merge values.
filters[filter.name].originFilters.push(filter);
});
});
if (dashboard.dashboard_filters_enabled) {
$scope.filters = _.values(filters);
}
});
}, function () {
// error...
// try again. we wrap loadDashboard with throttle so it doesn't happen too often.\
@@ -131,7 +137,7 @@
};
angular.module('redash.controllers')
.controller('DashboardCtrl', ['$scope', 'Events', 'Widget', '$routeParams', '$http', '$timeout', 'Dashboard', DashboardCtrl])
.controller('DashboardCtrl', ['$scope', 'Events', 'Widget', '$routeParams', '$http', '$timeout', '$q', 'Dashboard', DashboardCtrl])
.controller('WidgetCtrl', ['$scope', 'Events', 'Query', WidgetCtrl])
})();


@@ -32,6 +32,14 @@
$scope.newVisualization = undefined;
// @override
Object.defineProperty($scope, 'showDataset', {
get: function() {
return $scope.queryResult && $scope.queryResult.getStatus() == 'done';
}
});
KeyboardShortcuts.bind(shortcuts);
// @override
@@ -109,4 +117,4 @@
'Events', 'growl', '$controller', '$scope', '$location', 'Query',
'Visualization', 'KeyboardShortcuts', QuerySourceCtrl
]);
})();
})();


@@ -16,6 +16,10 @@
$scope.query.data_source_id = $scope.query.data_source_id || dataSources[0].id;
});
// in view mode, latest dataset is always visible
// source mode changes this behavior
$scope.showDataset = true;
$scope.lockButton = function(lock) {
$scope.queryExecuting = lock;
};
@@ -91,16 +95,6 @@
$scope.$parent.pageTitle = $scope.query.name;
});
$scope.$watch('queryResult && queryResult.getError()', function(newError, oldError) {
if (newError == undefined) {
return;
}
if (oldError == undefined && newError != undefined) {
$scope.lockButton(false);
}
});
$scope.$watch('queryResult && queryResult.getData()', function(data, oldData) {
if (!data) {
return;
@@ -114,7 +108,7 @@
return;
}
if (status == "done") {
if (status == 'done') {
if ($scope.query.id &&
$scope.query.latest_query_data_id != $scope.queryResult.getId() &&
$scope.query.query_hash == $scope.queryResult.query_result.query_hash) {
@@ -124,9 +118,12 @@
})
}
$scope.query.latest_query_data_id = $scope.queryResult.getId();
$scope.query.queryResult = $scope.queryResult;
notifications.showNotification("re:dash", $scope.query.name + " updated.");
}
if (status === 'done' || status === 'failed') {
$scope.lockButton(false);
}
});
@@ -144,4 +141,4 @@
angular.module('redash.controllers')
.controller('QueryViewCtrl',
['$scope', 'Events', '$route', '$location', 'notifications', 'growl', 'Query', 'DataSource', QueryViewCtrl]);
})();
})();


@@ -1,5 +1,5 @@
(function () {
var QueryResult = function ($resource, $timeout) {
var QueryResult = function ($resource, $timeout, $q) {
var QueryResultResource = $resource('/api/query_results/:id', {id: '@id'}, {'post': {'method': 'POST'}});
var Job = $resource('/api/jobs/:id', {id: '@id'});
@@ -32,6 +32,7 @@
}
});
this.deferred.resolve(this);
} else if (this.job.status == 3) {
this.status = "processing";
} else {
@@ -40,6 +41,7 @@
}
function QueryResult(props) {
this.deferred = $q.defer();
this.job = {};
this.query_result = {};
this.status = "waiting";
@@ -221,7 +223,7 @@
if (this.columns == undefined && this.query_result.data) {
this.columns = this.query_result.data.columns;
}
return this.columns;
}
@@ -349,6 +351,10 @@
});
return queryResult;
};
QueryResult.prototype.toPromise = function() {
return this.deferred.promise;
}
QueryResult.get = function (data_source_id, query, ttl) {
@@ -389,24 +395,30 @@
ttl = this.ttl;
}
var queryResult = null;
if (this.latest_query_data && ttl != 0) {
if (!this.queryResult) {
this.queryResult = new QueryResult({'query_result': this.latest_query_data});
}
queryResult = this.queryResult;
} else if (this.latest_query_data_id && ttl != 0) {
this.queryResult = queryResult = QueryResult.getById(this.latest_query_data_id);
if (!this.queryResult) {
this.queryResult = QueryResult.getById(this.latest_query_data_id);
}
} else if (this.data_source_id) {
this.queryResult = queryResult = QueryResult.get(this.data_source_id, this.query, ttl);
this.queryResult = QueryResult.get(this.data_source_id, this.query, ttl);
}
return queryResult;
return this.queryResult;
};
Query.prototype.getQueryResultPromise = function() {
return this.getQueryResult().toPromise();
}
return Query;
};
var DataSource = function ($resource) {
var DataSourceResource = $resource('/api/data_sources/:id', {id: '@id'}, {'get': {'method': 'GET', 'cache': true, 'isArray': true}});
@@ -435,7 +447,7 @@
}
angular.module('redash.services')
.factory('QueryResult', ['$resource', '$timeout', QueryResult])
.factory('QueryResult', ['$resource', '$timeout', '$q', QueryResult])
.factory('Query', ['$resource', 'QueryResult', 'DataSource', Query])
.factory('DataSource', ['$resource', DataSource])
.factory('Widget', ['$resource', 'Query', Widget]);


@@ -55,7 +55,7 @@
}];
};
var VisualizationRenderer = function (Visualization) {
var VisualizationRenderer = function ($location, Visualization) {
return {
restrict: 'E',
scope: {
@@ -70,10 +70,44 @@
link: function (scope) {
scope.select2Options = {
width: '50%'
};
function readURL() {
var searchFilters = angular.fromJson($location.search().filters);
if (searchFilters) {
_.forEach(scope.filters, function(filter) {
var value = searchFilters[filter.friendlyName];
if (value) {
filter.current = value;
}
});
}
}
function updateURL(filters) {
var current = {};
_.each(filters, function(filter) {
if (filter.current) {
current[filter.friendlyName] = filter.current;
}
});
var newSearch = angular.extend($location.search(), {
filters: angular.toJson(current)
});
$location.search(newSearch);
}
scope.$watch('queryResult && queryResult.getFilters()', function (filters) {
if (filters) {
scope.filters = filters;
if (filters.length && false) {
readURL();
// start watching for changes and update URL
scope.$watch('filters', updateURL, true);
}
}
});
}
@@ -172,7 +206,7 @@
angular.module('redash.visualization', [])
.provider('Visualization', VisualizationProvider)
.directive('visualizationRenderer', ['Visualization', VisualizationRenderer])
.directive('visualizationRenderer', ['$location', 'Visualization', VisualizationRenderer])
.directive('visualizationOptionsEditor', ['Visualization', VisualizationOptionsEditor])
.directive('filters', Filters)
.directive('editVisulatizationForm', ['Events', 'Visualization', 'growl', EditVisualizationForm])


@@ -33,7 +33,7 @@
$scope.chartSeries = [];
$scope.chartOptions = {};
var reloadData = _.throttle(function(data) {
var reloadData = function(data) {
if (!data || ($scope.queryResult && $scope.queryResult.getData()) == null) {
$scope.chartSeries.splice(0, $scope.chartSeries.length);
} else {
@@ -49,8 +49,8 @@
}
$scope.chartSeries.push(_.extend(s, additional));
});
}
}, 500);
};
};
$scope.$watch('options', function (chartOptions) {
if (chartOptions) {
@@ -87,6 +87,8 @@
'Pie': 'pie'
};
scope.globalSeriesType = 'column';
scope.stackingOptions = {
"None": "none",
"Normal": "normal",
@@ -120,6 +122,13 @@
var chartOptionsUnwatch = null,
columnsWatch = null;
scope.$watch('globalSeriesType', function(type, old) {
if (type && old && type !== old && scope.visualization.options.seriesOptions) {
_.each(scope.visualization.options.seriesOptions, function(sOptions) {
sOptions.type = type;
});
}
});
scope.$watch('visualization.type', function (visualizationType) {
if (visualizationType == 'CHART') {
if (scope.visualization.options.series.stacking === null) {
@@ -130,11 +139,28 @@
scope.stacking = scope.visualization.options.series.stacking;
}
columnsWatch = scope.$watch('queryResult.getId()', function(id) {
if (!id) {
return;
}
var refreshSeries = function() {
scope.series = _.map(scope.queryResult.getChartData(scope.visualization.options.columnMapping), function (s) { return s.name; });
// TODO: remove uneeded ones?
if (scope.visualization.options.seriesOptions == undefined) {
scope.visualization.options.seriesOptions = {
type: scope.globalSeriesType
};
};
_.each(scope.series, function(s, i) {
if (scope.visualization.options.seriesOptions[s] == undefined) {
scope.visualization.options.seriesOptions[s] = {'type': 'column', 'yAxis': 0};
}
scope.visualization.options.seriesOptions[s].zIndex = i;
});
scope.zIndexes = _.range(scope.series.length);
scope.yAxes = [[0, 'left'], [1, 'right']];
};
var initColumnMapping = function() {
scope.columns = scope.queryResult.getColumns();
if (scope.visualization.options.columnMapping == undefined) {
@@ -162,31 +188,23 @@
scope.columnTypeSelection[column.name] = scope.visualization.options.columnMapping[column.name] = 'unused';
}
});
};
columnsWatch = scope.$watch('queryResult.getId()', function(id) {
if (!id) {
return;
}
initColumnMapping();
refreshSeries();
});
scope.$watchCollection('columnTypeSelection', function(selections) {
_.each(scope.columnTypeSelection, function(type, name) {
scope.visualization.options.columnMapping[name] = type;
});
});
scope.$watchCollection('visualization.options.columnMapping', function (chartOptions) {
scope.series = _.map(scope.query.getQueryResult().getChartData(scope.visualization.options.columnMapping), function (s) { return s.name; });
// TODO: remove uneeded ones?
if (scope.visualization.options.seriesOptions == undefined) {
scope.visualization.options.seriesOptions = {};
};
_.each(scope.series, function(s, i) {
if (scope.visualization.options.seriesOptions[s] == undefined) {
scope.visualization.options.seriesOptions[s] = {'type': 'column', 'yAxis': 0};
}
scope.visualization.options.seriesOptions[s].zIndex = i;
});
scope.zIndexes = _.range(scope.series.length);
scope.yAxes = [[0, 'left'], [1, 'right']];
refreshSeries();
});
chartOptionsUnwatch = scope.$watch("stacking", function (stacking) {
@@ -223,4 +241,4 @@
}
}
});
}());
}());


@@ -245,6 +245,9 @@ to add those CSS styles here. */
background-color: #FF8080;
border-radius: 50%;
}
.nav-tabs > li.rd-tab-btn {
float: right;
}
/* light version of bootstrap's form-control */
.rd-form-control {
@@ -268,6 +271,10 @@ pivot-table-renderer > table, grid-renderer > div, visualization-renderer > div
overflow: auto;
}
.rd-widget-textbox p {
margin-bottom: 0;
}
.iframe-container {
height: 100%;
}
@@ -280,4 +287,4 @@ use this class when you need to keep the original display value
.rd-hidden-xs {
display: none !important;
}
}
}


@@ -53,15 +53,20 @@
</div>
</div>
<div class="panel panel-default" ng-if="type=='textbox'" ng-mouseenter="showControls = true" ng-mouseleave="showControls = false">
<div class="panel panel-default rd-widget-textbox" ng-if="type=='textbox'" ng-mouseenter="showControls = true" ng-mouseleave="showControls = false">
<div class="panel-body">
<p ng-bind-html="widget.text | markdown"></p>
<span class="pull-right" ng-show="showControls">
<button type="button" class="btn btn-default btn-xs" ng-show="dashboard.canEdit()" ng-click="deleteWidget()" title="Remove Widget"><span class="glyphicon glyphicon-trash"></span></button>
</span>
<div class="row">
<div class="col-lg-11">
<p ng-bind-html="widget.text | markdown"></p>
</div>
<div class="col-lg-1">
<span class="pull-right" ng-show="showControls">
<button type="button" class="btn btn-default btn-xs" ng-show="dashboard.canEdit()" ng-click="deleteWidget()" title="Remove Widget"><span class="glyphicon glyphicon-trash"></span></button>
</span>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>


@@ -142,7 +142,7 @@
<div class="alert alert-danger" ng-show="queryResult.getError()">Error running query: <strong>{{queryResult.getError()}}</strong></div>
<!-- tabs and data -->
<div ng-show="queryResult.getStatus() == 'done'">
<div ng-show="showDataset">
<div class="row">
<div class="col-lg-12">
<ul class="nav nav-tabs">
@@ -152,6 +152,7 @@
<span class="remove" ng-click="deleteVisualization($event, vis)" ng-show="canEdit"> &times;</span>
</rd-tab>
<rd-tab tab-id="add" name="&plus; New" removeable="true" ng-show="canEdit"></rd-tab>
<li ng-if="!sourceMode" class="rd-tab-btn"><button class="btn btn-sm btn-default" ng-click="executeQuery()" ng-disabled="queryExecuting" title="Refresh Dataset"><span class="glyphicon glyphicon-refresh"></span></button></li>
</ul>
</div>
</div>
@@ -170,7 +171,7 @@
<edit-visulatization-form visualization="vis" query="query" query-result="queryResult" ng-show="canEdit"></edit-visulatization-form>
</div>
<div ng-show="selectedTab == 'add'">
<div ng-if="canEdit" ng-show="selectedTab == 'add'">
<visualization-renderer visualization="newVisualization" query-result="queryResult"></visualization-renderer>
<edit-visulatization-form visualization="newVisualization" query="query" query-result="queryResult" ng-show="canEdit" open-editor="true" on-new-success="setVisualizationTab"></edit-visulatization-form>
</div>


@@ -18,6 +18,15 @@
class="form-control"></select>
</div>
</div>
<div class="form-group">
<label class="control-label col-sm-2">Series Type</label>
<div class="col-sm-10">
<select required ng-options="value as key for (key, value) in seriesTypes"
ng-model="globalSeriesType" class="form-control"></select>
</div>
</div>
</div>
</div>
@@ -88,4 +97,4 @@
</div>
</div>
</div>
</div>


@@ -24,4 +24,4 @@
</div>
</form>
</div>
</div>


@@ -11,7 +11,7 @@
"moment": "2.1.0",
"angular-ui-bootstrap": "0.5.0",
"angular-ui-codemirror": "0.0.5",
"highcharts": "3.0.1",
"highcharts": "3.0.10",
"underscore": "1.5.1",
"angular-resource": "1.2.15",
"angular-growl": "0.3.1",


@@ -1,58 +0,0 @@
// Karma E2E configuration
// base path, that will be used to resolve files and exclude
basePath = '';
// list of files / patterns to load in the browser
files = [
ANGULAR_SCENARIO,
ANGULAR_SCENARIO_ADAPTER,
'test/e2e/**/*.js'
];
// list of files to exclude
exclude = [];
// test results reporter to use
// possible values: dots || progress || growl
reporters = ['progress'];
// web server port
port = 8080;
// cli runner port
runnerPort = 9100;
// enable / disable colors in the output (reporters and logs)
colors = true;
// level of logging
// possible values: LOG_DISABLE || LOG_ERROR || LOG_WARN || LOG_INFO || LOG_DEBUG
logLevel = LOG_INFO;
// enable / disable watching file and executing tests whenever any file changes
autoWatch = false;
// Start these browsers, currently available:
// - Chrome
// - ChromeCanary
// - Firefox
// - Opera
// - Safari (only Mac)
// - PhantomJS
// - IE (only Windows)
browsers = ['Chrome'];
// If browser does not capture in given timeout [ms], kill it
captureTimeout = 5000;
// Continuous Integration mode
// if true, it capture browsers, run tests and exit
singleRun = false;
// Uncomment the following lines if you are using grunt's server to run the tests
// proxies = {
// '/': 'http://localhost:9000/'
// };
// URL root prevent conflicts with the site root
// urlRoot = '_karma_';


@@ -1,56 +0,0 @@
// Karma configuration
// base path, that will be used to resolve files and exclude
basePath = '';
// list of files / patterns to load in the browser
files = [
JASMINE,
JASMINE_ADAPTER,
'app/bower_components/angular/angular.js',
'app/bower_components/angular-mocks/angular-mocks.js',
'app/scripts/*.js',
'app/scripts/**/*.js',
'test/mock/**/*.js',
'test/spec/**/*.js'
];
// list of files to exclude
exclude = [];
// test results reporter to use
// possible values: dots || progress || growl
reporters = ['progress'];
// web server port
port = 8080;
// cli runner port
runnerPort = 9100;
// enable / disable colors in the output (reporters and logs)
colors = true;
// level of logging
// possible values: LOG_DISABLE || LOG_ERROR || LOG_WARN || LOG_INFO || LOG_DEBUG
logLevel = LOG_INFO;
// enable / disable watching file and executing tests whenever any file changes
autoWatch = false;
// Start these browsers, currently available:
// - Chrome
// - ChromeCanary
// - Firefox
// - Opera
// - Safari (only Mac)
// - PhantomJS
// - IE (only Windows)
browsers = ['Chrome'];
// If browser does not capture in given timeout [ms], kill it
captureTimeout = 5000;
// Continuous Integration mode
// if true, it capture browsers, run tests and exit
singleRun = false;


@@ -1,38 +1,39 @@
{
"name": "rd-ui",
"version": "0.1.0",
"dependencies": {
},
"name": "rdui",
"version": "0.0.0",
"dependencies": {},
"devDependencies": {
"grunt": "git+https://github.com/gruntjs/grunt.git#08a3af5",
"grunt-contrib-copy": "~0.4.1",
"grunt-contrib-concat": "~0.3.0",
"grunt-contrib-coffee": "~0.7.0",
"grunt-contrib-uglify": "~0.2.0",
"grunt-contrib-compass": "~0.5.0",
"grunt-contrib-jshint": "~0.6.0",
"grunt-contrib-cssmin": "~0.6.0",
"grunt-contrib-connect": "~0.3.0",
"grunt-contrib-clean": "~0.5.0",
"grunt-contrib-htmlmin": "~0.1.3",
"grunt-contrib-imagemin": "~0.2.0",
"grunt-contrib-watch": "~0.5.2",
"grunt-autoprefixer": "~0.2.0",
"grunt-usemin": "~0.1.11",
"grunt-svgmin": "~0.2.0",
"grunt-rev": "~0.1.0",
"grunt-open": "~0.2.0",
"grunt-concurrent": "~0.3.0",
"load-grunt-tasks": "~0.1.0",
"connect-livereload": "~0.2.0",
"grunt-google-cdn": "~0.2.0",
"grunt-ngmin": "~0.0.2",
"time-grunt": "~0.1.0",
"bower": "~1.2.7",
"grunt-cli": "~0.1.9"
"grunt": "^0.4.1",
"grunt-autoprefixer": "^0.7.3",
"grunt-concurrent": "^0.5.0",
"grunt-contrib-clean": "^0.5.0",
"grunt-contrib-concat": "^0.4.0",
"grunt-contrib-connect": "^0.7.1",
"grunt-contrib-copy": "^0.5.0",
"grunt-contrib-cssmin": "^0.9.0",
"grunt-contrib-htmlmin": "^0.3.0",
"grunt-contrib-imagemin": "^0.7.0",
"grunt-contrib-jshint": "^0.10.0",
"grunt-contrib-uglify": "^0.4.0",
"grunt-contrib-watch": "^0.6.1",
"grunt-filerev": "^0.2.1",
"grunt-google-cdn": "^0.4.0",
"grunt-newer": "^0.7.0",
"grunt-ngmin": "^0.0.3",
"grunt-svgmin": "^0.4.0",
"grunt-usemin": "^2.1.1",
"grunt-wiredep": "^1.7.0",
"jshint-stylish": "^0.2.0",
"load-grunt-tasks": "^0.4.0",
"time-grunt": "^0.3.1",
"karma-jasmine": "~0.1.5",
"grunt-karma": "~0.8.3",
"karma-phantomjs-launcher": "~0.1.4",
"karma": "~0.12.19",
"karma-ng-html2js-preprocessor": "~0.1.0"
},
"engines": {
"node": ">=0.8.0"
"node": ">=0.10.0"
},
"scripts": {
"test": "grunt test"


@@ -29,6 +29,7 @@
"expect": false,
"inject": false,
"it": false,
"jasmine": false,
"spyOn": false
}
}

rd_ui/test/karma.conf.js (new file)

@@ -0,0 +1,131 @@
// Karma configuration
// http://karma-runner.github.io/0.12/config/configuration-file.html
// Generated on 2014-07-30 using
// generator-karma 0.8.3
module.exports = function(config) {
'use strict';
config.set({
// enable / disable watching file and executing tests whenever any file changes
autoWatch: true,
// base path, that will be used to resolve files and exclude
basePath: '../',
// testing framework to use (jasmine/mocha/qunit/...)
frameworks: ['jasmine'],
// list of files / patterns to load in the browser
files: [
'app/bower_components/jquery/jquery.js',
'app/bower_components/jquery-ui/ui/jquery-ui.js',
'app/bower_components/angular/angular.js',
'app/bower_components/angular-route/angular-route.js',
'app/bower_components/angular-mocks/angular-mocks.js',
'app/bower_components/bootstrap/js/collapse.js',
'app/bower_components/bootstrap/js/modal.js',
'app/bower_components/angular-resource/angular-resource.js',
'app/bower_components/underscore/underscore.js',
'app/bower_components/moment/moment.js',
'app/bower_components/angular-moment/angular-moment.js',
'app/bower_components/codemirror/lib/codemirror.js',
'app/bower_components/codemirror/addon/edit/matchbrackets.js',
'app/bower_components/codemirror/addon/edit/closebrackets.js',
'app/bower_components/codemirror/mode/sql/sql.js',
'app/bower_components/codemirror/mode/javascript/javascript.js',
'app/bower_components/angular-ui-codemirror/ui-codemirror.js',
'app/bower_components/highcharts/highcharts.js',
'app/bower_components/highcharts/modules/exporting.js',
'app/bower_components/gridster/dist/jquery.gridster.js',
'app/bower_components/angular-growl/build/angular-growl.js',
'app/bower_components/pivottable/dist/pivot.js',
'app/bower_components/cornelius/src/cornelius.js',
'app/bower_components/mousetrap/mousetrap.js',
'app/bower_components/mousetrap/plugins/global-bind/mousetrap-global-bind.js',
'app/bower_components/select2/select2.js',
'app/bower_components/angular-ui-select2/src/select2.js',
'app/bower_components/underscore.string/lib/underscore.string.js',
'app/bower_components/marked/lib/marked.js',
'app/scripts/ng_highchart.js',
'app/scripts/ng_smart_table.js',
'app/scripts/ui-bootstrap-tpls-0.5.0.min.js',
'app/bower_components/bucky/bucky.js',
'app/bower_components/pace/pace.js',
'app/scripts/app.js',
'app/scripts/services/services.js',
'app/scripts/services/resources.js',
'app/scripts/services/notifications.js',
'app/scripts/services/dashboards.js',
'app/scripts/controllers/controllers.js',
'app/scripts/controllers/dashboard.js',
'app/scripts/controllers/admin_controllers.js',
'app/scripts/controllers/query_view.js',
'app/scripts/controllers/query_source.js',
'app/scripts/visualizations/base.js',
'app/scripts/visualizations/chart.js',
'app/scripts/visualizations/cohort.js',
'app/scripts/visualizations/table.js',
'app/scripts/visualizations/pivot.js',
'app/scripts/directives/directives.js',
'app/scripts/directives/query_directives.js',
'app/scripts/directives/dashboard_directives.js',
'app/scripts/filters.js',
'app/views/**/*.html',
'test/mocks/*.js',
'test/unit/*.js'
],
// generate js files from html templates
preprocessors: {
'app/views/**/*.html': 'ng-html2js'
},
// list of files / patterns to exclude
exclude: [],
// web server port
port: 8080,
// Start these browsers, currently available:
// - Chrome
// - ChromeCanary
// - Firefox
// - Opera
// - Safari (only Mac)
// - PhantomJS
// - IE (only Windows)
browsers: [
'PhantomJS'
],
// Which plugins to enable
plugins: [
'karma-phantomjs-launcher',
'karma-jasmine',
'karma-ng-html2js-preprocessor'
],
// Continuous Integration mode
// if true, it capture browsers, run tests and exit
singleRun: false,
colors: true,
// level of logging
// possible values: LOG_DISABLE || LOG_ERROR || LOG_WARN || LOG_INFO || LOG_DEBUG
logLevel: config.LOG_INFO,
// Uncomment the following lines if you are using grunt's server to run the tests
// proxies: {
// '/': 'http://localhost:9000/'
// },
// URL root prevent conflicts with the site root
// urlRoot: '_karma_'
});
};


@@ -0,0 +1,108 @@
featureFlags = [];
currentUser = {
id: 1,
name: 'John Mock',
email: 'john@example.com',
groups: ['default'],
permissions: [],
canEdit: function(object) {
var user_id = object.user_id || (object.user && object.user.id);
return user_id && (user_id == currentUser.id);
},
hasPermission: function(permission) {
return this.permissions.indexOf(permission) != -1;
}
};
angular.module('redashMocks', [])
.value('MockData', {
query: {
"ttl": -1,
"query": "select name from users;",
"id": 1803,
"description": "",
"name": "my test query",
"created_at": "2014-01-07T16:11:31.859528+02:00",
"query_hash": "c89c235bc73e462e9702debc56adc309",
"user": {
"email": "amirn@everything.me",
"id": 48,
"name": "Amir Nissim"
},
"visualizations": [{
"description": "",
"options": {},
"type": "TABLE",
"id": 636,
"name": "Table"
}],
"api_key": "123456789",
"data_source_id": 1,
"latest_query_data_id": 106632,
"latest_query_data": {
"retrieved_at": "2014-07-29T10:49:10.951364+03:00",
"query_hash": "c89c235bc73e462e9702debc56adc309",
"query": "select name from users;",
"runtime": 0.0139260292053223,
"data": {
"rows": [{
"name": "Amir Nissim"
}, {
"name": "Arik Fraimovich"
}],
"columns": [{
"friendly_name": "name",
"type": null,
"name": "name"
}, {
"friendly_name": "mail::filter",
"type": null,
"name": "mail::filter"
}]
},
"id": 106632,
"data_source_id": 1
}
},
queryResult: {
"job": {},
"query_result": {
"retrieved_at": "2014-08-04T13:33:45.563486+03:00",
"query_hash": "9951c38c9cf00e6ee8aecce026b51c19",
"query": "select name as \"name::filter\" from users",
"runtime": 0.00896096229553223,
"data": {
"rows": [],
"columns": [{
"friendly_name": "name::filter",
"type": null,
"name": "name::filter"
}]
},
"id": 106673,
"data_source_id": 1
},
"status": "done",
"filters": [],
"filterFreeze": "test@example.com",
"updatedAt": "2014-08-05T13:13:40.833Z",
"columnNames": ["name::filter"],
"filteredData": [{
"name::filter": "test@example.com"
}],
"columns": [{
"friendly_name": "name::filter",
"type": null,
"name": "name::filter"
}]
}
});


@@ -0,0 +1,5 @@
describe('example test', function() {
it('should expect the obvious', function() {
expect(0).toBe(0);
});
});


@@ -0,0 +1,34 @@
'use strict';
describe('QueryViewCtrl', function() {
var scope;
var MockData;
beforeEach(module('redash', 'redashMocks'));
beforeEach(inject(function($injector, $controller, $rootScope, Query, _MockData_) {
MockData = _MockData_;
scope = $rootScope.$new();
var route = {
current: {
locals: {
query: new Query(MockData.query)
}
}
};
$controller('QueryViewCtrl', {$scope: scope, $route: route});
}));
it('should have a query', function() {
expect(scope.query).toBeDefined();
});
it('should update the executing state', function() {
expect(scope.queryExecuting).toBe(false);
scope.executeQuery();
expect(scope.queryExecuting).toBe(true);
});
});


@@ -0,0 +1,89 @@
'use strict';
describe('VisualizationRenderer', function() {
var element;
var scope;
var filters = [{
"name": "name::filter",
"friendlyName": "Name",
"values": ["test@example.com", "amirn@example.com"],
"multiple": false
}];
beforeEach(module('redash', 'redashMocks'));
// loading templates
beforeEach(module('app/views/grid_renderer.html',
'app/views/visualizations/filters.html'));
// serving templates
beforeEach(inject(function($httpBackend, $templateCache) {
$httpBackend.whenGET('/views/grid_renderer.html')
.respond($templateCache.get('app/views/grid_renderer.html'));
$httpBackend.whenGET('/views/visualizations/filters.html')
.respond($templateCache.get('app/views/visualizations/filters.html'));
}));
// directive setup
beforeEach(inject(function($rootScope, $compile, MockData, QueryResult) {
var qr = new QueryResult(MockData.queryResult);
qr.filters = filters;
$rootScope.queryResult = qr;
element = angular.element(
'<visualization-renderer query-result="queryResult">' +
'</visualization-renderer>');
}));
describe('scope', function() {
beforeEach(inject(function($rootScope, $compile) {
$compile(element)($rootScope);
// we will test the isolated scope of the directive
scope = element.isolateScope();
scope.$digest();
}));
it('should have filters', function() {
expect(scope.filters).toBeDefined();
});
});
/*describe('URL binding', function() {
beforeEach(inject(function($rootScope, $compile, $location) {
spyOn($location, 'search').andCallThrough();
// set initial search
var initialSearch = {};
initialSearch[filters[0].friendlyName] = filters[0].values[0];
$location.search('filters', initialSearch);
$compile(element)($rootScope);
// we will test the isolated scope of the directive
scope = element.isolateScope();
scope.$digest();
}));
it('should update scope from URL',
inject(function($location) {
expect($location.search).toHaveBeenCalled();
expect(scope.filters[0].current).toEqual(filters[0].values[0]);
}));
it('should update URL from scope',
inject(function($location) {
scope.filters[0].current = 'newValue';
scope.$digest();
var searchFilters = angular.fromJson($location.search().filters);
expect(searchFilters[filters[0].friendlyName]).toEqual('newValue');
}));
});*/
});

@@ -6,10 +6,8 @@ import logging
from flask import request, make_response, redirect, url_for
from flask.ext.login import LoginManager, login_user, current_user
from flask.ext.googleauth import GoogleAuth, login
from werkzeug.contrib.fixers import ProxyFix
from redash import models, settings
from redash import models, settings, google_oauth
login_manager = LoginManager()
logger = logging.getLogger('authentication')
@@ -57,48 +55,15 @@ class HMACAuthentication(object):
return decorated
def validate_email(email):
if not settings.GOOGLE_APPS_DOMAIN:
return True
return email in settings.ALLOWED_EXTERNAL_USERS or email.endswith("@%s" % settings.GOOGLE_APPS_DOMAIN)
def create_and_login_user(app, user):
if not validate_email(user.email):
return
try:
user_object = models.User.get(models.User.email == user.email)
if user_object.name != user.name:
logger.debug("Updating user name (%r -> %r)", user_object.name, user.name)
user_object.name = user.name
user_object.save()
except models.User.DoesNotExist:
logger.debug("Creating user object (%r)", user.name)
user_object = models.User.create(name=user.name, email=user.email, groups = models.User.DEFAULT_GROUPS)
login_user(user_object, remember=True)
login.connect(create_and_login_user)
@login_manager.user_loader
def load_user(user_id):
return models.User.select().where(models.User.id == user_id).first()
def setup_authentication(app):
if settings.GOOGLE_OPENID_ENABLED:
openid_auth = GoogleAuth(app, url_prefix="/google_auth")
# If we don't have a list of external users, we can use Google's federated login, which limits
# the domain with which you can sign in.
if not settings.ALLOWED_EXTERNAL_USERS and settings.GOOGLE_APPS_DOMAIN:
openid_auth._OPENID_ENDPOINT = "https://www.google.com/a/%s/o8/ud?be=o8" % settings.GOOGLE_APPS_DOMAIN
login_manager.init_app(app)
login_manager.anonymous_user = models.AnonymousUser
app.wsgi_app = ProxyFix(app.wsgi_app)
app.secret_key = settings.COOKIE_SECRET
app.register_blueprint(google_oauth.blueprint)
return HMACAuthentication()
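For orientation, here is a stripped-down sketch of the wiring that setup_authentication() performs after this change, with placeholder values standing in for redash.settings; the google_oauth import is commented out because that module is only introduced later in this diff.

from flask import Flask
from flask_login import LoginManager   # flask-login 0.2.9 (imported as flask.ext.login in the codebase)

app = Flask(__name__)
app.secret_key = 'placeholder-cookie-secret'       # the real code uses settings.COOKIE_SECRET

login_manager = LoginManager()

@login_manager.user_loader
def load_user(user_id):
    # the real loader runs a peewee query:
    # models.User.select().where(models.User.id == user_id).first()
    return None

login_manager.init_app(app)
# login_manager.anonymous_user = models.AnonymousUser    # as in the hunk above
# app.register_blueprint(google_oauth.blueprint)         # mounts the /oauth/google routes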

8
redash/cache.py Normal file
@@ -0,0 +1,8 @@
from flask import make_response
from functools import update_wrapper
ONE_YEAR = 60 * 60 * 24 * 365.25
headers = {
'Cache-Control': 'max-age=%d' % ONE_YEAR
}
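A minimal usage sketch for these module-level cache headers: the controllers below merge them into responses via headers.update(cache_headers). The route and payload here are made up for illustration.

from flask import Flask, make_response

app = Flask(__name__)

ONE_YEAR = 60 * 60 * 24 * 365.25
cache_headers = {'Cache-Control': 'max-age=%d' % ONE_YEAR}

@app.route('/example')
def example():
    headers = {'Content-Type': 'application/json'}
    headers.update(cache_headers)   # immutable query results can be cached aggressively
    return make_response('{"ok": true}', 200, headers)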

@@ -26,6 +26,7 @@ from redash.wsgi import app, auth, api
import logging
from tasks import QueryTask
from cache import headers as cache_headers
@app.route('/ping', methods=['GET'])
def ping():
@@ -68,8 +69,7 @@ def login():
return redirect(request.args.get('next') or '/')
if not settings.PASSWORD_LOGIN_ENABLED:
blueprint = app.extensions['googleauth'].blueprint
return redirect(url_for("%s.login" % blueprint.name, next=request.args.get('next')))
return redirect(url_for("google_oauth.authorize", next=request.args.get('next')))
if request.method == 'POST':
user = models.User.select().where(models.User.email == request.form['username']).first()
@@ -83,7 +83,7 @@ def login():
analytics=settings.ANALYTICS,
next=request.args.get('next'),
username=request.form.get('username', ''),
show_google_openid=settings.GOOGLE_OPENID_ENABLED)
show_google_openid=settings.GOOGLE_OAUTH_ENABLED)
@app.route('/logout')
@@ -110,7 +110,6 @@ def status_api():
manager_status = redis_connection.hgetall('redash:status')
status['manager'] = manager_status
status['manager']['queue_size'] = redis_connection.llen('queries') + redis_connection.llen('scheduled_queries')
status['manager']['outdated_queries_count'] = models.Query.outdated_queries().count()
queues = {}
@@ -178,7 +177,7 @@ api.add_resource(MetricsAPI, '/api/metrics/v1/send', endpoint='metrics')
class DataSourceListAPI(BaseResource):
def get(self):
data_sources = [ds.to_dict() for ds in models.DataSource.select()]
data_sources = [ds.to_dict() for ds in models.DataSource.all()]
return data_sources
api.add_resource(DataSourceListAPI, '/api/data_sources', endpoint='data_sources')
@@ -294,11 +293,11 @@ class QueryListAPI(BaseResource):
query.create_default_visualizations()
return query.to_dict(with_result=False)
return query.to_dict()
@require_permission('view_query')
def get(self):
return [q.to_dict(with_result=False, with_stats=True) for q in models.Query.all_queries()]
return [q.to_dict(with_stats=True) for q in models.Query.all_queries()]
class QueryAPI(BaseResource):
@@ -318,7 +317,7 @@ class QueryAPI(BaseResource):
query = models.Query.get_by_id(query_id)
return query.to_dict(with_result=False, with_visualizations=True)
return query.to_dict(with_visualizations=True)
@require_permission('view_query')
def get(self, query_id):
@@ -392,7 +391,7 @@ class QueryResultListAPI(BaseResource):
'error': 'Access denied for table(s): %s' % (metadata.used_tables)
}
}
models.ActivityLog(
user=self.current_user,
type=models.ActivityLog.QUERY_EXECUTION,
@@ -413,47 +412,52 @@ class QueryResultListAPI(BaseResource):
class QueryResultAPI(BaseResource):
@require_permission('view_query')
def get(self, query_result_id):
query_result = models.QueryResult.get_by_id(query_result_id)
if query_result:
return {'query_result': query_result.to_dict()}
else:
abort(404)
@staticmethod
def csv_response(query_result):
s = cStringIO.StringIO()
query_data = json.loads(query_result.data)
writer = csv.DictWriter(s, fieldnames=[col['name'] for col in query_data['columns']])
writer.writer = utils.UnicodeWriter(s)
writer.writeheader()
for row in query_data['rows']:
for k, v in row.iteritems():
if isinstance(v, numbers.Number) and (v > 1000 * 1000 * 1000 * 100):
row[k] = datetime.datetime.fromtimestamp(v/1000.0)
writer.writerow(row)
headers = {'Content-Type': "text/csv; charset=UTF-8"}
headers.update(cache_headers)
return make_response(s.getvalue(), 200, headers)
class CsvQueryResultsAPI(BaseResource):
@require_permission('view_query')
def get(self, query_id, query_result_id=None):
if not query_result_id:
def get(self, query_id=None, query_result_id=None, filetype='json'):
if query_result_id is None and query_id is not None:
query = models.Query.get(models.Query.id == query_id)
if query:
query_result_id = query._data['latest_query_data']
query_result = query_result_id and models.QueryResult.get_by_id(query_result_id)
if query_result_id:
query_result = models.QueryResult.get_by_id(query_result_id)
if query_result:
s = cStringIO.StringIO()
if filetype == 'json':
data = json.dumps({'query_result': query_result.to_dict()}, cls=utils.JSONEncoder)
return make_response(data, 200, cache_headers)
else:
return self.csv_response(query_result)
query_data = json.loads(query_result.data)
writer = csv.DictWriter(s, fieldnames=[col['name'] for col in query_data['columns']])
writer.writer = utils.UnicodeWriter(s)
writer.writeheader()
for row in query_data['rows']:
for k, v in row.iteritems():
if isinstance(v, numbers.Number) and (v > 1000 * 1000 * 1000 * 100):
row[k] = datetime.datetime.fromtimestamp(v/1000.0)
writer.writerow(row)
return make_response(s.getvalue(), 200, {'Content-Type': "text/csv; charset=UTF-8"})
else:
abort(404)
api.add_resource(CsvQueryResultsAPI, '/api/queries/<query_id>/results/<query_result_id>.csv',
'/api/queries/<query_id>/results.csv',
endpoint='csv_query_results')
api.add_resource(QueryResultListAPI, '/api/query_results', endpoint='query_results')
api.add_resource(QueryResultAPI, '/api/query_results/<query_result_id>', endpoint='query_result')
api.add_resource(QueryResultAPI,
'/api/query_results/<query_result_id>',
'/api/queries/<query_id>/results.<filetype>',
'/api/queries/<query_id>/results/<query_result_id>.<filetype>',
endpoint='query_result')
class JobAPI(BaseResource):
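A hypothetical client-side view of the merged endpoint, reusing the query id from the mock data above. The host and the pre-authenticated session are assumptions; only the URL shapes come from the api.add_resource calls in this hunk.

import requests

BASE = 'http://localhost:5000'      # assumed local instance
session = requests.Session()        # assumed to already carry a valid login cookie

# latest result set of query 1803 as JSON
resp = session.get(BASE + '/api/queries/1803/results.json')
print(resp.json()['query_result']['data']['rows'])

# the same result set rendered as CSV
resp = session.get(BASE + '/api/queries/1803/results.csv')
print(resp.text.splitlines()[0])    # header row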

@@ -18,7 +18,7 @@ def mysql(connection_string):
def query_runner(query):
connections_params = [entry.split('=')[1] for entry in connection_string.split(';')]
connection = MySQLdb.connect(*connections_params)
connection = MySQLdb.connect(*connections_params, charset="utf8", use_unicode=True)
cursor = connection.cursor()
logging.debug("mysql got query: %s", query)
@@ -61,4 +61,4 @@ def mysql(connection_string):
return json_data, error
return query_runner
return query_runner
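The connection string is split into positional arguments for MySQLdb.connect; a small sketch of that parsing, where the key order (host;user;passwd;db) is an assumption used only for illustration and the actual connect call is left commented out.

connection_string = "host=127.0.0.1;user=redash;passwd=secret;db=redash"   # made-up credentials
connections_params = [entry.split('=')[1] for entry in connection_string.split(';')]
print(connections_params)   # ['127.0.0.1', 'redash', 'secret', 'redash']

# import MySQLdb
# connection = MySQLdb.connect(*connections_params, charset="utf8", use_unicode=True)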

@@ -88,11 +88,12 @@ def pg(connection_string):
json_data = json.dumps(data, cls=JSONEncoder)
error = None
cursor.close()
except (select.error, OSError, psycopg2.OperationalError) as e:
except (select.error, OSError) as e:
logging.exception(e)
error = "Query interrupted. Please retry."
json_data = None
except psycopg2.DatabaseError as e:
logging.exception(e)
json_data = None
error = e.message
except KeyboardInterrupt:
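A toy illustration of the new error-handling split: connection-level interruptions keep the generic retry message, while psycopg2.DatabaseError now surfaces the database's own message. psycopg2 2.5.2 is assumed installed; the helper below is not part of the codebase.

import select
import psycopg2

def error_message(exc):
    # mirrors the except clauses in the hunk above
    try:
        raise exc
    except (select.error, OSError):
        return "Query interrupted. Please retry."
    except psycopg2.DatabaseError as e:
        return e.message or str(e)

print(error_message(OSError("connection reset by peer")))
print(error_message(psycopg2.DatabaseError("permission denied for relation users")))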

81
redash/google_oauth.py Normal file
@@ -0,0 +1,81 @@
import logging
from flask.ext.login import login_user
import requests
from flask import redirect, url_for, Blueprint
from flask_oauth import OAuth
from redash import models, settings
logger = logging.getLogger('google_oauth')
oauth = OAuth()
request_token_params = {'scope': 'https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile', 'response_type': 'code'}
if settings.GOOGLE_APPS_DOMAIN:
request_token_params['hd'] = settings.GOOGLE_APPS_DOMAIN
else:
logger.warning("No Google Apps domain defined, all Google accounts allowed.")
google = oauth.remote_app('google',
base_url='https://www.google.com/accounts/',
authorize_url='https://accounts.google.com/o/oauth2/auth',
request_token_url=None,
request_token_params=request_token_params,
access_token_url='https://accounts.google.com/o/oauth2/token',
access_token_method='POST',
access_token_params={'grant_type': 'authorization_code'},
consumer_key=settings.GOOGLE_CLIENT_ID,
consumer_secret=settings.GOOGLE_CLIENT_SECRET)
blueprint = Blueprint('google_oauth', __name__)
def get_user_profile(access_token):
headers = {'Authorization': 'OAuth '+access_token}
response = requests.get('https://www.googleapis.com/oauth2/v1/userinfo', headers=headers)
if response.status_code == 401:
logger.warning("Failed getting user profile (response code 401).")
return None
return response.json()
def create_and_login_user(name, email):
try:
user_object = models.User.get(models.User.email == email)
if user_object.name != name:
logger.debug("Updating user name (%r -> %r)", user_object.name, name)
user_object.name = name
user_object.save()
except models.User.DoesNotExist:
logger.debug("Creating user object (%r)", name)
user_object = models.User.create(name=name, email=email, groups=models.User.DEFAULT_GROUPS)
login_user(user_object, remember=True)
@blueprint.route('/oauth/google', endpoint="authorize")
def login():
# TODO: support next
callback=url_for('.callback', _external=True)
logger.debug("Callback url: %s", callback)
return google.authorize(callback=callback)
@blueprint.route('/oauth/google_callback', endpoint="callback")
@google.authorized_handler
def authorized(resp):
access_token = resp['access_token']
if access_token is None:
logger.warning("Access token missing in call back request.")
return redirect(url_for('login'))
profile = get_user_profile(access_token)
if profile is None:
return redirect(url_for('login'))
create_and_login_user(profile['name'], profile['email'])
return redirect(url_for('index'))
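A standalone sketch of the profile fetch that get_user_profile() performs, with a placeholder token; the userinfo endpoint and the 'OAuth <token>' Authorization header come from the code above, so running it with a fake token simply exercises the 401 branch.

import requests

access_token = 'ya29.placeholder-token'    # a real token arrives via the authorized callback
headers = {'Authorization': 'OAuth ' + access_token}
response = requests.get('https://www.googleapis.com/oauth2/v1/userinfo', headers=headers)

if response.status_code == 401:
    print("token rejected - the user would be redirected back to /login")
else:
    profile = response.json()
    print(profile['name'], profile['email'])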

@@ -78,7 +78,7 @@ class ApiUser(UserMixin):
class Group(BaseModel):
DEFAULT_PERMISSIONS = ['create_dashboard', 'create_query', 'edit_dashboard', 'edit_query',
'view_query', 'view_source', 'execute_query']
id = peewee.PrimaryKeyField()
name = peewee.CharField(max_length=100)
permissions = ArrayField(peewee.CharField, default=DEFAULT_PERMISSIONS)
@@ -139,6 +139,10 @@ class User(BaseModel, UserMixin):
return self._allowed_tables
@classmethod
def get_by_email(cls, email):
return cls.get(cls.email == email)
def __unicode__(self):
return '%r, %r' % (self.name, self.email)
@@ -151,7 +155,7 @@ class User(BaseModel, UserMixin):
class ActivityLog(BaseModel):
QUERY_EXECUTION = 1
id = peewee.PrimaryKeyField()
user = peewee.ForeignKeyField(User)
type = peewee.IntegerField()
@@ -193,6 +197,10 @@ class DataSource(BaseModel):
'type': self.type
}
@classmethod
def all(cls):
return cls.select().order_by(cls.id.asc())
class QueryResult(BaseModel):
id = peewee.PrimaryKeyField()
@@ -277,7 +285,7 @@ class Query(BaseModel):
type="TABLE", options="{}")
table_visualization.save()
def to_dict(self, with_result=True, with_stats=False, with_visualizations=False, with_user=True):
def to_dict(self, with_stats=False, with_visualizations=False, with_user=True):
d = {
'id': self.id,
'latest_query_data_id': self._data.get('latest_query_data', None),
@@ -307,9 +315,6 @@ class Query(BaseModel):
d['visualizations'] = [vis.to_dict(with_query=False)
for vis in self.visualizations]
if with_result and self.latest_query_data:
d['latest_query_data'] = self.latest_query_data.to_dict()
return d
@classmethod
@@ -383,13 +388,11 @@ class Dashboard(BaseModel):
layout = json.loads(self.layout)
if with_widgets:
widgets = Widget.select(Widget, Visualization, Query, QueryResult, User)\
widgets = Widget.select(Widget, Visualization, Query, User)\
.where(Widget.dashboard == self.id)\
.join(Visualization, join_type=peewee.JOIN_LEFT_OUTER)\
.join(Query, join_type=peewee.JOIN_LEFT_OUTER)\
.join(User, join_type=peewee.JOIN_LEFT_OUTER)\
.switch(Query)\
.join(QueryResult, join_type=peewee.JOIN_LEFT_OUTER)
.join(User, join_type=peewee.JOIN_LEFT_OUTER)
widgets = {w.id: w.to_dict() for w in widgets}
# The following is a workaround for cases when the widget object gets deleted without the dashboard layout
@@ -504,11 +507,10 @@ class Widget(BaseModel):
class Event(BaseModel):
# user, action, object_type, object_id, additional_properties
user = peewee.ForeignKeyField(User, related_name="events")
action = peewee.CharField()
object_type = peewee.CharField()
object_id = peewee.IntegerField()
object_id = peewee.CharField(null=True)
additional_properties = peewee.TextField(null=True)
created_at = peewee.DateTimeField(default=datetime.datetime.now)
@@ -539,4 +541,4 @@ def create_db(create_tables, drop_tables):
if create_tables and not model.table_exists():
model.create_table()
db.close_db(None)
db.close_db(None)
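A self-contained peewee sketch (in-memory SQLite, simplified model) of the ordering convention behind the new DataSource.all() classmethod added above; the real model has more fields and lives on the configured PostgreSQL database.

import peewee

db = peewee.SqliteDatabase(':memory:')

class DataSource(peewee.Model):
    name = peewee.CharField()

    class Meta:
        database = db

    @classmethod
    def all(cls):
        # same idea as the diff: a stable, id-ordered listing for the API to return
        return cls.select().order_by(cls.id.asc())

DataSource.create_table()
DataSource.create(name='events db')
DataSource.create(name='analytics db')
print([ds.name for ds in DataSource.all()])   # ['events db', 'analytics db']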

@@ -59,12 +59,15 @@ CELERY_FLOWER_URL = os.environ.get("REDASH_CELERY_FLOWER_URL", "/flower")
# Google Apps domain to allow access from; any user with email in this Google Apps will be allowed
# access
GOOGLE_APPS_DOMAIN = os.environ.get("REDASH_GOOGLE_APPS_DOMAIN", "")
GOOGLE_OPENID_ENABLED = parse_boolean(os.environ.get("REDASH_GOOGLE_OPENID_ENABLED", "true"))
PASSWORD_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_PASSWORD_LOGIN_ENABLED", "false"))
ALLOWED_EXTERNAL_USERS = array_from_string(os.environ.get("REDASH_ALLOWED_EXTERNAL_USERS", ''))
GOOGLE_CLIENT_ID = os.environ.get("REDASH_GOOGLE_CLIENT_ID", "")
GOOGLE_CLIENT_SECRET = os.environ.get("REDASH_GOOGLE_CLIENT_SECRET", "")
GOOGLE_OAUTH_ENABLED = GOOGLE_CLIENT_ID and GOOGLE_CLIENT_SECRET
PASSWORD_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_PASSWORD_LOGIN_ENABLED", "true"))
STATIC_ASSETS_PATH = fix_assets_path(os.environ.get("REDASH_STATIC_ASSETS_PATH", "../rd_ui/app/"))
WORKERS_COUNT = int(os.environ.get("REDASH_WORKERS_COUNT", "2"))
JOB_EXPIRY_TIME = int(os.environ.get("REDASH_JOB_EXPIRY_TIME", 3600*24))
JOB_EXPIRY_TIME = int(os.environ.get("REDASH_JOB_EXPIRY_TIME", 3600*6))
COOKIE_SECRET = os.environ.get("REDASH_COOKIE_SECRET", "c292a0a3aa32397cdb050e233733900f")
LOG_LEVEL = os.environ.get("REDASH_LOG_LEVEL", "INFO")
EVENTS_LOG_PATH = os.environ.get("REDASH_EVENTS_LOG_PATH", "")
@@ -73,4 +76,4 @@ CLIENT_SIDE_METRICS = parse_boolean(os.environ.get("REDASH_CLIENT_SIDE_METRICS",
ANALYTICS = os.environ.get("REDASH_ANALYTICS", "")
# Features:
FEATURE_TABLES_PERMISSIONS = parse_boolean(os.environ.get("REDASH_FEATURE_TABLES_PERMISSIONS", "false"))
FEATURE_TABLES_PERMISSIONS = parse_boolean(os.environ.get("REDASH_FEATURE_TABLES_PERMISSIONS", "false"))
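A hedged sketch of how the new flags interact: OAuth is treated as enabled only when both client id and secret are present, and GOOGLE_OAUTH_ENABLED ends up as a (possibly empty) string rather than a strict boolean. The values below are placeholders, not real credentials.

import os

os.environ['REDASH_GOOGLE_CLIENT_ID'] = 'example.apps.googleusercontent.com'   # placeholder
os.environ['REDASH_GOOGLE_CLIENT_SECRET'] = 'example-secret'                   # placeholder

GOOGLE_CLIENT_ID = os.environ.get("REDASH_GOOGLE_CLIENT_ID", "")
GOOGLE_CLIENT_SECRET = os.environ.get("REDASH_GOOGLE_CLIENT_SECRET", "")
GOOGLE_OAUTH_ENABLED = GOOGLE_CLIENT_ID and GOOGLE_CLIENT_SECRET

print(bool(GOOGLE_OAUTH_ENABLED))   # True only when both variables are non-empty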

@@ -5,7 +5,7 @@ import redis
from celery import Task
from celery.result import AsyncResult
from celery.utils.log import get_task_logger
from redash import redis_connection, models, statsd_client
from redash import redis_connection, models, statsd_client, settings
from redash.utils import gen_query_hash
from redash.worker import celery
from redash.data.query_runner import get_query_runner
@@ -64,18 +64,23 @@ class QueryTask(object):
logging.info("[Manager][%s] Found existing job: %s", query_hash, job_id)
job = cls(job_id=job_id)
else:
if job.ready():
logging.info("[%s] job found is ready (%s), removing lock", query_hash, job.celery_status)
redis_connection.delete(QueryTask._job_lock_id(query_hash, data_source.id))
job = None
if not job:
pipe.multi()
if scheduled:
queue_name = data_source.queue_name
else:
queue_name = data_source.scheduled_queue_name
else:
queue_name = data_source.queue_name
result = execute_query.apply_async(args=(query, data_source.id), queue=queue_name)
job = cls(async_result=result)
logging.info("[Manager][%s] Created new job: %s", query_hash, job.id)
pipe.set(cls._job_lock_id(query_hash, data_source.id), job.id)
pipe.set(cls._job_lock_id(query_hash, data_source.id), job.id, settings.JOB_EXPIRY_TIME)
pipe.execute()
break
@@ -113,6 +118,17 @@ class QueryTask(object):
'query_result_id': query_result_id,
}
@property
def is_cancelled(self):
return self._async_result.status == 'REVOKED'
@property
def celery_status(self):
return self._async_result.status
def ready(self):
return self._async_result.ready()
def cancel(self):
return self._async_result.revoke(terminate=True)
@@ -151,6 +167,41 @@ def refresh_queries():
statsd_client.gauge('manager.seconds_since_refresh', now - float(status.get('last_refresh_at', now)))
@celery.task(base=BaseTask)
def cleanup_tasks():
# In case of a cold restart of the workers, there might be jobs that still have their "lock" object but aren't
# really going to run. This task removes those stale locks.
lock_keys = redis_connection.keys("query_hash_job:*") # TODO: use set instead of keys command
query_tasks = [QueryTask(job_id=j) for j in redis_connection.mget(lock_keys)]
logger.info("Found %d locks", len(query_tasks))
inspect = celery.control.inspect()
active_tasks = inspect.active()
if active_tasks is None:
active_tasks = []
else:
active_tasks = active_tasks.values()
all_tasks = set()
for task_list in active_tasks:
for task in task_list:
all_tasks.add(task['id'])
logger.info("Active jobs count: %d", len(all_tasks))
for i, t in enumerate(query_tasks):
if t.ready():
# if locked task is ready already (failed, finished, revoked), we don't need the lock anymore
logger.warning("%s is ready (%s), removing lock.", lock_keys[i], t.celery_status)
redis_connection.delete(lock_keys[i])
if t.celery_status == 'STARTED' and t.id not in all_tasks:
logger.warning("Couldn't find active job for: %s, removing lock.", lock_keys[i])
redis_connection.delete(lock_keys[i])
@celery.task(bind=True, base=BaseTask, track_started=True)
def execute_query(self, query, data_source_id):
# TODO: maybe this should be a class?
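A reduced sketch of the lock-with-expiry idea from the enqueue hunk above: the per-query lock now carries a TTL (settings.JOB_EXPIRY_TIME, default 3600*6 per the settings.py change), so a lock left behind by a crashed worker eventually evaporates on its own. A local Redis is assumed; the key and value are placeholders.

import redis

JOB_EXPIRY_TIME = 3600 * 6                      # matches the new default in settings.py
r = redis.StrictRedis()

r.set('query_hash_job:example-hash:1', 'celery-task-id', ex=JOB_EXPIRY_TIME)
print(r.ttl('query_hash_job:example-hash:1'))   # counts down, so a stuck lock cannot live forever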

@@ -13,6 +13,10 @@ celery.conf.update(CELERY_RESULT_BACKEND=settings.CELERY_BACKEND,
'task': 'redash.tasks.refresh_queries',
'schedule': timedelta(seconds=30)
},
'cleanup_tasks': {
'task': 'redash.tasks.cleanup_tasks',
'schedule': timedelta(minutes=5)
},
},
CELERY_TIMEZONE='UTC')

@@ -1,7 +1,7 @@
Flask==0.10.1
Flask-GoogleAuth==0.4
Flask-RESTful==0.2.10
Flask-Login==0.2.9
Flask-OAuth==0.12
passlib==1.6.2
Jinja2==2.7.2
MarkupSafe==0.18
@@ -10,7 +10,7 @@ aniso8601==0.82
blinker==1.3
itsdangerous==0.23
peewee==2.2.2
psycopg2==2.5.1
psycopg2==2.5.2
python-dateutil==2.1
pytz==2013.9
redis==2.7.5

@@ -1,52 +1,25 @@
from unittest import TestCase
from mock import patch
from flask_googleauth import ObjectDict
from tests import BaseTestCase
from redash.authentication import validate_email, create_and_login_user
from redash import settings, models
from redash import models
from redash.google_oauth import create_and_login_user
from tests.factories import user_factory
class TestEmailValidation(TestCase):
def test_accepts_address_with_correct_domain(self):
with patch.object(settings, 'GOOGLE_APPS_DOMAIN', 'example.com'):
self.assertTrue(validate_email('example@example.com'))
def test_accepts_address_from_exception_list(self):
with patch.multiple(settings, GOOGLE_APPS_DOMAIN='example.com', ALLOWED_EXTERNAL_USERS=['whatever@whatever.com']):
self.assertTrue(validate_email('whatever@whatever.com'))
def test_accept_any_address_when_domain_empty(self):
with patch.object(settings, 'GOOGLE_APPS_DOMAIN', None):
self.assertTrue(validate_email('whatever@whatever.com'))
def test_rejects_address_with_incorrect_domain(self):
with patch.object(settings, 'GOOGLE_APPS_DOMAIN', 'example.com'):
self.assertFalse(validate_email('whatever@whatever.com'))
class TestCreateAndLoginUser(BaseTestCase):
def test_logins_valid_user(self):
user = user_factory.create(email='test@example.com')
with patch.object(settings, 'GOOGLE_APPS_DOMAIN', 'example.com'), patch('redash.authentication.login_user') as login_user_mock:
create_and_login_user(None, user)
with patch('redash.google_oauth.login_user') as login_user_mock:
create_and_login_user(user.name, user.email)
login_user_mock.assert_called_once_with(user, remember=True)
def test_creates_valid_new_user(self):
openid_user = ObjectDict({'email': 'test@example.com', 'name': 'Test User'})
email = 'test@example.com'
name = 'Test User'
with patch.multiple(settings, GOOGLE_APPS_DOMAIN='example.com'), \
patch('redash.authentication.login_user') as login_user_mock:
with patch('redash.google_oauth.login_user') as login_user_mock:
create_and_login_user(None, openid_user)
create_and_login_user(name, email)
self.assertTrue(login_user_mock.called)
user = models.User.get(models.User.email == openid_user.email)
def test_ignores_invalid_user(self):
user = ObjectDict({'email': 'test@whatever.com'})
with patch.object(settings, 'GOOGLE_APPS_DOMAIN', 'example.com'), patch('redash.authentication.login_user') as login_user_mock:
create_and_login_user(None, user)
self.assertFalse(login_user_mock.called)
user = models.User.get(models.User.email == email)

@@ -380,7 +380,7 @@ class TestLogin(BaseTestCase):
with app.test_client() as c, patch.object(settings, 'PASSWORD_LOGIN_ENABLED', False):
rv = c.get('/login')
self.assertEquals(rv.status_code, 302)
self.assertTrue(rv.location.endswith(url_for('GoogleAuth.login')))
self.assertTrue(rv.location.endswith(url_for('google_oauth.authorize')))
def test_get_login_form(self):
with app.test_client() as c: