Compare commits


158 Commits

Author SHA1 Message Date
Arik Fraimovich
2911fa8af7 Bump version. 2015-07-13 18:31:31 +03:00
Arik Fraimovich
7fc2d5ee0b Update bootstrap.sh to use 0.6.3. 2015-07-13 12:00:28 +03:00
Arik Fraimovich
3c9c1466a3 Merge pull request #483 from erans/master
Feature: more fine-grained support for CORS in QueryResultAPI
2015-07-13 11:04:10 +03:00
Eran Sandler
4a7c066bf0 Too many languages... :-( 2015-07-13 10:05:07 +03:00
Eran Sandler
1a3657572e Added fine-grained control of the CORS header for QueryResultAPI and possibly future APIs. 2015-07-13 09:42:23 +03:00
Arik Fraimovich
666e3281e4 Merge pull request #482 from erans/master
Feature: CORS support for the Query Result API to allow embedding in other domains
2015-07-13 08:41:38 +03:00
Eran Sandler
66084b1a3b minor fixes 2015-07-12 23:07:06 +03:00
Eran Sandler
421470666a use set_from_string. 2015-07-12 23:06:00 +03:00
Eran Sandler
f8e2bc9eca Added configurable CORS support for the Query Result API (to export data as JSON or CSV). Configuration is via a comma-separated environment variable listing one or more domains (REDASH_QUERIES_RESULT_CORS) 2015-07-12 23:00:50 +03:00
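
(A minimal sketch of how such a comma-separated setting might be parsed; set_from_string is the helper named in the commit above, while the setting name on the left and the default are illustrative, not taken from the code.)

import os

def set_from_string(s):
    # "a.com, b.com" -> set(['a.com', 'b.com'])
    return set(part.strip() for part in s.split(',') if part.strip())

# e.g. REDASH_QUERIES_RESULT_CORS="http://example.com,http://reports.example.com"
ALLOWED_CORS_ORIGINS = set_from_string(os.environ.get('REDASH_QUERIES_RESULT_CORS', ''))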
Arik Fraimovich
c195362710 Merge pull request #479 from erans/master
Feature: improved error reporting and new log information support for the Python query runner
2015-07-12 22:30:21 +03:00
Arik Fraimovich
b671dd0431 Merge pull request #480 from EverythingMe/feature/multiple-domains
Feature: support for multiple domains with Google OAuth
2015-07-12 13:36:17 +03:00
Arik Fraimovich
7793f3b257 Feature: support for multiple Google Auth domains 2015-07-12 12:51:45 +03:00
Arik Fraimovich
e09aa6f81a Show message for wrong user/password (closes #275) 2015-07-12 12:43:37 +03:00
Eran Sandler
780e0c0418 - Refactored the Python query runner
- Improved error handling to show the real error (including indentation and syntax errors; it should show the right row number)
- Added support for "print" statements, which become log messages shown in the query window (where you edit). It's on by default; to disable it, simply call 'disable_print_log()'
2015-07-12 12:14:46 +03:00
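
(A hedged sketch of what a Python-data-source query might look like after this change; disable_print_log() is the call named in the commit, while the shape of the result dict is an assumption about the runner's conventions.)

# Runs inside the Python query runner.
# print output is captured and surfaced as log lines in the query editor;
# call disable_print_log() to turn the capture off.
print "building the result..."  # shows up in the query window's log

result = {
    'columns': [{'name': 'answer', 'type': 'integer'}],  # assumed result shape
    'rows': [{'answer': 42}],
}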
Arik Fraimovich
a692e3f664 Merge pull request #476 from EverythingMe/feature/api
Feature: support for per-user API keys
2015-07-08 21:34:06 +03:00
Arik Fraimovich
6860dde1f7 Set api_key to be unique 2015-07-08 21:29:32 +03:00
Arik Fraimovich
e183affdd0 Feature: support for per-user API keys 2015-07-08 20:59:07 +03:00
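
(Illustrative only: fetching query results with a per-user key, assuming the endpoint accepts an api_key parameter; the instance URL, endpoint and parameter name are assumptions, not confirmed by these commits.)

import requests

response = requests.get(
    'https://redash.example.com/api/queries/42/results.json',  # hypothetical instance/endpoint
    params={'api_key': 'the-user-api-key'})                    # assumed parameter name
response.raise_for_status()
print response.json()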
Arik Fraimovich
39db74ff20 Merge pull request #475 from hakobera/support-infuxdb
Feature: Support InfluxDB v0.9+
2015-07-05 22:39:40 +03:00
Arik Fraimovich
05c2c21a85 Bump version. 2015-07-05 22:39:12 +03:00
Kazuyuki Honda
00edc29e50 Support influxdb 0.9+ 2015-07-06 01:15:43 +09:00
Arik Fraimovich
3771af0a8c Update bootstrap.sh to use 0.6.2. 2015-07-05 08:30:11 +03:00
Arik Fraimovich
c32c2d43f7 Bump version. 2015-07-04 10:41:06 +03:00
Arik Fraimovich
4e2e3f9077 Merge pull request #472 from BrunoSalerno/map_visualization_options_fixed
map visualization: options fixed
2015-07-04 10:23:55 +03:00
Bruno Salerno
2a27422df9 map visualization: draw_options to scope 2015-07-03 18:02:22 -03:00
Bruno Salerno
f9e0ce8e9c map visualization: options fixed 2015-07-03 16:45:55 -03:00
Arik Fraimovich
a1d49f13d3 Merge pull request #471 from EverythingMe/fix/visualization_api
Fix: opening viz editor resets its options
2015-07-02 13:17:50 +03:00
Arik Fraimovich
26aa199f9c Fix: opening viz editor resets its options 2015-07-02 13:17:32 +03:00
Arik Fraimovich
4c77f3f914 Merge pull request #470 from EverythingMe/fix/visualization_api
Increase limit of tables for showing search.
2015-07-02 11:06:19 +03:00
Arik Fraimovich
d6be792595 Increase limit of tables for showing search. 2015-07-02 11:05:45 +03:00
Arik Fraimovich
59c1ea7f16 Merge pull request #469 from EverythingMe/fix/visualization_api
Fix: map - HTTPS support in tiles/marker
2015-07-02 11:05:25 +03:00
Arik Fraimovich
4d24005eff Fix: map - HTTPS support in tiles/marker 2015-07-02 11:02:44 +03:00
Arik Fraimovich
2dab35b614 Merge pull request #468 from EverythingMe/fix/visualization_api
Fix: visualizations API fixes
2015-07-02 08:51:09 +03:00
Arik Fraimovich
0b61b88f5f Fix: make default options apply to new visualizations 2015-07-02 08:38:08 +03:00
Arik Fraimovich
e5cb58207c Fix: vis title wasn't updating when changing type 2015-07-02 08:26:10 +03:00
Arik Fraimovich
fc17d1af81 Don't cache static assets in debug mode 2015-07-02 08:25:51 +03:00
Arik Fraimovich
e6650e1e2d Merge pull request #467 from BrunoSalerno/leaflet-visualization-marker-path-bug-fixed
leaflet visualization: marker path bug handled
2015-07-01 22:49:12 +03:00
Bruno Salerno
3aa1cd0133 leaflet visualization: marker path bug handled 2015-07-01 16:40:56 -03:00
Arik Fraimovich
e04833c327 Merge pull request #466 from BrunoSalerno/leaflet-visualization
Feature: Map visualization (using Leaflet)
2015-07-01 20:58:43 +03:00
Bruno Salerno
b743cceb60 leaflet visualization: map template margins fixed 2015-07-01 14:53:31 -03:00
Bruno Salerno
a0e134d3b5 leaflet visualization: dynamic height 2015-07-01 14:15:17 -03:00
Bruno Salerno
d7fb2d7458 leaflet-visualization: div size fixed and bounds sorting improved 2015-07-01 12:30:48 -03:00
Bruno Salerno
b913ce6022 leaflet visualization: color series named properly 2015-07-01 10:03:43 -03:00
Bruno Salerno
1eb7945d16 leaflet visualization: map bounds are stored and kept 2015-06-30 18:18:34 -03:00
Bruno Salerno
37d0026ee4 leaflet-visualization: point feature 2015-06-30 17:34:31 -03:00
Arik Fraimovich
9cdc2cb2f7 Merge pull request #465 from EverythingMe/fix/time_field_serialize
Feature: ability to control series order in charts.
2015-06-30 09:26:26 +03:00
Arik Fraimovich
a9bff9063e Feature: cli to get status. 2015-06-30 09:25:32 +03:00
Arik Fraimovich
380126ee44 Feature: ability to control series index in charts. 2015-06-30 09:15:00 +03:00
Arik Fraimovich
d8377375b8 Merge pull request #461 from myinsiders/saml
Added SAML authentication support, e.g. for OneLogin or Okta
2015-06-30 08:25:44 +03:00
Arik Fraimovich
98ff701f9a Merge pull request #464 from EverythingMe/fix/time_field_serialize
Fix #463: support for datetime.time and datetime.timedelta fields
2015-06-29 18:06:25 +03:00
Arik Fraimovich
f5ea3e97d3 Fix: support for datetime.time and datetime.timedelta fields 2015-06-29 18:01:36 +03:00
Mark White
719e96dd2f Added SAML login option to login form 2015-06-28 17:19:57 +01:00
Arik Fraimovich
6c6c0256ba Merge pull request #462 from EverythingMe/fix_codemirror_resize_issue
Fix: refresh CodeMirror size when schema browser appears
2015-06-28 13:46:12 +03:00
Arik Fraimovich
723df51cdd Fix: refresh CodeMirror size when schema browser appears 2015-06-28 13:45:49 +03:00
Arik Fraimovich
a0f4e263b2 Merge pull request #459 from olgakogan/patch-2
Fixed an error in case a query doesn't have last_modified_date
2015-06-28 10:27:04 +03:00
Arik Fraimovich
4706bf8060 Merge pull request #458 from erans/master
Initial and very early support for ElasticSearch query runner
2015-06-28 10:26:26 +03:00
Mark White
f96a9f659a Added Apache license to code taken from Okta 2015-06-26 11:45:24 +01:00
Mark White
63c273f896 Fixed issue in saml login 2015-06-26 11:12:27 +01:00
Mark White
622ac6d781 Fixes to saml callback server name code 2015-06-26 10:26:59 +01:00
Mark White
8dc564a8bc Added configuration of flask server name 2015-06-26 09:06:50 +01:00
Mark White
3ae5baef22 Added OneLogin support 2015-06-25 17:52:00 +01:00
olga
8d819068b5 Fixed an error in case a query doesn't have last_modified_date 2015-06-25 11:31:22 +03:00
Eran Sandler
585e056265 Initial, very early release of an ElasticSearch query runner. It only supports Lucene-style queries (single line, similar to what Kibana uses, but without aggregations). 2015-06-24 09:53:09 +03:00
Arik Fraimovich
1914ed7c7c Merge pull request #456 from bells17/master
Changed the README's 'Setting up re:dash instance' URL to a new one
2015-06-19 10:25:11 +01:00
bells17
bd216e93e7 Changed the README's 'Setting up re:dash instance' URL to a new one 2015-06-19 10:20:41 +09:00
Arik Fraimovich
5e351de896 Merge pull request #455 from erans/master
added Mongo JSON serializer to correctly serialize ObjectId + datetime.datetime
2015-06-17 10:59:42 +03:00
Eran Sandler
de0e534c77 removed the unnecessary check for datetime.datetime in the JSON encoder. 2015-06-17 10:58:12 +03:00
Eran Sandler
5fa1f9440d duh! 2015-06-16 11:50:20 +03:00
Eran Sandler
b3ddc5f8b9 removed old conversion of ObjectId to string since it is now part of the new JSON serializer 2015-06-16 11:34:19 +03:00
Eran Sandler
8cde5f9673 added Mongo JSON serializer to correctly serialize ObjectId 2015-06-16 11:27:23 +03:00
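
(A minimal sketch of such a serializer as a json.JSONEncoder subclass; the class name is illustrative. Per the follow-up commit above, an extra datetime.datetime check turned out to be unnecessary, so only ObjectId is handled here.)

import json
from bson.objectid import ObjectId

class MongoDBJSONEncoder(json.JSONEncoder):
    def default(self, o):
        if isinstance(o, ObjectId):
            return str(o)  # serialize ObjectId as its hex string
        return super(MongoDBJSONEncoder, self).default(o)

# usage: json.dumps(document, cls=MongoDBJSONEncoder)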
Arik Fraimovich
1bb53ca497 Merge pull request #451 from EverythingMe/fix/unicode_in_annotation
Fix: charts with category X axis were not sorted properly
2015-06-11 21:46:26 +03:00
Arik Fraimovich
0a3cd9267f Fix: charts with category x axis were not sorted properly 2015-06-11 21:45:45 +03:00
Arik Fraimovich
075d843354 Merge pull request #449 from EverythingMe/fix/unicode_in_annotation
Fix: schema browser chokes on large schemas
2015-06-10 13:36:53 +03:00
Arik Fraimovich
b14e5e8c0e Fix: schema browser chokes on large schemas 2015-06-10 13:36:05 +03:00
Arik Fraimovich
c9da4be422 Merge pull request #442 from EverythingMe/fix/timezone
Fix: when the server has a non-UTC timezone, timestamps were wrong
2015-06-07 22:23:46 +03:00
Arik Fraimovich
276ee7c27a Merge pull request #448 from olgakogan/master
supervisord default config: separate queue for ad-hoc and scheduled queries
2015-06-07 17:38:53 +03:00
olga
334040532a changed default concurrency level to 2 per queue 2015-06-07 17:36:24 +03:00
olga
335a3a98b5 separated the queue for ad-hoc and for scheduled queries (someone who runs an ad-hoc query should not have to wait because scheduled queries are being refreshed at that time) 2015-06-07 17:28:57 +03:00
Arik Fraimovich
b17080a7f5 Merge pull request #446 from EverythingMe/fix/unicode_in_annotation
Fix #443: open table when searching & don't hide columns
2015-06-05 18:13:05 +03:00
Arik Fraimovich
8441c12b01 Fix #443: open table when searching & don't hide columns 2015-06-05 18:08:06 +03:00
Arik Fraimovich
3b4af1b6fa Merge pull request #445 from EverythingMe/fix/unicode_in_annotation
Fix #444: unicode characters in username fail query execution
2015-06-05 16:58:00 +03:00
Arik Fraimovich
c3deb8e2fa Fix #444: unicode characters in username fail query execution 2015-06-05 16:49:25 +03:00
Arik Fraimovich
a60b1686da Fix: when the server has a non-UTC timezone, timestamps were wrong 2015-06-03 07:58:28 +03:00
Arik Fraimovich
b56e87ceb2 Merge pull request #440 from EverythingMe/fix_ui
Fix: python query runner didn't allow iterating lists
2015-05-31 10:20:32 +03:00
Arik Fraimovich
fc89bcdaf3 Fix: python query runner didn't allow accessing dicts 2015-05-31 10:15:48 +03:00
Arik Fraimovich
15ec8321bb Merge pull request #437 from EverythingMe/fix_ui
Feature: ability to disable x axis labels
2015-05-19 22:24:36 +03:00
Arik Fraimovich
e6ba62485c Merge pull request #436 from EverythingMe/fix_ui
Fix: sorting not working for columns with special characters
2015-05-19 22:15:21 +03:00
Arik Fraimovich
9077b01fb9 Feature: ability to disable x axis labels 2015-05-19 22:15:08 +03:00
Arik Fraimovich
f45281be96 Fix: annotation was failing if query had unicode in it 2015-05-19 22:01:02 +03:00
Arik Fraimovich
a1c8ef9037 Merge pull request #435 from EverythingMe/fix_ui
Fix: string columns with date/time values failed to render.
2015-05-19 22:00:37 +03:00
Arik Fraimovich
f46e8af23f Fix: sorting not working for columns with special characters 2015-05-19 22:00:15 +03:00
Arik Fraimovich
30a89bfd2c Fix: string columns with dates failed to render. 2015-05-19 21:43:50 +03:00
Arik Fraimovich
6312f8738d Merge pull request #433 from stanhu/make-query-link-obvious
Make it obvious that the query link is clickable.
2015-05-17 08:18:19 +03:00
Stan Hu
9e3d5c10c5 Make it obvious that the query link is clickable: underline when hovering and add glyphicon 2015-05-16 22:06:04 -07:00
Arik Fraimovich
59b87ec4fd Merge pull request #434 from erans/master
MongoDB aggregation support + mongo documentation (as comments)
2015-05-17 07:49:20 +03:00
Eran Sandler
27ecf5f25c Merged the older MongoDB code into the new mongodb query runner to support aggregation 2015-05-16 22:22:33 +03:00
Arik Fraimovich
105971c4c8 Merge pull request #432 from stanhu/allow-undefined-max-age
Allow undefined max_age parameter in query_results endpoint
2015-05-15 11:25:24 +03:00
Stan Hu
690f8323c3 Allow undefined max_age parameter in query_results endpoint
An Error 500 would be returned by the endpoint if you attempted to
pass a query parameter to the dashboard since maxAge was undefined in JavaScript.
2015-05-14 22:00:08 -07:00
Arik Fraimovich
20eb110ce3 Fix: update_release_commit_sha should return json 2015-05-14 10:09:57 +03:00
Arik Fraimovich
571c9d0aee Update release manager: update tag commit sha on new release 2015-05-14 09:59:21 +03:00
Arik Fraimovich
0ee7292f16 Merge pull request #431 from EverythingMe/feature/additional_refresh_rates
Feature: additional refresh times (5, 10, 15, 30 minutes)
2015-05-14 09:25:35 +03:00
Arik Fraimovich
8c28392dfd Feature: additional refresh times (5, 10, 15, 30 minutes) 2015-05-13 20:59:39 +03:00
Arik Fraimovich
671f1f4478 Merge pull request #428 from olgakogan/master
Feature: support for column types in MySQL query runner
2015-05-12 13:55:56 +03:00
olga
557d3748be added support for column types in MySQL 2015-05-12 12:01:47 +03:00
Arik Fraimovich
f00d080ed2 Install optipng in CircleCI. 2015-05-12 10:33:11 +03:00
Arik Fraimovich
4e76c1305f Merge pull request #425 from EverythingMe/new_logo
New logo
2015-05-12 10:27:01 +03:00
Arik Fraimovich
36ef388e92 Bump version 2015-05-12 10:26:16 +03:00
Arik Fraimovich
2e1ee7f76c New logo 2015-05-12 10:25:57 +03:00
Arik Fraimovich
fc1e38772d New logo! 2015-05-11 23:13:15 +03:00
Arik Fraimovich
0e631a5121 Merge pull request #422 from EverythingMe/feature/288_bq_instance_auth
Feature: BigQueryGCE query runner that uses instance auth (fixes #288)
2015-05-10 23:18:45 +03:00
Arik Fraimovich
d74175efca Feature: BigQueryGCE query runner that uses instance auth 2015-05-10 08:46:41 +03:00
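
(A hedged sketch of instance-based auth as it typically looked with oauth2client at the time; whether the BigQueryGCE runner is wired exactly this way is an assumption.)

import httplib2
from apiclient.discovery import build
from oauth2client.gce import AppAssertionCredentials

# On a GCE instance the credentials come from the metadata server,
# so no key file needs to be configured.
credentials = AppAssertionCredentials(scope='https://www.googleapis.com/auth/bigquery')
http = credentials.authorize(httplib2.Http())
bigquery_service = build('bigquery', 'v2', http=http)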
Arik Fraimovich
bf5fe7d2c7 Merge pull request #421 from EverythingMe/fix/issue_417
Feature: show visualization name next to query name (#418)
2015-05-08 22:28:12 +03:00
Arik Fraimovich
0f022aba92 Feature: show visualization name next to query name. 2015-05-07 21:58:12 +03:00
Arik Fraimovich
0b6e55e55a Remove unused code 2015-05-07 21:58:08 +03:00
Arik Fraimovich
e1c409366c Merge pull request #420 from EverythingMe/fix/issue_417
Fix: Make query editor auto resize again to prevent scroll issues
2015-05-07 21:52:07 +03:00
Arik Fraimovich
3b942118e9 Make query editor auto resize again to prevent scroll issues 2015-05-07 21:39:25 +03:00
Arik Fraimovich
7f1543db8f Merge pull request #419 from EverythingMe/fix/issue_417
Fix #417: integer columns treated as floats
2015-05-07 21:38:54 +03:00
Arik Fraimovich
74a5121be2 Fix #417: integer columns treated as floats 2015-05-07 21:25:30 +03:00
Arik Fraimovich
26fe136a1a Merge pull request #416 from daamien/patch-1
Upgrade to requests 2.3.0
2015-05-07 09:30:43 +03:00
damien clochard
83fb189b05 Update requirements.txt
The bootstrap.sh script fails on Debian 7.8

I solved the problem with:

$ sudo pip install requests==2.3.0

Check this bug for more details:
https://github.com/kennethreitz/requests/issues/2028
2015-05-06 18:36:24 +02:00
Arik Fraimovich
5e8d0d36c0 Merge pull request #409 from erans/master
Fix: minor fixes for MongoDB, script and Python query runners
2015-04-26 11:07:33 +03:00
Eran Sandler
4ae4cffa04 Removed a copy-paste duplication. Hmpf. 2015-04-26 11:05:40 +03:00
Eran Sandler
bc433e88fe Fix for __getitem__ error when accessing a dictionary directly. 2015-04-26 11:03:53 +03:00
Arik Fraimovich
513ef501a4 Merge pull request #410 from stanhu/sort-by-y-values
Feature: sort by Y values for charts that have a single value per series
2015-04-26 10:23:06 +03:00
Stan Hu
f2bdcbedfb Simplify code and remove sortY option to avoid confusion 2015-04-26 00:18:03 -07:00
Stan Hu
fd056edb2a Support sort by y values for charts that have a single value per series 2015-04-21 22:52:14 -07:00
Eran Sandler
0f0acfdd12 Fix which prevented MongoDB connections from executing queries due to a faulty JSON schema configuration. 2015-04-22 00:18:28 +03:00
Eran Sandler
1e3b507b2b Fix for the script data source when command-line parameters are passed as part of the query. 2015-04-21 09:36:05 +03:00
Arik Fraimovich
84d95272f3 Comment out active tasks cleanup, as it sometimes fails. 2015-04-20 10:05:04 +03:00
Arik Fraimovich
3b08e9e214 Merge pull request #408 from alexanderlz/master
Feature: additional metadata in query annotation (username, query id, queue name)
2015-04-20 08:48:59 +03:00
Arik Fraimovich
f4be83b06f Use query id from UI & annotate scheduled queries 2015-04-20 08:46:01 +03:00
Alexander Leibzon
4918d0430c add redash username/query_id to query for easier backtracking 2015-04-20 02:16:12 +03:00
Arik Fraimovich
e25b86b10d Merge pull request #398 from lenguyenthedat/data_sources_name_unique
Fix: make the data_sources' name unique
2015-04-18 22:51:12 +03:00
Arik Fraimovich
d3d305a843 Make sure data sources have unique names in tests 2015-04-18 22:46:42 +03:00
Arik Fraimovich
825b93bfe9 Fix migration numbering (there is 0007 already) 2015-04-18 22:46:42 +03:00
Arik Fraimovich
8c98282200 Rename only data sources with duplicates 2015-04-18 22:46:42 +03:00
Dat Le
768ac9eb04 Fix: make the data_sources' name unique
Also added migration script.
2015-04-18 22:46:42 +03:00
Arik Fraimovich
71011d2fca Merge pull request #407 from stanhu/add-flask-admin 2015-04-18 22:23:10 +03:00
Arik Fraimovich
9683a8ed82 Dedicated view for data source 2015-04-18 22:21:58 +03:00
Arik Fraimovich
10a6ac9313 Dedicated view for User model 2015-04-18 18:48:44 +03:00
Arik Fraimovich
dba325e9a2 Use ArrayListField for Array fields. 2015-04-18 18:47:54 +03:00
Arik Fraimovich
fcd9ab533c Fix: correctly call CustomModelConverter __init__. 2015-04-18 18:46:32 +03:00
Arik Fraimovich
68e3e8e1c5 Update name in admin screens 2015-04-18 18:00:52 +03:00
Arik Fraimovich
7f8b738b9e Fix requirements.txt (peewee was specified twice) 2015-04-18 16:58:05 +03:00
Arik Fraimovich
8a35dcedfa Merge pull request #406 from stanhu/add-mysql-port
Add support for configuring MySQL port
2015-04-18 16:14:26 +03:00
Stan Hu
ef763b7157 Use Flask-Admin to provide basic Web-based /admin page 2015-04-18 04:11:30 -07:00
Stan Hu
498e1d4474 Add support for configuring MySQL port 2015-04-17 22:57:34 -07:00
Arik Fraimovich
73de936c75 Merge pull request #405 from EverythingMe/feature/syntax_highglight
Feature: use correct syntax highlighting for Python/Mongo data sources
2015-04-14 17:53:46 +03:00
Arik Fraimovich
e32b709a41 Typo fix in the python query runner 2015-04-14 17:50:36 +03:00
Arik Fraimovich
60652f63c4 Use correct syntax highlighting for Python/Mongo sources 2015-04-14 17:48:36 +03:00
Arik Fraimovich
d0d4101f90 Merge pull request #404 from erans/master
Improvement: make the Python datasource use the RestrictedPython sandbox
2015-04-13 16:13:00 +03:00
Eran Sandler
646875794f Per request by Arik - the BDFL :-) 2015-04-13 15:27:28 +03:00
Eran Sandler
cdad4be0d5 Removed the try/except block in the import of RestrictedPython since we are putting it in the requirements.txt file. 2015-04-13 15:23:49 +03:00
Eran Sandler
8f4285be62 Minor fixes from code review. 2015-04-13 15:21:43 +03:00
Eran Sandler
acfa55e2d0 Python datasource that uses RestrictedPython. Only modules listed in "allowedImportModules" (comma separated) will be allowed to be imported, and the code assumes they are installed on the server running the actual code. 2015-04-13 11:22:22 +03:00
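
(For reference, a minimal sketch of running code through RestrictedPython; compile_restricted is the library's entry point, while the whitelist of builtins below is illustrative.)

from RestrictedPython import compile_restricted

source = "result = sum(range(10))"
byte_code = compile_restricted(source, '<query>', 'exec')

# Execute against an explicitly whitelisted set of builtins.
restricted_globals = {'__builtins__': {'sum': sum, 'range': range}}
exec(byte_code, restricted_globals)
print restricted_globals['result']  # 45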
Arik Fraimovich
0b7cd07db0 Merge pull request #403 from EverythingMe/chore/release_process
Fix: schema browser styles
2015-04-08 16:14:30 +03:00
Arik Fraimovich
6297ffd523 Fix: schema browser styles 2015-04-08 16:13:03 +03:00
Arik Fraimovich
368f4fdbef Merge pull request #402 from EverythingMe/chore/release_process
New release process.
2015-04-06 12:51:12 +03:00
Arik Fraimovich
f52044a209 New release process 2015-04-06 12:50:17 +03:00
60 changed files with 2079 additions and 499 deletions


@@ -1,6 +1,7 @@
NAME=redash
VERSION=`python ./manage.py version`
FULL_VERSION=$(VERSION)+b$(CIRCLE_BUILD_NUM)
BASE_VERSION=$(shell python ./manage.py version | cut -d + -f 1)
# VERSION gets evaluated every time it's referenced, therefore we need to use VERSION here instead of FULL_VERSION.
FILENAME=$(CIRCLE_ARTIFACTS)/$(NAME).$(VERSION).tar.gz
@@ -15,7 +16,7 @@ pack:
tar -zcv -f $(FILENAME) --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *
upload:
python bin/upload_version.py $(VERSION) $(FILENAME)
python bin/release_manager.py $(CIRCLE_SHA1) $(BASE_VERSION) $(FILENAME)
test:
nosetests --with-coverage --cover-package=redash tests/*.py


@@ -1,6 +1,5 @@
<p align="center">
<img title="re:dash" src='https://raw.githubusercontent.com/EverythingMe/redash/screenshots/redash_logo.png' />
<img title="re:dash" src='http://redash.io/static/img/redash_logo.png' width="200px"/>
</p>
<p align="center">
<img title="Build Status" src='https://circleci.com/gh/EverythingMe/redash.png?circle-token=8a695aa5ec2cbfa89b48c275aea298318016f040'/>
@@ -28,7 +27,7 @@ You can try out the demo instance: http://demo.redash.io/ (login with any Google
## Getting Started
* [Setting up re:dash instance](https://github.com/EverythingMe/redash/wiki/Setting-up-re:dash-instance) (includes links to ready made AWS/GCE images).
* [Setting up re:dash instance](http://redash.io/deployment/setup.html) (includes links to ready made AWS/GCE images).
* Additional documentation in the [Wiki](https://github.com/everythingme/redash/wiki).


@@ -1,30 +0,0 @@
#!/usr/bin/env python
import sys
import requests
if __name__ == '__main__':
response = requests.get('https://api.github.com/repos/EverythingMe/redash/releases')
if response.status_code != 200:
exit("Failed getting releases (status code: %s)." % response.status_code)
sorted_releases = sorted(response.json(), key=lambda release: release['id'], reverse=True)
latest_release = sorted_releases[0]
asset_url = latest_release['assets'][0]['url']
filename = latest_release['assets'][0]['name']
wget_command = 'wget --header="Accept: application/octet-stream" %s -O %s' % (asset_url, filename)
if '--url-only' in sys.argv:
print asset_url
elif '--wget' in sys.argv:
print wget_command
else:
print "Latest release: %s" % latest_release['tag_name']
print latest_release['body']
print "\nTarball URL: %s" % asset_url
print 'wget: %s' % (wget_command)

bin/release_manager.py (new file, 147 lines)

@@ -0,0 +1,147 @@
import os
import sys
import json
import re
import subprocess
import requests
github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
repo = 'EverythingMe/redash'
def _github_request(method, path, params=None, headers={}):
if not path.startswith('https://api.github.com'):
url = "https://api.github.com/{}".format(path)
else:
url = path
if params is not None:
params = json.dumps(params)
response = requests.request(method, url, data=params, auth=auth)
return response
def exception_from_error(message, response):
return Exception("({}) {}: {}".format(response.status_code, message, response.json().get('message', '?')))
def rc_tag_name(version):
return "v{}-rc".format(version)
def get_rc_release(version):
tag = rc_tag_name(version)
response = _github_request('get', 'repos/{}/releases/tags/{}'.format(repo, tag))
if response.status_code == 404:
return None
elif response.status_code == 200:
return response.json()
raise exception_from_error("Unknown error while looking RC release: ", response)
def create_release(version, commit_sha):
tag = rc_tag_name(version)
params = {
'tag_name': tag,
'name': "{} - RC".format(version),
'target_commitish': commit_sha,
'prerelease': True
}
response = _github_request('post', 'repos/{}/releases'.format(repo), params)
if response.status_code != 201:
raise exception_from_error("Failed creating new release", response)
return response.json()
def upload_asset(release, filepath):
upload_url = release['upload_url'].replace('{?name}', '')
filename = filepath.split('/')[-1]
with open(filepath) as file_content:
headers = {'Content-Type': 'application/gzip'}
response = requests.post(upload_url, file_content, params={'name': filename}, headers=headers, auth=auth, verify=False)
if response.status_code != 201: # not 200/201/...
raise exception_from_error('Failed uploading asset', response)
return response
def remove_previous_builds(release):
for asset in release['assets']:
response = _github_request('delete', asset['url'])
if response.status_code != 204:
raise exception_from_error("Failed deleting asset", response)
def get_changelog(commit_sha):
latest_release = _github_request('get', 'repos/{}/releases/latest'.format(repo))
if latest_release.status_code != 200:
raise exception_from_error('Failed getting latest release', latest_release)
latest_release = latest_release.json()
previous_sha = latest_release['target_commitish']
args = ['git', '--no-pager', 'log', '--merges', '--grep', 'Merge pull request', '--pretty=format:"%h|%s|%b|%p"', '{}...{}'.format(previous_sha, commit_sha)]
log = subprocess.check_output(args)
changes = ["Changes since {}:".format(latest_release['name'])]
for line in log.split('\n'):
try:
sha, subject, body, parents = line[1:-1].split('|')
except ValueError:
continue
try:
pull_request = re.match("Merge pull request #(\d+)", subject).groups()[0]
pull_request = " #{}".format(pull_request)
except Exception, ex:
pull_request = ""
author = subprocess.check_output(['git', 'log', '-1', '--pretty=format:"%an"', parents.split(' ')[-1]])[1:-1]
changes.append("{}{}: {} ({})".format(sha, pull_request, body.strip(), author))
return "\n".join(changes)
def update_release_commit_sha(release, commit_sha):
params = {
'target_commitish': commit_sha,
}
response = _github_request('patch', 'repos/{}/releases/{}'.format(repo, release['id']), params)
if response.status_code != 200:
raise exception_from_error("Failed updating commit sha for existing release", response)
return response.json()
def update_release(version, build_filepath, commit_sha):
try:
release = get_rc_release(version)
if release:
release = update_release_commit_sha(release, commit_sha)
else:
release = create_release(version, commit_sha)
print "Using release id: {}".format(release['id'])
remove_previous_builds(release)
response = upload_asset(release, build_filepath)
changelog = get_changelog(commit_sha)
response = _github_request('patch', release['url'], {'body': changelog})
if response.status_code != 200:
raise exception_from_error("Failed updating release description", response)
except Exception, ex:
print ex
if __name__ == '__main__':
commit_sha = sys.argv[1]
version = sys.argv[2]
filepath = sys.argv[3]
# TODO: make sure running from git directory & remote = repo
update_release(version, filepath, commit_sha)


@@ -1,46 +0,0 @@
#!python
import os
import sys
import json
import requests
import subprocess
def capture_output(command):
proc = subprocess.Popen(command, stdout=subprocess.PIPE)
return proc.stdout.read()
if __name__ == '__main__':
version = sys.argv[1]
filepath = sys.argv[2]
filename = filepath.split('/')[-1]
github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
commit_sha = os.environ['CIRCLE_SHA1']
commit_body = capture_output(["git", "log", "--format=%b", "-n", "1", commit_sha])
file_md5_checksum = capture_output(["md5sum", filepath]).split()[0]
file_sha256_checksum = capture_output(["sha256sum", filepath]).split()[0]
version_body = "%s\n\nMD5: %s\nSHA256: %s" % (commit_body, file_md5_checksum, file_sha256_checksum)
params = json.dumps({
'tag_name': 'v{0}'.format(version),
'name': 're:dash v{0}'.format(version),
'body': version_body,
'target_commitish': commit_sha,
'prerelease': True
})
response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
data=params,
auth=auth)
upload_url = response.json()['upload_url']
upload_url = upload_url.replace('{?name}', '')
with open(filepath) as file_content:
headers = {'Content-Type': 'application/gzip'}
response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth,
headers=headers, verify=False)


@@ -7,6 +7,9 @@ machine:
2.7.3
dependencies:
pre:
- wget http://downloads.sourceforge.net/project/optipng/OptiPNG/optipng-0.7.5/optipng-0.7.5.tar.gz
- tar xvf optipng-0.7.5.tar.gz
- cd optipng-0.7.5; ./configure; make; sudo checkinstall -y;
- make deps
- pip install -r dev_requirements.txt
- pip install -r requirements.txt


@@ -2,12 +2,15 @@
"""
CLI to manage redash.
"""
import json
from flask.ext.script import Manager
from redash import settings, models, __version__
from redash.wsgi import app
from redash.import_export import import_manager
from redash.cli import users, database, data_sources
from redash.monitor import get_status
manager = Manager(app)
manager.add_command("database", database.manager)
@@ -21,6 +24,9 @@ def version():
"""Displays re:dash version."""
print __version__
@manager.command
def status():
print json.dumps(get_status(), indent=2)
@manager.command
def runworkers():


@@ -0,0 +1,20 @@
from redash.models import db
if __name__ == '__main__':
db.connect_db()
with db.database.transaction():
# Make sure all data sources names are unique.
db.database.execute_sql("""
UPDATE data_sources
SET name = new_names.name
FROM (
SELECT id, name || ' ' || id as name
FROM (SELECT id, name, rank() OVER (PARTITION BY name ORDER BY created_at ASC) FROM data_sources) ds WHERE rank > 1
) AS new_names
WHERE data_sources.id = new_names.id;
""")
# Add unique constraint on data_sources.name.
db.database.execute_sql("ALTER TABLE data_sources ADD CONSTRAINT unique_name UNIQUE (name);")
db.close_db(None)
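
(To make the UPDATE above concrete: within each group of data sources sharing a name, every row after the oldest gets its id appended. E.g., for three sources all named "prod" with ids 1, 4 and 9:)

id | name before | rank | name after
 1 | prod        | 1    | prod      (oldest, unchanged)
 4 | prod        | 2    | prod 4
 9 | prod        | 3    | prod 9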


@@ -0,0 +1,27 @@
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models
if __name__ == '__main__':
db.connect_db()
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
column = models.User.api_key
column.null = True
migrate(
migrator.add_column('users', 'api_key', models.User.api_key),
)
for user in models.User.select():
user.save()
migrate(
migrator.add_not_null('users', 'api_key')
)
db.close_db(None)

Binary files not shown (four new image files added; 1.3 KiB, 2.0 KiB, 3.8 KiB and 6.0 KiB).


@@ -19,8 +19,14 @@
<link rel="stylesheet" href="/bower_components/pace/themes/pace-theme-minimal.css">
<link rel="stylesheet" href="/bower_components/font-awesome/css/font-awesome.css">
<link rel="stylesheet" href="/bower_components/codemirror/addon/hint/show-hint.css">
<link rel="stylesheet" href="/bower_components/leaflet/dist/leaflet.css">
<link rel="stylesheet" href="/styles/redash.css">
<!-- endbuild -->
<link rel="icon" type="image/png" sizes="32x32" href="/images/favicon-32x32.png">
<link rel="icon" type="image/png" sizes="96x96" href="/images/favicon-96x96.png">
<link rel="icon" type="image/png" sizes="16x16" href="/images/favicon-16x16.png">
</head>
<body>
<div growl></div>
@@ -34,7 +40,7 @@
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="/"><strong>{{name}}</strong></a>
<a class="navbar-brand" href="/"><img src="/images/redash_icon_small.png"/></a>
</div>
{% raw %}
<div class="collapse navbar-collapse navbar-ex1-collapse">
@@ -109,6 +115,7 @@
<script src="/bower_components/codemirror/addon/hint/show-hint.js"></script>
<script src="/bower_components/codemirror/addon/hint/anyword-hint.js"></script>
<script src="/bower_components/codemirror/mode/sql/sql.js"></script>
<script src="/bower_components/codemirror/mode/python/python.js"></script>
<script src="/bower_components/codemirror/mode/javascript/javascript.js"></script>
<script src="/bower_components/highcharts/highcharts.js"></script>
<script src="/bower_components/highcharts/modules/exporting.js"></script>
@@ -131,7 +138,8 @@
<script src="/bower_components/mustache/mustache.js"></script>
<script src="/bower_components/canvg/rgbcolor.js"></script>
<script src="/bower_components/canvg/StackBlur.js"></script>
<script src="/bower_components/canvg/canvg.js"></script>
<script src="/bower_components/canvg/canvg.js"></script>
<script src="/bower_components/leaflet/dist/leaflet.js"></script>
<!-- endbuild -->
<!-- build:js({.tmp,app}) /scripts/scripts.js -->
@@ -148,6 +156,7 @@
<script src="/scripts/visualizations/base.js"></script>
<script src="/scripts/visualizations/chart.js"></script>
<script src="/scripts/visualizations/cohort.js"></script>
<script src="/scripts/visualizations/map.js"></script>
<script src="/scripts/visualizations/counter.js"></script>
<script src="/scripts/visualizations/table.js"></script>
<script src="/scripts/visualizations/pivot.js"></script>


@@ -13,6 +13,10 @@
<link rel="stylesheet" href="/styles/redash.css">
<link rel="stylesheet" href="/styles/login.css">
<!-- endbuild -->
<link rel="icon" type="image/png" sizes="32x32" href="/images/favicon-32x32.png">
<link rel="icon" type="image/png" sizes="96x96" href="/images/favicon-96x96.png">
<link rel="icon" type="image/png" sizes="16x16" href="/images/favicon-16x16.png">
</head>
<body>
@@ -26,13 +30,20 @@
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="/"><strong>{{name}}</strong></a>
<a class="navbar-brand" href="/"><img src="/images/redash_icon_small.png"/></a>
</div>
</div>
</nav>
<div class="container">
<div class="row">
{% with messages = get_flashed_messages() %}
{% if messages %}
{% for message in messages %}
<div class="alert alert-warning" role="alert">{{ message }}</div>
{% endfor %}
{% endif %}
{% endwith %}
<div class="main">
{% if show_google_openid %}
@@ -48,6 +59,19 @@
{% endif %}
{% if show_saml_login %}
<div class="row">
<a href="/saml/login">SAML Login</a>
</div>
<div class="login-or">
<hr class="hr-or">
<span class="span-or">or</span>
</div>
{% endif %}
<form role="form" method="post" name="login">
<div class="form-group">
<label for="inputUsernameEmail">Username or email</label>


@@ -15,9 +15,11 @@
maxAge = -1;
}
$scope.showLog = false;
$scope.queryResult = $scope.query.getQueryResult(maxAge, parameters);
}
$scope.dataSource = {};
$scope.query = $route.current.locals.query;
var updateSchema = function() {
@@ -50,11 +52,13 @@
$scope.dataSources = DataSource.get(function(dataSources) {
updateSchema();
$scope.query.data_source_id = $scope.query.data_source_id || dataSources[0].id;
$scope.dataSource = _.find(dataSources, function(ds) { return ds.id == $scope.query.data_source_id; });
});
// in view mode, latest dataset is always visible
// source mode changes this behavior
$scope.showDataset = true;
$scope.showLog = false;
$scope.lockButton = function(lock) {
$scope.queryExecuting = lock;
@@ -108,21 +112,21 @@
$scope.queryResult.cancelExecution();
Events.record(currentUser, 'cancel_execute', 'query', $scope.query.id);
};
$scope.archiveQuery = function(options, data) {
if (data) {
data.id = $scope.query.id;
} else {
data = $scope.query;
}
$scope.isDirty = false;
options = _.extend({}, {
successMessage: 'Query archived',
errorMessage: 'Query could not be archived'
}, options);
return Query.delete({id: data.id}, function() {
$scope.query.is_archived = true;
$scope.query.schedule = null;
@@ -149,6 +153,7 @@
}
updateSchema();
$scope.dataSource = _.find($scope.dataSources, function(ds) { return ds.id == $scope.query.data_source_id; });
$scope.executeQuery();
};
@@ -194,6 +199,10 @@
if (status === 'done' || status === 'failed') {
$scope.lockButton(false);
}
if ($scope.queryResult.getLog() != null) {
$scope.showLog = true;
}
});
$scope.openScheduleForm = function() {


@@ -8,7 +8,7 @@
'query': '=',
'visualization': '=?'
},
template: '<a ng-href="{{link}}" class="query-link">{{query.name}}</a>',
template: '<small><span class="glyphicon glyphicon-link"></span></small> <a ng-href="{{link}}" class="query-link">{{query.name}}</a>',
link: function(scope, element) {
scope.link = '/queries/' + scope.query.id;
if (scope.visualization) {
@@ -64,14 +64,23 @@
scope: {
'query': '=',
'lock': '=',
'schema': '='
'schema': '=',
'syntax': '='
},
template: '<textarea></textarea>',
link: {
pre: function ($scope, element) {
$scope.syntax = $scope.syntax || 'sql';
var modes = {
'sql': 'text/x-sql',
'python': 'text/x-python',
'json': 'application/json'
};
var textarea = element.children()[0];
var editorOptions = {
mode: 'text/x-sql',
mode: modes[$scope.syntax],
lineWrapping: true,
lineNumbers: true,
readOnly: false,
@@ -108,6 +117,8 @@
$scope.query.query = newValue;
});
}
$('.schema-container').css('height', $('.CodeMirror').css('height'));
});
$scope.$watch('query.query', function () {
@@ -128,6 +139,12 @@
additionalHints = _.unique(keywords);
}
codemirror.refresh();
});
$scope.$watch('syntax', function(syntax) {
codemirror.setOption('mode', modes[syntax]);
});
$scope.$watch('lock', function (locked) {
@@ -224,7 +241,14 @@
value: "60",
name: 'Every minute'
}
]
];
_.each([5, 10, 15, 30], function(i) {
$scope.refreshOptions.push({
value: String(i*60),
name: "Every " + i + " minutes"
})
});
_.each(_.range(1, 13), function (i) {
$scope.refreshOptions.push({


@@ -308,21 +308,22 @@
// We check either for true or undefined for backward compatibility.
var series = scope.series;
if (chartOptions['sortX'] === true || chartOptions['sortX'] === undefined) {
var seriesCopy = [];
_.each(series, function (s) {
// make a copy of series data, so we don't override original.
var fieldName = 'x';
if (s.data.length > 0 && _.has(s.data[0], 'name')) {
fieldName = 'name';
};
// If this is a chart that has just one row for multiple columns, sort
// by the Y values. For example:
//
// A | B | C
// 20 | 30 | 15
//
// Will be sorted:
// C | A | B
// 15 | 20 | 30
var sortable = _.every(series, function(s) { return s.data.length == 1 });
var sorted = _.extend({}, s, {data: _.sortBy(s.data, fieldName)});
seriesCopy.push(sorted);
if (sortable) {
series = _.sortBy(series, function (s) {
return s.data[0].y
});
series = seriesCopy;
}
if (!('xAxis' in chartOptions && 'type' in chartOptions['xAxis'])) {
@@ -359,6 +360,23 @@
});
}
}
if (chartOptions['sortX'] === true || chartOptions['sortX'] === undefined) {
var seriesCopy = [];
_.each(series, function (s) {
// make a copy of series data, so we don't override original.
var fieldName = 'x';
if (s.data.length > 0 && _.has(s.data[0], 'name')) {
fieldName = 'name';
};
var sorted = _.extend({}, s, {data: _.sortBy(s.data, fieldName)});
seriesCopy.push(sorted);
});
series = seriesCopy;
}
scope.chart.counters.color = 0;


@@ -381,7 +381,10 @@
function sortDataRow(array, column) {
var sortAlgo = (scope.sortAlgorithm && angular.isFunction(scope.sortAlgorithm)) === true ? scope.sortAlgorithm : filter('orderBy');
if (column) {
return arrayUtility.sort(array, sortAlgo, column.sortPredicate, column.reverse);
var predicate = function(o) {
return o[column.sortPredicate];
};
return arrayUtility.sort(array, sortAlgo, predicate, column.reverse);
} else {
return array;
}


@@ -12,6 +12,8 @@
var columnTypes = {};
// TODO: we should stop manipulating incoming data, and switch to relying on the column type set by the backend.
// This logic is prone to errors, and better be removed. Kept for now, for backward compatibility.
_.each(this.query_result.data.rows, function (row) {
_.each(row, function (v, k) {
if (angular.isNumber(v)) {
@@ -30,7 +32,9 @@
_.each(this.query_result.data.columns, function(column) {
if (columnTypes[column.name]) {
column.type = columnTypes[column.name];
if (column.type == null || column.type == 'string') {
column.type = columnTypes[column.name];
}
}
});
@@ -91,6 +95,14 @@
return this.job.error;
}
QueryResult.prototype.getLog = function() {
if (!this.query_result.data || !this.query_result.data.log || this.query_result.data.log.length == 0) {
return null;
}
return this.query_result.data.log;
}
QueryResult.prototype.getUpdatedAt = function () {
return this.query_result.retrieved_at || this.job.updated_at * 1000.0 || this.updatedAt;
}
@@ -338,10 +350,15 @@
return this.deferred.promise;
}
QueryResult.get = function (data_source_id, query, maxAge) {
QueryResult.get = function (data_source_id, query, maxAge, queryId) {
var queryResult = new QueryResult();
QueryResultResource.post({'data_source_id': data_source_id, 'query': query, 'max_age': maxAge}, function (response) {
var params = {'data_source_id': data_source_id, 'query': query, 'max_age': maxAge};
if (queryId !== undefined) {
params['query_id'] = queryId;
};
QueryResultResource.post(params, function (response) {
queryResult.update(response);
if ('job' in response) {
@@ -444,7 +461,7 @@
this.queryResult = QueryResult.getById(this.latest_query_data_id);
}
} else if (this.data_source_id) {
this.queryResult = QueryResult.get(this.data_source_id, queryText, maxAge);
this.queryResult = QueryResult.get(this.data_source_id, queryText, maxAge, this.id);
}
return this.queryResult;


@@ -55,6 +55,22 @@
}];
};
var VisualizationName = function(Visualization) {
return {
restrict: 'E',
scope: {
visualization: '='
},
template: '<small>{{name}}</small>',
replace: false,
link: function (scope) {
if (Visualization.visualizations[scope.visualization.type].name != scope.visualization.name) {
scope.name = scope.visualization.name;
}
}
}
}
var VisualizationRenderer = function ($location, Visualization) {
return {
restrict: 'E',
@@ -72,42 +88,9 @@
width: '50%'
};
function readURL() {
var searchFilters = angular.fromJson($location.search().filters);
if (searchFilters) {
_.forEach(scope.filters, function(filter) {
var value = searchFilters[filter.friendlyName];
if (value) {
filter.current = value;
}
});
}
}
function updateURL(filters) {
var current = {};
_.each(filters, function(filter) {
if (filter.current) {
current[filter.friendlyName] = filter.current;
}
});
var newSearch = angular.extend($location.search(), {
filters: angular.toJson(current)
});
$location.search(newSearch);
}
scope.$watch('queryResult && queryResult.getFilters()', function (filters) {
if (filters) {
scope.filters = filters;
if (filters.length && false) {
readURL();
// start watching for changes and update URL
scope.$watch('filters', updateURL, true);
}
}
});
}
@@ -138,7 +121,7 @@
query: '=',
queryResult: '=',
visualization: '=?',
openEditor: '=?',
openEditor: '@',
onNewSuccess: '=?'
},
link: function (scope, element, attrs) {
@@ -167,9 +150,13 @@
scope.$watch('visualization.type', function (type, oldType) {
// if not edited by user, set name to match type
if (type && oldType != type && scope.visualization && !scope.visForm.name.$dirty) {
// poor man's titlecase
scope.visualization.name = scope.visualization.type[0] + scope.visualization.type.slice(1).toLowerCase();
scope.visualization.name = _.string.titleize(scope.visualization.type);
}
if (type && oldType != type && scope.visualization) {
scope.visualization.options = Visualization.visualizations[scope.visualization.type].defaultOptions;
}
});
scope.submit = function () {
@@ -208,6 +195,7 @@
.provider('Visualization', VisualizationProvider)
.directive('visualizationRenderer', ['$location', 'Visualization', VisualizationRenderer])
.directive('visualizationOptionsEditor', ['Visualization', VisualizationOptionsEditor])
.directive('visualizationName', ['Visualization', VisualizationName])
.directive('filters', Filters)
.directive('editVisulatizationForm', ['Events', 'Visualization', 'growl', EditVisualizationForm])
})();


@@ -112,9 +112,6 @@
scope.columnTypes = {
"X": "x",
// "X (Date time)": "x",
// "X (Linear)": "x-linear",
// "X (Category)": "x-category",
"Y": "y",
"Series": "series",
"Unused": "unused"
@@ -166,7 +163,7 @@
scope.visualization.options.seriesOptions[s] = {'type': scope.visualization.options.globalSeriesType, 'yAxis': 0};
}
scope.visualization.options.seriesOptions[s].zIndex = scope.visualization.options.seriesOptions[s].zIndex === undefined ? i : scope.visualization.options.seriesOptions[s].zIndex;
scope.visualization.options.seriesOptions[s].index = scope.visualization.options.seriesOptions[s].index === undefined ? i : scope.visualization.options.seriesOptions[s].index;
});
scope.zIndexes = _.range(scope.series.length);
scope.yAxes = [[0, 'left'], [1, 'right']];
@@ -227,6 +224,12 @@
}
});
scope.visualization.options.xAxis = scope.visualization.options.xAxis || {};
scope.visualization.options.xAxis.labels = scope.visualization.options.xAxis.labels || {};
if (scope.visualization.options.xAxis.labels.enabled === undefined) {
scope.visualization.options.xAxis.labels.enabled = true;
}
scope.xAxisType = (scope.visualization.options.xAxis && scope.visualization.options.xAxis.type) || scope.xAxisType;
xAxisUnwatch = scope.$watch("xAxisType", function (xAxisType) {


@@ -0,0 +1,238 @@
'use strict';
(function() {
var module = angular.module('redash.visualization');
module.config(['VisualizationProvider', function(VisualizationProvider) {
var renderTemplate =
'<map-renderer ' +
'options="visualization.options" query-result="queryResult">' +
'</map-renderer>';
var editTemplate = '<map-editor></map-editor>';
var defaultOptions = {
'height': 500,
'draw': 'Marker',
'classify':'none'
};
VisualizationProvider.registerVisualization({
type: 'MAP',
name: 'Map',
renderTemplate: renderTemplate,
editorTemplate: editTemplate,
defaultOptions: defaultOptions
});
}
]);
module.directive('mapRenderer', function() {
return {
restrict: 'E',
templateUrl: '/views/visualizations/map.html',
link: function($scope, elm, attrs) {
var setBounds = function(){
var b = $scope.visualization.options.bounds;
if(b){
$scope.map.fitBounds([[b._southWest.lat, b._southWest.lng],[b._northEast.lat, b._northEast.lng]]);
} else if ($scope.features.length > 0){
var group= new L.featureGroup($scope.features);
$scope.map.fitBounds(group.getBounds());
}
};
$scope.$watch('[queryResult && queryResult.getData(), visualization.options.draw,visualization.options.latColName,'+
'visualization.options.lonColName,visualization.options.classify,visualization.options.classify]',
function() {
var marker = function(lat,lon){
if (lat == null || lon == null) return;
return L.marker([lat, lon]);
};
var heatpoint = function(lat,lon,obj){
if (lat == null || lon == null) return;
var color = 'red';
if (obj &&
obj[$scope.visualization.options.classify] &&
$scope.visualization.options.classification){
var v = $.grep($scope.visualization.options.classification,function(e){
return e.value == obj[$scope.visualization.options.classify];
});
if (v.length >0) color = v[0].color;
}
var style = {
fillColor:color,
fillOpacity:0.5,
stroke:false
};
return L.circleMarker([lat,lon],style)
};
var color = function(val){
// taken from http://jsfiddle.net/xgJ2e/2/
var h= Math.floor((100 - val) * 120 / 100);
var s = Math.abs(val - 50)/50;
var v = 1;
var rgb, i, data = [];
if (s === 0) {
rgb = [v,v,v];
} else {
h = h / 60;
i = Math.floor(h);
data = [v*(1-s), v*(1-s*(h-i)), v*(1-s*(1-(h-i)))];
switch(i) {
case 0:
rgb = [v, data[2], data[0]];
break;
case 1:
rgb = [data[1], v, data[0]];
break;
case 2:
rgb = [data[0], v, data[2]];
break;
case 3:
rgb = [data[0], data[1], v];
break;
case 4:
rgb = [data[2], data[0], v];
break;
default:
rgb = [v, data[0], data[1]];
break;
}
}
return '#' + rgb.map(function(x){
return ("0" + Math.round(x*255).toString(16)).slice(-2);
}).join('');
};
// Following line is used to avoid "Couldn't autodetect L.Icon.Default.imagePath" error
// https://github.com/Leaflet/Leaflet/issues/766#issuecomment-7741039
L.Icon.Default.imagePath = L.Icon.Default.imagePath || "//api.tiles.mapbox.com/mapbox.js/v2.2.1/images";
function getBounds(e) {
$scope.visualization.options.bounds = $scope.map.getBounds();
}
var queryData = $scope.queryResult.getData();
var classify = $scope.visualization.options.classify;
if (queryData) {
$scope.visualization.options.classification = [];
for (var row in queryData) {
if (queryData[row][classify] &&
$.grep($scope.visualization.options.classification, function (e) {
return e.value == queryData[row][classify]
}).length == 0) {
$scope.visualization.options.classification.push({value: queryData[row][classify], color: null});
}
}
$.each($scope.visualization.options.classification, function (i, c) {
c.color = color(parseInt((i / $scope.visualization.options.classification.length) * 100));
});
if (!$scope.map) {
$scope.map = L.map(elm[0].children[0].children[0])
}
L.tileLayer('//{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
attribution: '&copy; <a href="http://osm.org/copyright">OpenStreetMap</a> contributors'
}).addTo($scope.map);
$scope.features = $scope.features || [];
var tmp_features = [];
var lat_col = $scope.visualization.options.latColName || 'lat';
var lon_col = $scope.visualization.options.lonColName || 'lon';
for (var row in queryData) {
var feature;
if ($scope.visualization.options.draw == 'Marker') {
feature = marker(queryData[row][lat_col], queryData[row][lon_col])
} else if ($scope.visualization.options.draw == 'Color') {
feature = heatpoint(queryData[row][lat_col], queryData[row][lon_col], queryData[row])
}
if (!feature) continue;
var obj_description = '<ul style="list-style-type: none;padding-left: 0">';
for (var k in queryData[row]){
obj_description += "<li>" + k + ": " + queryData[row][k] + "</li>";
}
obj_description += '</ul>';
feature.bindPopup(obj_description);
tmp_features.push(feature);
}
$.each($scope.features, function (i, f) {
$scope.map.removeLayer(f);
});
$scope.features = tmp_features;
$.each($scope.features, function (i, f) {
f.addTo($scope.map)
});
setBounds();
$scope.map.on('focus',function(){
$scope.map.on('moveend', getBounds);
});
$scope.map.on('blur',function(){
$scope.map.off('moveend', getBounds);
});
// We redraw the map if it was loaded in a hidden tab
if ($('a[href="#'+$scope.visualization.id+'"]').length > 0) {
$('a[href="#'+$scope.visualization.id+'"]').on('click', function () {
setTimeout(function() {
$scope.map.invalidateSize(false);
setBounds();
},500);
});
}
}
}, true);
$scope.$watch('visualization.options.height', function() {
if (!$scope.map) return;
$scope.map.invalidateSize(false);
setBounds();
});
}
}
});
module.directive('mapEditor', function() {
return {
restrict: 'E',
templateUrl: '/views/visualizations/map_editor.html',
link: function($scope, elm, attrs) {
$scope.draw_options = ['Marker','Color'];
$scope.classify_columns = $scope.queryResult.columnNames.concat('none');
}
}
});
})();


@@ -14,7 +14,12 @@ a.page-title {
}
a.navbar-brand {
font-style: italic;
padding: 5px 5px 0px 0px;
margin-left: 0px !important;
}
a.navbar-brand img {
height: 40px;
}
.graph {
@@ -92,7 +97,7 @@ a.navbar-brand {
}
.panel-heading .query-link:hover {
text-decoration: none;
text-decoration: underline;
}
/* angular-growl */
@@ -156,7 +161,7 @@ li.widget:hover {
/* CodeMirror */
.CodeMirror {
border: 1px solid #eee;
/*height: auto;*/
height: auto;
min-height: 300px;
margin-bottom: 10px;
}
@@ -308,13 +313,18 @@ counter-renderer counter-name {
height: 100%;
}
.schema-browser {
.schema-container {
height: 300px;
overflow: scroll;
}
.schema-browser {
height: 100%;
overflow-y: auto;
overflow-x: hidden;
}
div.table-name {
overflow: scroll;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
cursor: pointer;
@@ -329,3 +339,7 @@ use this class when you need to keep the original display value
display: none !important;
}
}
.log-container {
margin-bottom: 50px;
}


@@ -28,6 +28,7 @@
<p>
<span ng-hide="currentUser.hasPermission('view_query')">{{query.name}}</span>
<query-link query="query" visualization="widget.visualization" ng-show="currentUser.hasPermission('view_query')"></query-link>
<visualization-name visualization="widget.visualization"/>
</p>
<div class="text-muted" ng-bind-html="query.description | markdown"></div>
</h3>


@@ -79,21 +79,21 @@
</p>
<p>
<query-editor query="query" schema="schema" lock="queryFormatting"></query-editor>
<query-editor query="query" schema="schema" syntax="dataSource.syntax" lock="queryFormatting"></query-editor>
</p>
</div>
</div>
<div class="col-md-3" ng-show="hasSchema">
<div>
<div class="col-md-3 schema-container" ng-show="hasSchema">
<div ng-show="schema.length < 200">
<input type="text" placeholder="Search schema..." class="form-control" ng-model="schemaFilter">
</div>
<div class="schema-browser">
<div ng-repeat="table in schema | filter:schemaFilter">
<div ng-repeat="table in schema | filter:schemaFilter track by table.name">
<div class="table-name" ng-click="table.collapsed = !table.collapsed">
<i class="fa fa-table"></i> <strong><span title="{{table.name}}">{{table.name}}</span></strong>
</div>
<div collapse="table.collapsed">
<div ng-repeat="column in table.columns | filter:schemaFilter" style="padding-left:16px;">{{column}}</div>
<div collapse="table.collapsed && !schemaFilter">
<div ng-repeat="column in table.columns track by column" style="padding-left:16px;">{{column}}</div>
</div>
</div>
</div>
@@ -192,6 +192,16 @@
</div>
<div class="alert alert-danger" ng-show="queryResult.getError()">Error running query: <strong>{{queryResult.getError()}}</strong></div>
<div class="row log-container" ng-show="showLog">
<span ng-show="showLog">Log Information:</span>
<table>
<tbody>
<tr ng-repeat="l in queryResult.getLog()">
<td>{{l}}</td>
</tr>
</tbody>
</table>
</div>
<!-- tabs and data -->
<div ng-show="showDataset">
<div class="row">


@@ -54,6 +54,14 @@
ng-model="visualization.options.sortX">
</div>
</div>
<div class="form-group">
<label class="control-label col-sm-2">Show X Axis Labels</label>
<div class="col-sm-10">
<input name="sortX" type="checkbox" class="form-control"
ng-model="visualization.options.xAxis.labels.enabled">
</div>
</div>
</div>
</div>
@@ -100,6 +108,15 @@
class="form-control"></select>
</div>
</div>
<div class="form-group">
<label class="control-label col-sm-3">Index</label>
<div class="col-sm-9">
<select required ng-model="visualization.options.seriesOptions[seriesName].index"
ng-options="o as o for o in zIndexes"
class="form-control"></select>
</div>
</div>
<div class="form-group">
<label class="control-label col-sm-3">y Axis</label>


@@ -1,7 +1,7 @@
<div>
<span ng-click="openEditor=!openEditor" class="details-toggle" ng-class="{open: openEditor}">Edit</span>
<form ng-if="openEditor" role="form" name="visForm" ng-submit="submit()">
<form ng-show="openEditor" role="form" name="visForm" ng-submit="submit()">
<div class="form-group">
<label class="control-label">Name</label>
<input name="name" type="text" class="form-control" ng-model="visualization.name" placeholder="{{visualization.type | capitalize}}">


@@ -0,0 +1,3 @@
<div style='margin:1%;width:98%;height:{{visualization.options.height}}px'>
<div style="width:100%; height:100%;"></div>
</div>


@@ -0,0 +1,55 @@
<div class="form-horizontal">
<div class="form-group">
<label class="col-lg-2">Map height (px)</label>
<div class="col-sm-4">
<input class="form-control" type="number" ng-model = "visualization.options.height" />
</div>
</div>
<div class="form-group">
<label class="col-lg-2">Draw option</label>
<div class="col-sm-4">
<select ng-options="opt for opt in draw_options" ng-model="visualization.options.draw" class="form-control"></select>
</div>
</div>
<div class="form-group">
<label class="col-lg-2">Latitude column name</label>
<div class="col-sm-4">
<select ng-options="name for name in queryResult.columnNames" ng-model="visualization.options.latColName" class="form-control"></select>
</div>
</div>
<div class="form-group">
<label class="col-lg-2">Longitude column name</label>
<div class="col-sm-4">
<select ng-options="name for name in queryResult.columnNames" ng-model="visualization.options.lonColName" class="form-control"></select>
</div>
</div>
<div ng-show = "visualization.options.draw == 'Color'">
<div class="form-group">
<label class="col-lg-2">Classify by column</label>
<div class="col-sm-4">
<select ng-options="name for name in classify_columns" ng-model="visualization.options.classify" class="form-control"></select>
</div>
</div>
<div class="row" >
<div class="col-lg-6">
<div ng-repeat="element in visualization.options.classification" class="list-group">
<div class="list-group-item active">
{{element.value}}
</div>
<div class="list-group-item">
<div class="form-group">
<label class="col-lg-4">Color</label>
<div class="col-sm-4">
<input class="form-control" style="background-color:{{element.color}};" type="text" ng-model = "element.color" />
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>

View File

@@ -29,7 +29,8 @@
"font-awesome": "~4.2.0",
"mustache": "~1.0.0",
"canvg": "gabelerner/canvg",
"angular-ui-bootstrap-bower": "~0.12.1"
"angular-ui-bootstrap-bower": "~0.12.1",
"leaflet":"~0.7.3"
},
"devDependencies": {
"angular-mocks": "1.2.18",

BIN rd_ui/favicon.ico Executable file (binary file not shown; 1.1 KiB)

View File

@@ -6,7 +6,7 @@ from statsd import StatsClient
from redash import settings
from redash.query_runner import import_query_runners
__version__ = '0.6.0'
__version__ = '0.6.4'
def setup_logging():

redash/admin.py Normal file (116 lines)
View File

@@ -0,0 +1,116 @@
import json
from flask_admin.contrib.peewee import ModelView
from flask.ext.admin import Admin
from flask_admin.contrib.peewee.form import CustomModelConverter
from flask_admin.form.widgets import DateTimePickerWidget
from playhouse.postgres_ext import ArrayField, DateTimeTZField
from wtforms import fields
from wtforms.widgets import TextInput
from redash import models
from redash import query_runner
from redash.permissions import require_permission
class ArrayListField(fields.Field):
widget = TextInput()
def _value(self):
if self.data:
return u', '.join(self.data)
else:
return u''
def process_formdata(self, valuelist):
if valuelist:
self.data = [x.strip() for x in valuelist[0].split(',')]
else:
self.data = []
class JSONTextAreaField(fields.TextAreaField):
def process_formdata(self, valuelist):
if valuelist:
try:
json.loads(valuelist[0])
except ValueError:
raise ValueError(self.gettext(u'Invalid JSON'))
self.data = valuelist[0]
else:
self.data = ''
class PasswordHashField(fields.PasswordField):
def _value(self):
return u''
def process_formdata(self, valuelist):
if valuelist:
self.data = models.pwd_context.encrypt(valuelist[0])
else:
self.data = u''
class PgModelConverter(CustomModelConverter):
def __init__(self, view, additional=None):
additional = {ArrayField: self.handle_array_field,
DateTimeTZField: self.handle_datetime_tz_field}
super(PgModelConverter, self).__init__(view, additional)
self.view = view
def handle_array_field(self, model, field, **kwargs):
return field.name, ArrayListField(**kwargs)
def handle_datetime_tz_field(self, model, field, **kwargs):
kwargs['widget'] = DateTimePickerWidget()
return field.name, fields.DateTimeField(**kwargs)
class BaseModelView(ModelView):
model_form_converter = PgModelConverter
@require_permission('admin')
def is_accessible(self):
return True
class UserModelView(BaseModelView):
column_searchable_list = ('name', 'email')
form_excluded_columns = ('created_at', 'updated_at')
column_exclude_list = ('password_hash',)
form_overrides = dict(password_hash=PasswordHashField)
form_args = {
'password_hash': {'label': 'Password'}
}
def query_runner_type_formatter(view, context, model, name):
qr = query_runner.query_runners.get(model.type, None)
if qr:
return qr.name()
return model.type
class DataSourceModelView(BaseModelView):
form_overrides = dict(type=fields.SelectField, options=JSONTextAreaField)
form_args = dict(type={
'choices': [(k, r.name()) for k, r in query_runner.query_runners.iteritems()]
})
column_formatters = dict(type=query_runner_type_formatter)
column_filters = ('type',)
def init_admin(app):
admin = Admin(app, name='re:dash admin')
views = {
models.User: UserModelView(models.User),
models.DataSource: DataSourceModelView(models.DataSource)
}
for m in models.all_models:
if m in views:
admin.add_view(views[m])
else:
admin.add_view(BaseModelView(m))

View File

@@ -1,13 +1,11 @@
import functools
import hashlib
import hmac
import time
import logging
from flask import request, make_response, redirect, url_for
from flask.ext.login import LoginManager, login_user, current_user, logout_user
from flask.ext.login import LoginManager
from redash import models, settings, google_oauth
from redash import models, settings, google_oauth, saml_auth
login_manager = LoginManager()
logger = logging.getLogger('authentication')
@@ -23,77 +21,72 @@ def sign(key, path, expires):
return h.hexdigest()
class Authentication(object):
def verify_authentication(self):
return False
def required(self, fn):
@functools.wraps(fn)
def decorated(*args, **kwargs):
if current_user.is_authenticated() or self.verify_authentication():
return fn(*args, **kwargs)
return make_response(redirect(url_for("login", next=request.url)))
return decorated
@login_manager.user_loader
def load_user(user_id):
return models.User.get_by_id(user_id)
class ApiKeyAuthentication(Authentication):
def verify_authentication(self):
api_key = request.args.get('api_key')
query_id = request.view_args.get('query_id', None)
def hmac_load_user_from_request(request):
signature = request.args.get('signature')
expires = float(request.args.get('expires') or 0)
query_id = request.view_args.get('query_id', None)
user_id = request.args.get('user_id', None)
if query_id and api_key:
query = models.Query.get(models.Query.id == query_id)
# TODO: 3600 should be a setting
if signature and time.time() < expires <= time.time() + 3600:
if user_id:
user = models.User.get_by_id(user_id)
calculated_signature = sign(user.api_key, request.path, expires)
if query.api_key and api_key == query.api_key:
login_user(models.ApiUser(query.api_key), remember=False)
return True
if user.api_key and signature == calculated_signature:
return user
return False
class HMACAuthentication(Authentication):
def verify_authentication(self):
signature = request.args.get('signature')
expires = float(request.args.get('expires') or 0)
query_id = request.view_args.get('query_id', None)
# TODO: 3600 should be a setting
if signature and query_id and time.time() < expires <= time.time() + 3600:
if query_id:
query = models.Query.get(models.Query.id == query_id)
calculated_signature = sign(query.api_key, request.path, expires)
if query.api_key and signature == calculated_signature:
login_user(models.ApiUser(query.api_key), remember=False)
return True
return models.ApiUser(query.api_key)
return False
return None
@login_manager.user_loader
def load_user(user_id):
# If the user was previously logged in as an API user, the user_id will be the API key, which raises an
# exception as it can't be cast to an int.
if isinstance(user_id, basestring) and not user_id.isdigit():
def get_user_from_api_key(api_key, query_id):
if not api_key:
return None
return models.User.select().where(models.User.id == user_id).first()
user = None
try:
user = models.User.get_by_api_key(api_key)
except models.User.DoesNotExist:
if query_id:
query = models.Query.get_by_id(query_id)
if query and query.api_key == api_key:
user = models.ApiUser(api_key)
return user
def api_key_load_user_from_request(request):
api_key = request.args.get('api_key', None)
query_id = request.view_args.get('query_id', None)
user = get_user_from_api_key(api_key, query_id)
return user
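# For illustration (path and ids assumed): with this loader active, both requests
# authenticate -- the first against a user's personal API key, the second against
# the query's own key:
#   GET /api/queries/42/results.json?api_key=<user api key>
#   GET /api/queries/42/results.json?api_key=<query api key>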
def setup_authentication(app):
login_manager.init_app(app)
login_manager.anonymous_user = models.AnonymousUser
login_manager.login_view = 'login'
app.secret_key = settings.COOKIE_SECRET
app.register_blueprint(google_oauth.blueprint)
app.register_blueprint(saml_auth.blueprint)
if settings.AUTH_TYPE == 'hmac':
auth = HMACAuthentication()
login_manager.request_loader(hmac_load_user_from_request)
elif settings.AUTH_TYPE == 'api_key':
auth = ApiKeyAuthentication()
login_manager.request_loader(api_key_load_user_from_request)
else:
logger.warning("Unknown authentication type ({}). Using default (HMAC).".format(settings.AUTH_TYPE))
auth = HMACAuthentication()
login_manager.request_loader(hmac_load_user_from_request)
return auth
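A minimal sketch (path and values assumed) of building a pre-signed URL that hmac_load_user_from_request above will accept; it mirrors the server-side check signature == sign(user.api_key, request.path, expires):
import time
# hypothetical query path and user object, for illustration only
path = '/api/queries/42/results.json'
expires = time.time() + 1800  # must fall inside the 3600-second window checked above
signature = sign(user.api_key, path, expires)
url = '{0}?signature={1}&expires={2}&user_id={3}'.format(path, signature, expires, user.id)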

View File

@@ -12,17 +12,18 @@ import time
import logging
from flask import render_template, send_from_directory, make_response, request, jsonify, redirect, \
session, url_for
session, url_for, current_app, flash
from flask.ext.restful import Resource, abort
from flask_login import current_user, login_user, logout_user
from flask_login import current_user, login_user, logout_user, login_required
import sqlparse
from redash import redis_connection, statsd_client, models, settings, utils, __version__
from redash.wsgi import app, auth, api
from redash import statsd_client, models, settings, utils
from redash.wsgi import app, api
from redash.tasks import QueryTask, record_event
from redash.cache import headers as cache_headers
from redash.permissions import require_permission
from redash.query_runner import query_runners, validate_configuration
from redash.monitor import get_status
@app.route('/ping', methods=['GET'])
@@ -37,7 +38,7 @@ def ping():
@app.route('/queries/<query_id>/<anything>')
@app.route('/personal')
@app.route('/')
@auth.required
@login_required
def index(**kwargs):
email_md5 = hashlib.md5(current_user.email.lower()).hexdigest()
gravatar_url = "https://www.gravatar.com/avatar/%s?s=40" % email_md5
@@ -66,22 +67,28 @@ def login():
return redirect(request.args.get('next') or '/')
if not settings.PASSWORD_LOGIN_ENABLED:
return redirect(url_for("google_oauth.authorize", next=request.args.get('next')))
if settings.SAML_LOGIN_ENABLED:
return redirect(url_for("saml_auth.sp_initiated", next=request.args.get('next')))
else:
return redirect(url_for("google_oauth.authorize", next=request.args.get('next')))
if request.method == 'POST':
user = models.User.select().where(models.User.email == request.form['username']).first()
if user and user.verify_password(request.form['password']):
remember = ('remember' in request.form)
login_user(user, remember=remember)
return redirect(request.args.get('next') or '/')
try:
user = models.User.get_by_email(request.form['username'])
if user and user.verify_password(request.form['password']):
remember = ('remember' in request.form)
login_user(user, remember=remember)
return redirect(request.args.get('next') or '/')
except models.User.DoesNotExist:
flash("Wrong username or password.")
return render_template("login.html",
name=settings.NAME,
analytics=settings.ANALYTICS,
next=request.args.get('next'),
username=request.form.get('username', ''),
show_google_openid=settings.GOOGLE_OAUTH_ENABLED)
show_google_openid=settings.GOOGLE_OAUTH_ENABLED,
show_saml_login=settings.SAML_LOGIN_ENABLED)
@app.route('/logout')
def logout():
@@ -91,43 +98,16 @@ def logout():
return redirect('/login')
@app.route('/status.json')
@auth.required
@login_required
@require_permission('admin')
def status_api():
status = {}
info = redis_connection.info()
status['redis_used_memory'] = info['used_memory_human']
status['version'] = __version__
status['queries_count'] = models.Query.select().count()
status['query_results_count'] = models.QueryResult.select().count()
status['unused_query_results_count'] = models.QueryResult.unused().count()
status['dashboards_count'] = models.Dashboard.select().count()
status['widgets_count'] = models.Widget.select().count()
status['workers'] = []
manager_status = redis_connection.hgetall('redash:status')
status['manager'] = manager_status
status['manager']['outdated_queries_count'] = len(models.Query.outdated_queries())
queues = {}
for ds in models.DataSource.select():
for queue in (ds.queue_name, ds.scheduled_queue_name):
queues.setdefault(queue, set())
queues[queue].add(ds.name)
status['manager']['queues'] = {}
for queue, sources in queues.iteritems():
status['manager']['queues'][queue] = {
'data_sources': ', '.join(sources),
'size': redis_connection.llen(queue)
}
status = get_status()
return jsonify(status)
@app.route('/api/queries/format', methods=['POST'])
@auth.required
@login_required
def format_sql_query():
arguments = request.get_json(force=True)
query = arguments.get("query", "")
@@ -136,7 +116,7 @@ def format_sql_query():
@app.route('/queries/new', methods=['POST'])
@auth.required
@login_required
def create_query_route():
query = request.form.get('query', None)
data_source_id = request.form.get('data_source_id', None)
@@ -154,7 +134,7 @@ def create_query_route():
class BaseResource(Resource):
decorators = [auth.required]
decorators = [login_required]
def __init__(self, *args, **kwargs):
super(BaseResource, self).__init__(*args, **kwargs)
@@ -363,7 +343,7 @@ class QueryAPI(BaseResource):
@require_permission('edit_query')
def post(self, query_id):
query = models.Query.get_by_id(query_id)
query_def = request.get_json(force=True)
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'user', 'last_modified_by']:
query_def.pop(field, None)
@@ -415,7 +395,7 @@ class VisualizationListAPI(BaseResource):
kwargs = request.get_json(force=True)
kwargs['options'] = json.dumps(kwargs['options'])
kwargs['query'] = kwargs.pop('query_id')
vis = models.Visualization(**kwargs)
vis.save()
@@ -450,7 +430,7 @@ api.add_resource(VisualizationAPI, '/api/visualizations/<visualization_id>', end
class QueryResultListAPI(BaseResource):
@require_permission('execute_query')
def post(self):
params = request.json
params = request.get_json(force=True)
if settings.FEATURE_TABLES_PERMISSIONS:
metadata = utils.SQLMetaData(params['query'])
@@ -476,7 +456,7 @@ class QueryResultListAPI(BaseResource):
activity=params['query']
).save()
max_age = int(params['max_age'])
max_age = int(params.get('max_age', -1))
if max_age == 0:
query_result = None
@@ -487,7 +467,8 @@ class QueryResultListAPI(BaseResource):
return {'query_result': query_result.to_dict()}
else:
data_source = models.DataSource.get_by_id(params['data_source_id'])
job = QueryTask.add_task(params['query'], data_source)
query_id = params.get('query_id', 'adhoc')
job = QueryTask.add_task(params['query'], data_source, metadata={"Username": self.current_user.name, "Query ID": query_id})
return {'job': job.to_dict()}
@@ -507,6 +488,28 @@ class QueryResultAPI(BaseResource):
headers.update(cache_headers)
return make_response(s.getvalue(), 200, headers)
@staticmethod
def add_cors_headers(headers):
if 'Origin' in request.headers:
origin = request.headers['Origin']
if origin in settings.ACCESS_CONTROL_ALLOW_ORIGIN:
headers['Access-Control-Allow-Origin'] = origin
headers['Access-Control-Allow-Credentials'] = str(settings.ACCESS_CONTROL_ALLOW_CREDENTIALS).lower()
@require_permission('view_query')
def options(self, query_id=None, query_result_id=None, filetype='json'):
headers = {}
self.add_cors_headers(headers)
if settings.ACCESS_CONTROL_REQUEST_METHOD:
headers['Access-Control-Request-Method'] = settings.ACCESS_CONTROL_REQUEST_METHOD
if settings.ACCESS_CONTROL_ALLOW_HEADERS:
headers['Access-Control-Allow-Headers'] = settings.ACCESS_CONTROL_ALLOW_HEADERS
return make_response("", 200, headers)
@require_permission('view_query')
def get(self, query_id=None, query_result_id=None, filetype='json'):
if query_result_id is None and query_id is not None:
@@ -536,9 +539,15 @@ class QueryResultAPI(BaseResource):
record_event.delay(event)
headers = {}
if len(settings.ACCESS_CONTROL_ALLOW_ORIGIN) > 0:
self.add_cors_headers(headers)
if filetype == 'json':
data = json.dumps({'query_result': query_result.to_dict()}, cls=utils.JSONEncoder)
return make_response(data, 200, cache_headers)
headers.update(cache_headers)
return make_response(data, 200, headers)
else:
return self.csv_response(query_result)
@@ -568,11 +577,13 @@ api.add_resource(JobAPI, '/api/jobs/<job_id>', endpoint='job')
@app.route('/<path:filename>')
def send_static(filename):
return send_from_directory(settings.STATIC_ASSETS_PATH, filename)
if current_app.debug:
cache_timeout = 0
else:
cache_timeout = None
return send_from_directory(settings.STATIC_ASSETS_PATH, filename, cache_timeout=cache_timeout)
if __name__ == '__main__':
app.run(debug=True)

View File

@@ -1,25 +1,25 @@
import logging
from flask.ext.login import login_user
import requests
from flask import redirect, url_for, Blueprint
from flask import redirect, url_for, Blueprint, flash
from flask_oauth import OAuth
from redash import models, settings
logger = logging.getLogger('google_oauth')
oauth = OAuth()
request_token_params = {'scope': 'https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile', 'response_type': 'code'}
if settings.GOOGLE_APPS_DOMAIN:
request_token_params['hd'] = settings.GOOGLE_APPS_DOMAIN
else:
if not settings.GOOGLE_APPS_DOMAIN:
logger.warning("No Google Apps domain defined, all Google accounts allowed.")
google = oauth.remote_app('google',
base_url='https://www.google.com/accounts/',
authorize_url='https://accounts.google.com/o/oauth2/auth',
request_token_url=None,
request_token_params=request_token_params,
request_token_params={
'scope': 'https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile',
'response_type': 'code'
},
access_token_url='https://accounts.google.com/o/oauth2/token',
access_token_method='POST',
access_token_params={'grant_type': 'authorization_code'},
@@ -31,7 +31,7 @@ blueprint = Blueprint('google_oauth', __name__)
def get_user_profile(access_token):
headers = {'Authorization': 'OAuth '+access_token}
headers = {'Authorization': 'OAuth {}'.format(access_token)}
response = requests.get('https://www.googleapis.com/oauth2/v1/userinfo', headers=headers)
if response.status_code == 401:
@@ -41,9 +41,17 @@ def get_user_profile(access_token):
return response.json()
def verify_profile(profile):
if not settings.GOOGLE_APPS_DOMAIN:
return True
domain = profile['email'].split('@')[-1]
return domain in settings.GOOGLE_APPS_DOMAIN
def create_and_login_user(name, email):
try:
user_object = models.User.get(models.User.email == email)
user_object = models.User.get_by_email(email)
if user_object.name != name:
logger.debug("Updating user name (%r -> %r)", user_object.name, name)
user_object.name = name
@@ -70,10 +78,17 @@ def authorized(resp):
if access_token is None:
logger.warning("Access token missing in call back request.")
flash("Validation error. Please retry.")
return redirect(url_for('login'))
profile = get_user_profile(access_token)
if profile is None:
flash("Validation error. Please retry.")
return redirect(url_for('login'))
if not verify_profile(profile):
logger.warning("User tried to login with unauthorized domain name: %s", profile['email'])
flash("Your Google Apps domain name isn't allowed.")
return redirect(url_for('login'))
create_and_login_user(profile['name'], profile['email'])

View File

@@ -15,6 +15,7 @@ import psycopg2
from redash import utils, settings, redis_connection
from redash.query_runner import get_query_runner
from utils import generate_token
class Database(object):
@@ -152,6 +153,7 @@ class User(ModelTimestampsMixin, BaseModel, UserMixin, PermissionsCheckMixin):
email = peewee.CharField(max_length=320, index=True, unique=True)
password_hash = peewee.CharField(max_length=128, null=True)
groups = ArrayField(peewee.CharField, default=DEFAULT_GROUPS)
api_key = peewee.CharField(max_length=40, unique=True)
class Meta:
db_table = 'users'
@@ -169,6 +171,12 @@ class User(ModelTimestampsMixin, BaseModel, UserMixin, PermissionsCheckMixin):
super(User, self).__init__(*args, **kwargs)
self._allowed_tables = None
def pre_save(self, created):
super(User, self).pre_save(created)
if not self.api_key:
self.api_key = generate_token(40)
@property
def permissions(self):
# TODO: this should be cached.
@@ -188,6 +196,10 @@ class User(ModelTimestampsMixin, BaseModel, UserMixin, PermissionsCheckMixin):
def get_by_email(cls, email):
return cls.get(cls.email == email)
@classmethod
def get_by_api_key(cls, api_key):
return cls.get(cls.api_key == api_key)
def __unicode__(self):
return '%r, %r' % (self.name, self.email)
@@ -225,11 +237,11 @@ class ActivityLog(BaseModel):
class DataSource(BaseModel):
id = peewee.PrimaryKeyField()
name = peewee.CharField()
name = peewee.CharField(unique=True)
type = peewee.CharField()
options = peewee.TextField()
queue_name = peewee.CharField(default="queries")
scheduled_queue_name = peewee.CharField(default="queries")
scheduled_queue_name = peewee.CharField(default="scheduled_queries")
created_at = DateTimeTZField(default=datetime.datetime.now)
class Meta:
@@ -239,7 +251,8 @@ class DataSource(BaseModel):
return {
'id': self.id,
'name': self.name,
'type': self.type
'type': self.type,
'syntax': self.query_runner.syntax
}
def get_schema(self, refresh=False):
@@ -250,7 +263,7 @@ class DataSource(BaseModel):
cache = redis_connection.get(key)
if cache is None:
query_runner = get_query_runner(self.type, self.options)
query_runner = self.query_runner
schema = sorted(query_runner.get_schema(), key=lambda t: t['name'])
redis_connection.set(key, json.dumps(schema))
@@ -259,6 +272,10 @@ class DataSource(BaseModel):
return schema
@property
def query_runner(self):
return get_query_runner(self.type, self.options)
@classmethod
def all(cls):
return cls.select().order_by(cls.id.asc())
@@ -390,7 +407,7 @@ class Query(ModelTimestampsMixin, BaseModel):
if with_user:
d['user'] = self.user.to_dict()
d['last_modified_by'] = self.last_modified_by.to_dict()
d['last_modified_by'] = self.last_modified_by.to_dict() if self.last_modified_by is not None else None
else:
d['user_id'] = self._data['user']
@@ -432,7 +449,7 @@ class Query(ModelTimestampsMixin, BaseModel):
.switch(Query).join(DataSource)\
.where(cls.schedule != None)
now = datetime.datetime.utcnow().replace(tzinfo=psycopg2.tz.FixedOffsetTimezone(offset=0, name=None))
now = utils.utcnow()
outdated_queries = {}
for query in queries:
if should_schedule_next(query.latest_query_data.retrieved_at, now, query.schedule):

redash/monitor.py Normal file (33 lines)
View File

@@ -0,0 +1,33 @@
from redash import redis_connection, models, __version__
def get_status():
status = {}
info = redis_connection.info()
status['redis_used_memory'] = info['used_memory_human']
status['version'] = __version__
status['queries_count'] = models.Query.select().count()
status['query_results_count'] = models.QueryResult.select().count()
status['unused_query_results_count'] = models.QueryResult.unused().count()
status['dashboards_count'] = models.Dashboard.select().count()
status['widgets_count'] = models.Widget.select().count()
status['workers'] = []
manager_status = redis_connection.hgetall('redash:status')
status['manager'] = manager_status
status['manager']['outdated_queries_count'] = len(models.Query.outdated_queries())
queues = {}
for ds in models.DataSource.select():
for queue in (ds.queue_name, ds.scheduled_queue_name):
queues.setdefault(queue, set())
queues[queue].add(ds.name)
status['manager']['queues'] = {}
for queue, sources in queues.iteritems():
status['manager']['queues'][queue] = {
'data_sources': ', '.join(sources),
'size': redis_connection.llen(queue)
}
return status

View File

@@ -15,6 +15,7 @@ __all__ = [
'TYPE_STRING',
'TYPE_DATE',
'TYPE_FLOAT',
'SUPPORTED_COLUMN_TYPES',
'register',
'get_query_runner',
'import_query_runners'
@@ -28,10 +29,19 @@ TYPE_STRING = 'string'
TYPE_DATETIME = 'datetime'
TYPE_DATE = 'date'
SUPPORTED_COLUMN_TYPES = set([
TYPE_INTEGER,
TYPE_FLOAT,
TYPE_BOOLEAN,
TYPE_STRING,
TYPE_DATETIME,
TYPE_DATE
])
class BaseQueryRunner(object):
def __init__(self, configuration):
jsonschema.validate(configuration, self.configuration_schema())
self.syntax = 'sql'
self.configuration = configuration
@classmethod
@@ -104,4 +114,4 @@ def validate_configuration(query_runner_type, configuration_json):
def import_query_runners(query_runner_imports):
for runner_import in query_runner_imports:
__import__(runner_import)
__import__(runner_import)

View File

@@ -5,6 +5,8 @@ import logging
import sys
import time
import requests
from redash.query_runner import *
from redash.utils import JSONEncoder
@@ -15,6 +17,7 @@ try:
from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.client import SignedJwtAssertionCredentials
from oauth2client import gce
enabled = True
except ImportError:
@@ -66,18 +69,6 @@ def _load_key(filename):
f.close()
def _get_bigquery_service(service_account, private_key):
scope = [
"https://www.googleapis.com/auth/bigquery",
]
credentials = SignedJwtAssertionCredentials(service_account, private_key, scope=scope)
http = httplib2.Http()
http = credentials.authorize(http)
return build("bigquery", "v2", http=http)
def _get_query_results(jobs, project_id, job_id, start_index):
query_reply = jobs.getQueryResults(projectId=project_id, jobId=job_id, startIndex=start_index).execute()
logging.debug('query_reply %s', query_reply)
@@ -117,11 +108,23 @@ class BigQuery(BaseQueryRunner):
def __init__(self, configuration_json):
super(BigQuery, self).__init__(configuration_json)
self.private_key = _load_key(self.configuration["privateKey"])
def _get_bigquery_service(self):
scope = [
"https://www.googleapis.com/auth/bigquery",
]
private_key = _load_key(self.configuration["privateKey"])
credentials = SignedJwtAssertionCredentials(self.configuration['serviceAccount'], private_key, scope=scope)
http = httplib2.Http()
http = credentials.authorize(http)
return build("bigquery", "v2", http=http)
def _get_project_id(self):
return self.configuration["projectId"]
def run_query(self, query):
bigquery_service = _get_bigquery_service(self.configuration["serviceAccount"],
self.private_key)
bigquery_service = self._get_bigquery_service()
jobs = bigquery_service.jobs()
job_data = {
@@ -134,13 +137,13 @@ class BigQuery(BaseQueryRunner):
logger.debug("BigQuery got query: %s", query)
project_id = self.configuration["projectId"]
project_id = self._get_project_id()
try:
insert_response = jobs.insert(projectId=project_id, body=job_data).execute()
current_row = 0
query_reply = _get_query_results(jobs, project_id=project_id,
job_id=insert_response['jobReference']['jobId'], start_index=current_row)
job_id=insert_response['jobReference']['jobId'], start_index=current_row)
logger.debug("bigquery replied: %s", query_reply)
@@ -176,4 +179,26 @@ class BigQuery(BaseQueryRunner):
return json_data, error
register(BigQuery)
class BigQueryGCE(BigQuery):
@classmethod
def type(cls):
return "bigquery_gce"
@classmethod
def configuration_schema(cls):
return {}
def _get_project_id(self):
return requests.get('http://metadata/computeMetadata/v1/project/project-id', headers={'Metadata-Flavor': 'Google'}).content
def _get_bigquery_service(self):
credentials = gce.AppAssertionCredentials(scope='https://www.googleapis.com/auth/bigquery')
http = httplib2.Http()
http = credentials.authorize(http)
return build("bigquery", "v2", http=http)
register(BigQuery)
register(BigQueryGCE)

View File

@@ -0,0 +1,258 @@
import datetime
import json
import logging
import sys
import urllib
from redash.query_runner import *
from redash import models
import requests
import dateutil
from dateutil.parser import parse
try:
import http.client as http_client
except ImportError:
# Python 2
import httplib as http_client
logger = logging.getLogger(__name__)
ELASTICSEARCH_TYPES_MAPPING = {
"integer" : TYPE_INTEGER,
"long" : TYPE_INTEGER,
"float" : TYPE_FLOAT,
"double" : TYPE_FLOAT,
"boolean" : TYPE_BOOLEAN,
"string" : TYPE_STRING,
"date" : TYPE_DATE,
# "geo_point" TODO: Need to split to 2 fields somehow
}
PYTHON_TYPES_MAPPING = {
str: TYPE_STRING,
unicode: TYPE_STRING,
bool : TYPE_BOOLEAN,
int : TYPE_INTEGER,
long: TYPE_INTEGER,
float: TYPE_FLOAT
}
#
# This query runner currently supports only simple Lucene style queries (like Kibana,
# but without aggregations).
#
# Full blown JSON based ElasticSearch queries (including aggregations) will be
# added later
#
# Simple query example:
#
# - Query the index named "twitter"
# - Filter by "user:kimchy"
# - Return the fields: "@timestamp", "tweet" and "user"
# - Return up to 15 results
# - Sort by @timestamp ascending
#
# {
# "index" : "twitter",
# "query" : "user:kimchy",
# "fields" : ["@timestamp", "tweet", "user"],
# "size" : 15,
# "sort" : "@timestamp:asc"
# }
#
#
# Simple query on a logstash ElasticSearch instance:
#
# - Query the index named "logstash-2015.04.*" (in this case its all of April 2015)
# - Filter by type:events AND eventName:UserUpgrade AND channel:selfserve
# - Return fields: "@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"
# - Return up to 250 results
# - Sort by @timestamp ascending
# {
# "index" : "logstash-2015.04.*",
# "query" : "type:events AND eventName:UserUpgrade AND channel:selfserve",
# "fields" : ["@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"],
# "size" : 250,
# "sort" : "@timestamp:asc"
# }
#
#
class ElasticSearch(BaseQueryRunner):
DEBUG_ENABLED = False
"""
ElasticSearch query runner for querying ElasticSearch servers.
Queries can be written in Lucene syntax (single line) or in the more complex,
full-blown ElasticSearch JSON syntax.
"""
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'server': {
'type': 'string'
}
},
"required" : ["server"]
}
@classmethod
def enabled(cls):
return True
@classmethod
def annotate_query(cls):
return False
def __init__(self, configuration_json):
super(ElasticSearch, self).__init__(configuration_json)
self.syntax = "json"
if self.DEBUG_ENABLED:
http_client.HTTPConnection.debuglevel = 1
# you need to initialize logging, otherwise you will not see anything from requests
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
requests_log = logging.getLogger("requests.packages.urllib3")
requests_log.setLevel(logging.DEBUG)
requests_log.propagate = True
def get_mappings(self, url):
mappings = {}
r = requests.get(url)
mappings_data = r.json()
for index_name in mappings_data:
index_mappings = mappings_data[index_name]
for m in index_mappings.get("mappings", {}):
for property_name in index_mappings["mappings"][m]["properties"]:
property_data = index_mappings["mappings"][m]["properties"][property_name]
if not property_name in mappings:
property_type = property_data.get("type", None)
if property_type:
if property_type in ELASTICSEARCH_TYPES_MAPPING:
mappings[property_name] = property_type
else:
raise "Unknown property type: {0}".format(property_type)
return mappings
def parse_results(self, mappings, result_fields, raw_result, result_columns, result_rows):
result_columns_index = {}
for c in result_columns:
result_columns_index[c["name"]] = c
result_fields_index = {}
if result_fields:
for r in result_fields:
result_fields_index[r] = None
for h in raw_result["hits"]["hits"]:
row = {}
for column in h["_source"]:
if result_fields and column not in result_fields_index:
continue
if column not in result_columns_index:
result_columns.append({
"name" : column,
"friendly_name" : column,
"type" : mappings.get(column, "string")
})
result_columns_index[column] = result_columns[-1]
row[column] = h["_source"][column]
if row and len(row) > 0:
result_rows.append(row)
def execute_simple_query(self, url, _from, mappings, result_fields, result_columns, result_rows):
url += "&from={0}".format(_from)
r = requests.get(url)
if r.status_code != 200:
raise Exception("Failed to execute query. Return Code: {0} Reason: {1}".format(r.status_code, r.text))
raw_result = r.json()
self.parse_results(mappings, result_fields, raw_result, result_columns, result_rows)
total = raw_result["hits"]["total"]
result_size = len(raw_result["hits"]["hits"])
logger.debug("Result Size: {0} Total: {1}".format(result_size, total))
return raw_result["hits"]["total"]
def run_query(self, query):
try:
error = None
logger.debug(query)
query_params = json.loads(query)
index_name = query_params["index"]
query_data = query_params["query"]
size = int(query_params.get("size", 500))
result_fields = query_params.get("fields", None)
sort = query_params.get("sort", None)
server_url = self.configuration["server"]
if not server_url:
error = "Missing configuration key 'server'"
return None, error
if server_url[-1] == "/":
server_url = server_url[:-1]
url = "{0}/{1}/_search?".format(server_url, index_name)
mapping_url = "{0}/{1}/_mapping".format(server_url, index_name)
mappings = self.get_mappings(mapping_url)
logger.debug(json.dumps(mappings, indent=4))
if size:
url += "&size={0}".format(size)
if sort:
url += "&sort={0}".format(urllib.quote_plus(sort))
url += "&q={0}".format(urllib.quote_plus(query_data))
logger.debug("Using URL: {0}".format(url))
logger.debug("Using Query: {0}".format(query_data))
result_columns = []
result_rows = []
if isinstance(query_data, str) or isinstance(query_data, unicode):
_from = 0
while True:
total = self.execute_simple_query(url, _from, mappings, result_fields, result_columns, result_rows)
_from += size
if _from >= total:
break
else:
# TODO: Handle complete ElasticSearch queries (JSON based sent over HTTP POST)
raise Exception("Advanced queries are not supported")
json_data = json.dumps({
"columns" : result_columns,
"rows" : result_rows
})
except KeyboardInterrupt:
error = "Query cancelled by user."
json_data = None
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
return json_data, error
register(ElasticSearch)

View File

@@ -0,0 +1,83 @@
import json
import logging
from redash.utils import JSONEncoder
from redash.query_runner import *
logger = logging.getLogger(__name__)
try:
from influxdb import InfluxDBClusterClient
enabled = True
except ImportError:
logger.warning("Missing dependencies. Please install influxdb.")
logger.warning("You can use pip: pip install influxdb")
enabled = False
def _transform_result(results):
result_columns = []
result_rows = []
for result in results:
if not result_columns:
for c in result.raw['series'][0]['columns']:
result_columns.append({ "name": c })
for point in result.get_points():
result_rows.append(point)
return json.dumps({
"columns" : result_columns,
"rows" : result_rows
}, cls=JSONEncoder)
class InfluxDB(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'url': {
'type': 'string'
}
},
'required': ['url']
}
@classmethod
def enabled(cls):
return enabled
@classmethod
def annotate_query(cls):
return False
@classmethod
def type(cls):
return "influxdb"
def __init__(self, configuration_json):
super(InfluxDB, self).__init__(configuration_json)
def run_query(self, query):
client = InfluxDBClusterClient.from_DSN(self.configuration['url'])
logger.debug("influxdb url: %s", self.configuration['url'])
logger.debug("influxdb got query: %s", query)
try:
results = client.query(query)
if not isinstance(results, list):
results = [results]
json_data = _transform_result(results)
error = None
except Exception as ex:
json_data = None
error = ex.message
return json_data, error
register(InfluxDB)
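A hedged example of the single configuration key this runner expects; the DSN is whatever influxdb-python's InfluxDBClusterClient.from_DSN accepts (host, credentials and database below are assumed):
# assumed host/credentials, shown only to illustrate the expected shape
configuration = {"url": "influxdb://user:password@localhost:8086/mydatabase"}
runner = InfluxDB(configuration)
json_data, error = runner.run_query("SELECT * FROM cpu LIMIT 10")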

View File

@@ -12,6 +12,7 @@ logger = logging.getLogger(__name__)
try:
import pymongo
from bson.objectid import ObjectId
from bson.son import SON
enabled = True
except ImportError:
@@ -32,24 +33,74 @@ TYPES_MAP = {
date_regex = re.compile("ISODate\(\"(.*)\"\)", re.IGNORECASE)
class MongoDBJSONEncoder(JSONEncoder):
def default(self, o):
if isinstance(o, ObjectId):
return str(o)
def _get_column_by_name(columns, column_name):
for c in columns:
if "name" in c and c["name"] == column_name:
return c
return None
def _convert_date(q, field_name):
m = date_regex.findall(q[field_name])
if len(m) > 0:
if q[field_name].find(":") == -1:
q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d")))
else:
q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d %H:%M")))
return super(MongoDBJSONEncoder, self).default(o)
# Simple query example:
#
# {
# "collection" : "my_collection",
# "query" : {
# "date" : {
# "$gt" : "ISODate(\"2015-01-15 11:41\")",
# },
# "type" : 1
# },
# "fields" : {
# "_id" : 1,
# "name" : 2
# },
# "sort" : [
# {
# "name" : "date",
# "direction" : -1
# }
# ]
#
# }
#
#
# Aggregation
# ===========
# Uses a syntax similar to the one used in PyMongo, however to support the
# correct order of sorting, it uses a regular list for the "$sort" operation
# that converts into a SON (sorted dictionary) object before execution.
#
# Aggregation query example:
#
# {
# "collection" : "things",
# "aggregate" : [
# {
# "$unwind" : "$tags"
# },
# {
# "$group" : {
# "_id" : "$tags",
# "count" : { "$sum" : 1 }
# }
# },
# {
# "$sort" : [
# {
# "name" : "count",
# "direction" : -1
# },
# {
# "name" : "_id",
# "direction" : -1
# }
# ]
# }
# ]
# }
#
#
class MongoDB(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
@@ -68,8 +119,8 @@ class MongoDB(BaseQueryRunner):
'type': 'string',
'title': 'Replica Set Name'
},
'required': ['connectionString']
}
},
'required': ['connectionString']
}
@classmethod
@@ -83,10 +134,28 @@ class MongoDB(BaseQueryRunner):
def __init__(self, configuration_json):
super(MongoDB, self).__init__(configuration_json)
self.syntax = 'json'
self.db_name = self.configuration["dbName"]
self.is_replica_set = True if "replicaSetName" in self.configuration and self.configuration["replicaSetName"] else False
def _get_column_by_name(self, columns, column_name):
for c in columns:
if "name" in c and c["name"] == column_name:
return c
return None
def _convert_date(self, q, field_name):
m = date_regex.findall(q[field_name])
if len(m) > 0:
if q[field_name].find(":") == -1:
q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d")))
else:
q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d %H:%M")))
def run_query(self, query):
if self.is_replica_set:
db_connection = pymongo.MongoReplicaSetClient(self.configuration["connectionString"], replicaSet=self.configuration["replicaSetName"])
@@ -117,21 +186,42 @@ class MongoDB(BaseQueryRunner):
for k in q:
if q[k] and type(q[k]) in [str, unicode]:
logging.debug(q[k])
_convert_date(q, k)
self._convert_date(q, k)
elif q[k] and type(q[k]) is dict:
for k2 in q[k]:
if type(q[k][k2]) in [str, unicode]:
_convert_date(q[k], k2)
self._convert_date(q[k], k2)
f = None
aggregate = None
if "aggregate" in query_data:
aggregate = query_data["aggregate"]
for step in aggregate:
if "$sort" in step:
sort_list = []
for sort_item in step["$sort"]:
sort_list.append((sort_item["name"], sort_item["direction"]))
step["$sort"] = SON(sort_list)
if aggregate:
pass
else:
s = None
if "sort" in query_data and query_data["sort"]:
s = []
for field in query_data["sort"]:
s.append((field["name"], field["direction"]))
if "fields" in query_data:
f = query_data["fields"]
s = None
if "sort" in query_data and query_data["sort"]:
s = []
for field_name in query_data["sort"]:
s.append((field_name, query_data["sort"][field_name]))
for field_data in query_data["sort"]:
s.append((field_data["name"], field_data["direction"]))
columns = []
rows = []
@@ -139,30 +229,38 @@ class MongoDB(BaseQueryRunner):
error = None
json_data = None
if s:
cursor = db[collection].find(q, f).sort(s)
else:
cursor = db[collection].find(q, f)
cursor = None
if q or (not q and not aggregate):
if s:
cursor = db[collection].find(q, f).sort(s)
else:
cursor = db[collection].find(q, f)
if "skip" in query_data:
cursor = cursor.skip(query_data["skip"])
if "limit" in query_data:
cursor = cursor.limit(query_data["limit"])
elif aggregate:
r = db[collection].aggregate(aggregate)
cursor = r["result"]
for r in cursor:
for k in r:
if _get_column_by_name(columns, k) is None:
if self._get_column_by_name(columns, k) is None:
columns.append({
"name": k,
"friendly_name": k,
"type": TYPES_MAP.get(type(r[k]), TYPE_STRING)
})
# Convert ObjectId to string
if type(r[k]) == ObjectId:
r[k] = str(r[k])
rows.append(r)
if f:
ordered_columns = []
for k in sorted(f, key=f.get):
ordered_columns.append(_get_column_by_name(columns, k))
ordered_columns.append(self._get_column_by_name(columns, k))
columns = ordered_columns
@@ -171,8 +269,8 @@ class MongoDB(BaseQueryRunner):
"rows": rows
}
error = None
json_data = json.dumps(data, cls=JSONEncoder)
json_data = json.dumps(data, cls=MongoDBJSONEncoder)
return json_data, error
register(MongoDB)
register(MongoDB)

View File

@@ -7,6 +7,24 @@ from redash.query_runner import *
logger = logging.getLogger(__name__)
types_map = {
0: TYPE_FLOAT,
1: TYPE_INTEGER,
2: TYPE_INTEGER,
3: TYPE_INTEGER,
4: TYPE_FLOAT,
5: TYPE_FLOAT,
7: TYPE_DATETIME,
8: TYPE_INTEGER,
9: TYPE_INTEGER,
10: TYPE_DATE,
12: TYPE_DATETIME,
15: TYPE_STRING,
16: TYPE_INTEGER,
246: TYPE_FLOAT,
253: TYPE_STRING,
254: TYPE_STRING,
}
class Mysql(BaseQueryRunner):
@classmethod
@@ -27,7 +45,10 @@ class Mysql(BaseQueryRunner):
'db': {
'type': 'string',
'title': 'Database name'
}
},
"port": {
"type": "number"
},
},
'required': ['db']
}
@@ -82,10 +103,11 @@ class Mysql(BaseQueryRunner):
def run_query(self, query):
import MySQLdb
connection = MySQLdb.connect(self.configuration.get('host', ''),
self.configuration.get('user', ''),
self.configuration.get('passwd', ''),
self.configuration['db'],
connection = MySQLdb.connect(host=self.configuration.get('host', ''),
user=self.configuration.get('user', ''),
passwd=self.configuration.get('passwd', ''),
db=self.configuration['db'],
port=self.configuration.get('port', 3306),
charset='utf8', use_unicode=True)
cursor = connection.cursor()
@@ -96,17 +118,15 @@ class Mysql(BaseQueryRunner):
data = cursor.fetchall()
cursor_desc = cursor.description
if cursor_desc is not None:
num_fields = len(cursor_desc)
column_names = [i[0] for i in cursor.description]
# TODO - very similar to pg.py
if cursor.description is not None:
columns_data = [(i[0], i[1]) for i in cursor.description]
rows = [dict(zip(column_names, row)) for row in data]
rows = [dict(zip((c[0] for c in columns_data), row)) for row in data]
# TODO: add types support
columns = [{'name': col_name,
'friendly_name': col_name,
'type': None} for col_name in column_names]
columns = [{'name': col[0],
'friendly_name': col[0],
'type': types_map.get(col[1], None)} for col in columns_data]
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)

View File

@@ -1,79 +1,203 @@
import sys
import datetime
import json
import logging
import weakref
from redash.query_runner import *
from redash import models
import importlib
def get_query_result(query_id):
try:
query = models.Query.get_by_id(query_id)
except models.Query.DoesNotExist:
raise Exception("Query id %s does not exist." % query_id)
logger = logging.getLogger(__name__)
if query.latest_query_data is None:
raise Exception("Query does not have results yet.")
from RestrictedPython import compile_restricted
from RestrictedPython.Guards import safe_builtins
if query.latest_query_data.data is None:
raise Exception("Query does not have results yet.")
class CustomPrint(object):
""" CustomPrint redirect "print" calls to be sent as "log" on the result object """
def __init__(self, python_runner):
self._python_runner = python_runner
return json.loads(query.latest_query_data.data)
def write(self, text):
if self._python_runner()._enable_print_log:
if text and text.strip():
log_line = "[{0}] {1}".format(datetime.datetime.utcnow().isoformat(), text)
self._python_runner()._result["log"].append(log_line)
def execute_query(data_source_name, query):
try:
data_source = models.DataSource.get(models.DataSource.name==data_source_name)
except models.DataSource.DoesNotExist:
raise Exception("Wrong data source name: %s." % data_source_name)
query_runner = get_query_runner(data_source.type, data_source.options)
data, error = query_runner.run_query(query)
if error is not None:
raise Exception(error)
# TODO: allow avoiding the json.dumps/loads in same process
return json.loads(data)
def __call__(self):
return self
class Python(BaseQueryRunner):
"""
This is very, very unsafe. Use at your own risk with people you really trust.
"""
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
}
'allowedImportModules': {
'type': 'string',
'title': 'Modules to import prior to running the script'
}
},
}
@classmethod
def enabled(cls):
return True
@classmethod
def annotate_query(cls):
return False
def __init__(self, configuration_json):
global ALLOWED_MODULES
super(Python, self).__init__(configuration_json)
self.syntax = "python"
self._allowed_modules = {}
self._result = { "rows" : [], "columns" : [], "log" : [] }
self._enable_print_log = True
if self.configuration.get("allowedImportModules", None):
for item in self.configuration["allowedImportModules"].split(","):
self._allowed_modules[item] = None
def custom_import(self, name, globals=None, locals=None, fromlist=(), level=0):
if name in self._allowed_modules:
m = None
if self._allowed_modules[name] is None:
m = importlib.import_module(name)
self._allowed_modules[name] = m
else:
m = self._allowed_modules[name]
return m
raise Exception("'{0}' is not configured as a supported import module".format(name))
def custom_write(self, obj):
"""
Custom hook which controls the way objects/lists/tuples/dicts behave in
RestrictedPython
"""
return obj
def custom_get_item(self, obj, key):
return obj[key]
def custom_get_iter(self, obj):
return iter(obj)
def disable_print_log(self):
self._enable_print_log = False
def enable_print_log(self):
self._enable_print_log = True
def add_result_column(self, result, column_name, friendly_name, column_type):
""" Helper function to add columns inside a Python script running in re:dash in an easier way """
if column_type not in SUPPORTED_COLUMN_TYPES:
raise Exception("'{0}' is not a supported column type".format(column_type))
if not "columns" in result:
result["columns"] = []
result["columns"].append({
"name" : column_name,
"friendly_name" : friendly_name,
"type" : column_type
})
def add_result_row(self, result, values):
if not "rows" in result:
result["rows"] = []
result["rows"].append(values)
def execute_query(self, data_source_name_or_id, query):
try:
if type(data_source_name_or_id) == int:
data_source = models.DataSource.get_by_id(data_source_name_or_id)
else:
data_source = models.DataSource.get(models.DataSource.name==data_source_name_or_id)
except models.DataSource.DoesNotExist:
raise Exception("Wrong data source name/id: %s." % data_source_name_or_id)
query_runner = get_query_runner(data_source.type, data_source.options)
data, error = query_runner.run_query(query)
if error is not None:
raise Exception(error)
# TODO: allow avoiding the json.dumps/loads in same process
return json.loads(data)
def get_query_result(self, query_id):
try:
query = models.Query.get_by_id(query_id)
except models.Query.DoesNotExist:
raise Exception("Query id %s does not exist." % query_id)
if query.latest_query_data is None:
raise Exception("Query does not have results yet.")
if query.latest_query_data.data is None:
raise Exception("Query does not have results yet.")
return json.loads(query.latest_query_data.data)
def run_query(self, query):
try:
error = None
script_globals = {'get_query_result': get_query_result, 'execute_query': execute_query}
script_locals = {'result': None}
# TODO: timeout, sandboxing
exec query in script_globals, script_locals
code = compile_restricted(query, '<string>', 'exec')
if script_locals['result'] is None:
raise Exception("result wasn't set to value.")
safe_builtins["_write_"] = self.custom_write
safe_builtins["__import__"] = self.custom_import
safe_builtins["_getattr_"] = getattr
safe_builtins["getattr"] = getattr
safe_builtins["_setattr_"] = setattr
safe_builtins["setattr"] = setattr
safe_builtins["_getitem_"] = self.custom_get_item
safe_builtins["_getiter_"] = self.custom_get_iter
safe_builtins["_print_"] = CustomPrint(weakref.ref(self))
json_data = json.dumps(script_locals['result'])
script_locals = { "result" : self._result }
restricted_globals = dict(__builtins__=safe_builtins)
restricted_globals["get_query_result"] = self.get_query_result
restricted_globals["execute_query"] = self.execute_query
restricted_globals["add_result_column"] = self.add_result_column
restricted_globals["add_result_row"] = self.add_result_row
restricted_globals["disable_print_log"] = self.disable_print_log
restricted_globals["enable_print_log"] = self.enable_print_log
restricted_globals["TYPE_DATETIME"] = TYPE_DATETIME
restricted_globals["TYPE_BOOLEAN"] = TYPE_BOOLEAN
restricted_globals["TYPE_INTEGER"] = TYPE_INTEGER
restricted_globals["TYPE_STRING"] = TYPE_STRING
restricted_globals["TYPE_DATE"] = TYPE_DATE
restricted_globals["TYPE_FLOAT"] = TYPE_FLOAT
# TODO: Figure out the best way to have a timeout on a script
# One option is to use ETA with Celery + timeouts on workers
# And replacement of worker process every X requests handled.
exec(code) in restricted_globals, script_locals
json_data = json.dumps(self._result)
except KeyboardInterrupt:
error = "Query cancelled by user."
json_data = None
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
error = str(e)
json_data = None
return json_data, error
register(Python)
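A minimal sketch of a script this runner could execute (query id and data source name are assumed); it uses only the helpers exposed through restricted_globals above, and its print output is collected into the result log by CustomPrint:
# runs inside the restricted globals set up by run_query above
previous = get_query_result(42)  # hypothetical existing query id
fresh = execute_query("events-db", "SELECT count(*) AS c FROM events")  # assumed data source
add_result_column(result, "c", "Event count", TYPE_INTEGER)
for row in fresh["rows"]:
    add_result_row(result, {"c": row["c"]})
print "previous result had {0} rows".format(len(previous["rows"]))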

View File

@@ -30,7 +30,7 @@ class Script(BaseQueryRunner):
def __init__(self, configuration_json):
super(Script, self).__init__(configuration_json)
# Poor man's protection against running scripts from output the scripts directory
# Poor man's protection against running scripts from outside the scripts directory
if self.configuration["path"].find("../") > -1:
raise ValidationError("Scripts can only be run from the configured scripts directory")
@@ -41,11 +41,13 @@ class Script(BaseQueryRunner):
query = query.strip()
script = os.path.join(self.configuration["path"], query)
script = os.path.join(self.configuration["path"], query.split(" ")[0])
if not os.path.exists(script):
return None, "Script '%s' not found in script directory" % query
output = subprocess.check_output(script, shell=False)
script = os.path.join(self.configuration["path"], query)
output = subprocess.check_output(script.split(" "), shell=False)
if output is not None:
output = output.strip()
if output != "":
@@ -62,4 +64,4 @@ class Script(BaseQueryRunner):
return json_data, error
register(Script)
register(Script)

redash/saml_auth.py Normal file (145 lines)
View File

@@ -0,0 +1,145 @@
# Copyright 2015 Okta, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from flask.ext.login import login_user
import requests
from flask import redirect, url_for, Blueprint, request
from flask_oauth import OAuth
from redash import models, settings
from saml2 import (
BINDING_HTTP_POST,
BINDING_HTTP_REDIRECT,
entity,
)
from saml2.client import Saml2Client
from saml2.config import Config as Saml2Config
logger = logging.getLogger('saml_auth')
blueprint = Blueprint('saml_auth', __name__)
def get_saml_client():
'''
Return a configured SAML client.
The configuration is a hash for use by saml2.config.Config.
'''
if settings.SAML_CALLBACK_SERVER_NAME:
acs_url=settings.SAML_CALLBACK_SERVER_NAME + url_for("saml_auth.idp_initiated")
else:
acs_url = url_for("saml_auth.idp_initiated",_external=True)
# NOTE:
# Ideally, this should fetch the metadata and pass it to
# PySAML2 via the "inline" metadata type.
# However, this method doesn't seem to work on PySAML2 v2.4.0
#
# SAML metadata changes very rarely. On a production system,
# this data should be cached as appropriate for your production system.
rv = requests.get(settings.SAML_METADATA_URL)
import tempfile
tmp = tempfile.NamedTemporaryFile()
f = open(tmp.name, 'w')
f.write(rv.text)
f.close()
saml_settings = {
'metadata': {
# 'inline': metadata,
"local": [tmp.name]
},
'service': {
'sp': {
'endpoints': {
'assertion_consumer_service': [
(acs_url, BINDING_HTTP_REDIRECT),
(acs_url, BINDING_HTTP_POST)
],
},
# Don't verify that the incoming requests originate from us via
# the built-in cache for authn request ids in pysaml2
'allow_unsolicited': True,
# Don't sign authn requests, since signed requests only make
# sense in a situation where you control both the SP and IdP
'authn_requests_signed': False,
'logout_requests_signed': True,
'want_assertions_signed': True,
'want_response_signed': False,
},
},
}
spConfig = Saml2Config()
spConfig.load(saml_settings)
spConfig.allow_unknown_attributes = True
saml_client = Saml2Client(config=spConfig)
tmp.close()
return saml_client
@blueprint.route("/saml/callback", methods=['POST'])
def idp_initiated():
saml_client = get_saml_client()
authn_response = saml_client.parse_authn_request_response(
request.form['SAMLResponse'],
entity.BINDING_HTTP_POST)
authn_response.get_identity()
user_info = authn_response.get_subject()
email = user_info.text
name = "%s %s" % (authn_response.ava['FirstName'][0], authn_response.ava['LastName'][0])
# This is what is known as "Just In Time (JIT) provisioning":
# if a user in the SAML assertion isn't in the user store,
# we create that user first, then log them in.
try:
user_object = models.User.get(models.User.email == email)
if user_object.name != name:
logger.debug("Updating user name (%r -> %r)", user_object.name, name)
user_object.name = name
user_object.save()
except models.User.DoesNotExist:
logger.debug("Creating user object (%r)", name)
user_object = models.User.create(name=name, email=email, groups=models.User.DEFAULT_GROUPS)
login_user(user_object, remember=True)
url = url_for('index')
return redirect(url)
@blueprint.route("/saml/login")
def sp_initiated():
if not settings.SAML_METADATA_URL:
logger.error("Cannot invoke saml endpoint without metadata url in settings.")
return redirect(url_for('index'))
saml_client = get_saml_client()
reqid, info = saml_client.prepare_for_authenticate()
redirect_url = None
# Select the IdP URL to send the AuthN request to
for key, value in info['headers']:
if key == 'Location':
redirect_url = value
response = redirect(redirect_url, code=302)
# NOTE:
# I realize I _technically_ don't need to set Cache-Control or Pragma:
# http://stackoverflow.com/a/5494469
# However, Section 3.2.3.2 of the SAML spec suggests they are set:
# http://docs.oasis-open.org/security/saml/v2.0/saml-bindings-2.0-os.pdf
# We set those headers here as a "belt and suspenders" approach,
# since enterprise environments don't always conform to RFCs
response.headers['Cache-Control'] = 'no-cache, no-store'
response.headers['Pragma'] = 'no-cache'
return response

View File

@@ -32,6 +32,10 @@ def array_from_string(str):
return array
def set_from_string(str):
return set(array_from_string(str))
def parse_boolean(str):
return json.loads(str.lower())
@@ -60,12 +64,16 @@ PASSWORD_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_PASSWORD_LOGIN_ENA
# Google Apps domain to allow access from; any user with email in this Google Apps will be allowed
# access
GOOGLE_APPS_DOMAIN = os.environ.get("REDASH_GOOGLE_APPS_DOMAIN", "")
GOOGLE_APPS_DOMAIN = set_from_string(os.environ.get("REDASH_GOOGLE_APPS_DOMAIN", ""))
GOOGLE_CLIENT_ID = os.environ.get("REDASH_GOOGLE_CLIENT_ID", "")
GOOGLE_CLIENT_SECRET = os.environ.get("REDASH_GOOGLE_CLIENT_SECRET", "")
GOOGLE_OAUTH_ENABLED = GOOGLE_CLIENT_ID and GOOGLE_CLIENT_SECRET
SAML_METADATA_URL = os.environ.get("REDASH_SAML_METADATA_URL", "")
SAML_LOGIN_ENABLED = SAML_METADATA_URL != ""
SAML_CALLBACK_SERVER_NAME = os.environ.get("REDASH_SAML_CALLBACK_SERVER_NAME", "")
STATIC_ASSETS_PATH = fix_assets_path(os.environ.get("REDASH_STATIC_ASSETS_PATH", "../rd_ui/app/"))
JOB_EXPIRY_TIME = int(os.environ.get("REDASH_JOB_EXPIRY_TIME", 3600 * 6))
COOKIE_SECRET = os.environ.get("REDASH_COOKIE_SECRET", "c292a0a3aa32397cdb050e233733900f")
@@ -73,6 +81,14 @@ LOG_LEVEL = os.environ.get("REDASH_LOG_LEVEL", "INFO")
CLIENT_SIDE_METRICS = parse_boolean(os.environ.get("REDASH_CLIENT_SIDE_METRICS", "false"))
ANALYTICS = os.environ.get("REDASH_ANALYTICS", "")
# CORS settings for the Query Result API (and possibly future external APIs).
# In most cases all you need to do is set REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN
# to the calling domain (or domains in a comma separated list).
ACCESS_CONTROL_ALLOW_ORIGIN = set_from_string(os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN", ""))
ACCESS_CONTROL_ALLOW_CREDENTIALS = parse_boolean(os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_CREDENTIALS", "false"))
ACCESS_CONTROL_REQUEST_METHOD = os.environ.get("REDASH_CORS_ACCESS_CONTROL_REQUEST_METHOD", "GET, POST, PUT")
ACCESS_CONTROL_ALLOW_HEADERS = os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_HEADERS", "Content-Type")
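# For example (domains assumed), to allow two embedding sites with credentials:
#   REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN=https://intranet.example.com,https://bi.example.com
#   REDASH_CORS_ACCESS_CONTROL_ALLOW_CREDENTIALS=true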
# Query Runners
QUERY_RUNNERS = array_from_string(os.environ.get("REDASH_ENABLED_QUERY_RUNNERS", ",".join([
'redash.query_runner.big_query',
@@ -82,6 +98,7 @@ QUERY_RUNNERS = array_from_string(os.environ.get("REDASH_ENABLED_QUERY_RUNNERS",
'redash.query_runner.pg',
'redash.query_runner.script',
'redash.query_runner.url',
'redash.query_runner.influx_db',
])))
# Features:

View File

@@ -1,11 +1,10 @@
import time
import datetime
import logging
import redis
from celery import Task
from celery.result import AsyncResult
from celery.utils.log import get_task_logger
from redash import redis_connection, models, statsd_client, settings
from redash import redis_connection, models, statsd_client, settings, utils
from redash.utils import gen_query_hash
from redash.worker import celery
from redash.query_runner import get_query_runner
@@ -47,12 +46,13 @@ class QueryTask(object):
return self._async_result.id
@classmethod
def add_task(cls, query, data_source, scheduled=False):
def add_task(cls, query, data_source, scheduled=False, metadata={}):
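# NOTE: the mutable default ({}) is shared across calls; it is harmless here
# because add_task only reads metadata, but metadata=None plus a fallback
# inside the body would be the more defensive signature.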
query_hash = gen_query_hash(query)
logging.info("[Manager][%s] Inserting job", query_hash)
logging.info("[Manager] Metadata: [%s]", metadata)
try_count = 0
job = None
while try_count < cls.MAX_RETRIES:
try_count += 1
@@ -77,8 +77,9 @@ class QueryTask(object):
else:
queue_name = data_source.queue_name
result = execute_query.apply_async(args=(query, data_source.id), queue=queue_name)
result = execute_query.apply_async(args=(query, data_source.id, metadata), queue=queue_name)
job = cls(async_result=result)
logging.info("[Manager][%s] Created new job: %s", query_hash, job.id)
pipe.set(cls._job_lock_id(query_hash, data_source.id), job.id, settings.JOB_EXPIRY_TIME)
pipe.execute()
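The ad-hoc call site of the new metadata parameter is not part of this compare; by analogy with the scheduled call in refresh_queries() below, it presumably looks something like this (query_text, query_id and current_user are assumed names):

QueryTask.add_task(query_text, data_source,
                   metadata={'Query ID': query_id, 'Username': current_user.name})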
@@ -146,8 +147,8 @@ def refresh_queries():
outdated_queries_count = 0
for query in models.Query.outdated_queries():
# TODO: this should go into lower priority
QueryTask.add_task(query.query, query.data_source, scheduled=True)
QueryTask.add_task(query.query, query.data_source, scheduled=True,
metadata={'Query ID': query.id, 'Username': 'Scheduled'})
outdated_queries_count += 1
statsd_client.gauge('manager.outdated_queries', outdated_queries_count)
@@ -197,9 +198,9 @@ def cleanup_tasks():
logger.warning("%s is ready (%s), removing lock.", lock_keys[i], t.celery_status)
redis_connection.delete(lock_keys[i])
if t.celery_status == 'STARTED' and t.id not in all_tasks:
logger.warning("Couldn't find active job for: %s, removing lock.", lock_keys[i])
redis_connection.delete(lock_keys[i])
# if t.celery_status == 'STARTED' and t.id not in all_tasks:
# logger.warning("Couldn't find active job for: %s, removing lock.", lock_keys[i])
# redis_connection.delete(lock_keys[i])
@celery.task(base=BaseTask)
@@ -230,8 +231,7 @@ def refresh_schemas():
@celery.task(bind=True, base=BaseTask, track_started=True)
def execute_query(self, query, data_source_id):
# TODO: maybe this should be a class?
def execute_query(self, query, data_source_id, metadata):
start_time = time.time()
logger.info("Loading data source (%d)...", data_source_id)
@@ -247,9 +247,15 @@ def execute_query(self, query, data_source_id):
query_runner = get_query_runner(data_source.type, data_source.options)
if query_runner.annotate_query():
# TODO: annotate with queue name
annotated_query = "/* Task Id: %s, Query hash: %s */ %s" % \
(self.request.id, query_hash, query)
metadata['Task ID'] = self.request.id
metadata['Query Hash'] = query_hash
metadata['Queue'] = self.request.delivery_info['routing_key']
annotation = u", ".join([u"{}: {}".format(k, v) for k, v in metadata.iteritems()])
logging.debug(u"Annotation: %s", annotation)
annotated_query = u"/* {} */ {}".format(annotation, query)
else:
annotated_query = query
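Concretely, with the metadata assembled above, a scheduled run now ships an annotation along these lines (values illustrative; dict ordering is arbitrary in Python 2):

/* Query ID: 42, Username: Scheduled, Task ID: 7f3c..., Query Hash: 1b2d..., Queue: scheduled_queries */ SELECT ...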
@@ -265,7 +271,7 @@ def execute_query(self, query, data_source_id):
redis_connection.delete(QueryTask._job_lock_id(query_hash, data_source.id))
if not error:
query_result = models.QueryResult.store_result(data_source.id, query_hash, query, data, run_time, datetime.datetime.utcnow())
query_result = models.QueryResult.store_result(data_source.id, query_hash, query, data, run_time, utils.utcnow())
else:
raise Exception(error)

View File

@@ -4,9 +4,11 @@ import codecs
import decimal
import datetime
import json
import random
import re
import hashlib
import sqlparse
import pytz
COMMENTS_REGEX = re.compile("/\*.*?\*/")
@@ -62,6 +64,14 @@ class SQLMetaData(object):
return False
def utcnow():
"""Return datetime.now value with timezone specified.
Without the timezone data, when the timestamp stored to the database it gets the current timezone of the server,
which leads to errors in calculations.
"""
return datetime.datetime.now(pytz.utc)
def slugify(s):
return re.sub('[^a-z0-9_\-]+', '-', s.lower())
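A quick sketch of the failure mode the utcnow() docstring describes (not part of the diff):

import datetime
import pytz

naive = datetime.datetime.utcnow()       # tzinfo is None
aware = datetime.datetime.now(pytz.utc)  # tzinfo is UTC
naive < aware  # TypeError: can't compare offset-naive and offset-aware datetimes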
@@ -79,6 +89,14 @@ def gen_query_hash(sql):
return hashlib.md5(sql.encode('utf-8')).hexdigest()
def generate_token(length):
chars = ('abcdefghijklmnopqrstuvwxyz'
'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
'0123456789')
rand = random.SystemRandom()
return ''.join(rand.choice(chars) for x in range(length))
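# Usage sketch (not in the diff): SystemRandom draws from os.urandom, so the
# result is suitable for secrets; 40 chars over a 62-symbol alphabet carry
# about 238 bits of entropy, e.g. user.api_key = generate_token(40).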
class JSONEncoder(json.JSONEncoder):
"""Custom JSON encoding class, to handle Decimal and datetime.date instances.
"""
@@ -86,9 +104,9 @@ class JSONEncoder(json.JSONEncoder):
if isinstance(o, decimal.Decimal):
return float(o)
if isinstance(o, datetime.date):
if isinstance(o, (datetime.date, datetime.time, datetime.timedelta)):
return o.isoformat()
return super(JSONEncoder, self).default(o)
@@ -128,4 +146,4 @@ class UnicodeWriter:
def writerows(self, rows):
for row in rows:
self.writerow(row)
self.writerow(row)

View File

@@ -1,9 +1,12 @@
import json
from flask import Flask, make_response
from werkzeug.wrappers import Response
from flask.ext.restful import Api
from redash import settings, utils
from redash.models import db
from redash.admin import init_admin
__version__ = '0.4.0'
@@ -14,6 +17,7 @@ app = Flask(__name__,
api = Api(app)
init_admin(app)
# configure our database
settings.DATABASE_CONFIG.update({'threadlocals': True})
@@ -21,10 +25,13 @@ app.config['DATABASE'] = settings.DATABASE_CONFIG
db.init_app(app)
from redash.authentication import setup_authentication
auth = setup_authentication(app)
setup_authentication(app)
@api.representation('application/json')
def json_representation(data, code, headers=None):
# Flask-Restful checks only for flask.Response but flask-login uses werkzeug.wrappers.Response
if isinstance(data, Response):
return data
resp = make_response(json.dumps(data, cls=utils.JSONEncoder), code)
resp.headers.extend(headers or {})
return resp

View File

@@ -1,6 +1,7 @@
Flask==0.10.1
Flask-Admin==1.1.0
Flask-RESTful==0.2.10
Flask-Login==0.2.9
Flask-Login==0.2.11
Flask-OAuth==0.12
passlib==1.6.2
Jinja2==2.7.2
@@ -14,7 +15,7 @@ psycopg2==2.5.2
python-dateutil==2.1
pytz==2013.9
redis==2.7.5
requests==2.2.0
requests==2.3.0
six==1.5.2
sqlparse==0.1.8
wsgiref==0.1.2
@@ -25,3 +26,7 @@ gunicorn==18.0
celery==3.1.11
jsonschema==2.4.0
click==3.3
RestrictedPython==3.6.0
wtf-peewee==0.2.3
pysaml2==2.4.0
pycrypto==2.6.1

View File

@@ -98,10 +98,8 @@ if [ ! -f "/opt/redash/.env" ]; then
fi
# Install latest version
# REDASH_VERSION=${REDASH_VERSION-0.4.0.b589}
# modified by @fedex1 3/15/2015 seems to be the latest version at this point in time.
REDASH_VERSION=${REDASH_VERSION-0.6.0.b722}
LATEST_URL="https://github.com/EverythingMe/redash/releases/download/v${REDASH_VERSION/.b/%2Bb}/redash.$REDASH_VERSION.tar.gz"
REDASH_VERSION=${REDASH_VERSION-0.6.3.b906}
LATEST_URL="https://github.com/EverythingMe/redash/releases/download/v${REDASH_VERSION}/redash.$REDASH_VERSION.tar.gz"
VERSION_DIR="/opt/redash/redash.$REDASH_VERSION"
REDASH_TARBALL=/tmp/redash.tar.gz
REDASH_TARBALL=/tmp/redash.tar.gz

View File

@@ -20,8 +20,12 @@ autorestart=true
stdout_logfile=/opt/redash/logs/api.log
stderr_logfile=/opt/redash/logs/api_error.log
# There are two queue types here: one for ad-hoc queries, and one for the refresh of scheduled queries
# (note that "scheduled_queries" appears only in the queue list of "redash_celery_scheduled").
# The default concurrency level for each is 2 (-c2); increase it based on your machine's resources.
[program:redash_celery]
command=/opt/redash/current/bin/run celery worker --app=redash.worker --beat -Qqueries,celery,scheduled_queries
command=/opt/redash/current/bin/run celery worker --app=redash.worker --beat -c2 -Qqueries,celery
process_name=redash_celery
numprocs=1
priority=999
@@ -29,3 +33,13 @@ autostart=true
autorestart=true
stdout_logfile=/opt/redash/logs/celery.log
stderr_logfile=/opt/redash/logs/celery_error.log
[program:redash_celery_scheduled]
command=/opt/redash/current/bin/run celery worker --app=redash.worker -c2 -Qscheduled_queries
process_name=redash_celery_scheduled
numprocs=1
priority=999
autostart=true
autorestart=true
stdout_logfile=/opt/redash/logs/celery.log
stderr_logfile=/opt/redash/logs/celery_error.log

View File

@@ -1,6 +1,5 @@
import datetime
import redash.models
from redash.utils import gen_query_hash
from redash.utils import gen_query_hash, utcnow
class ModelFactory(object):
@@ -45,7 +44,7 @@ user_factory = ModelFactory(redash.models.User,
data_source_factory = ModelFactory(redash.models.DataSource,
name='Test',
name=Sequence('Test {}'),
type='pg',
options='{"dbname": "test"}')
@@ -66,7 +65,7 @@ query_factory = ModelFactory(redash.models.Query,
query_result_factory = ModelFactory(redash.models.QueryResult,
data='{"columns":{}, "rows":[]}',
runtime=1,
retrieved_at=datetime.datetime.utcnow,
retrieved_at=utcnow,
query="SELECT 1",
query_hash=gen_query_hash('SELECT 1'),
data_source=data_source_factory.create)

View File

@@ -1,9 +1,10 @@
from flask.ext.login import current_user
from flask import request
from mock import patch
import time
from tests import BaseTestCase
from redash import models
from redash.google_oauth import create_and_login_user
from redash.authentication import ApiKeyAuthentication
from redash.authentication import api_key_load_user_from_request, hmac_load_user_from_request, sign
from tests.factories import user_factory, query_factory
from redash.wsgi import app
@@ -18,29 +19,72 @@ class TestApiKeyAuthentication(BaseTestCase):
self.query = query_factory.create(api_key=self.api_key)
def test_no_api_key(self):
auth = ApiKeyAuthentication()
with app.test_client() as c:
rv = c.get('/api/queries/{0}'.format(self.query.id))
self.assertFalse(auth.verify_authentication())
self.assertIsNone(api_key_load_user_from_request(request))
def test_wrong_api_key(self):
auth = ApiKeyAuthentication()
with app.test_client() as c:
rv = c.get('/api/queries/{0}'.format(self.query.id), query_string={'api_key': 'whatever'})
self.assertFalse(auth.verify_authentication())
self.assertIsNone(api_key_load_user_from_request(request))
def test_correct_api_key(self):
auth = ApiKeyAuthentication()
with app.test_client() as c:
rv = c.get('/api/queries/{0}'.format(self.query.id), query_string={'api_key': self.api_key})
self.assertTrue(auth.verify_authentication())
self.assertIsNotNone(api_key_load_user_from_request(request))
def test_no_query_id(self):
auth = ApiKeyAuthentication()
with app.test_client() as c:
rv = c.get('/api/queries', query_string={'api_key': self.api_key})
self.assertFalse(auth.verify_authentication())
self.assertIsNone(api_key_load_user_from_request(request))
def test_user_api_key(self):
user = user_factory.create(api_key="user_key")
with app.test_client() as c:
rv = c.get('/api/queries/', query_string={'api_key': user.api_key})
self.assertEqual(user.id, api_key_load_user_from_request(request).id)
class TestHMACAuthentication(BaseTestCase):
#
# This is a bad way to write these tests, but the way Flask works doesn't make it easy to write them properly...
#
def setUp(self):
super(TestHMACAuthentication, self).setUp()
self.api_key = 10
self.query = query_factory.create(api_key=self.api_key)
self.path = '/api/queries/{0}'.format(self.query.id)
self.expires = time.time() + 1800
def signature(self, expires):
return sign(self.query.api_key, self.path, expires)
def test_no_signature(self):
with app.test_client() as c:
rv = c.get(self.path)
self.assertIsNone(hmac_load_user_from_request(request))
def test_wrong_signature(self):
with app.test_client() as c:
rv = c.get(self.path, query_string={'signature': 'whatever', 'expires': self.expires})
self.assertIsNone(hmac_load_user_from_request(request))
def test_correct_signature(self):
with app.test_client() as c:
rv = c.get('/api/queries/{0}'.format(self.query.id), query_string={'signature': self.signature(self.expires), 'expires': self.expires})
self.assertIsNotNone(hmac_load_user_from_request(request))
def test_no_query_id(self):
with app.test_client() as c:
rv = c.get('/api/queries', query_string={'api_key': self.api_key})
self.assertIsNone(hmac_load_user_from_request(request))
def test_user_api_key(self):
user = user_factory.create(api_key="user_key")
path = '/api/queries/'
with app.test_client() as c:
signature = sign(user.api_key, path, self.expires)
rv = c.get(path, query_string={'signature': signature, 'expires': self.expires, 'user_id': user.id})
self.assertEqual(user.id, hmac_load_user_from_request(request).id)
class TestCreateAndLoginUser(BaseTestCase):
def test_logins_valid_user(self):
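sign() is imported at the top of this test module but defined elsewhere; a plausible sketch matching the call shape sign(key, path, expires) — the digest choice and message layout are assumptions:

import hashlib
import hmac

def sign(key, path, expires):
    # HMAC the requested path and expiry with the query's (or user's) API key.
    if not key:
        return None
    h = hmac.new(str(key), msg=path, digestmod=hashlib.sha1)
    h.update(str(expires))
    return h.hexdigest()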

View File

@@ -319,6 +319,17 @@ class QueryResultAPITest(BaseTestCase, AuthenticationTestMixin):
self.paths = []
super(QueryResultAPITest, self).setUp()
def test_post_result_list(self):
data_source = data_source_factory.create()
query_result = query_result_factory.create()
query = query_factory.create()
with app.test_client() as c, authenticated_user(c):
rv = json_request(c.post, '/api/query_results',
data={'data_source_id': data_source.id,
'query': query.query})
self.assertEquals(rv.status_code, 200)
class JobAPITest(BaseTestCase, AuthenticationTestMixin):
def setUp(self):
@@ -326,58 +337,6 @@ class JobAPITest(BaseTestCase, AuthenticationTestMixin):
super(JobAPITest, self).setUp()
class CsvQueryResultAPITest(BaseTestCase, AuthenticationTestMixin):
def setUp(self):
super(CsvQueryResultAPITest, self).setUp()
self.paths = []
self.query_result = query_result_factory.create()
self.query = query_factory.create()
self.path = '/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id)
# TODO: factor out the HMAC authentication tests
def signature(self, expires):
return sign(self.query.api_key, self.path, expires)
def test_redirect_when_unauthenticated(self):
with app.test_client() as c:
rv = c.get(self.path)
self.assertEquals(rv.status_code, 302)
def test_redirect_for_wrong_signature(self):
with app.test_client() as c:
rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id), query_string={'signature': 'whatever', 'expires': 0})
self.assertEquals(rv.status_code, 302)
def test_redirect_for_correct_signature_and_wrong_expires(self):
with app.test_client() as c:
rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id), query_string={'signature': self.signature(0), 'expires': 0})
self.assertEquals(rv.status_code, 302)
def test_redirect_for_correct_signature_and_no_expires(self):
with app.test_client() as c:
rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id), query_string={'signature': self.signature(time.time()+3600)})
self.assertEquals(rv.status_code, 302)
def test_redirect_for_correct_signature_and_expires_too_long(self):
with app.test_client() as c:
expires = time.time()+(10*3600)
rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id), query_string={'signature': self.signature(expires), 'expires': expires})
self.assertEquals(rv.status_code, 302)
def test_returns_200_for_correct_signature(self):
with app.test_client() as c:
expires = time.time()+1800
rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id), query_string={'signature': self.signature(expires), 'expires': expires})
self.assertEquals(rv.status_code, 200)
def test_returns_200_for_authenticated_user(self):
with app.test_client() as c, authenticated_user(c):
rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id))
self.assertEquals(rv.status_code, 200)
class TestLogin(BaseTestCase):
def setUp(self):
settings.PASSWORD_LOGIN_ENABLED = True

View File

@@ -6,8 +6,7 @@ import mock
from tests import BaseTestCase
from redash import models
from factories import dashboard_factory, query_factory, data_source_factory, query_result_factory, user_factory, widget_factory
from redash.utils import gen_query_hash
from redash import query_runner
from redash.utils import gen_query_hash, utcnow
class DashboardTest(BaseTestCase):
@@ -141,7 +140,7 @@ class QueryOutdatedQueriesTest(BaseTestCase):
self.assertNotIn(query, queries)
def test_outdated_queries_works_with_specific_time_schedule(self):
half_an_hour_ago = datetime.datetime.utcnow() - datetime.timedelta(minutes=30)
half_an_hour_ago = utcnow() - datetime.timedelta(minutes=30)
query = query_factory.create(schedule=half_an_hour_ago.strftime('%H:%M'))
query_result = query_result_factory.create(query=query, retrieved_at=half_an_hour_ago-datetime.timedelta(days=1))
query.latest_query_data = query_result
@@ -326,7 +325,7 @@ class TestQueryResultStoreResult(BaseTestCase):
self.query = "SELECT 1"
self.query_hash = gen_query_hash(self.query)
self.runtime = 123
self.utcnow = datetime.datetime.utcnow()
self.utcnow = utcnow()
self.data = "data"
def test_stores_the_result(self):

View File

@@ -1,7 +1,8 @@
import datetime
from mock import patch, call
from mock import patch, call, ANY
from tests import BaseTestCase
from tests.factories import query_factory, query_result_factory
from redash.utils import utcnow
from redash.tasks import refresh_queries
@@ -11,7 +12,7 @@ from redash.tasks import refresh_queries
class TestRefreshQueries(BaseTestCase):
def test_enqueues_outdated_queries(self):
query = query_factory.create(schedule="60")
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
retrieved_at = utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
query.latest_query_data = query_result
@@ -19,11 +20,11 @@ class TestRefreshQueries(BaseTestCase):
with patch('redash.tasks.QueryTask.add_task') as add_job_mock:
refresh_queries()
add_job_mock.assert_called_with(query.query, query.data_source, scheduled=True)
add_job_mock.assert_called_with(query.query, query.data_source, scheduled=True, metadata=ANY)
def test_skips_fresh_queries(self):
query = query_factory.create(schedule="1200")
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
retrieved_at = utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
@@ -33,7 +34,7 @@ class TestRefreshQueries(BaseTestCase):
def test_skips_queries_with_no_ttl(self):
query = query_factory.create(schedule=None)
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
retrieved_at = utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
@@ -45,7 +46,7 @@ class TestRefreshQueries(BaseTestCase):
query = query_factory.create(schedule="60")
query2 = query_factory.create(schedule="60", query=query.query, query_hash=query.query_hash,
data_source=query.data_source)
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
retrieved_at = utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
query.latest_query_data = query_result
@@ -55,12 +56,12 @@ class TestRefreshQueries(BaseTestCase):
with patch('redash.tasks.QueryTask.add_task') as add_job_mock:
refresh_queries()
add_job_mock.assert_called_once_with(query.query, query.data_source, scheduled=True)
add_job_mock.assert_called_once_with(query.query, query.data_source, scheduled=True, metadata=ANY)  # ANY stands in for {'Query ID': query.id, 'Username': 'Scheduled'}
def test_enqueues_query_with_correct_data_source(self):
query = query_factory.create(schedule="60")
query2 = query_factory.create(schedule="60", query=query.query, query_hash=query.query_hash)
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
retrieved_at = utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
query.latest_query_data = query_result
@@ -70,14 +71,16 @@ class TestRefreshQueries(BaseTestCase):
with patch('redash.tasks.QueryTask.add_task') as add_job_mock:
refresh_queries()
add_job_mock.assert_has_calls([call(query2.query, query2.data_source, scheduled=True), call(query.query, query.data_source, scheduled=True)], any_order=True)
add_job_mock.assert_has_calls([call(query2.query, query2.data_source, scheduled=True, metadata=ANY),
call(query.query, query.data_source, scheduled=True, metadata=ANY)],
any_order=True)
self.assertEquals(2, add_job_mock.call_count)
def test_enqueues_only_for_relevant_data_source(self):
query = query_factory.create(schedule="60")
query2 = query_factory.create(schedule="3600", query=query.query, query_hash=query.query_hash)
import psycopg2
retrieved_at = datetime.datetime.utcnow().replace(tzinfo=psycopg2.tz.FixedOffsetTimezone(offset=0, name=None)) - datetime.timedelta(minutes=10)
retrieved_at = utcnow().replace(tzinfo=psycopg2.tz.FixedOffsetTimezone(offset=0, name=None)) - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
query.latest_query_data = query_result
@@ -87,4 +90,4 @@ class TestRefreshQueries(BaseTestCase):
with patch('redash.tasks.QueryTask.add_task') as add_job_mock:
refresh_queries()
add_job_mock.assert_called_once_with(query.query, query.data_source, scheduled=True)
add_job_mock.assert_called_once_with(query.query, query.data_source, scheduled=True, metadata=ANY)