Compare commits

...

240 Commits

Author SHA1 Message Date
Arik Fraimovich
2911fa8af7 Bump version. 2015-07-13 18:31:31 +03:00
Arik Fraimovich
7fc2d5ee0b Update bootstrap.sh to use 0.6.3. 2015-07-13 12:00:28 +03:00
Arik Fraimovich
3c9c1466a3 Merge pull request #483 from erans/master
Feature: more fine-grained support for CORS in QueryResultAPI
2015-07-13 11:04:10 +03:00
Eran Sandler
4a7c066bf0 Too many languages... :-( 2015-07-13 10:05:07 +03:00
Eran Sandler
1a3657572e Added fine-grained control of the CORS header for QueryResultAPI and possibly future APIs. 2015-07-13 09:42:23 +03:00
Arik Fraimovich
666e3281e4 Merge pull request #482 from erans/master
Feature: CORS support for the Query Result API to allow embedding in other domains
2015-07-13 08:41:38 +03:00
Eran Sandler
66084b1a3b minor fixes 2015-07-12 23:07:06 +03:00
Eran Sandler
421470666a use set_from_string. 2015-07-12 23:06:00 +03:00
Eran Sandler
f8e2bc9eca Added configurable CORS support for the Query Result API (to export data as JSON or CSV). Configuration is via a comma-separated environment variable (REDASH_QUERIES_RESULT_CORS) listing one or more allowed domains 2015-07-12 23:00:50 +03:00
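A minimal sketch of how such a comma-separated origin whitelist might be consumed, assuming only the variable name given in the commit message (the actual parsing and header logic in re:dash may differ):

    import os

    # REDASH_QUERIES_RESULT_CORS is the variable named in the commit message;
    # the default value and the exact header handling are assumptions.
    raw = os.environ.get("REDASH_QUERIES_RESULT_CORS", "")
    allowed_origins = set(o.strip() for o in raw.split(",") if o.strip())

    def access_control_allow_origin(request_origin):
        # Echo the Origin header back only if it is whitelisted.
        return request_origin if request_origin in allowed_origins else None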
Arik Fraimovich
c195362710 Merge pull request #479 from erans/master
Feature: improved error reporting and new log information support for the Python query runner
2015-07-12 22:30:21 +03:00
Arik Fraimovich
b671dd0431 Merge pull request #480 from EverythingMe/feature/multiple-domains
Feature: support for multiple domains with Google OAuth
2015-07-12 13:36:17 +03:00
Arik Fraimovich
7793f3b257 Feature: support for multiple Google Auth domains 2015-07-12 12:51:45 +03:00
Arik Fraimovich
e09aa6f81a Show message for wrong user/password (closes #275) 2015-07-12 12:43:37 +03:00
Eran Sandler
780e0c0418 - refactored the Python query runner
- Improved error handling to show the real error (including indentation and syntax errors - it should show the correct line number)
- Added support for "print" statements, which become log messages shown in the query editor window. It's on by default; to turn it off, simply call 'disable_print_log()'
2015-07-12 12:14:46 +03:00
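An illustrative sketch of a query script using the behaviour described above; the print-to-log capture and the disable_print_log() helper come from the commit message, while the result structure is an assumption:

    # Hypothetical Python data source query script.
    print("starting query")  # shown as a log message in the query editor while the feature is on

    # disable_print_log() is the helper named in the commit message; calling it
    # would turn the print-to-log behaviour off for the rest of the script.
    # disable_print_log()

    result = {
        "columns": [{"name": "value", "type": "integer"}],  # assumed result shape
        "rows": [{"value": 42}],
    }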
Arik Fraimovich
a692e3f664 Merge pull request #476 from EverythingMe/feature/api
Feature: support for per user API keys
2015-07-08 21:34:06 +03:00
Arik Fraimovich
6860dde1f7 Set api_key to be unique 2015-07-08 21:29:32 +03:00
Arik Fraimovich
e183affdd0 Feature: support for per user api keys 2015-07-08 20:59:07 +03:00
Arik Fraimovich
39db74ff20 Merge pull request #475 from hakobera/support-infuxdb
Feature: Support InfluxDB v0.9+
2015-07-05 22:39:40 +03:00
Arik Fraimovich
05c2c21a85 Bump version. 2015-07-05 22:39:12 +03:00
Kazuyuki Honda
00edc29e50 Support influxdb 0.9+ 2015-07-06 01:15:43 +09:00
Arik Fraimovich
3771af0a8c Update bootstrap.sh to use 0.6.2. 2015-07-05 08:30:11 +03:00
Arik Fraimovich
c32c2d43f7 Bump version. 2015-07-04 10:41:06 +03:00
Arik Fraimovich
4e2e3f9077 Merge pull request #472 from BrunoSalerno/map_visualization_options_fixed
map visualization: options fixed
2015-07-04 10:23:55 +03:00
Bruno Salerno
2a27422df9 map visualization: draw_options to scope 2015-07-03 18:02:22 -03:00
Bruno Salerno
f9e0ce8e9c map visualization: options fixed 2015-07-03 16:45:55 -03:00
Arik Fraimovich
a1d49f13d3 Merge pull request #471 from EverythingMe/fix/visualization_api
Fix: opening viz editor resets its options
2015-07-02 13:17:50 +03:00
Arik Fraimovich
26aa199f9c Fix: opening viz editor resets its options 2015-07-02 13:17:32 +03:00
Arik Fraimovich
4c77f3f914 Merge pull request #470 from EverythingMe/fix/visualization_api
Increase limit of tables for showing search.
2015-07-02 11:06:19 +03:00
Arik Fraimovich
d6be792595 Increase limit of tables for showing search. 2015-07-02 11:05:45 +03:00
Arik Fraimovich
59c1ea7f16 Merge pull request #469 from EverythingMe/fix/visualization_api
Fix: map - HTTPS support in tiles/marker
2015-07-02 11:05:25 +03:00
Arik Fraimovich
4d24005eff Fix: map - HTTPS support in tiles/marker 2015-07-02 11:02:44 +03:00
Arik Fraimovich
2dab35b614 Merge pull request #468 from EverythingMe/fix/visualization_api
Fix: visualizations API fixes
2015-07-02 08:51:09 +03:00
Arik Fraimovich
0b61b88f5f Fix: make default options apply to new visualizations 2015-07-02 08:38:08 +03:00
Arik Fraimovich
e5cb58207c Fix: vis title wasn't updating when changing type 2015-07-02 08:26:10 +03:00
Arik Fraimovich
fc17d1af81 Don't cache static assets in debug mode 2015-07-02 08:25:51 +03:00
Arik Fraimovich
e6650e1e2d Merge pull request #467 from BrunoSalerno/leaflet-visualization-marker-path-bug-fixed
leaflet visualization: marker path bug handled
2015-07-01 22:49:12 +03:00
Bruno Salerno
3aa1cd0133 leaflet visualization: marker path bug handled 2015-07-01 16:40:56 -03:00
Arik Fraimovich
e04833c327 Merge pull request #466 from BrunoSalerno/leaflet-visualization
Feature: Map visualization (using Leaflet)
2015-07-01 20:58:43 +03:00
Bruno Salerno
b743cceb60 leaflet visualization: map template margins fixed 2015-07-01 14:53:31 -03:00
Bruno Salerno
a0e134d3b5 leaflet visualization: dynamic height 2015-07-01 14:15:17 -03:00
Bruno Salerno
d7fb2d7458 leaflet-visualization: div size fixed and bounds storing improved 2015-07-01 12:30:48 -03:00
Bruno Salerno
b913ce6022 leaflet visualization: color series named properly 2015-07-01 10:03:43 -03:00
Bruno Salerno
1eb7945d16 leaflet visualization: map bounds are stored and kept 2015-06-30 18:18:34 -03:00
Bruno Salerno
37d0026ee4 leaflet-visualization: point feature 2015-06-30 17:34:31 -03:00
Arik Fraimovich
9cdc2cb2f7 Merge pull request #465 from EverythingMe/fix/time_field_serialize
Feature: ability to control series order in charts.
2015-06-30 09:26:26 +03:00
Arik Fraimovich
a9bff9063e Feature: cli to get status. 2015-06-30 09:25:32 +03:00
Arik Fraimovich
380126ee44 Feature: ability to control series index in charts. 2015-06-30 09:15:00 +03:00
Arik Fraimovich
d8377375b8 Merge pull request #461 from myinsiders/saml
Added SAML authentication support, e.g. for OneLogin or Okta
2015-06-30 08:25:44 +03:00
Arik Fraimovich
98ff701f9a Merge pull request #464 from EverythingMe/fix/time_field_serialize
Fix #463: support for datetime.time and datetime.timedelta fields
2015-06-29 18:06:25 +03:00
Arik Fraimovich
f5ea3e97d3 Fix: support for datetime.time and datetime.timedelta fields 2015-06-29 18:01:36 +03:00
Mark White
719e96dd2f Added SAML login option to login form 2015-06-28 17:19:57 +01:00
Arik Fraimovich
6c6c0256ba Merge pull request #462 from EverythingMe/fix_codemirror_resize_issue
Fix: refresh CodeMirror size when schema browser appears
2015-06-28 13:46:12 +03:00
Arik Fraimovich
723df51cdd Fix: refresh CodeMirror size when schema browser appears 2015-06-28 13:45:49 +03:00
Arik Fraimovich
a0f4e263b2 Merge pull request #459 from olgakogan/patch-2
Fixed an error in case a query doesn't have last_modified_date
2015-06-28 10:27:04 +03:00
Arik Fraimovich
4706bf8060 Merge pull request #458 from erans/master
Initial and very early support for ElasticSearch query runner
2015-06-28 10:26:26 +03:00
Mark White
f96a9f659a Added Apache license to code taken from Okta 2015-06-26 11:45:24 +01:00
Mark White
63c273f896 Fixed issue in saml login 2015-06-26 11:12:27 +01:00
Mark White
622ac6d781 Fixes to saml callback server name code 2015-06-26 10:26:59 +01:00
Mark White
8dc564a8bc Added configuration of flask server name 2015-06-26 09:06:50 +01:00
Mark White
3ae5baef22 Added OneLogin support 2015-06-25 17:52:00 +01:00
olga
8d819068b5 Fixed an error in case a query doesn't have last_modified_date 2015-06-25 11:31:22 +03:00
Eran Sandler
585e056265 Initial very early release of an ElasticSearch query runner. It only supports Lucene-style queries (single line, similar to what Kibana uses but without aggregations). 2015-06-24 09:53:09 +03:00
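For illustration only, a single-line Lucene-style query of the kind described; the field names are made up and not taken from the commit:

    status:active AND user_agent:Chrome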
Arik Fraimovich
1914ed7c7c Merge pull request #456 from bells17/master
Changed the README's 'Setting up re:dash instance' url to a new url
2015-06-19 10:25:11 +01:00
bells17
bd216e93e7 Changed the README's 'Setting up re:dash instance' url to a new url 2015-06-19 10:20:41 +09:00
Arik Fraimovich
5e351de896 Merge pull request #455 from erans/master
added Mongo JSON serializer to correctly serialize ObjectId and datetime.datetime values
2015-06-17 10:59:42 +03:00
Eran Sandler
de0e534c77 removed the unnecessary check for datetime.datetime in the JSON encoder. 2015-06-17 10:58:12 +03:00
Eran Sandler
5fa1f9440d duh! 2015-06-16 11:50:20 +03:00
Eran Sandler
b3ddc5f8b9 removed old conversion of ObjectId to string since it is now part of the new JSON serializer 2015-06-16 11:34:19 +03:00
Eran Sandler
8cde5f9673 added Mongo JSON serializer to correctly serialize ObjectId 2015-06-16 11:27:23 +03:00
Arik Fraimovich
1bb53ca497 Merge pull request #451 from EverythingMe/fix/unicode_in_annotation
Fix: charts with category X axis were not sorted properly
2015-06-11 21:46:26 +03:00
Arik Fraimovich
0a3cd9267f Fix: charts with category x axis were not sorted properly 2015-06-11 21:45:45 +03:00
Arik Fraimovich
075d843354 Merge pull request #449 from EverythingMe/fix/unicode_in_annotation
Fix: schema browser chokes on large schemas
2015-06-10 13:36:53 +03:00
Arik Fraimovich
b14e5e8c0e Fix: schema browser chokes on large schemas 2015-06-10 13:36:05 +03:00
Arik Fraimovich
c9da4be422 Merge pull request #442 from EverythingMe/fix/timezone
Fix: when the server has non UTC timezone, timestamps were wrong
2015-06-07 22:23:46 +03:00
Arik Fraimovich
276ee7c27a Merge pull request #448 from olgakogan/master
supervisord default config: separate queue for ad-hoc and scheduled queries
2015-06-07 17:38:53 +03:00
olga
334040532a changed default concurrency level to 2 per queue 2015-06-07 17:36:24 +03:00
olga
335a3a98b5 separated the queue for ad-hoc and for scheduled queries (someone who runs an ad-hoc query should not have to wait because scheduled queries are being refreshed at that time) 2015-06-07 17:28:57 +03:00
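A conceptual sketch of the queue split described above; the queue names and the Celery task are illustrative and not taken from the re:dash code:

    from celery import Celery

    celery = Celery("sketch", broker="redis://localhost:6379/0")

    @celery.task
    def execute_query(query_id):
        pass  # run the query and store its result

    def enqueue(query_id, scheduled=False):
        # Ad-hoc runs get their own queue so they are not stuck behind scheduled refreshes.
        queue_name = "scheduled_queries" if scheduled else "queries"
        execute_query.apply_async(args=(query_id,), queue=queue_name)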
Arik Fraimovich
b17080a7f5 Merge pull request #446 from EverythingMe/fix/unicode_in_annotation
Fix #443: open table when searching & don't hide columns
2015-06-05 18:13:05 +03:00
Arik Fraimovich
8441c12b01 Fix #443: open table when searching & don't hide columns 2015-06-05 18:08:06 +03:00
Arik Fraimovich
3b4af1b6fa Merge pull request #445 from EverythingMe/fix/unicode_in_annotation
Fix #444: unicode characters in username fail query execution
2015-06-05 16:58:00 +03:00
Arik Fraimovich
c3deb8e2fa Fix #444: unicode characters in username fail query execution 2015-06-05 16:49:25 +03:00
Arik Fraimovich
a60b1686da Fix: when the server has non UTC timezone, timestamps were wrong 2015-06-03 07:58:28 +03:00
Arik Fraimovich
b56e87ceb2 Merge pull request #440 from EverythingMe/fix_ui
Fix: python query runner didn't allow iterating lists
2015-05-31 10:20:32 +03:00
Arik Fraimovich
fc89bcdaf3 Fix: python query runner didn't allow accessing dicts 2015-05-31 10:15:48 +03:00
Arik Fraimovich
15ec8321bb Merge pull request #437 from EverythingMe/fix_ui
Feature: ability to disable x axis labels
2015-05-19 22:24:36 +03:00
Arik Fraimovich
e6ba62485c Merge pull request #436 from EverythingMe/fix_ui
Fix: sorting not working for columns with special characters
2015-05-19 22:15:21 +03:00
Arik Fraimovich
9077b01fb9 Feature: ability to disable x axis labels 2015-05-19 22:15:08 +03:00
Arik Fraimovich
f45281be96 Fix: annotation was failing if query had unicode in it 2015-05-19 22:01:02 +03:00
Arik Fraimovich
a1c8ef9037 Merge pull request #435 from EverythingMe/fix_ui
Fix: string columns with date/time values failed to render.
2015-05-19 22:00:37 +03:00
Arik Fraimovich
f46e8af23f Fix: sorting not working for columns with special characters 2015-05-19 22:00:15 +03:00
Arik Fraimovich
30a89bfd2c Fix: string columns with dates failed to render. 2015-05-19 21:43:50 +03:00
Arik Fraimovich
6312f8738d Merge pull request #433 from stanhu/make-query-link-obvious
Make it obvious that the query link is clickable.
2015-05-17 08:18:19 +03:00
Stan Hu
9e3d5c10c5 Make it obvious that the query link is clickable: underline when hovering and add glyphicon 2015-05-16 22:06:04 -07:00
Arik Fraimovich
59b87ec4fd Merge pull request #434 from erans/master
MongoDB aggregation support + mongo documentation (as comments)
2015-05-17 07:49:20 +03:00
Eran Sandler
27ecf5f25c Merged the older MongoDB code into the new mongodb query runner to support aggregation 2015-05-16 22:22:33 +03:00
Arik Fraimovich
105971c4c8 Merge pull request #432 from stanhu/allow-undefined-max-age
Allow undefined max_age parameter in query_results endpoint
2015-05-15 11:25:24 +03:00
Stan Hu
690f8323c3 Allow undefined max_age parameter in query_results endpoint
An Error 500 would be returned by the endpoint if you attempted to
pass a query parameter to the dashboard since maxAge was undefined in JavaScript.
2015-05-14 22:00:08 -07:00
Arik Fraimovich
20eb110ce3 Fix: update_release_commit_sha should return json 2015-05-14 10:09:57 +03:00
Arik Fraimovich
571c9d0aee Update release manager: update tag commit sha on new release 2015-05-14 09:59:21 +03:00
Arik Fraimovich
0ee7292f16 Merge pull request #431 from EverythingMe/feature/additional_refresh_rates
Feature: additional refresh times (5, 10, 15, 30 minutes)
2015-05-14 09:25:35 +03:00
Arik Fraimovich
8c28392dfd Feature: additional refresh times (5, 10, 15, 30 minutes) 2015-05-13 20:59:39 +03:00
Arik Fraimovich
671f1f4478 Merge pull request #428 from olgakogan/master
Feature: support for column types in MySQL query runner
2015-05-12 13:55:56 +03:00
olga
557d3748be added support for column types in MySQL 2015-05-12 12:01:47 +03:00
Arik Fraimovich
f00d080ed2 Install optipng in CircleCI. 2015-05-12 10:33:11 +03:00
Arik Fraimovich
4e76c1305f Merge pull request #425 from EverythingMe/new_logo
New logo
2015-05-12 10:27:01 +03:00
Arik Fraimovich
36ef388e92 Bump version 2015-05-12 10:26:16 +03:00
Arik Fraimovich
2e1ee7f76c New logo 2015-05-12 10:25:57 +03:00
Arik Fraimovich
fc1e38772d New logo! 2015-05-11 23:13:15 +03:00
Arik Fraimovich
0e631a5121 Merge pull request #422 from EverythingMe/feature/288_bq_instance_auth
Feature: BigQueryGCE query runner that uses instance auth (fixes #288)
2015-05-10 23:18:45 +03:00
Arik Fraimovich
d74175efca Feature: BigQueryGCE query runner that uses instance auth 2015-05-10 08:46:41 +03:00
Arik Fraimovich
bf5fe7d2c7 Merge pull request #421 from EverythingMe/fix/issue_417
Feature: show visualization name next to query name (#418)
2015-05-08 22:28:12 +03:00
Arik Fraimovich
0f022aba92 Feature: show visualization name next to query name. 2015-05-07 21:58:12 +03:00
Arik Fraimovich
0b6e55e55a Remove unused code 2015-05-07 21:58:08 +03:00
Arik Fraimovich
e1c409366c Merge pull request #420 from EverythingMe/fix/issue_417
Fix: Make query editor auto resize again to prevent scroll issues
2015-05-07 21:52:07 +03:00
Arik Fraimovich
3b942118e9 Make query editor auto resize again to prevent scroll issues 2015-05-07 21:39:25 +03:00
Arik Fraimovich
7f1543db8f Merge pull request #419 from EverythingMe/fix/issue_417
Fix #417: integer columns treated as floats
2015-05-07 21:38:54 +03:00
Arik Fraimovich
74a5121be2 Fix #417: integer columns treated as floats 2015-05-07 21:25:30 +03:00
Arik Fraimovich
26fe136a1a Merge pull request #416 from daamien/patch-1
Upgrade to requests 2.3.0
2015-05-07 09:30:43 +03:00
damien clochard
83fb189b05 Update requirements.txt
The bootstrap.sh script fails on Debian 7.8

I solved the problem with:

$ sudo pip install requests==2.3.0

Check this bug for more details:
https://github.com/kennethreitz/requests/issues/2028
2015-05-06 18:36:24 +02:00
Arik Fraimovich
5e8d0d36c0 Merge pull request #409 from erans/master
Fix: minor fixes for MongoDB, script and Python query runners
2015-04-26 11:07:33 +03:00
Eran Sandler
4ae4cffa04 Removed a copy-paste duplication. Hmpf. 2015-04-26 11:05:40 +03:00
Eran Sandler
bc433e88fe Fix for __getitem__ error when accessing a dictionary directly. 2015-04-26 11:03:53 +03:00
Arik Fraimovich
513ef501a4 Merge pull request #410 from stanhu/sort-by-y-values
Feature: sort by Y values charts that have a single value per series
2015-04-26 10:23:06 +03:00
Stan Hu
f2bdcbedfb Simplify code and remove sortY option to avoid confusion 2015-04-26 00:18:03 -07:00
Stan Hu
fd056edb2a Support sort by y values for charts that have a single value per series 2015-04-21 22:52:14 -07:00
Eran Sandler
0f0acfdd12 Fix for an issue that prevented MongoDB connections from executing queries due to a faulty JSON schema configuration. 2015-04-22 00:18:28 +03:00
Eran Sandler
1e3b507b2b Fix for the script data source when command line parameters are passed as part of the query. 2015-04-21 09:36:05 +03:00
Arik Fraimovich
84d95272f3 Comment out active tasks cleanup, as it sometimes fails. 2015-04-20 10:05:04 +03:00
Arik Fraimovich
3b08e9e214 Merge pull request #408 from alexanderlz/master
Feature: additional metadata in query annotation (username, query id, queue name)
2015-04-20 08:48:59 +03:00
Arik Fraimovich
f4be83b06f Use query id from UI & annotate scheduled queries 2015-04-20 08:46:01 +03:00
Alexander Leibzon
4918d0430c add redash username/query_id to query for easier backtracking 2015-04-20 02:16:12 +03:00
Arik Fraimovich
e25b86b10d Merge pull request #398 from lenguyenthedat/data_sources_name_unique
Fix: make the data_sources' name unique
2015-04-18 22:51:12 +03:00
Arik Fraimovich
d3d305a843 Make sure data sources have unique names in tests 2015-04-18 22:46:42 +03:00
Arik Fraimovich
825b93bfe9 Fix migration numbering (there is 0007 already) 2015-04-18 22:46:42 +03:00
Arik Fraimovich
8c98282200 Rename only data sources with duplicates 2015-04-18 22:46:42 +03:00
Dat Le
768ac9eb04 Fix: make the data_sources' name unique
Also added migration script.
2015-04-18 22:46:42 +03:00
Arik Fraimovich
71011d2fca Merge pull request #407 from stanhu/add-flask-admin 2015-04-18 22:23:10 +03:00
Arik Fraimovich
9683a8ed82 Dedicated view for data source 2015-04-18 22:21:58 +03:00
Arik Fraimovich
10a6ac9313 Dedicated view for User model 2015-04-18 18:48:44 +03:00
Arik Fraimovich
dba325e9a2 Use ArrayListField for Array fields. 2015-04-18 18:47:54 +03:00
Arik Fraimovich
fcd9ab533c Fix: correctly call CustomModelConverter __init__. 2015-04-18 18:46:32 +03:00
Arik Fraimovich
68e3e8e1c5 Update name in admin screens 2015-04-18 18:00:52 +03:00
Arik Fraimovich
7f8b738b9e Fix requirements.txt (peewee was specified twice) 2015-04-18 16:58:05 +03:00
Arik Fraimovich
8a35dcedfa Merge pull request #406 from stanhu/add-mysql-port
Add support for configuring MySQL port
2015-04-18 16:14:26 +03:00
Stan Hu
ef763b7157 Use Flask-Admin to provide basic Web-based /admin page 2015-04-18 04:11:30 -07:00
Stan Hu
498e1d4474 Add support for configuring MySQL port 2015-04-17 22:57:34 -07:00
Arik Fraimovich
73de936c75 Merge pull request #405 from EverythingMe/feature/syntax_highglight
Feature: use correct syntax highlighting for Python/Mongo data sources
2015-04-14 17:53:46 +03:00
Arik Fraimovich
e32b709a41 Typo fix in the python query runner 2015-04-14 17:50:36 +03:00
Arik Fraimovich
60652f63c4 Use correct syntax highlighting for Python/Mongo sources 2015-04-14 17:48:36 +03:00
Arik Fraimovich
d0d4101f90 Merge pull request #404 from erans/master
Improvement: make Python datasource to use the RestrictedPython sandbox
2015-04-13 16:13:00 +03:00
Eran Sandler
646875794f Per request by Arik - the BDFL :-) 2015-04-13 15:27:28 +03:00
Eran Sandler
cdad4be0d5 Removed the try..catch block in the import of RestrictedPython since we are putting it in the requirements.txt file. 2015-04-13 15:23:49 +03:00
Eran Sandler
8f4285be62 Minor fixes from code review. 2015-04-13 15:21:43 +03:00
Eran Sandler
acfa55e2d0 Python datasource that uses RestrictedPython. Only modules listed in "allowedImportModules" (comma separated) will be allowed to be imported, and the code assumes they are installed on the server running the actual code. 2015-04-13 11:22:22 +03:00
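A rough sketch of the whitelist idea described above, not the actual re:dash implementation; "allowedImportModules" is the setting named in the commit message, and the wiring into RestrictedPython is assumed:

    # Value as it might appear in the data source configuration (comma separated).
    allowed_import_modules = "datetime,math"

    ALLOWED_MODULES = set(m.strip() for m in allowed_import_modules.split(",") if m.strip())

    def guarded_import(name, *args, **kwargs):
        # Refuse anything not explicitly whitelisted.
        if name not in ALLOWED_MODULES:
            raise ImportError("'%s' is not in allowedImportModules" % name)
        return __import__(name, *args, **kwargs)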
Arik Fraimovich
0b7cd07db0 Merge pull request #403 from EverythingMe/chore/release_process
Fix: schema browser styles
2015-04-08 16:14:30 +03:00
Arik Fraimovich
6297ffd523 Fix: schema browser styles 2015-04-08 16:13:03 +03:00
Arik Fraimovich
368f4fdbef Merge pull request #402 from EverythingMe/chore/release_process
New release process.
2015-04-06 12:51:12 +03:00
Arik Fraimovich
f52044a209 New release process 2015-04-06 12:50:17 +03:00
Arik Fraimovich
9fb33cf746 Merge pull request #399 from EverythingMe/feature/schema
Feature: schema browser and simple autocomplete
2015-04-02 17:10:07 +03:00
Arik Fraimovich
e3c5da5bc5 Fix tests to use correct data 2015-04-02 17:05:16 +03:00
Arik Fraimovich
e675690cc6 Sort schema by name 2015-04-02 16:56:00 +03:00
Arik Fraimovich
edc1622cf5 Schema support for MySQL 2015-04-02 16:55:52 +03:00
Arik Fraimovich
5ab3d4a40d Basic autocomplete functionality 2015-04-02 16:12:33 +03:00
Arik Fraimovich
cb29d87b63 Improve formatting of schema browser 2015-04-02 15:40:43 +03:00
Arik Fraimovich
6ff6bdad9f Use the correct redis connection in tests 2015-04-02 11:25:42 +03:00
Arik Fraimovich
e3cc3ef9a4 Move schema fetching to DataSource + tests 2015-04-02 11:25:42 +03:00
Arik Fraimovich
1fe4f291f2 Flush test redis db after each test 2015-04-02 11:25:22 +03:00
Arik Fraimovich
a54119f4a2 Show schema along side the query 2015-04-02 11:25:22 +03:00
Arik Fraimovich
c5b7fe5321 Use codemirror directly without ui-codemirror 2015-04-02 11:24:47 +03:00
Arik Fraimovich
d487ec9153 Upgrade codemirror to latest version 2015-04-02 11:24:18 +03:00
Arik Fraimovich
fa19b1ddc8 Endpoint to return data source schema 2015-04-02 11:23:52 +03:00
Arik Fraimovich
267c32b390 Merge pull request #401 from EverythingMe/fix/wrong_time_zone
Fix: use correct date when converting to UTC to get correct timezone.
2015-04-02 07:40:27 +03:00
Arik Fraimovich
aeff3f1494 Fix: use correct date when converting to UTC to get correct timezone. 2015-04-02 07:39:37 +03:00
Arik Fraimovich
e80e52f6c9 Add annotations for the injector. 2015-04-01 20:23:18 +03:00
Arik Fraimovich
fe41a70602 Merge pull request #400 from EverythingMe/feature/better_scheduler
Improved query scheduling option
2015-04-01 17:28:02 +03:00
Arik Fraimovich
976d9abe2d Disable UI tests, as they are no longer maintained :-( 2015-04-01 17:23:08 +03:00
Arik Fraimovich
041bc1100a New UI for query schedule setting 2015-04-01 17:07:19 +03:00
Arik Fraimovich
5d095ff6ab Resolve #113: upgrade to latest ui-bootstrap 2015-04-01 12:48:24 +03:00
Arik Fraimovich
ef01b61b29 Fix: refresh selector had empty option 2015-04-01 12:11:14 +03:00
Arik Fraimovich
faad6b656b Change query ttl field to be a string named schedule.
This is to allow other types of scheduling than just repeating every X seconds.
The first supported option will be: repeat every day at hour X.
2015-04-01 11:23:26 +03:00
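A small sketch of the two schedule formats this change implies, based on the description above and on the HH:MM pattern that appears later in the dashboard controller diff; the exact semantics are assumptions:

    import re

    def describe_schedule(schedule):
        # schedule is now a string (or None):
        #   "3600"  -> re-run every 3600 seconds (the old ttl behaviour)
        #   "17:00" -> re-run every day at 17:00 (the new daily option)
        if schedule is None:
            return "no automatic refresh"
        if re.match(r"^\d\d:\d\d$", schedule):
            return "every day at %s" % schedule
        return "every %s seconds" % int(schedule)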
Arik Fraimovich
0bc775584b Merge pull request #397 from EverythingMe/feature/edit_others_queries
Fix: forking broken
2015-03-22 17:32:13 +02:00
Arik Fraimovich
f2d96d61a1 Fix: forking broken 2015-03-22 17:28:47 +02:00
Arik Fraimovich
09bf2dd608 Merge pull request #396 from EverythingMe/feature/edit_others_queries
Feature: allow editing others' queries
2015-03-22 14:53:02 +02:00
Arik Fraimovich
ad1b9b06cf Fix test. 2015-03-22 14:42:08 +02:00
Arik Fraimovich
a4bceae60b Allow anyone to edit any query & show who edited it 2015-03-22 13:22:11 +02:00
Arik Fraimovich
9385449feb Add updated_at timestamp to visualization, query, dashboard and users models 2015-03-22 12:58:26 +02:00
Arik Fraimovich
562e1bb8c9 Merge pull request #395 from EverythingMe/feature/post_to_create_a_query
Convert additional dates to user's formatting
2015-03-19 08:54:54 +02:00
Arik Fraimovich
082b718303 Convert additional dates to user's formatting 2015-03-19 08:54:04 +02:00
Arik Fraimovich
c0872899e9 Merge pull request #394 from EverythingMe/feature/post_to_create_a_query
Fix: column definitions weren't updated.
2015-03-19 08:43:17 +02:00
Arik Fraimovich
086bbf129d Fix: column definitions weren't updated 2015-03-19 08:40:21 +03:00
Arik Fraimovich
4b7561e538 Merge pull request #393 from EverythingMe/feature/post_to_create_a_query
Fix: allow Unicode and other special chars in column names
2015-03-19 08:34:06 +02:00
Arik Fraimovich
407c5a839b Fix: allow Unicode and other special chars in column names
Stopped using Angular's $parse and just accessing the property directly.
2015-03-19 08:33:16 +02:00
Arik Fraimovich
b8aefd26b8 Merge pull request #392 from EverythingMe/feature/post_to_create_a_query
Support posting to /queries/new to create a new query.
2015-03-18 13:42:55 +02:00
Arik Fraimovich
85a762bcd2 Support posting to /queries/new to create a new query. 2015-03-18 13:28:23 +02:00
Arik Fraimovich
4f1b3d5beb Merge pull request #391 from EverythingMe/feature/api_key_auth
Fix: allow dots in column name
2015-03-16 15:08:00 +02:00
Arik Fraimovich
9218a7c437 Fix: allow dots in column name 2015-03-16 14:59:51 +02:00
Arik Fraimovich
71a3f066a5 Ignore gh-pages branch in CircleCI. 2015-03-16 09:03:52 +02:00
Arik Fraimovich
89436d779c Merge pull request #390 from fedex1/patch-1
Update bootstrap.sh
2015-03-16 05:37:33 +02:00
Ralph Yozzo
3631e938da Update bootstrap.sh
# modified by @fedex1 3/15/2015 seems to be the latest version at this point in time.
2015-03-15 23:27:45 -04:00
Arik Fraimovich
c0a9db68f0 Merge pull request #389 from EverythingMe/feature/api_key_auth
Fix: show date/time with respect to user's locale
2015-03-15 18:53:31 +02:00
Arik Fraimovich
bec9c9e14e Fix: show date/time in user's locale 2015-03-15 18:53:02 +02:00
Arik Fraimovich
47bbc25277 Merge pull request #388 from EverythingMe/feature/api_key_auth
Make it possible to set enabled query runners from env
2015-03-12 12:00:26 +02:00
Arik Fraimovich
f02c2588d2 Make it possible to set enabled query runners from env 2015-03-12 11:52:31 +02:00
Arik Fraimovich
7db5449dad Merge pull request #387 from EverythingMe/feature/api_key_auth
Record event when accessing query result from API
2015-03-12 11:46:35 +02:00
Arik Fraimovich
7f6c7f0634 Record event when accessing query result from API 2015-03-12 11:43:21 +02:00
Arik Fraimovich
73955c74f7 Merge pull request #386 from EverythingMe/feature/api_key_auth
Code cleanup (remove "worker's status" dead link & unused settings)
2015-03-11 11:30:15 +02:00
Arik Fraimovich
7de85da8ef Remove unused settings 2015-03-11 07:50:49 +02:00
Arik Fraimovich
0aab35252a Remove broken "Worker's Status" page 2015-03-11 07:47:10 +02:00
Arik Fraimovich
141dbc9e70 Merge pull request #385 from EverythingMe/feature/api_key_auth
Feature: optional API Key authentication instead of HMAC
2015-03-10 18:29:01 +02:00
Arik Fraimovich
2e513c347c Cleanup 2015-03-10 18:21:51 +02:00
Arik Fraimovich
335c136ec2 Show API Key button in query view 2015-03-10 18:08:02 +02:00
Arik Fraimovich
df1170eb9b Feature: optional api key only authentication 2015-03-10 17:51:17 +02:00
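A hypothetical usage sketch for the per-query API key; the endpoint path and the api_key parameter name are assumptions, not confirmed by the commits above:

    import requests

    REDASH_URL = "https://redash.example.com"   # hypothetical instance
    QUERY_ID = 123                              # hypothetical query id
    API_KEY = "..."                             # value shown by the "Show API Key" button

    response = requests.get(
        "%s/api/queries/%s/results.json" % (REDASH_URL, QUERY_ID),
        params={"api_key": API_KEY},
    )
    response.raise_for_status()
    print(response.json())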
Arik Fraimovich
69bcaddbe0 Fix: migrations stopped working due to peewee upgrade 2015-03-09 16:55:55 +02:00
Arik Fraimovich
67958cc27b MySQL query runner: make configuration access safer 2015-03-09 10:16:06 +02:00
Arik Fraimovich
6c716f23d9 Fix migration & query runner for mysql 2015-03-09 08:58:03 +02:00
Arik Fraimovich
bea11b0ac2 Merge pull request #384 from EverythingMe/feature/python_query_runner
Experimental Python query runner
2015-03-08 15:03:59 +02:00
Arik Fraimovich
4927386299 Experimental Python query runner 2015-03-08 15:02:57 +02:00
Arik Fraimovich
30a8550f6b Merge pull request #383 from EverythingMe/fix/migration
Fix: make migration work with new peewee
2015-03-08 14:37:42 +02:00
Arik Fraimovich
0389a45be4 Fix: make migration work with new peewee 2015-03-08 13:28:18 +02:00
Arik Fraimovich
707c169867 Merge pull request #382 from EverythingMe/feature/datasources_v2
Fix: import should be global
2015-03-08 12:27:34 +02:00
Arik Fraimovich
fca034ac0d Fix: import should be global 2015-03-08 12:23:51 +02:00
Arik Fraimovich
97691ea5ee Merge pull request #380 from EverythingMe/feature/datasources_v2
Refactor datasources (query runners)
2015-03-08 11:50:09 +02:00
Arik Fraimovich
40335a0e21 Fix: add missing option flags 2015-03-08 11:00:56 +02:00
Arik Fraimovich
9344cbd078 Update bootstrap script to support new format 2015-03-08 10:38:50 +02:00
Arik Fraimovich
9442fd9465 Update logging messages 2015-03-02 09:49:17 +02:00
Arik Fraimovich
c816f1003d Bump version 2015-03-02 09:45:29 +02:00
Arik Fraimovich
2107b79a80 Use validation for data source editing 2015-03-02 09:44:55 +02:00
Arik Fraimovich
8fae6de8c7 Update datasource CLI to use new format 2015-03-02 09:40:15 +02:00
Arik Fraimovich
d798c77574 Support for already valid data source config 2015-03-02 07:34:06 +02:00
Arik Fraimovich
0abce27381 Set configuration in base ctor 2015-02-24 07:50:10 +02:00
Arik Fraimovich
8a171ba39a Use JSON Schema for data source configuration 2015-02-24 07:50:10 +02:00
Arik Fraimovich
20af276772 Updated configuration spec to include friendly name and more 2015-02-24 07:50:10 +02:00
Arik Fraimovich
4058342763 WIP: configuration object 2015-02-24 07:50:10 +02:00
Arik Fraimovich
af64657260 Migration to update all data source options 2015-02-24 07:50:09 +02:00
Arik Fraimovich
b6bd46e59e New query runners implementation 2015-02-24 07:50:09 +02:00
Arik Fraimovich
31fe547e03 Merge pull request #378 from EverythingMe/feature/variables
Fix #263: timestamp fields should be with time zone
2015-02-23 11:10:20 +02:00
Arik Fraimovich
aff324071e Update peewee version 2015-02-23 09:19:39 +02:00
Arik Fraimovich
131266e408 Fix #263: timestamp fields should be with time zone 2015-02-23 09:02:16 +02:00
Arik Fraimovich
b1f97e8c8d Merge pull request #377 from olgakogan/master
'Download Dataset' fix - error in case of big numeric values
2015-02-21 15:21:18 +02:00
olgakogan
b585480c81 removed redundant handling of large numbers when generating a CSV file (caused ValueError: timestamp out of range) 2015-02-20 22:33:02 +02:00
93 changed files with 3900 additions and 1123 deletions

View File

@@ -1,6 +1,7 @@
NAME=redash
VERSION=`python ./manage.py version`
FULL_VERSION=$(VERSION)+b$(CIRCLE_BUILD_NUM)
BASE_VERSION=$(shell python ./manage.py version | cut -d + -f 1)
# VERSION gets evaluated every time it's referenced, therefore we need to use VERSION here instead of FULL_VERSION.
FILENAME=$(CIRCLE_ARTIFACTS)/$(NAME).$(VERSION).tar.gz
@@ -15,8 +16,8 @@ pack:
	tar -zcv -f $(FILENAME) --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *

upload:
	python bin/upload_version.py $(VERSION) $(FILENAME)
	python bin/release_manager.py $(CIRCLE_SHA1) $(BASE_VERSION) $(FILENAME)

test:
	nosetests --with-coverage --cover-package=redash tests/*.py
	cd rd_ui && grunt test
	#cd rd_ui && grunt test

View File

@@ -1,6 +1,5 @@
<p align="center">
<img title="re:dash" src='https://raw.githubusercontent.com/EverythingMe/redash/screenshots/redash_logo.png' />
<img title="re:dash" src='http://redash.io/static/img/redash_logo.png' width="200px"/>
</p>
<p align="center">
<img title="Build Status" src='https://circleci.com/gh/EverythingMe/redash.png?circle-token=8a695aa5ec2cbfa89b48c275aea298318016f040'/>
@@ -28,7 +27,7 @@ You can try out the demo instance: http://demo.redash.io/ (login with any Google
## Getting Started
* [Setting up re:dash instance](https://github.com/EverythingMe/redash/wiki/Setting-up-re:dash-instance) (includes links to ready made AWS/GCE images).
* [Setting up re:dash instance](http://redash.io/deployment/setup.html) (includes links to ready made AWS/GCE images).
* Additional documentation in the [Wiki](https://github.com/everythingme/redash/wiki).

View File

@@ -1,30 +0,0 @@
#!/usr/bin/env python
import sys
import requests

if __name__ == '__main__':
    response = requests.get('https://api.github.com/repos/EverythingMe/redash/releases')
    if response.status_code != 200:
        exit("Failed getting releases (status code: %s)." % response.status_code)

    sorted_releases = sorted(response.json(), key=lambda release: release['id'], reverse=True)
    latest_release = sorted_releases[0]
    asset_url = latest_release['assets'][0]['url']
    filename = latest_release['assets'][0]['name']

    wget_command = 'wget --header="Accept: application/octet-stream" %s -O %s' % (asset_url, filename)

    if '--url-only' in sys.argv:
        print asset_url
    elif '--wget' in sys.argv:
        print wget_command
    else:
        print "Latest release: %s" % latest_release['tag_name']
        print latest_release['body']
        print "\nTarball URL: %s" % asset_url
        print 'wget: %s' % (wget_command)

bin/release_manager.py (new file, 147 lines)
View File

@@ -0,0 +1,147 @@
import os
import sys
import json
import re
import subprocess
import requests

github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
repo = 'EverythingMe/redash'


def _github_request(method, path, params=None, headers={}):
    if not path.startswith('https://api.github.com'):
        url = "https://api.github.com/{}".format(path)
    else:
        url = path

    if params is not None:
        params = json.dumps(params)

    response = requests.request(method, url, data=params, auth=auth)
    return response


def exception_from_error(message, response):
    return Exception("({}) {}: {}".format(response.status_code, message, response.json().get('message', '?')))


def rc_tag_name(version):
    return "v{}-rc".format(version)


def get_rc_release(version):
    tag = rc_tag_name(version)
    response = _github_request('get', 'repos/{}/releases/tags/{}'.format(repo, tag))

    if response.status_code == 404:
        return None
    elif response.status_code == 200:
        return response.json()

    raise exception_from_error("Unknown error while looking RC release: ", response)


def create_release(version, commit_sha):
    tag = rc_tag_name(version)
    params = {
        'tag_name': tag,
        'name': "{} - RC".format(version),
        'target_commitish': commit_sha,
        'prerelease': True
    }

    response = _github_request('post', 'repos/{}/releases'.format(repo), params)
    if response.status_code != 201:
        raise exception_from_error("Failed creating new release", response)

    return response.json()


def upload_asset(release, filepath):
    upload_url = release['upload_url'].replace('{?name}', '')
    filename = filepath.split('/')[-1]

    with open(filepath) as file_content:
        headers = {'Content-Type': 'application/gzip'}
        response = requests.post(upload_url, file_content, params={'name': filename}, headers=headers, auth=auth, verify=False)

    if response.status_code != 201:  # not 200/201/...
        raise exception_from_error('Failed uploading asset', response)

    return response


def remove_previous_builds(release):
    for asset in release['assets']:
        response = _github_request('delete', asset['url'])
        if response.status_code != 204:
            raise exception_from_error("Failed deleting asset", response)


def get_changelog(commit_sha):
    latest_release = _github_request('get', 'repos/{}/releases/latest'.format(repo))
    if latest_release.status_code != 200:
        raise exception_from_error('Failed getting latest release', latest_release)

    latest_release = latest_release.json()
    previous_sha = latest_release['target_commitish']

    args = ['git', '--no-pager', 'log', '--merges', '--grep', 'Merge pull request', '--pretty=format:"%h|%s|%b|%p"', '{}...{}'.format(previous_sha, commit_sha)]
    log = subprocess.check_output(args)

    changes = ["Changes since {}:".format(latest_release['name'])]

    for line in log.split('\n'):
        try:
            sha, subject, body, parents = line[1:-1].split('|')
        except ValueError:
            continue

        try:
            pull_request = re.match("Merge pull request #(\d+)", subject).groups()[0]
            pull_request = " #{}".format(pull_request)
        except Exception, ex:
            pull_request = ""

        author = subprocess.check_output(['git', 'log', '-1', '--pretty=format:"%an"', parents.split(' ')[-1]])[1:-1]
        changes.append("{}{}: {} ({})".format(sha, pull_request, body.strip(), author))

    return "\n".join(changes)


def update_release_commit_sha(release, commit_sha):
    params = {
        'target_commitish': commit_sha,
    }

    response = _github_request('patch', 'repos/{}/releases/{}'.format(repo, release['id']), params)
    if response.status_code != 200:
        raise exception_from_error("Failed updating commit sha for existing release", response)

    return response.json()


def update_release(version, build_filepath, commit_sha):
    try:
        release = get_rc_release(version)

        if release:
            release = update_release_commit_sha(release, commit_sha)
        else:
            release = create_release(version, commit_sha)

        print "Using release id: {}".format(release['id'])

        remove_previous_builds(release)
        response = upload_asset(release, build_filepath)

        changelog = get_changelog(commit_sha)
        response = _github_request('patch', release['url'], {'body': changelog})
        if response.status_code != 200:
            raise exception_from_error("Failed updating release description", response)
    except Exception, ex:
        print ex


if __name__ == '__main__':
    commit_sha = sys.argv[1]
    version = sys.argv[2]
    filepath = sys.argv[3]

    # TODO: make sure running from git directory & remote = repo
    update_release(version, filepath, commit_sha)

View File

@@ -1,46 +0,0 @@
#!python
import os
import sys
import json
import requests
import subprocess


def capture_output(command):
    proc = subprocess.Popen(command, stdout=subprocess.PIPE)
    return proc.stdout.read()


if __name__ == '__main__':
    version = sys.argv[1]
    filepath = sys.argv[2]
    filename = filepath.split('/')[-1]
    github_token = os.environ['GITHUB_TOKEN']
    auth = (github_token, 'x-oauth-basic')
    commit_sha = os.environ['CIRCLE_SHA1']
    commit_body = capture_output(["git", "log", "--format=%b", "-n", "1", commit_sha])
    file_md5_checksum = capture_output(["md5sum", filepath]).split()[0]
    file_sha256_checksum = capture_output(["sha256sum", filepath]).split()[0]
    version_body = "%s\n\nMD5: %s\nSHA256: %s" % (commit_body, file_md5_checksum, file_sha256_checksum)

    params = json.dumps({
        'tag_name': 'v{0}'.format(version),
        'name': 're:dash v{0}'.format(version),
        'body': version_body,
        'target_commitish': commit_sha,
        'prerelease': True
    })

    response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
                             data=params,
                             auth=auth)

    upload_url = response.json()['upload_url']
    upload_url = upload_url.replace('{?name}', '')

    with open(filepath) as file_content:
        headers = {'Content-Type': 'application/gzip'}
        response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth,
                                 headers=headers, verify=False)

View File

@@ -7,6 +7,9 @@ machine:
      2.7.3

dependencies:
  pre:
    - wget http://downloads.sourceforge.net/project/optipng/OptiPNG/optipng-0.7.5/optipng-0.7.5.tar.gz
    - tar xvf optipng-0.7.5.tar.gz
    - cd optipng-0.7.5; ./configure; make; sudo checkinstall -y;
    - make deps
    - pip install -r dev_requirements.txt
    - pip install -r requirements.txt
@@ -26,3 +29,7 @@ deployment:
notify:
  webhooks:
    - url: https://webhooks.gitter.im/e/895d09c3165a0913ac2f

general:
  branches:
    ignore:
      - gh-pages

View File

@@ -2,12 +2,15 @@
"""
CLI to manage redash.
"""
import json
from flask.ext.script import Manager
from redash import settings, models, __version__
from redash.wsgi import app
from redash.import_export import import_manager
from redash.cli import users, database, data_sources
from redash.monitor import get_status
manager = Manager(app)
manager.add_command("database", database.manager)
@@ -21,6 +24,9 @@ def version():
"""Displays re:dash version."""
print __version__
@manager.command
def status():
print json.dumps(get_status(), indent=2)
@manager.command
def runworkers():

View File

@@ -1,12 +1,15 @@
from playhouse.migrate import Migrator
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = Migrator(db.database)
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrator.add_column(models.Query, models.Query.is_archived, 'is_archived')
        migrate(
            migrator.add_column('queries', 'is_archived', models.Query.is_archived)
        )

    db.close_db(None)

View File

@@ -0,0 +1,21 @@
from redash.models import db

if __name__ == '__main__':
    db.connect_db()
    columns = (
        ('activity_log', 'created_at'),
        ('dashboards', 'created_at'),
        ('data_sources', 'created_at'),
        ('events', 'created_at'),
        ('groups', 'created_at'),
        ('queries', 'created_at'),
        ('widgets', 'created_at'),
        ('query_results', 'retrieved_at')
    )

    with db.database.transaction():
        for column in columns:
            db.database.execute_sql("ALTER TABLE {} ALTER COLUMN {} TYPE timestamp with time zone;".format(*column))

    db.close_db(None)

View File

@@ -0,0 +1,73 @@
import json
from redash import query_runner
from redash.models import DataSource


def update(data_source):
    print "[%s] Old options: %s" % (data_source.name, data_source.options)

    if query_runner.validate_configuration(data_source.type, data_source.options):
        print "[%s] configuration already valid. skipping." % data_source.name
        return

    if data_source.type == 'pg':
        values = data_source.options.split(" ")
        configuration = {}
        for value in values:
            k, v = value.split("=", 1)
            configuration[k] = v
            if k == 'port':
                configuration[k] = int(v)
        data_source.options = json.dumps(configuration)

    elif data_source.type == 'mysql':
        mapping = {
            'Server': 'host',
            'User': 'user',
            'Pwd': 'passwd',
            'Database': 'db'
        }
        values = data_source.options.split(";")
        configuration = {}
        for value in values:
            k, v = value.split("=", 1)
            configuration[mapping[k]] = v
        data_source.options = json.dumps(configuration)

    elif data_source.type == 'graphite':
        old_config = json.loads(data_source.options)
        configuration = {
            "url": old_config["url"]
        }
        if "verify" in old_config:
            configuration['verify'] = old_config['verify']
        if "auth" in old_config:
            configuration['username'], configuration['password'] = old_config["auth"]
        data_source.options = json.dumps(configuration)

    elif data_source.type == 'url':
        data_source.options = json.dumps({"url": data_source.options})

    elif data_source.type == 'script':
        data_source.options = json.dumps({"path": data_source.options})

    elif data_source.type == 'mongo':
        data_source.type = 'mongodb'

    else:
        print "[%s] No need to convert type of: %s" % (data_source.name, data_source.type)

    print "[%s] New options: %s" % (data_source.name, data_source.options)
    data_source.save()


if __name__ == '__main__':
    for data_source in DataSource.all():
        update(data_source)

View File

@@ -0,0 +1,12 @@
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(
            migrator.drop_not_null('events', 'user_id')
        )

View File

@@ -0,0 +1,26 @@
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(
            migrator.add_column('queries', 'updated_at', models.Query.updated_at),
            migrator.add_column('dashboards', 'updated_at', models.Dashboard.updated_at),
            migrator.add_column('widgets', 'updated_at', models.Widget.updated_at),
            migrator.add_column('users', 'created_at', models.User.created_at),
            migrator.add_column('users', 'updated_at', models.User.updated_at),
            migrator.add_column('visualizations', 'created_at', models.Visualization.created_at),
            migrator.add_column('visualizations', 'updated_at', models.Visualization.updated_at)
        )

        db.database.execute_sql("UPDATE queries SET updated_at = created_at;")
        db.database.execute_sql("UPDATE dashboards SET updated_at = created_at;")
        db.database.execute_sql("UPDATE widgets SET updated_at = created_at;")

    db.close_db(None)

View File

@@ -0,0 +1,19 @@
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(
            migrator.add_column('queries', 'last_modified_by_id', models.Query.last_modified_by)
        )

        db.database.execute_sql("UPDATE queries SET last_modified_by_id = user_id;")

    db.close_db(None)

View File

@@ -0,0 +1,23 @@
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(
            migrator.add_column('queries', 'schedule', models.Query.schedule),
        )

        db.database.execute_sql("UPDATE queries SET schedule = ttl WHERE ttl > 0;")

        migrate(
            migrator.drop_column('queries', 'ttl')
        )

    db.close_db(None)

View File

@@ -0,0 +1,20 @@
from redash.models import db

if __name__ == '__main__':
    db.connect_db()

    with db.database.transaction():
        # Make sure all data sources names are unique.
        db.database.execute_sql("""
            UPDATE data_sources
            SET name = new_names.name
            FROM (
                SELECT id, name || ' ' || id as name
                FROM (SELECT id, name, rank() OVER (PARTITION BY name ORDER BY created_at ASC) FROM data_sources) ds WHERE rank > 1
            ) AS new_names
            WHERE data_sources.id = new_names.id;
        """)

        # Add unique constraint on data_sources.name.
        db.database.execute_sql("ALTER TABLE data_sources ADD CONSTRAINT unique_name UNIQUE (name);")

    db.close_db(None)

View File

@@ -0,0 +1,27 @@
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        column = models.User.api_key
        column.null = True

        migrate(
            migrator.add_column('users', 'api_key', models.User.api_key),
        )

        for user in models.User.select():
            user.save()

        migrate(
            migrator.add_not_null('users', 'api_key')
        )

    db.close_db(None)

Binary files not shown: four new image files added (1.3 KiB, 2.0 KiB, 3.8 KiB, and 6.0 KiB).

View File

@@ -18,8 +18,15 @@
<link rel="stylesheet" href="/bower_components/angular-ui-select/dist/select.css">
<link rel="stylesheet" href="/bower_components/pace/themes/pace-theme-minimal.css">
<link rel="stylesheet" href="/bower_components/font-awesome/css/font-awesome.css">
<link rel="stylesheet" href="/bower_components/codemirror/addon/hint/show-hint.css">
<link rel="stylesheet" href="/bower_components/leaflet/dist/leaflet.css">
<link rel="stylesheet" href="/styles/redash.css">
<!-- endbuild -->
<link rel="icon" type="image/png" sizes="32x32" href="/images/favicon-32x32.png">
<link rel="icon" type="image/png" sizes="96x96" href="/images/favicon-96x96.png">
<link rel="icon" type="image/png" sizes="16x16" href="/images/favicon-16x16.png">
</head>
<body>
<div growl></div>
@@ -33,15 +40,15 @@
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="/"><strong>{{name}}</strong></a>
<a class="navbar-brand" href="/"><img src="/images/redash_icon_small.png"/></a>
</div>
{% raw %}
<div class="collapse navbar-collapse navbar-ex1-collapse">
<ul class="nav navbar-nav">
<li class="active" ng-show="pageTitle"><a class="page-title" ng-bind="pageTitle"></a></li>
<li class="dropdown" ng-show="groupedDashboards.length > 0 || otherDashboards.length > 0 || currentUser.hasPermission('create_dashboard')">
<a href="#" class="dropdown-toggle" data-toggle="dropdown"><span class="glyphicon glyphicon-th-large"></span> <b class="caret"></b></a>
<ul class="dropdown-menu">
<li class="dropdown" ng-show="groupedDashboards.length > 0 || otherDashboards.length > 0 || currentUser.hasPermission('create_dashboard')" dropdown>
<a href="#" class="dropdown-toggle" dropdown-toggle><span class="glyphicon glyphicon-th-large"></span> <b class="caret"></b></a>
<ul class="dropdown-menu" dropdown-menu>
<span ng-repeat="(name, group) in groupedDashboards">
<li class="dropdown-submenu">
<a href="#" ng-bind="name"></a>
@@ -59,9 +66,9 @@
<li><a data-toggle="modal" href="#new_dashboard_dialog" ng-show="currentUser.hasPermission('create_dashboard')">New Dashboard</a></li>
</ul>
</li>
<li class="dropdown" ng-show="currentUser.hasPermission('view_query')">
<a href="#" class="dropdown-toggle" data-toggle="dropdown">Queries <b class="caret"></b></a>
<ul class="dropdown-menu">
<li class="dropdown" ng-show="currentUser.hasPermission('view_query')" dropdown>
<a href="#" class="dropdown-toggle" dropdown-toggle>Queries <b class="caret"></b></a>
<ul class="dropdown-menu" dropdown-menu>
<li ng-show="currentUser.hasPermission('create_query')"><a href="/queries/new">New Query</a></li>
<li><a href="/queries">Queries</a></li>
</ul>
@@ -105,9 +112,11 @@
<script src="/bower_components/codemirror/lib/codemirror.js"></script>
<script src="/bower_components/codemirror/addon/edit/matchbrackets.js"></script>
<script src="/bower_components/codemirror/addon/edit/closebrackets.js"></script>
<script src="/bower_components/codemirror/addon/hint/show-hint.js"></script>
<script src="/bower_components/codemirror/addon/hint/anyword-hint.js"></script>
<script src="/bower_components/codemirror/mode/sql/sql.js"></script>
<script src="/bower_components/codemirror/mode/python/python.js"></script>
<script src="/bower_components/codemirror/mode/javascript/javascript.js"></script>
<script src="/bower_components/angular-ui-codemirror/ui-codemirror.js"></script>
<script src="/bower_components/highcharts/highcharts.js"></script>
<script src="/bower_components/highcharts/modules/exporting.js"></script>
<script src="/bower_components/gridster/dist/jquery.gridster.js"></script>
@@ -123,13 +132,14 @@
<script src="/bower_components/marked/lib/marked.js"></script>
<script src="/scripts/ng_highchart.js"></script>
<script src="/scripts/ng_smart_table.js"></script>
<script src="/scripts/ui-bootstrap-tpls-0.5.0.min.js"></script>
<script src="/bower_components/angular-ui-bootstrap-bower/ui-bootstrap-tpls.js"></script>
<script src="/bower_components/bucky/bucky.js"></script>
<script src="/bower_components/pace/pace.js"></script>
<script src="/bower_components/mustache/mustache.js"></script>
<script src="/bower_components/canvg/rgbcolor.js"></script>
<script src="/bower_components/canvg/StackBlur.js"></script>
<script src="/bower_components/canvg/canvg.js"></script>
<script src="/bower_components/canvg/canvg.js"></script>
<script src="/bower_components/leaflet/dist/leaflet.js"></script>
<!-- endbuild -->
<!-- build:js({.tmp,app}) /scripts/scripts.js -->
@@ -146,6 +156,7 @@
<script src="/scripts/visualizations/base.js"></script>
<script src="/scripts/visualizations/chart.js"></script>
<script src="/scripts/visualizations/cohort.js"></script>
<script src="/scripts/visualizations/map.js"></script>
<script src="/scripts/visualizations/counter.js"></script>
<script src="/scripts/visualizations/table.js"></script>
<script src="/scripts/visualizations/pivot.js"></script>

View File

@@ -13,6 +13,10 @@
<link rel="stylesheet" href="/styles/redash.css">
<link rel="stylesheet" href="/styles/login.css">
<!-- endbuild -->
<link rel="icon" type="image/png" sizes="32x32" href="/images/favicon-32x32.png">
<link rel="icon" type="image/png" sizes="96x96" href="/images/favicon-96x96.png">
<link rel="icon" type="image/png" sizes="16x16" href="/images/favicon-16x16.png">
</head>
<body>
@@ -26,13 +30,20 @@
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="/"><strong>{{name}}</strong></a>
<a class="navbar-brand" href="/"><img src="/images/redash_icon_small.png"/></a>
</div>
</div>
</nav>
<div class="container">
<div class="row">
{% with messages = get_flashed_messages() %}
{% if messages %}
{% for message in messages %}
<div class="alert alert-warning" role="alert">{{ message }}</div>
{% endfor %}
{% endif %}
{% endwith %}
<div class="main">
{% if show_google_openid %}
@@ -48,6 +59,19 @@
{% endif %}
{% if show_saml_login %}
<div class="row">
<a href="/saml/login">SAML Login</a>
</div>
<div class="login-or">
<hr class="hr-or">
<span class="span-or">or</span>
</div>
{% endif %}
<form role="form" method="post" name="login">
<div class="form-group">
<label for="inputUsernameEmail">Username or email</label>

View File

@@ -6,7 +6,6 @@ angular.module('redash', [
'redash.services',
'redash.renderers',
'redash.visualization',
'ui.codemirror',
'highchart',
'ui.select2',
'angular-growl',

View File

@@ -16,16 +16,9 @@
$timeout(refresh, 59 * 1000);
};
$scope.flowerUrl = featureFlags.flowerUrl;
refresh();
}
var AdminWorkersCtrl = function ($scope, $sce) {
$scope.flowerUrl = $sce.trustAsResourceUrl(featureFlags.flowerUrl);
};
angular.module('redash.admin_controllers', [])
.controller('AdminStatusCtrl', ['$scope', 'Events', '$http', '$timeout', AdminStatusCtrl])
.controller('AdminWorkersCtrl', ['$scope', '$sce', AdminWorkersCtrl])
})();

View File

@@ -1,4 +1,11 @@
(function () {
var dateFormatter = function (value) {
if (!value) {
return "-";
}
return value.toDate().toLocaleString();
};
var QuerySearchCtrl = function($scope, $location, $filter, Events, Query) {
$scope.$parent.pageTitle = "Queries Search";
@@ -8,11 +15,6 @@
maxSize: 8,
};
var dateFormatter = function (value) {
if (!value) return "-";
return value.format("DD/MM/YY HH:mm");
}
$scope.gridColumns = [
{
"label": "Name",
@@ -21,7 +23,7 @@
},
{
'label': 'Created By',
'map': 'user.name'
'map': 'user_name'
},
{
'label': 'Created At',
@@ -30,9 +32,9 @@
},
{
'label': 'Update Schedule',
'map': 'ttl',
'map': 'schedule',
'formatFunction': function (value) {
return $filter('refreshRateHumanize')(value);
return $filter('scheduleHumanize')(value);
}
}
];
@@ -43,6 +45,7 @@
Query.search({q: $scope.term }, function(results) {
$scope.queries = _.map(results, function(query) {
query.created_at = moment(query.created_at);
query.user_name = query.user.name;
return query;
});
});
@@ -70,11 +73,6 @@
$scope.allQueries = [];
$scope.queries = [];
var dateFormatter = function (value) {
if (!value) return "-";
return value.format("DD/MM/YY HH:mm");
}
var filterQueries = function () {
$scope.queries = _.filter($scope.allQueries, function (query) {
if (!$scope.selectedTab) {
@@ -95,6 +93,7 @@
$scope.allQueries = _.map(queries, function (query) {
query.created_at = moment(query.created_at);
query.retrieved_at = moment(query.retrieved_at);
query.user_name = query.user.name;
return query;
});
@@ -109,7 +108,7 @@
},
{
'label': 'Created By',
'map': 'user.name'
'map': 'user_name'
},
{
'label': 'Created At',
@@ -130,9 +129,9 @@
},
{
'label': 'Update Schedule',
'map': 'ttl',
'map': 'schedule',
'formatFunction': function (value) {
return $filter('refreshRateHumanize')(value);
return $filter('scheduleHumanize')(value);
}
}
]

View File

@@ -100,9 +100,13 @@
Events.record(currentUser, "autorefresh", "dashboard", dashboard.id, {'enable': $scope.refreshEnabled});
if ($scope.refreshEnabled) {
var refreshRate = _.min(_.flatten($scope.dashboard.widgets), function(widget) {
return widget.visualization.query.ttl;
}).visualization.query.ttl;
var refreshRate = _.min(_.map(_.flatten($scope.dashboard.widgets), function(widget) {
var schedule = widget.visualization.query.schedule;
if (schedule === null || schedule.match(/\d\d:\d\d/) !== null) {
return 60;
}
return widget.visualization.query.schedule;
}));
$scope.refreshRate = _.max([120, refreshRate * 2]) * 1000;
@@ -138,7 +142,6 @@
var parameters = Query.collectParamsFromQueryString($location, $scope.query);
var maxAge = $location.search()['maxAge'];
$scope.queryResult = $scope.query.getQueryResult(maxAge, parameters);
$scope.nextUpdateTime = moment(new Date(($scope.query.updated_at + $scope.query.ttl + $scope.query.runtime + 300) * 1000)).fromNow();
$scope.type = 'visualization';
} else {

View File

@@ -17,7 +17,7 @@
saveQuery = $scope.saveQuery;
$scope.sourceMode = true;
$scope.canEdit = currentUser.canEdit($scope.query);
$scope.canEdit = true;
$scope.isDirty = false;
$scope.newVisualization = undefined;
@@ -68,7 +68,7 @@
$scope.duplicateQuery = function() {
Events.record(currentUser, 'fork', 'query', $scope.query.id);
$scope.query.id = null;
$scope.query.ttl = -1;
$scope.query.schedule = null;
$scope.saveQuery({
successMessage: 'Query forked',

View File

@@ -1,38 +1,73 @@
(function() {
'use strict';
function QueryViewCtrl($scope, Events, $route, $location, notifications, growl, Query, DataSource) {
function QueryViewCtrl($scope, Events, $route, $location, notifications, growl, $modal, Query, DataSource) {
var DEFAULT_TAB = 'table';
var getQueryResult = function(ttl) {
var getQueryResult = function(maxAge) {
// Collect params, and getQueryResult with params; getQueryResult merges it into the query
var parameters = Query.collectParamsFromQueryString($location, $scope.query);
if (ttl == undefined) {
ttl = $location.search()['maxAge'];
if (maxAge == undefined) {
maxAge = $location.search()['maxAge'];
}
$scope.queryResult = $scope.query.getQueryResult(ttl, parameters);
if (maxAge == undefined) {
maxAge = -1;
}
$scope.showLog = false;
$scope.queryResult = $scope.query.getQueryResult(maxAge, parameters);
}
$scope.dataSource = {};
$scope.query = $route.current.locals.query;
var updateSchema = function() {
$scope.hasSchema = false;
$scope.editorSize = "col-md-12";
var dataSourceId = $scope.query.data_source_id || $scope.dataSources[0].id;
DataSource.getSchema({id: dataSourceId}, function(data) {
if (data && data.length > 0) {
$scope.schema = data;
_.each(data, function(table) {
table.collapsed = true;
});
$scope.editorSize = "col-md-9";
$scope.hasSchema = true;
} else {
$scope.hasSchema = false;
$scope.editorSize = "col-md-12";
}
});
}
Events.record(currentUser, 'view', 'query', $scope.query.id);
getQueryResult();
$scope.queryExecuting = false;
$scope.isQueryOwner = currentUser.id === $scope.query.user.id;
$scope.isQueryOwner = (currentUser.id === $scope.query.user.id) || currentUser.hasPermission('admin');
$scope.canViewSource = currentUser.hasPermission('view_source');
$scope.dataSources = DataSource.get(function(dataSources) {
updateSchema();
$scope.query.data_source_id = $scope.query.data_source_id || dataSources[0].id;
$scope.dataSource = _.find(dataSources, function(ds) { return ds.id == $scope.query.data_source_id; });
});
// in view mode, latest dataset is always visible
// source mode changes this behavior
$scope.showDataset = true;
$scope.showLog = false;
$scope.lockButton = function(lock) {
$scope.queryExecuting = lock;
};
$scope.showApiKey = function() {
alert("API Key for this query:\n" + $scope.query.api_key);
};
$scope.saveQuery = function(options, data) {
if (data) {
data.id = $scope.query.id;
@@ -77,24 +112,24 @@
$scope.queryResult.cancelExecution();
Events.record(currentUser, 'cancel_execute', 'query', $scope.query.id);
};
$scope.archiveQuery = function(options, data) {
if (data) {
data.id = $scope.query.id;
} else {
data = $scope.query;
}
$scope.isDirty = false;
options = _.extend({}, {
successMessage: 'Query archived',
errorMessage: 'Query could not be archived'
}, options);
return Query.delete({id: data.id}, function() {
$scope.query.is_archived = true;
$scope.query.ttl = -1;
$scope.query.schedule = null;
growl.addSuccessMessage(options.successMessage);
// This feels dirty.
$('#archive-confirmation-modal').modal('hide');
@@ -117,6 +152,8 @@
});
}
updateSchema();
$scope.dataSource = _.find($scope.dataSources, function(ds) { return ds.id == $scope.query.data_source_id; });
$scope.executeQuery();
};
@@ -162,8 +199,34 @@
if (status === 'done' || status === 'failed') {
$scope.lockButton(false);
}
if ($scope.queryResult.getLog() != null) {
$scope.showLog = true;
}
});
$scope.openScheduleForm = function() {
if (!$scope.isQueryOwner) {
return;
};
$modal.open({
templateUrl: '/views/schedule_form.html',
size: 'sm',
scope: $scope,
controller: ['$scope', '$modalInstance', function($scope, $modalInstance) {
$scope.close = function() {
$modalInstance.close();
}
if ($scope.query.hasDailySchedule()) {
$scope.refreshType = 'daily';
} else {
$scope.refreshType = 'periodic';
}
}]
});
};
$scope.$watch(function() {
return $location.hash()
}, function(hash) {
@@ -176,5 +239,5 @@
angular.module('redash.controllers')
.controller('QueryViewCtrl',
['$scope', 'Events', '$route', '$location', 'notifications', 'growl', 'Query', 'DataSource', QueryViewCtrl]);
['$scope', 'Events', '$route', '$location', 'notifications', 'growl', '$modal', 'Query', 'DataSource', QueryViewCtrl]);
})();

View File

@@ -8,7 +8,7 @@
'query': '=',
'visualization': '=?'
},
template: '<a ng-href="{{link}}" class="query-link">{{query.name}}</a>',
template: '<small><span class="glyphicon glyphicon-link"></span></small> <a ng-href="{{link}}" class="query-link">{{query.name}}</a>',
link: function(scope, element) {
scope.link = '/queries/' + scope.query.id;
if (scope.visualization) {
@@ -29,7 +29,7 @@
restrict: 'E',
template: '<span ng-show="query.id && canViewSource">\
<a ng-show="!sourceMode"\
ng-href="{{query.id}}/source#{{selectedTab}}">Show Source\
ng-href="/queries/{{query.id}}/source#{{selectedTab}}">Show Source\
</a>\
<a ng-show="sourceMode"\
ng-href="/queries/{{query.id}}#{{selectedTab}}">Hide Source\
@@ -63,26 +63,97 @@
restrict: 'E',
scope: {
'query': '=',
'lock': '='
'lock': '=',
'schema': '=',
'syntax': '='
},
template: '<textarea\
ui-codemirror="editorOptions"\
ng-model="query.query">',
link: function($scope) {
$scope.editorOptions = {
mode: 'text/x-sql',
template: '<textarea></textarea>',
link: {
pre: function ($scope, element) {
$scope.syntax = $scope.syntax || 'sql';
var modes = {
'sql': 'text/x-sql',
'python': 'text/x-python',
'json': 'application/json'
};
var textarea = element.children()[0];
var editorOptions = {
mode: modes[$scope.syntax],
lineWrapping: true,
lineNumbers: true,
readOnly: false,
matchBrackets: true,
autoCloseBrackets: true
};
autoCloseBrackets: true,
extraKeys: {"Ctrl-Space": "autocomplete"}
};
$scope.$watch('lock', function(locked) {
$scope.editorOptions.readOnly = locked ? 'nocursor' : false;
});
var additionalHints = [];
CodeMirror.commands.autocomplete = function(cm) {
var hinter = function(editor, options) {
var hints = CodeMirror.hint.anyword(editor, options);
var cur = editor.getCursor(), token = editor.getTokenAt(cur).string;
hints.list = _.union(hints.list, _.filter(additionalHints, function (h) {
return h.search(token) === 0;
}));
return hints;
};
// CodeMirror.showHint(cm, CodeMirror.hint.anyword);
CodeMirror.showHint(cm, hinter);
};
var codemirror = CodeMirror.fromTextArea(textarea, editorOptions);
codemirror.on('change', function(instance) {
var newValue = instance.getValue();
if (newValue !== $scope.query.query) {
$scope.$evalAsync(function() {
$scope.query.query = newValue;
});
}
$('.schema-container').css('height', $('.CodeMirror').css('height'));
});
$scope.$watch('query.query', function () {
if ($scope.query.query !== codemirror.getValue()) {
codemirror.setValue($scope.query.query);
}
});
$scope.$watch('schema', function (schema) {
if (schema) {
var keywords = [];
_.each(schema, function (table) {
keywords.push(table.name);
_.each(table.columns, function (c) {
keywords.push(c);
});
});
additionalHints = _.unique(keywords);
}
codemirror.refresh();
});
$scope.$watch('syntax', function(syntax) {
codemirror.setOption('mode', modes[syntax]);
});
$scope.$watch('lock', function (locked) {
var readOnly = locked ? 'nocursor' : false;
codemirror.setOption('readOnly', readOnly);
});
}
}
}
};
}
function queryFormatter($http) {
@@ -111,42 +182,98 @@
}
}
function queryTimePicker() {
return {
restrict: 'E',
template: '<select ng-disabled="refreshType != \'daily\'" ng-model="hour" ng-change="updateSchedule()" ng-options="c as c for c in hourOptions"></select> :\
<select ng-disabled="refreshType != \'daily\'" ng-model="minute" ng-change="updateSchedule()" ng-options="c as c for c in minuteOptions"></select>',
link: function($scope) {
var padWithZeros = function(size, v) {
v = String(v);
if (v.length < size) {
v = "0" + v;
}
return v;
};
$scope.hourOptions = _.map(_.range(0, 24), _.partial(padWithZeros, 2));
$scope.minuteOptions = _.map(_.range(0, 60, 5), _.partial(padWithZeros, 2));
if ($scope.query.hasDailySchedule()) {
var parts = $scope.query.scheduleInLocalTime().split(':');
$scope.minute = parts[1];
$scope.hour = parts[0];
} else {
$scope.minute = "15";
$scope.hour = "00";
}
$scope.updateSchedule = function() {
var newSchedule = moment().hour($scope.hour).minute($scope.minute).utc().format('HH:mm');
if (newSchedule != $scope.query.schedule) {
$scope.query.schedule = newSchedule;
$scope.saveQuery();
}
};
$scope.$watch('refreshType', function() {
if ($scope.refreshType == 'daily') {
$scope.updateSchedule();
}
});
}
}
}
function queryRefreshSelect() {
return {
restrict: 'E',
template: '<select\
ng-disabled="!isQueryOwner"\
ng-model="query.ttl"\
ng-disabled="refreshType != \'periodic\'"\
ng-model="query.schedule"\
ng-change="saveQuery()"\
ng-options="c.value as c.name for c in refreshOptions">\
<option value="">No Refresh</option>\
</select>',
link: function($scope) {
$scope.refreshOptions = [
{
value: -1,
name: 'No Refresh'
},
{
value: 60,
value: "60",
name: 'Every minute'
},
]
}
];
_.each([5, 10, 15, 30], function(i) {
$scope.refreshOptions.push({
value: String(i*60),
name: "Every " + i + " minutes"
})
});
_.each(_.range(1, 13), function (i) {
$scope.refreshOptions.push({
value: i * 3600,
value: String(i * 3600),
name: 'Every ' + i + 'h'
});
})
$scope.refreshOptions.push({
value: 24 * 3600,
value: String(24 * 3600),
name: 'Every 24h'
});
$scope.refreshOptions.push({
value: 7 * 24 * 3600,
value: String(7 * 24 * 3600),
name: 'Once a week'
});
$scope.$watch('refreshType', function() {
if ($scope.refreshType == 'periodic') {
if ($scope.query.hasDailySchedule()) {
$scope.query.schedule = null;
$scope.saveQuery();
}
}
});
}
}
@@ -158,5 +285,6 @@
.directive('queryResultLink', queryResultCSVLink)
.directive('queryEditor', queryEditor)
.directive('queryRefreshSelect', queryRefreshSelect)
.directive('queryTimePicker', queryTimePicker)
.directive('queryFormatter', ['$http', queryFormatter]);
})();
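A quick sketch of the local/UTC round trip performed by the time picker's updateSchedule and by query.scheduleInLocalTime(): the daily schedule is always persisted as a UTC "HH:mm" string. The example times assume a UTC+3 browser and are purely illustrative:
// local picker value -> stored schedule (what updateSchedule does)
moment().hour(9).minute(30).utc().format('HH:mm');        // "06:30" on a UTC+3 machine
// stored schedule -> local display (what scheduleInLocalTime does)
moment.utc().hour(6).minute(30).local().format('HH:mm');  // "09:30" on the same machine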

View File

@@ -24,13 +24,17 @@ angular.module('redash.filters', []).
return durationHumanize;
})
.filter('refreshRateHumanize', function () {
return function (ttl) {
if (ttl == -1) {
.filter('scheduleHumanize', function() {
return function (schedule) {
if (schedule === null) {
return "Never";
} else {
return "Every " + durationHumanize(ttl);
} else if (schedule.match(/\d\d:\d\d/) !== null) {
var parts = schedule.split(':');
var localTime = moment.utc().hour(parts[0]).minute(parts[1]).local().format('HH:mm');
return "Every day at " + localTime;
}
return "Every " + durationHumanize(parseInt(schedule));
}
})
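For reference, the three shapes a schedule value can now take and what the filter renders for each. This is only a sketch that assumes the filter function itself is in scope; in templates it is applied as "| scheduleHumanize":
scheduleHumanize(null);     // "Never"
scheduleHumanize("13:30");  // "Every day at <13:30 UTC rendered in the viewer's local time>"
scheduleHumanize("3600");   // "Every " + durationHumanize(3600), i.e. hourly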

View File

@@ -50,7 +50,7 @@
;
if (moment.isMoment(this.x)) {
var s = '<b>' + moment(this.x).format("DD/MM/YY HH:mm") + '</b>',
var s = '<b>' + this.x.toDate().toLocaleString() + '</b>',
pointsCount = this.points.length;
$.each(this.points, function (i, point) {
@@ -308,21 +308,22 @@
// We check either for true or undefined for backward compatibility.
var series = scope.series;
if (chartOptions['sortX'] === true || chartOptions['sortX'] === undefined) {
var seriesCopy = [];
_.each(series, function (s) {
// make a copy of series data, so we don't override original.
var fieldName = 'x';
if (s.data.length > 0 && _.has(s.data[0], 'name')) {
fieldName = 'name';
};
// If this is a chart that has just one row for multiple columns, sort
// by the Y values. For example:
//
// A | B | C
// 20 | 30 | 15
//
// Will be sorted:
// C | A | B
// 15 | 20 | 30
var sortable = _.every(series, function(s) { return s.data.length == 1 });
var sorted = _.extend({}, s, {data: _.sortBy(s.data, fieldName)});
seriesCopy.push(sorted);
if (sortable) {
series = _.sortBy(series, function (s) {
return s.data[0].y
});
series = seriesCopy;
}
if (!('xAxis' in chartOptions && 'type' in chartOptions['xAxis'])) {
@@ -359,6 +360,23 @@
});
}
}
if (chartOptions['sortX'] === true || chartOptions['sortX'] === undefined) {
var seriesCopy = [];
_.each(series, function (s) {
// make a copy of series data, so we don't override original.
var fieldName = 'x';
if (s.data.length > 0 && _.has(s.data[0], 'name')) {
fieldName = 'name';
};
var sorted = _.extend({}, s, {data: _.sortBy(s.data, fieldName)});
seriesCopy.push(sorted);
});
series = seriesCopy;
}
scope.chart.counters.color = 0;
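The comment block above describes the one-row-per-series case; here is a small worked example of what the new sortable branch does (the data values are made up):
var series = [
  { name: 'A', data: [{ x: 0, y: 20 }] },
  { name: 'B', data: [{ x: 0, y: 30 }] },
  { name: 'C', data: [{ x: 0, y: 15 }] }
];
var sortable = _.every(series, function (s) { return s.data.length == 1; });
if (sortable) {
  series = _.sortBy(series, function (s) { return s.data[0].y; });
}
// series is now ordered C (15), A (20), B (30), matching the comment above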

View File

@@ -91,7 +91,7 @@
//insert columns from column config
//TODO add a way to clean all columns
scope.$watch('columnCollection', function (oldValue, newValue) {
scope.$watchCollection('columnCollection', function (oldValue, newValue) {
if (scope.columnCollection) {
scope.columns.length = 0;
for (var i = 0, l = scope.columnCollection.length; i < l; i++) {
@@ -205,11 +205,10 @@
column = scope.column,
row = scope.dataRow,
format = filter('format'),
getter = parse(column.map),
childScope;
//can be useful for child directives
scope.formatedValue = format(getter(row), column.formatFunction, column.formatParameter);
scope.formatedValue = format(row[column.map], column.formatFunction, column.formatParameter);
function defaultContent() {
//clear content
@@ -267,12 +266,11 @@
replace: true,
link: function (scope, element, attrs, ctrl) {
var form = angular.element(element.children()[1]),
input = angular.element(form.children()[0]),
getter = parse(scope.column.map);
input = angular.element(form.children()[0]);
//init values
scope.isEditMode = false;
scope.value = getter(scope.row);
scope.value = scope.row[scope.column.map];
scope.submit = function () {
@@ -285,7 +283,7 @@
};
scope.toggleEditMode = function () {
scope.value = getter(scope.row);
scope.value = scope.row[scope.column.map];
scope.isEditMode = scope.isEditMode !== true;
};
@@ -383,7 +381,10 @@
function sortDataRow(array, column) {
var sortAlgo = (scope.sortAlgorithm && angular.isFunction(scope.sortAlgorithm)) === true ? scope.sortAlgorithm : filter('orderBy');
if (column) {
return arrayUtility.sort(array, sortAlgo, column.sortPredicate, column.reverse);
var predicate = function(o) {
return o[column.sortPredicate];
};
return arrayUtility.sort(array, sortAlgo, predicate, column.reverse);
} else {
return array;
}
@@ -595,13 +596,11 @@
*/
this.updateDataRow = function (dataRow, propertyName, newValue) {
var index = scope.displayedCollection.indexOf(dataRow),
getter = parse(propertyName),
setter = getter.assign,
oldValue;
if (index !== -1) {
oldValue = getter(scope.displayedCollection[index]);
oldValue = scope.displayedCollection[index][propertyName];
if (oldValue !== newValue) {
setter(scope.displayedCollection[index], newValue);
scope.displayedCollection[index][propertyName] = newValue;
scope.$emit('updateDataRow', {item: scope.displayedCollection[index]});
}
}
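These smart-table changes replace Angular's $parse getters with plain bracket access, which is why the query list columns earlier in this diff switched from the nested 'user.name' map to the flattened 'user_name' property. A tiny illustration (the row object is made up):
var row = { user_name: 'Arik', user: { name: 'Arik' } };
row['user_name'];   // "Arik" - plain property lookup still works
row['user.name'];   // undefined - the dotted path is now treated as a literal key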

View File

@@ -12,6 +12,8 @@
var columnTypes = {};
// TODO: we should stop manipulating incoming data, and switch to relying on the column type set by the backend.
// This logic is prone to errors and is better removed. Kept for now, for backward compatibility.
_.each(this.query_result.data.rows, function (row) {
_.each(row, function (v, k) {
if (angular.isNumber(v)) {
@@ -30,7 +32,9 @@
_.each(this.query_result.data.columns, function(column) {
if (columnTypes[column.name]) {
column.type = columnTypes[column.name];
if (column.type == null || column.type == 'string') {
column.type = columnTypes[column.name];
}
}
});
@@ -91,6 +95,14 @@
return this.job.error;
}
QueryResult.prototype.getLog = function() {
if (!this.query_result.data || !this.query_result.data.log || this.query_result.data.log.length == 0) {
return null;
}
return this.query_result.data.log;
}
QueryResult.prototype.getUpdatedAt = function () {
return this.query_result.retrieved_at || this.job.updated_at * 1000.0 || this.updatedAt;
}
@@ -243,26 +255,9 @@
return parts[0];
};
var charConversionMap = {
'__pct': /%/g,
'_': / /g,
'__qm': /\?/g,
'__brkt': /[\(\)\[\]]/g,
'__dash': /-/g,
'__amp': /&/g,
'__sl': /\//g,
'__fsl': /\\/g,
};
QueryResult.prototype.getColumnCleanName = function (column) {
var name = this.getColumnNameWithoutType(column);
if (name != '') {
_.each(charConversionMap, function(regex, replacement) {
name = name.replace(regex, replacement);
});
}
return name;
}
@@ -325,7 +320,7 @@
this.filters = filters;
}
var refreshStatus = function (queryResult, query, ttl) {
var refreshStatus = function (queryResult, query) {
Job.get({'id': queryResult.job.id}, function (response) {
queryResult.update(response);
@@ -335,7 +330,7 @@
});
} else if (queryResult.getStatus() != "failed") {
$timeout(function () {
refreshStatus(queryResult, query, ttl);
refreshStatus(queryResult, query);
}, 3000);
}
})
@@ -355,14 +350,19 @@
return this.deferred.promise;
}
QueryResult.get = function (data_source_id, query, ttl) {
QueryResult.get = function (data_source_id, query, maxAge, queryId) {
var queryResult = new QueryResult();
QueryResultResource.post({'data_source_id': data_source_id, 'query': query, 'ttl': ttl}, function (response) {
var params = {'data_source_id': data_source_id, 'query': query, 'max_age': maxAge};
if (queryId !== undefined) {
params['query_id'] = queryId;
};
QueryResultResource.post(params, function (response) {
queryResult.update(response);
if ('job' in response) {
refreshStatus(queryResult, query, ttl);
refreshStatus(queryResult, query);
}
});
@@ -390,7 +390,7 @@
return new Query({
query: "",
name: "New Query",
ttl: -1,
schedule: null,
user: currentUser
});
};
@@ -414,10 +414,19 @@
return '/queries/' + this.id + '/source';
};
Query.prototype.getQueryResult = function (ttl, parameters) {
if (ttl == undefined) {
ttl = this.ttl;
}
Query.prototype.hasDailySchedule = function() {
return (this.schedule && this.schedule.match(/\d\d:\d\d/) !== null);
}
Query.prototype.scheduleInLocalTime = function() {
var parts = this.schedule.split(':');
return moment.utc().hour(parts[0]).minute(parts[1]).local().format('HH:mm');
}
Query.prototype.getQueryResult = function (maxAge, parameters) {
// if (ttl == undefined) {
// ttl = this.ttl;
// }
var queryText = this.query;
@@ -443,16 +452,16 @@
this.latest_query_data_id = null;
}
if (this.latest_query_data && ttl != 0) {
if (this.latest_query_data && maxAge != 0) {
if (!this.queryResult) {
this.queryResult = new QueryResult({'query_result': this.latest_query_data});
}
} else if (this.latest_query_data_id && ttl != 0) {
} else if (this.latest_query_data_id && maxAge != 0) {
if (!this.queryResult) {
this.queryResult = QueryResult.getById(this.latest_query_data_id);
}
} else if (this.data_source_id) {
this.queryResult = QueryResult.get(this.data_source_id, queryText, ttl);
this.queryResult = QueryResult.get(this.data_source_id, queryText, maxAge, this.id);
}
return this.queryResult;
@@ -488,7 +497,12 @@
var DataSource = function ($resource) {
var DataSourceResource = $resource('/api/data_sources/:id', {id: '@id'}, {'get': {'method': 'GET', 'cache': true, 'isArray': true}});
var actions = {
'get': {'method': 'GET', 'cache': true, 'isArray': true},
'getSchema': {'method': 'GET', 'cache': true, 'isArray': true, 'url': '/api/data_sources/:id/schema'}
};
var DataSourceResource = $resource('/api/data_sources/:id', {id: '@id'}, actions);
return DataSourceResource;
}
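A short sketch of the maxAge convention that replaces ttl here, as called from QueryViewCtrl. The exact caching window is enforced server-side by QueryResult.get_latest, so the last case below is an assumption about its behaviour:
// maxAge === 0  -> ignore any cached result and execute the query
// maxAge === -1 -> accept whatever cached result exists (the default)
// maxAge === N  -> presumably accept a cached result no older than N seconds
$scope.query.getQueryResult(0);    // force a fresh run
$scope.query.getQueryResult(-1);   // reuse the latest result when available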

View File

@@ -55,6 +55,22 @@
}];
};
var VisualizationName = function(Visualization) {
return {
restrict: 'E',
scope: {
visualization: '='
},
template: '<small>{{name}}</small>',
replace: false,
link: function (scope) {
if (Visualization.visualizations[scope.visualization.type].name != scope.visualization.name) {
scope.name = scope.visualization.name;
}
}
}
}
var VisualizationRenderer = function ($location, Visualization) {
return {
restrict: 'E',
@@ -72,42 +88,9 @@
width: '50%'
};
function readURL() {
var searchFilters = angular.fromJson($location.search().filters);
if (searchFilters) {
_.forEach(scope.filters, function(filter) {
var value = searchFilters[filter.friendlyName];
if (value) {
filter.current = value;
}
});
}
}
function updateURL(filters) {
var current = {};
_.each(filters, function(filter) {
if (filter.current) {
current[filter.friendlyName] = filter.current;
}
});
var newSearch = angular.extend($location.search(), {
filters: angular.toJson(current)
});
$location.search(newSearch);
}
scope.$watch('queryResult && queryResult.getFilters()', function (filters) {
if (filters) {
scope.filters = filters;
if (filters.length && false) {
readURL();
// start watching for changes and update URL
scope.$watch('filters', updateURL, true);
}
}
});
}
@@ -138,7 +121,7 @@
query: '=',
queryResult: '=',
visualization: '=?',
openEditor: '=?',
openEditor: '@',
onNewSuccess: '=?'
},
link: function (scope, element, attrs) {
@@ -167,9 +150,13 @@
scope.$watch('visualization.type', function (type, oldType) {
// if not edited by user, set name to match type
if (type && oldType != type && scope.visualization && !scope.visForm.name.$dirty) {
// poor man's titlecase
scope.visualization.name = scope.visualization.type[0] + scope.visualization.type.slice(1).toLowerCase();
scope.visualization.name = _.string.titleize(scope.visualization.type);
}
if (type && oldType != type && scope.visualization) {
scope.visualization.options = Visualization.visualizations[scope.visualization.type].defaultOptions;
}
});
scope.submit = function () {
@@ -208,6 +195,7 @@
.provider('Visualization', VisualizationProvider)
.directive('visualizationRenderer', ['$location', 'Visualization', VisualizationRenderer])
.directive('visualizationOptionsEditor', ['Visualization', VisualizationOptionsEditor])
.directive('visualizationName', ['Visualization', VisualizationName])
.directive('filters', Filters)
.directive('editVisulatizationForm', ['Events', 'Visualization', 'growl', EditVisualizationForm])
})();

View File

@@ -112,9 +112,6 @@
scope.columnTypes = {
"X": "x",
// "X (Date time)": "x",
// "X (Linear)": "x-linear",
// "X (Category)": "x-category",
"Y": "y",
"Series": "series",
"Unused": "unused"
@@ -166,7 +163,7 @@
scope.visualization.options.seriesOptions[s] = {'type': scope.visualization.options.globalSeriesType, 'yAxis': 0};
}
scope.visualization.options.seriesOptions[s].zIndex = scope.visualization.options.seriesOptions[s].zIndex === undefined ? i : scope.visualization.options.seriesOptions[s].zIndex;
scope.visualization.options.seriesOptions[s].index = scope.visualization.options.seriesOptions[s].index === undefined ? i : scope.visualization.options.seriesOptions[s].index;
});
scope.zIndexes = _.range(scope.series.length);
scope.yAxes = [[0, 'left'], [1, 'right']];
@@ -227,6 +224,12 @@
}
});
scope.visualization.options.xAxis = scope.visualization.options.xAxis || {};
scope.visualization.options.xAxis.labels = scope.visualization.options.xAxis.labels || {};
if (scope.visualization.options.xAxis.labels.enabled === undefined) {
scope.visualization.options.xAxis.labels.enabled = true;
}
scope.xAxisType = (scope.visualization.options.xAxis && scope.visualization.options.xAxis.type) || scope.xAxisType;
xAxisUnwatch = scope.$watch("xAxisType", function (xAxisType) {

View File

@@ -0,0 +1,238 @@
'use strict';
(function() {
var module = angular.module('redash.visualization');
module.config(['VisualizationProvider', function(VisualizationProvider) {
var renderTemplate =
'<map-renderer ' +
'options="visualization.options" query-result="queryResult">' +
'</map-renderer>';
var editTemplate = '<map-editor></map-editor>';
var defaultOptions = {
'height': 500,
'draw': 'Marker',
'classify':'none'
};
VisualizationProvider.registerVisualization({
type: 'MAP',
name: 'Map',
renderTemplate: renderTemplate,
editorTemplate: editTemplate,
defaultOptions: defaultOptions
});
}
]);
module.directive('mapRenderer', function() {
return {
restrict: 'E',
templateUrl: '/views/visualizations/map.html',
link: function($scope, elm, attrs) {
var setBounds = function(){
var b = $scope.visualization.options.bounds;
if(b){
$scope.map.fitBounds([[b._southWest.lat, b._southWest.lng],[b._northEast.lat, b._northEast.lng]]);
} else if ($scope.features.length > 0){
var group= new L.featureGroup($scope.features);
$scope.map.fitBounds(group.getBounds());
}
};
$scope.$watch('[queryResult && queryResult.getData(), visualization.options.draw,visualization.options.latColName,'+
'visualization.options.lonColName,visualization.options.classify,visualization.options.classify]',
function() {
var marker = function(lat,lon){
if (lat == null || lon == null) return;
return L.marker([lat, lon]);
};
var heatpoint = function(lat,lon,obj){
if (lat == null || lon == null) return;
var color = 'red';
if (obj &&
obj[$scope.visualization.options.classify] &&
$scope.visualization.options.classification){
var v = $.grep($scope.visualization.options.classification,function(e){
return e.value == obj[$scope.visualization.options.classify];
});
if (v.length >0) color = v[0].color;
}
var style = {
fillColor:color,
fillOpacity:0.5,
stroke:false
};
return L.circleMarker([lat,lon],style)
};
var color = function(val){
// taken from http://jsfiddle.net/xgJ2e/2/
var h= Math.floor((100 - val) * 120 / 100);
var s = Math.abs(val - 50)/50;
var v = 1;
var rgb, i, data = [];
if (s === 0) {
rgb = [v,v,v];
} else {
h = h / 60;
i = Math.floor(h);
data = [v*(1-s), v*(1-s*(h-i)), v*(1-s*(1-(h-i)))];
switch(i) {
case 0:
rgb = [v, data[2], data[0]];
break;
case 1:
rgb = [data[1], v, data[0]];
break;
case 2:
rgb = [data[0], v, data[2]];
break;
case 3:
rgb = [data[0], data[1], v];
break;
case 4:
rgb = [data[2], data[0], v];
break;
default:
rgb = [v, data[0], data[1]];
break;
}
}
return '#' + rgb.map(function(x){
return ("0" + Math.round(x*255).toString(16)).slice(-2);
}).join('');
};
// Following line is used to avoid "Couldn't autodetect L.Icon.Default.imagePath" error
// https://github.com/Leaflet/Leaflet/issues/766#issuecomment-7741039
L.Icon.Default.imagePath = L.Icon.Default.imagePath || "//api.tiles.mapbox.com/mapbox.js/v2.2.1/images";
function getBounds(e) {
$scope.visualization.options.bounds = $scope.map.getBounds();
}
var queryData = $scope.queryResult.getData();
var classify = $scope.visualization.options.classify;
if (queryData) {
$scope.visualization.options.classification = [];
for (var row in queryData) {
if (queryData[row][classify] &&
$.grep($scope.visualization.options.classification, function (e) {
return e.value == queryData[row][classify]
}).length == 0) {
$scope.visualization.options.classification.push({value: queryData[row][classify], color: null});
}
}
$.each($scope.visualization.options.classification, function (i, c) {
c.color = color(parseInt((i / $scope.visualization.options.classification.length) * 100));
});
if (!$scope.map) {
$scope.map = L.map(elm[0].children[0].children[0])
}
L.tileLayer('//{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
attribution: '&copy; <a href="http://osm.org/copyright">OpenStreetMap</a> contributors'
}).addTo($scope.map);
$scope.features = $scope.features || [];
var tmp_features = [];
var lat_col = $scope.visualization.options.latColName || 'lat';
var lon_col = $scope.visualization.options.lonColName || 'lon';
for (var row in queryData) {
var feature;
if ($scope.visualization.options.draw == 'Marker') {
feature = marker(queryData[row][lat_col], queryData[row][lon_col])
} else if ($scope.visualization.options.draw == 'Color') {
feature = heatpoint(queryData[row][lat_col], queryData[row][lon_col], queryData[row])
}
if (!feature) continue;
var obj_description = '<ul style="list-style-type: none;padding-left: 0">';
for (var k in queryData[row]){
obj_description += "<li>" + k + ": " + queryData[row][k] + "</li>";
}
obj_description += '</ul>';
feature.bindPopup(obj_description);
tmp_features.push(feature);
}
$.each($scope.features, function (i, f) {
$scope.map.removeLayer(f);
});
$scope.features = tmp_features;
$.each($scope.features, function (i, f) {
f.addTo($scope.map)
});
setBounds();
$scope.map.on('focus',function(){
$scope.map.on('moveend', getBounds);
});
$scope.map.on('blur',function(){
$scope.map.off('moveend', getBounds);
});
// We redraw the map if it was loaded in a hidden tab
if ($('a[href="#'+$scope.visualization.id+'"]').length > 0) {
$('a[href="#'+$scope.visualization.id+'"]').on('click', function () {
setTimeout(function() {
$scope.map.invalidateSize(false);
setBounds();
},500);
});
}
}
}, true);
$scope.$watch('visualization.options.height', function() {
if (!$scope.map) return;
$scope.map.invalidateSize(false);
setBounds();
});
}
}
});
module.directive('mapEditor', function() {
return {
restrict: 'E',
templateUrl: '/views/visualizations/map_editor.html',
link: function($scope, elm, attrs) {
$scope.draw_options = ['Marker','Color'];
$scope.classify_columns = $scope.queryResult.columnNames.concat('none');
}
}
});
})();

View File

@@ -78,15 +78,15 @@
};
} else if (columnType === 'date') {
columnDefinition.formatFunction = function (value) {
if (value) {
return value.format("DD/MM/YY");
if (value && moment.isMoment(value)) {
return value.toDate().toLocaleDateString();
}
return value;
};
} else if (columnType === 'datetime') {
columnDefinition.formatFunction = function (value) {
if (value) {
return value.format("DD/MM/YY HH:mm");
if (value && moment.isMoment(value)) {
return value.toDate().toLocaleString();
}
return value;
};

View File

@@ -14,7 +14,12 @@ a.page-title {
}
a.navbar-brand {
font-style: italic;
padding: 5px 5px 0px 0px;
margin-left: 0px !important;
}
a.navbar-brand img {
height: 40px;
}
.graph {
@@ -92,7 +97,7 @@ a.navbar-brand {
}
.panel-heading .query-link:hover {
text-decoration: none;
text-decoration: underline;
}
/* angular-growl */
@@ -308,6 +313,23 @@ counter-renderer counter-name {
height: 100%;
}
.schema-container {
height: 300px;
}
.schema-browser {
height: 100%;
overflow-y: auto;
overflow-x: hidden;
}
div.table-name {
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
cursor: pointer;
}
/*
bootstrap's hidden-xs class adds display:block when not hidden
use this class when you need to keep the original display value
@@ -317,3 +339,7 @@ use this class when you need to keep the original display value
display: none !important;
}
}
.log-container {
margin-bottom: 50px;
}

View File

@@ -24,10 +24,6 @@
<span class="badge">{{manager.outdated_queries_count}}</span>
Outdated Queries Count
</li>
<li class="list-group-item" ng-if="flowerUrl">
<a href="/admin/workers">Workers' Status</a>
</li>
</ul>
<ul class="list-group col-lg-4">
<li class="list-group-item active">Queues</li>

View File

@@ -1,3 +0,0 @@
<div class="container-fluid iframe-container">
<iframe src="{{flowerUrl}}" style="width:100%; height:100%; background-color:transparent;"></iframe>
</div>

View File

@@ -28,6 +28,7 @@
<p>
<span ng-hide="currentUser.hasPermission('view_query')">{{query.name}}</span>
<query-link query="query" visualization="widget.visualization" ng-show="currentUser.hasPermission('view_query')"></query-link>
<visualization-name visualization="widget.visualization"/>
</p>
<div class="text-muted" ng-bind-html="query.description | markdown"></div>
</h3>
@@ -37,7 +38,7 @@
<div class="panel-footer">
<span class="label label-default"
tooltip="next update {{nextUpdateTime}} (query runtime: {{queryResult.getRuntime() | durationHumanize}})"
tooltip="(query runtime: {{queryResult.getRuntime() | durationHumanize}})"
tooltip-placement="bottom">Updated: <span am-time-ago="queryResult.getUpdatedAt()"></span></span>
<span class="pull-right">

View File

@@ -59,9 +59,9 @@
<hr>
<div class="row">
<div class="col-lg-12">
<div ng-show="sourceMode">
<div class="row" ng-if="sourceMode">
<div ng-class="editorSize">
<div>
<p>
<button type="button" class="btn btn-primary btn-xs" ng-disabled="queryExecuting" ng-click="executeQuery()">
<span class="glyphicon glyphicon-play"></span> Execute
@@ -77,21 +77,43 @@
</button>
</span>
</p>
</div>
<!-- code editor -->
<div ng-show="sourceMode">
<p>
<query-editor query="query" lock="queryFormatting"></query-editor>
<query-editor query="query" schema="schema" syntax="dataSource.syntax" lock="queryFormatting"></query-editor>
</p>
<hr>
</div>
</div>
<div class="col-md-3 schema-container" ng-show="hasSchema">
<div ng-show="schema.length < 200">
<input type="text" placeholder="Search schema..." class="form-control" ng-model="schemaFilter">
</div>
<div class="schema-browser">
<div ng-repeat="table in schema | filter:schemaFilter track by table.name">
<div class="table-name" ng-click="table.collapsed = !table.collapsed">
<i class="fa fa-table"></i> <strong><span title="{{table.name}}">{{table.name}}</span></strong>
</div>
<div collapse="table.collapsed && !schemaFilter">
<div ng-repeat="column in table.columns track by column" style="padding-left:16px;">{{column}}</div>
</div>
</div>
</div>
</div>
</div>
</div>
<hr ng-if="sourceMode">
<div class="row">
<div class="col-lg-3 rd-hidden-xs">
<p>
<span class="glyphicon glyphicon-user"></span>
<span class="text-muted">Created By </span>
<strong>{{query.user.name}}</strong>
</p>
<p ng-if="query.last_modified_by && query.user.id != query.last_modified_by.id">
<span class="glyphicon glyphicon-user"></span>
<span class="text-muted">Last Modified By </span>
<strong>{{query.last_modified_by.name}}</strong>
</p>
<p>
<span class="glyphicon glyphicon-time"></span>
<span class="text-muted">Last update </span>
@@ -99,12 +121,6 @@
<rd-time-ago value="queryResult.query_result.retrieved_at"></rd-time-ago>
</strong>
</p>
<p>
<span class="glyphicon glyphicon-user"></span>
<span class="text-muted">Created By </span>
<strong ng-hide="isQueryOwner">{{query.user.name}}</strong>
<strong ng-show="isQueryOwner">You</strong>
</p>
<p>
<span class="glyphicon glyphicon-play"></span>
<span class="text-muted">Runtime </span>
@@ -117,8 +133,8 @@
</p>
<p>
<span class="glyphicon glyphicon-refresh"></span>
<span class="text-muted">Refresh Interval</span>
<query-refresh-select></query-refresh-select>
<span class="text-muted">Refresh Schedule</span>
<a href="" ng-click="openScheduleForm()">{{query.schedule | scheduleHumanize}}</a>
</p>
<p>
@@ -139,7 +155,11 @@
ng-show="!query.is_archived && query.id != undefined && (isQueryOwner || currentUser.hasPermission('admin'))">
<i class="fa fa-archive" title="Archive Query"></i>
</a>
<button class="btn btn-default btn-sm" ng-show="query.id != undefined" ng-click="showApiKey()">
<i class="fa fa-key" title="Show API Key"></i>
</button>
<div class="modal fade" id="archive-confirmation-modal" tabindex="-1" role="dialog" aria-labelledby="archiveConfirmationModal" aria-hidden="true">
<div class="modal-dialog">
<div class="modal-content">
@@ -172,6 +192,16 @@
</div>
<div class="alert alert-danger" ng-show="queryResult.getError()">Error running query: <strong>{{queryResult.getError()}}</strong></div>
<div class="row log-container" ng-show="showLog">
<span ng-show="showLog">Log Information:</span>
<table>
<tbody>
<tr ng-repeat="l in queryResult.getLog()">
<td>{{l}}</td>
</tr>
</tbody>
</table>
</div>
<!-- tabs and data -->
<div ng-show="showDataset">
<div class="row">
@@ -182,7 +212,7 @@
<rd-tab tab-id="{{vis.id}}" name="{{vis.name}}" ng-if="vis.type!='TABLE'" ng-repeat="vis in query.visualizations">
<span class="remove" ng-click="deleteVisualization($event, vis)" ng-show="canEdit"> &times;</span>
</rd-tab>
<rd-tab tab-id="add" name="&plus; New" removeable="true" ng-show="canEdit"></rd-tab>
<rd-tab tab-id="add" name="&plus; New Visualization" removeable="true" ng-show="canEdit"></rd-tab>
<li ng-if="!sourceMode" class="rd-tab-btn"><button class="btn btn-sm btn-default" ng-click="executeQuery()" ng-disabled="queryExecuting" title="Refresh Dataset"><span class="glyphicon glyphicon-refresh"></span></button></li>
</ul>
</div>

View File

@@ -0,0 +1,18 @@
<div class="modal-header">
<button type="button" class="close" aria-label="Close" ng-click="close()"><span aria-hidden="true">&times;</span></button>
<h4 class="modal-title">Refresh Schedule</h4>
</div>
<div class="modal-body">
<div class="radio">
<label>
<input type="radio" value="periodic" ng-model="refreshType">
<query-refresh-select ng-disabled="refreshType != 'periodic'"></query-refresh-select>
</label>
</div>
<div class="radio">
<label>
<input type="radio" value="daily" ng-model="refreshType">
<query-time-picker ng-disabled="refreshType != 'daily'"></query-time-picker>
</label>
</div>
</div>

View File

@@ -54,6 +54,14 @@
ng-model="visualization.options.sortX">
</div>
</div>
<div class="form-group">
<label class="control-label col-sm-2">Show X Axis Labels</label>
<div class="col-sm-10">
<input name="sortX" type="checkbox" class="form-control"
ng-model="visualization.options.xAxis.labels.enabled">
</div>
</div>
</div>
</div>
@@ -100,6 +108,15 @@
class="form-control"></select>
</div>
</div>
<div class="form-group">
<label class="control-label col-sm-3">Index</label>
<div class="col-sm-9">
<select required ng-model="visualization.options.seriesOptions[seriesName].index"
ng-options="o as o for o in zIndexes"
class="form-control"></select>
</div>
</div>
<div class="form-group">
<label class="control-label col-sm-3">y Axis</label>

View File

@@ -1,7 +1,7 @@
<div>
<span ng-click="openEditor=!openEditor" class="details-toggle" ng-class="{open: openEditor}">Edit</span>
<form ng-if="openEditor" role="form" name="visForm" ng-submit="submit()">
<form ng-show="openEditor" role="form" name="visForm" ng-submit="submit()">
<div class="form-group">
<label class="control-label">Name</label>
<input name="name" type="text" class="form-control" ng-model="visualization.name" placeholder="{{visualization.type | capitalize}}">

View File

@@ -0,0 +1,3 @@
<div style='margin:1%;width:98%;height:{{visualization.options.height}}px'>
<div style="width:100%; height:100%;"></div>
</div>

View File

@@ -0,0 +1,55 @@
<div class="form-horizontal">
<div class="form-group">
<label class="col-lg-2">Map height (px)</label>
<div class="col-sm-4">
<input class="form-control" type="number" ng-model = "visualization.options.height" />
</div>
</div>
<div class="form-group">
<label class="col-lg-2">Draw option</label>
<div class="col-sm-4">
<select ng-options="opt for opt in draw_options" ng-model="visualization.options.draw" class="form-control"></select>
</div>
</div>
<div class="form-group">
<label class="col-lg-2">Latitude column name</label>
<div class="col-sm-4">
<select ng-options="name for name in queryResult.columnNames" ng-model="visualization.options.latColName" class="form-control"></select>
</div>
</div>
<div class="form-group">
<label class="col-lg-2">Longitude column name</label>
<div class="col-sm-4">
<select ng-options="name for name in queryResult.columnNames" ng-model="visualization.options.lonColName" class="form-control"></select>
</div>
</div>
<div ng-show = "visualization.options.draw == 'Color'">
<div class="form-group">
<label class="col-lg-2">Classify by column</label>
<div class="col-sm-4">
<select ng-options="name for name in classify_columns" ng-model="visualization.options.classify" class="form-control"></select>
</div>
</div>
<div class="row" >
<div class="col-lg-6">
<div ng-repeat="element in visualization.options.classification" class="list-group">
<div class="list-group-item active">
{{element.value}}
</div>
<div class="list-group-item">
<div class="form-group">
<label class="col-lg-4">Color</label>
<div class="col-sm-4">
<input class="form-control" style="background-color:{{element.color}};" type="text" ng-model = "element.color" />
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>

View File

@@ -12,8 +12,7 @@
"es5-shim": "2.0.8",
"angular-moment": "0.2.0",
"moment": "2.1.0",
"angular-ui-bootstrap": "0.5.0",
"angular-ui-codemirror": "0.0.5",
"codemirror": "4.8.0",
"highcharts": "3.0.10",
"underscore": "1.5.1",
"pivottable": "~1.1.1",
@@ -29,7 +28,9 @@
"angular-ui-select": "0.8.2",
"font-awesome": "~4.2.0",
"mustache": "~1.0.0",
"canvg": "gabelerner/canvg"
"canvg": "gabelerner/canvg",
"angular-ui-bootstrap-bower": "~0.12.1",
"leaflet":"~0.7.3"
},
"devDependencies": {
"angular-mocks": "1.2.18",

BIN  rd_ui/favicon.ico Executable file (new binary file, 1.1 KiB, not shown)

View File

@@ -4,8 +4,9 @@ import redis
from statsd import StatsClient
from redash import settings
from redash.query_runner import import_query_runners
__version__ = '0.5.0'
__version__ = '0.6.4'
def setup_logging():
@@ -31,4 +32,6 @@ def create_redis_connection():
setup_logging()
redis_connection = create_redis_connection()
statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX)
statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX)
import_query_runners(settings.QUERY_RUNNERS)

redash/admin.py Normal file (116 lines added)
View File

@@ -0,0 +1,116 @@
import json
from flask_admin.contrib.peewee import ModelView
from flask.ext.admin import Admin
from flask_admin.contrib.peewee.form import CustomModelConverter
from flask_admin.form.widgets import DateTimePickerWidget
from playhouse.postgres_ext import ArrayField, DateTimeTZField
from wtforms import fields
from wtforms.widgets import TextInput
from redash import models
from redash import query_runner
from redash.permissions import require_permission
class ArrayListField(fields.Field):
widget = TextInput()
def _value(self):
if self.data:
return u', '.join(self.data)
else:
return u''
def process_formdata(self, valuelist):
if valuelist:
self.data = [x.strip() for x in valuelist[0].split(',')]
else:
self.data = []
class JSONTextAreaField(fields.TextAreaField):
def process_formdata(self, valuelist):
if valuelist:
try:
json.loads(valuelist[0])
except ValueError:
raise ValueError(self.gettext(u'Invalid JSON'))
self.data = valuelist[0]
else:
self.data = ''
class PasswordHashField(fields.PasswordField):
def _value(self):
return u''
def process_formdata(self, valuelist):
if valuelist:
self.data = models.pwd_context.encrypt(valuelist[0])
else:
self.data = u''
class PgModelConverter(CustomModelConverter):
def __init__(self, view, additional=None):
additional = {ArrayField: self.handle_array_field,
DateTimeTZField: self.handle_datetime_tz_field}
super(PgModelConverter, self).__init__(view, additional)
self.view = view
def handle_array_field(self, model, field, **kwargs):
return field.name, ArrayListField(**kwargs)
def handle_datetime_tz_field(self, model, field, **kwargs):
kwargs['widget'] = DateTimePickerWidget()
return field.name, fields.DateTimeField(**kwargs)
class BaseModelView(ModelView):
model_form_converter = PgModelConverter
@require_permission('admin')
def is_accessible(self):
return True
class UserModelView(BaseModelView):
column_searchable_list = ('name', 'email')
form_excluded_columns = ('created_at', 'updated_at')
column_exclude_list = ('password_hash',)
form_overrides = dict(password_hash=PasswordHashField)
form_args = {
'password_hash': {'label': 'Password'}
}
def query_runner_type_formatter(view, context, model, name):
qr = query_runner.query_runners.get(model.type, None)
if qr:
return qr.name()
return model.type
class DataSourceModelView(BaseModelView):
form_overrides = dict(type=fields.SelectField, options=JSONTextAreaField)
form_args = dict(type={
'choices': [(k, r.name()) for k, r in query_runner.query_runners.iteritems()]
})
column_formatters = dict(type=query_runner_type_formatter)
column_filters = ('type',)
def init_admin(app):
admin = Admin(app, name='re:dash admin')
views = {
models.User: UserModelView(models.User),
models.DataSource: DataSourceModelView(models.DataSource)
}
for m in models.all_models:
if m in views:
admin.add_view(views[m])
else:
admin.add_view(BaseModelView(m))

View File

@@ -1,13 +1,11 @@
import functools
import hashlib
import hmac
import time
import logging
from flask import request, make_response, redirect, url_for
from flask.ext.login import LoginManager, login_user, current_user
from flask.ext.login import LoginManager
from redash import models, settings, google_oauth
from redash import models, settings, google_oauth, saml_auth
login_manager = LoginManager()
logger = logging.getLogger('authentication')
@@ -23,47 +21,72 @@ def sign(key, path, expires):
return h.hexdigest()
class HMACAuthentication(object):
@staticmethod
def api_key_authentication():
signature = request.args.get('signature')
expires = float(request.args.get('expires') or 0)
query_id = request.view_args.get('query_id', None)
@login_manager.user_loader
def load_user(user_id):
return models.User.get_by_id(user_id)
# TODO: 3600 should be a setting
if signature and query_id and time.time() < expires <= time.time() + 3600:
def hmac_load_user_from_request(request):
signature = request.args.get('signature')
expires = float(request.args.get('expires') or 0)
query_id = request.view_args.get('query_id', None)
user_id = request.args.get('user_id', None)
# TODO: 3600 should be a setting
if signature and time.time() < expires <= time.time() + 3600:
if user_id:
user = models.User.get_by_id(user_id)
calculated_signature = sign(user.api_key, request.path, expires)
if user.api_key and signature == calculated_signature:
return user
if query_id:
query = models.Query.get(models.Query.id == query_id)
calculated_signature = sign(query.api_key, request.path, expires)
if query.api_key and signature == calculated_signature:
login_user(models.ApiUser(query.api_key), remember=False)
return True
return models.ApiUser(query.api_key)
return False
return None
def required(self, fn):
@functools.wraps(fn)
def decorated(*args, **kwargs):
if current_user.is_authenticated():
return fn(*args, **kwargs)
def get_user_from_api_key(api_key, query_id):
if not api_key:
return None
if self.api_key_authentication():
return fn(*args, **kwargs)
user = None
try:
user = models.User.get_by_api_key(api_key)
except models.User.DoesNotExist:
if query_id:
query = models.Query.get_by_id(query_id)
if query and query.api_key == api_key:
user = models.ApiUser(api_key)
return make_response(redirect(url_for("login", next=request.url)))
return user
return decorated
def api_key_load_user_from_request(request):
api_key = request.args.get('api_key', None)
query_id = request.view_args.get('query_id', None)
@login_manager.user_loader
def load_user(user_id):
return models.User.select().where(models.User.id == user_id).first()
user = get_user_from_api_key(api_key, query_id)
return user
def setup_authentication(app):
login_manager.init_app(app)
login_manager.anonymous_user = models.AnonymousUser
login_manager.login_view = 'login'
app.secret_key = settings.COOKIE_SECRET
app.register_blueprint(google_oauth.blueprint)
app.register_blueprint(saml_auth.blueprint)
if settings.AUTH_TYPE == 'hmac':
login_manager.request_loader(hmac_load_user_from_request)
elif settings.AUTH_TYPE == 'api_key':
login_manager.request_loader(api_key_load_user_from_request)
else:
logger.warning("Unknown authentication type ({}). Using default (HMAC).".format(settings.AUTH_TYPE))
login_manager.request_loader(hmac_load_user_from_request)
return HMACAuthentication()
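For unattended access, the two request loaders above accept credentials on the query string. A client-side sketch of the URL shapes; the endpoint path and all values are placeholders and not taken from this diff, and expires must be a unix timestamp no more than an hour in the future:
var apiKey = 'USER_OR_QUERY_API_KEY';
var signature = 'HMAC_OF_PATH_AND_EXPIRES';
var expires = Math.round(Date.now() / 1000) + 1800;    // within the one-hour window
var userId = 1;
var resultsPath = '/api/queries/42/results.json';      // hypothetical endpoint path
var byApiKey    = resultsPath + '?api_key=' + apiKey;  // handled by api_key_load_user_from_request
var bySignature = resultsPath + '?signature=' + signature + '&expires=' + expires + '&user_id=' + userId;  // handled by hmac_load_user_from_request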

View File

@@ -1,6 +1,3 @@
from flask import make_response
from functools import update_wrapper
ONE_YEAR = 60 * 60 * 24 * 365.25
headers = {

View File

@@ -1,5 +1,8 @@
import json
import click
from flask.ext.script import Manager
from redash import models
from redash.query_runner import query_runners, validate_configuration
manager = Manager(help="Data sources management commands.")
@@ -13,11 +16,70 @@ def list():
print "Id: {}\nName: {}\nType: {}\nOptions: {}".format(ds.id, ds.name, ds.type, ds.options)
def validate_data_source_type(type):
if type not in query_runners.keys():
print "Error: the type \"{}\" is not supported (supported types: {}).".format(type, ", ".join(query_runners.keys()))
exit()
def validate_data_source_options(type, options):
if not validate_configuration(type, options):
print "Error: invalid configuration."
exit()
@manager.command
def new(name, type, options):
def new(name=None, type=None, options=None):
"""Create new data source"""
# TODO: validate it's a valid type and in the future, validate the options.
if name is None:
name = click.prompt("Name")
if type is None:
print "Select type:"
for i, query_runner_name in enumerate(query_runners.keys()):
print "{}. {}".format(i+1, query_runner_name)
idx = 0
while idx < 1 or idx > len(query_runners.keys()):
idx = click.prompt("[{}-{}]".format(1, len(query_runners.keys())), type=int)
type = query_runners.keys()[idx-1]
else:
validate_data_source_type(type)
if options is None:
query_runner = query_runners[type]
schema = query_runner.configuration_schema()
types = {
'string': unicode,
'number': int,
'boolean': bool
}
options_obj = {}
for k, prop in schema['properties'].iteritems():
required = k in schema.get('required', [])
default_value = "<<DEFAULT_VALUE>>"
if required:
default_value = None
prompt = prop.get('title', k.capitalize())
if required:
prompt = "{} (required)".format(prompt)
else:
prompt = "{} (optional)".format(prompt)
value = click.prompt(prompt, default=default_value, type=types[prop['type']], show_default=False)
if value != default_value:
options_obj[k] = value
options = json.dumps(options_obj)
validate_data_source_options(type, options)
print "Creating {} data source ({}) with options:\n{}".format(type, name, options)
data_source = models.DataSource.create(name=name,
type=type,
options=options)
@@ -49,7 +111,14 @@ def update_attr(obj, attr, new_value):
def edit(name, new_name=None, options=None, type=None):
"""Edit data source settings (name, options, type)"""
try:
if type is not None:
validate_data_source_type(type)
data_source = models.DataSource.get(models.DataSource.name==name)
if options is not None:
validate_data_source_options(data_source.type, options)
update_attr(data_source, "name", new_name)
update_attr(data_source, "type", type)
update_attr(data_source, "options", options)

View File

@@ -7,22 +7,23 @@ but this is only due to configuration issues and temporary.
import csv
import hashlib
import json
import numbers
import cStringIO
import datetime
import time
import logging
from flask import render_template, send_from_directory, make_response, request, jsonify, redirect, \
session, url_for
session, url_for, current_app, flash
from flask.ext.restful import Resource, abort
from flask_login import current_user, login_user, logout_user
from flask_login import current_user, login_user, logout_user, login_required
import sqlparse
from redash import redis_connection, statsd_client, models, settings, utils, __version__
from redash.wsgi import app, auth, api
from redash import statsd_client, models, settings, utils
from redash.wsgi import app, api
from redash.tasks import QueryTask, record_event
from redash.cache import headers as cache_headers
from redash.permissions import require_permission
from redash.query_runner import query_runners, validate_configuration
from redash.monitor import get_status
@app.route('/ping', methods=['GET'])
@@ -37,7 +38,7 @@ def ping():
@app.route('/queries/<query_id>/<anything>')
@app.route('/personal')
@app.route('/')
@auth.required
@login_required
def index(**kwargs):
email_md5 = hashlib.md5(current_user.email.lower()).hexdigest()
gravatar_url = "https://www.gravatar.com/avatar/%s?s=40" % email_md5
@@ -52,8 +53,7 @@ def index(**kwargs):
}
features = {
'clientSideMetrics': settings.CLIENT_SIDE_METRICS,
'flowerUrl': settings.CELERY_FLOWER_URL
'clientSideMetrics': settings.CLIENT_SIDE_METRICS
}
return render_template("index.html", user=json.dumps(user), name=settings.NAME,
@@ -67,22 +67,28 @@ def login():
return redirect(request.args.get('next') or '/')
if not settings.PASSWORD_LOGIN_ENABLED:
return redirect(url_for("google_oauth.authorize", next=request.args.get('next')))
if settings.SAML_LOGIN_ENABLED:
return redirect(url_for("saml_auth.sp_initiated", next=request.args.get('next')))
else:
return redirect(url_for("google_oauth.authorize", next=request.args.get('next')))
if request.method == 'POST':
user = models.User.select().where(models.User.email == request.form['username']).first()
if user and user.verify_password(request.form['password']):
remember = ('remember' in request.form)
login_user(user, remember=remember)
return redirect(request.args.get('next') or '/')
try:
user = models.User.get_by_email(request.form['username'])
if user and user.verify_password(request.form['password']):
remember = ('remember' in request.form)
login_user(user, remember=remember)
return redirect(request.args.get('next') or '/')
except models.User.DoesNotExist:
flash("Wrong username or password.")
return render_template("login.html",
name=settings.NAME,
analytics=settings.ANALYTICS,
next=request.args.get('next'),
username=request.form.get('username', ''),
show_google_openid=settings.GOOGLE_OAUTH_ENABLED)
show_google_openid=settings.GOOGLE_OAUTH_ENABLED,
show_saml_login=settings.SAML_LOGIN_ENABLED)
@app.route('/logout')
def logout():
@@ -92,43 +98,16 @@ def logout():
return redirect('/login')
@app.route('/status.json')
@auth.required
@login_required
@require_permission('admin')
def status_api():
status = {}
info = redis_connection.info()
status['redis_used_memory'] = info['used_memory_human']
status['version'] = __version__
status['queries_count'] = models.Query.select().count()
status['query_results_count'] = models.QueryResult.select().count()
status['unused_query_results_count'] = models.QueryResult.unused().count()
status['dashboards_count'] = models.Dashboard.select().count()
status['widgets_count'] = models.Widget.select().count()
status['workers'] = []
manager_status = redis_connection.hgetall('redash:status')
status['manager'] = manager_status
status['manager']['outdated_queries_count'] = models.Query.outdated_queries().count()
queues = {}
for ds in models.DataSource.select():
for queue in (ds.queue_name, ds.scheduled_queue_name):
queues.setdefault(queue, set())
queues[queue].add(ds.name)
status['manager']['queues'] = {}
for queue, sources in queues.iteritems():
status['manager']['queues'][queue] = {
'data_sources': ', '.join(sources),
'size': redis_connection.llen(queue)
}
status = get_status()
return jsonify(status)
@app.route('/api/queries/format', methods=['POST'])
@auth.required
@login_required
def format_sql_query():
arguments = request.get_json(force=True)
query = arguments.get("query", "")
@@ -136,8 +115,26 @@ def format_sql_query():
return sqlparse.format(query, reindent=True, keyword_case='upper')
@app.route('/queries/new', methods=['POST'])
@login_required
def create_query_route():
query = request.form.get('query', None)
data_source_id = request.form.get('data_source_id', None)
if query is None or data_source_id is None:
abort(400)
query = models.Query.create(name="New Query",
query=query,
data_source=data_source_id,
user=current_user._get_current_object(),
schedule=None)
return redirect('/queries/{}'.format(query.id), 303)
class BaseResource(Resource):
decorators = [auth.required]
decorators = [login_required]
def __init__(self, *args, **kwargs):
super(BaseResource, self).__init__(*args, **kwargs)
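The new /queries/new route added in this hunk accepts a plain form POST with 'query' and 'data_source_id' and answers with a 303 redirect to the created query. A minimal client-side sketch (jQuery is already used throughout rd_ui; the data source id is made up):
$('<form method="post" action="/queries/new">')
  .append($('<input type="hidden" name="query">').val('SELECT 1'))
  .append($('<input type="hidden" name="data_source_id">').val(1))
  .appendTo('body')
  .submit();
// the browser follows the 303 redirect to /queries/<new id>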
@@ -174,14 +171,46 @@ class MetricsAPI(BaseResource):
api.add_resource(MetricsAPI, '/api/metrics/v1/send', endpoint='metrics')
class DataSourceTypeListAPI(BaseResource):
@require_permission("admin")
def get(self):
return [q.to_dict() for q in query_runners.values()]
api.add_resource(DataSourceTypeListAPI, '/api/data_sources/types', endpoint='data_source_types')
class DataSourceListAPI(BaseResource):
def get(self):
data_sources = [ds.to_dict() for ds in models.DataSource.all()]
return data_sources
@require_permission("admin")
def post(self):
req = request.get_json(True)
required_fields = ('options', 'name', 'type')
for f in required_fields:
if f not in req:
abort(400)
if not validate_configuration(req['type'], req['options']):
abort(400)
datasource = models.DataSource.create(name=req['name'], type=req['type'], options=req['options'])
return datasource.to_dict()
api.add_resource(DataSourceListAPI, '/api/data_sources', endpoint='data_sources')
class DataSourceSchemaAPI(BaseResource):
def get(self, data_source_id):
data_source = models.DataSource.get_by_id(data_source_id)
schema = data_source.get_schema()
return schema
api.add_resource(DataSourceSchemaAPI, '/api/data_sources/<data_source_id>/schema')
class DashboardRecentAPI(BaseResource):
def get(self):
return [d.to_dict() for d in models.Dashboard.recent(current_user.id).limit(20)]
@@ -295,7 +324,7 @@ class QueryListAPI(BaseResource):
@require_permission('create_query')
def post(self):
query_def = request.get_json(force=True)
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data']:
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'last_modified_by']:
query_def.pop(field, None)
query_def['user'] = self.current_user
@@ -303,8 +332,6 @@ class QueryListAPI(BaseResource):
query = models.Query(**query_def)
query.save()
query.create_default_visualizations()
return query.to_dict()
@require_permission('view_query')
@@ -316,9 +343,9 @@ class QueryAPI(BaseResource):
@require_permission('edit_query')
def post(self, query_id):
query = models.Query.get_by_id(query_id)
query_def = request.get_json(force=True)
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'user']:
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'user', 'last_modified_by']:
query_def.pop(field, None)
if 'latest_query_data_id' in query_def:
@@ -327,6 +354,9 @@ class QueryAPI(BaseResource):
if 'data_source_id' in query_def:
query_def['data_source'] = query_def.pop('data_source_id')
query_def['last_modified_by'] = self.current_user
# TODO: use #save() with #dirty_fields.
models.Query.update_instance(query_id, **query_def)
query = models.Query.get_by_id(query_id)
@@ -365,7 +395,7 @@ class VisualizationListAPI(BaseResource):
kwargs = request.get_json(force=True)
kwargs['options'] = json.dumps(kwargs['options'])
kwargs['query'] = kwargs.pop('query_id')
vis = models.Visualization(**kwargs)
vis.save()
@@ -400,7 +430,7 @@ api.add_resource(VisualizationAPI, '/api/visualizations/<visualization_id>', end
class QueryResultListAPI(BaseResource):
@require_permission('execute_query')
def post(self):
params = request.json
params = request.get_json(force=True)
if settings.FEATURE_TABLES_PERMISSIONS:
metadata = utils.SQLMetaData(params['query'])
@@ -426,16 +456,19 @@ class QueryResultListAPI(BaseResource):
activity=params['query']
).save()
if params['ttl'] == 0:
max_age = int(params.get('max_age', -1))
if max_age == 0:
query_result = None
else:
query_result = models.QueryResult.get_latest(params['data_source_id'], params['query'], int(params['ttl']))
query_result = models.QueryResult.get_latest(params['data_source_id'], params['query'], max_age)
if query_result:
return {'query_result': query_result.to_dict()}
else:
data_source = models.DataSource.get_by_id(params['data_source_id'])
job = QueryTask.add_task(params['query'], data_source)
query_id = params.get('query_id', 'adhoc')
job = QueryTask.add_task(params['query'], data_source, metadata={"Username": self.current_user.name, "Query ID": query_id})
return {'job': job.to_dict()}
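# Illustrative sketch (assumed values): the JSON body POSTed to QueryResultListAPI
# above. max_age replaces ttl for cache control: 0 always executes the query,
# -1 (the default) accepts any cached result, and N accepts a result up to N seconds old.
payload = {
    "data_source_id": 1,                      # hypothetical data source
    "query": "SELECT count(*) FROM events",
    "max_age": 0,                             # force a fresh execution
    "query_id": 42,                           # optional; "adhoc" is used when omitted
}
# The response is {"query_result": {...}} on a cache hit, or {"job": {...}} to poll
# when a new execution was enqueued via QueryTask.add_task().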
@@ -449,16 +482,34 @@ class QueryResultAPI(BaseResource):
writer.writer = utils.UnicodeWriter(s)
writer.writeheader()
for row in query_data['rows']:
for k, v in row.iteritems():
if isinstance(v, numbers.Number) and (v > 1000 * 1000 * 1000 * 100):
row[k] = datetime.datetime.fromtimestamp(v/1000.0)
writer.writerow(row)
headers = {'Content-Type': "text/csv; charset=UTF-8"}
headers.update(cache_headers)
return make_response(s.getvalue(), 200, headers)
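# Worked example of the heuristic above (illustrative, not part of the diff):
# 1000 * 1000 * 1000 * 100 == 100000000000, so any numeric cell larger than that is
# assumed to be a millisecond epoch timestamp and is rendered as a datetime in the CSV.
# For instance 1436799091000 / 1000.0 -> a datetime in July 2015, while ordinary metric
# values stay numeric because they fall below the threshold.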
@staticmethod
def add_cors_headers(headers):
if 'Origin' in request.headers:
origin = request.headers['Origin']
if origin in settings.ACCESS_CONTROL_ALLOW_ORIGIN:
headers['Access-Control-Allow-Origin'] = origin
headers['Access-Control-Allow-Credentials'] = str(settings.ACCESS_CONTROL_ALLOW_CREDENTIALS).lower()
@require_permission('view_query')
def options(self, query_id=None, query_result_id=None, filetype='json'):
headers = {}
self.add_cors_headers(headers)
if settings.ACCESS_CONTROL_REQUEST_METHOD:
headers['Access-Control-Request-Method'] = settings.ACCESS_CONTROL_REQUEST_METHOD
if settings.ACCESS_CONTROL_ALLOW_HEADERS:
headers['Access-Control-Allow-Headers'] = settings.ACCESS_CONTROL_ALLOW_HEADERS
return make_response("", 200, headers)
@require_permission('view_query')
def get(self, query_id=None, query_result_id=None, filetype='json'):
if query_result_id is None and query_id is not None:
@@ -470,9 +521,33 @@ class QueryResultAPI(BaseResource):
query_result = models.QueryResult.get_by_id(query_result_id)
if query_result:
if isinstance(self.current_user, models.ApiUser):
event = {
'user_id': None,
'action': 'api_get',
'timestamp': int(time.time()),
'api_key': self.current_user.id,
'file_type': filetype
}
if query_id:
event['object_type'] = 'query'
event['object_id'] = query_id
else:
event['object_type'] = 'query_result'
event['object_id'] = query_result_id
record_event.delay(event)
headers = {}
if len(settings.ACCESS_CONTROL_ALLOW_ORIGIN) > 0:
self.add_cors_headers(headers)
if filetype == 'json':
data = json.dumps({'query_result': query_result.to_dict()}, cls=utils.JSONEncoder)
return make_response(data, 200, cache_headers)
headers.update(cache_headers)
return make_response(data, 200, headers)
else:
return self.csv_response(query_result)
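# Illustrative sketch (hypothetical URL and origin): the preflight request that
# options() above answers. Only origins listed in settings.ACCESS_CONTROL_ALLOW_ORIGIN
# are echoed back; for any other origin the CORS headers are simply omitted.
# Authentication is omitted here; the handlers above still require view_query.
import requests

resp = requests.options("http://redash.example.com/api/queries/1/results.json",
                        headers={"Origin": "https://reports.example.com"})
resp.headers.get("Access-Control-Allow-Origin")
# -> "https://reports.example.com" when that origin is allowed, together with
#    Access-Control-Allow-Credentials and, if configured, the request-method and
#    allowed-headers values from settings.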
@@ -502,11 +577,13 @@ api.add_resource(JobAPI, '/api/jobs/<job_id>', endpoint='job')
@app.route('/<path:filename>')
def send_static(filename):
return send_from_directory(settings.STATIC_ASSETS_PATH, filename)
if current_app.debug:
cache_timeout = 0
else:
cache_timeout = None
return send_from_directory(settings.STATIC_ASSETS_PATH, filename, cache_timeout=cache_timeout)
if __name__ == '__main__':
app.run(debug=True)


@@ -1,34 +0,0 @@
import json
def get_query_runner(connection_type, connection_string):
if connection_type == 'mysql':
from redash.data import query_runner_mysql
runner = query_runner_mysql.mysql(connection_string)
elif connection_type == 'graphite':
from redash.data import query_runner_graphite
connection_params = json.loads(connection_string)
if connection_params['auth']:
connection_params['auth'] = tuple(connection_params['auth'])
else:
connection_params['auth'] = None
runner = query_runner_graphite.graphite(connection_params)
elif connection_type == 'bigquery':
from redash.data import query_runner_bigquery
connection_params = json.loads(connection_string)
runner = query_runner_bigquery.bigquery(connection_params)
elif connection_type == 'script':
from redash.data import query_runner_script
runner = query_runner_script.script(connection_string)
elif connection_type == 'url':
from redash.data import query_runner_url
runner = query_runner_url.url(connection_string)
elif connection_type == "mongo":
from redash.data import query_runner_mongodb
connection_params = json.loads(connection_string)
runner = query_runner_mongodb.mongodb(connection_params)
else:
from redash.data import query_runner_pg
runner = query_runner_pg.pg(connection_string)
return runner


@@ -1,138 +0,0 @@
import datetime
import httplib2
import json
import logging
import sys
import time
try:
import apiclient.errors
from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.client import SignedJwtAssertionCredentials
except ImportError:
print "Missing dependencies. Please install google-api-python-client and oauth2client."
print "You can use pip: pip install google-api-python-client oauth2client"
from redash.utils import JSONEncoder
types_map = {
'INTEGER': 'integer',
'FLOAT': 'float',
'BOOLEAN': 'boolean',
'STRING': 'string',
'TIMESTAMP': 'datetime',
}
def transform_row(row, fields):
column_index = 0
row_data = {}
for cell in row["f"]:
field = fields[column_index]
cell_value = cell['v']
if cell_value is None:
pass
# Otherwise just cast the value
elif field['type'] == 'INTEGER':
cell_value = int(cell_value)
elif field['type'] == 'FLOAT':
cell_value = float(cell_value)
elif field['type'] == 'BOOLEAN':
cell_value = cell_value.lower() == "true"
elif field['type'] == 'TIMESTAMP':
cell_value = datetime.datetime.fromtimestamp(float(cell_value))
row_data[field["name"]] = cell_value
column_index += 1
return row_data
def bigquery(connection_string):
def load_key(filename):
f = file(filename, "rb")
try:
return f.read()
finally:
f.close()
def get_bigquery_service():
scope = [
"https://www.googleapis.com/auth/bigquery",
]
credentials = SignedJwtAssertionCredentials(connection_string["serviceAccount"],
load_key(connection_string["privateKey"]), scope=scope)
http = httplib2.Http()
http = credentials.authorize(http)
return build("bigquery", "v2", http=http)
def get_query_results(jobs, project_id, job_id, start_index):
query_reply = jobs.getQueryResults(projectId=project_id, jobId=job_id, startIndex=start_index).execute()
logging.debug('query_reply %s', query_reply)
if not query_reply['jobComplete']:
time.sleep(10)
return get_query_results(jobs, project_id, job_id, start_index)
return query_reply
def query_runner(query):
bigquery_service = get_bigquery_service()
jobs = bigquery_service.jobs()
job_data = {
"configuration": {
"query": {
"query": query,
}
}
}
logging.debug("bigquery got query: %s", query)
project_id = connection_string["projectId"]
try:
insert_response = jobs.insert(projectId=project_id, body=job_data).execute()
current_row = 0
query_reply = get_query_results(jobs, project_id=project_id,
job_id=insert_response['jobReference']['jobId'], start_index=current_row)
logging.debug("bigquery replied: %s", query_reply)
rows = []
while ("rows" in query_reply) and current_row < query_reply['totalRows']:
for row in query_reply["rows"]:
rows.append(transform_row(row, query_reply["schema"]["fields"]))
current_row += len(query_reply['rows'])
query_reply = jobs.getQueryResults(projectId=project_id, jobId=query_reply['jobReference']['jobId'],
startIndex=current_row).execute()
columns = [{'name': f["name"],
'friendly_name': f["name"],
'type': types_map.get(f['type'], "string")} for f in query_reply["schema"]["fields"]]
data = {
"columns": columns,
"rows": rows
}
error = None
json_data = json.dumps(data, cls=JSONEncoder)
except apiclient.errors.HttpError, e:
json_data = None
error = e.content
except KeyboardInterrupt:
error = "Query cancelled by user."
json_data = None
except Exception:
raise sys.exc_info()[1], None, sys.exc_info()[2]
return json_data, error
return query_runner


@@ -1,46 +0,0 @@
"""
QueryRunner for Graphite.
"""
import json
import datetime
import requests
from redash.utils import JSONEncoder
def graphite(connection_params):
def transform_result(response):
columns = [{'name': 'Time::x'}, {'name': 'value::y'}, {'name': 'name::series'}]
rows = []
for series in response.json():
for values in series['datapoints']:
timestamp = datetime.datetime.fromtimestamp(int(values[1]))
rows.append({'Time::x': timestamp, 'name::series': series['target'], 'value::y': values[0]})
data = {'columns': columns, 'rows': rows}
return json.dumps(data, cls=JSONEncoder)
def query_runner(query):
base_url = "%s/render?format=json&" % connection_params['url']
url = "%s%s" % (base_url, "&".join(query.split("\n")))
error = None
data = None
try:
response = requests.get(url, auth=connection_params['auth'],
verify=connection_params['verify'])
if response.status_code == 200:
data = transform_result(response)
else:
error = "Failed getting results (%d)" % response.status_code
except Exception, ex:
data = None
error = ex.message
return data, error
query_runner.annotate_query = False
return query_runner


@@ -1,64 +0,0 @@
"""
QueryRunner is the function that the workers use to execute queries. This is the MySQL
version, but we can easily write another to support additional databases.
Because the worker just passes the query along, this can be used with any data store that has
some sort of query language (for example: HiveQL).
"""
import logging
import json
import MySQLdb
import sys
from redash.utils import JSONEncoder
def mysql(connection_string):
if connection_string.endswith(';'):
connection_string = connection_string[0:-1]
def query_runner(query):
connections_params = [entry.split('=')[1] for entry in connection_string.split(';')]
connection = MySQLdb.connect(*connections_params, charset="utf8", use_unicode=True)
cursor = connection.cursor()
logging.debug("mysql got query: %s", query)
try:
cursor.execute(query)
data = cursor.fetchall()
cursor_desc = cursor.description
if (cursor_desc != None):
num_fields = len(cursor_desc)
column_names = [i[0] for i in cursor.description]
rows = [dict(zip(column_names, row)) for row in data]
columns = [{'name': col_name,
'friendly_name': col_name,
'type': None} for col_name in column_names]
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
error = None
else:
json_data = None
error = "No data was returned."
cursor.close()
except MySQLdb.Error, e:
json_data = None
error = e.args[1]
except KeyboardInterrupt:
error = "Query cancelled by user."
json_data = None
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
finally:
connection.close()
return json_data, error
return query_runner


@@ -1,110 +0,0 @@
"""
QueryRunner is the function that the workers use to execute queries. This is the PostgreSQL
version, but we can easily write another to support additional databases (MySQL and others).
Because the worker just passes the query along, this can be used with any data store that has
some sort of query language (for example: HiveQL).
"""
import json
import sys
import select
import logging
import psycopg2
from redash.utils import JSONEncoder
types_map = {
20: 'integer',
21: 'integer',
23: 'integer',
700: 'float',
1700: 'float',
701: 'float',
16: 'boolean',
1082: 'date',
1114: 'datetime',
1184: 'datetime',
1014: 'string',
1015: 'string',
1008: 'string',
1009: 'string',
2951: 'string'
}
def pg(connection_string):
def column_friendly_name(column_name):
return column_name
def wait(conn):
while 1:
try:
state = conn.poll()
if state == psycopg2.extensions.POLL_OK:
break
elif state == psycopg2.extensions.POLL_WRITE:
select.select([], [conn.fileno()], [])
elif state == psycopg2.extensions.POLL_READ:
select.select([conn.fileno()], [], [])
else:
raise psycopg2.OperationalError("poll() returned %s" % state)
except select.error:
raise psycopg2.OperationalError("select.error received")
def query_runner(query):
connection = psycopg2.connect(connection_string, async=True)
wait(connection)
cursor = connection.cursor()
try:
cursor.execute(query)
wait(connection)
# While set would be more efficient here, it sorts the data which is not what we want, but due to the small
# size of the data we can assume it's ok.
column_names = []
columns = []
duplicates_counter = 1
for column in cursor.description:
# TODO: this deduplication needs to be generalized and reused in all query runners.
column_name = column.name
if column_name in column_names:
column_name = column_name + str(duplicates_counter)
duplicates_counter += 1
column_names.append(column_name)
columns.append({
'name': column_name,
'friendly_name': column_friendly_name(column_name),
'type': types_map.get(column.type_code, None)
})
rows = [dict(zip(column_names, row)) for row in cursor]
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
error = None
cursor.close()
except (select.error, OSError) as e:
logging.exception(e)
error = "Query interrupted. Please retry."
json_data = None
except psycopg2.DatabaseError as e:
logging.exception(e)
json_data = None
error = e.message
except KeyboardInterrupt:
connection.cancel()
error = "Query cancelled by user."
json_data = None
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
finally:
connection.close()
return json_data, error
return query_runner


@@ -1,51 +0,0 @@
import json
import logging
import sys
import os
import subprocess
# We use subprocess.check_output for simplicity.
# If someone really wants to run this on Python < 2.7 they can update the code to use
# Popen, check the return codes and read the standard output into a variable.
if "check_output" not in subprocess.__dict__:
print "ERROR: This runner uses the subprocess.check_output function, which requires Python 2.7+"
def script(connection_string):
def query_runner(query):
try:
json_data = None
error = None
if connection_string is None:
return None, "script execution path is not set. Please reconfigure the data source"
# Poor man's protection against running scripts from outside the scripts directory
if connection_string.find("../") > -1:
return None, "Scripts can only be run from the configured scripts directory"
query = query.strip()
script = os.path.join(connection_string, query)
if not os.path.exists(script):
return None, "Script '%s' not found in script directory" % query
output = subprocess.check_output(script, shell=False)
if output != None:
output = output.strip()
if output != "":
return output, None
error = "Error reading output"
except subprocess.CalledProcessError as e:
return None, str(e)
except KeyboardInterrupt:
error = "Query cancelled by user."
json_data = None
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
return json_data, error
query_runner.annotate_query = False
return query_runner


@@ -1,25 +1,25 @@
import logging
from flask.ext.login import login_user
import requests
from flask import redirect, url_for, Blueprint
from flask import redirect, url_for, Blueprint, flash
from flask_oauth import OAuth
from redash import models, settings
logger = logging.getLogger('google_oauth')
oauth = OAuth()
request_token_params = {'scope': 'https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile', 'response_type': 'code'}
if settings.GOOGLE_APPS_DOMAIN:
request_token_params['hd'] = settings.GOOGLE_APPS_DOMAIN
else:
if not settings.GOOGLE_APPS_DOMAIN:
logger.warning("No Google Apps domain defined, all Google accounts allowed.")
google = oauth.remote_app('google',
base_url='https://www.google.com/accounts/',
authorize_url='https://accounts.google.com/o/oauth2/auth',
request_token_url=None,
request_token_params=request_token_params,
request_token_params={
'scope': 'https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile',
'response_type': 'code'
},
access_token_url='https://accounts.google.com/o/oauth2/token',
access_token_method='POST',
access_token_params={'grant_type': 'authorization_code'},
@@ -31,7 +31,7 @@ blueprint = Blueprint('google_oauth', __name__)
def get_user_profile(access_token):
headers = {'Authorization': 'OAuth '+access_token}
headers = {'Authorization': 'OAuth {}'.format(access_token)}
response = requests.get('https://www.googleapis.com/oauth2/v1/userinfo', headers=headers)
if response.status_code == 401:
@@ -41,9 +41,17 @@ def get_user_profile(access_token):
return response.json()
def verify_profile(profile):
if not settings.GOOGLE_APPS_DOMAIN:
return True
domain = profile['email'].split('@')[-1]
return domain in settings.GOOGLE_APPS_DOMAIN
def create_and_login_user(name, email):
try:
user_object = models.User.get(models.User.email == email)
user_object = models.User.get_by_email(email)
if user_object.name != name:
logger.debug("Updating user name (%r -> %r)", user_object.name, name)
user_object.name = name
@@ -70,10 +78,17 @@ def authorized(resp):
if access_token is None:
logger.warning("Access token missing in call back request.")
flash("Validation error. Please retry.")
return redirect(url_for('login'))
profile = get_user_profile(access_token)
if profile is None:
flash("Validation error. Please retry.")
return redirect(url_for('login'))
if not verify_profile(profile):
logger.warning("User tried to login with unauthorized domain name: %s", profile['email'])
flash("Your Google Apps domain name isn't allowed.")
return redirect(url_for('login'))
create_and_login_user(profile['name'], profile['email'])


@@ -28,7 +28,7 @@ class Importer(object):
def import_query(self, user, query):
new_query = self._get_or_create(models.Query, query['id'], name=query['name'],
user=user,
ttl=-1,
schedule=None,
query=query['query'],
query_hash=query['query_hash'],
description=query['description'],


@@ -9,17 +9,21 @@ import itertools
import peewee
from passlib.apps import custom_app_context as pwd_context
from playhouse.postgres_ext import ArrayField
from playhouse.postgres_ext import ArrayField, DateTimeTZField, PostgresqlExtDatabase
from flask.ext.login import UserMixin, AnonymousUserMixin
import psycopg2
from redash import utils, settings
from redash import utils, settings, redis_connection
from redash.query_runner import get_query_runner
from utils import generate_token
class Database(object):
def __init__(self):
self.database_config = dict(settings.DATABASE_CONFIG)
self.database_config['register_hstore'] = False
self.database_name = self.database_config.pop('name')
self.database = peewee.PostgresqlDatabase(self.database_name, **self.database_config)
self.database = PostgresqlExtDatabase(self.database_name, **self.database_config)
self.app = None
self.pid = os.getpid()
@@ -59,6 +63,30 @@ class BaseModel(peewee.Model):
def get_by_id(cls, model_id):
return cls.get(cls.id == model_id)
def pre_save(self, created):
pass
def post_save(self, created):
# Handler for post_save operations. Override if needed.
pass
def save(self, *args, **kwargs):
pk_value = self._get_pk_value()
created = kwargs.get('force_insert', False) or not bool(pk_value)
self.pre_save(created)
super(BaseModel, self).save(*args, **kwargs)
self.post_save(created)
class ModelTimestampsMixin(BaseModel):
updated_at = DateTimeTZField(default=datetime.datetime.now)
created_at = DateTimeTZField(default=datetime.datetime.now)
def pre_save(self, created):
super(ModelTimestampsMixin, self).pre_save(created)
self.updated_at = datetime.datetime.now()
class PermissionsCheckMixin(object):
def has_permission(self, permission):
@@ -83,6 +111,9 @@ class ApiUser(UserMixin, PermissionsCheckMixin):
def __init__(self, api_key):
self.id = api_key
def __repr__(self):
return u"<ApiUser: {}>".format(self.id)
@property
def permissions(self):
return ['view_query']
@@ -96,7 +127,7 @@ class Group(BaseModel):
name = peewee.CharField(max_length=100)
permissions = ArrayField(peewee.CharField, default=DEFAULT_PERMISSIONS)
tables = ArrayField(peewee.CharField)
created_at = peewee.DateTimeField(default=datetime.datetime.now)
created_at = DateTimeTZField(default=datetime.datetime.now)
class Meta:
db_table = 'groups'
@@ -114,7 +145,7 @@ class Group(BaseModel):
return unicode(self.id)
class User(BaseModel, UserMixin, PermissionsCheckMixin):
class User(ModelTimestampsMixin, BaseModel, UserMixin, PermissionsCheckMixin):
DEFAULT_GROUPS = ['default']
id = peewee.PrimaryKeyField()
@@ -122,6 +153,7 @@ class User(BaseModel, UserMixin, PermissionsCheckMixin):
email = peewee.CharField(max_length=320, index=True, unique=True)
password_hash = peewee.CharField(max_length=128, null=True)
groups = ArrayField(peewee.CharField, default=DEFAULT_GROUPS)
api_key = peewee.CharField(max_length=40, unique=True)
class Meta:
db_table = 'users'
@@ -130,13 +162,21 @@ class User(BaseModel, UserMixin, PermissionsCheckMixin):
return {
'id': self.id,
'name': self.name,
'email': self.email
'email': self.email,
'updated_at': self.updated_at,
'created_at': self.created_at
}
def __init__(self, *args, **kwargs):
super(User, self).__init__(*args, **kwargs)
self._allowed_tables = None
def pre_save(self, created):
super(User, self).pre_save(created)
if not self.api_key:
self.api_key = generate_token(40)
@property
def permissions(self):
# TODO: this should be cached.
@@ -156,6 +196,10 @@ class User(BaseModel, UserMixin, PermissionsCheckMixin):
def get_by_email(cls, email):
return cls.get(cls.email == email)
@classmethod
def get_by_api_key(cls, api_key):
return cls.get(cls.api_key == api_key)
def __unicode__(self):
return '%r, %r' % (self.name, self.email)
@@ -173,7 +217,7 @@ class ActivityLog(BaseModel):
user = peewee.ForeignKeyField(User)
type = peewee.IntegerField()
activity = peewee.TextField()
created_at = peewee.DateTimeField(default=datetime.datetime.now)
created_at = DateTimeTZField(default=datetime.datetime.now)
class Meta:
db_table = 'activity_log'
@@ -193,12 +237,12 @@ class ActivityLog(BaseModel):
class DataSource(BaseModel):
id = peewee.PrimaryKeyField()
name = peewee.CharField()
name = peewee.CharField(unique=True)
type = peewee.CharField()
options = peewee.TextField()
queue_name = peewee.CharField(default="queries")
scheduled_queue_name = peewee.CharField(default="queries")
created_at = peewee.DateTimeField(default=datetime.datetime.now)
scheduled_queue_name = peewee.CharField(default="scheduled_queries")
created_at = DateTimeTZField(default=datetime.datetime.now)
class Meta:
db_table = 'data_sources'
@@ -207,9 +251,31 @@ class DataSource(BaseModel):
return {
'id': self.id,
'name': self.name,
'type': self.type
'type': self.type,
'syntax': self.query_runner.syntax
}
def get_schema(self, refresh=False):
key = "data_source:schema:{}".format(self.id)
cache = None
if not refresh:
cache = redis_connection.get(key)
if cache is None:
query_runner = self.query_runner
schema = sorted(query_runner.get_schema(), key=lambda t: t['name'])
redis_connection.set(key, json.dumps(schema))
else:
schema = json.loads(cache)
return schema
@property
def query_runner(self):
return get_query_runner(self.type, self.options)
@classmethod
def all(cls):
return cls.select().order_by(cls.id.asc())
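# Illustrative sketch (hypothetical id, assuming the DataSource model above):
# get_schema() keeps one cached copy per data source in Redis under
# "data_source:schema:<id>"; refresh=True bypasses the cache and re-asks the runner
# through the query_runner property before re-caching.
ds = DataSource.get_by_id(1)
ds.get_schema()               # served from Redis once populated
ds.get_schema(refresh=True)   # calls ds.query_runner.get_schema() again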
@@ -222,7 +288,7 @@ class QueryResult(BaseModel):
query = peewee.TextField()
data = peewee.TextField()
runtime = peewee.FloatField()
retrieved_at = peewee.DateTimeField()
retrieved_at = DateTimeTZField()
class Meta:
db_table = 'query_results'
@@ -248,16 +314,16 @@ class QueryResult(BaseModel):
return unused_results
@classmethod
def get_latest(cls, data_source, query, ttl=0):
def get_latest(cls, data_source, query, max_age=0):
query_hash = utils.gen_query_hash(query)
if ttl == -1:
if max_age == -1:
query = cls.select().where(cls.query_hash == query_hash,
cls.data_source == data_source).order_by(cls.retrieved_at.desc())
else:
query = cls.select().where(cls.query_hash == query_hash, cls.data_source == data_source,
peewee.SQL("retrieved_at + interval '%s second' >= now() at time zone 'utc'",
ttl)).order_by(cls.retrieved_at.desc())
max_age)).order_by(cls.retrieved_at.desc())
return query.first()
@@ -284,7 +350,28 @@ class QueryResult(BaseModel):
return u"%d | %s | %s" % (self.id, self.query_hash, self.retrieved_at)
class Query(BaseModel):
def should_schedule_next(previous_iteration, now, schedule):
if schedule.isdigit():
ttl = int(schedule)
next_iteration = previous_iteration + datetime.timedelta(seconds=ttl)
else:
hour, minute = schedule.split(':')
hour, minute = int(hour), int(minute)
# The following logic is needed for cases like the following:
# - The query scheduled to run at 23:59.
# - The scheduler wakes up at 00:01.
# - Using naive implementation of comparing timestamps, it will skip the execution.
normalized_previous_iteration = previous_iteration.replace(hour=hour, minute=minute)
if normalized_previous_iteration > previous_iteration:
previous_iteration = normalized_previous_iteration - datetime.timedelta(days=1)
next_iteration = (previous_iteration + datetime.timedelta(days=1)).replace(hour=hour, minute=minute)
return now > next_iteration
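# Illustrative examples (assumed timestamps) of the two schedule formats handled by
# should_schedule_next() above.
import datetime

last_run = datetime.datetime(2015, 7, 13, 10, 0)

# Interval schedule: the string is a number of seconds between runs.
should_schedule_next(last_run, datetime.datetime(2015, 7, 13, 10, 59), "3600")  # False
should_schedule_next(last_run, datetime.datetime(2015, 7, 13, 11, 1), "3600")   # True

# Daily "HH:MM" schedule: a run that happened just after midnight for a 23:59 schedule
# is normalized back a day, so the scheduler does not run it again immediately.
should_schedule_next(datetime.datetime(2015, 7, 13, 0, 0, 10),
                     datetime.datetime(2015, 7, 13, 0, 1), "23:59")             # False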
class Query(ModelTimestampsMixin, BaseModel):
id = peewee.PrimaryKeyField()
data_source = peewee.ForeignKeyField(DataSource)
latest_query_data = peewee.ForeignKeyField(QueryResult, null=True)
@@ -293,21 +380,15 @@ class Query(BaseModel):
query = peewee.TextField()
query_hash = peewee.CharField(max_length=32)
api_key = peewee.CharField(max_length=40)
ttl = peewee.IntegerField()
user_email = peewee.CharField(max_length=360, null=True)
user = peewee.ForeignKeyField(User)
last_modified_by = peewee.ForeignKeyField(User, null=True, related_name="modified_queries")
is_archived = peewee.BooleanField(default=False, index=True)
created_at = peewee.DateTimeField(default=datetime.datetime.now)
schedule = peewee.CharField(max_length=10, null=True)
class Meta:
db_table = 'queries'
def create_default_visualizations(self):
table_visualization = Visualization(query=self, name="Table",
description='',
type="TABLE", options="{}")
table_visualization.save()
def to_dict(self, with_stats=False, with_visualizations=False, with_user=True):
d = {
'id': self.id,
@@ -316,15 +397,17 @@ class Query(BaseModel):
'description': self.description,
'query': self.query,
'query_hash': self.query_hash,
'ttl': self.ttl,
'schedule': self.schedule,
'api_key': self.api_key,
'is_archived': self.is_archived,
'updated_at': self.updated_at,
'created_at': self.created_at,
'data_source_id': self._data.get('data_source', None)
}
if with_user:
d['user'] = self.user.to_dict()
d['last_modified_by'] = self.last_modified_by.to_dict() if self.last_modified_by is not None else None
else:
d['user_id'] = self._data['user']
@@ -340,7 +423,7 @@ class Query(BaseModel):
def archive(self):
self.is_archived = True
self.ttl = -1
self.schedule = None
for vis in self.visualizations:
for w in vis.widgets:
@@ -361,21 +444,19 @@ class Query(BaseModel):
@classmethod
def outdated_queries(cls):
# TODO: this will only find scheduled queries that were executed before. I think this is
# a reasonable assumption, but worth revisiting.
outdated_queries_ids = cls.select(
peewee.Func('first_value', cls.id).over(partition_by=[cls.query_hash, cls.data_source])) \
.join(QueryResult) \
.where(cls.ttl > 0,
cls.is_archived==False,
(QueryResult.retrieved_at +
(cls.ttl * peewee.SQL("interval '1 second'"))) <
peewee.SQL("(now() at time zone 'utc')"))
queries = cls.select(cls, QueryResult.retrieved_at, DataSource)\
.join(QueryResult)\
.switch(Query).join(DataSource)\
.where(cls.schedule != None)
queries = cls.select(cls, DataSource).join(DataSource) \
.where(cls.id << outdated_queries_ids)
now = utils.utcnow()
outdated_queries = {}
for query in queries:
if should_schedule_next(query.latest_query_data.retrieved_at, now, query.schedule):
key = "{}:{}".format(query.query_hash, query.data_source.id)
outdated_queries[key] = query
return queries
return outdated_queries.values()
@classmethod
def search(cls, term):
@@ -392,6 +473,7 @@ class Query(BaseModel):
@classmethod
def recent(cls, user_id):
# TODO: instead of t2 here, we should define table_alias for Query table
return cls.select().where(Event.created_at > peewee.SQL("current_date - 7")).\
join(Event, on=(Query.id == peewee.SQL("t2.object_id::integer"))).\
where(Event.action << ('edit', 'execute', 'edit_name', 'edit_description', 'view_source')).\
@@ -410,10 +492,23 @@ class Query(BaseModel):
update = cls.update(**kwargs).where(cls.id == query_id)
return update.execute()
def save(self, *args, **kwargs):
def pre_save(self, created):
super(Query, self).pre_save(created)
self.query_hash = utils.gen_query_hash(self.query)
self._set_api_key()
super(Query, self).save(*args, **kwargs)
if self.last_modified_by is None:
self.last_modified_by = self.user
def post_save(self, created):
if created:
self._create_default_visualizations()
def _create_default_visualizations(self):
table_visualization = Visualization(query=self, name="Table",
description='',
type="TABLE", options="{}")
table_visualization.save()
def _set_api_key(self):
if not self.api_key:
@@ -432,7 +527,7 @@ class Query(BaseModel):
return unicode(self.id)
class Dashboard(BaseModel):
class Dashboard(ModelTimestampsMixin, BaseModel):
id = peewee.PrimaryKeyField()
slug = peewee.CharField(max_length=140, index=True)
name = peewee.CharField(max_length=100)
@@ -441,7 +536,6 @@ class Dashboard(BaseModel):
layout = peewee.TextField()
dashboard_filters_enabled = peewee.BooleanField(default=False)
is_archived = peewee.BooleanField(default=False, index=True)
created_at = peewee.DateTimeField(default=datetime.datetime.now)
class Meta:
db_table = 'dashboards'
@@ -483,7 +577,9 @@ class Dashboard(BaseModel):
'user_id': self._data['user'],
'layout': layout,
'dashboard_filters_enabled': self.dashboard_filters_enabled,
'widgets': widgets_layout
'widgets': widgets_layout,
'updated_at': self.updated_at,
'created_at': self.created_at
}
@classmethod
@@ -516,7 +612,7 @@ class Dashboard(BaseModel):
return u"%s=%s" % (self.id, self.name)
class Visualization(BaseModel):
class Visualization(ModelTimestampsMixin, BaseModel):
id = peewee.PrimaryKeyField()
type = peewee.CharField(max_length=100)
query = peewee.ForeignKeyField(Query, related_name='visualizations')
@@ -534,6 +630,8 @@ class Visualization(BaseModel):
'name': self.name,
'description': self.description,
'options': json.loads(self.options),
'updated_at': self.updated_at,
'created_at': self.created_at
}
if with_query:
@@ -545,14 +643,13 @@ class Visualization(BaseModel):
return u"%s %s" % (self.id, self.type)
class Widget(BaseModel):
class Widget(ModelTimestampsMixin, BaseModel):
id = peewee.PrimaryKeyField()
visualization = peewee.ForeignKeyField(Visualization, related_name='widgets', null=True)
text = peewee.TextField(null=True)
width = peewee.IntegerField()
options = peewee.TextField()
dashboard = peewee.ForeignKeyField(Dashboard, related_name='widgets', index=True)
created_at = peewee.DateTimeField(default=datetime.datetime.now)
# unused; kept for backward compatibility:
type = peewee.CharField(max_length=100, null=True)
@@ -567,7 +664,9 @@ class Widget(BaseModel):
'width': self.width,
'options': json.loads(self.options),
'dashboard_id': self._data['dashboard'],
'text': self.text
'text': self.text,
'updated_at': self.updated_at,
'created_at': self.created_at
}
if self.visualization and self.visualization.id:
@@ -586,13 +685,14 @@ class Widget(BaseModel):
self.dashboard.save()
super(Widget, self).delete_instance(*args, **kwargs)
class Event(BaseModel):
user = peewee.ForeignKeyField(User, related_name="events")
user = peewee.ForeignKeyField(User, related_name="events", null=True)
action = peewee.CharField()
object_type = peewee.CharField()
object_id = peewee.CharField(null=True)
additional_properties = peewee.TextField(null=True)
created_at = peewee.DateTimeField(default=datetime.datetime.now)
created_at = DateTimeTZField(default=datetime.datetime.now)
class Meta:
db_table = 'events'

33
redash/monitor.py Normal file

@@ -0,0 +1,33 @@
from redash import redis_connection, models, __version__
def get_status():
status = {}
info = redis_connection.info()
status['redis_used_memory'] = info['used_memory_human']
status['version'] = __version__
status['queries_count'] = models.Query.select().count()
status['query_results_count'] = models.QueryResult.select().count()
status['unused_query_results_count'] = models.QueryResult.unused().count()
status['dashboards_count'] = models.Dashboard.select().count()
status['widgets_count'] = models.Widget.select().count()
status['workers'] = []
manager_status = redis_connection.hgetall('redash:status')
status['manager'] = manager_status
status['manager']['outdated_queries_count'] = len(models.Query.outdated_queries())
queues = {}
for ds in models.DataSource.select():
for queue in (ds.queue_name, ds.scheduled_queue_name):
queues.setdefault(queue, set())
queues[queue].add(ds.name)
status['manager']['queues'] = {}
for queue, sources in queues.iteritems():
status['manager']['queues'][queue] = {
'data_sources': ', '.join(sources),
'size': redis_connection.llen(queue)
}
return status
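# Illustrative sketch (made-up values): get_status() returns a plain dict, so it can be
# dumped as JSON for an admin/status page, e.g.:
# {"version": "...", "redis_used_memory": "12.3M", "queries_count": 240,
#  "manager": {"outdated_queries_count": 3,
#              "queues": {"queries": {"data_sources": "prod db", "size": 0}}}, ...}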


@@ -0,0 +1,117 @@
import logging
import json
import jsonschema
from jsonschema import ValidationError
logger = logging.getLogger(__name__)
__all__ = [
'ValidationError',
'BaseQueryRunner',
'TYPE_DATETIME',
'TYPE_BOOLEAN',
'TYPE_INTEGER',
'TYPE_STRING',
'TYPE_DATE',
'TYPE_FLOAT',
'SUPPORTED_COLUMN_TYPES',
'register',
'get_query_runner',
'import_query_runners'
]
# Valid types of columns returned in results:
TYPE_INTEGER = 'integer'
TYPE_FLOAT = 'float'
TYPE_BOOLEAN = 'boolean'
TYPE_STRING = 'string'
TYPE_DATETIME = 'datetime'
TYPE_DATE = 'date'
SUPPORTED_COLUMN_TYPES = set([
TYPE_INTEGER,
TYPE_FLOAT,
TYPE_BOOLEAN,
TYPE_STRING,
TYPE_DATETIME,
TYPE_DATE
])
class BaseQueryRunner(object):
def __init__(self, configuration):
jsonschema.validate(configuration, self.configuration_schema())
self.syntax = 'sql'
self.configuration = configuration
@classmethod
def name(cls):
return cls.__name__
@classmethod
def type(cls):
return cls.__name__.lower()
@classmethod
def enabled(cls):
return True
@classmethod
def annotate_query(cls):
return True
@classmethod
def configuration_schema(cls):
return {}
def run_query(self, query):
raise NotImplementedError()
def get_schema(self):
return []
@classmethod
def to_dict(cls):
return {
'name': cls.name(),
'type': cls.type(),
'configuration_schema': cls.configuration_schema()
}
query_runners = {}
def register(query_runner_class):
global query_runners
if query_runner_class.enabled():
logger.debug("Registering %s (%s) query runner.", query_runner_class.name(), query_runner_class.type())
query_runners[query_runner_class.type()] = query_runner_class
else:
logger.warning("%s query runner enabled but not supported, not registering. Either disable or install missing dependencies.", query_runner_class.name())
def get_query_runner(query_runner_type, configuration_json):
query_runner_class = query_runners.get(query_runner_type, None)
if query_runner_class is None:
return None
return query_runner_class(json.loads(configuration_json))
def validate_configuration(query_runner_type, configuration_json):
query_runner_class = query_runners.get(query_runner_type, None)
if query_runner_class is None:
return False
try:
jsonschema.validate(json.loads(configuration_json), query_runner_class.configuration_schema())
except (ValidationError, ValueError):
return False
return True
def import_query_runners(query_runner_imports):
for runner_import in query_runner_imports:
__import__(runner_import)
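# Illustrative sketch (all names are made up): the minimal shape of a query runner
# built on the interfaces defined above.
import json
from redash.query_runner import BaseQueryRunner, register, TYPE_STRING

class Echo(BaseQueryRunner):
    @classmethod
    def configuration_schema(cls):
        return {
            'type': 'object',
            'properties': {'prefix': {'type': 'string'}},
            'required': ['prefix']
        }

    def run_query(self, query):
        # run_query returns (json_data, error): a JSON string with "columns"/"rows"
        # on success, or None plus an error message on failure.
        columns = [{'name': 'echo', 'friendly_name': 'echo', 'type': TYPE_STRING}]
        rows = [{'echo': self.configuration['prefix'] + query}]
        return json.dumps({'columns': columns, 'rows': rows}), None

register(Echo)
# get_query_runner("echo", '{"prefix": "> "}') then returns a configured Echo instance,
# and validate_configuration("echo", ...) checks the options JSON against
# configuration_schema() before a data source is saved.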


@@ -0,0 +1,204 @@
import datetime
import json
import httplib2
import logging
import sys
import time
import requests
from redash.query_runner import *
from redash.utils import JSONEncoder
logger = logging.getLogger(__name__)
try:
import apiclient.errors
from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.client import SignedJwtAssertionCredentials
from oauth2client import gce
enabled = True
except ImportError:
logger.warning("Missing dependencies. Please install google-api-python-client and oauth2client.")
logger.warning("You can use pip: pip install google-api-python-client oauth2client")
enabled = False
types_map = {
'INTEGER': TYPE_INTEGER,
'FLOAT': TYPE_FLOAT,
'BOOLEAN': TYPE_BOOLEAN,
'STRING': TYPE_STRING,
'TIMESTAMP': TYPE_DATETIME,
}
def transform_row(row, fields):
column_index = 0
row_data = {}
for cell in row["f"]:
field = fields[column_index]
cell_value = cell['v']
if cell_value is None:
pass
# Otherwise just cast the value
elif field['type'] == 'INTEGER':
cell_value = int(cell_value)
elif field['type'] == 'FLOAT':
cell_value = float(cell_value)
elif field['type'] == 'BOOLEAN':
cell_value = cell_value.lower() == "true"
elif field['type'] == 'TIMESTAMP':
cell_value = datetime.datetime.fromtimestamp(float(cell_value))
row_data[field["name"]] = cell_value
column_index += 1
return row_data
def _load_key(filename):
f = file(filename, "rb")
try:
return f.read()
finally:
f.close()
def _get_query_results(jobs, project_id, job_id, start_index):
query_reply = jobs.getQueryResults(projectId=project_id, jobId=job_id, startIndex=start_index).execute()
logging.debug('query_reply %s', query_reply)
if not query_reply['jobComplete']:
time.sleep(10)
return _get_query_results(jobs, project_id, job_id, start_index)
return query_reply
class BigQuery(BaseQueryRunner):
@classmethod
def enabled(cls):
return enabled
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'serviceAccount': {
'type': 'string',
'title': 'Service Account'
},
'projectId': {
'type': 'string',
'title': 'Project ID'
},
'privateKey': {
'type': 'string',
'title': 'Private Key Path'
}
},
'required': ['serviceAccount', 'projectId', 'privateKey']
}
def __init__(self, configuration_json):
super(BigQuery, self).__init__(configuration_json)
def _get_bigquery_service(self):
scope = [
"https://www.googleapis.com/auth/bigquery",
]
private_key = _load_key(self.configuration["privateKey"])
credentials = SignedJwtAssertionCredentials(self.configuration['serviceAccount'], private_key, scope=scope)
http = httplib2.Http()
http = credentials.authorize(http)
return build("bigquery", "v2", http=http)
def _get_project_id(self):
return self.configuration["projectId"]
def run_query(self, query):
bigquery_service = self._get_bigquery_service()
jobs = bigquery_service.jobs()
job_data = {
"configuration": {
"query": {
"query": query,
}
}
}
logger.debug("BigQuery got query: %s", query)
project_id = self._get_project_id()
try:
insert_response = jobs.insert(projectId=project_id, body=job_data).execute()
current_row = 0
query_reply = _get_query_results(jobs, project_id=project_id,
job_id=insert_response['jobReference']['jobId'], start_index=current_row)
logger.debug("bigquery replied: %s", query_reply)
rows = []
while ("rows" in query_reply) and current_row < query_reply['totalRows']:
for row in query_reply["rows"]:
rows.append(transform_row(row, query_reply["schema"]["fields"]))
current_row += len(query_reply['rows'])
query_reply = jobs.getQueryResults(projectId=project_id, jobId=query_reply['jobReference']['jobId'],
startIndex=current_row).execute()
columns = [{'name': f["name"],
'friendly_name': f["name"],
'type': types_map.get(f['type'], "string")} for f in query_reply["schema"]["fields"]]
data = {
"columns": columns,
"rows": rows
}
error = None
json_data = json.dumps(data, cls=JSONEncoder)
except apiclient.errors.HttpError, e:
json_data = None
error = e.content
except KeyboardInterrupt:
error = "Query cancelled by user."
json_data = None
except Exception:
raise sys.exc_info()[1], None, sys.exc_info()[2]
return json_data, error
class BigQueryGCE(BigQuery):
@classmethod
def type(cls):
return "bigquery_gce"
@classmethod
def configuration_schema(cls):
return {}
def _get_project_id(self):
return requests.get('http://metadata/computeMetadata/v1/project/project-id', headers={'Metadata-Flavor': 'Google'}).content
def _get_bigquery_service(self):
credentials = gce.AppAssertionCredentials(scope='https://www.googleapis.com/auth/bigquery')
http = httplib2.Http()
http = credentials.authorize(http)
return build("bigquery", "v2", http=http)
register(BigQuery)
register(BigQueryGCE)
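# Illustrative sketch (assumed account, project and key path): the options blob matching
# the BigQuery configuration_schema() above.
import json

bigquery_options = json.dumps({
    "serviceAccount": "reporting@my-project.iam.gserviceaccount.com",
    "projectId": "my-project",
    "privateKey": "/opt/redash/keys/bigquery.p12",
})
# get_query_runner("bigquery", bigquery_options) builds a client authorized via
# SignedJwtAssertionCredentials; BigQueryGCE instead pulls the project id from the GCE
# metadata service and needs no options.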


@@ -0,0 +1,258 @@
import datetime
import json
import logging
import sys
import urllib
from redash.query_runner import *
from redash import models
import requests
import dateutil
from dateutil.parser import parse
try:
import http.client as http_client
except ImportError:
# Python 2
import httplib as http_client
logger = logging.getLogger(__name__)
ELASTICSEARCH_TYPES_MAPPING = {
"integer" : TYPE_INTEGER,
"long" : TYPE_INTEGER,
"float" : TYPE_FLOAT,
"double" : TYPE_FLOAT,
"boolean" : TYPE_BOOLEAN,
"string" : TYPE_STRING,
"date" : TYPE_DATE,
# "geo_point" TODO: Need to split to 2 fields somehow
}
PYTHON_TYPES_MAPPING = {
str: TYPE_STRING,
unicode: TYPE_STRING,
bool : TYPE_BOOLEAN,
int : TYPE_INTEGER,
long: TYPE_INTEGER,
float: TYPE_FLOAT
}
#
# ElasticSearch currently supports only simple Lucene style queries (like Kibana
# but without the aggregation).
#
# Full blown JSON based ElasticSearch queries (including aggregations) will be
# added later
#
# Simple query example:
#
# - Query the index named "twitter"
# - Filter by "user:kimchy"
# - Return the fields: "@timestamp", "tweet" and "user"
# - Return up to 15 results
# - Sort by @timestamp ascending
#
# {
# "index" : "twitter",
# "query" : "user:kimchy",
# "fields" : ["@timestamp", "tweet", "user"],
# "size" : 15,
# "sort" : "@timestamp:asc"
# }
#
#
# Simple query on a logstash ElasticSearch instance:
#
# - Query the index named "logstash-2015.04.*" (in this case its all of April 2015)
# - Filter by type:events AND eventName:UserUpgrade AND channel:selfserve
# - Return fields: "@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"
# - Return up to 250 results
# - Sort by @timestamp ascending
# {
# "index" : "logstash-2015.04.*",
# "query" : "type:events AND eventName:UserUpgrade AND channel:selfserve",
# "fields" : ["@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"],
# "size" : 250,
# "sort" : "@timestamp:asc"
# }
#
#
class ElasticSearch(BaseQueryRunner):
DEBUG_ENABLED = False
"""
ElasticSearch query runner for querying ElasticSearch servers.
Queries can be written in the Lucene syntax (single line) or in the more complex,
full-blown ElasticSearch JSON syntax.
"""
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'server': {
'type': 'string'
}
},
"required" : ["server"]
}
@classmethod
def enabled(cls):
return True
@classmethod
def annotate_query(cls):
return False
def __init__(self, configuration_json):
super(ElasticSearch, self).__init__(configuration_json)
self.syntax = "json"
if self.DEBUG_ENABLED:
http_client.HTTPConnection.debuglevel = 1
# you need to initialize logging, otherwise you will not see anything from requests
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
requests_log = logging.getLogger("requests.packages.urllib3")
requests_log.setLevel(logging.DEBUG)
requests_log.propagate = True
def get_mappings(self, url):
mappings = {}
r = requests.get(url)
mappings_data = r.json()
for index_name in mappings_data:
index_mappings = mappings_data[index_name]
for m in index_mappings.get("mappings", {}):
for property_name in index_mappings["mappings"][m]["properties"]:
property_data = index_mappings["mappings"][m]["properties"][property_name]
if not property_name in mappings:
property_type = property_data.get("type", None)
if property_type:
if property_type in ELASTICSEARCH_TYPES_MAPPING:
mappings[property_name] = property_type
else:
raise Exception("Unknown property type: {0}".format(property_type))
return mappings
def parse_results(self, mappings, result_fields, raw_result, result_columns, result_rows):
result_columns_index = {}
for c in result_columns:
result_columns_index[c["name"]] = c
result_fields_index = {}
if result_fields:
for r in result_fields:
result_fields_index[r] = None
for h in raw_result["hits"]["hits"]:
row = {}
for column in h["_source"]:
if result_fields and column not in result_fields_index:
continue
if column not in result_columns_index:
result_columns.append({
"name" : column,
"friendly_name" : column,
"type" : mappings.get(column, "string")
})
result_columns_index[column] = result_columns[-1]
row[column] = h["_source"][column]
if row and len(row) > 0:
result_rows.append(row)
def execute_simple_query(self, url, _from, mappings, result_fields, result_columns, result_rows):
url += "&from={0}".format(_from)
r = requests.get(url)
if r.status_code != 200:
raise Exception("Failed to execute query. Return Code: {0} Reason: {1}".format(r.status_code, r.text))
raw_result = r.json()
self.parse_results(mappings, result_fields, raw_result, result_columns, result_rows)
total = raw_result["hits"]["total"]
result_size = len(raw_result["hits"]["hits"])
logger.debug("Result Size: {0} Total: {1}".format(result_size, total))
return raw_result["hits"]["total"]
def run_query(self, query):
try:
error = None
logger.debug(query)
query_params = json.loads(query)
index_name = query_params["index"]
query_data = query_params["query"]
size = int(query_params.get("size", 500))
result_fields = query_params.get("fields", None)
sort = query_params.get("sort", None)
server_url = self.configuration["server"]
if not server_url:
error = "Missing configuration key 'server'"
return None, error
if server_url[-1] == "/":
server_url = server_url[:-1]
url = "{0}/{1}/_search?".format(server_url, index_name)
mapping_url = "{0}/{1}/_mapping".format(server_url, index_name)
mappings = self.get_mappings(mapping_url)
logger.debug(json.dumps(mappings, indent=4))
if size:
url += "&size={0}".format(size)
if sort:
url += "&sort={0}".format(urllib.quote_plus(sort))
url += "&q={0}".format(urllib.quote_plus(query_data))
logger.debug("Using URL: {0}".format(url))
logger.debug("Using Query: {0}".format(query_data))
result_columns = []
result_rows = []
if isinstance(query_data, str) or isinstance(query_data, unicode):
_from = 0
while True:
total = self.execute_simple_query(url, _from, mappings, result_fields, result_columns, result_rows)
_from += size
if _from >= total:
break
else:
# TODO: Handle complete ElasticSearch queries (JSON based sent over HTTP POST)
raise Exception("Advanced queries are not supported")
json_data = json.dumps({
"columns" : result_columns,
"rows" : result_rows
})
except KeyboardInterrupt:
error = "Query cancelled by user."
json_data = None
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
return json_data, error
register(ElasticSearch)


@@ -0,0 +1,83 @@
import json
import datetime
import requests
import logging
from redash.query_runner import *
from redash.utils import JSONEncoder
logger = logging.getLogger(__name__)
def _transform_result(response):
columns = ({'name': 'Time::x', 'type': TYPE_DATETIME},
{'name': 'value::y', 'type': TYPE_FLOAT},
{'name': 'name::series', 'type': TYPE_STRING})
rows = []
for series in response.json():
for values in series['datapoints']:
timestamp = datetime.datetime.fromtimestamp(int(values[1]))
rows.append({'Time::x': timestamp, 'name::series': series['target'], 'value::y': values[0]})
data = {'columns': columns, 'rows': rows}
return json.dumps(data, cls=JSONEncoder)
class Graphite(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'url': {
'type': 'string'
},
'username': {
'type': 'string'
},
'password': {
'type': 'string'
},
'verify': {
'type': 'boolean',
'title': 'Verify SSL certificate'
}
},
'required': ['url']
}
@classmethod
def annotate_query(cls):
return False
def __init__(self, configuration_json):
super(Graphite, self).__init__(configuration_json)
if "username" in self.configuration and self.configuration["username"]:
self.auth = (self.configuration["username"], self.configuration["password"])
else:
self.auth = None
self.verify = self.configuration["verify"]
self.base_url = "%s/render?format=json&" % self.configuration['url']
def run_query(self, query):
url = "%s%s" % (self.base_url, "&".join(query.split("\n")))
error = None
data = None
try:
response = requests.get(url, auth=self.auth, verify=self.verify)
if response.status_code == 200:
data = _transform_result(response)
else:
error = "Failed getting results (%d)" % response.status_code
except Exception, ex:
data = None
error = ex.message
return data, error
register(Graphite)
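# Illustrative sketch (assumed target names): a Graphite query in Redash is a
# newline-separated list of render-API parameters; run_query() joins them with "&"
# onto <url>/render?format=json&.
graphite_query = "\n".join([
    "target=summarize(stats.web.requests, '1hour', 'sum')",
    "from=-24h",
])
# -> GET <url>/render?format=json&target=summarize(...)&from=-24h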


@@ -0,0 +1,83 @@
import json
import logging
from redash.utils import JSONEncoder
from redash.query_runner import *
logger = logging.getLogger(__name__)
try:
from influxdb import InfluxDBClusterClient
enabled = True
except ImportError:
logger.warning("Missing dependencies. Please install influxdb.")
logger.warning("You can use pip: pip install influxdb")
enabled = False
def _transform_result(results):
result_columns = []
result_rows = []
for result in results:
if not result_columns:
for c in result.raw['series'][0]['columns']:
result_columns.append({ "name": c })
for point in result.get_points():
result_rows.append(point)
return json.dumps({
"columns" : result_columns,
"rows" : result_rows
}, cls=JSONEncoder)
class InfluxDB(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'url': {
'type': 'string'
}
},
'required': ['url']
}
@classmethod
def enabled(cls):
return enabled
@classmethod
def annotate_query(cls):
return False
@classmethod
def type(cls):
return "influxdb"
def __init__(self, configuration_json):
super(InfluxDB, self).__init__(configuration_json)
def run_query(self, query):
client = InfluxDBClusterClient.from_DSN(self.configuration['url'])
logger.debug("influxdb url: %s", self.configuration['url'])
logger.debug("influxdb got query: %s", query)
try:
results = client.query(query)
if not isinstance(results, list):
results = [results]
json_data = _transform_result(results)
error = None
except Exception, ex:
json_data = None
error = ex.message
return json_data, error
register(InfluxDB)
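# Illustrative sketch (assumed host and credentials): the single "url" option is a DSN
# handed to InfluxDBClusterClient.from_DSN, e.g.:
#   influxdb://user:password@influx.example.com:8086/metrics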


@@ -1,33 +1,46 @@
import json
import datetime
import logging
import json
import sys
import re
import time
from redash.utils import JSONEncoder
from redash.query_runner import *
logger = logging.getLogger(__name__)
try:
import pymongo
from bson.objectid import ObjectId
from bson.son import SON
enabled = True
except ImportError:
print "Missing dependencies. Please install pymongo."
print "You can use pip: pip install pymongo"
raise
logger.warning("Missing dependencies. Please install pymongo.")
logger.warning("You can use pip: pip install pymongo")
enabled = False
TYPES_MAP = {
ObjectId : "string",
str : "string",
unicode : "string",
int : "integer",
long : "integer",
float : "float",
bool : "boolean",
datetime.datetime: "datetime",
str: TYPE_STRING,
unicode: TYPE_STRING,
int: TYPE_INTEGER,
long: TYPE_INTEGER,
float: TYPE_FLOAT,
bool: TYPE_BOOLEAN,
datetime.datetime: TYPE_DATETIME,
}
date_regex = re.compile("ISODate\(\"(.*)\"\)", re.IGNORECASE)
class MongoDBJSONEncoder(JSONEncoder):
def default(self, o):
if isinstance(o, ObjectId):
return str(o)
return super(MongoDBJSONEncoder, self).default(o)
# Simple query example:
#
# {
@@ -68,10 +81,8 @@ date_regex = re.compile("ISODate\(\"(.*)\"\)", re.IGNORECASE)
# },
# {
# "$group" : {
# {
# "_id" : "$tags",
# "count" : { "$sum" : 1 }
# }
# "_id" : "$tags",
# "count" : { "$sum" : 1 }
# }
# },
# {
@@ -90,15 +101,54 @@ date_regex = re.compile("ISODate\(\"(.*)\"\)", re.IGNORECASE)
# }
#
#
def mongodb(connection_string):
def _get_column_by_name(columns, column_name):
class MongoDB(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'connectionString': {
'type': 'string',
'title': 'Connection String'
},
'dbName': {
'type': 'string',
'title': "Database Name"
},
'replicaSetName': {
'type': 'string',
'title': 'Replica Set Name'
},
},
'required': ['connectionString']
}
@classmethod
def enabled(cls):
return enabled
@classmethod
def annotate_query(cls):
return False
def __init__(self, configuration_json):
super(MongoDB, self).__init__(configuration_json)
self.syntax = 'json'
self.db_name = self.configuration["dbName"]
self.is_replica_set = True if "replicaSetName" in self.configuration and self.configuration["replicaSetName"] else False
def _get_column_by_name(self, columns, column_name):
for c in columns:
if "name" in c and c["name"] == column_name:
return c
return None
def _convert_date(q, field_name):
def _convert_date(self, q, field_name):
m = date_regex.findall(q[field_name])
if len(m) > 0:
if q[field_name].find(":") == -1:
@@ -106,44 +156,27 @@ def mongodb(connection_string):
else:
q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d %H:%M")))
def query_runner(query):
if not "dbName" in connection_string or not connection_string["dbName"]:
return None, "dbName is missing from connection string JSON or is empty"
db_name = connection_string["dbName"]
if not "connectionString" in connection_string or not connection_string["connectionString"]:
return None, "connectionString is missing from connection string JSON or is empty"
is_replica_set = True if "replicaSetName" in connection_string and connection_string["replicaSetName"] else False
if is_replica_set:
if not connection_string["replicaSetName"]:
return None, "replicaSetName is set in the connection string JSON but is empty"
db_connection = pymongo.MongoReplicaSetClient(connection_string["connectionString"], replicaSet=connection_string["replicaSetName"])
def run_query(self, query):
if self.is_replica_set:
db_connection = pymongo.MongoReplicaSetClient(self.configuration["connectionString"], replicaSet=self.configuration["replicaSetName"])
else:
db_connection = pymongo.MongoClient(connection_string["connectionString"])
db_connection = pymongo.MongoClient(self.configuration["connectionString"])
if db_name not in db_connection.database_names():
return None, "Unknown database name '%s'" % db_name
if self.db_name not in db_connection.database_names():
return None, "Unknown database name '%s'" % self.db_name
db = db_connection[db_name]
db = db_connection[self.db_name]
logging.debug("mongodb connection string: %s", connection_string)
logging.debug("mongodb got query: %s", query)
logger.debug("mongodb connection string: %s", self.configuration['connectionString'])
logger.debug("mongodb got query: %s", query)
try:
query_data = json.loads(query)
except:
except ValueError:
return None, "Invalid query format. The query is not a valid JSON."
if "query" in query_data and "aggregate" in query_data:
return None, "'query' and 'aggregate' sections cannot be used at the same time"
collection = None
if not "collection" in query_data:
return None, "'collection' must be set"
if "collection" not in query_data:
return None, "'collection' must have a value to run a query"
else:
collection = query_data["collection"]
@@ -153,11 +186,11 @@ def mongodb(connection_string):
for k in q:
if q[k] and type(q[k]) in [str, unicode]:
logging.debug(q[k])
_convert_date(q, k)
self._convert_date(q, k)
elif q[k] and type(q[k]) is dict:
for k2 in q[k]:
if type(q[k][k2]) in [str, unicode]:
_convert_date(q[k], k2)
self._convert_date(q[k], k2)
f = None
@@ -184,6 +217,12 @@ def mongodb(connection_string):
if "fields" in query_data:
f = query_data["fields"]
s = None
if "sort" in query_data and query_data["sort"]:
s = []
for field_data in query_data["sort"]:
s.append((field_data["name"], field_data["direction"]))
columns = []
rows = []
@@ -209,23 +248,19 @@ def mongodb(connection_string):
for r in cursor:
for k in r:
if _get_column_by_name(columns, k) is None:
if self._get_column_by_name(columns, k) is None:
columns.append({
"name": k,
"friendly_name": k,
"type": TYPES_MAP[type(r[k])] if type(r[k]) in TYPES_MAP else None
"type": TYPES_MAP.get(type(r[k]), TYPE_STRING)
})
# Convert ObjectId to string
if type(r[k]) == ObjectId:
r[k] = str(r[k])
rows.append(r)
if f:
ordered_columns = []
for k in sorted(f, key=f.get):
ordered_columns.append(_get_column_by_name(columns, k))
ordered_columns.append(self._get_column_by_name(columns, k))
columns = ordered_columns
@@ -234,9 +269,8 @@ def mongodb(connection_string):
"rows": rows
}
error = None
json_data = json.dumps(data, cls=JSONEncoder)
json_data = json.dumps(data, cls=MongoDBJSONEncoder)
return json_data, error
query_runner.annotate_query = False
return query_runner
register(MongoDB)
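# Illustrative sketch (assumed values): the options blob matching the MongoDB
# configuration_schema() above; a non-empty replicaSetName switches the runner to
# MongoReplicaSetClient.
import json

mongodb_options = json.dumps({
    "connectionString": "mongodb://mongo1.example.com:27017",
    "dbName": "analytics",
    "replicaSetName": "rs0",   # optional
})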


@@ -0,0 +1,152 @@
import sys
import json
import logging
from redash.utils import JSONEncoder
from redash.query_runner import *
logger = logging.getLogger(__name__)
types_map = {
0: TYPE_FLOAT,
1: TYPE_INTEGER,
2: TYPE_INTEGER,
3: TYPE_INTEGER,
4: TYPE_FLOAT,
5: TYPE_FLOAT,
7: TYPE_DATETIME,
8: TYPE_INTEGER,
9: TYPE_INTEGER,
10: TYPE_DATE,
12: TYPE_DATETIME,
15: TYPE_STRING,
16: TYPE_INTEGER,
246: TYPE_FLOAT,
253: TYPE_STRING,
254: TYPE_STRING,
}
class Mysql(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'host': {
'type': 'string'
},
'user': {
'type': 'string'
},
'passwd': {
'type': 'string',
'title': 'Password'
},
'db': {
'type': 'string',
'title': 'Database name'
},
"port": {
"type": "number"
},
},
'required': ['db']
}
@classmethod
def enabled(cls):
try:
import MySQLdb
except ImportError:
return False
return True
def __init__(self, configuration_json):
super(Mysql, self).__init__(configuration_json)
def get_schema(self):
query = """
SELECT col.table_schema,
col.table_name,
col.column_name
FROM `information_schema`.`columns` col
INNER JOIN
(SELECT table_schema,
TABLE_NAME
FROM information_schema.tables
WHERE table_type <> 'SYSTEM VIEW' AND table_schema NOT IN ('performance_schema', 'mysql')) tables ON tables.table_schema = col.table_schema
AND tables.TABLE_NAME = col.TABLE_NAME;
"""
results, error = self.run_query(query)
if error is not None:
raise Exception("Failed getting schema.")
results = json.loads(results)
schema = {}
for row in results['rows']:
if row['table_schema'] != self.configuration['db']:
table_name = '{}.{}'.format(row['table_schema'], row['table_name'])
else:
table_name = row['table_name']
if table_name not in schema:
schema[table_name] = {'name': table_name, 'columns': []}
schema[table_name]['columns'].append(row['column_name'])
return schema.values()
def run_query(self, query):
import MySQLdb
connection = MySQLdb.connect(host=self.configuration.get('host', ''),
user=self.configuration.get('user', ''),
passwd=self.configuration.get('passwd', ''),
db=self.configuration['db'],
port=self.configuration.get('port', 3306),
charset='utf8', use_unicode=True)
cursor = connection.cursor()
logger.debug("MySQL running query: %s", query)
try:
cursor.execute(query)
data = cursor.fetchall()
# TODO - very similar to pg.py
if cursor.description is not None:
columns_data = [(i[0], i[1]) for i in cursor.description]
rows = [dict(zip((c[0] for c in columns_data), row)) for row in data]
columns = [{'name': col[0],
'friendly_name': col[0],
'type': types_map.get(col[1], None)} for col in columns_data]
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
error = None
else:
json_data = None
error = "No data was returned."
cursor.close()
except MySQLdb.Error, e:
json_data = None
error = e.args[1]
except KeyboardInterrupt:
error = "Query cancelled by user."
json_data = None
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
finally:
connection.close()
return json_data, error
register(Mysql)
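A hedged sketch of the options document for a MySQL data source, matching the configuration_schema above; every value is a placeholder:

# Placeholder values only; "db" is the one required property in the schema above.
mysql_options = {
    "host": "127.0.0.1",
    "user": "redash_reader",
    "passwd": "secret",      # key name follows the MySQLdb-style "passwd" used by the schema
    "db": "analytics",
    "port": 3306
}

Serialized as JSON, a document of this shape is what the -o flag of manage.py ds new takes (the bootstrap script further down shows the same pattern for the pg type).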

redash/query_runner/pg.py

@@ -0,0 +1,170 @@
import json
import logging
import psycopg2
import select
import sys
from redash.query_runner import *
from redash.utils import JSONEncoder
logger = logging.getLogger(__name__)
types_map = {
20: TYPE_INTEGER,
21: TYPE_INTEGER,
23: TYPE_INTEGER,
700: TYPE_FLOAT,
1700: TYPE_FLOAT,
701: TYPE_FLOAT,
16: TYPE_BOOLEAN,
1082: TYPE_DATE,
1114: TYPE_DATETIME,
1184: TYPE_DATETIME,
1014: TYPE_STRING,
1015: TYPE_STRING,
1008: TYPE_STRING,
1009: TYPE_STRING,
2951: TYPE_STRING
}
def _wait(conn):
while 1:
try:
state = conn.poll()
if state == psycopg2.extensions.POLL_OK:
break
elif state == psycopg2.extensions.POLL_WRITE:
select.select([], [conn.fileno()], [])
elif state == psycopg2.extensions.POLL_READ:
select.select([conn.fileno()], [], [])
else:
raise psycopg2.OperationalError("poll() returned %s" % state)
except select.error:
raise psycopg2.OperationalError("select.error received")
class PostgreSQL(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
"type": "object",
"properties": {
"user": {
"type": "string"
},
"password": {
"type": "string"
},
"host": {
"type": "string"
},
"port": {
"type": "number"
},
"dbname": {
"type": "string",
"title": "Database Name"
}
},
"required": ["dbname"]
}
@classmethod
def type(cls):
return "pg"
def __init__(self, configuration_json):
super(PostgreSQL, self).__init__(configuration_json)
values = []
for k, v in self.configuration.iteritems():
values.append("{}={}".format(k, v))
self.connection_string = " ".join(values)
def get_schema(self):
query = """
SELECT table_schema, table_name, column_name
FROM information_schema.columns
WHERE table_schema NOT IN ('pg_catalog', 'information_schema');
"""
results, error = self.run_query(query)
if error is not None:
raise Exception("Failed getting schema.")
results = json.loads(results)
schema = {}
for row in results['rows']:
if row['table_schema'] != 'public':
table_name = '{}.{}'.format(row['table_schema'], row['table_name'])
else:
table_name = row['table_name']
if table_name not in schema:
schema[table_name] = {'name': table_name, 'columns': []}
schema[table_name]['columns'].append(row['column_name'])
return schema.values()
def run_query(self, query):
connection = psycopg2.connect(self.connection_string, async=True)
_wait(connection)
cursor = connection.cursor()
try:
cursor.execute(query)
_wait(connection)
# While a set would be more efficient for the membership checks here, it wouldn't preserve column order,
# which we need; due to the small size of the data the list lookup is acceptable.
column_names = []
columns = []
duplicates_counter = 1
for column in cursor.description:
# TODO: this deduplication needs to be generalized and reused in all query runners.
column_name = column.name
if column_name in column_names:
column_name += str(duplicates_counter)
duplicates_counter += 1
column_names.append(column_name)
columns.append({
'name': column_name,
'friendly_name': column_name,
'type': types_map.get(column.type_code, None)
})
rows = [dict(zip(column_names, row)) for row in cursor]
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
error = None
cursor.close()
except (select.error, OSError) as e:
logging.exception(e)
error = "Query interrupted. Please retry."
json_data = None
except psycopg2.DatabaseError as e:
logging.exception(e)
json_data = None
error = e.message
except KeyboardInterrupt:
connection.cancel()
error = "Query cancelled by user."
json_data = None
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
finally:
connection.close()
return json_data, error
register(PostgreSQL)
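As a quick illustration of PostgreSQL.__init__ above, the configuration dict is flattened into a libpq-style keyword/value connection string; the values below are placeholders:

# Mirrors the loop in __init__ above; configuration values are invented for illustration.
configuration = {"user": "redash_reader", "host": "localhost", "dbname": "redash"}
connection_string = " ".join("{}={}".format(k, v) for k, v in configuration.items())
# e.g. "user=redash_reader host=localhost dbname=redash" (dict iteration order may vary)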


@@ -0,0 +1,203 @@
import sys
import datetime
import json
import logging
import weakref
from redash.query_runner import *
from redash import models
import importlib
logger = logging.getLogger(__name__)
from RestrictedPython import compile_restricted
from RestrictedPython.Guards import safe_builtins
class CustomPrint(object):
""" CustomPrint redirect "print" calls to be sent as "log" on the result object """
def __init__(self, python_runner):
self._python_runner = python_runner
def write(self, text):
if self._python_runner()._enable_print_log:
if text and text.strip():
log_line = "[{0}] {1}".format(datetime.datetime.utcnow().isoformat(), text)
self._python_runner()._result["log"].append(log_line)
def __call__(self):
return self
class Python(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'allowedImportModules': {
'type': 'string',
'title': 'Modules to import prior to running the script'
}
},
}
@classmethod
def enabled(cls):
return True
@classmethod
def annotate_query(cls):
return False
def __init__(self, configuration_json):
global ALLOWED_MODULES
super(Python, self).__init__(configuration_json)
self.syntax = "python"
self._allowed_modules = {}
self._result = { "rows" : [], "columns" : [], "log" : [] }
self._enable_print_log = True
if self.configuration.get("allowedImportModules", None):
for item in self.configuration["allowedImportModules"].split(","):
self._allowed_modules[item] = None
def custom_import(self, name, globals=None, locals=None, fromlist=(), level=0):
if name in self._allowed_modules:
m = None
if self._allowed_modules[name] is None:
m = importlib.import_module(name)
self._allowed_modules[name] = m
else:
m = self._allowed_modules[name]
return m
raise Exception("'{0}' is not configured as a supported import module".format(name))
def custom_write(self, obj):
"""
Custom hooks which control the way objects/lists/tuples/dicts behave in
RestrictedPython
"""
return obj
def custom_get_item(self, obj, key):
return obj[key]
def custom_get_iter(self, obj):
return iter(obj)
def disable_print_log(self):
self._enable_print_log = False
def enable_print_log(self):
self._enable_print_log = True
def add_result_column(self, result, column_name, friendly_name, column_type):
""" Helper function to add columns inside a Python script running in re:dash in an easier way """
if column_type not in SUPPORTED_COLUMN_TYPES:
raise Exception("'{0}' is not a supported column type".format(column_type))
if not "columns" in result:
result["columns"] = []
result["columns"].append({
"name" : column_name,
"friendly_name" : friendly_name,
"type" : column_type
})
def add_result_row(self, result, values):
if not "rows" in result:
result["rows"] = []
result["rows"].append(values)
def execute_query(self, data_source_name_or_id, query):
try:
if type(data_source_name_or_id) == int:
data_source = models.DataSource.get_by_id(data_source_name_or_id)
else:
data_source = models.DataSource.get(models.DataSource.name==data_source_name_or_id)
except models.DataSource.DoesNotExist:
raise Exception("Wrong data source name/id: %s." % data_source_name_or_id)
query_runner = get_query_runner(data_source.type, data_source.options)
data, error = query_runner.run_query(query)
if error is not None:
raise Exception(error)
# TODO: allow avoiding the json.dumps/loads in same process
return json.loads(data)
def get_query_result(self, query_id):
try:
query = models.Query.get_by_id(query_id)
except models.Query.DoesNotExist:
raise Exception("Query id %s does not exist." % query_id)
if query.latest_query_data is None:
raise Exception("Query does not have results yet.")
if query.latest_query_data.data is None:
raise Exception("Query does not have results yet.")
return json.loads(query.latest_query_data.data)
def run_query(self, query):
try:
error = None
code = compile_restricted(query, '<string>', 'exec')
safe_builtins["_write_"] = self.custom_write
safe_builtins["__import__"] = self.custom_import
safe_builtins["_getattr_"] = getattr
safe_builtins["getattr"] = getattr
safe_builtins["_setattr_"] = setattr
safe_builtins["setattr"] = setattr
safe_builtins["_getitem_"] = self.custom_get_item
safe_builtins["_getiter_"] = self.custom_get_iter
safe_builtins["_print_"] = CustomPrint(weakref.ref(self))
script_locals = { "result" : self._result }
restricted_globals = dict(__builtins__=safe_builtins)
restricted_globals["get_query_result"] = self.get_query_result
restricted_globals["execute_query"] = self.execute_query
restricted_globals["add_result_column"] = self.add_result_column
restricted_globals["add_result_row"] = self.add_result_row
restricted_globals["disable_print_log"] = self.disable_print_log
restricted_globals["enable_print_log"] = self.enable_print_log
restricted_globals["TYPE_DATETIME"] = TYPE_DATETIME
restricted_globals["TYPE_BOOLEAN"] = TYPE_BOOLEAN
restricted_globals["TYPE_INTEGER"] = TYPE_INTEGER
restricted_globals["TYPE_STRING"] = TYPE_STRING
restricted_globals["TYPE_DATE"] = TYPE_DATE
restricted_globals["TYPE_FLOAT"] = TYPE_FLOAT
# TODO: Figure out the best way to have a timeout on a script
# One option is to use ETA with Celery + timeouts on workers
# And replacement of worker process every X requests handled.
exec(code) in restricted_globals, script_locals
json_data = json.dumps(self._result)
except KeyboardInterrupt:
error = "Query cancelled by user."
json_data = None
except Exception as e:
error = str(e)
json_data = None
return json_data, error
register(Python)
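To make the restricted execution environment above concrete, here is a minimal sketch of a user script as it might be written for this Python query runner. The data source name, SQL text and column names are assumptions; "result", the add_result_* helpers, execute_query and the TYPE_* constants are the names exposed through script_locals and restricted_globals above:

# Runs inside the sandbox, so it relies on names injected by run_query above.
add_result_column(result, "country", "Country", TYPE_STRING)
add_result_column(result, "signups", "Signups", TYPE_INTEGER)

# "events db" and the SQL below are placeholders for a real data source and query.
data = execute_query("events db", "SELECT country, count(*) AS signups FROM signups GROUP BY 1")

for row in data["rows"]:
    print "processing", row["country"]   # captured by CustomPrint and shown as a log line
    add_result_row(result, {"country": row["country"], "signups": row["signups"]})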


@@ -0,0 +1,67 @@
import os
import sys
import subprocess
from redash.query_runner import *
class Script(BaseQueryRunner):
@classmethod
def enabled(cls):
return "check_output" in subprocess.__dict__
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'path': {
'type': 'string',
'title': 'Scripts path'
}
},
'required': ['path']
}
@classmethod
def annotate_query(cls):
return False
def __init__(self, configuration_json):
super(Script, self).__init__(configuration_json)
# Poor man's protection against running scripts from outside the scripts directory
if self.configuration["path"].find("../") > -1:
raise ValidationError("Scripts can only be run from the configured scripts directory")
def run_query(self, query):
try:
json_data = None
error = None
query = query.strip()
script = os.path.join(self.configuration["path"], query.split(" ")[0])
if not os.path.exists(script):
return None, "Script '%s' not found in script directory" % query
script = os.path.join(self.configuration["path"], query)
output = subprocess.check_output(script.split(" "), shell=False)
if output is not None:
output = output.strip()
if output != "":
return output, None
error = "Error reading output"
except subprocess.CalledProcessError as e:
return None, str(e)
except KeyboardInterrupt:
error = "Query cancelled by user."
json_data = None
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
return json_data, error
register(Script)
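For the Script runner above, the "query" is simply a command line resolved against the configured scripts path, and its stdout becomes the query result. A small sketch reproducing that composition with made-up values:

# Illustrative reconstruction of run_query above; path and script name are placeholders.
import os
import subprocess
path = "/opt/redash/scripts"           # the configured "path" option
query = "daily_report.sh --days 7"     # what a user would type as the query
script = os.path.join(path, query)
output = subprocess.check_output(script.split(" "), shell=False).strip()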


@@ -1,16 +1,30 @@
import json
import logging
import sys
import os
import urllib2
def url(connection_string):
from redash.query_runner import *
def query_runner(query):
base_url = connection_string
class Url(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'url': {
'type': 'string',
'title': 'URL base path'
}
}
}
@classmethod
def annotate_query(cls):
return False
def run_query(self, query):
base_url = self.configuration["url"]
try:
json_data = None
error = None
query = query.strip()
@@ -41,5 +55,4 @@ def url(connection_string):
return json_data, error
query_runner.annotate_query = False
return query_runner
register(Url)

redash/saml_auth.py

@@ -0,0 +1,145 @@
# Copyright 2015 Okta, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from flask.ext.login import login_user
import requests
from flask import redirect, url_for, Blueprint, request
from flask_oauth import OAuth
from redash import models, settings
from saml2 import (
BINDING_HTTP_POST,
BINDING_HTTP_REDIRECT,
entity,
)
from saml2.client import Saml2Client
from saml2.config import Config as Saml2Config
logger = logging.getLogger('saml_auth')
blueprint = Blueprint('saml_auth', __name__)
def get_saml_client():
'''
Return the SAML configuration.
The configuration is a dict for use by saml2.config.Config
'''
if settings.SAML_CALLBACK_SERVER_NAME:
acs_url=settings.SAML_CALLBACK_SERVER_NAME + url_for("saml_auth.idp_initiated")
else:
acs_url = url_for("saml_auth.idp_initiated",_external=True)
# NOTE:
# Ideally, this should fetch the metadata and pass it to
# PySAML2 via the "inline" metadata type.
# However, this method doesn't seem to work on PySAML2 v2.4.0
#
# SAML metadata changes very rarely. On a production system,
# this data should be cached as appropriate for your deployment.
rv = requests.get(settings.SAML_METADATA_URL)
import tempfile
tmp = tempfile.NamedTemporaryFile()
f = open(tmp.name, 'w')
f.write(rv.text)
f.close()
saml_settings = {
'metadata': {
# 'inline': metadata,
"local": [tmp.name]
},
'service': {
'sp': {
'endpoints': {
'assertion_consumer_service': [
(acs_url, BINDING_HTTP_REDIRECT),
(acs_url, BINDING_HTTP_POST)
],
},
# Don't verify that the incoming requests originate from us via
# the built-in cache for authn request ids in pysaml2
'allow_unsolicited': True,
# Don't sign authn requests, since signed requests only make
# sense in a situation where you control both the SP and IdP
'authn_requests_signed': False,
'logout_requests_signed': True,
'want_assertions_signed': True,
'want_response_signed': False,
},
},
}
spConfig = Saml2Config()
spConfig.load(saml_settings)
spConfig.allow_unknown_attributes = True
saml_client = Saml2Client(config=spConfig)
tmp.close()
return saml_client
@blueprint.route("/saml/callback", methods=['POST'])
def idp_initiated():
saml_client = get_saml_client()
authn_response = saml_client.parse_authn_request_response(
request.form['SAMLResponse'],
entity.BINDING_HTTP_POST)
authn_response.get_identity()
user_info = authn_response.get_subject()
email = user_info.text
name = "%s %s" % (authn_response.ava['FirstName'][0], authn_response.ava['LastName'][0])
# This is what is known as "Just In Time (JIT) provisioning".
# What that means is that, if a user in a SAML assertion
# isn't in the user store, we create that user first, then log them in
try:
user_object = models.User.get(models.User.email == email)
if user_object.name != name:
logger.debug("Updating user name (%r -> %r)", user_object.name, name)
user_object.name = name
user_object.save()
except models.User.DoesNotExist:
logger.debug("Creating user object (%r)", name)
user_object = models.User.create(name=name, email=email, groups=models.User.DEFAULT_GROUPS)
login_user(user_object, remember=True)
url = url_for('index')
return redirect(url)
@blueprint.route("/saml/login")
def sp_initiated():
if not settings.SAML_METADATA_URL:
logger.error("Cannot invoke saml endpoint without metadata url in settings.")
return redirect(url_for('index'))
saml_client = get_saml_client()
reqid, info = saml_client.prepare_for_authenticate()
redirect_url = None
# Select the IdP URL to send the AuthN request to
for key, value in info['headers']:
if key is 'Location':
redirect_url = value
response = redirect(redirect_url, code=302)
# NOTE:
# I realize I _technically_ don't need to set Cache-Control or Pragma:
# http://stackoverflow.com/a/5494469
# However, Section 3.2.3.2 of the SAML spec suggests they are set:
# http://docs.oasis-open.org/security/saml/v2.0/saml-bindings-2.0-os.pdf
# We set those headers here as a "belt and suspenders" approach,
# since enterprise environments don't always conform to RFCs
response.headers['Cache-Control'] = 'no-cache, no-store'
response.headers['Pragma'] = 'no-cache'
return response


@@ -32,6 +32,10 @@ def array_from_string(str):
return array
def set_from_string(str):
return set(array_from_string(str))
def parse_boolean(str):
return json.loads(str.lower())
@@ -44,38 +48,58 @@ STATSD_HOST = os.environ.get('REDASH_STATSD_HOST', "127.0.0.1")
STATSD_PORT = int(os.environ.get('REDASH_STATSD_PORT', "8125"))
STATSD_PREFIX = os.environ.get('REDASH_STATSD_PREFIX', "redash")
# The following is kept for backward compatibility and shouldn't be used any more.
CONNECTION_ADAPTER = os.environ.get("REDASH_CONNECTION_ADAPTER", "pg")
CONNECTION_STRING = os.environ.get("REDASH_CONNECTION_STRING", "user= password= host= port=5439 dbname=")
# Connection settings for re:dash's own database (where we store the queries, results, etc)
DATABASE_CONFIG = parse_db_url(os.environ.get("REDASH_DATABASE_URL", "postgresql://postgres"))
# Celery related settings
CELERY_BROKER = os.environ.get("REDASH_CELERY_BROKER", REDIS_URL)
CELERY_BACKEND = os.environ.get("REDASH_CELERY_BACKEND", REDIS_URL)
CELERY_FLOWER_URL = os.environ.get("REDASH_CELERY_FLOWER_URL", "/flower")
# The following enables a periodic job (every 5 minutes) that removes unused query results. It is kept behind this
# "feature flag" until proven to be safe.
QUERY_RESULTS_CLEANUP_ENABLED = parse_boolean(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_ENABLED", "false"))
AUTH_TYPE = os.environ.get("REDASH_AUTH_TYPE", "hmac")
PASSWORD_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_PASSWORD_LOGIN_ENABLED", "true"))
# Google Apps domains to allow access from; any user with an email address in one of these domains will be
# allowed access.
GOOGLE_APPS_DOMAIN = os.environ.get("REDASH_GOOGLE_APPS_DOMAIN", "")
GOOGLE_APPS_DOMAIN = set_from_string(os.environ.get("REDASH_GOOGLE_APPS_DOMAIN", ""))
GOOGLE_CLIENT_ID = os.environ.get("REDASH_GOOGLE_CLIENT_ID", "")
GOOGLE_CLIENT_SECRET = os.environ.get("REDASH_GOOGLE_CLIENT_SECRET", "")
GOOGLE_OAUTH_ENABLED = GOOGLE_CLIENT_ID and GOOGLE_CLIENT_SECRET
PASSWORD_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_PASSWORD_LOGIN_ENABLED", "true"))
SAML_METADATA_URL = os.environ.get("REDASH_SAML_METADATA_URL", "")
SAML_LOGIN_ENABLED = SAML_METADATA_URL != ""
SAML_CALLBACK_SERVER_NAME = os.environ.get("REDASH_SAML_CALLBACK_SERVER_NAME", "")
STATIC_ASSETS_PATH = fix_assets_path(os.environ.get("REDASH_STATIC_ASSETS_PATH", "../rd_ui/app/"))
WORKERS_COUNT = int(os.environ.get("REDASH_WORKERS_COUNT", "2"))
JOB_EXPIRY_TIME = int(os.environ.get("REDASH_JOB_EXPIRY_TIME", 3600*6))
JOB_EXPIRY_TIME = int(os.environ.get("REDASH_JOB_EXPIRY_TIME", 3600 * 6))
COOKIE_SECRET = os.environ.get("REDASH_COOKIE_SECRET", "c292a0a3aa32397cdb050e233733900f")
LOG_LEVEL = os.environ.get("REDASH_LOG_LEVEL", "INFO")
CLIENT_SIDE_METRICS = parse_boolean(os.environ.get("REDASH_CLIENT_SIDE_METRICS", "false"))
ANALYTICS = os.environ.get("REDASH_ANALYTICS", "")
# CORS settings for the Query Result API (and possibly future external APIs).
# In most cases all you need to do is set REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN
# to the calling domain (or domains, in a comma-separated list).
ACCESS_CONTROL_ALLOW_ORIGIN = set_from_string(os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN", ""))
ACCESS_CONTROL_ALLOW_CREDENTIALS = parse_boolean(os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_CREDENTIALS", "false"))
ACCESS_CONTROL_REQUEST_METHOD = os.environ.get("REDASH_CORS_ACCESS_CONTROL_REQUEST_METHOD", "GET, POST, PUT")
ACCESS_CONTROL_ALLOW_HEADERS = os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_HEADERS", "Content-Type")
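To illustrate the parsing above, the comma-separated origin list goes through set_from_string (defined earlier in this settings module), so several calling domains can be allowed at once; the domains below are placeholders:

# Hypothetical domains, shown only to illustrate how the comma-separated value is parsed.
example_origins = set_from_string("http://example.com,http://bi.example.com")
# example_origins is then a set such as set(["http://example.com", "http://bi.example.com"])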
# Query Runners
QUERY_RUNNERS = array_from_string(os.environ.get("REDASH_ENABLED_QUERY_RUNNERS", ",".join([
'redash.query_runner.big_query',
'redash.query_runner.graphite',
'redash.query_runner.mongodb',
'redash.query_runner.mysql',
'redash.query_runner.pg',
'redash.query_runner.script',
'redash.query_runner.url',
'redash.query_runner.influx_db',
])))
# Features:
FEATURE_TABLES_PERMISSIONS = parse_boolean(os.environ.get("REDASH_FEATURE_TABLES_PERMISSIONS", "false"))


@@ -1,14 +1,13 @@
import time
import datetime
import logging
import redis
from celery import Task
from celery.result import AsyncResult
from celery.utils.log import get_task_logger
from redash import redis_connection, models, statsd_client, settings
from redash import redis_connection, models, statsd_client, settings, utils
from redash.utils import gen_query_hash
from redash.worker import celery
from redash.data.query_runner import get_query_runner
from redash.query_runner import get_query_runner
logger = get_task_logger(__name__)
@@ -47,12 +46,13 @@ class QueryTask(object):
return self._async_result.id
@classmethod
def add_task(cls, query, data_source, scheduled=False):
def add_task(cls, query, data_source, scheduled=False, metadata={}):
query_hash = gen_query_hash(query)
logging.info("[Manager][%s] Inserting job", query_hash)
logging.info("[Manager] Metadata: [%s]", metadata)
try_count = 0
job = None
while try_count < cls.MAX_RETRIES:
try_count += 1
@@ -77,8 +77,9 @@ class QueryTask(object):
else:
queue_name = data_source.queue_name
result = execute_query.apply_async(args=(query, data_source.id), queue=queue_name)
result = execute_query.apply_async(args=(query, data_source.id, metadata), queue=queue_name)
job = cls(async_result=result)
logging.info("[Manager][%s] Created new job: %s", query_hash, job.id)
pipe.set(cls._job_lock_id(query_hash, data_source.id), job.id, settings.JOB_EXPIRY_TIME)
pipe.execute()
@@ -146,13 +147,11 @@ def refresh_queries():
outdated_queries_count = 0
for query in models.Query.outdated_queries():
# TODO: this should go into lower priority
QueryTask.add_task(query.query, query.data_source, scheduled=True)
QueryTask.add_task(query.query, query.data_source, scheduled=True,
metadata={'Query ID': query.id, 'Username': 'Scheduled'})
outdated_queries_count += 1
statsd_client.gauge('manager.outdated_queries', outdated_queries_count)
# TODO: decide if we still need this
# statsd_client.gauge('manager.queue_size', self.redis_connection.zcard('jobs'))
logger.info("Done refreshing queries. Found %d outdated queries." % outdated_queries_count)
@@ -199,9 +198,9 @@ def cleanup_tasks():
logger.warning("%s is ready (%s), removing lock.", lock_keys[i], t.celery_status)
redis_connection.delete(lock_keys[i])
if t.celery_status == 'STARTED' and t.id not in all_tasks:
logger.warning("Couldn't find active job for: %s, removing lock.", lock_keys[i])
redis_connection.delete(lock_keys[i])
# if t.celery_status == 'STARTED' and t.id not in all_tasks:
# logger.warning("Couldn't find active job for: %s, removing lock.", lock_keys[i])
# redis_connection.delete(lock_keys[i])
@celery.task(base=BaseTask)
@@ -220,9 +219,19 @@ def cleanup_query_results():
logger.info("Deleted %d unused query results out of total of %d." % (deleted_count, total_unused_query_results))
@celery.task(base=BaseTask)
def refresh_schemas():
"""
Refreshes the schema of every data source.
"""
for ds in models.DataSource.all():
logger.info("Refreshing schema for: {}".format(ds.name))
ds.get_schema(refresh=True)
@celery.task(bind=True, base=BaseTask, track_started=True)
def execute_query(self, query, data_source_id):
# TODO: maybe this should be a class?
def execute_query(self, query, data_source_id, metadata):
start_time = time.time()
logger.info("Loading data source (%d)...", data_source_id)
@@ -237,15 +246,21 @@ def execute_query(self, query, data_source_id):
query_hash = gen_query_hash(query)
query_runner = get_query_runner(data_source.type, data_source.options)
if getattr(query_runner, 'annotate_query', True):
# TODO: annotate with queue name
annotated_query = "/* Task Id: %s, Query hash: %s */ %s" % \
(self.request.id, query_hash, query)
if query_runner.annotate_query():
metadata['Task ID'] = self.request.id
metadata['Query Hash'] = query_hash
metadata['Queue'] = self.request.delivery_info['routing_key']
annotation = u", ".join([u"{}: {}".format(k, v) for k, v in metadata.iteritems()])
logging.debug(u"Annotation: %s", annotation)
annotated_query = u"/* {} */ {}".format(annotation, query)
else:
annotated_query = query
with statsd_client.timer('query_runner.{}.{}.run_time'.format(data_source.type, data_source.name)):
data, error = query_runner(annotated_query)
data, error = query_runner.run_query(annotated_query)
run_time = time.time() - start_time
logger.info("Query finished... data length=%s, error=%s", data and len(data), error)
@@ -255,10 +270,8 @@ def execute_query(self, query, data_source_id):
# Delete query_hash
redis_connection.delete(QueryTask._job_lock_id(query_hash, data_source.id))
# TODO: it is possible that storing the data will fail, and we will need to retry
# while we already marked the job as done
if not error:
query_result = models.QueryResult.store_result(data_source.id, query_hash, query, data, run_time, datetime.datetime.utcnow())
query_result = models.QueryResult.store_result(data_source.id, query_hash, query, data, run_time, utils.utcnow())
else:
raise Exception(error)
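To make the annotation above concrete, here is roughly what the metadata turns into before the query is sent to the data source; all identifiers are invented for illustration:

# Invented values, formatted the same way the annotation code above formats its metadata dict.
metadata = {
    "Query ID": 42,
    "Username": "Scheduled",
    "Task ID": "c0ffee00-0000-0000-0000-000000000000",  # placeholder Celery task id
    "Query Hash": "d41d8cd98f00b204e9800998ecf8427e",   # placeholder hash
    "Queue": "scheduled_queries"
}
annotation = u", ".join(u"{}: {}".format(k, v) for k, v in metadata.items())
annotated_query = u"/* {} */ {}".format(annotation, "SELECT 1")
# e.g. /* Query ID: 42, Username: Scheduled, ... */ SELECT 1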


@@ -4,9 +4,11 @@ import codecs
import decimal
import datetime
import json
import random
import re
import hashlib
import sqlparse
import pytz
COMMENTS_REGEX = re.compile("/\*.*?\*/")
@@ -62,6 +64,14 @@ class SQLMetaData(object):
return False
def utcnow():
"""Return datetime.now value with timezone specified.
Without the timezone data, when the timestamp stored to the database it gets the current timezone of the server,
which leads to errors in calculations.
"""
return datetime.datetime.now(pytz.utc)
def slugify(s):
return re.sub('[^a-z0-9_\-]+', '-', s.lower())
@@ -79,6 +89,14 @@ def gen_query_hash(sql):
return hashlib.md5(sql.encode('utf-8')).hexdigest()
def generate_token(length):
chars = ('abcdefghijklmnopqrstuvwxyz'
'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
'0123456789')
rand = random.SystemRandom()
return ''.join(rand.choice(chars) for x in range(length))
class JSONEncoder(json.JSONEncoder):
"""Custom JSON encoding class, to handle Decimal and datetime.date instances.
"""
@@ -86,9 +104,9 @@ class JSONEncoder(json.JSONEncoder):
if isinstance(o, decimal.Decimal):
return float(o)
if isinstance(o, datetime.date):
if isinstance(o, (datetime.date, datetime.time, datetime.timedelta)):
return o.isoformat()
super(JSONEncoder, self).default(o)
@@ -128,4 +146,4 @@ class UnicodeWriter:
def writerows(self, rows):
for row in rows:
self.writerow(row)
self.writerow(row)


@@ -15,6 +15,10 @@ celery_schedule = {
'cleanup_tasks': {
'task': 'redash.tasks.cleanup_tasks',
'schedule': timedelta(minutes=5)
},
'refresh_schemas': {
'task': 'redash.tasks.refresh_schemas',
'schedule': timedelta(minutes=30)
}
}


@@ -1,9 +1,12 @@
import json
from flask import Flask, make_response
from werkzeug.wrappers import Response
from flask.ext.restful import Api
from redash import settings, utils
from redash.models import db
from redash.admin import init_admin
__version__ = '0.4.0'
@@ -14,6 +17,7 @@ app = Flask(__name__,
api = Api(app)
init_admin(app)
# configure our database
settings.DATABASE_CONFIG.update({'threadlocals': True})
@@ -21,10 +25,13 @@ app.config['DATABASE'] = settings.DATABASE_CONFIG
db.init_app(app)
from redash.authentication import setup_authentication
auth = setup_authentication(app)
setup_authentication(app)
@api.representation('application/json')
def json_representation(data, code, headers=None):
# Flask-Restful checks only for flask.Response but flask-login uses werkzeug.wrappers.Response
if isinstance(data, Response):
return data
resp = make_response(json.dumps(data, cls=utils.JSONEncoder), code)
resp.headers.extend(headers or {})
return resp


@@ -1,6 +1,7 @@
Flask==0.10.1
Flask-Admin==1.1.0
Flask-RESTful==0.2.10
Flask-Login==0.2.9
Flask-Login==0.2.11
Flask-OAuth==0.12
passlib==1.6.2
Jinja2==2.7.2
@@ -9,12 +10,12 @@ Werkzeug==0.9.4
aniso8601==0.82
blinker==1.3
itsdangerous==0.23
peewee==2.2.2
peewee==2.4.7
psycopg2==2.5.2
python-dateutil==2.1
pytz==2013.9
redis==2.7.5
requests==2.2.0
requests==2.3.0
six==1.5.2
sqlparse==0.1.8
wsgiref==0.1.2
@@ -23,3 +24,9 @@ honcho==0.5.0
statsd==2.1.2
gunicorn==18.0
celery==3.1.11
jsonschema==2.4.0
click==3.3
RestrictedPython==3.6.0
wtf-peewee==0.2.3
pysaml2==2.4.0
pycrypto==2.6.1


@@ -98,8 +98,8 @@ if [ ! -f "/opt/redash/.env" ]; then
fi
# Install latest version
REDASH_VERSION=${REDASH_VERSION-0.4.0.b589}
LATEST_URL="https://github.com/EverythingMe/redash/releases/download/v${REDASH_VERSION/.b/%2Bb}/redash.$REDASH_VERSION.tar.gz"
REDASH_VERSION=${REDASH_VERSION-0.6.3.b906}
LATEST_URL="https://github.com/EverythingMe/redash/releases/download/v${REDASH_VERSION}/redash.$REDASH_VERSION.tar.gz"
VERSION_DIR="/opt/redash/redash.$REDASH_VERSION"
REDASH_TARBALL=/tmp/redash.tar.gz
REDASH_TARBALL=/tmp/redash.tar.gz
@@ -146,7 +146,7 @@ if [ $pg_user_exists -ne 0 ]; then
sudo -u redash psql -c "grant select on activity_log, events, queries, dashboards, widgets, visualizations, query_results to redash_reader;" redash
cd /opt/redash/current
sudo -u redash bin/run ./manage.py ds new "re:dash metadata" "pg" "user=redash_reader password=$REDASH_READER_PASSWORD host=localhost dbname=redash"
sudo -u redash bin/run ./manage.py ds new -n "re:dash metadata" -t "pg" -o "{\"user\": \"redash_reader\", \"password\": \"$REDASH_READER_PASSWORD\", \"host\": \"localhost\", \"dbname\": \"redash\"}"
fi
# BigQuery dependencies:


@@ -20,8 +20,12 @@ autorestart=true
stdout_logfile=/opt/redash/logs/api.log
stderr_logfile=/opt/redash/logs/api_error.log
# There are two queue types here: one for ad-hoc queries, and one for the refresh of scheduled queries
# (note that "scheduled_queries" appears only in the queue list of "redash_celery_scheduled").
# The default concurrency level for each is 2 (-c2); you can increase it based on your machine's resources.
[program:redash_celery]
command=/opt/redash/current/bin/run celery worker --app=redash.worker --beat -Qqueries,celery,scheduled_queries
command=/opt/redash/current/bin/run celery worker --app=redash.worker --beat -c2 -Qqueries,celery
process_name=redash_celery
numprocs=1
priority=999
@@ -29,3 +33,13 @@ autostart=true
autorestart=true
stdout_logfile=/opt/redash/logs/celery.log
stderr_logfile=/opt/redash/logs/celery_error.log
[program:redash_celery_scheduled]
command=/opt/redash/current/bin/run celery worker --app=redash.worker -c2 -Qscheduled_queries
process_name=redash_celery_scheduled
numprocs=1
priority=999
autostart=true
autorestart=true
stdout_logfile=/opt/redash/logs/celery.log
stderr_logfile=/opt/redash/logs/celery_error.log


@@ -1,12 +1,17 @@
import os
os.environ['REDASH_REDIS_URL'] = "redis://localhost:6379/5"
import logging
from unittest import TestCase
import datetime
from redash import settings
settings.DATABASE_CONFIG = {
'name': 'circle_test',
'threadlocals': True
}
from redash import models
from redash import models, redis_connection
logging.getLogger('peewee').setLevel(logging.INFO)
@@ -18,4 +23,19 @@ class BaseTestCase(TestCase):
def tearDown(self):
models.db.close_db(None)
models.create_db(False, True)
models.create_db(False, True)
redis_connection.flushdb()
def assertResponseEqual(self, expected, actual):
for k, v in expected.iteritems():
if isinstance(v, datetime.datetime) or isinstance(actual[k], datetime.datetime):
continue
if isinstance(v, list):
continue
if isinstance(v, dict):
self.assertResponseEqual(v, actual[k])
continue
self.assertEqual(v, actual[k], "{} not equal (expected: {}, actual: {}).".format(k, v, actual[k]))


@@ -1,6 +1,5 @@
import datetime
import redash.models
from redash.utils import gen_query_hash
from redash.utils import gen_query_hash, utcnow
class ModelFactory(object):
@@ -45,9 +44,9 @@ user_factory = ModelFactory(redash.models.User,
data_source_factory = ModelFactory(redash.models.DataSource,
name='Test',
name=Sequence('Test {}'),
type='pg',
options='')
options='{"dbname": "test"}')
dashboard_factory = ModelFactory(redash.models.Dashboard,
@@ -58,15 +57,15 @@ query_factory = ModelFactory(redash.models.Query,
name='New Query',
description='',
query='SELECT 1',
ttl=-1,
user=user_factory.create,
is_archived=False,
schedule=None,
data_source=data_source_factory.create)
query_result_factory = ModelFactory(redash.models.QueryResult,
data='{"columns":{}, "rows":[]}',
runtime=1,
retrieved_at=datetime.datetime.utcnow,
retrieved_at=utcnow,
query="SELECT 1",
query_hash=gen_query_hash('SELECT 1'),
data_source=data_source_factory.create)
@@ -83,4 +82,4 @@ widget_factory = ModelFactory(redash.models.Widget,
width=1,
options='{}',
dashboard=dashboard_factory.create,
visualization=visualization_factory.create)
visualization=visualization_factory.create)


@@ -1,10 +1,91 @@
from flask import request
from mock import patch
import time
from tests import BaseTestCase
from redash import models
from redash.google_oauth import create_and_login_user
from tests.factories import user_factory
from redash.authentication import api_key_load_user_from_request, hmac_load_user_from_request, sign
from tests.factories import user_factory, query_factory
from redash.wsgi import app
class TestApiKeyAuthentication(BaseTestCase):
#
# This is a bad way to write these tests, but the way Flask works doesn't make it easy to write them properly...
#
def setUp(self):
super(TestApiKeyAuthentication, self).setUp()
self.api_key = 10
self.query = query_factory.create(api_key=self.api_key)
def test_no_api_key(self):
with app.test_client() as c:
rv = c.get('/api/queries/{0}'.format(self.query.id))
self.assertIsNone(api_key_load_user_from_request(request))
def test_wrong_api_key(self):
with app.test_client() as c:
rv = c.get('/api/queries/{0}'.format(self.query.id), query_string={'api_key': 'whatever'})
self.assertIsNone(api_key_load_user_from_request(request))
def test_correct_api_key(self):
with app.test_client() as c:
rv = c.get('/api/queries/{0}'.format(self.query.id), query_string={'api_key': self.api_key})
self.assertIsNotNone(api_key_load_user_from_request(request))
def test_no_query_id(self):
with app.test_client() as c:
rv = c.get('/api/queries', query_string={'api_key': self.api_key})
self.assertIsNone(api_key_load_user_from_request(request))
def test_user_api_key(self):
user = user_factory.create(api_key="user_key")
with app.test_client() as c:
rv = c.get('/api/queries/', query_string={'api_key': user.api_key})
self.assertEqual(user.id, api_key_load_user_from_request(request).id)
class TestHMACAuthentication(BaseTestCase):
#
# This is a bad way to write these tests, but the way Flask works doesn't make it easy to write them properly...
#
def setUp(self):
super(TestHMACAuthentication, self).setUp()
self.api_key = 10
self.query = query_factory.create(api_key=self.api_key)
self.path = '/api/queries/{0}'.format(self.query.id)
self.expires = time.time() + 1800
def signature(self, expires):
return sign(self.query.api_key, self.path, expires)
def test_no_signature(self):
with app.test_client() as c:
rv = c.get(self.path)
self.assertIsNone(hmac_load_user_from_request(request))
def test_wrong_signature(self):
with app.test_client() as c:
rv = c.get(self.path, query_string={'signature': 'whatever', 'expires': self.expires})
self.assertIsNone(hmac_load_user_from_request(request))
def test_correct_signature(self):
with app.test_client() as c:
rv = c.get('/api/queries/{0}'.format(self.query.id), query_string={'signature': self.signature(self.expires), 'expires': self.expires})
self.assertIsNotNone(hmac_load_user_from_request(request))
def test_no_query_id(self):
with app.test_client() as c:
rv = c.get('/api/queries', query_string={'api_key': self.api_key})
self.assertIsNone(hmac_load_user_from_request(request))
def test_user_api_key(self):
user = user_factory.create(api_key="user_key")
path = '/api/queries/'
with app.test_client() as c:
signature = sign(user.api_key, path, self.expires)
rv = c.get(path, query_string={'signature': signature, 'expires': self.expires, 'user_id': user.id})
self.assertEqual(user.id, hmac_load_user_from_request(request).id)
class TestCreateAndLoginUser(BaseTestCase):
def test_logins_valid_user(self):
user = user_factory.create(email='test@example.com')


@@ -1,6 +1,7 @@
from contextlib import contextmanager
import json
import time
import datetime
from unittest import TestCase
from flask import url_for
from flask.ext.login import current_user
@@ -104,7 +105,11 @@ class DashboardAPITest(BaseTestCase, AuthenticationTestMixin):
with app.test_client() as c, authenticated_user(c):
rv = c.get('/api/dashboards/{0}'.format(d1.slug))
self.assertEquals(rv.status_code, 200)
self.assertDictEqual(json.loads(rv.data), d1.to_dict(with_widgets=True))
expected = d1.to_dict(with_widgets=True)
actual = json.loads(rv.data)
self.assertResponseEqual(expected, actual)
def test_get_non_existint_dashbaord(self):
with app.test_client() as c, authenticated_user(c):
@@ -222,10 +227,13 @@ class QueryAPITest(BaseTestCase, AuthenticationTestMixin):
def test_update_query(self):
query = query_factory.create()
with app.test_client() as c, authenticated_user(c):
other_user = user_factory.create()
with app.test_client() as c, authenticated_user(c, user=other_user):
rv = json_request(c.post, '/api/queries/{0}'.format(query.id), data={'name': 'Testing'})
self.assertEqual(rv.status_code, 200)
self.assertEquals(rv.json['name'], 'Testing')
self.assertEqual(rv.json['name'], 'Testing')
self.assertEqual(rv.json['last_modified_by']['id'], other_user.id)
def test_create_query(self):
user = user_factory.create()
@@ -233,7 +241,7 @@ class QueryAPITest(BaseTestCase, AuthenticationTestMixin):
query_data = {
'name': 'Testing',
'query': 'SELECT 1',
'ttl': 3600,
'schedule': "3600",
'data_source_id': data_source.id
}
@@ -256,9 +264,7 @@ class QueryAPITest(BaseTestCase, AuthenticationTestMixin):
rv = json_request(c.get, '/api/queries/{0}'.format(query.id))
self.assertEquals(rv.status_code, 200)
d = query.to_dict(with_visualizations=True)
d.pop('created_at')
self.assertDictContainsSubset(d, rv.json)
self.assertResponseEqual(rv.json, query.to_dict(with_visualizations=True))
def test_get_all_queries(self):
queries = [query_factory.create() for _ in range(10)]
@@ -294,7 +300,8 @@ class VisualizationAPITest(BaseTestCase):
rv = json_request(c.delete, '/api/visualizations/{0}'.format(visualization.id))
self.assertEquals(rv.status_code, 200)
self.assertEquals(models.Visualization.select().count(), 0)
# =1 because each query has a default table visualization.
self.assertEquals(models.Visualization.select().count(), 1)
def test_update_visualization(self):
visualization = visualization_factory.create()
@@ -312,6 +319,17 @@ class QueryResultAPITest(BaseTestCase, AuthenticationTestMixin):
self.paths = []
super(QueryResultAPITest, self).setUp()
def test_post_result_list(self):
data_source = data_source_factory.create()
query_result = query_result_factory.create()
query = query_factory.create()
with app.test_client() as c, authenticated_user(c):
rv = json_request(c.post, '/api/query_results',
data={'data_source_id': data_source.id,
'query': query.query})
self.assertEquals(rv.status_code, 200)
class JobAPITest(BaseTestCase, AuthenticationTestMixin):
def setUp(self):
@@ -319,58 +337,6 @@ class JobAPITest(BaseTestCase, AuthenticationTestMixin):
super(JobAPITest, self).setUp()
class CsvQueryResultAPITest(BaseTestCase, AuthenticationTestMixin):
def setUp(self):
super(CsvQueryResultAPITest, self).setUp()
self.paths = []
self.query_result = query_result_factory.create()
self.query = query_factory.create()
self.path = '/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id)
# TODO: factor out the HMAC authentication tests
def signature(self, expires):
return sign(self.query.api_key, self.path, expires)
def test_redirect_when_unauthenticated(self):
with app.test_client() as c:
rv = c.get(self.path)
self.assertEquals(rv.status_code, 302)
def test_redirect_for_wrong_signature(self):
with app.test_client() as c:
rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id), query_string={'signature': 'whatever', 'expires': 0})
self.assertEquals(rv.status_code, 302)
def test_redirect_for_correct_signature_and_wrong_expires(self):
with app.test_client() as c:
rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id), query_string={'signature': self.signature(0), 'expires': 0})
self.assertEquals(rv.status_code, 302)
def test_redirect_for_correct_signature_and_no_expires(self):
with app.test_client() as c:
rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id), query_string={'signature': self.signature(time.time()+3600)})
self.assertEquals(rv.status_code, 302)
def test_redirect_for_correct_signature_and_expires_too_long(self):
with app.test_client() as c:
expires = time.time()+(10*3600)
rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id), query_string={'signature': self.signature(expires), 'expires': expires})
self.assertEquals(rv.status_code, 302)
def test_returns_200_for_correct_signature(self):
with app.test_client() as c:
expires = time.time()+1800
rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id), query_string={'signature': self.signature(expires), 'expires': expires})
self.assertEquals(rv.status_code, 200)
def test_returns_200_for_authenticated_user(self):
with app.test_client() as c, authenticated_user(c):
rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id))
self.assertEquals(rv.status_code, 200)
class TestLogin(BaseTestCase):
def setUp(self):
settings.PASSWORD_LOGIN_ENABLED = True
@@ -472,4 +438,45 @@ class TestLogout(BaseTestCase):
self.assertTrue(current_user.is_authenticated())
rv = c.get('/logout')
self.assertEquals(rv.status_code, 302)
self.assertFalse(current_user.is_authenticated())
self.assertFalse(current_user.is_authenticated())
class DataSourceTypesTest(BaseTestCase):
def test_returns_data_for_admin(self):
admin = user_factory.create(groups=['admin', 'default'])
with app.test_client() as c, authenticated_user(c, user=admin):
rv = c.get("/api/data_sources/types")
self.assertEqual(rv.status_code, 200)
def test_returns_403_for_non_admin(self):
with app.test_client() as c, authenticated_user(c):
rv = c.get("/api/data_sources/types")
self.assertEqual(rv.status_code, 403)
class DataSourceTest(BaseTestCase):
def test_returns_400_when_missing_fields(self):
admin = user_factory.create(groups=['admin', 'default'])
with app.test_client() as c, authenticated_user(c, user=admin):
rv = c.post("/api/data_sources")
self.assertEqual(rv.status_code, 400)
rv = json_request(c.post, '/api/data_sources', data={'name': 'DS 1'})
self.assertEqual(rv.status_code, 400)
def test_returns_400_when_configuration_invalid(self):
admin = user_factory.create(groups=['admin', 'default'])
with app.test_client() as c, authenticated_user(c, user=admin):
rv = json_request(c.post, '/api/data_sources',
data={'name': 'DS 1', 'type': 'pg', 'options': '{}'})
self.assertEqual(rv.status_code, 400)
def test_creates_data_source(self):
admin = user_factory.create(groups=['admin', 'default'])
with app.test_client() as c, authenticated_user(c, user=admin):
rv = json_request(c.post, '/api/data_sources',
data={'name': 'DS 1', 'type': 'pg', 'options': '{"dbname": "redash"}'})
self.assertEqual(rv.status_code, 200)


@@ -26,8 +26,10 @@ class ImportTest(BaseTestCase):
self.assertEqual(dashboard.widgets.count(),
reduce(lambda s, row: s + len(row), self.dashboard['widgets'], 0))
self.assertEqual(models.Visualization.select().count(), dashboard.widgets.count()-1)
self.assertEqual(models.Query.select().count(), dashboard.widgets.count()-2)
queries_count = models.Query.select().count()
self.assertEqual(models.Visualization.select().count(), dashboard.widgets.count()+queries_count-1)
self.assertEqual(queries_count, dashboard.widgets.count()-2)
def test_imports_updates_existing_models(self):
importer = import_export.Importer(data_source=data_source_factory.create())


@@ -1,10 +1,12 @@
#encoding: utf8
import datetime
import json
from unittest import TestCase
import mock
from tests import BaseTestCase
from redash import models
from factories import dashboard_factory, query_factory, data_source_factory, query_result_factory, user_factory, widget_factory
from redash.utils import gen_query_hash
from redash.utils import gen_query_hash, utcnow
class DashboardTest(BaseTestCase):
@@ -65,20 +67,102 @@ class QueryTest(BaseTestCase):
self.assertNotIn(q1, queries)
self.assertNotIn(q2, queries)
def test_save_creates_default_visualization(self):
q = query_factory.create()
self.assertEquals(q.visualizations.count(), 1)
def test_save_updates_updated_at_field(self):
# This should be a test of ModelTimestampsMixin, but it's easier to test in context of existing model... :-\
one_day_ago = datetime.datetime.today() - datetime.timedelta(days=1)
q = query_factory.create(created_at=one_day_ago, updated_at=one_day_ago)
q.save()
self.assertNotEqual(q.updated_at, one_day_ago)
class ShouldScheduleNextTest(TestCase):
def test_interval_schedule_that_needs_reschedule(self):
now = datetime.datetime.now()
two_hours_ago = now - datetime.timedelta(hours=2)
self.assertTrue(models.should_schedule_next(two_hours_ago, now, "3600"))
def test_interval_schedule_that_doesnt_need_reschedule(self):
now = datetime.datetime.now()
half_an_hour_ago = now - datetime.timedelta(minutes=30)
self.assertFalse(models.should_schedule_next(half_an_hour_ago, now, "3600"))
def test_exact_time_that_needs_reschedule(self):
now = datetime.datetime.now()
yesterday = now - datetime.timedelta(days=1)
schedule = "{:02d}:00".format(now.hour - 3)
self.assertTrue(models.should_schedule_next(yesterday, now, schedule))
def test_exact_time_that_doesnt_need_reschedule(self):
now = datetime.datetime.now()
yesterday = (now - datetime.timedelta(days=1)).replace(hour=now.hour+3, minute=now.minute+1)
schedule = "{:02d}:00".format(now.hour + 3)
self.assertFalse(models.should_schedule_next(yesterday, now, schedule))
def test_exact_time_with_day_change(self):
now = datetime.datetime.now().replace(hour=0, minute=1)
previous = (now - datetime.timedelta(days=2)).replace(hour=23, minute=59)
schedule = "23:59".format(now.hour + 3)
self.assertTrue(models.should_schedule_next(previous, now, schedule))
class QueryOutdatedQueriesTest(BaseTestCase):
# TODO: this test can be refactored to use mock version of should_schedule_next to simplify it.
def test_outdated_queries_skips_unscheduled_queries(self):
query = query_factory.create(schedule=None)
queries = models.Query.outdated_queries()
self.assertNotIn(query, queries)
def test_outdated_queries_works_with_ttl_based_schedule(self):
two_hours_ago = datetime.datetime.now() - datetime.timedelta(hours=2)
query = query_factory.create(schedule="3600")
query_result = query_result_factory.create(query=query, retrieved_at=two_hours_ago)
query.latest_query_data = query_result
query.save()
queries = models.Query.outdated_queries()
self.assertIn(query, queries)
def test_skips_fresh_queries(self):
half_an_hour_ago = datetime.datetime.now() - datetime.timedelta(minutes=30)
query = query_factory.create(schedule="3600")
query_result = query_result_factory.create(query=query, retrieved_at=half_an_hour_ago)
query.latest_query_data = query_result
query.save()
queries = models.Query.outdated_queries()
self.assertNotIn(query, queries)
def test_outdated_queries_works_with_specific_time_schedule(self):
half_an_hour_ago = utcnow() - datetime.timedelta(minutes=30)
query = query_factory.create(schedule=half_an_hour_ago.strftime('%H:%M'))
query_result = query_result_factory.create(query=query, retrieved_at=half_an_hour_ago-datetime.timedelta(days=1))
query.latest_query_data = query_result
query.save()
queries = models.Query.outdated_queries()
self.assertIn(query, queries)
class QueryArchiveTest(BaseTestCase):
def setUp(self):
super(QueryArchiveTest, self).setUp()
def test_archive_query_sets_flag(self):
query = query_factory.create(ttl=1)
query = query_factory.create()
query.archive()
query = models.Query.get_by_id(query.id)
self.assertEquals(query.is_archived, True)
def test_archived_query_doesnt_return_in_all(self):
query = query_factory.create(ttl=1)
query = query_factory.create(schedule="1")
yesterday = datetime.datetime.now() - datetime.timedelta(days=1)
query_result = models.QueryResult.store_result(query.data_source.id, query.query_hash, query.query, "1",
123, yesterday)
@@ -103,15 +187,53 @@ class QueryArchiveTest(BaseTestCase):
self.assertRaises(models.Widget.DoesNotExist, models.Widget.get_by_id, widget.id)
def test_removes_scheduling(self):
query = query_factory.create(ttl=1)
query = query_factory.create(schedule="1")
query.archive()
query = models.Query.get_by_id(query.id)
self.assertEqual(-1, query.ttl)
self.assertEqual(None, query.schedule)
class DataSourceTest(BaseTestCase):
def test_get_schema(self):
return_value = [{'name': 'table', 'columns': []}]
with mock.patch('redash.query_runner.pg.PostgreSQL.get_schema') as patched_get_schema:
patched_get_schema.return_value = return_value
ds = data_source_factory.create()
schema = ds.get_schema()
self.assertEqual(return_value, schema)
def test_get_schema_uses_cache(self):
return_value = [{'name': 'table', 'columns': []}]
with mock.patch('redash.query_runner.pg.PostgreSQL.get_schema') as patched_get_schema:
patched_get_schema.return_value = return_value
ds = data_source_factory.create()
ds.get_schema()
schema = ds.get_schema()
self.assertEqual(return_value, schema)
self.assertEqual(patched_get_schema.call_count, 1)
def test_get_schema_skips_cache_with_refresh_true(self):
return_value = [{'name': 'table', 'columns': []}]
with mock.patch('redash.query_runner.pg.PostgreSQL.get_schema') as patched_get_schema:
patched_get_schema.return_value = return_value
ds = data_source_factory.create()
ds.get_schema()
new_return_value = [{'name': 'new_table', 'columns': []}]
patched_get_schema.return_value = new_return_value
schema = ds.get_schema(refresh=True)
self.assertEqual(new_return_value, schema)
self.assertEqual(patched_get_schema.call_count, 2)
class QueryResultTest(BaseTestCase):
def setUp(self):
@@ -145,7 +267,7 @@ class QueryResultTest(BaseTestCase):
yesterday = datetime.datetime.now() - datetime.timedelta(days=1)
qr = query_result_factory.create(retrieved_at=yesterday)
found_query_result = models.QueryResult.get_latest(qr.data_source, qr.query, ttl=60)
found_query_result = models.QueryResult.get_latest(qr.data_source, qr.query, max_age=60)
self.assertIsNone(found_query_result)
@@ -153,7 +275,7 @@ class QueryResultTest(BaseTestCase):
yesterday = datetime.datetime.now() - datetime.timedelta(seconds=30)
qr = query_result_factory.create(retrieved_at=yesterday)
found_query_result = models.QueryResult.get_latest(qr.data_source, qr.query, ttl=120)
found_query_result = models.QueryResult.get_latest(qr.data_source, qr.query, max_age=120)
self.assertEqual(found_query_result, qr)
@@ -203,7 +325,7 @@ class TestQueryResultStoreResult(BaseTestCase):
self.query = "SELECT 1"
self.query_hash = gen_query_hash(self.query)
self.runtime = 123
self.utcnow = datetime.datetime.utcnow()
self.utcnow = utcnow()
self.data = "data"
def test_stores_the_result(self):


@@ -1,7 +1,8 @@
import datetime
from mock import patch, call
from mock import patch, call, ANY
from tests import BaseTestCase
from tests.factories import query_factory, query_result_factory
from redash.utils import utcnow
from redash.tasks import refresh_queries
@@ -10,8 +11,8 @@ from redash.tasks import refresh_queries
# 2. test for the refresh_query task
class TestRefreshQueries(BaseTestCase):
def test_enqueues_outdated_queries(self):
query = query_factory.create(ttl=60)
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
query = query_factory.create(schedule="60")
retrieved_at = utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
query.latest_query_data = query_result
@@ -19,11 +20,11 @@ class TestRefreshQueries(BaseTestCase):
with patch('redash.tasks.QueryTask.add_task') as add_job_mock:
refresh_queries()
add_job_mock.assert_called_with(query.query, query.data_source, scheduled=True)
add_job_mock.assert_called_with(query.query, query.data_source, scheduled=True, metadata=ANY)
def test_skips_fresh_queries(self):
query = query_factory.create(ttl=1200)
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
query = query_factory.create(schedule="1200")
retrieved_at = utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
@@ -32,8 +33,8 @@ class TestRefreshQueries(BaseTestCase):
self.assertFalse(add_job_mock.called)
def test_skips_queries_with_no_ttl(self):
query = query_factory.create(ttl=-1)
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
query = query_factory.create(schedule=None)
retrieved_at = utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
@@ -42,10 +43,10 @@ class TestRefreshQueries(BaseTestCase):
self.assertFalse(add_job_mock.called)
def test_enqueues_query_only_once(self):
query = query_factory.create(ttl=60)
query2 = query_factory.create(ttl=60, query=query.query, query_hash=query.query_hash,
query = query_factory.create(schedule="60")
query2 = query_factory.create(schedule="60", query=query.query, query_hash=query.query_hash,
data_source=query.data_source)
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
retrieved_at = utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
query.latest_query_data = query_result
@@ -55,12 +56,12 @@ class TestRefreshQueries(BaseTestCase):
with patch('redash.tasks.QueryTask.add_task') as add_job_mock:
refresh_queries()
add_job_mock.assert_called_once_with(query.query, query.data_source, scheduled=True)
add_job_mock.assert_called_once_with(query.query, query.data_source, scheduled=True, metadata=ANY)#{'Query ID': query.id, 'Username': 'Scheduled'})
def test_enqueues_query_with_correct_data_source(self):
query = query_factory.create(ttl=60)
query2 = query_factory.create(ttl=60, query=query.query, query_hash=query.query_hash)
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
query = query_factory.create(schedule="60")
query2 = query_factory.create(schedule="60", query=query.query, query_hash=query.query_hash)
retrieved_at = utcnow() - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
query.latest_query_data = query_result
@@ -70,13 +71,16 @@ class TestRefreshQueries(BaseTestCase):
with patch('redash.tasks.QueryTask.add_task') as add_job_mock:
refresh_queries()
add_job_mock.assert_has_calls([call(query2.query, query2.data_source, scheduled=True), call(query.query, query.data_source, scheduled=True)], any_order=True)
add_job_mock.assert_has_calls([call(query2.query, query2.data_source, scheduled=True, metadata=ANY),
call(query.query, query.data_source, scheduled=True, metadata=ANY)],
any_order=True)
self.assertEquals(2, add_job_mock.call_count)
def test_enqueues_only_for_relevant_data_source(self):
query = query_factory.create(ttl=60)
query2 = query_factory.create(ttl=3600, query=query.query, query_hash=query.query_hash)
retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
query = query_factory.create(schedule="60")
query2 = query_factory.create(schedule="3600", query=query.query, query_hash=query.query_hash)
import psycopg2
retrieved_at = utcnow().replace(tzinfo=psycopg2.tz.FixedOffsetTimezone(offset=0, name=None)) - datetime.timedelta(minutes=10)
query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
query_hash=query.query_hash)
query.latest_query_data = query_result
@@ -86,4 +90,4 @@ class TestRefreshQueries(BaseTestCase):
with patch('redash.tasks.QueryTask.add_task') as add_job_mock:
refresh_queries()
add_job_mock.assert_called_once_with(query.query, query.data_source, scheduled=True)
add_job_mock.assert_called_once_with(query.query, query.data_source, scheduled=True, metadata=ANY)