Compare commits

...

179 Commits

Author SHA1 Message Date
Arik Fraimovich
27639f83c7 Update index.rst 2015-07-26 22:38:37 +03:00
Arik Fraimovich
c08e6791df Remove version info from conf.py -- rtd doesn't use it 2015-07-26 15:54:56 +03:00
Arik Fraimovich
5c7158b6ae Update vagrant instructions 2015-07-26 15:46:26 +03:00
Arik Fraimovich
b886067a9f Merge pull request #509 from EverythingMe/docs
Moving documentation to ReadTheDocs
2015-07-26 15:25:29 +03:00
Arik Fraimovich
2421de8819 Add Sphinx based documentation to the project. 2015-07-26 15:24:16 +03:00
Arik Fraimovich
9e87e42400 Merge pull request #508 from EverythingMe/cleanup
Some cleanup (updated settings and bootstrap script)
2015-07-26 12:30:31 +03:00
Arik Fraimovich
8c750826e3 Install dependencies for new sources 2015-07-26 11:49:23 +03:00
Arik Fraimovich
b14b6d1773 Give permission to read user(id, name) to redash_reader 2015-07-26 11:45:58 +03:00
Arik Fraimovich
76cb73f4ce Add description to the server param of elastic search 2015-07-26 11:45:25 +03:00
Arik Fraimovich
8854a45598 Update to settings:
1. Enable API key auth by default.
2. Enable query results cleanup by default.
3. Add ElasticSearch to the enabled query runners list.
2015-07-26 11:44:11 +03:00
Arik Fraimovich
228b8c7614 Merge pull request #507 from EverythingMe/cleanup
Fix: when editing alerts show correct column
2015-07-26 11:39:55 +03:00
Arik Fraimovich
5de79213ae Fix: when editing alerts show correct column 2015-07-26 11:39:22 +03:00
Arik Fraimovich
c7d30c8b87 Merge pull request #498 from EverythingMe/feature/ds_admin
Feature: datasources web admin (closes #193)
2015-07-26 11:35:52 +03:00
Arik Fraimovich
076710f0c6 Bump version 2015-07-26 10:24:09 +03:00
Arik Fraimovich
a9172dac00 Fix: if connection fails connection isn't set 2015-07-26 10:24:08 +03:00
Arik Fraimovich
accca51f39 Feature: web interface to edit datasources
* Web interface to add and delete data sources, without the need to ssh
into the server.
* Ability to safely delete datasources -- query results from this data source
are deleted, while queries get assigned a null datasource.
* Updated the BigQuery datasource to use the JSON key file from Google Developer
console. Also both BigQuery and the Google Spreadsheets datasource no longer store
their key on the filesystem, but rather in the DB.
* Minor updates to the Flask Admin.
2015-07-26 10:24:08 +03:00
Arik Fraimovich
5f5774d01b Merge pull request #506 from EverythingMe/small_fixes
Fix: makes sure the totals series is sorted
2015-07-23 15:03:36 +03:00
Arik Fraimovich
00e99d858c Fix: makes sure the totals series is sorted 2015-07-23 15:03:18 +03:00
Arik Fraimovich
da56dc883f Merge pull request #505 from EverythingMe/small_fixes
Fix: Update URL after creating an alert
2015-07-22 20:38:52 +03:00
Arik Fraimovich
02582cab65 Update URL after creating an alert 2015-07-22 20:38:22 +03:00
Arik Fraimovich
bff4d31ada Read HOST from env. 2015-07-22 18:19:27 +03:00
Arik Fraimovich
83554207e1 Merge pull request #504 from EverythingMe/fix/python_result_set
Fix: cohort was wrong if values were not sorted
2015-07-22 18:06:41 +03:00
Arik Fraimovich
1c0c3e0b93 Fix: cohort was wrong if values were not sorted 2015-07-22 18:05:54 +03:00
Arik Fraimovich
5feb563dc9 Merge pull request #503 from EverythingMe/fix/python_result_set
Fix: if you change the result object, python runner wouldn't return any results
2015-07-22 18:03:29 +03:00
Arik Fraimovich
07b88d0b53 Fix: log results were lost 2015-07-22 17:56:49 +03:00
Arik Fraimovich
21f33462d5 Another try at removing optipng from build 2015-07-22 17:43:05 +03:00
Arik Fraimovich
6a9d95f1ac Fix: if you change the result object, python runner wouldn't return any results 2015-07-22 17:36:46 +03:00
Arik Fraimovich
36b80fc4ef Remove optipng from build artifacts 2015-07-22 17:21:50 +03:00
Arik Fraimovich
d89dd2c9af Merge pull request #502 from EverythingMe/feature/alerts
Feature: alerts on query results
2015-07-22 17:14:29 +03:00
Arik Fraimovich
658af526c7 Add alerts to menu 2015-07-22 17:05:31 +03:00
Arik Fraimovich
3d859ec5f3 Feature: alerts for query results.
This is a basic implementation of the alerts feature, where you can
define a simple rule on the last query result to send an alert.

As part of the implementation, Flask-Mail was added to the project
to send emails. This should be useful for making re:dash more "self aware"
(notifying users about potential issues, when queries are done executing,
and more).
2015-07-22 17:05:31 +03:00
Arik Fraimovich
fdff799d23 ng_smart_table: support for inline templates 2015-07-22 17:05:09 +03:00
Arik Fraimovich
5fc0b88b23 ng_smart_table: support for nested objects 2015-07-22 17:05:09 +03:00
Alexander Leibzon
63de247478 add datasources 2015-07-22 14:55:26 +03:00
Arik Fraimovich
5d3caac1b5 Merge pull request #499 from alexanderlz/master
Feature: Support Impala as DataSource
2015-07-22 14:23:55 +03:00
Alexander Leibzon
e4b9d23dfe minor fixes 2015-07-22 14:21:40 +03:00
Alexander Leibzon
890f59a4c9 add get_schema ability to impala 2015-07-22 13:54:00 +03:00
Arik Fraimovich
d4a18ba611 Merge pull request #501 from johnkearney/all_pg_queries_with_no_results
All pg queries with no results
2015-07-21 06:47:33 +03:00
John Kearney
c4502b2925 Add a more user-friendly message when Redshift returns no rows 2015-07-20 14:17:51 -07:00
Alexander Leibzon
1d5efdd93f fixes in accordance with pull req spec 2015-07-20 23:21:02 +03:00
John Kearney
2b95da102e Remove unused exports for env examples 2015-07-20 12:36:10 -07:00
Arik Fraimovich
d512cd0c1d Merge pull request #500 from EverythingMe/feature/login_events
Feature: add event for users logging in
2015-07-20 18:30:57 +03:00
Arik Fraimovich
3dc9c84a98 Feature: add event for users logging in 2015-07-20 18:26:45 +03:00
Alexander Leibzon
4a33b987b8 datasource rename 2015-07-20 02:07:17 +03:00
Alexander Leibzon
f7041977d5 impala datasource fixes 2015-07-20 02:06:15 +03:00
Alexander Leibzon
83bc38579e impala data source 2015-07-19 01:44:48 +03:00
Arik Fraimovich
4b8a94e795 Merge pull request #495 from EverythingMe/fix/bq_param
Fix: Update BigQuery configuration parameter name to avoid confusion.
2015-07-15 19:49:56 +03:00
Arik Fraimovich
406010a7a6 Fix: Update BigQuery configuration parameter name to avoid confusion. 2015-07-15 19:45:55 +03:00
Arik Fraimovich
4f11f28efa Merge pull request #494 from erans/master
MongoDB query runner: cleanup
2015-07-15 18:24:28 +03:00
Eran Sandler
c919602b20 cleanups and shit. 2015-07-15 18:17:55 +03:00
Arik Fraimovich
7702b05635 Merge pull request #493 from erans/master
Fix: a generic way to parse all the input JSON and make sure we replace ISODate to Python date times.
2015-07-15 17:50:21 +03:00
Eran Sandler
5fc7c499a3 stupid auto merge. 2015-07-15 17:48:03 +03:00
Eran Sandler
628240906e Fix: a generic way to parse all the input JSON and make sure we replace ISODate to Python date times. 2015-07-15 17:44:33 +03:00
Arik Fraimovich
41b9b21a20 Merge pull request #492 from erans/master
Fix: MongoDB: Date parsing and dates in aggregation $match
2015-07-15 17:03:17 +03:00
Eran Sandler
dbd3f754ba - Fixed parsing dates in the format of YYYY-MM-DDTHH:mm
- Added handling dates in the aggregate $match
2015-07-15 16:58:10 +03:00
Arik Fraimovich
4ef3c27fe6 Merge pull request #489 from kataring/suport-presto
Feature: Support Presto
2015-07-15 16:50:51 +03:00
Arik Fraimovich
58a005c71b Merge pull request #484 from alexanderlz/master
Feature: Google Spreadsheets support (alpha)
2015-07-14 12:14:57 +03:00
Alexander Leibzon
9d7ff31178 replace camelcase vars 2015-07-14 12:13:04 +03:00
Noriaki Katayama
93d6b01fbf add bigint 2015-07-14 16:59:25 +09:00
Arik Fraimovich
7d57f9d0f1 Merge pull request #488 from EverythingMe/fix/mongo-support-sandbox
Fix: Drop database name check in MongoDB queries  to support sandboxed environments
2015-07-14 08:56:47 +03:00
Arik Fraimovich
e80f470255 Mongo: Drop database name check to support sandboxed environments 2015-07-14 08:51:31 +03:00
Arik Fraimovich
5636cec0eb Merge pull request #487 from erans/master
Fix: Support newer as well as older PyMongo versions
2015-07-14 08:23:58 +03:00
Eran Sandler
912bbc1a4a Added backwards compatibility mode with older versions of PyMongo.
It appears that older versions would return a dictionary from an aggregate operation that had the cursor inside the "result" key.
Newer versions return a new type of cursor called CommandCursor.
2015-07-14 08:19:25 +03:00
Arik Fraimovich
d3bb58167e Merge pull request #486 from EverythingMe/fix/mysql-noerror-onconnect
Fix: no error when failing to connect to a MySQL data source
2015-07-13 19:04:16 +03:00
Arik Fraimovich
2911fa8af7 Bump version. 2015-07-13 18:31:31 +03:00
Arik Fraimovich
4503c6af66 Move the MySQL connect to the try/except block 2015-07-13 18:31:00 +03:00
Arik Fraimovich
7fc2d5ee0b Update bootstrap.sh to use 0.6.3. 2015-07-13 12:00:28 +03:00
Arik Fraimovich
3c9c1466a3 Merge pull request #483 from erans/master
Feature: more fine-grained support for CORS in QueryResultAPI
2015-07-13 11:04:10 +03:00
Eran Sandler
4a7c066bf0 Too many languages... :-( 2015-07-13 10:05:07 +03:00
Alexander Leibzon
b850da52a2 remove .nojekyll, naming convention 2015-07-13 09:56:11 +03:00
Eran Sandler
1a3657572e Added fine-grained control of the CORS header for QueryResultAPI and possibly future APIs. 2015-07-13 09:42:23 +03:00
Arik Fraimovich
666e3281e4 Merge pull request #482 from erans/master
Feature: CORS support for the Query Result API to allow embedding in other domains
2015-07-13 08:41:38 +03:00
Eran Sandler
66084b1a3b minor fixes 2015-07-12 23:07:06 +03:00
Eran Sandler
421470666a use set_from_string. 2015-07-12 23:06:00 +03:00
Eran Sandler
f8e2bc9eca Added configurable CORS support for the Query Result API (to export data as JSON or CSV). Configuration is via a comma-separated environment variable (REDASH_QUERIES_RESULT_CORS) that can include one or more domains 2015-07-12 23:00:50 +03:00
Alexander Leibzon
079fbf33f4 don't execute query if the query string is empty (when changing datasource) 2015-07-12 22:53:08 +03:00
Arik Fraimovich
c195362710 Merge pull request #479 from erans/master
Feature: improved error reporting and new log information support for the Python query runner
2015-07-12 22:30:21 +03:00
Arik Fraimovich
b671dd0431 Merge pull request #480 from EverythingMe/feature/multiple-domains
Feature: support for multiple domains with Google OAuth
2015-07-12 13:36:17 +03:00
Arik Fraimovich
7793f3b257 Feature: support for multiple Google Auth domains 2015-07-12 12:51:45 +03:00
Arik Fraimovich
e09aa6f81a Show message for wrong user/password (closes #275) 2015-07-12 12:43:37 +03:00
Eran Sandler
780e0c0418 - refactored the Python query runner
- Improved error handling to show the real error (including indentation and syntax errors -- it should show the right row number)
- Added support for "print" statements, which become log messages shown in the query window (where you edit). It's on by default. To disable it, simply call 'disable_print_log()'
2015-07-12 12:14:46 +03:00
Alexander Leibzon
43edb009d6 safer handling of worksheet num 2015-07-12 01:28:52 +03:00
Alexander Leibzon
81978c5049 jekyll disable 2015-07-11 22:51:31 +03:00
Alexander Leibzon
239813e195 modify google spreadsheed datasource params, only the credentials file needed 2015-07-11 22:43:07 +03:00
Alexander Leibzon
28dd571a03 google spreadsheets working version 2015-07-10 21:27:16 +03:00
Alexander Leibzon
808126cf91 forgot type 2015-07-09 01:22:08 +03:00
Alexander Leibzon
69a8295f4c forgot type 2015-07-09 01:18:31 +03:00
Arik Fraimovich
a692e3f664 Merge pull request #476 from EverythingMe/feature/api
Feature: support for per user API keys
2015-07-08 21:34:06 +03:00
Arik Fraimovich
6860dde1f7 Set api_key to be unique 2015-07-08 21:29:32 +03:00
Arik Fraimovich
e183affdd0 Feature: support for per user api keys 2015-07-08 20:59:07 +03:00
Noriaki Katayama
6338be3811 modified response 2015-07-08 10:33:55 +09:00
Alexander Leibzon
3ee6371250 initial work on google spreadsheets 2015-07-07 01:39:59 +03:00
Noriaki Katayama
4f38d42182 add presto 2015-07-06 18:22:23 +09:00
Arik Fraimovich
39db74ff20 Merge pull request #475 from hakobera/support-infuxdb
Feature: Support InfluxDB v0.9+
2015-07-05 22:39:40 +03:00
Arik Fraimovich
05c2c21a85 Bump version. 2015-07-05 22:39:12 +03:00
Kazuyuki Honda
00edc29e50 Support influxdb 0.9+ 2015-07-06 01:15:43 +09:00
Arik Fraimovich
3771af0a8c Update bootstrap.sh to use 0.6.2. 2015-07-05 08:30:11 +03:00
Arik Fraimovich
c32c2d43f7 Bump version. 2015-07-04 10:41:06 +03:00
Arik Fraimovich
4e2e3f9077 Merge pull request #472 from BrunoSalerno/map_visualization_options_fixed
map visualization: options fixed
2015-07-04 10:23:55 +03:00
Bruno Salerno
2a27422df9 map visualization: draw_options to scope 2015-07-03 18:02:22 -03:00
Bruno Salerno
f9e0ce8e9c map visualization: options fixed 2015-07-03 16:45:55 -03:00
Arik Fraimovich
a1d49f13d3 Merge pull request #471 from EverythingMe/fix/visualization_api
Fix: opening viz editor resets its options
2015-07-02 13:17:50 +03:00
Arik Fraimovich
26aa199f9c Fix: opening viz editor resets its options 2015-07-02 13:17:32 +03:00
Arik Fraimovich
4c77f3f914 Merge pull request #470 from EverythingMe/fix/visualization_api
Increase limit of tables for showing search.
2015-07-02 11:06:19 +03:00
Arik Fraimovich
d6be792595 Increase limit of tables for showing search. 2015-07-02 11:05:45 +03:00
Arik Fraimovich
59c1ea7f16 Merge pull request #469 from EverythingMe/fix/visualization_api
Fix: map - HTTPS support in tiles/marker
2015-07-02 11:05:25 +03:00
Arik Fraimovich
4d24005eff Fix: map - HTTPS support in tiles/marker 2015-07-02 11:02:44 +03:00
Arik Fraimovich
2dab35b614 Merge pull request #468 from EverythingMe/fix/visualization_api
Fix: visualizations API fixes
2015-07-02 08:51:09 +03:00
Arik Fraimovich
0b61b88f5f Fix: make default options apply to new visualizations 2015-07-02 08:38:08 +03:00
Arik Fraimovich
e5cb58207c Fix: vis title wasn't updating when changing type 2015-07-02 08:26:10 +03:00
Arik Fraimovich
fc17d1af81 Don't cache static assets in debug mode 2015-07-02 08:25:51 +03:00
Arik Fraimovich
e6650e1e2d Merge pull request #467 from BrunoSalerno/leaflet-visualization-marker-path-bug-fixed
leaflet visualization: marker path bug handled
2015-07-01 22:49:12 +03:00
Bruno Salerno
3aa1cd0133 leaflet visualization: marker path bug handled 2015-07-01 16:40:56 -03:00
Arik Fraimovich
e04833c327 Merge pull request #466 from BrunoSalerno/leaflet-visualization
Feature: Map visualization (using Leaflet)
2015-07-01 20:58:43 +03:00
Bruno Salerno
b743cceb60 leaflet visualization: map template margins fixed 2015-07-01 14:53:31 -03:00
Bruno Salerno
a0e134d3b5 leaflet visualization: dynamic height 2015-07-01 14:15:17 -03:00
Bruno Salerno
d7fb2d7458 leaflet-visualization: div size fixed and bounds storing improved 2015-07-01 12:30:48 -03:00
Bruno Salerno
b913ce6022 leaflet visualization: color series named properly 2015-07-01 10:03:43 -03:00
Bruno Salerno
1eb7945d16 leaflet visualization: map bounds are stored and kept 2015-06-30 18:18:34 -03:00
Bruno Salerno
37d0026ee4 leaflet-visualization: point feature 2015-06-30 17:34:31 -03:00
Arik Fraimovich
9cdc2cb2f7 Merge pull request #465 from EverythingMe/fix/time_field_serialize
Feature: ability to control series order in charts.
2015-06-30 09:26:26 +03:00
Arik Fraimovich
a9bff9063e Feature: cli to get status. 2015-06-30 09:25:32 +03:00
Arik Fraimovich
380126ee44 Feature: ability to control series index in charts. 2015-06-30 09:15:00 +03:00
Arik Fraimovich
d8377375b8 Merge pull request #461 from myinsiders/saml
Added SAML authentication support, eg for OneLogin or Okta
2015-06-30 08:25:44 +03:00
Arik Fraimovich
98ff701f9a Merge pull request #464 from EverythingMe/fix/time_field_serialize
Fix #463: support for datetime.time and datetime.timedelta fields
2015-06-29 18:06:25 +03:00
Arik Fraimovich
f5ea3e97d3 Fix: support for datetime.time and datetime.timedelta fields 2015-06-29 18:01:36 +03:00
Mark White
719e96dd2f Added SAML login option to login form 2015-06-28 17:19:57 +01:00
Arik Fraimovich
6c6c0256ba Merge pull request #462 from EverythingMe/fix_codemirror_resize_issue
Fix: refresh CodeMirror size when schema browser appears
2015-06-28 13:46:12 +03:00
Arik Fraimovich
723df51cdd Fix: refresh CodeMirror size when schema browser appears 2015-06-28 13:45:49 +03:00
Arik Fraimovich
a0f4e263b2 Merge pull request #459 from olgakogan/patch-2
Fixed an error in case a query doesn't have last_modified_date
2015-06-28 10:27:04 +03:00
Arik Fraimovich
4706bf8060 Merge pull request #458 from erans/master
Initial and very early support for ElasticSearch query runner
2015-06-28 10:26:26 +03:00
Mark White
f96a9f659a Added Apache license to code taken from Okta 2015-06-26 11:45:24 +01:00
Mark White
63c273f896 Fixed issue in saml login 2015-06-26 11:12:27 +01:00
Mark White
622ac6d781 Fixes to saml callback server name code 2015-06-26 10:26:59 +01:00
Mark White
8dc564a8bc Added configuration of flask server name 2015-06-26 09:06:50 +01:00
Mark White
3ae5baef22 Added OneLogin support 2015-06-25 17:52:00 +01:00
olga
8d819068b5 Fixed an error in case a query doesn't have last_modified_date 2015-06-25 11:31:22 +03:00
Eran Sandler
585e056265 Initial very early release of an ElasticSearch query runner. It only supports Lucene-style queries (single line, similar to what Kibana uses but without aggregations). 2015-06-24 09:53:09 +03:00
Arik Fraimovich
1914ed7c7c Merge pull request #456 from bells17/master
Changed the README's 'Setting up re:dash instance' url to a new url
2015-06-19 10:25:11 +01:00
bells17
bd216e93e7 Changed the README's 'Setting up re:dash instance' url to a new url 2015-06-19 10:20:41 +09:00
Arik Fraimovich
5e351de896 Merge pull request #455 from erans/master
added Mongo JSON serializer to correctly serialize ObjectId + datetime.datetime serialization
2015-06-17 10:59:42 +03:00
Eran Sandler
de0e534c77 removed the unnecessary check for datetime.datetime in the JSON encoder. 2015-06-17 10:58:12 +03:00
Eran Sandler
5fa1f9440d duh! 2015-06-16 11:50:20 +03:00
Eran Sandler
b3ddc5f8b9 removed old conversion of ObjectId to string since it is now part of the new JSON serializer 2015-06-16 11:34:19 +03:00
Eran Sandler
8cde5f9673 added Mongo JSON serializer to correctly serialize ObjectId 2015-06-16 11:27:23 +03:00
Arik Fraimovich
1bb53ca497 Merge pull request #451 from EverythingMe/fix/unicode_in_annotation
Fix: charts with category X axis were not sorted properly
2015-06-11 21:46:26 +03:00
Arik Fraimovich
0a3cd9267f Fix: charts with category x axis were not sorted properly 2015-06-11 21:45:45 +03:00
Arik Fraimovich
075d843354 Merge pull request #449 from EverythingMe/fix/unicode_in_annotation
Fix: schema browser chokes on large schemas
2015-06-10 13:36:53 +03:00
Arik Fraimovich
b14e5e8c0e Fix: schema browser chokes on large schemas 2015-06-10 13:36:05 +03:00
Arik Fraimovich
c9da4be422 Merge pull request #442 from EverythingMe/fix/timezone
Fix: when the server has non UTC timezone, timestamps were wrong
2015-06-07 22:23:46 +03:00
Arik Fraimovich
276ee7c27a Merge pull request #448 from olgakogan/master
supervisord default config: separate queue for ad-hoc and scheduled queries
2015-06-07 17:38:53 +03:00
olga
334040532a changed default concurrency level to 2 per queue 2015-06-07 17:36:24 +03:00
olga
335a3a98b5 separated the queue for ad-hoc and for scheduled queries (someone who runs an ad-hoc query should not wait because scheduled queries are being refreshed at that time) 2015-06-07 17:28:57 +03:00
Arik Fraimovich
b17080a7f5 Merge pull request #446 from EverythingMe/fix/unicode_in_annotation
Fix #443: open table when searching & don't hide columns
2015-06-05 18:13:05 +03:00
Arik Fraimovich
8441c12b01 Fix #443: open table when searching & don't hide columns 2015-06-05 18:08:06 +03:00
Arik Fraimovich
3b4af1b6fa Merge pull request #445 from EverythingMe/fix/unicode_in_annotation
Fix #444: unicode characters in username fail query execution
2015-06-05 16:58:00 +03:00
Arik Fraimovich
c3deb8e2fa Fix #444: unicode characters in username fail query execution 2015-06-05 16:49:25 +03:00
Arik Fraimovich
a60b1686da Fix: when the server has non UTC timezone, timestamps were wrong 2015-06-03 07:58:28 +03:00
Arik Fraimovich
b56e87ceb2 Merge pull request #440 from EverythingMe/fix_ui
Fix: python query runner didn't allow iterating lists
2015-05-31 10:20:32 +03:00
Arik Fraimovich
fc89bcdaf3 Fix: python query runner didn't allow accessing dicts 2015-05-31 10:15:48 +03:00
Arik Fraimovich
15ec8321bb Merge pull request #437 from EverythingMe/fix_ui
Feature: ability to disable x axis labels
2015-05-19 22:24:36 +03:00
Arik Fraimovich
e6ba62485c Merge pull request #436 from EverythingMe/fix_ui
Fix: sorting not working for columns with special characters
2015-05-19 22:15:21 +03:00
Arik Fraimovich
9077b01fb9 Feature: ability to disable x axis labels 2015-05-19 22:15:08 +03:00
Arik Fraimovich
f45281be96 Fix: annotation was failing if query had unicode in it 2015-05-19 22:01:02 +03:00
Arik Fraimovich
a1c8ef9037 Merge pull request #435 from EverythingMe/fix_ui
Fix: string columns with date/time values failed to render.
2015-05-19 22:00:37 +03:00
Arik Fraimovich
f46e8af23f Fix: sorting not working for columns with special characters 2015-05-19 22:00:15 +03:00
Arik Fraimovich
30a89bfd2c Fix: string columns with dates failed to render. 2015-05-19 21:43:50 +03:00
Arik Fraimovich
6312f8738d Merge pull request #433 from stanhu/make-query-link-obvious
Make it obvious that the query link is clickable.
2015-05-17 08:18:19 +03:00
Stan Hu
9e3d5c10c5 Make it obvious that the query link is clickable: underline when hovering and add glyphicon 2015-05-16 22:06:04 -07:00
Arik Fraimovich
59b87ec4fd Merge pull request #434 from erans/master
MongoDB aggregation support + mongo documentation (as comments)
2015-05-17 07:49:20 +03:00
Eran Sandler
27ecf5f25c Merged the older MongoDB code into the new mongodb query runner to support aggregation 2015-05-16 22:22:33 +03:00
Arik Fraimovich
105971c4c8 Merge pull request #432 from stanhu/allow-undefined-max-age
Allow undefined max_age parameter in query_results endpoint
2015-05-15 11:25:24 +03:00
Stan Hu
690f8323c3 Allow undefined max_age parameter in query_results endpoint
An Error 500 would be returned by the endpoint if you attempted to
pass a query parameter to the dashboard since maxAge was undefined in JavaScript.
2015-05-14 22:00:08 -07:00
Arik Fraimovich
20eb110ce3 Fix: update_release_commit_sha should return json 2015-05-14 10:09:57 +03:00
Arik Fraimovich
571c9d0aee Update release manager: update tag commit sha on new release 2015-05-14 09:59:21 +03:00
Arik Fraimovich
0ee7292f16 Merge pull request #431 from EverythingMe/feature/additional_refresh_rates
Feature: additional refresh times (5, 10, 15, 30 minutes)
2015-05-14 09:25:35 +03:00
Arik Fraimovich
8c28392dfd Feature: additional refresh times (5, 10, 15, 30 minutes) 2015-05-13 20:59:39 +03:00
Arik Fraimovich
671f1f4478 Merge pull request #428 from olgakogan/master
Feature: support for column types in MySQL query runner
2015-05-12 13:55:56 +03:00
olga
557d3748be added support for column types in MySQL 2015-05-12 12:01:47 +03:00
95 changed files with 5194 additions and 1366 deletions


@@ -1,9 +1,6 @@
-REDASH_CONNECTION_ADAPTER=pg
-REDASH_CONNECTION_STRING="dbname=data"
-REDASH_STATIC_ASSETS_PATH=../rd_ui/app/
-REDASH_GOOGLE_APPS_DOMAIN=
-REDASH_ADMINS=
-REDASH_WORKERS_COUNT=2
-REDASH_COOKIE_SECRET=
-REDASH_DATABASE_URL='postgresql://rd'
-REDASH_LOG_LEVEL = "INFO"
+export REDASH_STATIC_ASSETS_PATH="../rd_ui/app/"
+export REDASH_LOG_LEVEL="INFO"
+export REDASH_REDIS_URL=redis://localhost:6379/1
+export REDASH_DATABASE_URL="postgresql://redash"
+export REDASH_COOKIE_SECRET=veryverysecret
+export REDASH_GOOGLE_APPS_DOMAIN=

.gitignore (1 line added)

@@ -8,6 +8,7 @@ celerybeat-schedule*
 .#*
 \#*#
 *~
+_build
 # Vagrant related
 .vagrant


@@ -13,7 +13,7 @@ deps:
 pack:
 	sed -ri "s/^__version__ = '([0-9.]*)'/__version__ = '$(FULL_VERSION)'/" redash/__init__.py
-	tar -zcv -f $(FILENAME) --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *
+	tar -zcv -f $(FILENAME) --exclude="optipng*" --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *
 upload:
 	python bin/release_manager.py $(CIRCLE_SHA1) $(BASE_VERSION) $(FILENAME)


@@ -10,7 +10,8 @@
 Prior to **_re:dash_**, we tried to use traditional BI suites and discovered a set of bloated, technically challenged and slow tools/flows. What we were looking for was a more hacker'ish way to look at data, so we built one.
 **_re:dash_** was built to allow fast and easy access to billions of records, that we process and collect using Amazon Redshift ("petabyte scale data warehouse" that "speaks" PostgreSQL).
-Today **_re:dash_** has support for querying multiple databases, including: Redshift, Google BigQuery, PostgreSQL, MySQL, Graphite and custom scripts.
+Today **_re:dash_** has support for querying multiple databases, including: Redshift, Google BigQuery, PostgreSQL, MySQL, Graphite,
+Presto, Google Spreadsheets, Cloudera Impala and custom scripts.
 **_re:dash_** consists of two parts:
@@ -27,7 +28,7 @@ You can try out the demo instance: http://demo.redash.io/ (login with any Google
 ## Getting Started
-* [Setting up re:dash instance](https://github.com/EverythingMe/redash/wiki/Setting-up-re:dash-instance) (includes links to ready made AWS/GCE images).
+* [Setting up re:dash instance](http://redash.io/deployment/setup.html) (includes links to ready made AWS/GCE images).
 * Additional documentation in the [Wiki](https://github.com/everythingme/redash/wiki).


@@ -104,9 +104,26 @@ def get_changelog(commit_sha):
     return "\n".join(changes)

+def update_release_commit_sha(release, commit_sha):
+    params = {
+        'target_commitish': commit_sha,
+    }
+    response = _github_request('patch', 'repos/{}/releases/{}'.format(repo, release['id']), params)
+    if response.status_code != 200:
+        raise exception_from_error("Failed updating commit sha for existing release", response)
+    return response.json()
+
 def update_release(version, build_filepath, commit_sha):
     try:
-        release = get_rc_release(version) or create_release(version, commit_sha)
+        release = get_rc_release(version)
+        if release:
+            release = update_release_commit_sha(release, commit_sha)
+        else:
+            release = create_release(version, commit_sha)
         print "Using release id: {}".format(release['id'])
         remove_previous_builds(release)


@@ -1,63 +0,0 @@
"""
Script to test concurrency (multithreading/multiprocess) issues with the workers. Use with caution.
"""
import json
import atfork
atfork.monkeypatch_os_fork_functions()
import atfork.stdlib_fixer
atfork.stdlib_fixer.fix_logging_module()
import time
from redash.data import worker
from redash import models, data_manager, redis_connection
if __name__ == '__main__':
models.create_db(True, False)
print "Creating data source..."
data_source = models.DataSource.create(name="Concurrency", type="pg", options="dbname=postgres")
print "Clear jobs/hashes:"
redis_connection.delete("jobs")
query_hashes = redis_connection.keys("query_hash_*")
if query_hashes:
redis_connection.delete(*query_hashes)
starting_query_results_count = models.QueryResult.select().count()
jobs_count = 5000
workers_count = 10
print "Creating jobs..."
for i in xrange(jobs_count):
query = "SELECT {}".format(i)
print "Inserting: {}".format(query)
data_manager.add_job(query=query, priority=worker.Job.LOW_PRIORITY,
data_source=data_source)
print "Starting workers..."
workers = data_manager.start_workers(workers_count)
print "Waiting for jobs to be done..."
keep_waiting = True
while keep_waiting:
results_count = models.QueryResult.select().count() - starting_query_results_count
print "QueryResults: {}".format(results_count)
time.sleep(5)
if results_count == jobs_count:
print "Yay done..."
keep_waiting = False
data_manager.stop_workers()
qr_count = 0
for qr in models.QueryResult.select():
number = int(qr.query.split()[1])
data_number = json.loads(qr.data)['rows'][0].values()[0]
if number != data_number:
print "Oops? {} != {} ({})".format(number, data_number, qr.id)
qr_count += 1
print "Verified {} query results.".format(qr_count)
print "Done."

docs/Makefile (new file, 192 lines)

@@ -0,0 +1,192 @@
# Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build
# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext
help:
@echo "Please use \`make <target>' where <target> is one of"
@echo " html to make standalone HTML files"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " applehelp to make an Apple Help Book"
@echo " devhelp to make HTML files and a Devhelp project"
@echo " epub to make an epub"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@echo " latexpdf to make LaTeX files and run them through pdflatex"
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
@echo " text to make text files"
@echo " man to make manual pages"
@echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " xml to make Docutils-native XML files"
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
@echo " coverage to run coverage check of the documentation (if enabled)"
clean:
rm -rf $(BUILDDIR)/*
html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
singlehtml:
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/redash.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/redash.qhc"
applehelp:
$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
@echo
@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
@echo "N.B. You won't be able to view it unless you put it in" \
"~/Library/Documentation/Help or install it in your application" \
"bundle."
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/redash"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/redash"
@echo "# devhelp"
epub:
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
latexpdfja:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
texinfo:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
info:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
gettext:
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
coverage:
$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
@echo "Testing of coverage in the sources finished, look at the " \
"results in $(BUILDDIR)/coverage/python.txt."
xml:
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
@echo
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
pseudoxml:
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."

docs/conf.py (new file, 111 lines)

@@ -0,0 +1,111 @@
# -*- coding: utf-8 -*-
#
# re:dash documentation build configuration file, created by
# sphinx-quickstart on Mon Jul 20 22:40:24 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u're:dash'
copyright = u'2015, EverythingMe'
author = u'EverythingMe'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
exclude_patterns = ['_build']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = False
# Output file base name for HTML help builder.
htmlhelp_basename = 'redashdoc'
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'redash', u're:dash Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'redash', u're:dash Documentation',
author, 'redash', 'One line description of project.',
'Miscellaneous'),
]

docs/datasources.rst (new file, 245 lines)

@@ -0,0 +1,245 @@
Supported Data Sources
######################
re:dash supports several types of data sources (see the full list below),
and they are managed with the CLI (``manage.py``):
Create new data source
======================
.. code:: bash
$ cd /opt/redash/current
$ sudo -u redash bin/run ./manage.py ds new -n {name} -t {type} -o {options}
If you omit any of the options (-n, -t, -o) it will show a prompt asking
for it. Options is a JSON string with the connection parameters. Unless
you're doing some sort of automation, it's probably easier to leave it
empty and fill out the prompt.
See below for the different supported data sources types and the
relevant options string format.
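As an illustrative sketch only (the connection values below are placeholders,
and the option keys are taken from the PostgreSQL section further down), the
JSON options string passed to ``-o`` can be built like this:

.. code:: python

    import json

    # Hypothetical connection parameters for a PostgreSQL ("pg") data source.
    options = {
        "user": "redash",
        "password": "secret",
        "host": "localhost",
        "port": 5439,
        "dbname": "mydb",
    }

    # Pass the printed string as the -o argument of `manage.py ds new`.
    print(json.dumps(options))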
Listing existing data sources
=============================
.. code:: bash
$ sudo -u redash bin/run ./manage.py ds list
Supported data sources
======================
PostgreSQL / Redshift
---------------------
- **Type**: pg
- **Options**:
- User (user)
- Password (password)
- Host (host)
- Port (port)
- Database name (dbname) (mandatory)
- **Options string format (for v0.5 and older)**: "user= password=
host= port=5439 dbname="
MySQL
-----
- **Type**: mysql
- **Options**:
- User (user)
- Password (passwd)
- Host (host)
- Port (port)
- Database name (db) (mandatory)
- **Options string format (for v0.5 and older)**:
"Server=localhost;User=;Pwd=;Database="
Note that you need to install the MySQLDb package as it is not included
in the ``requirements.txt`` file.
Graphite
--------
- **Type**: graphite
- **Options**:
- Url (url) (mandatory)
- User (username)
- Password (password)
- Verify SSL certificate (verify)
- **Options string format**: '{"url":
"https://graphite.yourcompany.com", "auth": ["user", "password"],
"verify": true}'
Google BigQuery
---------------
- **Type**: bigquery
- **Options**:
- Service Account (serviceAccount) (mandatory)
- Project ID (projectId) (mandatory)
- Private Key filename (privateKey) (mandatory)
- **Options string format (for v0.5 and older)**: {"serviceAccount" :
"43242343247-fjdfakljr3r2@developer.gserviceaccount.com",
"privateKey" : "/somewhere/23fjkfjdsfj21312-privatekey.p12",
"projectId" : "myproject-123" }
Notes:
1. To obtain BigQuery credentials follow the guidelines at:
https://developers.google.com/bigquery/authorization#service-accounts
2. You need to install the ``google-api-python-client``,
``oauth2client`` and ``pyopenssl`` packages (PyOpenSSL requires
``libffi-dev`` and ``libssl-dev`` packages), as they are not included
in the ``requirements.txt`` file.
Google Spreadsheets
-------------------
(supported from v0.6.4)
- **Type**: google\_spreadsheets
- **Options**:
- Credentials filename (credentialsFilePath) (mandatory)
Notes:
1. To obtain Google ServiceAccount credentials follow the guidelines at:
https://developers.google.com/console/help/new/#serviceaccounts (save
the JSON version of the credentials file)
2. To be able to load the spreadsheet in re:dash, share it with
your ServiceAccount's email (it can be found in the credentials JSON
file, for example
43242343247-fjdfakljr3r2@developer.gserviceaccount.com). Note: all the
service account details can be seen inside the JSON file you
obtain by following step #1.
3. The query format is "DOC\_UUID\|SHEET\_NUM" (for example
"kjsdfhkjh4rsEFSDFEWR232jkddsfh\|0")
4. You (might) need to install the ``gspread``, ``oauth2client`` and
``dateutil`` packages as they are not included in the
``requirements.txt`` file.
MongoDB
-------
- **Type**: mongo
- **Options**:
- Connection String (connectionString) (mandatory)
- Database name (dbName)
- Replica set name (replicaSetName)
- **Options string format (for v0.5 and older)**: { "connectionString"
: "mongodb://user:password@localhost:27017/mydb", "dbName" : "mydb" }
For ReplicaSet databases use the following connection string:
- **Options string format**: { "connectionString" :
"mongodb://user:password@server1:27017,server2:27017/mydb", "dbName" :
"mydb", "replicaSetName" : "myreplicaSet" }
Notes:
1. You need to install ``pymongo``, as it is not included in the
``requirements.txt`` file.
URL
---
A URL-based data source which requests URLs that conform to the
supported :doc:`results JSON
format </dev/results_format>`.
Very useful in situations where you want to expose the data without
connecting directly to the database.
The query itself inside re:dash will simply contain the URL to be
executed (i.e. http://myserver/path/myquery)
- **Type**: url
- **Options**:
- Url (url)
- **Options string format (optional) (for v0.5 and older)**:
http://myserver/path/
Notes:
1. All URLs must return the supported :doc:`results JSON
format </dev/results_format>`.
2. If the Options string is set, only URLs that are part of the supplied
path can be executed using this data source. Not setting the options
path allows any URL to be executed as long as it returns the
supported :doc:`results JSON
format </dev/results_format>`.
Script
------
Allows executing any executable script residing on the server as long as
its standard output conforms to the supported :doc:`results JSON
format </dev/results_format>`.
This integration is useful in situations where you need more than just a
query and some additional processing has to happen.
Once the path to the scripts is configured in the datasource, the query needs
to contain the file name of the script as well as any command line
parameters the script requires (e.g. myscript.py param1 param2
--param3=value); a sketch of such a script follows the notes below.
- **Type**: script
- **Options**:
- Scripts Path (path) (mandatory)
- **Options string format (for v0.5 and older)**: /path/to/scripts/
Notes:
1. You MUST set a path to execute the scripts, otherwise the data source
will not work.
2. All scripts must be executable, otherwise results won't return
3. The script data source does not allow relative paths in the form of
"../". You may use a relative sub path such as "./mydir/myscript".
4. All scripts must output to the standard output the supported :doc:`results
JSON format </dev/results_format>` and
only that, otherwise the data source will not be able to load the
data.
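Below is a minimal sketch of such a script (the file name ``myscript.py`` and the
column/row values are hypothetical) that emits the supported results JSON format
on its standard output:

.. code:: python

    #!/usr/bin/env python
    import json

    # Build a result matching the format described in the results format document:
    # a list of column descriptors plus a list of row dicts keyed by column name.
    result = {
        "columns": [
            {"name": "date", "friendly_name": "Date", "type": "datetime"},
            {"name": "signups", "friendly_name": "Signups", "type": "integer"},
        ],
        "rows": [
            {"date": "2015-07-01T00:00:00", "signups": 42},
            {"date": "2015-07-02T00:00:00", "signups": 51},
        ],
    }

    # The script must print the JSON (and only the JSON) to standard output.
    print(json.dumps(result))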
Python
------
Execute other queries, manipulate and compute with Python code
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The Python data source allows running Python code in a secure and safe
environment. It won't allow writing files to disk, importing modules
that were not pre-approved in the configuration etc.
One of the benefits of using the Python data source is its ability to
execute queries (or saved queries) which you can store in a variable and
then manipulate/transform/merge with other data and queries.
You can import data analysis libraries such as Pandas, NumPy and SciPy.
This saves the trouble of having outside scripts do the synthesis of
data from multiple sources to create a single data set that can then be
used in dashboards.
- **Type**: Python
- **Options**:
- Allowed Modules in a comma separated list (optional). **NOTE:**
You MUST make sure these modules are installed on the machine
running the Celery workers
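For a rough idea of what a query in the Python data source can look like, here is a
hedged sketch. The ``execute_query`` helper and the ``result`` variable the runner
reads back are assumptions inferred from the description above and from the commit
messages in this changeset, not a documented API; treat the snippet as illustrative
only:

.. code:: python

    # Illustrative only -- helper names are assumptions, not a documented API.
    # Fetch rows produced by a query on another data source, aggregate them in
    # plain Python and hand back a result in the standard results JSON structure.
    rows = execute_query("MySQL production", "SELECT country, revenue FROM sales")["rows"]

    totals = {}
    for row in rows:
        totals[row["country"]] = totals.get(row["country"], 0) + row["revenue"]

    result = {
        "columns": [
            {"name": "country", "friendly_name": "Country", "type": "string"},
            {"name": "revenue", "friendly_name": "Revenue", "type": "float"},
        ],
        "rows": [{"country": c, "revenue": r} for c, r in totals.items()],
    }

    # print output becomes a log message in the query window (per the changelog above).
    print("aggregated {} countries".format(len(totals)))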

docs/dev.rst (new file, 11 lines)

@@ -0,0 +1,11 @@
Developer Information
=====================
.. toctree::
:maxdepth: 2
:glob:
dev/vagrant
dev/*


@@ -0,0 +1,94 @@
Query Execution Model
#####################
Introduction
============
The first data source used with re:dash was Redshift. Because
we had billions of records in Redshift, and some queries were costly to
re-run, from the get-go there was the idea of caching query results in
re:dash.
This was to relieve stress on the Redshift cluster and also to improve
the user experience.
How queries get executed and cached in re:dash
===============================================
Server
------
To make sure each query is executed only once at any given time, we
translate the query to a ``query hash``, using the following code:
.. code:: python
COMMENTS_REGEX = re.compile("/\*.*?\*/")
def gen_query_hash(sql):
sql = COMMENTS_REGEX.sub("", sql)
sql = "".join(sql.split()).lower()
return hashlib.md5(sql.encode('utf-8')).hexdigest()
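For illustration (a self-contained snippet that restates the function above with the
imports it relies on), two formattings of the same SQL map to the same hash and
therefore share one cached query result:

.. code:: python

    import hashlib
    import re

    COMMENTS_REGEX = re.compile("/\*.*?\*/")

    def gen_query_hash(sql):
        # Strip comments and all whitespace, lowercase, then hash.
        sql = COMMENTS_REGEX.sub("", sql)
        sql = "".join(sql.split()).lower()
        return hashlib.md5(sql.encode('utf-8')).hexdigest()

    assert gen_query_hash("SELECT id, name FROM users /* all users */") == \
           gen_query_hash("select id,\n       name\nfrom users")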
When query execution is done, the result gets stored in the
``query_results`` table. We also check for all queries in the
``queries`` table that have the same query hash and update their
reference to the query result we just saved
(`code <https://github.com/EverythingMe/redash/blob/master/redash/models.py#L235>`__).
Client
------
The client (UI) will execute queries in two scenarios:
1. (automatically) When opening a query page of a query that doesn't
have a result yet.
2. (manually) When the user clicks on "Execute".
In each case the client does a POST request to ``/api/query_results``
with the following parameters: ``query`` (the query text),
``data_source_id`` (data source to execute the query with) and ``ttl``.
When loading a cached result, ``ttl`` will be the one set to the query
(if it was set). This is a relic from previous versions, and I'm not
sure if it's really used anymore, as usually we will fetch query result
using its id.
When loading a non cached result, ``ttl`` will be 0 which will "force"
the server to execute the query.
As a response to ``/api/query_results`` the server will send either the
query result (in the case of a cached query) or the job id of the currently
executing query. When a job id is received, the client starts polling on
this id until a query result is received (this is encapsulated in the
``Query`` and ``QueryResult`` services).
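For illustration, the request/poll cycle could look roughly like the sketch below.
Only ``/api/query_results`` and its ``query``/``data_source_id``/``ttl`` parameters
come from the text above; the instance address, the job-status endpoint and the
response field names are assumptions, and authentication is omitted:

.. code:: python

    import time

    import requests

    BASE = "http://redash.example.com"  # hypothetical instance address

    # ttl=0 "forces" the server to execute the query, as described above.
    payload = {"query": "SELECT 1", "data_source_id": 1, "ttl": 0}
    response = requests.post(BASE + "/api/query_results", json=payload).json()

    if "query_result" in response:
        # Cached result returned directly.
        result = response["query_result"]
    else:
        # Otherwise a job id is returned; poll it until a result is ready.
        job_id = response["job"]["id"]
        while True:
            job = requests.get(BASE + "/api/jobs/{}".format(job_id)).json()["job"]
            if job.get("query_result_id"):
                break
            time.sleep(1)
        result = requests.get(BASE + "/api/query_results/{}".format(
            job["query_result_id"])).json()["query_result"]

    print(result["data"]["rows"])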
Ideas on how to implement query parameters
==========================================
Client side only implementation
-------------------------------
(This was actually implemented; see pull request `#363 <https://github.com/EverythingMe/redash/pull/363>`__ for details.)
The basic idea of how to implement parametrized queries is to treat the
query as a template and merge it with parameters taken from the query string
or the UI (or both).
When the caching facility isn't required (with queries that return in a
reasonable time frame), the implementation can be completely client side
and the backend can be "blind" to the parameters -- it just receives the
final query to execute and returns the result.
As one improvement over this, we can let the UI/user specify the TTL
value when making the request to ``/api/query_results``, in which case
caching will be available too, while not having to make the server aware
of the parameters.
Hybrid
------
Another option would be to store the list of possible parameters for a
query, with their default/optional values. In that case, the server can
prefetch all the options and cache them to provide faster results to the
client.


@@ -0,0 +1,30 @@
Data Source Results Format
==========================
All data sources in re:dash return the following results in JSON format:
.. code:: javascript
{
"columns" : [
{
// Required: a unique identifier of the column name in this result
"name" : "COLUMN_NAME",
// Required: friendly name of the column that will appear in the results
"friendly_name" : "FRIENDLY_NAME",
// Optional: If not specified sort might not work well.
// Supported types: integer, float, boolean, string (default), datetime (ISO-8601 text format)
"type" : "VALUE_TYPE"
},
...
],
"rows" : [
{
// name is the column name as it appears in the columns above.
// VALUE is a valid JSON value. For dates it's an ISO-8601 string.
"name" : VALUE,
"name2" : VALUE2
},
...
]
}

docs/dev/vagrant.rst (new file, 49 lines)

@@ -0,0 +1,49 @@
Setting up development environment (using Vagrant)
==================================================
To simplify contribution there is a `Vagrant
box <https://vagrantcloud.com/redash/boxes/dev>`__ available with all
the needed software to run re:dash for development (use it only for
development, for demo purposes there is
`redash/demo <https://vagrantcloud.com/redash/boxes/demo>`__ box and the
AWS/GCE images).
To get started with this box:
1. Make sure you have recent version of
`Vagrant <https://www.vagrantup.com/>`__ installed.
2. Clone the re:dash repository:
``git clone https://github.com/EverythingMe/redash.git``.
3. Change dir into the repository (``cd redash``) and run
``vagrant up``. This might take some time the first time you run it,
as it downloads the Vagrant virtual box.
4. Once Vagrant is ready, ssh into the instance (``vagrant ssh``) and
change dir to ``/opt/redash/current`` -- this is where your local
repository copy is synced to.
5. Copy ``.env`` file into this directory (``cp ../.env ./``).
6. From ``/opt/redash/current/rd_ui`` run ``bower install`` to install
frontend packages. This can be done from your host machine as well,
if you have bower installed.
7. Go back to ``/opt/redash/current`` and install python dependencies
``sudo pip install -r requirements.txt``
8. Apply migrations
::
PYTHONPATH=. bin/run python migrations/0001_allow_delete_query.py
PYTHONPATH=. bin/run python migrations/0002_fix_timestamp_fields.py
PYTHONPATH=. bin/run python migrations/0003_update_data_source_config.py
PYTHONPATH=. bin/run python migrations/0004_allow_null_in_event_user.py
PYTHONPATH=. bin/run python migrations/0005_add_updated_at.py
PYTHONPATH=. bin/run python migrations/0006_queries_last_edit_by.py
PYTHONPATH=. bin/run python migrations/0007_add_schedule_to_queries.py
PYTHONPATH=. bin/run python migrations/0008_make_ds_name_unique.py
PYTHONPATH=. bin/run python migrations/0009_add_api_key_to_user.py
PYTHONPATH=. bin/run python migrations/0010_create_alerts.py
PYTHONPATH=. bin/run python migrations/0010_allow_deleting_datasources.py
PYTHONPATH=. bin/run python migrations/0011_migrate_bigquery_to_json.py
9. Start the server and background workers with
``bin/run honcho start -f Procfile.dev``.
10. Now the server should be available on your host on port 9001 and you
can login with username admin and password admin.

docs/index.rst (new file, 57 lines)

@@ -0,0 +1,57 @@
.. image:: http://redash.io/static/img/redash_logo.png
:width: 200px
Open Source Data Collaboration and Visualization Platform
=========================================================
**re:dash** is our take on freeing the data within our company in a way that will better fit our culture and usage patterns.
Prior to **re:dash**, we tried to use traditional BI suites and discovered a set of bloated, technically challenged and slow tools/flows. What we were looking for was a more hacker'ish way to look at data, so we built one.
**re:dash** was built to allow fast and easy access to billions of records, that we process and collect using Amazon Redshift ("petabyte scale data warehouse" that "speaks" PostgreSQL).
Today **re:dash** has support for querying multiple databases, including: Redshift, Google BigQuery, Google Spreadsheets, PostgreSQL, MySQL, Graphite and custom scripts.
Features
########
1. **Query Editor**: think of `JS Fiddle`_ for SQL queries. It's your way to share data in the organization in an open way, by sharing both the dataset and the query that generated it. This way everyone can peer review not only the resulting dataset but also the process that generated it.
2. **Visualizations**: once you have a dataset, you can create different visualizations out of it. Currently it supports charts, pivot table and cohorts.
3. **Dashboards**: combine several visualizations into a single dashboard.
Demo
####
.. figure:: https://raw.github.com/EverythingMe/redash/screenshots/screenshots.gif
:alt: Screenshots
You can try out the demo instance: `http://demo.redash.io`_ (login with any Google account).
.. _http://demo.redash.io: http://demo.redash.io
.. _JS Fiddle: http://jsfiddle.net
Getting Started
###############
:doc:`Setting up re:dash instance </setup>` (includes links to ready made AWS/GCE images).
Getting Help
############
* Source: https://github.com/everythingme/redash
* Issues: https://github.com/everythingme/redash/issues
* Mailing List: https://groups.google.com/forum/#!forum/redash-users
* Gitter (chat): https://gitter.im/EverythingMe/redash
* Contact Arik, the maintainer directly: arik@everything.me.
TOC
###
.. toctree::
:maxdepth: 2
setup
upgrade
datasources
usage
dev
misc

docs/misc.rst (new file, 10 lines)

@@ -0,0 +1,10 @@
Miscellaneous
=============
.. toctree::
:maxdepth: 2
:glob:
misc/*


@@ -0,0 +1,50 @@
How To: Create a Google Developers Project
==========================================
1. Go to the `Google Developers
Console <https://console.developers.google.com/>`__.
2. Select a project, or create a new one by clicking Create Project:
1. In the Project name field, type in a name for your project.
2. In the Project ID field, optionally type in a project ID for your
project or use the one that the console has created for you. This
ID must be unique world-wide.
3. Click the **Create** button and wait for the project to be
created.
4. Click on the new project name in the list to start editing the
project.
3. In the left sidebar, select the **APIs** item below "APIs & auth". A
list of Google web services appears.
4. Find the **Google+ API** service and set its status to **ON**—notice
that this action moves the service to the top of the list.
5. In the sidebar under "APIs & auth", select **Consent screen**.
- Choose an Email Address and specify a Product Name.
6. In the sidebar under "APIs & auth", select **Credentials**.
7. Click **Create a new Client ID** — a dialog box appears.
- In the **Application type** section of the dialog, select **Web
application**.
- In the **Authorized JavaScript origins** field, enter the origin
for your app. You can enter multiple origins to use with multiple
re:dash instances. Wildcards are not allowed. In the example below,
we assume your re:dash instance address is *redash.example.com*:
::
http://redash.example.com
https://redash.example.com
- In the Authorized redirect URI field, enter the redirect URI
callback:
::
http://redash.example.com/oauth/google_callback
- Click the ``Create Client ID`` button.
8. In the resulting **Client ID for web application** section, copy the
**Client ID** and **Client secret** to your ``.env`` file.
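For reference, the copied values end up as the following lines in your ``.env`` file (the values below are placeholders, not real credentials):
.. code:: bash
export REDASH_GOOGLE_CLIENT_ID="123456789-example.apps.googleusercontent.com"
export REDASH_GOOGLE_CLIENT_SECRET="your-client-secret"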

59
docs/misc/ssl.rst Normal file
View File

@@ -0,0 +1,59 @@
SSL (HTTPS) Setup
=================
If you used the provided images or the bootstrap script, you need to do
the following to start using SSL with your instance:
1. Update the nginx config file (``/etc/nginx/sites-available/redash``)
with SSL configuration (see the example below). Make sure to upload
the certificate to the server and set the paths correctly in the new
config.
2. Open port 443 in your security group (if using AWS or GCE).
.. code:: nginx
upstream redash_servers {
server 127.0.0.1:5000;
}
server {
listen 80;
# Allow accessing /ping without https. Useful when placing behind load balancer.
location /ping {
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_pass http://redash_servers;
}
location / {
# Enforce SSL.
return 301 https://$host$request_uri;
}
}
server {
listen 443 ssl;
# Make sure to set paths to your certificate .pem and .key files.
ssl on;
ssl_certificate /path-to/cert.pem; # or crt
ssl_certificate_key /path-to/cert.key;
access_log /var/log/nginx/redash.access.log;
gzip on;
gzip_types *;
gzip_proxied any;
location / {
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_pass http://redash_servers;
proxy_redirect off;
}
}
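After updating the config and uploading the certificate, reload nginx so the changes take effect (the exact command may vary by distribution):
.. code:: bash
$ sudo service nginx reload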

3
docs/requirements.txt Normal file
View File

@@ -0,0 +1,3 @@
sphinx
sphinx-autobuild
sphinx_rtd_theme

159
docs/setup.rst Normal file
View File

@@ -0,0 +1,159 @@
Setting up re:dash instance
###########################
The `provisioning
script <https://github.com/EverythingMe/redash/blob/master/setup/bootstrap.sh>`__
works on Ubuntu 12.04, Ubuntu 14.04 and Debian Wheezy. This script
installs all needed dependencies and creates a basic setup.
To ease the process, there are also images for AWS and Google Compute
Engine. These images were created with the same provisioning script, using Packer.
Create an instance
==================
Google Compute Engine
---------------------
First, you need to add the images to your account:
.. code:: bash
$ gcloud compute images add redash-063-b906 gs://redash-images/redash.0.6.3.b906.tar.gz
Next you need to launch an instance using this image (the n1-standard-1
instance type is recommended). If you plan on using re:dash with BigQuery,
you can use a dedicated image that comes with BigQuery preconfigured
(using instance permissions):
.. code:: bash
$ gcloud compute images add redash-063-b906-bq gs://redash-images/redash.0.6.3.b906-bq.tar.gz
Note that you need to launch this instance with BigQuery access:
.. code:: bash
$ gcloud compute instances create <your_instance_name> --image redash-060-b812-bq --scopes storage-ro bigquery
(the same can be done from the web interface, just make sure to enable
BigQuery access)
Now proceed to `"Setup" <#setup>`__.
AWS
---
Launch the instance from the pre-baked AMI (for small deployments,
t2.micro should be enough):
- us-east-1:
`ami-47b4612c <https://console.aws.amazon.com/ec2/home?region=us-east-1#LaunchInstanceWizard:ami=ami-47b4612c>`__
- us-west-1:
`ami-a72edde3 <https://console.aws.amazon.com/ec2/home?region=us-west-1#LaunchInstanceWizard:ami=ami-a72edde3>`__
- us-west-2:
`ami-f9d6d5c9 <https://console.aws.amazon.com/ec2/home?region=us-west-2#LaunchInstanceWizard:ami=ami-f9d6d5c9>`__
- eu-central-1:
`ami-72eed46f <https://console.aws.amazon.com/ec2/home?region=eu-central-1#LaunchInstanceWizard:ami=ami-72eed46f>`__
- eu-west-1:
`ami-5a135c2d <https://console.aws.amazon.com/ec2/home?region=eu-west-1#LaunchInstanceWizard:ami=ami-5a135c2d>`__
- sa-east-1:
`ami-2b78f436 <https://console.aws.amazon.com/ec2/home?region=sa-east-1#LaunchInstanceWizard:ami=ami-2b78f436>`__
- ap-northeast-1:
`ami-0a55fd0a <https://console.aws.amazon.com/ec2/home?region=ap-northeast-1#LaunchInstanceWizard:ami=ami-0a55fd0a>`__
- ap-southeast-2:
`ami-9f793ea5 <https://console.aws.amazon.com/ec2/home?region=ap-southeast-2#LaunchInstanceWizard:ami=ami-9f793ea5>`__
- ap-southeast-1:
`ami-12545740 <https://console.aws.amazon.com/ec2/home?region=ap-southeast-1#LaunchInstanceWizard:ami=ami-12545740>`__
Now proceed to `"Setup" <#setup>`__.
Other
-----
Download the provisioning script and run it on your machine (see the example after this list). Note that:
1. You need to run the script as root.
2. It was tested only on Ubuntu 12.04, Ubuntu 14.04 and Debian Wheezy.
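For example (a sketch only; adjust the URL if the script's location changes):
.. code:: bash
$ wget https://raw.githubusercontent.com/EverythingMe/redash/master/setup/bootstrap.sh
$ sudo bash bootstrap.sh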
Setup
=====
Once you've created the instance with either the image or the script, you
should have a running re:dash instance with everything you need to get
started. You can even log in to it with the user "admin" (password:
"admin"). But to make it useful, there are a few more steps you need to
perform manually to complete the setup:
First ssh to your instance and change directory to ``/opt/redash``. If
you're using the GCE image, switch to root (``sudo su``).
Users & Google Authentication setup
-----------------------------------
Most of the settings you need to edit are in the ``/opt/redash/.env``
file.
1. Update the cookie secret (important! otherwise anyone can sign new
cookies and impersonate users): change "veryverysecret" in the line:
``export REDASH_COOKIE_SECRET=veryverysecret`` to something else (you
can use ``pwgen 32 -1`` to generate a random string; see the snippet
after this list for an alternative).
2. By default we create an admin user with the password "admin". You
need to change the password:
- ``cd /opt/redash/current``
- ``sudo -u redash bin/run ./manage.py users password admin {new password}``
3. If you want to use Google OAuth to authenticate users, you need to
create a Google Developers project (see :doc:`instructions </misc/google_developers_project>`)
and then add the needed configuration in the ``.env`` file:
.. code::
export REDASH_GOOGLE_CLIENT_ID=""
export REDASH_GOOGLE_CLIENT_SECRET=""
export REDASH_GOOGLE_APPS_DOMAIN=""
``REDASH_GOOGLE_CLIENT_ID`` and ``REDASH_GOOGLE_CLIENT_SECRET`` are the values you get after registering with Google. ``REDASH_GOOGLE_APPS_DOMAIN`` is used in case you want to limit access to a single Google Apps domain (*if you leave it empty, anyone with a Google account can access your instance*).
4. Restart the web server to apply the configuration changes:
``sudo supervisorctl restart redash_server``.
5. Once you have Google OAuth enabled, you can log in using your Google
Apps account. If you want to grant admin permissions to some users,
you can do it with the ``users grant_admin`` command:
``sudo -u redash bin/run ./manage.py users grant_admin {email}``.
6. If you don't use Google OAuth or just need username/password logins,
you can create additional users using the CLI (see :doc:`documentation </usage/users>`).
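If ``pwgen`` isn't installed on your instance (see step 1 above), one simple alternative for generating a random secret is:
.. code:: bash
# Generates a 64-character hex string; paste it into REDASH_COOKIE_SECRET in /opt/redash/.env.
$ openssl rand -hex 32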
Datasources
-----------
To make re:dash truly useful, you need to set up your data sources in it.
Currently, all data source management is done with the CLI.
See the
:doc:`documentation </datasources>`
for the different options. Your instance comes ready with the dependencies
needed to set up the supported sources.
Follow issue
`#193 <https://github.com/EverythingMe/redash/issues/193>`__ to know
when a UI for managing data sources is implemented.
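For example, to list the data sources that are currently configured, run the following from ``/opt/redash/current`` (the same command is shown in the maintenance documentation):
.. code:: bash
$ sudo -u redash bin/run ./manage.py ds list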
How to upgrade?
---------------
It's recommended to upgrade your re:dash instance once in a while to
benefit from bug fixes and new features. See :doc:`here </upgrade>` for full upgrade
instructions (including a Fabric script).
Notes
=====
- If this is a production setup, you should enforce HTTPS and make sure
you set the cookie secret (see :doc:`instructions </misc/ssl>`).

34
docs/upgrade.rst Normal file
View File

@@ -0,0 +1,34 @@
How to Upgrade
##############
It's recommended to upgrade your re:dash instance once there are new
releases, to benefit from new features and bug fixes. The upgrade
process is relatively simple, and assuming you used one of the base
images we provide, you can just use the
`Fabric <http://www.fabfile.org/>`__ script provided here:
https://gist.github.com/arikfr/440d1403b4aeb76ebaf8.
How to run the Fabric script
============================
1. Install Fabric: ``pip install fabric requests`` (needed only once)
2. Download the ``fabfile.py`` from the gist.
3. Run the script:
``fab -H{your re:dash host} -u{the ssh user for this host} deploy_latest_release``
What the Fabric script does
===========================
Even if you didn't use the image, it's very likely you can reuse most of
this script with small modifications. What this script does is (a rough
shell equivalent follows the list):
1. Find the URL of the latest release tarball (from the `GitHub releases
page <https://github.com/everythingme/redash/releases>`__).
2. Download it.
3. Create a new directory for this version (for example:
``/opt/redash/redash.0.5.0.b685``).
4. Unpack it (``tar -C {dir} -xvf {tarball path}``).
5. Link the ``/opt/redash/.env`` file into this directory.
6. Apply any new migrations.
7. Link ``/opt/redash/current`` to the new version.
8. Restart the web server and celery workers.
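For reference, a rough shell equivalent of those steps (the version and tarball URL below are placeholders; the Fabric script from the gist remains the authoritative version):
.. code:: bash
# Placeholders: substitute the actual version and tarball URL from the releases page.
VERSION=redash.0.5.0.b685
TARBALL_URL="<latest release tarball URL>"
sudo mkdir -p /opt/redash/$VERSION
wget -O /tmp/$VERSION.tar.gz "$TARBALL_URL"
sudo tar -C /opt/redash/$VERSION -xvf /tmp/$VERSION.tar.gz
sudo ln -nfs /opt/redash/.env /opt/redash/$VERSION/.env
# ...apply any new migrations shipped with this release here...
sudo ln -nfs /opt/redash/$VERSION /opt/redash/current
sudo supervisorctl restart redash_server redash_celery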

12
docs/usage.rst Normal file
View File

@@ -0,0 +1,12 @@
Usage
=====
.. toctree::
:maxdepth: 2
:glob:
usage/maintenance.rst
usage/users.rst
usage/*

View File

@@ -0,0 +1,48 @@
ElasticSearch: Querying
#######################
ElasticSearch support currently covers only simple Lucene-style queries
(like Kibana, but without aggregations).
Full-blown JSON-based ElasticSearch queries (including aggregations)
will be added later.
Simple query example:
=====================
- Query the index named "twitter"
- Filter by "user:kimchy"
- Return the fields: "@timestamp", "tweet" and "user"
- Return up to 15 results
- Sort by @timestamp ascending
.. code:: json
{
"index" : "twitter",
"query" : "user:kimchy",
"fields" : ["@timestamp", "tweet", "user"],
"size" : 15,
"sort" : "@timestamp:asc"
}
Simple query on a logstash ElasticSearch instance:
==================================================
- Query the index named "logstash-2015.04.\*" (in this case, all of
April 2015)
- Filter by type:events AND eventName:UserUpgrade AND channel:selfserve
- Return fields: "@timestamp", "userId", "channel", "utm\_source",
"utm\_medium", "utm\_campaign", "utm\_content"
- Return up to 250 results
- Sort by @timestamp ascending
.. code:: json
{
"index" : "logstash-2015.04.*",
"query" : "type:events AND eventName:UserUpgrade AND channel:selfserve",
"fields" : ["@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"],
"size" : 250,
"sort" : "@timestamp:asc"
}

View File

@@ -0,0 +1,94 @@
Ongoing Maintenance and Basic Operations
########################################
Configuration and logs
======================
The supervisor config can be found in
``/opt/redash/supervisord/supervisord.conf``.
There you can see the names of its programs (``redash_celery``,
``redash_server``) and the location of their logs.
Restart
=======
Restarting the Web Server
-------------------------
``sudo supervisorctl restart redash_server``
Restarting Celery Workers
-------------------------
``sudo supervisorctl restart redash_celery``
Restarting Celery Workers & the Queries Queue
---------------------------------------------
If you are handling a problem and need to stop the currently running
queries and reset the queue, follow the steps below.
1. Stop celery: ``sudo supervisorctl stop redash_celery`` (celery might
take some time to stop, if it's in the middle of running a query)
2. Flush redis: ``redis-cli flushdb``
3. Start celery: ``sudo supervisorctl start redash_celery``
Changing the Number of Workers
==============================
By default, Celery will start one worker per CPU core. Because most of
re:dash's tasks are IO bound, the real limit on the number of workers you
can use depends on the amount of memory your machine has. It's
recommended to increase the number of workers to support more concurrent
queries.
1. Open the supervisord configuration file:
``/opt/redash/supervisord/supervisord.conf``
2. Edit the ``[program:redash_celery]`` section and add the ``-c`` parameter,
followed by the number of concurrent workers you need, to the *command* value.
3. Restart supervisord to apply the new configuration:
``sudo /etc/init.d/redash_supervisord restart``.
DB
==
Show the Currently Configured Data Sources
-------------------------------------------
This varies based on the re:dash version and personal preference. You
can do one of the following:
Using the CLI
~~~~~~~~~~~~~
In ``/opt/redash/current``, run:
``sudo -u redash bin/run ./manage.py ds list``
Using the Admin
~~~~~~~~~~~~~~~
(available from version 0.6b797). Browse to ``/admin/datasource``
View the Definition Directly in the DB
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1. Open psql: ``sudo -u redash psql``
2. Run the query: ``SELECT * from data_sources;``
Backup re:dash's DB:
--------------------
``sudo -u redash pg_dump > backup_filename.sql``
Version
=======
See current version:
``bin/run ./manage.py version``

View File

@@ -0,0 +1,74 @@
MongoDB: Querying
#################
Simple query example:
=====================
.. code:: json
{
"collection" : "my_collection",
"query" : {
"date" : {
"$gt" : "ISODate(\"2015-01-15 11:41\")",
},
"type" : 1
},
"fields" : {
"_id" : 1,
"name" : 2
},
"sort" : [
{
"name" : "date",
"direction" : -1
}
]
}
Live example on the demo instance:
http://demo.redash.io/queries/394/source.
Aggregation
===========
Aggregation queries use a syntax similar to PyMongo's; however, to preserve
the sort order, the "$sort" operation takes a regular list, which is
converted into a SON (sorted dictionary) object before execution.
Aggregation query example:
.. code:: json
{
"collection" : "things",
"aggregate" : [
{
"$unwind" : "$tags"
},
{
"$group" : {
"_id" : "$tags",
"count" : { "$sum" : 1 }
}
},
{
"$sort" : [
{
"name" : "count",
"direction" : -1
},
{
"name" : "_id",
"direction" : -1
}
]
}
]
}
Live examples on the demo instance:
1. http://demo.redash.io/queries/393/source
2. http://demo.redash.io/queries/387/source

39
docs/usage/users.rst Normal file
View File

@@ -0,0 +1,39 @@
Users' Management
#################
If you use Google OpenID authentication, then each user from the domains
you allowed will automatically be logged in and have the default
permissions.
If you want to give a user different permissions, or you want to
create password-based users (make sure you enabled this option in the
settings first), you need to use the CLI (``manage.py``).
Create a new user
=================
.. code:: bash
$ bin/run ./manage.py users create --help
usage: users create [-h] [--permissions PERMISSIONS] [--password PASSWORD]
[--google] [--admin]
name email
positional arguments:
name User's full name
email User's email
optional arguments:
-h, --help show this help message and exit
--permissions PERMISSIONS
Comma seperated list of permissions (leave blank for
default).
--password PASSWORD Password for users who don't use Google Auth (leave
blank for prompt).
--google user uses Google Auth to login
--admin set user as admin
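For example, creating a password-based user (the name, email and password below are placeholders):
.. code:: bash
$ bin/run ./manage.py users create --password "s3cr3t" "John Doe" john.doe@example.com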
Grant admin permissions
=======================
``sudo -u redash bin/run ./manage.py users grant_admin {email}``

View File

@@ -2,12 +2,15 @@
""" """
CLI to manage redash. CLI to manage redash.
""" """
import json
from flask.ext.script import Manager from flask.ext.script import Manager
from redash import settings, models, __version__ from redash import settings, models, __version__
from redash.wsgi import app from redash.wsgi import app
from redash.import_export import import_manager from redash.import_export import import_manager
from redash.cli import users, database, data_sources from redash.cli import users, database, data_sources
from redash.monitor import get_status
manager = Manager(app) manager = Manager(app)
manager.add_command("database", database.manager) manager.add_command("database", database.manager)
@@ -21,6 +24,9 @@ def version():
"""Displays re:dash version.""" """Displays re:dash version."""
print __version__ print __version__
@manager.command
def status():
print json.dumps(get_status(), indent=2)
@manager.command @manager.command
def runworkers(): def runworkers():
@@ -37,12 +43,15 @@ def make_shell_context():
@manager.command @manager.command
def check_settings(): def check_settings():
"""Show the settings as re:dash sees them (useful for debugging).""" """Show the settings as re:dash sees them (useful for debugging)."""
from types import ModuleType for name, item in settings.all_settings().iteritems():
print "{} = {}".format(name, item)
for name in dir(settings): @manager.command
item = getattr(settings, name) def send_test_mail():
if not callable(item) and not name.startswith("__") and not isinstance(item, ModuleType): from redash import mail
print "{} = {}".format(name, item) from flask_mail import Message
mail.send(Message(subject="Test Message from re:dash", recipients=[settings.MAIL_DEFAULT_SENDER], body="Test message."))
if __name__ == '__main__': if __name__ == '__main__':

View File

@@ -0,0 +1,27 @@
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models
if __name__ == '__main__':
db.connect_db()
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
column = models.User.api_key
column.null = True
migrate(
migrator.add_column('users', 'api_key', models.User.api_key),
)
for user in models.User.select():
user.save()
migrate(
migrator.add_not_null('users', 'api_key')
)
db.close_db(None)

View File

@@ -0,0 +1,18 @@
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
if __name__ == '__main__':
db.connect_db()
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
migrate(
migrator.drop_not_null('queries', 'data_source_id'),
)
db.close_db(None)

View File

@@ -0,0 +1,8 @@
from redash.models import db, Alert, AlertSubscription
if __name__ == '__main__':
with db.database.transaction():
Alert.create_table()
AlertSubscription.create_table()
db.close_db(None)

View File

@@ -0,0 +1,44 @@
from base64 import b64encode
import json
from redash.models import DataSource
def convert_p12_to_pem(p12file):
from OpenSSL import crypto
with open(p12file, 'rb') as f:
p12 = crypto.load_pkcs12(f.read(), "notasecret")
return crypto.dump_privatekey(crypto.FILETYPE_PEM, p12.get_privatekey())
if __name__ == '__main__':
for ds in DataSource.all():
if ds.type == 'bigquery':
options = json.loads(ds.options)
if 'jsonKeyFile' in options:
continue
new_options = {
'projectId': options['projectId'],
'jsonKeyFile': b64encode(json.dumps({
'client_email': options['serviceAccount'],
'private_key': convert_p12_to_pem(options['privateKey'])
}))
}
ds.options = json.dumps(new_options)
ds.save()
elif ds.type == 'google_spreadsheets':
options = json.loads(ds.options)
if 'jsonKeyFile' in options:
continue
with open(options['credentialsFilePath']) as f:
new_options = {
'jsonKeyFile': b64encode(f.read())
}
ds.options = json.dumps(new_options)
ds.save()

View File

@@ -19,6 +19,7 @@
"trailing": true, "trailing": true,
"smarttabs": true, "smarttabs": true,
"globals": { "globals": {
"angular": false "angular": false,
"_": false
} }
} }

View File

@@ -19,6 +19,7 @@
<link rel="stylesheet" href="/bower_components/pace/themes/pace-theme-minimal.css"> <link rel="stylesheet" href="/bower_components/pace/themes/pace-theme-minimal.css">
<link rel="stylesheet" href="/bower_components/font-awesome/css/font-awesome.css"> <link rel="stylesheet" href="/bower_components/font-awesome/css/font-awesome.css">
<link rel="stylesheet" href="/bower_components/codemirror/addon/hint/show-hint.css"> <link rel="stylesheet" href="/bower_components/codemirror/addon/hint/show-hint.css">
<link rel="stylesheet" href="/bower_components/leaflet/dist/leaflet.css">
<link rel="stylesheet" href="/styles/redash.css"> <link rel="stylesheet" href="/styles/redash.css">
<!-- endbuild --> <!-- endbuild -->
@@ -72,6 +73,12 @@
<li><a href="/queries">Queries</a></li> <li><a href="/queries">Queries</a></li>
</ul> </ul>
</li> </li>
<li>
<a href="/alerts">Alerts</a>
</li>
<li ng-show="currentUser.hasPermission('admin')">
<a href="/data_sources">Data Sources</a>
</li>
</ul> </ul>
<form class="navbar-form navbar-left" role="search" ng-submit="searchQueries()"> <form class="navbar-form navbar-left" role="search" ng-submit="searchQueries()">
<div class="form-group"> <div class="form-group">
@@ -129,6 +136,7 @@
<script src="/bower_components/angular-ui-select/dist/select.js"></script> <script src="/bower_components/angular-ui-select/dist/select.js"></script>
<script src="/bower_components/underscore.string/lib/underscore.string.js"></script> <script src="/bower_components/underscore.string/lib/underscore.string.js"></script>
<script src="/bower_components/marked/lib/marked.js"></script> <script src="/bower_components/marked/lib/marked.js"></script>
<script src="/bower_components/angular-base64-upload/dist/angular-base64-upload.js"></script>
<script src="/scripts/ng_highchart.js"></script> <script src="/scripts/ng_highchart.js"></script>
<script src="/scripts/ng_smart_table.js"></script> <script src="/scripts/ng_smart_table.js"></script>
<script src="/bower_components/angular-ui-bootstrap-bower/ui-bootstrap-tpls.js"></script> <script src="/bower_components/angular-ui-bootstrap-bower/ui-bootstrap-tpls.js"></script>
@@ -137,7 +145,8 @@
<script src="/bower_components/mustache/mustache.js"></script> <script src="/bower_components/mustache/mustache.js"></script>
<script src="/bower_components/canvg/rgbcolor.js"></script> <script src="/bower_components/canvg/rgbcolor.js"></script>
<script src="/bower_components/canvg/StackBlur.js"></script> <script src="/bower_components/canvg/StackBlur.js"></script>
<script src="/bower_components/canvg/canvg.js"></script> <script src="/bower_components/canvg/canvg.js"></script>
<script src="/bower_components/leaflet/dist/leaflet.js"></script>
<!-- endbuild --> <!-- endbuild -->
<!-- build:js({.tmp,app}) /scripts/scripts.js --> <!-- build:js({.tmp,app}) /scripts/scripts.js -->
@@ -149,18 +158,22 @@
<script src="/scripts/controllers/controllers.js"></script> <script src="/scripts/controllers/controllers.js"></script>
<script src="/scripts/controllers/dashboard.js"></script> <script src="/scripts/controllers/dashboard.js"></script>
<script src="/scripts/controllers/admin_controllers.js"></script> <script src="/scripts/controllers/admin_controllers.js"></script>
<script src="/scripts/controllers/data_sources.js"></script>
<script src="/scripts/controllers/query_view.js"></script> <script src="/scripts/controllers/query_view.js"></script>
<script src="/scripts/controllers/query_source.js"></script> <script src="/scripts/controllers/query_source.js"></script>
<script src="/scripts/visualizations/base.js"></script> <script src="/scripts/visualizations/base.js"></script>
<script src="/scripts/visualizations/chart.js"></script> <script src="/scripts/visualizations/chart.js"></script>
<script src="/scripts/visualizations/cohort.js"></script> <script src="/scripts/visualizations/cohort.js"></script>
<script src="/scripts/visualizations/map.js"></script>
<script src="/scripts/visualizations/counter.js"></script> <script src="/scripts/visualizations/counter.js"></script>
<script src="/scripts/visualizations/table.js"></script> <script src="/scripts/visualizations/table.js"></script>
<script src="/scripts/visualizations/pivot.js"></script> <script src="/scripts/visualizations/pivot.js"></script>
<script src="/scripts/directives/directives.js"></script> <script src="/scripts/directives/directives.js"></script>
<script src="/scripts/directives/query_directives.js"></script> <script src="/scripts/directives/query_directives.js"></script>
<script src="/scripts/directives/data_source_directives.js"></script>
<script src="/scripts/directives/dashboard_directives.js"></script> <script src="/scripts/directives/dashboard_directives.js"></script>
<script src="/scripts/filters.js"></script> <script src="/scripts/filters.js"></script>
<script src="/scripts/controllers/alerts.js"></script>
<!-- endbuild --> <!-- endbuild -->
<script> <script>
@@ -175,7 +188,7 @@
currentUser.hasPermission = function(permission) { currentUser.hasPermission = function(permission) {
return this.permissions.indexOf(permission) != -1; return this.permissions.indexOf(permission) != -1;
} };
{{ analytics|safe }} {{ analytics|safe }}
</script> </script>

View File

@@ -13,6 +13,10 @@
<link rel="stylesheet" href="/styles/redash.css"> <link rel="stylesheet" href="/styles/redash.css">
<link rel="stylesheet" href="/styles/login.css"> <link rel="stylesheet" href="/styles/login.css">
<!-- endbuild --> <!-- endbuild -->
<link rel="icon" type="image/png" sizes="32x32" href="/images/favicon-32x32.png">
<link rel="icon" type="image/png" sizes="96x96" href="/images/favicon-96x96.png">
<link rel="icon" type="image/png" sizes="16x16" href="/images/favicon-16x16.png">
</head> </head>
<body> <body>
@@ -26,13 +30,20 @@
<span class="icon-bar"></span> <span class="icon-bar"></span>
<span class="icon-bar"></span> <span class="icon-bar"></span>
</button> </button>
<a class="navbar-brand" href="/"><strong>{{name}}</strong></a> <a class="navbar-brand" href="/"><img src="/images/redash_icon_small.png"/></a>
</div> </div>
</div> </div>
</nav> </nav>
<div class="container"> <div class="container">
<div class="row"> <div class="row">
{% with messages = get_flashed_messages() %}
{% if messages %}
{% for message in messages %}
<div class="alert alert-warning" role="alert">{{ message }}</div>
{% endfor %}
{% endif %}
{% endwith %}
<div class="main"> <div class="main">
{% if show_google_openid %} {% if show_google_openid %}
@@ -48,6 +59,19 @@
{% endif %} {% endif %}
{% if show_saml_login %}
<div class="row">
<a href="/saml/login">SAML Login</a>
</div>
<div class="login-or">
<hr class="hr-or">
<span class="span-or">or</span>
</div>
{% endif %}
<form role="form" method="post" name="login"> <form role="form" method="post" name="login">
<div class="form-group"> <div class="form-group">
<label for="inputUsernameEmail">Username or email</label> <label for="inputUsernameEmail">Username or email</label>

View File

@@ -14,7 +14,8 @@ angular.module('redash', [
'smartTable.table', 'smartTable.table',
'ngResource', 'ngResource',
'ngRoute', 'ngRoute',
'ui.select' 'ui.select',
'naif.base64'
]).config(['$routeProvider', '$locationProvider', '$compileProvider', 'growlProvider', ]).config(['$routeProvider', '$locationProvider', '$compileProvider', 'growlProvider',
function ($routeProvider, $locationProvider, $compileProvider, growlProvider) { function ($routeProvider, $locationProvider, $compileProvider, growlProvider) {
if (featureFlags.clientSideMetrics) { if (featureFlags.clientSideMetrics) {
@@ -80,9 +81,23 @@ angular.module('redash', [
templateUrl: '/views/admin_status.html', templateUrl: '/views/admin_status.html',
controller: 'AdminStatusCtrl' controller: 'AdminStatusCtrl'
}); });
$routeProvider.when('/admin/workers', {
templateUrl: '/views/admin_workers.html', $routeProvider.when('/alerts', {
controller: 'AdminWorkersCtrl' templateUrl: '/views/alerts/list.html',
controller: 'AlertsCtrl'
});
$routeProvider.when('/alerts/:alertId', {
templateUrl: '/views/alerts/edit.html',
controller: 'AlertCtrl'
});
$routeProvider.when('/data_sources/:dataSourceId', {
templateUrl: '/views/data_sources/edit.html',
controller: 'DataSourceCtrl'
});
$routeProvider.when('/data_sources', {
templateUrl: '/views/data_sources/list.html',
controller: 'DataSourcesCtrl'
}); });
$routeProvider.when('/', { $routeProvider.when('/', {

View File

@@ -17,7 +17,7 @@
}; };
refresh(); refresh();
} };
angular.module('redash.admin_controllers', []) angular.module('redash.admin_controllers', [])
.controller('AdminStatusCtrl', ['$scope', 'Events', '$http', '$timeout', AdminStatusCtrl]) .controller('AdminStatusCtrl', ['$scope', 'Events', '$http', '$timeout', AdminStatusCtrl])

View File

@@ -0,0 +1,174 @@
(function() {
var AlertsCtrl = function($scope, Events, Alert) {
Events.record(currentUser, "view", "page", "alerts");
$scope.$parent.pageTitle = "Alerts";
$scope.alerts = []
Alert.query(function(alerts) {
var stateClass = {
'ok': 'label label-success',
'triggered': 'label label-danger',
'unknown': 'label label-warning'
};
_.each(alerts, function(alert) {
alert.class = stateClass[alert.state];
})
$scope.alerts = alerts;
});
$scope.gridConfig = {
isPaginationEnabled: true,
itemsByPage: 50,
maxSize: 8,
};
$scope.gridColumns = [
{
"label": "Name",
"map": "name",
"cellTemplate": '<a href="/alerts/{{dataRow.id}}">{{dataRow.name}}</a> (<a href="/queries/{{dataRow.query.id}}">query</a>)'
},
{
'label': 'Created By',
'map': 'user.name'
},
{
'label': 'State',
'cellTemplate': '<span ng-class="dataRow.class">{{dataRow.state | uppercase}}</span> since <span am-time-ago="dataRow.updated_at"></span>'
},
{
'label': 'Created At',
'cellTemplate': '<span am-time-ago="dataRow.created_at"></span>'
}
];
};
var AlertCtrl = function($scope, $routeParams, $location, growl, Query, Events, Alert) {
$scope.$parent.pageTitle = "Alerts";
$scope.alertId = $routeParams.alertId;
if ($scope.alertId === "new") {
Events.record(currentUser, 'view', 'page', 'alerts/new');
} else {
Events.record(currentUser, 'view', 'alert', $scope.alertId);
}
$scope.onQuerySelected = function(item) {
$scope.selectedQuery = item;
item.getQueryResultPromise().then(function(result) {
$scope.queryResult = result;
$scope.alert.options.column = $scope.alert.options.column || result.getColumnNames()[0];
});
};
if ($scope.alertId === "new") {
$scope.alert = new Alert({options: {}});
} else {
$scope.alert = Alert.get({id: $scope.alertId}, function(alert) {
$scope.onQuerySelected(new Query($scope.alert.query));
});
}
$scope.ops = ['greater than', 'less than', 'equals'];
$scope.selectedQuery = null;
$scope.getDefaultName = function() {
if (!$scope.alert.query) {
return undefined;
}
return _.template("<%= query.name %>: <%= options.column %> <%= options.op %> <%= options.value %>", $scope.alert);
};
$scope.searchQueries = function (term) {
if (!term || term.length < 3) {
return;
}
Query.search({q: term}, function(results) {
$scope.queries = results;
});
};
$scope.saveChanges = function() {
if ($scope.alert.name === undefined || $scope.alert.name === '') {
$scope.alert.name = $scope.getDefaultName();
}
$scope.alert.$save(function(alert) {
growl.addSuccessMessage("Saved.");
if ($scope.alertId === "new") {
$location.path('/alerts/' + alert.id).replace();
}
}, function() {
growl.addErrorMessage("Failed saving alert.");
});
};
};
angular.module('redash.directives').directive('alertSubscribers', ['AlertSubscription', function (AlertSubscription) {
return {
restrict: 'E',
replace: true,
templateUrl: '/views/alerts/subscribers.html',
scope: {
'alertId': '='
},
controller: function ($scope) {
$scope.subscribers = AlertSubscription.query({alertId: $scope.alertId});
}
}
}]);
angular.module('redash.directives').directive('subscribeButton', ['AlertSubscription', 'growl', function (AlertSubscription, growl) {
return {
restrict: 'E',
replace: true,
template: '<button class="btn btn-default btn-xs" ng-click="toggleSubscription()"><i ng-class="class"></i></button>',
controller: function ($scope) {
var updateClass = function() {
if ($scope.subscription) {
$scope.class = "fa fa-eye-slash";
} else {
$scope.class = "fa fa-eye";
}
}
$scope.subscribers.$promise.then(function() {
$scope.subscription = _.find($scope.subscribers, function(subscription) {
return (subscription.user.email == currentUser.email);
});
updateClass();
});
$scope.toggleSubscription = function() {
if ($scope.subscription) {
$scope.subscription.$delete(function() {
$scope.subscribers = _.without($scope.subscribers, $scope.subscription);
$scope.subscription = undefined;
updateClass();
}, function() {
growl.addErrorMessage("Failed saving subscription.");
});
} else {
$scope.subscription = new AlertSubscription({alert_id: $scope.alertId});
$scope.subscription.$save(function() {
$scope.subscribers.push($scope.subscription);
updateClass();
}, function() {
growl.addErrorMessage("Unsubscription failed.");
});
}
}
}
}
}]);
angular.module('redash.controllers')
.controller('AlertsCtrl', ['$scope', 'Events', 'Alert', AlertsCtrl])
.controller('AlertCtrl', ['$scope', '$routeParams', '$location', 'growl', 'Query', 'Events', 'Alert', AlertCtrl])
})();

View File

@@ -23,7 +23,7 @@
}, },
{ {
'label': 'Created By', 'label': 'Created By',
'map': 'user_name' 'map': 'user.name'
}, },
{ {
'label': 'Created At', 'label': 'Created At',
@@ -45,7 +45,6 @@
Query.search({q: $scope.term }, function(results) { Query.search({q: $scope.term }, function(results) {
$scope.queries = _.map(results, function(query) { $scope.queries = _.map(results, function(query) {
query.created_at = moment(query.created_at); query.created_at = moment(query.created_at);
query.user_name = query.user.name;
return query; return query;
}); });
}); });
@@ -93,7 +92,6 @@
$scope.allQueries = _.map(queries, function (query) { $scope.allQueries = _.map(queries, function (query) {
query.created_at = moment(query.created_at); query.created_at = moment(query.created_at);
query.retrieved_at = moment(query.retrieved_at); query.retrieved_at = moment(query.retrieved_at);
query.user_name = query.user.name;
return query; return query;
}); });
@@ -108,7 +106,7 @@
}, },
{ {
'label': 'Created By', 'label': 'Created By',
'map': 'user_name' 'map': 'user.name'
}, },
{ {
'label': 'Created At', 'label': 'Created At',

View File

@@ -0,0 +1,47 @@
(function () {
var DataSourcesCtrl = function ($scope, $location, growl, Events, DataSource) {
Events.record(currentUser, "view", "page", "admin/data_sources");
$scope.$parent.pageTitle = "Data Sources";
$scope.dataSources = DataSource.query();
$scope.openDataSource = function(datasource) {
$location.path('/data_sources/' + datasource.id);
};
$scope.deleteDataSource = function(event, datasource) {
event.stopPropagation();
Events.record(currentUser, "delete", "datasource", datasource.id);
datasource.$delete(function(resource) {
growl.addSuccessMessage("Data source deleted succesfully.");
this.$parent.dataSources = _.without(this.dataSources, resource);
}.bind(this), function(httpResponse) {
console.log("Failed to delete data source: ", httpResponse.status, httpResponse.statusText, httpResponse.data);
growl.addErrorMessage("Failed to delete data source.");
});
}
};
var DataSourceCtrl = function ($scope, $routeParams, $http, $location, Events, DataSource) {
Events.record(currentUser, "view", "page", "admin/data_source");
$scope.$parent.pageTitle = "Data Sources";
$scope.dataSourceId = $routeParams.dataSourceId;
if ($scope.dataSourceId == "new") {
$scope.dataSource = new DataSource({options: {}});
} else {
$scope.dataSource = DataSource.get({id: $routeParams.dataSourceId});
}
$scope.$watch('dataSource.id', function(id) {
if (id != $scope.dataSourceId && id !== undefined) {
$location.path('/data_sources/' + id).replace();
}
});
};
angular.module('redash.controllers')
.controller('DataSourcesCtrl', ['$scope', '$location', 'growl', 'Events', 'DataSource', DataSourcesCtrl])
.controller('DataSourceCtrl', ['$scope', '$routeParams', '$http', '$location', 'Events', 'DataSource', DataSourceCtrl])
})();

View File

@@ -15,6 +15,7 @@
maxAge = -1; maxAge = -1;
} }
$scope.showLog = false;
$scope.queryResult = $scope.query.getQueryResult(maxAge, parameters); $scope.queryResult = $scope.query.getQueryResult(maxAge, parameters);
} }
@@ -48,15 +49,19 @@
$scope.isQueryOwner = (currentUser.id === $scope.query.user.id) || currentUser.hasPermission('admin'); $scope.isQueryOwner = (currentUser.id === $scope.query.user.id) || currentUser.hasPermission('admin');
$scope.canViewSource = currentUser.hasPermission('view_source'); $scope.canViewSource = currentUser.hasPermission('view_source');
$scope.dataSources = DataSource.get(function(dataSources) { $scope.dataSources = DataSource.query(function(dataSources) {
updateSchema(); updateSchema();
$scope.query.data_source_id = $scope.query.data_source_id || dataSources[0].id;
$scope.dataSource = _.find(dataSources, function(ds) { return ds.id == $scope.query.data_source_id; }); if ($scope.query.isNew()) {
$scope.query.data_source_id = $scope.query.data_source_id || dataSources[0].id;
$scope.dataSource = _.find(dataSources, function(ds) { return ds.id == $scope.query.data_source_id; });
}
}); });
// in view mode, latest dataset is always visible // in view mode, latest dataset is always visible
// source mode changes this behavior // source mode changes this behavior
$scope.showDataset = true; $scope.showDataset = true;
$scope.showLog = false;
$scope.lockButton = function(lock) { $scope.lockButton = function(lock) {
$scope.queryExecuting = lock; $scope.queryExecuting = lock;
@@ -99,6 +104,9 @@
}; };
$scope.executeQuery = function() { $scope.executeQuery = function() {
if (!$scope.query.query) {
return;
}
getQueryResult(0); getQueryResult(0);
$scope.lockButton(true); $scope.lockButton(true);
$scope.cancelling = false; $scope.cancelling = false;
@@ -110,21 +118,21 @@
$scope.queryResult.cancelExecution(); $scope.queryResult.cancelExecution();
Events.record(currentUser, 'cancel_execute', 'query', $scope.query.id); Events.record(currentUser, 'cancel_execute', 'query', $scope.query.id);
}; };
$scope.archiveQuery = function(options, data) { $scope.archiveQuery = function(options, data) {
if (data) { if (data) {
data.id = $scope.query.id; data.id = $scope.query.id;
} else { } else {
data = $scope.query; data = $scope.query;
} }
$scope.isDirty = false; $scope.isDirty = false;
options = _.extend({}, { options = _.extend({}, {
successMessage: 'Query archived', successMessage: 'Query archived',
errorMessage: 'Query could not be archived' errorMessage: 'Query could not be archived'
}, options); }, options);
return Query.delete({id: data.id}, function() { return Query.delete({id: data.id}, function() {
$scope.query.is_archived = true; $scope.query.is_archived = true;
$scope.query.schedule = null; $scope.query.schedule = null;
@@ -197,6 +205,10 @@
if (status === 'done' || status === 'failed') { if (status === 'done' || status === 'failed') {
$scope.lockButton(false); $scope.lockButton(false);
} }
if ($scope.queryResult.getLog() != null) {
$scope.showLog = true;
}
}); });
$scope.openScheduleForm = function() { $scope.openScheduleForm = function() {

View File

@@ -0,0 +1,76 @@
(function () {
'use strict';
var directives = angular.module('redash.directives');
// Angular strips data- from the directive, so data-source-form becomes sourceForm...
directives.directive('sourceForm', ['$http', 'growl', function ($http, growl) {
return {
restrict: 'E',
replace: true,
templateUrl: '/views/data_sources/form.html',
scope: {
'dataSource': '='
},
link: function ($scope) {
var setType = function(types) {
if ($scope.dataSource.type === undefined) {
$scope.dataSource.type = types[0].type;
return types[0];
}
$scope.type = _.find(types, function (t) {
return t.type == $scope.dataSource.type;
});
};
$scope.files = {};
$scope.$watchCollection('files', function() {
_.each($scope.files, function(v, k) {
if (v) {
$scope.dataSource.options[k] = v.base64;
}
});
});
$http.get('/api/data_sources/types').success(function (types) {
setType(types);
$scope.dataSourceTypes = types;
_.each(types, function (type) {
_.each(type.configuration_schema.properties, function (prop, name) {
if (name == 'password' || name == 'passwd') {
prop.type = 'password';
}
if (_.string.endsWith(name, "File")) {
prop.type = 'file';
}
prop.required = _.contains(type.configuration_schema.required, name);
});
});
});
$scope.$watch('dataSource.type', function(current, prev) {
if (prev !== current) {
if (prev !== undefined) {
$scope.dataSource.options = {};
}
setType($scope.dataSourceTypes);
}
});
$scope.saveChanges = function() {
$scope.dataSource.$save(function() {
growl.addSuccessMessage("Saved.");
}, function() {
growl.addErrorMessage("Failed saving.");
});
}
}
}
}]);
})();

View File

@@ -8,7 +8,7 @@
'query': '=', 'query': '=',
'visualization': '=?' 'visualization': '=?'
}, },
template: '<a ng-href="{{link}}" class="query-link">{{query.name}}</a>', template: '<small><span class="glyphicon glyphicon-link"></span></small> <a ng-href="{{link}}" class="query-link">{{query.name}}</a>',
link: function(scope, element) { link: function(scope, element) {
scope.link = '/queries/' + scope.query.id; scope.link = '/queries/' + scope.query.id;
if (scope.visualization) { if (scope.visualization) {
@@ -139,6 +139,8 @@
additionalHints = _.unique(keywords); additionalHints = _.unique(keywords);
} }
codemirror.refresh();
}); });
$scope.$watch('syntax', function(syntax) { $scope.$watch('syntax', function(syntax) {
@@ -239,7 +241,14 @@
value: "60", value: "60",
name: 'Every minute' name: 'Every minute'
} }
] ];
_.each([5, 10, 15, 30], function(i) {
$scope.refreshOptions.push({
value: String(i*60),
name: "Every " + i + " minutes"
})
});
_.each(_.range(1, 13), function (i) { _.each(_.range(1, 13), function (i) {
$scope.refreshOptions.push({ $scope.refreshOptions.push({

View File

@@ -145,7 +145,7 @@
if (!hasTotalsAlready) { if (!hasTotalsAlready) {
this.addSeries({ this.addSeries({
data: _.values(data), data: _.sortBy(_.values(data), 'x'),
type: 'line', type: 'line',
name: 'Total' name: 'Total'
}, false) }, false)
@@ -308,22 +308,6 @@
// We check either for true or undefined for backward compatibility. // We check either for true or undefined for backward compatibility.
var series = scope.series; var series = scope.series;
if (chartOptions['sortX'] === true || chartOptions['sortX'] === undefined) {
var seriesCopy = [];
_.each(series, function (s) {
// make a copy of series data, so we don't override original.
var fieldName = 'x';
if (s.data.length > 0 && _.has(s.data[0], 'name')) {
fieldName = 'name';
};
var sorted = _.extend({}, s, {data: _.sortBy(s.data, fieldName)});
seriesCopy.push(sorted);
});
series = seriesCopy;
}
// If this is a chart that has just one row for multiple columns, sort // If this is a chart that has just one row for multiple columns, sort
// by the Y values. For example: // by the Y values. For example:
@@ -376,6 +360,23 @@
}); });
} }
} }
if (chartOptions['sortX'] === true || chartOptions['sortX'] === undefined) {
var seriesCopy = [];
_.each(series, function (s) {
// make a copy of series data, so we don't override original.
var fieldName = 'x';
if (s.data.length > 0 && _.has(s.data[0], 'name')) {
fieldName = 'name';
};
var sorted = _.extend({}, s, {data: _.sortBy(s.data, fieldName)});
seriesCopy.push(sorted);
});
series = seriesCopy;
}
scope.chart.counters.color = 0; scope.chart.counters.color = 0;

File diff suppressed because it is too large.

View File

@@ -1,4 +1,28 @@
(function () { (function () {
function QueryResultError(errorMessage) {
this.errorMessage = errorMessage;
}
QueryResultError.prototype.getError = function() {
return this.errorMessage;
};
QueryResultError.prototype.getStatus = function() {
return 'failed';
};
QueryResultError.prototype.getData = function() {
return null;
};
QueryResultError.prototype.getLog = function() {
return null;
};
QueryResultError.prototype.getChartData = function() {
return null;
};
var QueryResult = function ($resource, $timeout, $q) { var QueryResult = function ($resource, $timeout, $q) {
var QueryResultResource = $resource('/api/query_results/:id', {id: '@id'}, {'post': {'method': 'POST'}}); var QueryResultResource = $resource('/api/query_results/:id', {id: '@id'}, {'post': {'method': 'POST'}});
var Job = $resource('/api/jobs/:id', {id: '@id'}); var Job = $resource('/api/jobs/:id', {id: '@id'});
@@ -12,6 +36,8 @@
var columnTypes = {}; var columnTypes = {};
// TODO: we should stop manipulating incoming data, and switch to relaying on the column type set by the backend.
// This logic is prone to errors, and better be removed. Kept for now, for backward compatability.
_.each(this.query_result.data.rows, function (row) { _.each(this.query_result.data.rows, function (row) {
_.each(row, function (v, k) { _.each(row, function (v, k) {
if (angular.isNumber(v)) { if (angular.isNumber(v)) {
@@ -30,7 +56,7 @@
_.each(this.query_result.data.columns, function(column) { _.each(this.query_result.data.columns, function(column) {
if (columnTypes[column.name]) { if (columnTypes[column.name]) {
if (column.type == null) { if (column.type == null || column.type == 'string') {
column.type = columnTypes[column.name]; column.type = columnTypes[column.name];
} }
} }
@@ -42,7 +68,7 @@
} else { } else {
this.status = undefined; this.status = undefined;
} }
} };
function QueryResult(props) { function QueryResult(props) {
this.deferred = $q.defer(); this.deferred = $q.defer();
@@ -93,6 +119,14 @@
return this.job.error; return this.job.error;
} }
QueryResult.prototype.getLog = function() {
if (!this.query_result.data || !this.query_result.data.log || this.query_result.data.log.length == 0) {
return null;
}
return this.query_result.data.log;
}
QueryResult.prototype.getUpdatedAt = function () { QueryResult.prototype.getUpdatedAt = function () {
return this.query_result.retrieved_at || this.job.updated_at * 1000.0 || this.updatedAt; return this.query_result.retrieved_at || this.job.updated_at * 1000.0 || this.updatedAt;
} }
@@ -404,20 +438,23 @@
return '/queries/' + this.id + '/source'; return '/queries/' + this.id + '/source';
}; };
Query.prototype.isNew = function() {
return this.id === undefined;
};
Query.prototype.hasDailySchedule = function() { Query.prototype.hasDailySchedule = function() {
return (this.schedule && this.schedule.match(/\d\d:\d\d/) !== null); return (this.schedule && this.schedule.match(/\d\d:\d\d/) !== null);
} };
Query.prototype.scheduleInLocalTime = function() { Query.prototype.scheduleInLocalTime = function() {
var parts = this.schedule.split(':'); var parts = this.schedule.split(':');
return moment.utc().hour(parts[0]).minute(parts[1]).local().format('HH:mm'); return moment.utc().hour(parts[0]).minute(parts[1]).local().format('HH:mm');
} };
Query.prototype.getQueryResult = function (maxAge, parameters) { Query.prototype.getQueryResult = function (maxAge, parameters) {
// if (ttl == undefined) { if (!this.query) {
// ttl = this.ttl; return;
// } }
var queryText = this.query; var queryText = this.query;
var queryParameters = this.getParameters(); var queryParameters = this.getParameters();
@@ -452,6 +489,8 @@
} }
} else if (this.data_source_id) { } else if (this.data_source_id) {
this.queryResult = QueryResult.get(this.data_source_id, queryText, maxAge, this.id); this.queryResult = QueryResult.get(this.data_source_id, queryText, maxAge, this.id);
} else {
return new QueryResultError("Please select data source to run this query.");
} }
return this.queryResult; return this.queryResult;
@@ -488,14 +527,41 @@
var DataSource = function ($resource) { var DataSource = function ($resource) {
var actions = { var actions = {
'get': {'method': 'GET', 'cache': true, 'isArray': true}, 'get': {'method': 'GET', 'cache': false, 'isArray': false},
'query': {'method': 'GET', 'cache': false, 'isArray': true},
'getSchema': {'method': 'GET', 'cache': true, 'isArray': true, 'url': '/api/data_sources/:id/schema'} 'getSchema': {'method': 'GET', 'cache': true, 'isArray': true, 'url': '/api/data_sources/:id/schema'}
}; };
var DataSourceResource = $resource('/api/data_sources/:id', {id: '@id'}, actions); var DataSourceResource = $resource('/api/data_sources/:id', {id: '@id'}, actions);
return DataSourceResource; return DataSourceResource;
} };
var AlertSubscription = function ($resource) {
var resource = $resource('/api/alerts/:alertId/subscriptions/:userId', {alertId: '@alert_id', userId: '@user.id'});
return resource;
};
var Alert = function ($resource, $http) {
var actions = {
save: {
method: 'POST',
transformRequest: [function(data) {
var newData = _.extend({}, data);
if (newData.query_id === undefined) {
newData.query_id = newData.query.id;
delete newData.query;
}
return newData;
}].concat($http.defaults.transformRequest)
}
};
var resource = $resource('/api/alerts/:id', {id: '@id'}, actions);
return resource;
};
var Widget = function ($resource, Query) { var Widget = function ($resource, Query) {
var WidgetResource = $resource('/api/widgets/:id', {id: '@id'}); var WidgetResource = $resource('/api/widgets/:id', {id: '@id'});
@@ -522,5 +588,7 @@
.factory('QueryResult', ['$resource', '$timeout', '$q', QueryResult]) .factory('QueryResult', ['$resource', '$timeout', '$q', QueryResult])
.factory('Query', ['$resource', 'QueryResult', 'DataSource', Query]) .factory('Query', ['$resource', 'QueryResult', 'DataSource', Query])
.factory('DataSource', ['$resource', DataSource]) .factory('DataSource', ['$resource', DataSource])
.factory('Alert', ['$resource', '$http', Alert])
.factory('AlertSubscription', ['$resource', AlertSubscription])
.factory('Widget', ['$resource', 'Query', Widget]); .factory('Widget', ['$resource', 'Query', Widget]);
})(); })();

File diff suppressed because one or more lines are too long.

View File

@@ -121,7 +121,7 @@
query: '=', query: '=',
queryResult: '=', queryResult: '=',
visualization: '=?', visualization: '=?',
openEditor: '=?', openEditor: '@',
onNewSuccess: '=?' onNewSuccess: '=?'
}, },
link: function (scope, element, attrs) { link: function (scope, element, attrs) {
@@ -150,9 +150,13 @@
scope.$watch('visualization.type', function (type, oldType) { scope.$watch('visualization.type', function (type, oldType) {
// if not edited by user, set name to match type // if not edited by user, set name to match type
if (type && oldType != type && scope.visualization && !scope.visForm.name.$dirty) { if (type && oldType != type && scope.visualization && !scope.visForm.name.$dirty) {
// poor man's titlecase scope.visualization.name = _.string.titleize(scope.visualization.type);
scope.visualization.name = scope.visualization.type[0] + scope.visualization.type.slice(1).toLowerCase();
} }
if (type && oldType != type && scope.visualization) {
scope.visualization.options = Visualization.visualizations[scope.visualization.type].defaultOptions;
}
}); });
scope.submit = function () { scope.submit = function () {

View File

@@ -112,9 +112,6 @@
scope.columnTypes = { scope.columnTypes = {
"X": "x", "X": "x",
// "X (Date time)": "x",
// "X (Linear)": "x-linear",
// "X (Category)": "x-category",
"Y": "y", "Y": "y",
"Series": "series", "Series": "series",
"Unused": "unused" "Unused": "unused"
@@ -166,7 +163,7 @@
scope.visualization.options.seriesOptions[s] = {'type': scope.visualization.options.globalSeriesType, 'yAxis': 0}; scope.visualization.options.seriesOptions[s] = {'type': scope.visualization.options.globalSeriesType, 'yAxis': 0};
} }
scope.visualization.options.seriesOptions[s].zIndex = scope.visualization.options.seriesOptions[s].zIndex === undefined ? i : scope.visualization.options.seriesOptions[s].zIndex; scope.visualization.options.seriesOptions[s].zIndex = scope.visualization.options.seriesOptions[s].zIndex === undefined ? i : scope.visualization.options.seriesOptions[s].zIndex;
scope.visualization.options.seriesOptions[s].index = scope.visualization.options.seriesOptions[s].index === undefined ? i : scope.visualization.options.seriesOptions[s].index;
}); });
scope.zIndexes = _.range(scope.series.length); scope.zIndexes = _.range(scope.series.length);
scope.yAxes = [[0, 'left'], [1, 'right']]; scope.yAxes = [[0, 'left'], [1, 'right']];
@@ -227,6 +224,12 @@
} }
}); });
scope.visualization.options.xAxis = scope.visualization.options.xAxis || {};
scope.visualization.options.xAxis.labels = scope.visualization.options.xAxis.labels || {};
if (scope.visualization.options.xAxis.labels.enabled === undefined) {
scope.visualization.options.xAxis.labels.enabled = true;
}
scope.xAxisType = (scope.visualization.options.xAxis && scope.visualization.options.xAxis.type) || scope.xAxisType; scope.xAxisType = (scope.visualization.options.xAxis && scope.visualization.options.xAxis.type) || scope.xAxisType;
xAxisUnwatch = scope.$watch("xAxisType", function (xAxisType) { xAxisUnwatch = scope.$watch("xAxisType", function (xAxisType) {

View File

@@ -26,7 +26,10 @@
if ($scope.queryResult.getData() == null) { if ($scope.queryResult.getData() == null) {
} else { } else {
var sortedData = _.sortBy($scope.queryResult.getData(), "date"); var sortedData = _.sortBy($scope.queryResult.getData(),function(r) {
return r['date'] + r['day_number'] ;
});
var grouped = _.groupBy(sortedData, "date"); var grouped = _.groupBy(sortedData, "date");
var maxColumns = _.reduce(grouped, function(memo, data){ var maxColumns = _.reduce(grouped, function(memo, data){
return (data.length > memo)? data.length : memo; return (data.length > memo)? data.length : memo;

View File

@@ -0,0 +1,238 @@
'use strict';
(function() {
var module = angular.module('redash.visualization');
module.config(['VisualizationProvider', function(VisualizationProvider) {
var renderTemplate =
'<map-renderer ' +
'options="visualization.options" query-result="queryResult">' +
'</map-renderer>';
var editTemplate = '<map-editor></map-editor>';
var defaultOptions = {
'height': 500,
'draw': 'Marker',
'classify':'none'
};
VisualizationProvider.registerVisualization({
type: 'MAP',
name: 'Map',
renderTemplate: renderTemplate,
editorTemplate: editTemplate,
defaultOptions: defaultOptions
});
}
]);
module.directive('mapRenderer', function() {
return {
restrict: 'E',
templateUrl: '/views/visualizations/map.html',
link: function($scope, elm, attrs) {
var setBounds = function(){
var b = $scope.visualization.options.bounds;
if(b){
$scope.map.fitBounds([[b._southWest.lat, b._southWest.lng],[b._northEast.lat, b._northEast.lng]]);
} else if ($scope.features.length > 0){
var group= new L.featureGroup($scope.features);
$scope.map.fitBounds(group.getBounds());
}
};
$scope.$watch('[queryResult && queryResult.getData(), visualization.options.draw,visualization.options.latColName,'+
'visualization.options.lonColName,visualization.options.classify,visualization.options.classify]',
function() {
var marker = function(lat,lon){
if (lat == null || lon == null) return;
return L.marker([lat, lon]);
};
var heatpoint = function(lat,lon,obj){
if (lat == null || lon == null) return;
var color = 'red';
if (obj &&
obj[$scope.visualization.options.classify] &&
$scope.visualization.options.classification){
var v = $.grep($scope.visualization.options.classification,function(e){
return e.value == obj[$scope.visualization.options.classify];
});
if (v.length >0) color = v[0].color;
}
var style = {
fillColor:color,
fillOpacity:0.5,
stroke:false
};
return L.circleMarker([lat,lon],style)
};
var color = function(val){
// taken from http://jsfiddle.net/xgJ2e/2/
var h= Math.floor((100 - val) * 120 / 100);
var s = Math.abs(val - 50)/50;
var v = 1;
var rgb, i, data = [];
if (s === 0) {
rgb = [v,v,v];
} else {
h = h / 60;
i = Math.floor(h);
data = [v*(1-s), v*(1-s*(h-i)), v*(1-s*(1-(h-i)))];
switch(i) {
case 0:
rgb = [v, data[2], data[0]];
break;
case 1:
rgb = [data[1], v, data[0]];
break;
case 2:
rgb = [data[0], v, data[2]];
break;
case 3:
rgb = [data[0], data[1], v];
break;
case 4:
rgb = [data[2], data[0], v];
break;
default:
rgb = [v, data[0], data[1]];
break;
}
}
return '#' + rgb.map(function(x){
return ("0" + Math.round(x*255).toString(16)).slice(-2);
}).join('');
};
// Following line is used to avoid "Couldn't autodetect L.Icon.Default.imagePath" error
// https://github.com/Leaflet/Leaflet/issues/766#issuecomment-7741039
L.Icon.Default.imagePath = L.Icon.Default.imagePath || "//api.tiles.mapbox.com/mapbox.js/v2.2.1/images";
function getBounds(e) {
$scope.visualization.options.bounds = $scope.map.getBounds();
}
var queryData = $scope.queryResult.getData();
var classify = $scope.visualization.options.classify;
if (queryData) {
$scope.visualization.options.classification = [];
for (var row in queryData) {
if (queryData[row][classify] &&
$.grep($scope.visualization.options.classification, function (e) {
return e.value == queryData[row][classify]
}).length == 0) {
$scope.visualization.options.classification.push({value: queryData[row][classify], color: null});
}
}
$.each($scope.visualization.options.classification, function (i, c) {
c.color = color(parseInt((i / $scope.visualization.options.classification.length) * 100));
});
if (!$scope.map) {
$scope.map = L.map(elm[0].children[0].children[0])
}
L.tileLayer('//{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
attribution: '&copy; <a href="http://osm.org/copyright">OpenStreetMap</a> contributors'
}).addTo($scope.map);
$scope.features = $scope.features || [];
var tmp_features = [];
var lat_col = $scope.visualization.options.latColName || 'lat';
var lon_col = $scope.visualization.options.lonColName || 'lon';
for (var row in queryData) {
var feature;
if ($scope.visualization.options.draw == 'Marker') {
feature = marker(queryData[row][lat_col], queryData[row][lon_col])
} else if ($scope.visualization.options.draw == 'Color') {
feature = heatpoint(queryData[row][lat_col], queryData[row][lon_col], queryData[row])
}
if (!feature) continue;
var obj_description = '<ul style="list-style-type: none;padding-left: 0">';
for (var k in queryData[row]){
obj_description += "<li>" + k + ": " + queryData[row][k] + "</li>";
}
obj_description += '</ul>';
feature.bindPopup(obj_description);
tmp_features.push(feature);
}
$.each($scope.features, function (i, f) {
$scope.map.removeLayer(f);
});
$scope.features = tmp_features;
$.each($scope.features, function (i, f) {
f.addTo($scope.map)
});
setBounds();
$scope.map.on('focus',function(){
$scope.map.on('moveend', getBounds);
});
$scope.map.on('blur',function(){
$scope.map.off('moveend', getBounds);
});
// We redraw the map if it was loaded in a hidden tab
if ($('a[href="#'+$scope.visualization.id+'"]').length > 0) {
$('a[href="#'+$scope.visualization.id+'"]').on('click', function () {
setTimeout(function() {
$scope.map.invalidateSize(false);
setBounds();
},500);
});
}
}
}, true);
$scope.$watch('visualization.options.height', function() {
if (!$scope.map) return;
$scope.map.invalidateSize(false);
setBounds();
});
}
}
});
module.directive('mapEditor', function() {
return {
restrict: 'E',
templateUrl: '/views/visualizations/map_editor.html',
link: function($scope, elm, attrs) {
$scope.draw_options = ['Marker','Color'];
$scope.classify_columns = $scope.queryResult.columnNames.concat('none');
}
}
});
})();
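
For orientation, the renderer above consumes plain query-result rows: unless overridden in the map editor it looks for columns named lat and lon, skips rows where either is missing, and (in "Color" mode) groups rows by the chosen classification column. A hedged sketch of a compatible result set, with made-up column names and values:

# Illustrative only: rows shaped the way map-renderer expects them.
# 'lat'/'lon' are the renderer's default column names; 'region' stands in for
# whatever column is picked under "Classify by column" in the map editor.
rows = [
    {"lat": 32.0853, "lon": 34.7818, "region": "center"},
    {"lat": 31.7683, "lon": 35.2137, "region": "east"},
    {"lat": None,    "lon": 34.9896, "region": "north"},   # skipped: missing latitude
]

# Mirrors the null checks in marker()/heatpoint(): rows missing a coordinate are dropped.
points = [r for r in rows if r["lat"] is not None and r["lon"] is not None]
print(len(points))  # 2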


@@ -97,7 +97,16 @@ a.navbar-brand img {
}
.panel-heading .query-link:hover {
-  text-decoration: none;
+  text-decoration: underline;
+}
+.list-group-item.clickable {
+  cursor: pointer;
+}
+.list-group-item.clickable:focus,
+.list-group-item.clickable:hover {
+  background-color: #f5f5f5;
}
/* angular-growl */
@@ -330,6 +339,11 @@ div.table-name {
  cursor: pointer;
}
+.blankslate {
+  text-align: center;
+  padding: 30px;
+}
/*
bootstrap's hidden-xs class adds display:block when not hidden
use this class when you need to keep the original display value
@@ -339,3 +353,7 @@ use this class when you need to keep the original display value
    display: none !important;
  }
}
+.log-container {
+  margin-bottom: 50px;
+}


@@ -0,0 +1,58 @@
<div class="container">
<ol class="breadcrumb">
<li><a href="/alerts">Alerts</a></li>
<li class="active">{{alert.name || getDefaultName() || "New"}}</li>
</ol>
<div class="row">
<div class="col-md-8">
<form name="alertForm" ng-submit="saveChanges()" class="form">
<div class="form-group">
<label>Query</label>
<ui-select ng-model="alert.query" theme="bootstrap" reset-search-input="false" on-select="onQuerySelected($item)">
<ui-select-match placeholder="Search a query by name">{{$select.selected.name}}</ui-select-match>
<ui-select-choices repeat="q in queries"
refresh="searchQueries($select.search)"
refresh-delay="0">
<div ng-bind-html="q.name | highlight: $select.search | trustAsHtml"></div>
</ui-select-choices>
</ui-select>
</div>
<div class="form-group" ng-show="selectedQuery">
<label>Name</label>
<input type="string" placeholder="{{getDefaultName()}}" class="form-control" ng-model="alert.name">
</div>
<div ng-show="queryResult" class="form-horizontal">
<div class="form-group">
<label class="control-label col-md-2">Value column</label>
<div class="col-md-4">
<select ng-options="name for name in queryResult.getColumnNames()" ng-model="alert.options.column" class="form-control"></select>
</div>
<label class="control-label col-md-2">Value</label>
<div class="col-md-4">
<p class="form-control-static">{{queryResult.getData()[0][alert.options.column]}}</p>
</div>
</div>
<div class="form-group">
<label class="control-label col-md-2">Op</label>
<div class="col-md-4">
<select ng-options="name for name in ops" ng-model="alert.options.op" class="form-control"></select>
</div>
<label class="control-label col-md-2">Reference</label>
<div class="col-md-4">
<input type="number" class="form-control" ng-model="alert.options.value" placeholder="reference value" required/>
</div>
</div>
</div>
<div class="form-group">
<button class="btn btn-primary" ng-disabled="!alertForm.$valid">Save</button>
</div>
</form>
</div>
<div class="col-md-4" ng-if="alert.id">
<alert-subscribers alert-id="alert.id"></alert-subscribers>
</div>
</div>
</div>


@@ -0,0 +1,16 @@
<div class="container">
<ol class="breadcrumb">
<li class="active">Alerts</li>
</ol>
<div class="row">
<div class="col-md-12">
<p>
<a href="/alerts/new" class="btn btn-default"><i class="fa fa-plus"></i> New Alert</a>
</p>
<smart-table rows="alerts" columns="gridColumns"
config="gridConfig"
class="table table-condensed table-hover"></smart-table>
</div>
</div>
</div>


@@ -0,0 +1,4 @@
<div>
<strong>Subscribers</strong> <subscribe-button alert-id="alertId" subscribers="subscribers"></subscribe-button><br/>
<img ng-src="{{s.user.gravatar_url}}" class="img-circle" alt="{{s.user.name}}" ng-repeat="s in subscribers"/>
</div>


@@ -0,0 +1,11 @@
<div class="container">
<ol class="breadcrumb">
<li><a href="/data_sources">Data Sources</a></li>
<li class="active">{{dataSource.name || "New"}}</li>
</ol>
<div class="row">
<div class="col-md-8">
<data-source-form data-data-source="dataSource" />
</div>
</div>
</div>


@@ -0,0 +1,20 @@
<form name="dataSourceForm" ng-submit="saveChanges()">
<div class="form-group">
<label for="dataSourceName">Name</label>
<input type="string" class="form-control" name="dataSourceName" ng-model="dataSource.name" required>
</div>
<div class="form-group">
<label for="type">Type</label>
<select name="type" class="form-control" ng-options="type.type as type.name for type in dataSourceTypes" ng-model="dataSource.type"></select>
</div>
<div class="form-group" ng-class='{"has-error": !inner.input.$valid}' ng-form="inner" ng-repeat="(name, input) in type.configuration_schema.properties">
<label>{{input.title || name | capitalize}}</label>
<input name="input" type="{{input.type}}" class="form-control" ng-model="dataSource.options[name]" ng-required="input.required"
ng-if="input.type !== 'file'" accesskey="tab">
<input name="input" type="file" class="form-control" ng-model="files[name]" ng-required="input.required"
base-sixty-four-input
ng-if="input.type === 'file'">
</div>
<button class="btn btn-primary" ng-disabled="!dataSourceForm.$valid">Save</button>
</form>


@@ -0,0 +1,18 @@
<div class="container">
<ol class="breadcrumb">
<li class="active">Data Sources</li>
</ol>
<div class="row">
<div class="col-md-4">
<div class="list-group">
<div class="list-group-item clickable" ng-repeat="dataSource in dataSources" ng-click="openDataSource(dataSource)">
<i class="fa fa-database"></i> {{dataSource.name}}
<button class="btn btn-xs btn-danger pull-right" ng-click="deleteDataSource($event, dataSource)">Delete</button>
</div>
<a ng-href="/data_sources/new" class="list-group-item">
<i class="fa fa-plus"></i> Add Data Source
</a>
</div>
</div>
</div>
</div>


@@ -84,16 +84,16 @@
</div>
</div>
<div class="col-md-3 schema-container" ng-show="hasSchema">
-  <div>
+  <div ng-show="schema.length < 200">
    <input type="text" placeholder="Search schema..." class="form-control" ng-model="schemaFilter">
  </div>
  <div class="schema-browser">
-    <div ng-repeat="table in schema | filter:schemaFilter">
+    <div ng-repeat="table in schema | filter:schemaFilter track by table.name">
      <div class="table-name" ng-click="table.collapsed = !table.collapsed">
        <i class="fa fa-table"></i> <strong><span title="{{table.name}}">{{table.name}}</span></strong>
      </div>
-      <div collapse="table.collapsed">
-        <div ng-repeat="column in table.columns | filter:schemaFilter" style="padding-left:16px;">{{column}}</div>
+      <div collapse="table.collapsed && !schemaFilter">
+        <div ng-repeat="column in table.columns track by column" style="padding-left:16px;">{{column}}</div>
      </div>
    </div>
  </div>
@@ -138,7 +138,7 @@
</p>
<p>
-  <span class="glyphicon glyphicon-hdd"></span>
+  <i class="fa fa-database"></i>
  <span class="text-muted">Data Source</span>
  <select ng-disabled="!isQueryOwner" ng-model="query.data_source_id" ng-change="updateDataSource()" ng-options="ds.id as ds.name for ds in dataSources"></select>
</p>
@@ -192,6 +192,16 @@
</div>
<div class="alert alert-danger" ng-show="queryResult.getError()">Error running query: <strong>{{queryResult.getError()}}</strong></div>
+<div class="row log-container" ng-show="showLog">
+  <span ng-show="showLog">Log Information:</span>
+  <table>
+    <tbody>
+      <tr ng-repeat="l in queryResult.getLog()">
+        <td>{{l}}</td>
+      </tr>
+    </tbody>
+  </table>
+</div>
<!-- tabs and data -->
<div ng-show="showDataset">
  <div class="row">


@@ -54,6 +54,14 @@
           ng-model="visualization.options.sortX">
    </div>
  </div>
+  <div class="form-group">
+    <label class="control-label col-sm-2">Show X Axis Labels</label>
+    <div class="col-sm-10">
+      <input name="sortX" type="checkbox" class="form-control"
+             ng-model="visualization.options.xAxis.labels.enabled">
+    </div>
+  </div>
</div>
</div>
@@ -100,6 +108,15 @@
        class="form-control"></select>
  </div>
</div>
+<div class="form-group">
+  <label class="control-label col-sm-3">Index</label>
+  <div class="col-sm-9">
+    <select required ng-model="visualization.options.seriesOptions[seriesName].index"
+            ng-options="o as o for o in zIndexes"
+            class="form-control"></select>
+  </div>
+</div>
<div class="form-group">
  <label class="control-label col-sm-3">y Axis</label>


@@ -1,7 +1,7 @@
<div>
  <span ng-click="openEditor=!openEditor" class="details-toggle" ng-class="{open: openEditor}">Edit</span>
-  <form ng-if="openEditor" role="form" name="visForm" ng-submit="submit()">
+  <form ng-show="openEditor" role="form" name="visForm" ng-submit="submit()">
    <div class="form-group">
      <label class="control-label">Name</label>
      <input name="name" type="text" class="form-control" ng-model="visualization.name" placeholder="{{visualization.type | capitalize}}">


@@ -0,0 +1,3 @@
<div style='margin:1%;width:98%;height:{{visualization.options.height}}px'>
<div style="width:100%; height:100%;"></div>
</div>


@@ -0,0 +1,55 @@
<div class="form-horizontal">
<div class="form-group">
<label class="col-lg-2">Map height (px)</label>
<div class="col-sm-4">
<input class="form-control" type="number" ng-model = "visualization.options.height" />
</div>
</div>
<div class="form-group">
<label class="col-lg-2">Draw option</label>
<div class="col-sm-4">
<select ng-options="opt for opt in draw_options" ng-model="visualization.options.draw" class="form-control"></select>
</div>
</div>
<div class="form-group">
<label class="col-lg-2">Latitude column name</label>
<div class="col-sm-4">
<select ng-options="name for name in queryResult.columnNames" ng-model="visualization.options.latColName" class="form-control"></select>
</div>
</div>
<div class="form-group">
<label class="col-lg-2">Longitude column name</label>
<div class="col-sm-4">
<select ng-options="name for name in queryResult.columnNames" ng-model="visualization.options.lonColName" class="form-control"></select>
</div>
</div>
<div ng-show = "visualization.options.draw == 'Color'">
<div class="form-group">
<label class="col-lg-2">Classify by column</label>
<div class="col-sm-4">
<select ng-options="name for name in classify_columns" ng-model="visualization.options.classify" class="form-control"></select>
</div>
</div>
<div class="row" >
<div class="col-lg-6">
<div ng-repeat="element in visualization.options.classification" class="list-group">
<div class="list-group-item active">
{{element.value}}
</div>
<div class="list-group-item">
<div class="form-group">
<label class="col-lg-4">Color</label>
<div class="col-sm-4">
<input class="form-control" style="background-color:{{element.color}};" type="text" ng-model = "element.color" />
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>


@@ -25,11 +25,13 @@
    "marked": "~0.3.2",
    "bucky": "~0.2.6",
    "pace": "~0.5.1",
-    "angular-ui-select": "0.8.2",
+    "angular-ui-select": "~0.12.0",
    "font-awesome": "~4.2.0",
    "mustache": "~1.0.0",
    "canvg": "gabelerner/canvg",
-    "angular-ui-bootstrap-bower": "~0.12.1"
+    "angular-ui-bootstrap-bower": "~0.12.1",
+    "leaflet": "~0.7.3",
+    "angular-base64-upload": "~0.1.11"
  },
  "devDependencies": {
    "angular-mocks": "1.2.18",


@@ -36,6 +36,7 @@
    "node": ">=0.10.0"
  },
  "scripts": {
-    "test": "grunt test"
+    "test": "grunt test",
+    "bower": "bower"
  }
}


@@ -2,11 +2,12 @@ import logging
import urlparse
import redis
from statsd import StatsClient
+from flask_mail import Mail
from redash import settings
from redash.query_runner import import_query_runners
-__version__ = '0.6.1'
+__version__ = '0.7.0'
def setup_logging():
@@ -32,6 +33,8 @@ def create_redis_connection():
setup_logging()
redis_connection = create_redis_connection()
+mail = Mail()
+mail.init_mail(settings.all_settings())
statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX)
import_query_runners(settings.QUERY_RUNNERS)


@@ -53,7 +53,8 @@ class PasswordHashField(fields.PasswordField):
class PgModelConverter(CustomModelConverter):
    def __init__(self, view, additional=None):
        additional = {ArrayField: self.handle_array_field,
-                      DateTimeTZField: self.handle_datetime_tz_field}
+                      DateTimeTZField: self.handle_datetime_tz_field,
+                      }
        super(PgModelConverter, self).__init__(view, additional)
        self.view = view
@@ -84,33 +85,25 @@ class UserModelView(BaseModelView):
    }
-def query_runner_type_formatter(view, context, model, name):
-    qr = query_runner.query_runners.get(model.type, None)
-    if qr:
-        return qr.name()
-    return model.type
-class DataSourceModelView(BaseModelView):
-    form_overrides = dict(type=fields.SelectField, options=JSONTextAreaField)
-    form_args = dict(type={
-        'choices': [(k, r.name()) for k, r in query_runner.query_runners.iteritems()]
-    })
-    column_formatters = dict(type=query_runner_type_formatter)
-    column_filters = ('type',)
+class QueryResultModelView(BaseModelView):
+    column_exclude_list = ('data',)
+class QueryModelView(BaseModelView):
+    column_exclude_list = ('latest_query_data',)
+class DashboardModelView(BaseModelView):
+    column_searchable_list = ('name', 'slug')
def init_admin(app):
-    admin = Admin(app, name='re:dash admin')
-    views = {
-        models.User: UserModelView(models.User),
-        models.DataSource: DataSourceModelView(models.DataSource)
-    }
-    for m in models.all_models:
-        if m in views:
-            admin.add_view(views[m])
-        else:
-            admin.add_view(BaseModelView(m))
+    admin = Admin(app, name='re:dash admin', template_mode='bootstrap3')
+    admin.add_view(UserModelView(models.User))
+    admin.add_view(QueryModelView(models.Query))
+    admin.add_view(QueryResultModelView(models.QueryResult))
+    admin.add_view(DashboardModelView(models.Dashboard))
+    for m in (models.Visualization, models.Widget, models.ActivityLog, models.Group, models.Event):
+        admin.add_view(BaseModelView(m))


@@ -1,13 +1,13 @@
-import functools
import hashlib
import hmac
import time
import logging
-from flask import request, make_response, redirect, url_for
-from flask.ext.login import LoginManager, login_user, current_user, logout_user
+from flask.ext.login import LoginManager
+from flask.ext.login import user_logged_in
-from redash import models, settings, google_oauth
+from redash import models, settings, google_oauth, saml_auth
+from redash.tasks import record_event
login_manager = LoginManager()
logger = logging.getLogger('authentication')
@@ -23,77 +23,85 @@ def sign(key, path, expires):
    return h.hexdigest()
-class Authentication(object):
-    def verify_authentication(self):
-        return False
-    def required(self, fn):
-        @functools.wraps(fn)
-        def decorated(*args, **kwargs):
-            if current_user.is_authenticated() or self.verify_authentication():
-                return fn(*args, **kwargs)
-            return make_response(redirect(url_for("login", next=request.url)))
-        return decorated
-class ApiKeyAuthentication(Authentication):
-    def verify_authentication(self):
-        api_key = request.args.get('api_key')
-        query_id = request.view_args.get('query_id', None)
-        if query_id and api_key:
-            query = models.Query.get(models.Query.id == query_id)
-            if query.api_key and api_key == query.api_key:
-                login_user(models.ApiUser(query.api_key), remember=False)
-                return True
-        return False
-class HMACAuthentication(Authentication):
-    def verify_authentication(self):
-        signature = request.args.get('signature')
-        expires = float(request.args.get('expires') or 0)
-        query_id = request.view_args.get('query_id', None)
-        # TODO: 3600 should be a setting
-        if signature and query_id and time.time() < expires <= time.time() + 3600:
-            query = models.Query.get(models.Query.id == query_id)
-            calculated_signature = sign(query.api_key, request.path, expires)
-            if query.api_key and signature == calculated_signature:
-                login_user(models.ApiUser(query.api_key), remember=False)
-                return True
-        return False
-@login_manager.user_loader
-def load_user(user_id):
-    # If the user was previously logged in as api user, the user_id will be the api key and will raise an exception as
-    # it can't be casted to int.
-    if isinstance(user_id, basestring) and not user_id.isdigit():
-        return None
-    return models.User.select().where(models.User.id == user_id).first()
+@login_manager.user_loader
+def load_user(user_id):
+    return models.User.get_by_id(user_id)
+def hmac_load_user_from_request(request):
+    signature = request.args.get('signature')
+    expires = float(request.args.get('expires') or 0)
+    query_id = request.view_args.get('query_id', None)
+    user_id = request.args.get('user_id', None)
+    # TODO: 3600 should be a setting
+    if signature and time.time() < expires <= time.time() + 3600:
+        if user_id:
+            user = models.User.get_by_id(user_id)
+            calculated_signature = sign(user.api_key, request.path, expires)
+            if user.api_key and signature == calculated_signature:
+                return user
+        if query_id:
+            query = models.Query.get(models.Query.id == query_id)
+            calculated_signature = sign(query.api_key, request.path, expires)
+            if query.api_key and signature == calculated_signature:
+                return models.ApiUser(query.api_key)
+    return None
+def get_user_from_api_key(api_key, query_id):
+    if not api_key:
+        return None
+    user = None
+    try:
+        user = models.User.get_by_api_key(api_key)
+    except models.User.DoesNotExist:
+        if query_id:
+            query = models.Query.get_by_id(query_id)
+            if query and query.api_key == api_key:
+                user = models.ApiUser(api_key)
+    return user
+def api_key_load_user_from_request(request):
+    api_key = request.args.get('api_key', None)
+    query_id = request.view_args.get('query_id', None)
+    user = get_user_from_api_key(api_key, query_id)
+    return user
+def log_user_logged_in(app, user):
+    event = {
+        'user_id': user.id,
+        'action': 'login',
+        'object_type': 'redash',
+        'timestamp': int(time.time()),
+    }
+    record_event.delay(event)
def setup_authentication(app):
    login_manager.init_app(app)
    login_manager.anonymous_user = models.AnonymousUser
+    login_manager.login_view = 'login'
    app.secret_key = settings.COOKIE_SECRET
    app.register_blueprint(google_oauth.blueprint)
+    app.register_blueprint(saml_auth.blueprint)
+    user_logged_in.connect(log_user_logged_in)
    if settings.AUTH_TYPE == 'hmac':
-        auth = HMACAuthentication()
+        login_manager.request_loader(hmac_load_user_from_request)
    elif settings.AUTH_TYPE == 'api_key':
-        auth = ApiKeyAuthentication()
+        login_manager.request_loader(api_key_load_user_from_request)
    else:
        logger.warning("Unknown authentication type ({}). Using default (HMAC).".format(settings.AUTH_TYPE))
-        auth = HMACAuthentication()
+        login_manager.request_loader(hmac_load_user_from_request)
-    return auth
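
A quick, hedged illustration of how a client can use the signed-URL path that hmac_load_user_from_request() accepts. The module path, endpoint and TTL below are assumptions for the sketch; the actual digest construction lives in the sign() helper whose body is truncated in the hunk above:

import time
from redash.authentication import sign   # assumed module path for the helper shown above

def signed_url(base_url, path, api_key, ttl=3600):
    # Sketch only: builds the signature/expires query parameters the request loader checks.
    # ttl mirrors the hard-coded 3600-second window flagged by the TODO above.
    expires = time.time() + ttl
    signature = sign(api_key, path, expires)
    return "{}{}?signature={}&expires={}".format(base_url, path, signature, expires)

# Hypothetical host, endpoint and key, for illustration only.
print(signed_url("http://localhost:5000", "/api/queries/42/results.json", "QUERY_API_KEY"))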


@@ -12,17 +12,19 @@ import time
import logging
from flask import render_template, send_from_directory, make_response, request, jsonify, redirect, \
-    session, url_for
+    session, url_for, current_app, flash
-from flask.ext.restful import Resource, abort
+from flask.ext.restful import Resource, abort, reqparse
-from flask_login import current_user, login_user, logout_user
+from flask_login import current_user, login_user, logout_user, login_required
+from funcy import project
import sqlparse
-from redash import redis_connection, statsd_client, models, settings, utils, __version__
+from redash import statsd_client, models, settings, utils
-from redash.wsgi import app, auth, api
+from redash.wsgi import app, api
from redash.tasks import QueryTask, record_event
from redash.cache import headers as cache_headers
from redash.permissions import require_permission
from redash.query_runner import query_runners, validate_configuration
+from redash.monitor import get_status
@app.route('/ping', methods=['GET'])
@@ -30,14 +32,19 @@ def ping():
    return 'PONG.'
+@app.route('/admin/<anything>/<whatever>')
@app.route('/admin/<anything>')
@app.route('/dashboard/<anything>')
+@app.route('/alerts')
+@app.route('/alerts/<pk>')
@app.route('/queries')
+@app.route('/data_sources')
+@app.route('/data_sources/<pk>')
@app.route('/queries/<query_id>')
@app.route('/queries/<query_id>/<anything>')
@app.route('/personal')
@app.route('/')
-@auth.required
+@login_required
def index(**kwargs):
    email_md5 = hashlib.md5(current_user.email.lower()).hexdigest()
    gravatar_url = "https://www.gravatar.com/avatar/%s?s=40" % email_md5
@@ -66,22 +73,30 @@ def login():
        return redirect(request.args.get('next') or '/')
    if not settings.PASSWORD_LOGIN_ENABLED:
-        return redirect(url_for("google_oauth.authorize", next=request.args.get('next')))
+        if settings.SAML_LOGIN_ENABLED:
+            return redirect(url_for("saml_auth.sp_initiated", next=request.args.get('next')))
+        else:
+            return redirect(url_for("google_oauth.authorize", next=request.args.get('next')))
    if request.method == 'POST':
-        user = models.User.select().where(models.User.email == request.form['username']).first()
-        if user and user.verify_password(request.form['password']):
-            remember = ('remember' in request.form)
-            login_user(user, remember=remember)
-            return redirect(request.args.get('next') or '/')
+        try:
+            user = models.User.get_by_email(request.form['username'])
+            if user and user.verify_password(request.form['password']):
+                remember = ('remember' in request.form)
+                login_user(user, remember=remember)
+                return redirect(request.args.get('next') or '/')
+            else:
+                flash("Wrong username or password.")
+        except models.User.DoesNotExist:
+            flash("Wrong username or password.")
    return render_template("login.html",
                           name=settings.NAME,
                           analytics=settings.ANALYTICS,
                           next=request.args.get('next'),
                           username=request.form.get('username', ''),
-                           show_google_openid=settings.GOOGLE_OAUTH_ENABLED)
+                           show_google_openid=settings.GOOGLE_OAUTH_ENABLED,
+                           show_saml_login=settings.SAML_LOGIN_ENABLED)
@app.route('/logout')
def logout():
@@ -91,43 +106,16 @@ def logout():
    return redirect('/login')
@app.route('/status.json')
-@auth.required
+@login_required
@require_permission('admin')
def status_api():
-    status = {}
+    status = get_status()
info = redis_connection.info()
status['redis_used_memory'] = info['used_memory_human']
status['version'] = __version__
status['queries_count'] = models.Query.select().count()
status['query_results_count'] = models.QueryResult.select().count()
status['unused_query_results_count'] = models.QueryResult.unused().count()
status['dashboards_count'] = models.Dashboard.select().count()
status['widgets_count'] = models.Widget.select().count()
status['workers'] = []
manager_status = redis_connection.hgetall('redash:status')
status['manager'] = manager_status
status['manager']['outdated_queries_count'] = len(models.Query.outdated_queries())
queues = {}
for ds in models.DataSource.select():
for queue in (ds.queue_name, ds.scheduled_queue_name):
queues.setdefault(queue, set())
queues[queue].add(ds.name)
status['manager']['queues'] = {}
for queue, sources in queues.iteritems():
status['manager']['queues'][queue] = {
'data_sources': ', '.join(sources),
'size': redis_connection.llen(queue)
}
    return jsonify(status)
@app.route('/api/queries/format', methods=['POST'])
-@auth.required
+@login_required
def format_sql_query():
    arguments = request.get_json(force=True)
    query = arguments.get("query", "")
@@ -136,7 +124,7 @@ def format_sql_query():
@app.route('/queries/new', methods=['POST'])
-@auth.required
+@login_required
def create_query_route():
    query = request.form.get('query', None)
    data_source_id = request.form.get('data_source_id', None)
@@ -154,7 +142,7 @@ create_query_route():
class BaseResource(Resource):
-    decorators = [auth.required]
+    decorators = [login_required]
    def __init__(self, *args, **kwargs):
        super(BaseResource, self).__init__(*args, **kwargs)
@@ -199,6 +187,34 @@ class DataSourceTypeListAPI(BaseResource):
api.add_resource(DataSourceTypeListAPI, '/api/data_sources/types', endpoint='data_source_types')
class DataSourceAPI(BaseResource):
@require_permission('admin')
def get(self, data_source_id):
data_source = models.DataSource.get_by_id(data_source_id)
return data_source.to_dict(all=True)
@require_permission('admin')
def post(self, data_source_id):
data_source = models.DataSource.get_by_id(data_source_id)
req = request.get_json(True)
if not validate_configuration(req['type'], req['options']):
abort(400)
data_source.name = req['name']
data_source.options = json.dumps(req['options'])
data_source.save()
return data_source.to_dict(all=True)
@require_permission('admin')
def delete(self, data_source_id):
data_source = models.DataSource.get_by_id(data_source_id)
data_source.delete_instance(recursive=True)
return make_response('', 204)
class DataSourceListAPI(BaseResource):
    def get(self):
        data_sources = [ds.to_dict() for ds in models.DataSource.all()]
@@ -215,11 +231,12 @@ class DataSourceListAPI(BaseResource):
        if not validate_configuration(req['type'], req['options']):
            abort(400)
-        datasource = models.DataSource.create(name=req['name'], type=req['type'], options=req['options'])
+        datasource = models.DataSource.create(name=req['name'], type=req['type'], options=json.dumps(req['options']))
-        return datasource.to_dict()
+        return datasource.to_dict(all=True)
api.add_resource(DataSourceListAPI, '/api/data_sources', endpoint='data_sources')
+api.add_resource(DataSourceAPI, '/api/data_sources/<data_source_id>', endpoint='data_source')
class DataSourceSchemaAPI(BaseResource):
@@ -363,7 +380,7 @@ class QueryAPI(BaseResource):
    @require_permission('edit_query')
    def post(self, query_id):
        query = models.Query.get_by_id(query_id)
        query_def = request.get_json(force=True)
        for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'user', 'last_modified_by']:
            query_def.pop(field, None)
@@ -415,7 +432,7 @@ class VisualizationListAPI(BaseResource):
        kwargs = request.get_json(force=True)
        kwargs['options'] = json.dumps(kwargs['options'])
        kwargs['query'] = kwargs.pop('query_id')
        vis = models.Visualization(**kwargs)
        vis.save()
@@ -450,7 +467,7 @@ api.add_resource(VisualizationAPI, '/api/visualizations/<visualization_id>', end
class QueryResultListAPI(BaseResource):
    @require_permission('execute_query')
    def post(self):
-        params = request.json
+        params = request.get_json(force=True)
        if settings.FEATURE_TABLES_PERMISSIONS:
            metadata = utils.SQLMetaData(params['query'])
@@ -476,7 +493,7 @@ class QueryResultListAPI(BaseResource):
                activity=params['query']
            ).save()
-        max_age = int(params['max_age'])
+        max_age = int(params.get('max_age', -1))
        if max_age == 0:
            query_result = None
@@ -508,6 +525,28 @@ class QueryResultAPI(BaseResource):
            headers.update(cache_headers)
            return make_response(s.getvalue(), 200, headers)
@staticmethod
def add_cors_headers(headers):
if 'Origin' in request.headers:
origin = request.headers['Origin']
if origin in settings.ACCESS_CONTROL_ALLOW_ORIGIN:
headers['Access-Control-Allow-Origin'] = origin
headers['Access-Control-Allow-Credentials'] = str(settings.ACCESS_CONTROL_ALLOW_CREDENTIALS).lower()
@require_permission('view_query')
def options(self, query_id=None, query_result_id=None, filetype='json'):
headers = {}
self.add_cors_headers(headers)
if settings.ACCESS_CONTROL_REQUEST_METHOD:
headers['Access-Control-Request-Method'] = settings.ACCESS_CONTROL_REQUEST_METHOD
if settings.ACCESS_CONTROL_ALLOW_HEADERS:
headers['Access-Control-Allow-Headers'] = settings.ACCESS_CONTROL_ALLOW_HEADERS
return make_response("", 200, headers)
    @require_permission('view_query')
    def get(self, query_id=None, query_result_id=None, filetype='json'):
        if query_result_id is None and query_id is not None:
@@ -537,9 +576,15 @@ class QueryResultAPI(BaseResource):
            record_event.delay(event)
+        headers = {}
+        if len(settings.ACCESS_CONTROL_ALLOW_ORIGIN) > 0:
+            self.add_cors_headers(headers)
        if filetype == 'json':
            data = json.dumps({'query_result': query_result.to_dict()}, cls=utils.JSONEncoder)
-            return make_response(data, 200, cache_headers)
+            headers.update(cache_headers)
+            return make_response(data, 200, headers)
        else:
            return self.csv_response(query_result)
@@ -567,13 +612,110 @@ class JobAPI(BaseResource):
api.add_resource(JobAPI, '/api/jobs/<job_id>', endpoint='job')
class AlertAPI(BaseResource):
def get(self, alert_id):
alert = models.Alert.get_by_id(alert_id)
return alert.to_dict()
def post(self, alert_id):
req = request.get_json(True)
params = project(req, ('options', 'name', 'query_id'))
alert = models.Alert.get_by_id(alert_id)
if 'query_id' in params:
params['query'] = params.pop('query_id')
alert.update_instance(**params)
record_event.delay({
'user_id': self.current_user.id,
'action': 'edit',
'timestamp': int(time.time()),
'object_id': alert.id,
'object_type': 'alert'
})
return alert.to_dict()
class AlertListAPI(BaseResource):
def post(self):
req = request.get_json(True)
required_fields = ('options', 'name', 'query_id')
for f in required_fields:
if f not in req:
abort(400)
alert = models.Alert.create(
name=req['name'],
query=req['query_id'],
user=self.current_user,
options=req['options']
)
record_event.delay({
'user_id': self.current_user.id,
'action': 'create',
'timestamp': int(time.time()),
'object_id': alert.id,
'object_type': 'alert'
})
# TODO: should be in model?
models.AlertSubscription.create(alert=alert, user=self.current_user)
record_event.delay({
'user_id': self.current_user.id,
'action': 'subscribe',
'timestamp': int(time.time()),
'object_id': alert.id,
'object_type': 'alert'
})
return alert.to_dict()
def get(self):
return [alert.to_dict() for alert in models.Alert.all()]
class AlertSubscriptionListResource(BaseResource):
def post(self, alert_id):
subscription = models.AlertSubscription.create(alert=alert_id, user=self.current_user)
record_event.delay({
'user_id': self.current_user.id,
'action': 'subscribe',
'timestamp': int(time.time()),
'object_id': alert_id,
'object_type': 'alert'
})
return subscription.to_dict()
def get(self, alert_id):
subscriptions = models.AlertSubscription.all(alert_id)
return [s.to_dict() for s in subscriptions]
class AlertSubscriptionResource(BaseResource):
def delete(self, alert_id, subscriber_id):
models.AlertSubscription.unsubscribe(alert_id, subscriber_id)
record_event.delay({
'user_id': self.current_user.id,
'action': 'unsubscribe',
'timestamp': int(time.time()),
'object_id': alert_id,
'object_type': 'alert'
})
api.add_resource(AlertAPI, '/api/alerts/<alert_id>', endpoint='alert')
api.add_resource(AlertSubscriptionListResource, '/api/alerts/<alert_id>/subscriptions', endpoint='alert_subscriptions')
api.add_resource(AlertSubscriptionResource, '/api/alerts/<alert_id>/subscriptions/<subscriber_id>', endpoint='alert_subscription')
api.add_resource(AlertListAPI, '/api/alerts', endpoint='alerts')
@app.route('/<path:filename>')
def send_static(filename):
-    return send_from_directory(settings.STATIC_ASSETS_PATH, filename)
+    if current_app.debug:
+        cache_timeout = 0
+    else:
+        cache_timeout = None
+    return send_from_directory(settings.STATIC_ASSETS_PATH, filename, cache_timeout=cache_timeout)
-if __name__ == '__main__':
-    app.run(debug=True)
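
The alert endpoints registered above expect name, query_id and an options dict; the options keys mirror what Alert.evaluate() reads in models.py further down (column, op, value). A hedged example of creating an alert over HTTP, with placeholder host, ids and key, assuming the api_key request loader is the one in use:

import requests

# Illustrative values only; AlertListAPI.post() requires 'name', 'query_id' and 'options'.
payload = {
    "name": "Daily signups too low",
    "query_id": 42,
    "options": {"column": "count", "op": "less than", "value": 10},
}

resp = requests.post(
    "http://localhost:5000/api/alerts",   # placeholder host
    json=payload,
    params={"api_key": "YOUR_API_KEY"},   # assumes AUTH_TYPE == 'api_key'
)
print(resp.json()["id"])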


@@ -1,25 +1,25 @@
import logging
from flask.ext.login import login_user
import requests
-from flask import redirect, url_for, Blueprint
+from flask import redirect, url_for, Blueprint, flash
from flask_oauth import OAuth
from redash import models, settings
logger = logging.getLogger('google_oauth')
oauth = OAuth()
-request_token_params = {'scope': 'https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile', 'response_type': 'code'}
-if settings.GOOGLE_APPS_DOMAIN:
-    request_token_params['hd'] = settings.GOOGLE_APPS_DOMAIN
-else:
+if not settings.GOOGLE_APPS_DOMAIN:
    logger.warning("No Google Apps domain defined, all Google accounts allowed.")
google = oauth.remote_app('google',
                          base_url='https://www.google.com/accounts/',
                          authorize_url='https://accounts.google.com/o/oauth2/auth',
                          request_token_url=None,
-                          request_token_params=request_token_params,
+                          request_token_params={
+                              'scope': 'https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile',
+                              'response_type': 'code'
+                          },
                          access_token_url='https://accounts.google.com/o/oauth2/token',
                          access_token_method='POST',
                          access_token_params={'grant_type': 'authorization_code'},
@@ -31,7 +31,7 @@ blueprint = Blueprint('google_oauth', __name__)
def get_user_profile(access_token):
-    headers = {'Authorization': 'OAuth '+access_token}
+    headers = {'Authorization': 'OAuth {}'.format(access_token)}
    response = requests.get('https://www.googleapis.com/oauth2/v1/userinfo', headers=headers)
    if response.status_code == 401:
@@ -41,9 +41,17 @@ def get_user_profile(access_token):
    return response.json()
+def verify_profile(profile):
+    if not settings.GOOGLE_APPS_DOMAIN:
+        return True
+    domain = profile['email'].split('@')[-1]
+    return domain in settings.GOOGLE_APPS_DOMAIN
def create_and_login_user(name, email):
    try:
-        user_object = models.User.get(models.User.email == email)
+        user_object = models.User.get_by_email(email)
        if user_object.name != name:
            logger.debug("Updating user name (%r -> %r)", user_object.name, name)
            user_object.name = name
@@ -70,10 +78,17 @@ def authorized(resp):
    if access_token is None:
        logger.warning("Access token missing in call back request.")
+        flash("Validation error. Please retry.")
        return redirect(url_for('login'))
    profile = get_user_profile(access_token)
    if profile is None:
+        flash("Validation error. Please retry.")
+        return redirect(url_for('login'))
+    if not verify_profile(profile):
+        logger.warning("User tried to login with unauthorized domain name: %s", profile['email'])
+        flash("Your Google Apps domain name isn't allowed.")
        return redirect(url_for('login'))
    create_and_login_user(profile['name'], profile['email'])


@@ -15,6 +15,7 @@ import psycopg2
from redash import utils, settings, redis_connection
from redash.query_runner import get_query_runner
+from utils import generate_token
class Database(object):
@@ -76,6 +77,17 @@ class BaseModel(peewee.Model):
        super(BaseModel, self).save(*args, **kwargs)
        self.post_save(created)
def update_instance(self, **kwargs):
for k, v in kwargs.items():
# setattr(model_instance, field_name, field_obj.python_value(value))
setattr(self, k, v)
dirty_fields = self.dirty_fields
if hasattr(self, 'updated_at'):
dirty_fields = dirty_fields + [self.__class__.updated_at]
self.save(only=dirty_fields)
class ModelTimestampsMixin(BaseModel):
    updated_at = DateTimeTZField(default=datetime.datetime.now)
@@ -152,6 +164,7 @@ class User(ModelTimestampsMixin, BaseModel, UserMixin, PermissionsCheckMixin):
    email = peewee.CharField(max_length=320, index=True, unique=True)
    password_hash = peewee.CharField(max_length=128, null=True)
    groups = ArrayField(peewee.CharField, default=DEFAULT_GROUPS)
+    api_key = peewee.CharField(max_length=40, unique=True)
    class Meta:
        db_table = 'users'
@@ -161,6 +174,7 @@ class User(ModelTimestampsMixin, BaseModel, UserMixin, PermissionsCheckMixin):
            'id': self.id,
            'name': self.name,
            'email': self.email,
+            'gravatar_url': self.gravatar_url,
            'updated_at': self.updated_at,
            'created_at': self.created_at
        }
@@ -169,6 +183,17 @@ class User(ModelTimestampsMixin, BaseModel, UserMixin, PermissionsCheckMixin):
        super(User, self).__init__(*args, **kwargs)
        self._allowed_tables = None
def pre_save(self, created):
super(User, self).pre_save(created)
if not self.api_key:
self.api_key = generate_token(40)
@property
def gravatar_url(self):
email_md5 = hashlib.md5(self.email.lower()).hexdigest()
return "https://www.gravatar.com/avatar/%s?s=40" % email_md5
    @property
    def permissions(self):
        # TODO: this should be cached.
@@ -188,8 +213,12 @@ class User(ModelTimestampsMixin, BaseModel, UserMixin, PermissionsCheckMixin):
    def get_by_email(cls, email):
        return cls.get(cls.email == email)
+    @classmethod
+    def get_by_api_key(cls, api_key):
+        return cls.get(cls.api_key == api_key)
    def __unicode__(self):
-        return '%r, %r' % (self.name, self.email)
+        return u'%s (%s)' % (self.name, self.email)
    def hash_password(self, password):
        self.password_hash = pwd_context.encrypt(password)
@@ -229,20 +258,30 @@ class DataSource(BaseModel):
    type = peewee.CharField()
    options = peewee.TextField()
    queue_name = peewee.CharField(default="queries")
-    scheduled_queue_name = peewee.CharField(default="queries")
+    scheduled_queue_name = peewee.CharField(default="scheduled_queries")
    created_at = DateTimeTZField(default=datetime.datetime.now)
    class Meta:
        db_table = 'data_sources'
-    def to_dict(self):
-        return {
+    def to_dict(self, all=False):
+        d = {
            'id': self.id,
            'name': self.name,
            'type': self.type,
            'syntax': self.query_runner.syntax
        }
+        if all:
+            d['options'] = json.loads(self.options)
+            d['queue_name'] = self.queue_name
+            d['scheduled_queue_name'] = self.scheduled_queue_name
+        return d
+    def __unicode__(self):
+        return self.name
    def get_schema(self, refresh=False):
        key = "data_source:schema:{}".format(self.id)
@@ -269,6 +308,14 @@ class DataSource(BaseModel):
        return cls.select().order_by(cls.id.asc())
class JSONField(peewee.TextField):
def db_value(self, value):
return json.dumps(value)
def python_value(self, value):
return json.loads(value)
class QueryResult(BaseModel):
    id = peewee.PrimaryKeyField()
    data_source = peewee.ForeignKeyField(DataSource)
@@ -326,13 +373,17 @@ class QueryResult(BaseModel):
        logging.info("Inserted query (%s) data; id=%s", query_hash, query_result.id)
-        updated_count = Query.update(latest_query_data=query_result).\
-            where(Query.query_hash==query_hash, Query.data_source==data_source_id).\
-            execute()
-        logging.info("Updated %s queries with result (%s).", updated_count, query_hash)
-        return query_result
+        sql = "UPDATE queries SET latest_query_data_id = %s WHERE query_hash = %s AND data_source_id = %s RETURNING id"
+        query_ids = [row[0] for row in db.database.execute_sql(sql, params=(query_result.id, query_hash, data_source_id))]
+        # TODO: when peewee with update & returning support is released, we can get back to using this code:
+        # updated_count = Query.update(latest_query_data=query_result).\
+        #     where(Query.query_hash==query_hash, Query.data_source==data_source_id).\
+        #     execute()
+        logging.info("Updated %s queries with result (%s).", len(query_ids), query_hash)
+        return query_result, query_ids
    def __unicode__(self):
        return u"%d | %s | %s" % (self.id, self.query_hash, self.retrieved_at)
@@ -361,7 +412,7 @@ def should_schedule_next(previous_iteration, now, schedule):
class Query(ModelTimestampsMixin, BaseModel):
    id = peewee.PrimaryKeyField()
-    data_source = peewee.ForeignKeyField(DataSource)
+    data_source = peewee.ForeignKeyField(DataSource, null=True)
    latest_query_data = peewee.ForeignKeyField(QueryResult, null=True)
    name = peewee.CharField(max_length=255)
    description = peewee.CharField(max_length=4096, null=True)
@@ -395,7 +446,7 @@ class Query(ModelTimestampsMixin, BaseModel):
        if with_user:
            d['user'] = self.user.to_dict()
-            d['last_modified_by'] = self.last_modified_by.to_dict()
+            d['last_modified_by'] = self.last_modified_by.to_dict() if self.last_modified_by is not None else None
        else:
            d['user_id'] = self._data['user']
@@ -437,7 +488,7 @@ class Query(ModelTimestampsMixin, BaseModel):
            .switch(Query).join(DataSource)\
            .where(cls.schedule != None)
-        now = datetime.datetime.utcnow().replace(tzinfo=psycopg2.tz.FixedOffsetTimezone(offset=0, name=None))
+        now = utils.utcnow()
        outdated_queries = {}
        for query in queries:
            if should_schedule_next(query.latest_query_data.retrieved_at, now, query.schedule):
@@ -515,6 +566,83 @@ class Query(ModelTimestampsMixin, BaseModel):
        return unicode(self.id)
class Alert(ModelTimestampsMixin, BaseModel):
UNKNOWN_STATE = 'unknown'
OK_STATE = 'ok'
TRIGGERED_STATE = 'triggered'
id = peewee.PrimaryKeyField()
name = peewee.CharField()
query = peewee.ForeignKeyField(Query, related_name='alerts')
user = peewee.ForeignKeyField(User, related_name='alerts')
options = JSONField()
state = peewee.CharField(default=UNKNOWN_STATE)
last_triggered_at = DateTimeTZField(null=True)
class Meta:
db_table = 'alerts'
@classmethod
def all(cls):
return cls.select(Alert, User, Query).join(Query).switch(Alert).join(User)
def to_dict(self):
return {
'id': self.id,
'name': self.name,
'query': self.query.to_dict(),
'user': self.user.to_dict(),
'options': self.options,
'state': self.state,
'last_triggered_at': self.last_triggered_at,
'updated_at': self.updated_at,
'created_at': self.created_at
}
def evaluate(self):
data = json.loads(self.query.latest_query_data.data)
# todo: safe guard for empty
value = data['rows'][0][self.options['column']]
op = self.options['op']
if op == 'greater than' and value > self.options['value']:
new_state = self.TRIGGERED_STATE
elif op == 'less than' and value < self.options['value']:
new_state = self.TRIGGERED_STATE
elif op == 'equals' and value == self.options['value']:
new_state = self.TRIGGERED_STATE
else:
new_state = self.OK_STATE
return new_state
def subscribers(self):
return User.select().join(AlertSubscription).where(AlertSubscription.alert==self)
class AlertSubscription(ModelTimestampsMixin, BaseModel):
user = peewee.ForeignKeyField(User)
alert = peewee.ForeignKeyField(Alert)
class Meta:
db_table = 'alert_subscriptions'
def to_dict(self):
return {
'user': self.user.to_dict(),
'alert_id': self._data['alert']
}
@classmethod
def all(cls, alert_id):
return AlertSubscription.select(AlertSubscription, User).join(User).where(AlertSubscription.alert==alert_id)
@classmethod
def unsubscribe(cls, alert_id, user_id):
query = AlertSubscription.delete().where(AlertSubscription.alert==alert_id).where(AlertSubscription.user==user_id)
return query.execute()
class Dashboard(ModelTimestampsMixin, BaseModel):
    id = peewee.PrimaryKeyField()
    slug = peewee.CharField(max_length=140, index=True)
@@ -704,7 +832,7 @@ class Event(BaseModel):
        return event
-all_models = (DataSource, User, QueryResult, Query, Dashboard, Visualization, Widget, ActivityLog, Group, Event)
+all_models = (DataSource, User, QueryResult, Query, Alert, Dashboard, Visualization, Widget, ActivityLog, Group, Event)
def init_db():

redash/monitor.py Normal file

@@ -0,0 +1,33 @@
from redash import redis_connection, models, __version__
def get_status():
status = {}
info = redis_connection.info()
status['redis_used_memory'] = info['used_memory_human']
status['version'] = __version__
status['queries_count'] = models.Query.select().count()
status['query_results_count'] = models.QueryResult.select().count()
status['unused_query_results_count'] = models.QueryResult.unused().count()
status['dashboards_count'] = models.Dashboard.select().count()
status['widgets_count'] = models.Widget.select().count()
status['workers'] = []
manager_status = redis_connection.hgetall('redash:status')
status['manager'] = manager_status
status['manager']['outdated_queries_count'] = len(models.Query.outdated_queries())
queues = {}
for ds in models.DataSource.select():
for queue in (ds.queue_name, ds.scheduled_queue_name):
queues.setdefault(queue, set())
queues[queue].add(ds.name)
status['manager']['queues'] = {}
for queue, sources in queues.iteritems():
status['manager']['queues'][queue] = {
'data_sources': ', '.join(sources),
'size': redis_connection.llen(queue)
}
return status
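
get_status() is what the slimmed-down /status.json handler above now returns; a small, hedged sketch of fetching it (host and key are placeholders, and the endpoint still requires an admin user):

import requests

# Placeholder host and key; /status.json is wrapped with @login_required and
# @require_permission('admin'), so an admin's credentials are assumed here.
status = requests.get("http://localhost:5000/status.json",
                      params={"api_key": "ADMIN_API_KEY"}).json()
print(status["version"])             # e.g. '0.7.0'
print(status["manager"]["queues"])   # per-queue data sources and sizes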


@@ -105,7 +105,11 @@ def validate_configuration(query_runner_type, configuration_json):
        return False
    try:
-        jsonschema.validate(json.loads(configuration_json), query_runner_class.configuration_schema())
+        if isinstance(configuration_json, basestring):
+            configuration = json.loads(configuration_json)
+        else:
+            configuration = configuration_json
+        jsonschema.validate(configuration, query_runner_class.configuration_schema())
    except (ValidationError, ValueError):
        return False


@@ -1,3 +1,4 @@
+from base64 import b64decode
import datetime
import json
import httplib2
@@ -89,20 +90,16 @@ class BigQuery(BaseQueryRunner):
        return {
            'type': 'object',
            'properties': {
-                'serviceAccount': {
-                    'type': 'string',
-                    'title': 'Service Account'
-                },
                'projectId': {
                    'type': 'string',
                    'title': 'Project ID'
                },
-                'privateKey': {
-                    'type': 'string',
-                    'title': 'Private Key Path'
-                }
+                'jsonKeyFile': {
+                    "type": "string",
+                    'title': 'JSON Key File'
+                }
            },
-            'required': ['serviceAccount', 'projectId', 'privateKey']
+            'required': ['jsonKeyFile', 'projectId']
        }
    def __init__(self, configuration_json):
@@ -113,8 +110,9 @@ class BigQuery(BaseQueryRunner):
            "https://www.googleapis.com/auth/bigquery",
        ]
-        private_key = _load_key(self.configuration["privateKey"])
-        credentials = SignedJwtAssertionCredentials(self.configuration['serviceAccount'], private_key, scope=scope)
+        key = json.loads(b64decode(self.configuration['jsonKeyFile']))
+        credentials = SignedJwtAssertionCredentials(key['client_email'], key['private_key'], scope=scope)
        http = httplib2.Http()
        http = credentials.authorize(http)
@@ -201,4 +199,4 @@ class BigQueryGCE(BigQuery):
register(BigQuery)
register(BigQueryGCE)
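
Since the BigQuery runner now decodes the service-account key from a base64 blob stored in the data source options (the base-sixty-four-input directive in the data source form produces this value in the browser), here is a hedged sketch of preparing the same value by hand, for example when creating the data source through the API; the file path is illustrative:

import json
from base64 import b64encode, b64decode

# 'bigquery-key.json' is an illustrative path to the JSON key downloaded from the
# Google Developers Console for the service account.
with open("bigquery-key.json", "rb") as f:
    encoded = b64encode(f.read())

# 'encoded' is what goes into the data source's 'jsonKeyFile' option; the runner
# then recovers client_email and private_key exactly as in __init__ above.
key = json.loads(b64decode(encoded))
print(key["client_email"])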


@@ -0,0 +1,259 @@
import datetime
import json
import logging
import sys
import urllib
from redash.query_runner import *
from redash import models
import requests
import dateutil
from dateutil.parser import parse
try:
import http.client as http_client
except ImportError:
# Python 2
import httplib as http_client
logger = logging.getLogger(__name__)
ELASTICSEARCH_TYPES_MAPPING = {
"integer" : TYPE_INTEGER,
"long" : TYPE_INTEGER,
"float" : TYPE_FLOAT,
"double" : TYPE_FLOAT,
"boolean" : TYPE_BOOLEAN,
"string" : TYPE_STRING,
"date" : TYPE_DATE,
# "geo_point" TODO: Need to split to 2 fields somehow
}
PYTHON_TYPES_MAPPING = {
str: TYPE_STRING,
unicode: TYPE_STRING,
bool : TYPE_BOOLEAN,
int : TYPE_INTEGER,
long: TYPE_INTEGER,
float: TYPE_FLOAT
}
#
# ElasticSearch currently supports only simple Lucene style queries (like Kibana
# but without the aggregation).
#
# Full blown JSON based ElasticSearch queries (including aggregations) will be
# added later
#
# Simple query example:
#
# - Query the index named "twitter"
# - Filter by "user:kimchy"
# - Return the fields: "@timestamp", "tweet" and "user"
# - Return up to 15 results
# - Sort by @timestamp ascending
#
# {
# "index" : "twitter",
# "query" : "user:kimchy",
# "fields" : ["@timestamp", "tweet", "user"],
# "size" : 15,
# "sort" : "@timestamp:asc"
# }
#
#
# Simple query on a logstash ElasticSearch instance:
#
# - Query the index named "logstash-2015.04.*" (in this case its all of April 2015)
# - Filter by type:events AND eventName:UserUpgrade AND channel:selfserve
# - Return fields: "@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"
# - Return up to 250 results
# - Sort by @timestamp ascending
# {
# "index" : "logstash-2015.04.*",
# "query" : "type:events AND eventName:UserUpgrade AND channel:selfserve",
# "fields" : ["@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"],
# "size" : 250,
# "sort" : "@timestamp:asc"
# }
#
#
class ElasticSearch(BaseQueryRunner):
DEBUG_ENABLED = False
"""
ElasticSearch query runner for querying ElasticSearch servers.
Query can be done using the Lucene Syntax (single line) or the more complex,
full blown ElasticSearch JSON syntax
"""
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'server': {
'type': 'string',
'title': 'Base URL'
}
},
"required" : ["server"]
}
@classmethod
def enabled(cls):
return True
@classmethod
def annotate_query(cls):
return False
def __init__(self, configuration_json):
super(ElasticSearch, self).__init__(configuration_json)
self.syntax = "json"
if self.DEBUG_ENABLED:
http_client.HTTPConnection.debuglevel = 1
# you need to initialize logging, otherwise you will not see anything from requests
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
requests_log = logging.getLogger("requests.packages.urllib3")
requests_log.setLevel(logging.DEBUG)
requests_log.propagate = True
def get_mappings(self, url):
mappings = {}
r = requests.get(url)
mappings_data = r.json()
for index_name in mappings_data:
index_mappings = mappings_data[index_name]
for m in index_mappings.get("mappings", {}):
for property_name in index_mappings["mappings"][m]["properties"]:
property_data = index_mappings["mappings"][m]["properties"][property_name]
if not property_name in mappings:
property_type = property_data.get("type", None)
if property_type:
if property_type in ELASTICSEARCH_TYPES_MAPPING:
mappings[property_name] = property_type
else:
raise Exception("Unknown property type: {0}".format(property_type))
return mappings
def parse_results(self, mappings, result_fields, raw_result, result_columns, result_rows):
result_columns_index = {}
for c in result_columns:
result_columns_index[c["name"]] = c
result_fields_index = {}
if result_fields:
for r in result_fields:
result_fields_index[r] = None
for h in raw_result["hits"]["hits"]:
row = {}
for column in h["_source"]:
if result_fields and column not in result_fields_index:
continue
if column not in result_columns_index:
result_columns.append({
"name" : column,
"friendly_name" : column,
"type" : mappings.get(column, "string")
})
result_columns_index[column] = result_columns[-1]
row[column] = h["_source"][column]
if row and len(row) > 0:
result_rows.append(row)
def execute_simple_query(self, url, _from, mappings, result_fields, result_columns, result_rows):
url += "&from={0}".format(_from)
r = requests.get(url)
if r.status_code != 200:
raise Exception("Failed to execute query. Return Code: {0} Reason: {1}".format(r.status_code, r.text))
raw_result = r.json()
self.parse_results(mappings, result_fields, raw_result, result_columns, result_rows)
total = raw_result["hits"]["total"]
result_size = len(raw_result["hits"]["hits"])
logger.debug("Result Size: {0} Total: {1}".format(result_size, total))
return raw_result["hits"]["total"]
def run_query(self, query):
try:
error = None
logger.debug(query)
query_params = json.loads(query)
index_name = query_params["index"]
query_data = query_params["query"]
size = int(query_params.get("size", 500))
result_fields = query_params.get("fields", None)
sort = query_params.get("sort", None)
server_url = self.configuration["server"]
if not server_url:
error = "Missing configuration key 'server'"
return None, error
if server_url[-1] == "/":
server_url = server_url[:-1]
url = "{0}/{1}/_search?".format(server_url, index_name)
mapping_url = "{0}/{1}/_mapping".format(server_url, index_name)
mappings = self.get_mappings(mapping_url)
logger.debug(json.dumps(mappings, indent=4))
if size:
url += "&size={0}".format(size)
if sort:
url += "&sort={0}".format(urllib.quote_plus(sort))
url += "&q={0}".format(urllib.quote_plus(query_data))
logger.debug("Using URL: {0}".format(url))
logger.debug("Using Query: {0}".format(query_data))
result_columns = []
result_rows = []
if isinstance(query_data, str) or isinstance(query_data, unicode):
_from = 0
while True:
total = self.execute_simple_query(url, _from, mappings, result_fields, result_columns, result_rows)
_from += size
if _from >= total:
break
else:
# TODO: Handle complete ElasticSearch queries (JSON based sent over HTTP POST)
raise Exception("Advanced queries are not supported")
json_data = json.dumps({
"columns" : result_columns,
"rows" : result_rows
})
except KeyboardInterrupt:
error = "Query cancelled by user."
json_data = None
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
return json_data, error
register(ElasticSearch)
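A sketch of how a simple-syntax query (like the commented example above) is turned into a request: run_query URL-encodes the Lucene query string and pages through results via the from parameter. The server URL below is illustrative; it normally comes from the data source configuration.

    import json
    import urllib

    query = json.dumps({
        "index": "twitter",
        "query": "user:kimchy",
        "fields": ["@timestamp", "tweet", "user"],
        "size": 15,
        "sort": "@timestamp:asc"
    })

    params = json.loads(query)
    server_url = "http://localhost:9200"  # illustrative
    url = "{0}/{1}/_search?".format(server_url, params["index"])
    url += "&size={0}".format(params["size"])
    url += "&sort={0}".format(urllib.quote_plus(params["sort"]))
    url += "&q={0}".format(urllib.quote_plus(params["query"]))
    print(url)
    # http://localhost:9200/twitter/_search?&size=15&sort=%40timestamp%3Aasc&q=user%3Akimchy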


@@ -0,0 +1,117 @@
from base64 import b64decode
import json
import logging
import sys
from redash.query_runner import *
from redash.utils import JSONEncoder
logger = logging.getLogger(__name__)
try:
import gspread
from oauth2client.client import SignedJwtAssertionCredentials
from dateutil import parser
enabled = True
except ImportError:
logger.warning("Missing dependencies. Please install gspread, dateutil and oauth2client.")
logger.warning("You can use pip: pip install gspread dateutil oauth2client")
enabled = False
def _load_key(filename):
with open(filename, "rb") as f:
return json.loads(f.read())
def _guess_type(value):
try:
val = int(value)
return TYPE_INTEGER, val
except ValueError:
pass
try:
val = float(value)
return TYPE_FLOAT, val
except ValueError:
pass
if str(value).lower() in ('true', 'false'):
return TYPE_BOOLEAN, bool(value)
try:
val = parser.parse(value)
return TYPE_DATETIME, val
except ValueError:
pass
return TYPE_STRING, value
class GoogleSpreadsheet(BaseQueryRunner):
HEADER_INDEX = 0
@classmethod
def annotate_query(cls):
return False
@classmethod
def type(cls):
return "google_spreadsheets"
@classmethod
def enabled(cls):
return enabled
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'jsonKeyFile': {
"type": "string",
'title': 'JSON Key File'
}
},
'required': ['jsonKeyFile']
}
def __init__(self, configuration_json):
super(GoogleSpreadsheet, self).__init__(configuration_json)
def _get_spreadsheet_service(self):
scope = [
'https://spreadsheets.google.com/feeds',
]
key = json.loads(b64decode(self.configuration['jsonKeyFile']))
credentials = SignedJwtAssertionCredentials(key['client_email'], key["private_key"], scope=scope)
spreadsheetservice = gspread.authorize(credentials)
return spreadsheetservice
def run_query(self, query):
logger.debug("Spreadsheet is about to execute query: %s", query)
values = query.split("|")
key = values[0] #key of the spreadsheet
worksheet_num = 0 if len(values) != 2 else int(values[1])# if spreadsheet contains more than one worksheet - this is the number of it
try:
spreadsheet_service = self._get_spreadsheet_service()
spreadsheet = spreadsheet_service.open_by_key(key)
worksheets = spreadsheet.worksheets()
all_data = worksheets[worksheet_num].get_all_values()
column_names = []
columns = []
for j, column_name in enumerate(all_data[self.HEADER_INDEX]):
column_names.append(column_name)
columns.append({
'name': column_name,
'friendly_name': column_name,
'type': _guess_type(all_data[self.HEADER_INDEX+1][j])
})
rows = [dict(zip(column_names, row)) for row in all_data[self.HEADER_INDEX+1:]]
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
error = None
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
return json_data, error
register(GoogleSpreadsheet)
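The query text for this runner is just the spreadsheet key, optionally followed by a pipe and a 0-based worksheet index. A small sketch of the parsing run_query performs (the helper name and the key are illustrative):

    def parse_spreadsheet_query(query):
        # Mirrors the split("|") logic in run_query above.
        values = query.split("|")
        key = values[0]
        worksheet_num = 0 if len(values) != 2 else int(values[1])
        return key, worksheet_num

    print(parse_spreadsheet_query("1BxiMVs0XRA5nFMdKvBdB"))    # first worksheet: ('1BxiMVs0XRA5nFMdKvBdB', 0)
    print(parse_spreadsheet_query("1BxiMVs0XRA5nFMdKvBdB|2"))  # third worksheet: ('1BxiMVs0XRA5nFMdKvBdB', 2)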


@@ -0,0 +1,159 @@
import json
import logging
import sys
from redash.query_runner import *
from redash.utils import JSONEncoder
logger = logging.getLogger(__name__)
try:
from impala.dbapi import connect
from impala.error import DatabaseError, RPCError
enabled = True
except ImportError, e:
logger.exception(e)
logger.warning("Missing dependencies. Please install impyla.")
logger.warning("You can use pip: pip install impyla")
enabled = False
COLUMN_NAME = 0
COLUMN_TYPE = 1
types_map = {
'BIGINT': TYPE_INTEGER,
'TINYINT': TYPE_INTEGER,
'SMALLINT': TYPE_INTEGER,
'INT': TYPE_INTEGER,
'DOUBLE': TYPE_FLOAT,
'DECIMAL': TYPE_FLOAT,
'FLOAT': TYPE_FLOAT,
'REAL': TYPE_FLOAT,
'BOOLEAN': TYPE_BOOLEAN,
'TIMESTAMP': TYPE_DATETIME,
'CHAR': TYPE_STRING,
'STRING': TYPE_STRING,
'VARCHAR': TYPE_STRING
}
class Impala(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
"type": "object",
"properties": {
"host": {
"type": "string"
},
"port": {
"type": "number"
},
"protocol": {
"type": "string",
"title": "Please specify beeswax or hiveserver2"
},
"database": {
"type": "string"
},
"use_ldap": {
"type": "boolean"
},
"ldap_user": {
"type": "string"
},
"ldap_password": {
"type": "string"
},
"timeout": {
"type": "number"
}
},
"required": ["host"]
}
@classmethod
def type(cls):
return "impala"
def __init__(self, configuration_json):
super(Impala, self).__init__(configuration_json)
def _run_query_internal(self, query):
results, error = self.run_query(query)
if error is not None:
raise Exception("Failed getting schema.")
return json.loads(results)['rows']
def get_schema(self):
try:
schemas_query = "show schemas;"
tables_query = "show tables in %s;"
columns_query = "show column stats %s;"
schema = {}
for schema_name in map(lambda a: a['name'], self._run_query_internal(schemas_query)):
for table_name in map(lambda a: a['name'], self._run_query_internal(tables_query % schema_name)):
columns = map(lambda a: a['Column'], self._run_query_internal(columns_query % table_name))
if schema_name != 'default':
table_name = '{}.{}'.format(schema_name, table_name)
schema[table_name] = {'name': table_name, 'columns': columns}
except Exception, e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
return schema.values()
def run_query(self, query):
connection = None
try:
connection = connect(**self.configuration)
cursor = connection.cursor()
cursor.execute(query)
column_names = []
columns = []
for column in cursor.description:
column_name = column[COLUMN_NAME]
column_names.append(column_name)
columns.append({
'name': column_name,
'friendly_name': column_name,
'type': types_map.get(column[COLUMN_TYPE], None)
})
rows = [dict(zip(column_names, row)) for row in cursor]
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
error = None
cursor.close()
except DatabaseError as e:
logging.exception(e)
json_data = None
error = e.message
except RPCError as e:
logging.exception(e)
json_data = None
error = "Metastore Error [%s]" % e.message
except KeyboardInterrupt:
connection.cancel()
error = "Query cancelled by user."
json_data = None
except Exception as e:
logging.exception(e)
raise sys.exc_info()[1], None, sys.exc_info()[2]
finally:
connection.close()
return json_data, error
register(Impala)
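Because run_query calls connect(**self.configuration), every key in the data source options is forwarded verbatim as a keyword argument to impala.dbapi.connect, so option names must match the schema above. A sketch of an options document (host and values are illustrative):

    import json

    # Keys follow the configuration schema above; only "host" is required.
    configuration = {
        "host": "impala.example.com",
        "port": 21050,
        "protocol": "hiveserver2",
        "database": "default",
        "timeout": 60
    }
    print(json.dumps(configuration))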


@@ -0,0 +1,83 @@
import json
import logging
from redash.utils import JSONEncoder
from redash.query_runner import *
logger = logging.getLogger(__name__)
try:
from influxdb import InfluxDBClusterClient
enabled = True
except ImportError:
logger.warning("Missing dependencies. Please install influxdb.")
logger.warning("You can use pip: pip install influxdb")
enabled = False
def _transform_result(results):
result_columns = []
result_rows = []
for result in results:
if not result_columns:
for c in result.raw['series'][0]['columns']:
result_columns.append({ "name": c })
for point in result.get_points():
result_rows.append(point)
return json.dumps({
"columns" : result_columns,
"rows" : result_rows
}, cls=JSONEncoder)
class InfluxDB(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'url': {
'type': 'string'
}
},
'required': ['url']
}
@classmethod
def enabled(cls):
return enabled
@classmethod
def annotate_query(cls):
return False
@classmethod
def type(cls):
return "influxdb"
def __init__(self, configuration_json):
super(InfluxDB, self).__init__(configuration_json)
def run_query(self, query):
client = InfluxDBClusterClient.from_DSN(self.configuration['url'])
logger.debug("influxdb url: %s", self.configuration['url'])
logger.debug("influxdb got query: %s", query)
try:
results = client.query(query)
if not isinstance(results, list):
results = [results]
json_data = _transform_result(results)
error = None
except Exception, ex:
json_data = None
error = ex.message
return json_data, error
register(InfluxDB)
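The single url option is handed straight to InfluxDBClusterClient.from_DSN, so it is a DSN rather than a bare hostname. A hedged sketch — host, credentials and database are placeholders:

    from influxdb import InfluxDBClusterClient

    # DSN form expected by from_DSN; everything here is a placeholder.
    url = "influxdb://user:password@localhost:8086/mydatabase"

    client = InfluxDBClusterClient.from_DSN(url)
    results = client.query("select * from cpu_load limit 10")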


@@ -3,6 +3,7 @@ import datetime
 import logging
 import re
 import time
+from dateutil.parser import parse

 from redash.utils import JSONEncoder
 from redash.query_runner import *
@@ -12,6 +13,7 @@ logger = logging.getLogger(__name__)
 try:
     import pymongo
     from bson.objectid import ObjectId
+    from bson.son import SON
     enabled = True

 except ImportError:
@@ -32,24 +34,73 @@ TYPES_MAP = {

 date_regex = re.compile("ISODate\(\"(.*)\"\)", re.IGNORECASE)

+class MongoDBJSONEncoder(JSONEncoder):
+    def default(self, o):
+        if isinstance(o, ObjectId):
+            return str(o)
+
+        return super(MongoDBJSONEncoder, self).default(o)

-def _get_column_by_name(columns, column_name):
-    for c in columns:
-        if "name" in c and c["name"] == column_name:
-            return c
-
-    return None
-
-
-def _convert_date(q, field_name):
-    m = date_regex.findall(q[field_name])
-    if len(m) > 0:
-        if q[field_name].find(":") == -1:
-            q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d")))
-        else:
-            q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d %H:%M")))

+# Simple query example:
+#
+# {
+#     "collection" : "my_collection",
+#     "query" : {
+#         "date" : {
+#             "$gt" : "ISODate(\"2015-01-15 11:41\")",
+#         },
+#         "type" : 1
+#     },
+#     "fields" : {
+#         "_id" : 1,
+#         "name" : 2
+#     },
+#     "sort" : [
+#         {
+#             "name" : "date",
+#             "direction" : -1
+#         }
+#     ]
+#
+# }
+#
+#
+# Aggregation
+# ===========
+# Uses a syntax similar to the one used in PyMongo, however to support the
+# correct order of sorting, it uses a regular list for the "$sort" operation
+# that converts into a SON (sorted dictionary) object before execution.
+#
+# Aggregation query example:
+#
+# {
+#     "collection" : "things",
+#     "aggregate" : [
+#         {
+#             "$unwind" : "$tags"
+#         },
+#         {
+#             "$group" : {
+#                 "_id" : "$tags",
+#                 "count" : { "$sum" : 1 }
+#             }
+#         },
+#         {
+#             "$sort" : [
+#                 {
+#                     "name" : "count",
+#                     "direction" : -1
+#                 },
+#                 {
+#                     "name" : "_id",
+#                     "direction" : -1
+#                 }
+#             ]
+#         }
+#     ]
+# }
+#
+#
 class MongoDB(BaseQueryRunner):
     @classmethod
     def configuration_schema(cls):
@@ -89,22 +140,43 @@ class MongoDB(BaseQueryRunner):
         self.is_replica_set = True if "replicaSetName" in self.configuration and self.configuration["replicaSetName"] else False

+    def _get_column_by_name(self, columns, column_name):
+        for c in columns:
+            if "name" in c and c["name"] == column_name:
+                return c
+
+        return None
+
+    def _fix_dates(self, data):
+        for k in data:
+            if isinstance(data[k], list):
+                for i in range(0, len(data[k])):
+                    self._fix_dates(data[k][i])
+            elif isinstance(data[k], dict):
+                self._fix_dates(data[k])
+            else:
+                if isinstance(data[k], (str, unicode)):
+                    self._convert_date(data, k)
+
+    def _convert_date(self, q, field_name):
+        m = date_regex.findall(q[field_name])
+        if len(m) > 0:
+            q[field_name] = parse(m[0], yearfirst=True)
+
     def run_query(self, query):
         if self.is_replica_set:
             db_connection = pymongo.MongoReplicaSetClient(self.configuration["connectionString"], replicaSet=self.configuration["replicaSetName"])
         else:
             db_connection = pymongo.MongoClient(self.configuration["connectionString"])

-        if self.db_name not in db_connection.database_names():
-            return None, "Unknown database name '%s'" % self.db_name
-
-        db = db_connection[self.db_name ]
+        db = db_connection[self.db_name]

         logger.debug("mongodb connection string: %s", self.configuration['connectionString'])
         logger.debug("mongodb got query: %s", query)

         try:
             query_data = json.loads(query)
+            self._fix_dates(query_data)
         except ValueError:
             return None, "Invalid query format. The query is not a valid JSON."
@@ -113,19 +185,26 @@ class MongoDB(BaseQueryRunner):
         else:
             collection = query_data["collection"]

-        q = None
-        if "query" in query_data:
-            q = query_data["query"]
-            for k in q:
-                if q[k] and type(q[k]) in [str, unicode]:
-                    logging.debug(q[k])
-                    _convert_date(q, k)
-                elif q[k] and type(q[k]) is dict:
-                    for k2 in q[k]:
-                        if type(q[k][k2]) in [str, unicode]:
-                            _convert_date(q[k], k2)
+        q = query_data.get("query", None)

         f = None

+        aggregate = query_data.get("aggregate", None)
+        if aggregate:
+            for step in aggregate:
+                if "$sort" in step:
+                    sort_list = []
+                    for sort_item in step["$sort"]:
+                        sort_list.append((sort_item["name"], sort_item["direction"]))
+
+                    step["$sort"] = SON(sort_list)
+
+        if not aggregate:
+            s = None
+            if "sort" in query_data and query_data["sort"]:
+                s = []
+                for field in query_data["sort"]:
+                    s.append((field["name"], field["direction"]))
+
         if "fields" in query_data:
             f = query_data["fields"]
@@ -138,36 +217,47 @@ class MongoDB(BaseQueryRunner):
         columns = []
         rows = []

-        error = None
-        json_data = None
-
-        if s:
-            cursor = db[collection].find(q, f).sort(s)
-        else:
-            cursor = db[collection].find(q, f)
-
-        if "limit" in query_data and query_data["limit"]:
-            cursor = cursor.limit(query_data["limit"])
+        cursor = None
+        if q or (not q and not aggregate):
+            if s:
+                cursor = db[collection].find(q, f).sort(s)
+            else:
+                cursor = db[collection].find(q, f)
+
+            if "skip" in query_data:
+                cursor = cursor.skip(query_data["skip"])
+
+            if "limit" in query_data:
+                cursor = cursor.limit(query_data["limit"])
+
+        elif aggregate:
+            r = db[collection].aggregate(aggregate)
+
+            # Backwards compatibility with older pymongo versions.
+            #
+            # Older pymongo version would return a dictionary from an aggregate command.
+            # The dict would contain a "result" key which would hold the cursor.
+            # Newer ones return pymongo.command_cursor.CommandCursor.
+            if isinstance(r, dict):
+                cursor = r["result"]
+            else:
+                cursor = r

         for r in cursor:
             for k in r:
-                if _get_column_by_name(columns, k) is None:
+                if self._get_column_by_name(columns, k) is None:
                     columns.append({
                         "name": k,
                         "friendly_name": k,
                         "type": TYPES_MAP.get(type(r[k]), TYPE_STRING)
                     })

-                # Convert ObjectId to string
-                if type(r[k]) == ObjectId:
-                    r[k] = str(r[k])
-
             rows.append(r)

         if f:
             ordered_columns = []
             for k in sorted(f, key=f.get):
-                ordered_columns.append(_get_column_by_name(columns, k))
+                ordered_columns.append(self._get_column_by_name(columns, k))

             columns = ordered_columns
@@ -176,7 +266,7 @@ class MongoDB(BaseQueryRunner):
             "rows": rows
         }
         error = None
-        json_data = json.dumps(data, cls=JSONEncoder)
+        json_data = json.dumps(data, cls=MongoDBJSONEncoder)

         return json_data, error
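A minimal, standalone sketch of the "$sort" rewriting introduced above: the query JSON carries the sort stage as an ordered list, and the runner converts it to a SON (ordered dict) before handing the pipeline to pymongo, so the sort key order is preserved.

    from bson.son import SON

    step = {"$sort": [{"name": "count", "direction": -1},
                      {"name": "_id", "direction": -1}]}

    sort_list = []
    for sort_item in step["$sort"]:
        sort_list.append((sort_item["name"], sort_item["direction"]))
    step["$sort"] = SON(sort_list)

    print(step["$sort"])  # SON([('count', -1), ('_id', -1)]) -- order preserved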


@@ -7,6 +7,24 @@ from redash.query_runner import *

 logger = logging.getLogger(__name__)

+types_map = {
+    0: TYPE_FLOAT,
+    1: TYPE_INTEGER,
+    2: TYPE_INTEGER,
+    3: TYPE_INTEGER,
+    4: TYPE_FLOAT,
+    5: TYPE_FLOAT,
+    7: TYPE_DATETIME,
+    8: TYPE_INTEGER,
+    9: TYPE_INTEGER,
+    10: TYPE_DATE,
+    12: TYPE_DATETIME,
+    15: TYPE_STRING,
+    16: TYPE_INTEGER,
+    246: TYPE_FLOAT,
+    253: TYPE_STRING,
+    254: TYPE_STRING,
+}
+
 class Mysql(BaseQueryRunner):
     @classmethod
@@ -85,32 +103,29 @@ class Mysql(BaseQueryRunner):
     def run_query(self, query):
         import MySQLdb

-        connection = MySQLdb.connect(host=self.configuration.get('host', ''),
-                                     user=self.configuration.get('user', ''),
-                                     passwd=self.configuration.get('passwd', ''),
-                                     db=self.configuration['db'],
-                                     port=self.configuration.get('port', 3306),
-                                     charset='utf8', use_unicode=True)
-        cursor = connection.cursor()
-        logger.debug("MySQL running query: %s", query)
-
+        connection = None
         try:
+            connection = MySQLdb.connect(host=self.configuration.get('host', ''),
+                                         user=self.configuration.get('user', ''),
+                                         passwd=self.configuration.get('passwd', ''),
+                                         db=self.configuration['db'],
+                                         port=self.configuration.get('port', 3306),
+                                         charset='utf8', use_unicode=True)
+            cursor = connection.cursor()
+            logger.debug("MySQL running query: %s", query)
             cursor.execute(query)

             data = cursor.fetchall()

-            cursor_desc = cursor.description
-            if cursor_desc is not None:
-                num_fields = len(cursor_desc)
-                column_names = [i[0] for i in cursor.description]
-
-                rows = [dict(zip(column_names, row)) for row in data]
-                # TODO: add types support
-                columns = [{'name': col_name,
-                            'friendly_name': col_name,
-                            'type': None} for col_name in column_names]
+            # TODO - very similar to pg.py
+            if cursor.description is not None:
+                columns_data = [(i[0], i[1]) for i in cursor.description]
+
+                rows = [dict(zip((c[0] for c in columns_data), row)) for row in data]
+                columns = [{'name': col[0],
+                            'friendly_name': col[0],
+                            'type': types_map.get(col[1], None)} for col in columns_data]

                 data = {'columns': columns, 'rows': rows}
                 json_data = json.dumps(data, cls=JSONEncoder)
@@ -129,8 +144,9 @@ class Mysql(BaseQueryRunner):
         except Exception as e:
             raise sys.exc_info()[1], None, sys.exc_info()[2]
         finally:
-            connection.close()
+            if connection:
+                connection.close()

         return json_data, error

 register(Mysql)


@@ -93,7 +93,7 @@ class PostgreSQL(BaseQueryRunner):
         results, error = self.run_query(query)
         if error is not None:
             raise Exception("Failed getting schema.")

         results = json.loads(results)
@@ -127,35 +127,38 @@ class PostgreSQL(BaseQueryRunner):
             columns = []
             duplicates_counter = 1

-            for column in cursor.description:
+            if cursor.description is not None:
+                for column in cursor.description:
                     # TODO: this deduplication needs to be generalized and reused in all query runners.
                     column_name = column.name
                     if column_name in column_names:
                         column_name += str(duplicates_counter)
                         duplicates_counter += 1

                     column_names.append(column_name)

                     columns.append({
                         'name': column_name,
                         'friendly_name': column_name,
                         'type': types_map.get(column.type_code, None)
                     })

                 rows = [dict(zip(column_names, row)) for row in cursor]

                 data = {'columns': columns, 'rows': rows}
-            json_data = json.dumps(data, cls=JSONEncoder)
-            error = None
-            cursor.close()
+                error = None
+                json_data = json.dumps(data, cls=JSONEncoder)
+            else:
+                error = 'Query completed but it returned no data.'
+                json_data = None
         except (select.error, OSError) as e:
             logging.exception(e)
             error = "Query interrupted. Please retry."
             json_data = None
         except psycopg2.DatabaseError as e:
             logging.exception(e)
+            json_data = None
             error = e.message
-            json_data = None
         except KeyboardInterrupt:
             connection.cancel()
             error = "Query cancelled by user."


@@ -0,0 +1,98 @@
import json
from redash.utils import JSONEncoder
from redash.query_runner import *
import logging
logger = logging.getLogger(__name__)
try:
from pyhive import presto
enabled = True
except ImportError:
logger.warning("Missing dependencies. Please install PyHive.")
logger.warning("You can use pip: pip install pyhive")
enabled = False
PRESTO_TYPES_MAPPING = {
"integer" : TYPE_INTEGER,
"long" : TYPE_INTEGER,
"bigint" : TYPE_INTEGER,
"float" : TYPE_FLOAT,
"double" : TYPE_FLOAT,
"boolean" : TYPE_BOOLEAN,
"string" : TYPE_STRING,
"varchar": TYPE_STRING,
"date" : TYPE_DATE,
}
class Presto(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'host': {
'type': 'string'
},
'port': {
'type': 'number'
},
'schema': {
'type': 'string'
},
'catalog': {
'type': 'string'
},
'username': {
'type': 'string'
}
},
'required': ['host']
}
@classmethod
def enabled(cls):
return enabled
@classmethod
def annotate_query(cls):
return False
@classmethod
def type(cls):
return "presto"
def __init__(self, configuration_json):
super(Presto, self).__init__(configuration_json)
def run_query(self, query):
connection = presto.connect(
host=self.configuration.get('host', ''),
port=self.configuration.get('port', 8080),
username=self.configuration.get('username', 'redash'),
catalog=self.configuration.get('catalog', 'hive'),
schema=self.configuration.get('schema', 'default'))
cursor = connection.cursor()
try:
cursor.execute(query)
columns_data = [(row[0], row[1]) for row in cursor.description]
columns = [{'name': col[0],
'friendly_name': col[0],
'type': PRESTO_TYPES_MAPPING.get(col[1], None)} for col in columns_data]
rows = [dict(zip(([c[0] for c in columns_data]), r)) for i, r in enumerate(cursor.fetchall())]
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
error = None
except Exception, ex:
json_data = None
error = ex.message
return json_data, error
register(Presto)


@@ -1,6 +1,8 @@
 import sys
+import datetime
 import json
 import logging
+import weakref

 from redash.query_runner import *
 from redash import models
@@ -12,93 +14,30 @@ logger = logging.getLogger(__name__)
 from RestrictedPython import compile_restricted
 from RestrictedPython.Guards import safe_builtins

-ALLOWED_MODULES = {}

+class CustomPrint(object):
+    """ CustomPrint redirect "print" calls to be sent as "log" on the result object """
+    def __init__(self):
+        self.enabled = True
+        self.lines = []

-def custom_write(obj):
-    """
-    Custom hooks which controls the way objects/lists/tuples/dicts behave in
-    RestrictedPython
-    """
-    return obj
+    def write(self, text):
+        if self.enabled:
+            if text and text.strip():
+                log_line = "[{0}] {1}".format(datetime.datetime.utcnow().isoformat(), text)
+                self.lines.append(log_line)

+    def enable(self):
+        self.enabled = True

-def custom_import(name, globals=None, locals=None, fromlist=(), level=0):
-    if name in ALLOWED_MODULES:
-        m = None
-        if ALLOWED_MODULES[name] is None:
-            m = importlib.import_module(name)
-            ALLOWED_MODULES[name] = m
-        else:
-            m = ALLOWED_MODULES[name]
-
-        return m
-
-    raise Exception("'{0}' is not configured as a supported import module".format(name))
-
-
-def custom_get_item(obj, key):
-    return obj[key]
-
-
-def get_query_result(query_id):
-    try:
-        query = models.Query.get_by_id(query_id)
-    except models.Query.DoesNotExist:
-        raise Exception("Query id %s does not exist." % query_id)
-
-    if query.latest_query_data is None:
-        raise Exception("Query does not have results yet.")
-
-    if query.latest_query_data.data is None:
-        raise Exception("Query does not have results yet.")
-
-    return json.loads(query.latest_query_data.data)
-
-
-def execute_query(data_source_name_or_id, query):
-    try:
-        if type(data_source_name_or_id) == int:
-            data_source = models.DataSource.get_by_id(data_source_name_or_id)
-        else:
-            data_source = models.DataSource.get(models.DataSource.name==data_source_name_or_id)
-    except models.DataSource.DoesNotExist:
-        raise Exception("Wrong data source name/id: %s." % data_source_name_or_id)
-
-    query_runner = get_query_runner(data_source.type, data_source.options)
-
-    data, error = query_runner.run_query(query)
-    if error is not None:
-        raise Exception(error)
-
-    # TODO: allow avoiding the json.dumps/loads in same process
-    return json.loads(data)
-
-
-def add_result_column(result, column_name, friendly_name, column_type):
-    """ Helper function to add columns inside a Python script running in re:dash in an easier way """
-    if column_type not in SUPPORTED_COLUMN_TYPES:
-        raise Exception("'{0}' is not a supported column type".format(column_type))
-
-    if not "columns" in result:
-        result["columns"] = []
-
-    result["columns"].append({
-        "name" : column_name,
-        "friendly_name" : friendly_name,
-        "type" : column_type
-    })
-
-
-def add_result_row(result, values):
-    if not "rows" in result:
-        result["rows"] = []
-
-    result["rows"].append(values)
+    def disable(self):
+        self.enabled = False

+    def __call__(self):
+        return self

 class Python(BaseQueryRunner):
+    """
+    This is very, very unsafe. Use at your own risk with people you really trust.
+    """
     @classmethod
     def configuration_schema(cls):
         return {
@@ -120,15 +59,96 @@ class Python(BaseQueryRunner):
         return False

     def __init__(self, configuration_json):
-        global ALLOWED_MODULES
-
         super(Python, self).__init__(configuration_json)

         self.syntax = "python"
+        self._allowed_modules = {}
+        self._script_locals = { "result" : { "rows" : [], "columns" : [], "log" : [] } }
+        self._enable_print_log = True
+        self._custom_print = CustomPrint()

         if self.configuration.get("allowedImportModules", None):
             for item in self.configuration["allowedImportModules"].split(","):
-                ALLOWED_MODULES[item] = None
+                self._allowed_modules[item] = None

+    def custom_import(self, name, globals=None, locals=None, fromlist=(), level=0):
+        if name in self._allowed_modules:
+            m = None
+            if self._allowed_modules[name] is None:
+                m = importlib.import_module(name)
+                self._allowed_modules[name] = m
+            else:
+                m = self._allowed_modules[name]
+
+            return m
+
+        raise Exception("'{0}' is not configured as a supported import module".format(name))
+
+    def custom_write(self, obj):
+        """
+        Custom hooks which controls the way objects/lists/tuples/dicts behave in
+        RestrictedPython
+        """
+        return obj
+
+    def custom_get_item(self, obj, key):
+        return obj[key]
+
+    def custom_get_iter(self, obj):
+        return iter(obj)
+
+    def add_result_column(self, result, column_name, friendly_name, column_type):
+        """ Helper function to add columns inside a Python script running in re:dash in an easier way """
+        if column_type not in SUPPORTED_COLUMN_TYPES:
+            raise Exception("'{0}' is not a supported column type".format(column_type))
+
+        if not "columns" in result:
+            result["columns"] = []
+
+        result["columns"].append({
+            "name" : column_name,
+            "friendly_name" : friendly_name,
+            "type" : column_type
+        })
+
+    def add_result_row(self, result, values):
+        if not "rows" in result:
+            result["rows"] = []
+
+        result["rows"].append(values)
+
+    def execute_query(self, data_source_name_or_id, query):
+        try:
+            if type(data_source_name_or_id) == int:
+                data_source = models.DataSource.get_by_id(data_source_name_or_id)
+            else:
+                data_source = models.DataSource.get(models.DataSource.name==data_source_name_or_id)
+        except models.DataSource.DoesNotExist:
+            raise Exception("Wrong data source name/id: %s." % data_source_name_or_id)
+
+        query_runner = get_query_runner(data_source.type, data_source.options)
+
+        data, error = query_runner.run_query(query)
+        if error is not None:
+            raise Exception(error)
+
+        # TODO: allow avoiding the json.dumps/loads in same process
+        return json.loads(data)
+
+    def get_query_result(self, query_id):
+        try:
+            query = models.Query.get_by_id(query_id)
+        except models.Query.DoesNotExist:
+            raise Exception("Query id %s does not exist." % query_id)
+
+        if query.latest_query_data is None:
+            raise Exception("Query does not have results yet.")
+
+        if query.latest_query_data.data is None:
+            raise Exception("Query does not have results yet.")
+
+        return json.loads(query.latest_query_data.data)

     def run_query(self, query):
         try:
@@ -136,21 +156,23 @@ class Python(BaseQueryRunner):
             code = compile_restricted(query, '<string>', 'exec')

-            safe_builtins["_write_"] = custom_write
-            safe_builtins["__import__"] = custom_import
+            safe_builtins["_write_"] = self.custom_write
+            safe_builtins["__import__"] = self.custom_import
             safe_builtins["_getattr_"] = getattr
             safe_builtins["getattr"] = getattr
             safe_builtins["_setattr_"] = setattr
             safe_builtins["setattr"] = setattr
-            safe_builtins["_getitem_"] = custom_get_item
-
-            script_locals = { "result" : { "rows" : [], "columns" : [] } }
+            safe_builtins["_getitem_"] = self.custom_get_item
+            safe_builtins["_getiter_"] = self.custom_get_iter
+            safe_builtins["_print_"] = self._custom_print

             restricted_globals = dict(__builtins__=safe_builtins)
-            restricted_globals["get_query_result"] = get_query_result
-            restricted_globals["execute_query"] = execute_query
-            restricted_globals["add_result_column"] = add_result_column
-            restricted_globals["add_result_row"] = add_result_row
+            restricted_globals["get_query_result"] = self.get_query_result
+            restricted_globals["execute_query"] = self.execute_query
+            restricted_globals["add_result_column"] = self.add_result_column
+            restricted_globals["add_result_row"] = self.add_result_row
+            restricted_globals["disable_print_log"] = self._custom_print.disable
+            restricted_globals["enable_print_log"] = self._custom_print.enable

             restricted_globals["TYPE_DATETIME"] = TYPE_DATETIME
             restricted_globals["TYPE_BOOLEAN"] = TYPE_BOOLEAN
@@ -163,17 +185,17 @@ class Python(BaseQueryRunner):
             # One option is to use ETA with Celery + timeouts on workers
             # And replacement of worker process every X requests handled.

-            exec(code) in restricted_globals, script_locals
+            exec(code) in restricted_globals, self._script_locals

-            if script_locals['result'] is None:
-                raise Exception("result wasn't set to value.")
-
-            json_data = json.dumps(script_locals['result'])
+            result = self._script_locals['result']
+            result['log'] = self._custom_print.lines
+            json_data = json.dumps(result)

         except KeyboardInterrupt:
             error = "Query cancelled by user."
             json_data = None
         except Exception as e:
-            raise sys.exc_info()[1], None, sys.exc_info()[2]
+            error = str(e)
+            json_data = None

         return json_data, error
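For illustration, a script a user might submit to the Python data source after this change. It is meant to run inside the runner's restricted sandbox (so no imports are needed for the helpers); the data source name and query are placeholders, and it assumes the remaining TYPE_* constants are exposed the same way as TYPE_DATETIME/TYPE_BOOLEAN above.

    # Runs inside the Python query runner sandbox: "result" is pre-seeded by the runner,
    # and print output is collected into result["log"] instead of stdout.
    add_result_column(result, "country", "Country", TYPE_STRING)
    add_result_column(result, "total", "Total", TYPE_INTEGER)

    data = execute_query("my datasource", "SELECT country, count(*) AS total FROM users GROUP BY 1")

    for row in data["rows"]:
        add_result_row(result, {"country": row["country"], "total": row["total"]})

    print "fetched {0} rows".format(len(data["rows"]))  # ends up in result["log"]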

redash/saml_auth.py (new file, 145 lines)

@@ -0,0 +1,145 @@
# Copyright 2015 Okta, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from flask.ext.login import login_user
import requests
from flask import redirect, url_for, Blueprint, request
from flask_oauth import OAuth
from redash import models, settings
from saml2 import (
BINDING_HTTP_POST,
BINDING_HTTP_REDIRECT,
entity,
)
from saml2.client import Saml2Client
from saml2.config import Config as Saml2Config
logger = logging.getLogger('saml_auth')
blueprint = Blueprint('saml_auth', __name__)
def get_saml_client():
'''
Return SAML configuration.
The configuration is a hash for use by saml2.config.Config
'''
if settings.SAML_CALLBACK_SERVER_NAME:
acs_url=settings.SAML_CALLBACK_SERVER_NAME + url_for("saml_auth.idp_initiated")
else:
acs_url = url_for("saml_auth.idp_initiated",_external=True)
# NOTE:
# Ideally, this should fetch the metadata and pass it to
# PySAML2 via the "inline" metadata type.
# However, this method doesn't seem to work on PySAML2 v2.4.0
#
# SAML metadata changes very rarely. On a production system,
# this data should be cached as appropriate for your production system.
rv = requests.get(settings.SAML_METADATA_URL)
import tempfile
tmp = tempfile.NamedTemporaryFile()
f = open(tmp.name, 'w')
f.write(rv.text)
f.close()
saml_settings = {
'metadata': {
# 'inline': metadata,
"local": [tmp.name]
},
'service': {
'sp': {
'endpoints': {
'assertion_consumer_service': [
(acs_url, BINDING_HTTP_REDIRECT),
(acs_url, BINDING_HTTP_POST)
],
},
# Don't verify that the incoming requests originate from us via
# the built-in cache for authn request ids in pysaml2
'allow_unsolicited': True,
# Don't sign authn requests, since signed requests only make
# sense in a situation where you control both the SP and IdP
'authn_requests_signed': False,
'logout_requests_signed': True,
'want_assertions_signed': True,
'want_response_signed': False,
},
},
}
spConfig = Saml2Config()
spConfig.load(saml_settings)
spConfig.allow_unknown_attributes = True
saml_client = Saml2Client(config=spConfig)
tmp.close()
return saml_client
@blueprint.route("/saml/callback", methods=['POST'])
def idp_initiated():
saml_client = get_saml_client()
authn_response = saml_client.parse_authn_request_response(
request.form['SAMLResponse'],
entity.BINDING_HTTP_POST)
authn_response.get_identity()
user_info = authn_response.get_subject()
email = user_info.text
name = "%s %s" % (authn_response.ava['FirstName'][0], authn_response.ava['LastName'][0])
# This is what as known as "Just In Time (JIT) provisioning".
# What that means is that, if a user in a SAML assertion
# isn't in the user store, we create that user first, then log them in
try:
user_object = models.User.get(models.User.email == email)
if user_object.name != name:
logger.debug("Updating user name (%r -> %r)", user_object.name, name)
user_object.name = name
user_object.save()
except models.User.DoesNotExist:
logger.debug("Creating user object (%r)", name)
user_object = models.User.create(name=name, email=email, groups=models.User.DEFAULT_GROUPS)
login_user(user_object, remember=True)
url = url_for('index')
return redirect(url)
@blueprint.route("/saml/login")
def sp_initiated():
if not settings.SAML_METADATA_URL:
logger.error("Cannot invoke saml endpoint without metadata url in settings.")
return redirect(url_for('index'))
saml_client = get_saml_client()
reqid, info = saml_client.prepare_for_authenticate()
redirect_url = None
# Select the IdP URL to send the AuthN request to
for key, value in info['headers']:
if key == 'Location':
redirect_url = value
response = redirect(redirect_url, code=302)
# NOTE:
# I realize I _technically_ don't need to set Cache-Control or Pragma:
# http://stackoverflow.com/a/5494469
# However, Section 3.2.3.2 of the SAML spec suggests they are set:
# http://docs.oasis-open.org/security/saml/v2.0/saml-bindings-2.0-os.pdf
# We set those headers here as a "belt and suspenders" approach,
# since enterprise environments don't always conform to RFCs
response.headers['Cache-Control'] = 'no-cache, no-store'
response.headers['Pragma'] = 'no-cache'
return response


@@ -32,10 +32,25 @@ def array_from_string(str):
     return array

+def set_from_string(str):
+    return set(array_from_string(str))
+
 def parse_boolean(str):
     return json.loads(str.lower())

+def all_settings():
+    from types import ModuleType
+
+    settings = {}
+    for name, item in globals().iteritems():
+        if not callable(item) and not name.startswith("__") and not isinstance(item, ModuleType):
+            settings[name] = item
+
+    return settings
+
 NAME = os.environ.get('REDASH_NAME', 're:dash')

 REDIS_URL = os.environ.get('REDASH_REDIS_URL', "redis://localhost:6379/0")
@@ -53,19 +68,23 @@ CELERY_BACKEND = os.environ.get("REDASH_CELERY_BACKEND", REDIS_URL)

 # The following enables periodic job (every 5 minutes) of removing unused query results. Behind this "feature flag" until
 # proved to be "safe".
-QUERY_RESULTS_CLEANUP_ENABLED = parse_boolean(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_ENABLED", "false"))
+QUERY_RESULTS_CLEANUP_ENABLED = parse_boolean(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_ENABLED", "true"))

-AUTH_TYPE = os.environ.get("REDASH_AUTH_TYPE", "hmac")
+AUTH_TYPE = os.environ.get("REDASH_AUTH_TYPE", "api_key")
 PASSWORD_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_PASSWORD_LOGIN_ENABLED", "true"))

 # Google Apps domain to allow access from; any user with email in this Google Apps will be allowed
 # access
-GOOGLE_APPS_DOMAIN = os.environ.get("REDASH_GOOGLE_APPS_DOMAIN", "")
+GOOGLE_APPS_DOMAIN = set_from_string(os.environ.get("REDASH_GOOGLE_APPS_DOMAIN", ""))

 GOOGLE_CLIENT_ID = os.environ.get("REDASH_GOOGLE_CLIENT_ID", "")
 GOOGLE_CLIENT_SECRET = os.environ.get("REDASH_GOOGLE_CLIENT_SECRET", "")
 GOOGLE_OAUTH_ENABLED = GOOGLE_CLIENT_ID and GOOGLE_CLIENT_SECRET

+SAML_METADATA_URL = os.environ.get("REDASH_SAML_METADATA_URL", "")
+SAML_LOGIN_ENABLED = SAML_METADATA_URL != ""
+SAML_CALLBACK_SERVER_NAME = os.environ.get("REDASH_SAML_CALLBACK_SERVER_NAME", "")
+
 STATIC_ASSETS_PATH = fix_assets_path(os.environ.get("REDASH_STATIC_ASSETS_PATH", "../rd_ui/app/"))
 JOB_EXPIRY_TIME = int(os.environ.get("REDASH_JOB_EXPIRY_TIME", 3600 * 6))
 COOKIE_SECRET = os.environ.get("REDASH_COOKIE_SECRET", "c292a0a3aa32397cdb050e233733900f")
@@ -73,15 +92,41 @@ LOG_LEVEL = os.environ.get("REDASH_LOG_LEVEL", "INFO")
 CLIENT_SIDE_METRICS = parse_boolean(os.environ.get("REDASH_CLIENT_SIDE_METRICS", "false"))
 ANALYTICS = os.environ.get("REDASH_ANALYTICS", "")

+# Mail settings:
+MAIL_SERVER = os.environ.get('REDASH_MAIL_SERVER', 'localhost')
+MAIL_PORT = int(os.environ.get('REDASH_MAIL_PORT', 25))
+MAIL_USE_TLS = parse_boolean(os.environ.get('REDASH_MAIL_USE_TLS', 'false'))
+MAIL_USE_SSL = parse_boolean(os.environ.get('REDASH_MAIL_USE_SSL', 'false'))
+MAIL_USERNAME = os.environ.get('REDASH_MAIL_USERNAME', None)
+MAIL_PASSWORD = os.environ.get('REDASH_MAIL_PASSWORD', None)
+MAIL_DEFAULT_SENDER = os.environ.get('REDASH_MAIL_DEFAULT_SENDER', None)
+MAIL_MAX_EMAILS = os.environ.get('REDASH_MAIL_MAX_EMAILS', None)
+MAIL_ASCII_ATTACHMENTS = parse_boolean(os.environ.get('REDASH_MAIL_ASCII_ATTACHMENTS', 'false'))
+
+HOST = os.environ.get('REDASH_HOST', '')
+
+# CORS settings for the Query Result API (and possbily future external APIs).
+# In most cases all you need to do is set REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN
+# to the calling domain (or domains in a comma separated list).
+ACCESS_CONTROL_ALLOW_ORIGIN = set_from_string(os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN", ""))
+ACCESS_CONTROL_ALLOW_CREDENTIALS = parse_boolean(os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_CREDENTIALS", "false"))
+ACCESS_CONTROL_REQUEST_METHOD = os.environ.get("REDASH_CORS_ACCESS_CONTROL_REQUEST_METHOD", "GET, POST, PUT")
+ACCESS_CONTROL_ALLOW_HEADERS = os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_HEADERS", "Content-Type")
+
 # Query Runners
 QUERY_RUNNERS = array_from_string(os.environ.get("REDASH_ENABLED_QUERY_RUNNERS", ",".join([
     'redash.query_runner.big_query',
+    'redash.query_runner.google_spreadsheets',
     'redash.query_runner.graphite',
     'redash.query_runner.mongodb',
     'redash.query_runner.mysql',
     'redash.query_runner.pg',
     'redash.query_runner.script',
     'redash.query_runner.url',
+    'redash.query_runner.influx_db',
+    'redash.query_runner.elasticsearch',
+    'redash.query_runner.presto',
+    'redash.query_runner.impala_ds',
 ])))

 # Features:
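A quick sketch of the two helpers this settings change leans on: set_from_string() turns a comma-separated environment value into a set (used for the Google Apps domains and CORS origins above), and all_settings() is what lets wsgi.py push every module-level setting into app.config. The values are illustrative.

    from redash import settings

    print(settings.set_from_string("example.com,example.org"))  # set(['example.com', 'example.org'])
    print(settings.parse_boolean("TRUE"))                       # True
    print("MAIL_SERVER" in settings.all_settings())             # True -- picked up by app.config.update()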


@@ -1,11 +1,11 @@
 import time
-import datetime
 import logging
+from flask.ext.mail import Message
 import redis
 from celery import Task
 from celery.result import AsyncResult
 from celery.utils.log import get_task_logger
-from redash import redis_connection, models, statsd_client, settings
+from redash import redis_connection, models, statsd_client, settings, utils, mail
 from redash.utils import gen_query_hash
 from redash.worker import celery
 from redash.query_runner import get_query_runner
@@ -223,7 +223,7 @@ def cleanup_query_results():
 @celery.task(base=BaseTask)
 def refresh_schemas():
     """
-    Refershs the datasources schema.
+    Refreshs the datasources schema.
     """

     for ds in models.DataSource.all():
@@ -231,6 +231,39 @@ def refresh_schemas():
         ds.get_schema(refresh=True)

+@celery.task(bind=True, base=BaseTask)
+def check_alerts_for_query(self, query_id):
+    from redash.wsgi import app
+
+    logger.debug("Checking query %d for alerts", query_id)
+    query = models.Query.get_by_id(query_id)
+    for alert in query.alerts:
+        alert.query = query
+        new_state = alert.evaluate()
+        if new_state != alert.state:
+            logger.info("Alert %d new state: %s", alert.id, new_state)
+            old_state = alert.state
+            alert.update_instance(state=new_state)
+
+            if old_state == models.Alert.UNKNOWN_STATE and new_state == models.Alert.OK_STATE:
+                logger.debug("Skipping notification (previous state was unknown and now it's ok).")
+                continue
+
+            # message = Message
+            recipients = [s.email for s in alert.subscribers()]
+            logger.debug("Notifying: %s", recipients)
+            html = """
+            Check <a href="{host}/alerts/{alert_id}">alert</a> / check <a href="{host}/queries/{query_id}">query</a>.
+            """.format(host=settings.HOST, alert_id=alert.id, query_id=query.id)
+
+            with app.app_context():
+                message = Message(recipients=recipients,
+                                  subject="[{1}] {0}".format(alert.name, new_state.upper()),
+                                  html=html)
+                mail.send(message)
+
 @celery.task(bind=True, base=BaseTask, track_started=True)
 def execute_query(self, query, data_source_id, metadata):
     start_time = time.time()
@@ -252,11 +285,11 @@ def execute_query(self, query, data_source_id, metadata):
         metadata['Query Hash'] = query_hash
         metadata['Queue'] = self.request.delivery_info['routing_key']

-        annotation = ", ".join(["{}: {}".format(k, v) for k, v in metadata.iteritems()])
-
-        logging.debug("Annotation: %s", annotation)
-
-        annotated_query = "/* {} */ {}".format(annotation, query)
+        annotation = u", ".join([u"{}: {}".format(k, v) for k, v in metadata.iteritems()])
+
+        logging.debug(u"Annotation: %s", annotation)
+
+        annotated_query = u"/* {} */ {}".format(annotation, query)
     else:
         annotated_query = query
@@ -272,7 +305,9 @@ def execute_query(self, query, data_source_id, metadata):
         redis_connection.delete(QueryTask._job_lock_id(query_hash, data_source.id))

     if not error:
-        query_result = models.QueryResult.store_result(data_source.id, query_hash, query, data, run_time, datetime.datetime.utcnow())
+        query_result, updated_query_ids = models.QueryResult.store_result(data_source.id, query_hash, query, data, run_time, utils.utcnow())
+        for query_id in updated_query_ids:
+            check_alerts_for_query.delay(query_id)
     else:
         raise Exception(error)


@@ -4,9 +4,11 @@ import codecs
 import decimal
 import datetime
 import json
+import random
 import re
 import hashlib
 import sqlparse
+import pytz

 COMMENTS_REGEX = re.compile("/\*.*?\*/")
@@ -62,6 +64,14 @@ class SQLMetaData(object):
         return False

+def utcnow():
+    """Return datetime.now value with timezone specified.
+
+    Without the timezone data, when the timestamp stored to the database it gets the current timezone of the server,
+    which leads to errors in calculations.
+    """
+    return datetime.datetime.now(pytz.utc)
+
 def slugify(s):
     return re.sub('[^a-z0-9_\-]+', '-', s.lower())
@@ -79,6 +89,14 @@ def gen_query_hash(sql):
     return hashlib.md5(sql.encode('utf-8')).hexdigest()

+def generate_token(length):
+    chars = ('abcdefghijklmnopqrstuvwxyz'
+             'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+             '0123456789')
+
+    rand = random.SystemRandom()
+    return ''.join(rand.choice(chars) for x in range(length))
+
 class JSONEncoder(json.JSONEncoder):
     """Custom JSON encoding class, to handle Decimal and datetime.date instances.
     """
@@ -86,9 +104,9 @@ class JSONEncoder(json.JSONEncoder):
         if isinstance(o, decimal.Decimal):
             return float(o)

-        if isinstance(o, datetime.date):
+        if isinstance(o, (datetime.date, datetime.time, datetime.timedelta)):
             return o.isoformat()

         super(JSONEncoder, self).default(o)
@@ -128,4 +146,4 @@ class UnicodeWriter:
     def writerows(self, rows):
         for row in rows:
             self.writerow(row)


@@ -1,8 +1,9 @@
 import json

 from flask import Flask, make_response
+from werkzeug.wrappers import Response
 from flask.ext.restful import Api

-from redash import settings, utils
+from redash import settings, utils, mail
 from redash.models import db
 from redash.admin import init_admin
@@ -21,13 +22,18 @@ init_admin(app)
 # configure our database
 settings.DATABASE_CONFIG.update({'threadlocals': True})
 app.config['DATABASE'] = settings.DATABASE_CONFIG
+app.config.update(settings.all_settings())
 db.init_app(app)
+mail.init_app(app)

 from redash.authentication import setup_authentication
-auth = setup_authentication(app)
+setup_authentication(app)

 @api.representation('application/json')
 def json_representation(data, code, headers=None):
+    # Flask-Restful checks only for flask.Response but flask-login uses werkzeug.wrappers.Response
+    if isinstance(data, Response):
+        return data
+
     resp = make_response(json.dumps(data, cls=utils.JSONEncoder), code)
     resp.headers.extend(headers or {})
     return resp


@@ -1,8 +1,9 @@
 Flask==0.10.1
 Flask-Admin==1.1.0
 Flask-RESTful==0.2.10
-Flask-Login==0.2.9
+Flask-Login==0.2.11
 Flask-OAuth==0.12
+flask-mail==0.9.1
 passlib==1.6.2
 Jinja2==2.7.2
 MarkupSafe==0.18
@@ -28,3 +29,6 @@ jsonschema==2.4.0
 click==3.3
 RestrictedPython==3.6.0
 wtf-peewee==0.2.3
+pysaml2==2.4.0
+pycrypto==2.6.1
+funcy==1.5


@@ -98,10 +98,8 @@ if [ ! -f "/opt/redash/.env" ]; then
 fi
 # Install latest version
-# REDASH_VERSION=${REDASH_VERSION-0.4.0.b589}
-# modified by @fedex1 3/15/2015 seems to be the latest version at this point in time.
-REDASH_VERSION=${REDASH_VERSION-0.6.0.b722}
-LATEST_URL="https://github.com/EverythingMe/redash/releases/download/v${REDASH_VERSION/.b/%2Bb}/redash.$REDASH_VERSION.tar.gz"
+REDASH_VERSION=${REDASH_VERSION-0.6.3.b906}
+LATEST_URL="https://github.com/EverythingMe/redash/releases/download/v${REDASH_VERSION}/redash.$REDASH_VERSION.tar.gz"
 VERSION_DIR="/opt/redash/redash.$REDASH_VERSION"
 REDASH_TARBALL=/tmp/redash.tar.gz
 REDASH_TARBALL=/tmp/redash.tar.gz
@@ -145,6 +143,7 @@ if [ $pg_user_exists -ne 0 ]; then
 REDASH_READER_PASSWORD=$(pwgen -1)
 sudo -u postgres psql -c "CREATE ROLE redash_reader WITH PASSWORD '$REDASH_READER_PASSWORD' NOCREATEROLE NOCREATEDB NOSUPERUSER LOGIN"
 sudo -u redash psql -c "grant select(id,name,type) ON data_sources to redash_reader;" redash
+sudo -u redash psql -c "grant select(id,name) ON users to redash_reader;" redash
 sudo -u redash psql -c "grant select on activity_log, events, queries, dashboards, widgets, visualizations, query_results to redash_reader;" redash
 cd /opt/redash/current
@@ -162,6 +161,18 @@ pip install MySQL-python==1.2.5
 # Mongo dependencies:
 pip install pymongo==2.7.2
+# Google spreadsheets:
+pip install gspread
+# InfluxDB
+pip install influxdb
+# Presto
+pip install pyhive
+# Impala
+pip install impyla
 # Setup supervisord + sysv init startup script
 sudo -u redash mkdir -p /opt/redash/supervisord
 pip install supervisor==3.1.2 # TODO: move to requirements.txt
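The extra pip installs provide drivers for the additional query runners named in the script comments (Google Spreadsheets, InfluxDB, Presto via PyHive, Impala). As an illustrative sanity check, a short script like the following can confirm the optional modules import on a freshly bootstrapped host; it is not part of the bootstrap script:

# Illustrative check that the optional query-runner drivers installed above can be imported.
import importlib

OPTIONAL_DRIVERS = ['gspread', 'influxdb', 'pyhive', 'impala', 'pymongo', 'MySQLdb']

for name in OPTIONAL_DRIVERS:
    try:
        importlib.import_module(name)
        print('{0}: OK'.format(name))
    except ImportError as error:
        print('{0}: missing ({1})'.format(name, error))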

View File

@@ -1,9 +1,6 @@
-export REDASH_CONNECTION_ADAPTER=pg
-export REDASH_CONNECTION_STRING="dbname=redash"
 export REDASH_STATIC_ASSETS_PATH="../rd_ui/dist/"
 export REDASH_LOG_LEVEL="INFO"
-export REDASH_WORKERS_COUNT=6
 export REDASH_REDIS_URL=redis://localhost:6379/1
 export REDASH_DATABASE_URL="postgresql://redash"
 export REDASH_COOKIE_SECRET=veryverysecret
 export REDASH_GOOGLE_APPS_DOMAIN=

View File

@@ -20,8 +20,12 @@ autorestart=true
 stdout_logfile=/opt/redash/logs/api.log
 stderr_logfile=/opt/redash/logs/api_error.log
+# There are two queue types here: one for ad-hoc queries, and one for the refresh of scheduled queries
+# (note that "scheduled_queries" appears only in the queue list of "redash_celery_scheduled").
+# The default concurrency level for each is 2 (-c2), you can increase based on your machine's resources.
 [program:redash_celery]
-command=/opt/redash/current/bin/run celery worker --app=redash.worker --beat -Qqueries,celery,scheduled_queries
+command=/opt/redash/current/bin/run celery worker --app=redash.worker --beat -c2 -Qqueries,celery
 process_name=redash_celery
 numprocs=1
 priority=999
@@ -29,3 +33,13 @@ autostart=true
 autorestart=true
 stdout_logfile=/opt/redash/logs/celery.log
 stderr_logfile=/opt/redash/logs/celery_error.log
+[program:redash_celery_scheduled]
+command=/opt/redash/current/bin/run celery worker --app=redash.worker -c2 -Qscheduled_queries
+process_name=redash_celery_scheduled
+numprocs=1
+priority=999
+autostart=true
+autorestart=true
+stdout_logfile=/opt/redash/logs/celery.log
+stderr_logfile=/opt/redash/logs/celery_error.log
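The comment block above explains the split these two program sections implement: redash_celery (which also runs beat) consumes the queries and celery queues for ad-hoc work, while redash_celery_scheduled consumes only scheduled_queries, so a backlog of scheduled refreshes cannot starve interactive queries. A minimal sketch of how Celery routes a task to a named queue; the task, module name and broker URL here are illustrative, not redash's actual worker code:

# Minimal sketch of per-queue routing with Celery.
from celery import Celery

celery = Celery('worker_sketch', broker='redis://localhost:6379/1')  # placeholder broker URL

@celery.task
def execute_query_sketch(query_text):
    # Stand-in for the real query-execution task.
    return len(query_text)

if __name__ == '__main__':
    # Requires a running broker; shown only to illustrate the queue names.
    execute_query_sketch.apply_async(args=['SELECT 1'], queue='queries')            # ad-hoc worker
    execute_query_sketch.apply_async(args=['SELECT 1'], queue='scheduled_queries')  # scheduled worker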

View File

@@ -1,6 +1,5 @@
-import datetime
 import redash.models
-from redash.utils import gen_query_hash
+from redash.utils import gen_query_hash, utcnow
 class ModelFactory(object):
@@ -66,7 +65,7 @@ query_factory = ModelFactory(redash.models.Query,
 query_result_factory = ModelFactory(redash.models.QueryResult,
                                     data='{"columns":{}, "rows":[]}',
                                     runtime=1,
-                                    retrieved_at=datetime.datetime.utcnow,
+                                    retrieved_at=utcnow,
                                     query="SELECT 1",
                                     query_hash=gen_query_hash('SELECT 1'),
                                     data_source=data_source_factory.create)
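retrieved_at now defaults to redash.utils.utcnow instead of datetime.datetime.utcnow, which matches the timezone-aware comparisons in the model and task tests further down. One plausible shape for such a helper, assuming pytz is available (the actual redash.utils implementation may differ):

# One plausible implementation, assuming pytz; redash.utils.utcnow may differ.
import datetime
import pytz

def utcnow():
    # datetime.utcnow() is naive and cannot be compared with the tz-aware
    # timestamps psycopg2 returns, hence a tz-aware "now".
    return datetime.datetime.now(tz=pytz.utc)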

View File

@@ -1,9 +1,10 @@
-from flask.ext.login import current_user
+from flask import request
 from mock import patch
+import time
 from tests import BaseTestCase
 from redash import models
 from redash.google_oauth import create_and_login_user
-from redash.authentication import ApiKeyAuthentication
+from redash.authentication import api_key_load_user_from_request, hmac_load_user_from_request, sign
 from tests.factories import user_factory, query_factory
 from redash.wsgi import app
@@ -18,29 +19,72 @@ class TestApiKeyAuthentication(BaseTestCase):
         self.query = query_factory.create(api_key=self.api_key)
     def test_no_api_key(self):
-        auth = ApiKeyAuthentication()
         with app.test_client() as c:
             rv = c.get('/api/queries/{0}'.format(self.query.id))
-            self.assertFalse(auth.verify_authentication())
+            self.assertIsNone(api_key_load_user_from_request(request))
     def test_wrong_api_key(self):
-        auth = ApiKeyAuthentication()
         with app.test_client() as c:
             rv = c.get('/api/queries/{0}'.format(self.query.id), query_string={'api_key': 'whatever'})
-            self.assertFalse(auth.verify_authentication())
+            self.assertIsNone(api_key_load_user_from_request(request))
     def test_correct_api_key(self):
-        auth = ApiKeyAuthentication()
         with app.test_client() as c:
             rv = c.get('/api/queries/{0}'.format(self.query.id), query_string={'api_key': self.api_key})
-            self.assertTrue(auth.verify_authentication())
+            self.assertIsNotNone(api_key_load_user_from_request(request))
    def test_no_query_id(self):
-        auth = ApiKeyAuthentication()
         with app.test_client() as c:
             rv = c.get('/api/queries', query_string={'api_key': self.api_key})
-            self.assertFalse(auth.verify_authentication())
+            self.assertIsNone(api_key_load_user_from_request(request))
+    def test_user_api_key(self):
+        user = user_factory.create(api_key="user_key")
+        with app.test_client() as c:
+            rv = c.get('/api/queries/', query_string={'api_key': user.api_key})
+            self.assertEqual(user.id, api_key_load_user_from_request(request).id)
+class TestHMACAuthentication(BaseTestCase):
+    #
+    # This is a bad way to write these tests, but the way Flask works doesn't make it easy to write them properly...
+    #
+    def setUp(self):
+        super(TestHMACAuthentication, self).setUp()
+        self.api_key = 10
+        self.query = query_factory.create(api_key=self.api_key)
+        self.path = '/api/queries/{0}'.format(self.query.id)
+        self.expires = time.time() + 1800
+    def signature(self, expires):
+        return sign(self.query.api_key, self.path, expires)
+    def test_no_signature(self):
+        with app.test_client() as c:
+            rv = c.get(self.path)
+            self.assertIsNone(hmac_load_user_from_request(request))
+    def test_wrong_signature(self):
+        with app.test_client() as c:
+            rv = c.get(self.path, query_string={'signature': 'whatever', 'expires': self.expires})
+            self.assertIsNone(hmac_load_user_from_request(request))
+    def test_correct_signature(self):
+        with app.test_client() as c:
+            rv = c.get('/api/queries/{0}'.format(self.query.id), query_string={'signature': self.signature(self.expires), 'expires': self.expires})
+            self.assertIsNotNone(hmac_load_user_from_request(request))
+    def test_no_query_id(self):
+        with app.test_client() as c:
+            rv = c.get('/api/queries', query_string={'api_key': self.api_key})
+            self.assertIsNone(hmac_load_user_from_request(request))
+    def test_user_api_key(self):
+        user = user_factory.create(api_key="user_key")
+        path = '/api/queries/'
+        with app.test_client() as c:
+            signature = sign(user.api_key, path, self.expires)
+            rv = c.get(path, query_string={'signature': signature, 'expires': self.expires, 'user_id': user.id})
+            self.assertEqual(user.id, hmac_load_user_from_request(request).id)
 class TestCreateAndLoginUser(BaseTestCase):
     def test_logins_valid_user(self):
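The new tests cover signature-based access (sign plus hmac_load_user_from_request) alongside plain API keys: the client signs the request path together with an expiry timestamp using the query's or user's API key. A hedged sketch of what such a signer can look like; the exact field order and digest used by redash.authentication are not shown in this diff and may differ:

# Hedged sketch of an HMAC request signer; field order and digest are assumptions.
import hashlib
import hmac
import time

def sign_sketch(key, path, expires):
    digest = hmac.new(str(key).encode('utf-8'), digestmod=hashlib.sha1)
    digest.update(path.encode('utf-8'))
    digest.update(str(expires).encode('utf-8'))
    return digest.hexdigest()

expires = time.time() + 1800  # signatures carry their own expiry
signature = sign_sketch('user_key', '/api/queries/42', expires)
# The client appends ?signature=...&expires=... to the request; the server
# recomputes the digest with the stored key and rejects expired or mismatching values.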

View File

@@ -319,6 +319,17 @@ class QueryResultAPITest(BaseTestCase, AuthenticationTestMixin):
         self.paths = []
         super(QueryResultAPITest, self).setUp()
+    def test_post_result_list(self):
+        data_source = data_source_factory.create()
+        query_result = query_result_factory.create()
+        query = query_factory.create()
+        with app.test_client() as c, authenticated_user(c):
+            rv = json_request(c.post, '/api/query_results',
+                              data={'data_source_id': data_source.id,
+                                    'query': query.query})
+            self.assertEquals(rv.status_code, 200)
 class JobAPITest(BaseTestCase, AuthenticationTestMixin):
     def setUp(self):
@@ -326,58 +337,6 @@ class JobAPITest(BaseTestCase, AuthenticationTestMixin):
         super(JobAPITest, self).setUp()
-class CsvQueryResultAPITest(BaseTestCase, AuthenticationTestMixin):
-    def setUp(self):
-        super(CsvQueryResultAPITest, self).setUp()
-        self.paths = []
-        self.query_result = query_result_factory.create()
-        self.query = query_factory.create()
-        self.path = '/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id)
-    # TODO: factor out the HMAC authentication tests
-    def signature(self, expires):
-        return sign(self.query.api_key, self.path, expires)
-    def test_redirect_when_unauthenticated(self):
-        with app.test_client() as c:
-            rv = c.get(self.path)
-            self.assertEquals(rv.status_code, 302)
-    def test_redirect_for_wrong_signature(self):
-        with app.test_client() as c:
-            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id), query_string={'signature': 'whatever', 'expires': 0})
-            self.assertEquals(rv.status_code, 302)
-    def test_redirect_for_correct_signature_and_wrong_expires(self):
-        with app.test_client() as c:
-            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id), query_string={'signature': self.signature(0), 'expires': 0})
-            self.assertEquals(rv.status_code, 302)
-    def test_redirect_for_correct_signature_and_no_expires(self):
-        with app.test_client() as c:
-            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id), query_string={'signature': self.signature(time.time()+3600)})
-            self.assertEquals(rv.status_code, 302)
-    def test_redirect_for_correct_signature_and_expires_too_long(self):
-        with app.test_client() as c:
-            expires = time.time()+(10*3600)
-            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id), query_string={'signature': self.signature(expires), 'expires': expires})
-            self.assertEquals(rv.status_code, 302)
-    def test_returns_200_for_correct_signature(self):
-        with app.test_client() as c:
-            expires = time.time()+1800
-            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id), query_string={'signature': self.signature(expires), 'expires': expires})
-            self.assertEquals(rv.status_code, 200)
-    def test_returns_200_for_authenticated_user(self):
-        with app.test_client() as c, authenticated_user(c):
-            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id))
-            self.assertEquals(rv.status_code, 200)
 class TestLogin(BaseTestCase):
     def setUp(self):
         settings.PASSWORD_LOGIN_ENABLED = True
@@ -518,6 +477,6 @@ class DataSourceTest(BaseTestCase):
         admin = user_factory.create(groups=['admin', 'default'])
         with app.test_client() as c, authenticated_user(c, user=admin):
             rv = json_request(c.post, '/api/data_sources',
-                              data={'name': 'DS 1', 'type': 'pg', 'options': '{"dbname": "redash"}'})
+                              data={'name': 'DS 1', 'type': 'pg', 'options': {"dbname": "redash"}})
             self.assertEqual(rv.status_code, 200)
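The last hunk reflects that options is now posted as a nested JSON object rather than a pre-serialized string. Purely as an illustration of the request body (field names taken from the test; anything beyond them is assumed):

# Illustration only: the request body carries options as a nested object.
import json

payload = {
    'name': 'DS 1',
    'type': 'pg',
    'options': {'dbname': 'redash'},  # previously the string '{"dbname": "redash"}'
}
body = json.dumps(payload)  # POSTed to /api/data_sources as application/json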

View File

@@ -6,8 +6,7 @@ import mock
 from tests import BaseTestCase
 from redash import models
 from factories import dashboard_factory, query_factory, data_source_factory, query_result_factory, user_factory, widget_factory
-from redash.utils import gen_query_hash
-from redash import query_runner
+from redash.utils import gen_query_hash, utcnow
 class DashboardTest(BaseTestCase):
@@ -141,7 +140,7 @@ class QueryOutdatedQueriesTest(BaseTestCase):
         self.assertNotIn(query, queries)
     def test_outdated_queries_works_with_specific_time_schedule(self):
-        half_an_hour_ago = datetime.datetime.utcnow() - datetime.timedelta(minutes=30)
+        half_an_hour_ago = utcnow() - datetime.timedelta(minutes=30)
         query = query_factory.create(schedule=half_an_hour_ago.strftime('%H:%M'))
         query_result = query_result_factory.create(query=query, retrieved_at=half_an_hour_ago-datetime.timedelta(days=1))
         query.latest_query_data = query_result
@@ -165,7 +164,7 @@ class QueryArchiveTest(BaseTestCase):
     def test_archived_query_doesnt_return_in_all(self):
         query = query_factory.create(schedule="1")
         yesterday = datetime.datetime.now() - datetime.timedelta(days=1)
-        query_result = models.QueryResult.store_result(query.data_source.id, query.query_hash, query.query, "1",
+        query_result, _ = models.QueryResult.store_result(query.data_source.id, query.query_hash, query.query, "1",
                                                        123, yesterday)
         query.latest_query_data = query_result
@@ -326,11 +325,11 @@ class TestQueryResultStoreResult(BaseTestCase):
         self.query = "SELECT 1"
         self.query_hash = gen_query_hash(self.query)
         self.runtime = 123
-        self.utcnow = datetime.datetime.utcnow()
+        self.utcnow = utcnow()
         self.data = "data"
     def test_stores_the_result(self):
-        query_result = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query,
+        query_result, _ = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query,
                                                        self.data, self.runtime, self.utcnow)
         self.assertEqual(query_result.data, self.data)
@@ -345,7 +344,7 @@ class TestQueryResultStoreResult(BaseTestCase):
         query2 = query_factory.create(query=self.query, data_source=self.data_source)
         query3 = query_factory.create(query=self.query, data_source=self.data_source)
-        query_result = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query, self.data,
+        query_result, _ = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query, self.data,
                                                        self.runtime, self.utcnow)
         self.assertEqual(models.Query.get_by_id(query1.id)._data['latest_query_data'], query_result.id)
@@ -357,7 +356,7 @@ class TestQueryResultStoreResult(BaseTestCase):
         query2 = query_factory.create(query=self.query, data_source=self.data_source)
         query3 = query_factory.create(query=self.query + "123", data_source=self.data_source)
-        query_result = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query, self.data,
+        query_result, _ = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query, self.data,
                                                        self.runtime, self.utcnow)
         self.assertEqual(models.Query.get_by_id(query1.id)._data['latest_query_data'], query_result.id)
@@ -369,7 +368,7 @@ class TestQueryResultStoreResult(BaseTestCase):
         query2 = query_factory.create(query=self.query, data_source=self.data_source)
         query3 = query_factory.create(query=self.query, data_source=data_source_factory.create())
-        query_result = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query, self.data,
+        query_result, _ = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query, self.data,
                                                        self.runtime, self.utcnow)
         self.assertEqual(models.Query.get_by_id(query1.id)._data['latest_query_data'], query_result.id)

View File

@@ -2,6 +2,7 @@ import datetime
 from mock import patch, call, ANY
 from tests import BaseTestCase
 from tests.factories import query_factory, query_result_factory
+from redash.utils import utcnow
 from redash.tasks import refresh_queries
@@ -11,7 +12,7 @@ from redash.tasks import refresh_queries
 class TestRefreshQueries(BaseTestCase):
     def test_enqueues_outdated_queries(self):
         query = query_factory.create(schedule="60")
-        retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
+        retrieved_at = utcnow() - datetime.timedelta(minutes=10)
         query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
                                                    query_hash=query.query_hash)
         query.latest_query_data = query_result
@@ -23,7 +24,7 @@ class TestRefreshQueries(BaseTestCase):
     def test_skips_fresh_queries(self):
         query = query_factory.create(schedule="1200")
-        retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
+        retrieved_at = utcnow() - datetime.timedelta(minutes=10)
         query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
                                                    query_hash=query.query_hash)
@@ -33,7 +34,7 @@ class TestRefreshQueries(BaseTestCase):
     def test_skips_queries_with_no_ttl(self):
         query = query_factory.create(schedule=None)
-        retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
+        retrieved_at = utcnow() - datetime.timedelta(minutes=10)
         query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
                                                    query_hash=query.query_hash)
@@ -45,7 +46,7 @@ class TestRefreshQueries(BaseTestCase):
         query = query_factory.create(schedule="60")
         query2 = query_factory.create(schedule="60", query=query.query, query_hash=query.query_hash,
                                       data_source=query.data_source)
-        retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
+        retrieved_at = utcnow() - datetime.timedelta(minutes=10)
         query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
                                                    query_hash=query.query_hash)
         query.latest_query_data = query_result
@@ -60,7 +61,7 @@ class TestRefreshQueries(BaseTestCase):
     def test_enqueues_query_with_correct_data_source(self):
         query = query_factory.create(schedule="60")
         query2 = query_factory.create(schedule="60", query=query.query, query_hash=query.query_hash)
-        retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
+        retrieved_at = utcnow() - datetime.timedelta(minutes=10)
         query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
                                                    query_hash=query.query_hash)
         query.latest_query_data = query_result
@@ -79,7 +80,7 @@ class TestRefreshQueries(BaseTestCase):
         query = query_factory.create(schedule="60")
         query2 = query_factory.create(schedule="3600", query=query.query, query_hash=query.query_hash)
         import psycopg2
-        retrieved_at = datetime.datetime.utcnow().replace(tzinfo=psycopg2.tz.FixedOffsetTimezone(offset=0, name=None)) - datetime.timedelta(minutes=10)
+        retrieved_at = utcnow().replace(tzinfo=psycopg2.tz.FixedOffsetTimezone(offset=0, name=None)) - datetime.timedelta(minutes=10)
         query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
                                                    query_hash=query.query_hash)
         query.latest_query_data = query_result