Mirror of https://github.com/getredash/redash.git (synced 2025-12-19 17:37:19 -05:00)
Compare commits
266 Commits
CHANGELOG.md (427 lines changed)

@@ -1,5 +1,110 @@
# Change Log

## v2.0.0 - 2017-08-08

### Added

- [Cassandra] Support for UUID serializing and setting protocol version. @mfouilleul
- [BigQuery] Add maximumBillingTier to BigQuery configuration. @dotneet
- Add the propertyOrder field to specify order of data source settings. @rmakulov
- Add Plotly based Boxplot visualization. @deecay
- [Presto] Add: query cancellation support. @fbertsch
- [MongoDB] add $oids JSON extension.
- [PostgreSQL] support for loading materialized views in schema.
- [MySQL] Add option to hide SSL settings.
- [MySQL] support for RDS MySQL and SSL.
- [Google Analytics] support for mcf queries & better errors.
- Add: static enum parameter type. @rockwotj
- Add: option to hide pivot table controls. @deecay
- Retry reload of query results if it had an error.
- [Data Sources] Add: MemSQL query runner. @alexanderlz
- "Dumb" recents option (see #1779 for details)
- Athena: direct query runner instead of the JDBC proxy. @laughingman7743
- Optionally support parameters in embeds. @ziahamza
- Sorting ability in alerts view.
- Option to change default encoding of CSV writer. @yamamanx
- Ability to set dashboard level filters from UI.
- CLI command to open IPython shell.
- Add link to query page from admin view. @miketheman
- Add the option to write logs to STDOUT instead of STDERR. @eyalzek
- Add limit parameter to tasks API. @alexpekurovsky
- Add SQLAlchemy pool settings.
- Support for category type y axis.
- Add 12 & 24 hours refresh rate option to dashboards.

### Changed

- Upgrade Google API client library for all Google data sources. @ahamino
- [JIRA JQL] change default max results limit from 50 to 1000. @jvanegmond
- Upgrade to newer Plotly version. @deecay
- [Athena] Configuration flag to disable query annotations for Athena. @suemoc
- Ignore extra columns in CSV output. @alexanderlz
- [TreasureData] improve error handling and upgrade client.
- [InfluxDB] simpler test connection query (show databases requires admin).
- [MSSQL] Mark integers as decimals as well, as decimal columns are sometimes returned with an integer column type.
- [Google Spreadsheets] add timeout to requests.
- Sort dashboards list by name. @deecay
- Include Celery task name in statsd metrics.
- Don't include paused datasource's queries in outdated queries count.
- Cohort: handle the case where the value/total might be strings.
- Query results: better type guessing on the client side.
- Counter: support negative indexes to iterate from the end of the results.
- Data sources and destinations configuration: change order of name and type (type first now).
- Show API Key in a modal dialog instead of alert.
- Sentry: upgrade client version.
- Sentry: don't install logging hook.
- Split refresh schemas into separate tasks and add a timeout.
- Execute scheduled queries with parameters using their default value.
- Keep track of last query execution (including failed ones) for scheduling purposes.
- Same view for input on search result page as in header. @44px
- Metrics: report endpoints without dots for metrics.
- Redirect to / when org not found.
- Improve parameters label placement. @44px
- Auto-publish queries when they are named (with option to disable; #1830).
- Show friendly error message in case of duplicate data source name.
- Don't allow saving dashboard with empty name.
- Enable strict checking for Angular DI.
- Disable Angular debug info (should improve performance).
- Update to Webpack 2. @44px
- Remove /forgot endpoint if REDASH_PASSWORD_LOGIN_ENABLED is false. @amarjayr
- Docker: make Gunicorn worker count configurable. @unixwitch
- Snowflake support is no longer enabled by default.
- Enable memory optimization for Excel exporter.

### Fixed

- Fix: set default values in options to enable 'default: True' for checkbox. @rmakulov
- Support MULTI_ORG again.
- [Google Spreadsheets] handle distant future dates.
- [SQLite] better handle utf-8 error messages.
- Fix: don't remove locks for queries with task status of PENDING.
- Only split columns with __/:: that end with filter/MultiFilter.
- Alert notifications failing (sometimes) with a SQLAlchemy error.
- Safeguard against empty query results when checking alert status. @danielerapati
- Deleting a data source didn't work when its query results were referenced by other queries.
- Fix redirect to /setup on the last setup step. @44px
- Cassandra: use port setting in connection options. @yershalom
- Metrics: table name wasn't found for count queries.
- BigQuery wasn't loading due to bad import.
- DynamicForm component was inserting empty values.
- Clear null values from data source options dictionary.
- /api/session API call wasn't working when multi tenancy enabled.
- If column had no type it would use previous column's type.
- Alert destination details were not updating.
- When setting rearm on a new alert, it wasn't persisted.
- Salesforce: sandbox parameter should be optional. @msnider
- Alert page wasn't properly linked from alerts list. @alison985
- PostgreSQL passwords with spaces were not supported. (#1056)
- PivotTable wasn't updating after first save.


## v1.0.3 - 2017-04-18

### Fixed

- Fix: sort by column no longer working.

## v1.0.2 - 2017-04-18

### Fixed
@@ -48,8 +153,11 @@

- Fix: page header wasn't updating on dashboards page @MichaelJAndy
- Fix: keyboard shortcuts didn't work in parameter inputs

## v1.0.0-rc.2 - 2017-02-22

### Other

- Change default job expiry times: job locks expire after 12 hours (previously: 6 hours) and Celery task result objects expire after 4 hours (previously: 1 hour). @shimpeko

## v1.0.0-rc.2 - 2017-02-22

### Changed
@@ -94,7 +202,7 @@

## v1.0.0-rc.1 - 2017-01-31

This version has two big changes behind the scenes:

* Refactor the frontend to use the latest (at the time) Angular version (1.5), along with a better frontend pipeline based on WebPack.
* Refactor the backend code to use SQLAlchemy and Alembic, for easier migrations/upgrades.
@@ -180,109 +288,110 @@ We're releasing a new upgrade script -- see [here](https://redash.io/help-onprem

### Added

- 61fe16e #1374: Add: allow '*' in REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN (Allen Short)
- 2f09043 #1113: Add: share modify/access permissions for queries and dashboard (whummer)
- 3db0eea #1341: Add: support for specifying SAML nameid-format (zoetrope)
- b0ecd0e #1343: Add: support for local SAML metadata file (zoetrope)
- 0235d37 #1335: Add: allow changing alert email subject. (Arik Fraimovich)
- 2135dfd #1333: Add: control over y axis min/max values (Arik Fraimovich)
- 49e788a #1328: Add: support for snapshot generation service (Arik Fraimovich)
- 229ca6c #1323: Add: collect runtime metrics for Celery tasks (Arik Fraimovich)
- 931a1f3 #1315: Add: support for loading BigQuery schema (Arik Fraimovich)
- 39b4f9a #1314: Add: support MongoDB SSL connections (Arik Fraimovich)
- ca1ca9b #1312: Add: additional configuration for Celery jobs (Arik Fraimovich)
- fc00e61 #1310: Add: support for date/time with seconds parameters (Arik Fraimovich)
- d72a198 #1307: Add: API to force refresh data source schema (Arik Fraimovich)
- beb89ec #1305: Add: UI to edit dashboard text box widget (Kazuhito Hokamura)
- 808fdd4 #1298: Add: JIRA (JQL) query runner (Arik Fraimovich)
- ff9e844 #1280: Add: configuration flag to disable scheduled queries (Hirotaka Suzuki)
- ef4699a #1269: Add: Google Drive federated tables support in BigQuery query runner (Kurt Gooden)
- 2eeb947 #1236: Add: query runner for Cassandra and ScyllaDB (syerushalmy)
- 10b398e #1249: Add: override slack webhook parameters (mystelynx)
- 2b5e340 #1252: Add: Schema loading support for Presto query runner (using information_schema) (Rohan Dhupelia)
- 2aaf5dd #1250: Add: query snippets feature (Arik Fraimovich)
- 8d8af73 #1226: Add: Sankey visualization (Arik Fraimovich)
- a02edda #1222: Add: additional results format for sunburst visualization (Arik Fraimovich)
- 0e70188 #1213: Add: new sunburst sequence visualization (Arik Fraimovich)
- 9a6d2d7 #1204: Add: show views in schema browser for Vertica data sources (Matthew Carter)
- 600afa5 #1138: Add: ability to register user defined function (UDF) resources for BigQuery DataSource/Query (fabito)
- b410410 #1166: Add: "every 14 days" refresh option (Arik Fraimovich)
- 906365f #967: Add: extend ElasticSearch query_runner to support aggregations (lloydw)

### Changed

- 2de4aa2 #1395: Change: switch to requests in URL query runner (Arik Fraimovich)
- db1a941 #1392: Change: Update documentation links to point at the new location. (Arik Fraimovich)
- 002f794 #1368: Change: added ability to disable auto update in admin views (Arik Fraimovich)
- aa5d14e #1366: Change: improve error message for exception in the Python query runner (deecay)
- 880627c #1355: Change: pass the user object to the run_query method (Arik Fraimovich)
- 23c605b #1342: SAML: specify entity id (zoetrope)
- 015b1dc #1334: Change: allow specifying recipient address when sending email test message (Arik Fraimovich)
- 39aaa2f #1292: Change: improvements to map visualization (Arik Fraimovich)
- b22191b #1332: Change: upgrade Python packages (Arik Fraimovich)
- 23ba98b #1331: Celery: Upgrade Celery to more recent version. (Arik Fraimovich)
- 3283116 #1330: Change: upgrade Requests to latest version. (Arik Fraimovich)
- 39091e0 #1324: Change: add more logging and information for refresh schemas task (Arik Fraimovich)
- 462faea #1316: Change: remove deprecated settings (Arik Fraimovich)
- 73e1837 #1313: Change: more flexible column width calculation (Arik Fraimovich)
- e8eb840 #1279: Change: update bootstrap.sh to support Ubuntu 16.04 (IllusiveMilkman)
- 8cf0252 #1262: Change: upgrade Plot.ly version and switch to smaller build (Arik Fraimovich)
- 0b79fb8 #1306: Change: paginate queries page & add explicit urls. (Arik Fraimovich)
- 41f99f5 #1299: Change: send Content-Type header (application/json) in query results responses (Tsuyoshi Tatsukawa)
- dfb1a20 #1297: Change: update Slack configuration titles. (Arik Fraimovich)
- 8c1056c #1294: Change: don't annotate BigQuery queries (Arik Fraimovich)
- a3cf92e #1289: Change: use key_as_string when available (ElasticSearch query runner) (Arik Fraimovich)
- e155191 #1285: Change: do not display Oracle tablespace name in schema browser (Matthew Carter)
- 6cbc39c #1282: Change: deduplicate Google Spreadsheet columns (Arik Fraimovich)
- 4caf2e3 #1277: Set specific version of cryptography lib (Arik Fraimovich)
- d22f0d4 #1216: Change: bootstrap.sh - use non interactive dist-upgrade (Atsushi Sasaki)
- 19530f4 #1245: Change: switch from CodeMirror to Ace editor (Arik Fraimovich)
- dfb92db #1234: Change: MongoDB query runner set DB name as mandatory (Arik Fraimovich)
- b750843 #1230: Change: annotate Presto queries with metadata (Noriaki Katayama)
- 5b20fe2 #1217: Change: install libffi-dev for Cryptography (Ubuntu setup script) (Atsushi Sasaki)
- a9fac34 #1206: Change: update pymssql version to 2.1.3 (kitsuyui)
- 5d43cbe #1198: Change: add support for Standard SQL in BigQuery query runner (mystelynx)
- 84d0c22 #1193: Change: modify the argument order of moment.add function call (Kenya Yamaguchi)

### Fixed

- d6febb0 #1375: Fix: Download Dataset does not work when not logged in (Joshua Dechant)
- 96553ad #1369: Fix: missing format call in Elasticsearch test method (Adam Griffiths)
- c57c765 #1365: Fix: compare retrieval times in UTC timezone (Allen Short)
- 37dff5f #1360: Fix: connection test was broken for MySQL (ichihara)
- 360028c #1359: Fix: schema loading query for Hive was wrong for non default schema (laughingman7743)
- 7ee41d4 #1358: Fix: make sure all calls to run_query updated with new parameter (Arik Fraimovich)
- 0d94479 #1329: Fix: Redis memory leak. (Arik Fraimovich)
- 7145aa2 #1325: Fix: queries API was doing N+1 queries in most cases (Arik Fraimovich)
- cd2e927 #1311: Fix: BoxPlot visualization wasn't rendering on a dashboard (Arik Fraimovich)
- a562ce7 #1309: Fix: properly render checkboxes in dynamic forms (Arik Fraimovich)
- d48192c #1308: Fix: support for Unicode columns name in Google Spreadsheets (Arik Fraimovich)
- e42f93f #1283: Fix: schema browser was unstable after opening a table (Arik Fraimovich)
- 170bd65 #1272: Fix: TreasureData get_schema method was returning array instead of string as column name (ariarijp)
- 4710c41 #1265: Fix: refresh modal not working for unsaved query (Arik Fraimovich)
- bc3a5ab #1264: Fix: dashboard refresh not working (Arik Fraimovich)
- 6202d09 #1240: Fix: when shared dashboard token not found, return 404 (Wesley Batista)
- 93aac14 #1251: Fix: autocomplete went crazy when database has no autocomplete. (Arik Fraimovich)
- b8eca28 #1246: Fix: support large schemas in schema browser (Arik Fraimovich)
- b781003 #1223: Fix: Alert: when hipchat Alert.name is multibyte character, occur error. (toyama0919)
- 0b928e6 #1227: Fix: Bower install fails in vagrant (Kazuhito Hokamura)
- a411af2 #1232: Fix: don't show warning when query string (parameters value) changes (Kazuhito Hokamura)
- 3dbb5a6 #1221: Fix: sunburst didn't handle all cases of path lengths (Arik Fraimovich)
- a7cc1ee #1218: Fix: updated result not being saved when changing query text. (Arik Fraimovich)
- 0617833 #1215: Fix: email alerts not working (Arik Fraimovich)
- 78f65b1 #1187: Fix: read only users receive the permission error modal in query view (Arik Fraimovich)
- bba801f #1167: Fix the version of setuptools on bootstrap script for Ubuntu (Takuya Arita)
- ce81d69 #1160: Fix indentation in docker-compose-example.yml (Hirofumi Wakasugi)
- dd759fe #1155: Fix: make all configuration values of Oracle required (Arik Fraimovich)

### Docs

- a69ee0c #1225: Fix: RST formatting of the Vagrant documentation (Kazuhito Hokamura)
- 03837c0 #1242: Docs: add warning re. quotes on column names and BigQuery (Ereli)
- 9a98075 #1255: Docs: add documentation for InfluxDB (vishesh92)
- e0485de #1195: Docs: fix typo in maintenance page title (Antoine Augusti)
- 7681d3e #1164: Docs: update permission documentation (Daniel Darabos)
- bcd3670 #1156: Docs: add SSL parameters to nginx configuration (Josh Cox)

## v0.11.1.b2095 - 2016-08-02

@@ -300,73 +409,77 @@ Also, this release includes numerous smaller features, improvements, and bug fix

A big thank you goes to all who contributed code and documentation in this release: @AntoineAugusti, @James226, @adamlwgriffiths, @alexdebrie, @anthony-coble, @ariarijp, @dheerajrav, @edwardsharp, @machira, @nabilblk, @ninneko, @ordd, @tomerben, @toru-takahashi, @vishesh92, @vorakumar and @whummer.

### Added

- d5e5b24 #1136: Feature: add --org option to all relevant CLI commands. (@adamlwgriffiths)
- 87e25f2 #1129: Feature: support for JSON query formatting (Mongo, ElasticSearch) (@arikfr)
- 6bb2716 #1121: Show error when failing to communicate with server (@arikfr)
- f21276e #1119: Feature: add UI to delete alerts (@arikfr)
- 8656540 #1069: Feature: UI for query parameters (@arikfr)
- 790128c #1067: Feature: word cloud visualization (@anthony-coble)
- 8b73a2b #1098: Feature: UI for alert destinations & new destination types (@alexdebrie)
- 1fbeb5d #1092: Add Heroku support (@adamlwgriffiths)
- f64622d #1089: Add support for serialising UUID type within MSSQL #961 (@James226)
- 857caab #1085: Feature: API to pause a data source (@arikfr)
- 214aa3b #1060: Feature: support configuring user's groups with SAML (@vorakumar)
- e20a005 #1007: Issue#1006: Make bottom margin editable for Chart visualization (@vorakumar)
- 6e0dd2b #1063: Add support for date/time Y axis (@tomerben)
- b5a4a6b #979: Feature: Add CLI to edit group permissions (@ninneko)
- 6d495d2 #1014: Add server-side parameter handling for embeds (@whummer)
- 5255804 #1091: Add caching for queries used in embeds (@whummer)

### Changed

- 0314313 #1149: Presto QueryRunner supports tinyint and smallint (@toru-takahashi)
- 8fa6fdb #1030: Make sure data sources list ordered by id (@arikfr)
- 8df822e #1141: Make create data source button more prominent (@arikfr)
- 96dd811 #1127: Mark basic_auth_password as secret (@adamlwgriffiths)
- ad65391 #1130: Improve Slack notification style (@AntoineAugusti)
- df637e3 #1116: Return meaningful error when there is no cached result. (@arikfr)
- 65635ec #1102: Switch to HipChat V2 API (@arikfr)
- 14fcf01 #1072: Remove counter from the tasks Done tab (as it always shows 50). #1047 (@arikfr)
- 1a1160e #1062: DynamoDB: Better exception handling (@arikfr)
- ed45dcb #1044: Improve vagrant flow (@staritza)
- 8b5dc8e #1036: Add optional block for more scripts in template (@arikfr)

### Fixed

- dbd48e1 #1143: Fix: use the email input type where needed (@ariarijp)
- 7445972 #1142: Fix: dates in filters might be duplicated (@arikfr)
- 5d0ed02 #1140: Fix: Hive should use the enabled variable (@arikfr)
- 392627d #1139: Fix: Impala data source referencing wrong variable (@arikfr)
- c5bfbba #1133: Fix: query scrolling issues (@vishesh92)
- c01d266 #1128: Fix: visualization options not updating after changing type (@arikfr)
- 6bc0e7a #1126: Fix #669: save fails when doing partial save of new query (@arikfr)
- 3ce27b9 #1118: Fix: remove alerts for archived queries (@arikfr)
- 4fabaae #1117: Fix #1052: filter not working for date/time values (@arikfr)
- c107c94 #1077: Fix: install needed dependencies to use Hive in Docker image (@nabilblk)
- abc790c #1115: Fix: allow non integers in alert reference value (@arikfr)
- 4ec473c #1110: Fix #1109: mixed group permissions resulting in wrong permission (@arikfr)
- 1ca5262 #1099: Fix RST syntax for links (@adamlwgriffiths)
- daa6c1c #1096: Fix typo in env variable VERSION_CHECK (@AntoineAugusti)
- cd06d27 #1095: Fix: use create_query permission for new query button. (@ordd)
- 2bc0b27 #1061: Fix: area chart stacking doesn't work (@machira)
- 8c21e91 #1108: Remove potentially concurrency-unsafe code from enqueue_query (@arikfr)
- e831218 #1084: Fix #1049: duplicate alerts when data source belongs to multiple groups (@arikfr)
- 6edb0ca #1080: Fix typo (@jeffwidman)
- 64d7538 #1074: Fix: ElasticSearch wasn't using correct type names (@toyama0919)
- 3f90dd9 #1064: Fix: old task trackers were not really removed (@arikfr)
- e10ecd2 #1058: Bring back filters if dashboard filters are enabled (@AntoineAugusti)
- 701035f #1059: Fix: DynamoDB having issues when setting host (@arikfr)
- 2924d4f #1040: Small fixes to visualizations view (@arikfr)
- fec0d5f #1037: Fix: multi filter wasn't working with __ syntax (@dheerajrav)
- b066ce4 #1033: Fix: only ask for notification permissions if wasn't denied (@arikfr)
- 960c416 #1032: Fix: make sure we return dashboards only for the current org (@arikfr)
- b3844d3 #1029: Hive: close connection only if it exists (@arikfr)

### Docs

- 6bb09d8 #1146: Docs: add a link to settings documentation. (@adamlwgriffiths)
- 095e759 #1103: Docs: add section about monitoring (@AntoineAugusti)
- e942486 #1090: Contributing Guide (@arikfr)
- 3037c4f #1066: Docs: command type-o fix. (@edwardsharp)
- 2ee0065 #1038: Add an ISSUE_TEMPLATE.md to direct people at the forum (@arikfr)
- f7322a4 #1021: Vagrant docs: add purging the cache step (@ariarijp)

---
@@ -6,7 +6,7 @@ The following is a set of guidelines for contributing to Redash. These are guide

## Quick Links:

- [Feature Roadmap](https://trello.com/b/b2LUHU7A/re-dash-roadmap)
- [Feature Roadmap](https://trello.com/b/b2LUHU7A/redash-roadmap)
- [Feature Requests](https://discuss.redash.io/c/feature-requests)
- [Gitter Chat](https://gitter.im/getredash/redash) or [Slack](https://slack.redash.io)
- [Documentation](https://redash.io/help/)

@@ -29,7 +29,7 @@ The following is a set of guidelines for contributing to Redash. These are guide

- [Documentation](#documentation)
- Design?

[Addtional Notes](#additional-notes)
[Additional Notes](#additional-notes)

- [Release Method](#release-method)
- [Code of Conduct](#code-of-conduct)

@@ -46,9 +46,9 @@ When creating a new bug report, please make sure to:

### Suggesting Enhancements / Feature Requests

If you would like to suggest an enchancement or ask for a new feature:
If you would like to suggest an enhancement or ask for a new feature:

- Please check [the roadmap](https://trello.com/b/b2LUHU7A/re-dash-roadmap) for existing Trello card for what you want to suggest/ask. If there is, feel free to upvote it to signal interest or add your comments.
- Please check [the roadmap](https://trello.com/b/b2LUHU7A/redash-roadmap) for existing Trello card for what you want to suggest/ask. If there is, feel free to upvote it to signal interest or add your comments.
- If there is no existing card, open a thread in [the forum](https://discuss.redash.io/c/feature-requests) to start a discussion about what you want to suggest. Try to provide as much details and context as possible and include information about *the problem you want to solve* rather only *your proposed solution*.

### Pull Requests

@@ -56,7 +56,7 @@ If you would like to suggest an enchancement or ask for a new feature:

- **Code contributions are welcomed**. For big changes or significant features, it's usually better to reach out first and discuss what you want to implement and how (we recommend reading: [Pull Request First](https://medium.com/practical-blend/pull-request-first-f6bb667a9b6#.ozlqxvj36)). This to make sure that what you want to implement is aligned with our goals for the project and that no one else is already working on it.
- Include screenshots and animated GIFs in your pull request whenever possible.
- Please add [documentation](#documentation) for new features or changes in functionality along with the code.
- Please follow existing code style. We use PEP8 for Python and sensible style for Javascript.
- Please follow existing code style. We use PEP8 for Python and sensible style for JavaScript.

### Documentation
@@ -19,7 +19,7 @@ scheduler() {
}

server() {
  exec /usr/local/bin/gunicorn -b 0.0.0.0:5000 --name redash -w4 redash.wsgi:app
  exec /usr/local/bin/gunicorn -b 0.0.0.0:5000 --name redash -w${REDASH_WEB_WORKERS:-4} redash.wsgi:app
}

help() {
@@ -1,6 +1,6 @@
#!/usr/bin/env python
import os
import argparse
import os
import subprocess
import sys
from collections import namedtuple

@@ -111,7 +111,10 @@ def restart_services():
    # otherwise it won't notice that /opt/redash/current pointing at a different
    # directory.
    green("Restarting...")
    run('sudo /etc/init.d/redash_supervisord restart')
    try:
        run('sudo /etc/init.d/redash_supervisord restart')
    except subprocess.CalledProcessError as e:
        run('sudo service supervisor restart')


def update_requirements(version_name):
@@ -18,7 +18,7 @@ test:
    - nosetests --with-xunit --xunit-file=$CIRCLE_TEST_REPORTS/junit.xml --with-coverage --cover-package=redash tests/
deployment:
  github_and_docker:
    branch: master
    branch: [master, /release.*/]
    commands:
      - make pack
      # Skipping uploads for now, until master is stable.
@@ -437,6 +437,10 @@ counter-renderer counter-name {
  border: 1px solid rgba(0,0,0,.15);
}

.parameter-label {
  display: block;
}

div.table-name {
  overflow: hidden;
  text-overflow: ellipsis;
@@ -7,7 +7,7 @@
        <span class="icon-bar"></span>
        <span class="icon-bar"></span>
      </button>
      <a class="navbar-brand" href="/"><img ng-src="{{$ctrl.logoUrl}}"/></a>
      <a class="navbar-brand" ng-href="{{$ctrl.basePath}}"><img ng-src="{{$ctrl.logoUrl}}"/></a>
    </div>
    <div class="collapse navbar-collapse" uib-collapse="!isNavOpen">
      <ul class="nav navbar-nav">
@@ -6,9 +6,9 @@ import './app-header.css';

const logger = debug('redash:appHeader');

function controller($rootScope, $location, $uibModal, Auth, currentUser, Dashboard) {
  // TODO: logoUrl should come from clientconfig
function controller($rootScope, $location, $uibModal, Auth, currentUser, clientConfig, Dashboard) {
  this.logoUrl = logoUrl;
  this.basePath = clientConfig.basePath;
  this.currentUser = currentUser;
  this.showQueriesMenu = currentUser.hasPermission('view_query');
  this.showNewQueryMenu = currentUser.hasPermission('create_query');
@@ -1,27 +1,28 @@
<form name="dataSourceForm">
  <div class="form-group">
    <label for="dataSourceName">Name</label>
    <input type="string" class="form-control" name="dataSourceName" ng-model="target.name" required>
  </div>
  <div class="form-group">
    <label for="type">Type</label>
    <select name="type" class="form-control" ng-options="type.type as type.name for type in types" ng-model="target.type"></select>
  </div>
  <div class="form-group" ng-class='{"has-error": !inner.input.$valid}' ng-form="inner" ng-repeat="(name, input) in type.configuration_schema.properties">
    <label ng-if="input.type !== 'checkbox'">{{input.title || name | capitalize}}</label>
    <input name="input" type="{{input.type}}" class="form-control" ng-model="target.options[name]" ng-required="input.required"
           ng-if="input.type !== 'file' && input.type !== 'checkbox'" accesskey="tab" placeholder="{{input.default}}">
  <div class="form-group">
    <label for="dataSourceName">Name</label>
    <input type="string" class="form-control" name="dataSourceName" ng-model="target.name" required>
  </div>
  <div class="form-group" ng-class='{"has-error": !inner.input.$valid}' ng-form="inner" ng-repeat="field in fields">
    <label ng-if="field.property.type !== 'checkbox'">{{field.property.title || field.name | capitalize}}</label>
    <input name="input" type="{{field.property.type}}" class="form-control" ng-model="target.options[field.name]" ng-required="field.property.required"
           ng-if="field.property.type !== 'file' && field.property.type !== 'checkbox'" accesskey="tab" placeholder="{{field.property.default}}">

    <label ng-if="input.type=='checkbox'">
      <input name="input" type="{{input.type}}" ng-model="target.options[name]" ng-required="input.required"
             ng-if="input.type !== 'file'" accesskey="tab" placeholder="{{input.default}}">
      {{input.title || name | capitalize}}
    <label ng-if="field.property.type=='checkbox'">
      <input name="input" type="{{field.property.type}}" ng-model="target.options[field.name]" ng-required="field.property.required"
             ng-if="field.property.type !== 'file'" accesskey="tab" placeholder="{{field.property.default}}">
      {{field.property.title || field.name | capitalize}}
    </label>

    <input name="input" type="file" class="form-control" ng-model="files[name]" ng-required="input.required && !target.options[name]"
    <input name="input" type="file" class="form-control" ng-model="files[field.name]" ng-required="field.property.required && !target.options[field.name]"
           base-sixty-four-input
           ng-if="input.type === 'file'">
           ng-if="field.property.type === 'file'">
  </div>

  <button class="btn btn-primary" ng-disabled="!dataSourceForm.$valid" ng-click="saveChanges()">Save</button>
  <span ng-repeat="action in actions">
    <button class="btn"
@@ -1,8 +1,22 @@
import { each, contains, find } from 'underscore';
import { isUndefined, each, contains, find } from 'underscore';
import endsWith from 'underscore.string/endsWith';
import template from './dynamic-form.html';

function DynamicForm($http, toastr, $q) {
  function orderedInputs(properties, order) {
    const inputs = new Array(order.length);
    Object.keys(properties).forEach((key) => {
      const position = order.indexOf(key);
      const input = { name: key, property: properties[key] };
      if (position > -1) {
        inputs[position] = input;
      } else {
        inputs.push(input);
      }
    });
    return inputs;
  }

  return {
    restrict: 'E',
    replace: 'true',
@@ -19,7 +33,15 @@ function DynamicForm($http, toastr, $q) {
        $scope.target.type = types[0].type;
      }

      $scope.type = find(types, t => t.type === $scope.target.type);
      const type = find(types, t => t.type === $scope.target.type);
      const configurationSchema = type.configuration_schema;

      $scope.fields = orderedInputs(
        configurationSchema.properties,
        configurationSchema.order || []
      );

      return type;
    }

    $scope.inProgressActions = {};
@@ -35,6 +57,7 @@ function DynamicForm($http, toastr, $q) {
          $scope.inProgressActions[action.name] = false;
          action.name = name;
        }

        originalCallback(release);
      };
    });
@@ -80,24 +103,42 @@ function DynamicForm($http, toastr, $q) {
        prop.required = contains(type.configuration_schema.required, name);
      });
    });

    $scope.$watch('target.type', (current, prev) => {
      if (prev !== current) {
        if (prev !== undefined) {
          $scope.target.options = {};
        }

        const type = setType($scope.types);

        if (Object.keys($scope.target.options).length === 0) {
          const properties = type.configuration_schema.properties;
          Object.keys(properties).forEach((property) => {
            if (!isUndefined(properties[property].default)) {
              $scope.target.options[property] = properties[property].default;
            }
          });
        }
      }
    });
    });

    $scope.$watch('target.type', (current, prev) => {
      if (prev !== current) {
        if (prev !== undefined) {
          $scope.target.options = {};
        }
        setType($scope.types);
      }
    });

    $scope.saveChanges = () => {
      $scope.target.$save(() => {
        toastr.success('Saved.');
        $scope.dataSourceForm.$setPristine();
      }, () => {
        toastr.error('Failed saving.');
      });
      $scope.target.$save(
        () => {
          toastr.success('Saved.');
          $scope.dataSourceForm.$setPristine();
        },
        (error) => {
          if (error.status === 400 && 'message' in error.data) {
            toastr.error(error.data.message);
          } else {
            toastr.error('Failed saving.');
          }
        }
      );
    };
  },
};
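The orderedInputs helper in the DynamicForm diff above is what lets a data source's configuration_schema order field (the propertyOrder feature mentioned in the v2.0.0 changelog) control the form layout. A minimal, self-contained sketch of its behaviour, using a made-up schema purely for illustration:

```js
// Illustrative only: a hypothetical configuration_schema showing how
// orderedInputs() positions listed keys and appends the rest.
const properties = {
  host: { type: 'string', title: 'Host' },
  port: { type: 'number', title: 'Port', default: 5432 },
  dbname: { type: 'string', title: 'Database Name' },
};
const order = ['dbname', 'host'];

// Same logic as the helper in the diff above, reproduced so the sketch runs standalone.
function orderedInputs(props, ord) {
  const inputs = new Array(ord.length);
  Object.keys(props).forEach((key) => {
    const position = ord.indexOf(key);
    const input = { name: key, property: props[key] };
    if (position > -1) {
      inputs[position] = input;   // key listed in `order`: take its slot
    } else {
      inputs.push(input);         // unlisted key: appended after the ordered ones
    }
  });
  return inputs;
}

// Prints ['dbname', 'host', 'port']: the declared order wins for listed keys,
// everything else keeps its object-key order at the end.
console.log(orderedInputs(properties, order).map(f => f.name));
```

Each entry in the returned array has the `{ name, property }` shape that the reworked dynamic-form.html template iterates over as `field in fields`.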
@@ -41,9 +41,9 @@ function DynamicTable($sanitize) {
    }

    if (this.orderByField) {
      this.allRows = sortBy(this.allRows, this.orderByField.name);
      this.rows = sortBy(this.rows, this.orderByField.name);
      if (this.orderByReverse) {
        this.allRows = this.allRows.reverse();
        this.rows = this.rows.reverse();
      }
      this.pageChanged();
    }
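For context, a minimal sketch of the sorting step shown in the diff above, which now sorts the visible rows by the selected column and reverses them for descending order. The row objects here are made up; only the sortBy-then-reverse shape mirrors the component:

```js
import { sortBy } from 'underscore';

// Hypothetical rows standing in for the table's `rows` array.
let rows = [
  { name: 'weekly actives', runtime: 12.4 },
  { name: 'daily signups', runtime: 3.1 },
  { name: 'churned users', runtime: 7.9 },
];

const orderByField = { name: 'runtime' };
const orderByReverse = true;

// Sort by the selected column, then reverse when descending order is requested.
rows = sortBy(rows, orderByField.name);
if (orderByReverse) {
  rows = rows.reverse();
}
```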
@@ -18,3 +18,4 @@ export { default as rdTimeAgo } from './rd-time-ago';
export { default as overlay } from './overlay';
export { default as routeStatus } from './route-status';
export { default as filters } from './filters';
export { default as sortIcon } from './sort-icon';
@@ -13,6 +13,7 @@
      <select ng-model="$ctrl.parameter.type" class="form-control">
        <option value="text">Text</option>
        <option value="number">Number</option>
        <option value="enum">Dropdown List</option>
        <option value="date">Date</option>
        <option value="datetime-local">Date and Time</option>
        <option value="datetime-with-seconds">Date and Time (with seconds)</option>
@@ -22,5 +23,9 @@
      <label>Global</label>
      <input type="checkbox" class="form-inline" ng-model="$ctrl.parameter.global">
    </div>
    <div class="form-group" ng-if="$ctrl.parameter.type === 'enum'">
      <label>Dropdown List Values (newline delimited)</label>
      <textarea class="form-control" rows="3" ng-model="$ctrl.parameter.enumOptions"></textarea>
    </div>
  </div>
</div>
@@ -1,11 +1,24 @@
<div class="form-inline bg-white p-5" ng-if="parameters | notEmpty" ui-sortable="{ 'ui-floating': true, 'disabled': !editable }" ng-model="parameters">
  <div class="form-group" ng-repeat="param in parameters">
    <label>{{param.title}}</label>
    <button class="btn btn-default btn-xs" ng-click="showParameterSettings(param)" ng-if="editable"><i class="zmdi zmdi-settings"></i></button>
<div class="form-inline bg-white p-5"
     ng-if="parameters | notEmpty"
     ui-sortable="{ 'ui-floating': true, 'disabled': !editable }"
     ng-model="parameters">
  <div class="form-group m-l-10 m-r-10"
       ng-repeat="param in parameters">
    <label class="parameter-label">{{param.title}}</label>
    <button class="btn btn-default btn-xs"
            ng-click="showParameterSettings(param)"
            ng-if="editable">
      <i class="zmdi zmdi-settings"></i>
    </button>
    <span ng-switch="param.type">
      <input ng-switch-when="datetime-with-seconds" type="datetime-local" step="1" class="form-control" ng-model="param.ngModel">
      <input ng-switch-when="datetime-local" type="datetime-local" class="form-control" ng-model="param.ngModel">
      <input ng-switch-when="date" type="date" class="form-control" ng-model="param.ngModel">
      <span ng-switch-when="enum">
        <select ng-model="param.value" class="form-control">
          <option ng-repeat="option in extractEnumOptions(param.enumOptions)" value="{{option}}">{{option}}</option>
        </select>
      </span>
      <input ng-switch-default type="{{param.type}}" class="form-control" ng-model="param.ngModel">
    </span>
  </div>
@@ -40,7 +40,14 @@ function ParametersDirective($location, $uibModal) {
        });
      }, true);
    }

    // These are input as newline delimited values,
    // so we split them here.
    scope.extractEnumOptions = (enumOptions) => {
      if (enumOptions) {
        return enumOptions.split('\n');
      }
      return [];
    };
    scope.showParameterSettings = (param) => {
      $uibModal.open({
        component: 'parameterSettings',
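A small self-contained sketch of the enum-parameter flow wired up above: the "Dropdown List Values (newline delimited)" textarea feeds param.enumOptions, and extractEnumOptions splits it into the options the dropdown repeats over. The sample values are illustrative only:

```js
// Mirrors scope.extractEnumOptions from the directive above.
function extractEnumOptions(enumOptions) {
  if (enumOptions) {
    return enumOptions.split('\n');
  }
  return [];
}

// What a user might type into the newline-delimited textarea.
const enumOptions = 'us-east-1\nus-west-2\neu-central-1';

console.log(extractEnumOptions(enumOptions)); // ['us-east-1', 'us-west-2', 'eu-central-1']
console.log(extractEnumOptions(undefined));   // [] for parameters without options
```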
client/app/components/sort-icon.js (new file, 26 lines)

@@ -0,0 +1,26 @@
export default function (ngModule) {
  ngModule.component('sortIcon', {
    template: '<span ng-if="$ctrl.showIcon"><i class="fa fa-sort-{{$ctrl.icon}}"></i></span>',
    bindings: {
      column: '<',
      sortColumn: '<',
      reverse: '<',
    },
    controller() {
      this.$onChanges = (changes) => {
        ['column', 'sortColumn', 'reverse'].forEach((v) => {
          if (v in changes) {
            this[v] = changes[v].currentValue;
          }
        });

        this.showIcon = false;

        if (this.column === this.sortColumn) {
          this.showIcon = true;
          this.icon = this.reverse ? 'desc' : 'asc';
        }
      };
    },
  });
}
@@ -1,5 +1,5 @@
<!DOCTYPE html>
<html ng-app="app">
<html ng-app="app" ng-strict-di>
<head lang="en">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <meta charset="UTF-8">
@@ -100,6 +100,7 @@ registerVisualizations(ngModule);

ngModule.config(($routeProvider, $locationProvider, $compileProvider,
  uiSelectConfig, toastrConfig) => {
  $compileProvider.debugInfoEnabled(false);
  $compileProvider.aHrefSanitizationWhitelist(/^\s*(https?|http|data):/);
  $locationProvider.html5Mode(true);
  uiSelectConfig.theme = 'bootstrap';
client/app/multi_org.html (new file, 19 lines)
@@ -0,0 +1,19 @@
<!DOCTYPE html>
<html ng-app="app" ng-strict-di>
<head lang="en">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <meta charset="UTF-8">
  <base href="{{base_href}}">
  <title>Redash</title>

  <link rel="icon" type="image/png" sizes="32x32" href="/images/favicon-32x32.png">
  <link rel="icon" type="image/png" sizes="96x96" href="/images/favicon-96x96.png">
  <link rel="icon" type="image/png" sizes="16x16" href="/images/favicon-16x16.png">
</head>

<body>
  <section>
    <div ng-view></div>
  </section>
</body>
</html>
@@ -35,7 +35,7 @@
<td>{{row.data_source_id}}</td>
<td>{{row.username}}</td>
<td>{{row.state}} <span ng-if="row.state === 'failed'" uib-popover="{{row.error}}" popover-trigger="mouseenter" class="zmdi zmdi-help"></span></td>
<td>{{row.query_id}}</td>
<td><a href="queries/{{row.query_id}}">{{row.query_id}}</a></td>
<td>{{row.query_hash}}</td>
<td>{{row.run_time | durationHumanize}}</td>
<td>{{row.created_at | toMilliseconds | dateTime }}</td>
@@ -4,20 +4,20 @@
</page-header>

<div class="container">
<div class="container bg-white">
<div class="bg-white">
<table class="table table-condensed table-hover">
<thead>
<tr>
<th>Name</th>
<th>Created By</th>
<th>State</th>
<th>Created At</th>
<th class="sortable-column" ng-click="$ctrl.alerts.orderBy('name')">Name <sort-icon column="'name'" sort-column="$ctrl.alerts.orderByField" reverse="$ctrl.alerts.orderByReverse"></sort-icon></th>
<th class="sortable-column" ng-click="$ctrl.alerts.orderBy('created_by')">Created By <sort-icon column="'created_by'" sort-column="$ctrl.alerts.orderByField" reverse="$ctrl.alerts.orderByReverse"></sort-icon></th>
<th class="sortable-column" ng-click="$ctrl.alerts.orderBy('state')">State <sort-icon column="'state'" sort-column="$ctrl.alerts.orderByField" reverse="$ctrl.alerts.orderByReverse"></sort-icon></th>
<th class="sortable-column" ng-click="$ctrl.alerts.orderBy('created_at')">Created At <sort-icon column="'created_at'" sort-column="$ctrl.alerts.orderByField" reverse="$ctrl.alerts.orderByReverse"></sort-icon></th>
</tr>
</thead>
<tbody>
<tr ng-repeat="row in $ctrl.alerts.getPageRows()">
<td><a href="alerts/{{row.id}}">{{row.name}}</a></td>
<td>{{row.user.name}}</td>
<td>{{row.created_by}}</td>
<td><span ng-class="row.class">{{row.state | uppercase}}</span> since <span am-time-ago="row.updated_at"></span></td>
<td><span am-time-ago="row.created_at"></span></td>
</tr>
@@ -1,24 +1,27 @@
import { Paginator } from '../../utils';
import template from './alerts-list.html';

const stateClass = {
ok: 'label label-success',
triggered: 'label label-danger',
unknown: 'label label-warning',
};

class AlertsListCtrl {
constructor(Events, Alert) {
Events.record('view', 'page', 'alerts');

this.alerts = new Paginator([], { itemsPerPage: 20 });

Alert.query((alerts) => {
const stateClass = {
ok: 'label label-success',
triggered: 'label label-danger',
unknown: 'label label-warning',
};

alerts.forEach((alert) => {
alert.class = stateClass[alert.state];
});

this.alerts.updateRows(alerts);
this.alerts.updateRows(alerts.map(alert => ({
id: alert.id,
name: alert.name,
state: alert.state,
class: stateClass[alert.state],
created_by: alert.user.name,
created_at: alert.created_at,
updated_at: alert.updated_at,
})));
});
}
}
@@ -46,6 +46,7 @@ function DashboardListCtrl(Dashboard, $location, clientConfig) {

this.update = () => {
this.dashboards.$promise.then((data) => {
data = _.sortBy(data, 'name');
const filteredDashboards = data.map((dashboard) => {
dashboard.tags = (dashboard.name.match(TAGS_REGEX) || []).map(tag => tag.replace(/:$/, ''));
dashboard.untagged_name = dashboard.name.replace(TAGS_REGEX, '').trim();
@@ -17,6 +17,8 @@ function DashboardCtrl($rootScope, $routeParams, $location, $timeout, $q, $uibMo
{ name: '10 minutes', rate: 60 * 10 },
{ name: '30 minutes', rate: 60 * 30 },
{ name: '1 hour', rate: 60 * 60 },
{ name: '12 hour', rate: 12 * 60 * 60 },
{ name: '24 hour', rate: 24 * 60 * 60 },
];

this.setRefreshRate = (rate) => {
@@ -155,12 +157,20 @@ function DashboardCtrl($rootScope, $routeParams, $location, $timeout, $q, $uibMo
};

this.editDashboard = () => {
const previousFiltersState = this.dashboard.dashboard_filters_enabled;
$uibModal.open({
component: 'editDashboardDialog',
resolve: {
dashboard: () => this.dashboard,
},
}).result.then((dashboard) => { this.dashboard = dashboard; });
}).result.then((dashboard) => {
const shouldRenderDashboard = !previousFiltersState && dashboard.dashboard_filters_enabled;
this.dashboard = dashboard;

if (shouldRenderDashboard) {
renderDashboard(this.dashboard);
}
});
};

this.addWidget = () => {
@@ -7,6 +7,13 @@
<input type="text" class="form-control" placeholder="Dashboard Name" ng-model="$ctrl.dashboard.name">
</p>

<p ng-if="$ctrl.dashboard.id">
<label>
<input name="input" type="checkbox" ng-model="$ctrl.dashboard.dashboard_filters_enabled">
Use Dashboard Level Filters
</label>
</p>

<div gridster="$ctrl.gridsterOptions" ng-if="$ctrl.items | notEmpty">
<ul>
<li gridster-item="item" ng-repeat="item in $ctrl.items" class="widget panel panel-default gs-w">
@@ -17,5 +24,5 @@
</div>
<div class="modal-footer">
<button type="button" class="btn btn-default" ng-disabled="$ctrl.saveInProgress" ng-click="$ctrl.dismiss()">Close</button>
<button type="button" class="btn btn-primary" ng-disabled="$ctrl.saveInProgress" ng-click="$ctrl.saveDashboard()">Save</button>
<button type="button" class="btn btn-primary" ng-disabled="$ctrl.saveInProgress || !$ctrl.isFormValid()" ng-click="$ctrl.saveDashboard()">Save</button>
</div>
@@ -1,4 +1,4 @@
import { sortBy } from 'underscore';
import { isEmpty, sortBy } from 'underscore';
import template from './edit-dashboard-dialog.html';

const EditDashboardDialog = {
@@ -45,6 +45,8 @@ const EditDashboardDialog = {
});
}

this.isFormValid = () => !isEmpty(this.dashboard.name);

this.saveDashboard = () => {
this.saveInProgress = true;

@@ -65,6 +67,7 @@ const EditDashboardDialog = {
slug: this.dashboard.id,
name: this.dashboard.name,
version: this.dashboard.version,
dashboard_filters_enabled: this.dashboard.dashboard_filters_enabled,
layout: JSON.stringify(layout),
};
@@ -31,7 +31,7 @@ export default function (ngModule) {
'ngInject';

const token = $route.current.params.token;
return $http.get(`/api/dashboards/public/${token}`).then(response =>
return $http.get(`api/dashboards/public/${token}`).then(response =>
response.data
);
}
@@ -78,8 +78,8 @@ function DashboardWidgetCtrl($location, $uibModal, $window, Events, currentUser)
};

if (this.widget.visualization) {
Events.record('view', 'query', this.widget.visualization.query.id);
Events.record('view', 'visualization', this.widget.visualization.id);
Events.record('view', 'query', this.widget.visualization.query.id, { dashboard: true });
Events.record('view', 'visualization', this.widget.visualization.id, { dashboard: true });

this.query = this.widget.getQuery();
this.reload(false);
client/app/pages/queries/api-key-dialog.js (new file, 37 lines)
@@ -0,0 +1,37 @@
const ApiKeyDialog = {
  template: `<div class="modal-header">
    <button type="button" class="close" aria-label="Close" ng-click="$ctrl.close()"><span aria-hidden="true">×</span></button>
  </div>
  <div class="modal-body">
    <h5>API Key</h5>
    <pre>{{$ctrl.apiKey}}</pre>

    <h5>Example API Calls:</h5>

    <div>
      Results in CSV format:

      <pre>{{$ctrl.csvUrl}}</pre>

      Results in JSON format:

      <pre>{{$ctrl.jsonUrl}}</pre>
    </div>
  </div>`,
  controller(clientConfig) {
    'ngInject';

    this.apiKey = this.resolve.query.api_key;
    this.csvUrl = `${clientConfig.basePath}api/queries/${this.resolve.query.id}/results.csv?api_key=${this.apiKey}`;
    this.jsonUrl = `${clientConfig.basePath}api/queries/${this.resolve.query.id}/results.json?api_key=${this.apiKey}`;
  },
  bindings: {
    resolve: '<',
    close: '&',
    dismiss: '&',
  },
};

export default function (ngModule) {
  ngModule.component('apiKeyDialog', ApiKeyDialog);
}
@@ -4,6 +4,7 @@ import registerQueryResultsLink from './query-results-link';
import registerQueryEditor from './query-editor';
import registerSchemaBrowser from './schema-browser';
import registerEmbedCodeDialog from './embed-code-dialog';
import registerApiKeyDialog from './api-key-dialog';
import registerScheduleDialog from './schedule-dialog';
import registerAlertUnsavedChanges from './alert-unsaved-changes';
import registerQuerySearchResultsPage from './queries-search-results-page';
@@ -17,6 +18,7 @@ export default function (ngModule) {
registerScheduleDialog(ngModule);
registerAlertUnsavedChanges(ngModule);
registerVisualizationEmbed(ngModule);
registerApiKeyDialog(ngModule);

return Object.assign({}, registerQuerySearchResultsPage(ngModule),
registerSourceView(ngModule),
@@ -1,12 +1,14 @@
<div class="container">
<div class="bg-white p-5 m-t-10 m-b-10">
<form class="form-inline" role="form" ng-submit="$ctrl.search()">
<div class="form-group">
<input class="form-control" placeholder="Search..." ng-model="$ctrl.term" autofocus>
</div>
<button type="submit" class="btn btn-primary">
<span class="zmdi zmdi-search"></span>
</button>
<div class="input-group">
<input class="form-control input-sm" placeholder="Search..." ng-model="$ctrl.term" autofocus>
<span class="input-group-btn">
<button class="btn btn-primary" type="submit">
<span class="zmdi zmdi-search"></span>
</button>
</span>
</div>
</form>
</div>
<div class="bg-white">
@@ -158,7 +158,12 @@ function QueryViewCtrl($scope, Events, $route, $routeParams, $location, $window,
};

$scope.showApiKey = () => {
$window.alert(`API Key for this query:\n${$scope.query.api_key}`);
$uibModal.open({
component: 'apiKeyDialog',
resolve: {
query: $scope.query,
},
});
};

$scope.saveQuery = (customOptions, data) => {
@@ -206,7 +211,12 @@ function QueryViewCtrl($scope, Events, $route, $routeParams, $location, $window,

$scope.saveName = () => {
Events.record('edit_name', 'query', $scope.query.id);
$scope.saveQuery(undefined, { name: $scope.query.name });

if ($scope.query.is_draft && clientConfig.autoPublishNamedQueries && $scope.query.name !== 'New Query') {
$scope.query.is_draft = false;
}

$scope.saveQuery(undefined, { name: $scope.query.name, is_draft: $scope.query.is_draft });
};

$scope.cancelExecution = () => {
@@ -36,8 +36,8 @@ export default function (ngModule) {
function loadData($http, $route, $q, Auth) {
return session($http, $route, Auth).then(() => {
const queryId = $route.current.params.queryId;
const query = $http.get(`/api/queries/${queryId}`).then(response => response.data);
const queryResult = $http.get(`/api/queries/${queryId}/results.json`).then(response => response.data);
const query = $http.get(`api/queries/${queryId}`).then(response => response.data);
const queryResult = $http.get(`api/queries/${queryId}/results.json${location.search}`).then(response => response.data);
return $q.all([query, queryResult]);
});
}
@@ -31,12 +31,12 @@ function AuthService($window, $location, $q, $http) {
login() {
const next = encodeURI($location.url());
logger('Calling login with next = %s', next);
window.location.href = `/login?next=${next}`;
window.location.href = `login?next=${next}`;
},
logout() {
logger('Logout.');
window.sessionStorage.removeItem(SESSION_ITEM);
$window.location.href = '/logout';
$window.location.href = 'logout';
},
loadSession() {
logger('Loading session');
@@ -47,7 +47,7 @@ function AuthService($window, $location, $q, $http) {
}

this.setApiKey(null);
return $http.get('/api/session').then((response) => {
return $http.get('api/session').then((response) => {
storeSession(response.data);
return session;
});
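Several hunks in this comparison drop the leading slash from API and auth URLs ('/api/session' becomes 'api/session', '/login' becomes 'login'). Together with the `<base href="{{base_href}}">` tag added in multi_org.html, relative paths resolve under the per-organization base path instead of the domain root. A hedged sketch of the difference using the standard URL resolver (the host and prefix below are hypothetical):

```js
// Hypothetical deployment of Redash under a per-organization prefix.
const base = 'https://example.com/acme/';

// Absolute path: ignores the base and always hits the domain root.
console.log(new URL('/api/session', base).href); // https://example.com/api/session

// Relative path: resolves against the <base href>, staying inside the prefix.
console.log(new URL('api/session', base).href);  // https://example.com/acme/api/session
```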
@@ -3,6 +3,7 @@ import moment from 'moment';
import { uniq, contains, values, some, each, isArray, isNumber, isString } from 'underscore';

const logger = debug('redash:services:QueryResult');
const filterTypes = ['filter', 'multi-filter', 'multiFilter'];

function getColumnNameWithoutType(column) {
let typeSplit;
@@ -18,6 +19,11 @@ function getColumnNameWithoutType(column) {
if (parts[0] === '' && parts.length === 2) {
return parts[1];
}

if (!contains(filterTypes, parts[1])) {
return column;
}

return parts[0];
}

@@ -86,16 +92,27 @@ function QueryResultService($resource, $timeout, $q) {
// and better be removed. Kept for now, for backward compatibility.
each(this.query_result.data.rows, (row) => {
each(row, (v, k) => {
let newType = null;
if (isNumber(v)) {
columnTypes[k] = 'float';
newType = 'float';
} else if (isString(v) && v.match(/^\d{4}-\d{2}-\d{2}T/)) {
row[k] = moment.utc(v);
columnTypes[k] = 'datetime';
newType = 'datetime';
} else if (isString(v) && v.match(/^\d{4}-\d{2}-\d{2}$/)) {
row[k] = moment.utc(v);
columnTypes[k] = 'date';
newType = 'date';
} else if (typeof (v) === 'object' && v !== null) {
row[k] = JSON.stringify(v);
} else {
newType = 'string';
}

if (newType !== null) {
if (columnTypes[k] !== undefined && columnTypes[k] !== newType) {
columnTypes[k] = 'string';
} else {
columnTypes[k] = newType;
}
}
});
});
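The reworked loop above infers a type for every cell and keeps a column's type only while all rows agree; any conflict demotes the column to 'string'. A self-contained sketch of that reconciliation rule (the helpers below are illustrative, not the service itself, and skip the object and date-parsing branches):

```js
// Sketch: infer a type per value, then reconcile across rows so that a column
// mixing, say, numbers and text ends up typed as 'string'.
function inferType(v) {
  if (typeof v === 'number') return 'float';
  if (typeof v === 'string' && /^\d{4}-\d{2}-\d{2}T/.test(v)) return 'datetime';
  if (typeof v === 'string' && /^\d{4}-\d{2}-\d{2}$/.test(v)) return 'date';
  return 'string';
}

function reconcileColumnTypes(rows) {
  const columnTypes = {};
  rows.forEach((row) => {
    Object.keys(row).forEach((k) => {
      const newType = inferType(row[k]);
      if (columnTypes[k] !== undefined && columnTypes[k] !== newType) {
        columnTypes[k] = 'string'; // conflicting rows fall back to string
      } else {
        columnTypes[k] = newType;
      }
    });
  });
  return columnTypes;
}

console.log(reconcileColumnTypes([
  { amount: 1.5, day: '2017-01-01' },
  { amount: 'n/a', day: '2017-01-02' },
])); // { amount: 'string', day: 'date' }
```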
@@ -331,7 +348,6 @@ function QueryResultService($resource, $timeout, $q) {
}

const filters = [];
const filterTypes = ['filter', 'multi-filter', 'multiFilter'];

this.getColumns().forEach((col) => {
const name = col.name;
@@ -388,14 +404,39 @@ function QueryResultService($resource, $timeout, $q) {
return queryResult;
}

loadResult(tryCount) {
QueryResultResource.get({ id: this.job.query_result_id },
(response) => {
this.update(response);
},
(error) => {
if (tryCount === undefined) {
tryCount = 0;
}

if (tryCount > 3) {
logger('Connection error while trying to load result', error);
this.update({
job: {
error: 'failed communicating with server. Please check your Internet connection and try again.',
status: 4,
},
});
} else {
$timeout(() => {
this.loadResult(tryCount + 1);
}, 1000 * Math.pow(2, tryCount));
}
}
);
}

refreshStatus(query) {
Job.get({ id: this.job.id }, (jobResponse) => {
this.update(jobResponse);

if (this.getStatus() === 'processing' && this.job.query_result_id && this.job.query_result_id !== 'None') {
QueryResultResource.get({ id: this.job.query_result_id }, (response) => {
this.update(response);
});
this.loadResult();
} else if (this.getStatus() !== 'failed') {
$timeout(() => {
this.refreshStatus(query);
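The new loadResult retries a failed fetch with exponentially growing delays (1 s, 2 s, 4 s, 8 s via 1000 * 2^tryCount) and reports a connection error once the retries are exhausted. A standalone sketch of that schedule, with a generic promise-returning fetchFn standing in for the $resource call (all names here are illustrative):

```js
// Sketch: retry an async operation with exponential backoff, giving up after tryCount > 3.
function loadWithRetry(fetchFn, onGiveUp, tryCount = 0) {
  return fetchFn().catch((error) => {
    if (tryCount > 3) {
      onGiveUp(error); // e.g. surface "failed communicating with server" to the UI
      return undefined;
    }
    const delayMs = 1000 * Math.pow(2, tryCount);
    return new Promise(resolve => setTimeout(resolve, delayMs))
      .then(() => loadWithRetry(fetchFn, onGiveUp, tryCount + 1));
  });
}

// Usage: fails twice, then succeeds on the third attempt (after roughly 1 s + 2 s).
let attempts = 0;
loadWithRetry(
  () => (attempts++ < 2 ? Promise.reject(new Error('boom')) : Promise.resolve('ok')),
  err => console.error('giving up:', err.message),
).then(result => console.log(result, 'after', attempts, 'attempts'));
```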
@@ -409,7 +450,7 @@ function QueryResultService($resource, $timeout, $q) {
}

getLink(queryId, fileType, apiKey) {
let link = `/api/queries/${queryId}/results/${this.getId()}.${fileType}`;
let link = `api/queries/${queryId}/results/${this.getId()}.${fileType}`;
if (apiKey) {
link = `${link}?api_key=${apiKey}`;
}
@@ -50,6 +50,7 @@ class Parameter {
this.type = parameter.type;
this.value = parameter.value;
this.global = parameter.global;
this.enumOptions = parameter.enumOptions;
}

get ngModel() {
@@ -1,8 +1,12 @@
import { sortBy } from 'underscore';

export default class Paginator {
constructor(rows, { page = 1, itemsPerPage = 20, totalCount = undefined } = {}) {
this.page = page;
this.itemsPerPage = itemsPerPage;
this.updateRows(rows, totalCount);
this.orderByField = undefined;
this.orderByReverse = false;
}

setPage(page) {
@@ -24,4 +28,20 @@ export default class Paginator {
this.totalCount = 0;
}
}

orderBy(column) {
if (column === this.orderByField) {
this.orderByReverse = !this.orderByReverse;
} else {
this.orderByField = column;
this.orderByReverse = false;
}

if (this.orderByField) {
this.rows = sortBy(this.rows, this.orderByField);
if (this.orderByReverse) {
this.rows = this.rows.reverse();
}
}
}
}
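The orderBy method added above is what the new sortable column headers call: clicking a different column sorts ascending, clicking the same column again flips the direction, and the sort-icon component reads orderByField/orderByReverse to draw the arrow. A compact sketch of the toggle logic (a plain Array.sort stands in for underscore's sortBy):

```js
// Sketch: same column toggles direction, a different column resets to ascending.
class SortableRows {
  constructor(rows) {
    this.rows = rows;
    this.orderByField = undefined;
    this.orderByReverse = false;
  }

  orderBy(column) {
    if (column === this.orderByField) {
      this.orderByReverse = !this.orderByReverse;
    } else {
      this.orderByField = column;
      this.orderByReverse = false;
    }
    this.rows = [...this.rows].sort((a, b) => (a[column] > b[column] ? 1 : -1));
    if (this.orderByReverse) {
      this.rows.reverse();
    }
  }
}

const list = new SortableRows([{ name: 'b' }, { name: 'a' }, { name: 'c' }]);
list.orderBy('name');
console.log(list.rows.map(r => r.name)); // [ 'a', 'b', 'c' ]  ascending
list.orderBy('name');
console.log(list.rows.map(r => r.name)); // [ 'c', 'b', 'a' ]  same column, reversed
```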
@@ -176,7 +176,7 @@ export default function (ngModule) {

VisualizationProvider.registerVisualization({
type: 'BOXPLOT',
name: 'Boxplot',
name: 'Boxplot (Deprecated)',
renderTemplate,
editorTemplate: editTemplate,
});
@@ -103,6 +103,13 @@
</label>
</div>

<div class="checkbox" ng-if="options.globalSeriesType == 'box'">
<label>
<input type="checkbox" ng-model="options.showpoints">
<i class="input-helper"></i> Show All Points
</label>
</div>

<div class="form-group" ng-if="options.globalSeriesType != 'custom'">
<label class="control-label">Stacking</label>

@@ -116,6 +123,13 @@
</ui-select>
</div>
</div>

<div class="form-group" ng-if="options.globalSeriesType == 'box'">
<label>
<label class="control-label">Graph Height</label>
<input name="graph-height" type="number" class="form-control" ng-model="options.height">
</label>
</div>
</div>

<div class="form-group" ng-if="options.globalSeriesType == 'custom'">
@@ -69,6 +69,7 @@ function ChartEditor(ColorPalette, clientConfig) {
pie: { name: 'Pie', icon: 'pie-chart' },
scatter: { name: 'Scatter', icon: 'circle-o' },
bubble: { name: 'Bubble', icon: 'circle-o' },
box: { name: 'Box', icon: 'square-o' },
};

if (clientConfig.allowCustomJSVisualizations) {
@@ -76,7 +77,7 @@ function ChartEditor(ColorPalette, clientConfig) {
}

scope.xAxisScales = ['datetime', 'linear', 'logarithmic', 'category'];
scope.yAxisScales = ['linear', 'logarithmic', 'datetime'];
scope.yAxisScales = ['linear', 'logarithmic', 'datetime', 'category'];

scope.chartTypeChanged = () => {
keys(scope.options.seriesOptions).forEach((key) => {
@@ -4,10 +4,11 @@ import Plotly from 'plotly.js/lib/core';
import bar from 'plotly.js/lib/bar';
import pie from 'plotly.js/lib/pie';
import histogram from 'plotly.js/lib/histogram';
import box from 'plotly.js/lib/box';

import moment from 'moment';

Plotly.register([bar, pie, histogram]);
Plotly.register([bar, pie, histogram, box]);
Plotly.setPlotConfig({
modeBarButtonsToRemove: ['sendDataToCloud'],
});
@@ -197,6 +198,9 @@ const PlotlyChart = () => {
link(scope, element) {
function calculateHeight() {
const height = Math.max(scope.height, (scope.height - 50) + bottomMargin);
if (scope.options.globalSeriesType === 'box') {
return scope.options.height || height;
}
return height;
}

@@ -213,6 +217,9 @@ const PlotlyChart = () => {
series.mode = 'markers';
} else if (type === 'bubble') {
series.mode = 'markers';
} else if (type === 'box') {
series.type = 'box';
series.mode = 'markers';
}
}

@@ -273,6 +280,12 @@ const PlotlyChart = () => {
return;
}

if (scope.options.globalSeriesType === 'box') {
scope.options.sortX = false;
scope.layout.boxmode = 'group';
scope.layout.boxgroupgap = 0.50;
}

let hasY2 = false;
const sortX = scope.options.sortX === true || scope.options.sortX === undefined;
const useUnifiedXaxis = sortX && scope.options.xAxis.type === 'category';
@@ -341,6 +354,22 @@ const PlotlyChart = () => {
size: pluck(data, 'size'),
};
}

if (seriesOptions.type === 'box') {
plotlySeries.boxpoints = 'outliers';
plotlySeries.marker = {
size: 3,
};
if (scope.options.showpoints) {
plotlySeries.boxpoints = 'all';
plotlySeries.jitter = 0.3;
plotlySeries.pointpos = -1.8;
plotlySeries.marker = {
size: 3,
};
}
}

scope.data.push(plotlySeries);
});
@@ -25,7 +25,7 @@ function cohortRenderer() {
}

const sortedData = _.sortBy($scope.queryResult.getData(), r =>
r.date + r.day_number
r.date + parseInt(r.day_number, 10)
);

const grouped = _.groupBy(sortedData, 'date');
@@ -35,9 +35,9 @@ function cohortRenderer() {
, 0);

const data = _.map(grouped, (values) => {
const row = [values[0].total];
const row = [parseInt(values[0].total, 10)];
_.each(values, (value) => {
row.push(value.value);
row.push(parseInt(value.value, 10));
});
_.each(_.range(values.length, maxColumns), () => {
row.push(null);
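The cohort fix above wraps total, value, and day_number in parseInt because query results can arrive as strings, and '+' on strings concatenates instead of adding. A short illustration of the failure mode the change guards against (sample data made up):

```js
// Why the cohort renderer now parses its numeric columns before using them.
const values = [{ value: '7' }, { value: '3' }];

const raw = values.reduce((sum, v) => sum + v.value, 0);
const parsed = values.reduce((sum, v) => sum + parseInt(v.value, 10), 0);

console.log(raw);    // '073'  (string concatenation)
console.log(parsed); // 10
```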
@@ -8,7 +8,7 @@
<div class="form-group">
<label class="col-lg-6">Counter Value Row Number</label>
<div class="col-lg-6">
<input type="number" ng-model="visualization.options.rowNumber" min="1" class="form-control" ng-disabled="visualization.options.countRow">
<input type="number" ng-model="visualization.options.rowNumber" class="form-control" ng-disabled="visualization.options.countRow">
</div>
</div>
<div class="form-group">
@@ -22,7 +22,7 @@
<div class="form-group" ng-if="visualization.options.targetColName">
<label class="col-lg-6">Target Value Row Number</label>
<div class="col-lg-6">
<input type="number" ng-model="visualization.options.targetRowNumber" min="1" class="form-control">
<input type="number" ng-model="visualization.options.targetRowNumber" class="form-control">
</div>
</div>
<div class="form-group">
@@ -1,6 +1,18 @@
import counterTemplate from './counter.html';
import counterEditorTemplate from './counter-editor.html';

function getRowNumber(index, size) {
if (index >= 0) {
return index - 1;
}

if (Math.abs(index) > size) {
index %= size;
}

return size + index;
}

function CounterRenderer() {
return {
restrict: 'E',
@@ -9,8 +21,9 @@ function CounterRenderer() {
const refreshData = () => {
const queryData = $scope.queryResult.getData();
if (queryData) {
const rowNumber = $scope.visualization.options.rowNumber - 1;
const targetRowNumber = $scope.visualization.options.targetRowNumber - 1;
const rowNumber = getRowNumber($scope.visualization.options.rowNumber, queryData.length);
const targetRowNumber =
getRowNumber($scope.visualization.options.targetRowNumber, queryData.length);
const counterColName = $scope.visualization.options.counterColName;
const targetColName = $scope.visualization.options.targetColName;
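getRowNumber above makes the counter's row selector one-based for positive values and counted from the end for negative ones, wrapping around when the magnitude exceeds the row count. A standalone sketch with a few worked lookups (the function mirrors the hunk; the sample rows are made up):

```js
// Sketch: 1-based from the start for index >= 0, from the end for negative indexes,
// with wrap-around when the negative index is larger than the data.
function getRowNumber(index, size) {
  if (index >= 0) {
    return index - 1;
  }
  if (Math.abs(index) > size) {
    index %= size;
  }
  return size + index;
}

const rows = ['first', 'second', 'third', 'fourth'];
console.log(rows[getRowNumber(1, rows.length)]);  // 'first'
console.log(rows[getRowNumber(-1, rows.length)]); // 'fourth' (last row)
console.log(rows[getRowNumber(-5, rows.length)]); // 'fourth' (wraps past the start)
```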
@@ -3,6 +3,9 @@ import $ from 'jquery';
import 'pivottable';
import 'pivottable/dist/pivot.css';

import editorTemplate from './pivottable-editor.html';

function pivotTableRenderer() {
return {
restrict: 'E',
@@ -13,48 +16,77 @@ function pivotTableRenderer() {
template: '',
replace: false,
link($scope, element) {
$scope.$watch('queryResult && queryResult.getData()', (data) => {
if (!data) {
return;
}
function removeControls() {
const hideControls =
$scope.visualization.options.controls &&
$scope.visualization.options.controls.enabled;

if ($scope.queryResult.getData() !== null) {
// We need to give the pivot table its own copy of the data, because it changes
// it which interferes with other visualizations.
data = angular.copy($scope.queryResult.getData());
const options = {
renderers: $.pivotUtilities.renderers,
onRefresh(config) {
const configCopy = Object.assign({}, config);
// delete some values which are functions
delete configCopy.aggregators;
delete configCopy.renderers;
delete configCopy.onRefresh;
// delete some bulky default values
delete configCopy.rendererOptions;
delete configCopy.localeStrings;

if ($scope.visualization) {
$scope.visualization.options = configCopy;
}
},
};

if ($scope.visualization) {
Object.assign(options, $scope.visualization.options);
document.querySelectorAll('.pvtAxisContainer, .pvtRenderer, .pvtVals').forEach((control) => {
if (hideControls) {
control.style.display = 'none';
} else {
control.style.display = '';
}
$(element).pivotUI(data, options, true);
}
});
});
}

function updatePivot() {
$scope.$watch('queryResult && queryResult.getData()', (data) => {
if (!data) {
return;
}

if ($scope.queryResult.getData() !== null) {
// We need to give the pivot table its own copy of the data, because it changes
// it which interferes with other visualizations.
data = angular.copy($scope.queryResult.getData());
const options = {
renderers: $.pivotUtilities.renderers,
onRefresh(config) {
const configCopy = Object.assign({}, config);
// delete some values which are functions
delete configCopy.aggregators;
delete configCopy.renderers;
delete configCopy.onRefresh;
// delete some bulky default values
delete configCopy.rendererOptions;
delete configCopy.localeStrings;

if ($scope.visualization) {
$scope.visualization.options = configCopy;
}
},
};

if ($scope.visualization) {
Object.assign(options, $scope.visualization.options);
}

$(element).pivotUI(data, options, true);
removeControls();
}
});
}

$scope.$watch('queryResult && queryResult.getData()', updatePivot);
$scope.$watch('visualization.options.controls.enabled', removeControls);
},
};
}

function pivotTableEditor() {
return {
restrict: 'E',
template: editorTemplate,
};
}

export default function (ngModule) {
ngModule.directive('pivotTableRenderer', pivotTableRenderer);
ngModule.directive('pivotTableEditor', pivotTableEditor);

ngModule.config((VisualizationProvider) => {
const editTemplate = '<div/>';
const editTemplate = '<pivot-table-editor></pivot-table-editor>';
const defaultOptions = {
};
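The reworked pivot renderer splits rendering (updatePivot) from control visibility (removeControls) and re-runs the latter whenever the new 'Hide Pivot Controls' option changes, toggling the pivotUI control elements via display. A DOM-free sketch of just the visibility rule (the plain objects below stand in for the .pvtAxisContainer/.pvtRenderer/.pvtVals nodes):

```js
// Sketch: hide or show pivot controls based on options.controls.enabled.
function applyControlsVisibility(options, controls) {
  const hideControls = options.controls && options.controls.enabled;
  controls.forEach((control) => {
    control.style.display = hideControls ? 'none' : '';
  });
}

const controls = [{ style: { display: '' } }, { style: { display: '' } }];
applyControlsVisibility({ controls: { enabled: true } }, controls);
console.log(controls.map(c => c.style.display)); // [ 'none', 'none' ]
applyControlsVisibility({ controls: { enabled: false } }, controls);
console.log(controls.map(c => c.style.display)); // [ '', '' ]
```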
client/app/visualizations/pivot/pivottable-editor.html (new file, 10 lines)
@@ -0,0 +1,10 @@
<div class="form-horizontal">
  <div class="form-group">
    <div class="col-lg-6">
      <label>
        <input type="checkbox" ng-model="visualization.options.controls.enabled">
        Hide Pivot Controls
      </label>
    </div>
  </div>
</div>
@@ -20,6 +20,7 @@ services:
REDASH_REDIS_URL: "redis://redis:6379/0"
REDASH_DATABASE_URL: "postgresql://postgres@postgres/postgres"
REDASH_COOKIE_SECRET: veryverysecret
REDASH_WEB_WORKERS: 4
worker:
image: redash/redash:latest
command: scheduler
npm-shrinkwrap.json (generated, 388 lines changed: dependency additions, removals, and version bumps in the lockfile)
|
||||
"resolved": "https://registry.npmjs.org/supercluster/-/supercluster-2.3.0.tgz"
|
||||
},
|
||||
"superscript-text": {
|
||||
"version": "1.0.0",
|
||||
@@ -2798,7 +2793,7 @@
|
||||
},
|
||||
"through": {
|
||||
"version": "2.3.8",
|
||||
"from": "through@>=2.3.6 <3.0.0",
|
||||
"from": "through@>=2.3.8 <2.4.0",
|
||||
"resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz"
|
||||
},
|
||||
"through2": {
|
||||
@@ -2818,6 +2813,11 @@
|
||||
}
|
||||
}
|
||||
},
|
||||
"tiny-sdf": {
|
||||
"version": "1.0.2",
|
||||
"from": "tiny-sdf@>=1.0.2 <2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/tiny-sdf/-/tiny-sdf-1.0.2.tgz"
|
||||
},
|
||||
"tinycolor2": {
|
||||
"version": "1.4.1",
|
||||
"from": "tinycolor2@>=1.3.0 <2.0.0",
|
||||
@@ -2853,11 +2853,6 @@
|
||||
"from": "triangulate-polyline@>=1.0.0 <2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/triangulate-polyline/-/triangulate-polyline-1.0.3.tgz"
|
||||
},
|
||||
"tryor": {
|
||||
"version": "0.1.2",
|
||||
"from": "tryor@>=0.1.2 <0.2.0",
|
||||
"resolved": "https://registry.npmjs.org/tryor/-/tryor-0.1.2.tgz"
|
||||
},
|
||||
"tunnel-agent": {
|
||||
"version": "0.4.3",
|
||||
"from": "tunnel-agent@>=0.4.1 <0.5.0",
|
||||
@@ -2891,7 +2886,7 @@
|
||||
},
|
||||
"typedarray": {
|
||||
"version": "0.0.6",
|
||||
"from": "typedarray@>=0.0.5 <0.1.0",
|
||||
"from": "typedarray@>=0.0.6 <0.0.7",
|
||||
"resolved": "https://registry.npmjs.org/typedarray/-/typedarray-0.0.6.tgz"
|
||||
},
|
||||
"typedarray-pool": {
|
||||
@@ -2926,11 +2921,6 @@
|
||||
"from": "unassert@>=1.3.1 <2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/unassert/-/unassert-1.5.1.tgz",
|
||||
"dependencies": {
|
||||
"acorn": {
|
||||
"version": "4.0.4",
|
||||
"from": "acorn@>=4.0.0 <5.0.0",
|
||||
"resolved": "https://registry.npmjs.org/acorn/-/acorn-4.0.4.tgz"
|
||||
},
|
||||
"estraverse": {
|
||||
"version": "4.2.0",
|
||||
"from": "estraverse@>=4.1.0 <5.0.0",
|
||||
@@ -2943,11 +2933,6 @@
|
||||
"from": "unassertify@>=2.0.0 <3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/unassertify/-/unassertify-2.0.4.tgz",
|
||||
"dependencies": {
|
||||
"acorn": {
|
||||
"version": "4.0.4",
|
||||
"from": "acorn@>=4.0.0 <5.0.0",
|
||||
"resolved": "https://registry.npmjs.org/acorn/-/acorn-4.0.4.tgz"
|
||||
},
|
||||
"escodegen": {
|
||||
"version": "1.8.1",
|
||||
"from": "escodegen@>=1.6.1 <2.0.0",
|
||||
@@ -2968,12 +2953,12 @@
|
||||
},
|
||||
"underscore": {
|
||||
"version": "1.8.3",
|
||||
"from": "underscore@latest",
|
||||
"from": "underscore@>=1.8.3 <2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/underscore/-/underscore-1.8.3.tgz"
|
||||
},
|
||||
"underscore.string": {
|
||||
"version": "3.3.4",
|
||||
"from": "underscore.string@latest",
|
||||
"from": "underscore.string@>=3.3.4 <4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/underscore.string/-/underscore.string-3.3.4.tgz"
|
||||
},
|
||||
"union-find": {
|
||||
@@ -3056,11 +3041,6 @@
|
||||
"from": "window-size@0.1.0",
|
||||
"resolved": "https://registry.npmjs.org/window-size/-/window-size-0.1.0.tgz"
|
||||
},
|
||||
"wordwrap": {
|
||||
"version": "0.0.3",
|
||||
"from": "wordwrap@>=0.0.2 <0.1.0",
|
||||
"resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-0.0.3.tgz"
|
||||
},
|
||||
"world-calendars": {
|
||||
"version": "1.0.3",
|
||||
"from": "world-calendars@>=1.0.3 <2.0.0",
|
||||
@@ -3073,7 +3053,7 @@
|
||||
},
|
||||
"xtend": {
|
||||
"version": "4.0.1",
|
||||
"from": "xtend@>=4.0.0 <5.0.0",
|
||||
"from": "xtend@>=4.0.0 <4.1.0-0",
|
||||
"resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.1.tgz"
|
||||
},
|
||||
"yargs": {
|
||||
|
||||
package.json
@@ -1,10 +1,10 @@
|
||||
{
|
||||
"name": "redash-client",
|
||||
"version": "1.0.2",
|
||||
"version": "2.0.1",
|
||||
"description": "The frontend part of Redash.",
|
||||
"main": "index.js",
|
||||
"scripts": {
|
||||
"start": "webpack-dev-server --content-base client/app",
|
||||
"start": "webpack-dev-server",
|
||||
"build": "rm -rf ./client/dist/ && NODE_ENV=production node node_modules/.bin/webpack",
|
||||
"watch": "webpack --watch --progress --colors -d"
|
||||
},
|
||||
@@ -22,12 +22,12 @@
|
||||
"angular": "~1.5.8",
|
||||
"angular-base64-upload": "^0.1.19",
|
||||
"angular-gridster": "^0.13.14",
|
||||
"angular-messages": "^1.5.8",
|
||||
"angular-messages": "~1.5.8",
|
||||
"angular-moment": "^1.0.0",
|
||||
"angular-resizable": "^1.2.0",
|
||||
"angular-resource": "^1.5.8",
|
||||
"angular-route": "^1.5.8",
|
||||
"angular-sanitize": "^1.5.8",
|
||||
"angular-resource": "~1.5.8",
|
||||
"angular-route": "~1.5.8",
|
||||
"angular-sanitize": "~1.5.8",
|
||||
"angular-toastr": "^2.1.1",
|
||||
"angular-ui-ace": "^0.2.3",
|
||||
"angular-ui-bootstrap": "^2.2.0",
|
||||
@@ -48,11 +48,9 @@
|
||||
"moment": "^2.15.2",
|
||||
"mousetrap": "^1.6.0",
|
||||
"mustache": "^2.2.1",
|
||||
"ng-annotate": "^1.2.1",
|
||||
"ng-annotate-loader": "^0.2.0",
|
||||
"pace-progress": "git+https://github.com/getredash/pace.git",
|
||||
"pivottable": "^2.3.0",
|
||||
"plotly.js": "1.21.2",
|
||||
"plotly.js": "1.26.1",
|
||||
"ui-select": "^0.19.6",
|
||||
"underscore": "^1.8.3",
|
||||
"underscore.string": "^3.3.4"
|
||||
@@ -63,20 +61,21 @@
|
||||
"babel-plugin-transform-object-assign": "^6.22.0",
|
||||
"babel-preset-es2015": "^6.18.0",
|
||||
"babel-preset-stage-2": "^6.18.0",
|
||||
"css-loader": "^0.25.0",
|
||||
"css-loader": "^0.28.4",
|
||||
"eslint": "^3.9.0",
|
||||
"eslint-config-airbnb-base": "^9.0.0",
|
||||
"eslint-loader": "^1.6.0",
|
||||
"eslint-plugin-import": "^2.0.1",
|
||||
"extract-text-webpack-plugin": "^1.0.1",
|
||||
"file-loader": "^0.9.0",
|
||||
"extract-text-webpack-plugin": "^2.1.2",
|
||||
"file-loader": "^0.11.2",
|
||||
"html-webpack-plugin": "^2.24.0",
|
||||
"node-sass": "^4.3.0",
|
||||
"ng-annotate-loader": "^0.6.1",
|
||||
"node-sass": "^4.5.3",
|
||||
"raw-loader": "^0.5.1",
|
||||
"sass-loader": "^4.1.1",
|
||||
"url-loader": "^0.5.7",
|
||||
"webpack": "^1.13.3",
|
||||
"webpack-build-notifier": "^0.1.13",
|
||||
"webpack-dev-server": "^1.16.2"
|
||||
"sass-loader": "^6.0.6",
|
||||
"url-loader": "^0.5.9",
|
||||
"webpack": "^2.6.1",
|
||||
"webpack-build-notifier": "^0.1.14",
|
||||
"webpack-dev-server": "^2.4.5"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,7 +1,9 @@
|
||||
import os
|
||||
import sys
|
||||
import logging
|
||||
import urlparse
|
||||
import redis
|
||||
from flask import Flask
|
||||
from flask import Flask, safe_join
|
||||
from flask_sslify import SSLify
|
||||
from werkzeug.contrib.fixers import ProxyFix
|
||||
from werkzeug.routing import BaseConverter, ValidationError
|
||||
@@ -16,11 +18,11 @@ from redash.query_runner import import_query_runners
|
||||
from redash.destinations import import_destinations
|
||||
|
||||
|
||||
__version__ = '1.0.2'
|
||||
__version__ = '2.0.1'
|
||||
|
||||
|
||||
def setup_logging():
|
||||
handler = logging.StreamHandler()
|
||||
handler = logging.StreamHandler(sys.stdout if settings.LOG_STDOUT else sys.stderr)
|
||||
formatter = logging.Formatter('[%(asctime)s][PID:%(process)d][%(levelname)s][%(name)s] %(message)s')
|
||||
handler.setFormatter(formatter)
|
||||
logging.getLogger().addHandler(handler)
|
||||
@@ -31,6 +33,7 @@ def setup_logging():
|
||||
logging.getLogger("passlib").setLevel("ERROR")
|
||||
logging.getLogger("requests.packages.urllib3").setLevel("ERROR")
|
||||
logging.getLogger("snowflake.connector").setLevel("ERROR")
|
||||
logging.getLogger('apiclient').setLevel("ERROR")
|
||||
|
||||
|
||||
def create_redis_connection():
|
||||
@@ -72,9 +75,11 @@ reset_new_version_status()

class SlugConverter(BaseConverter):
def to_python(self, value):
# This is an ugly workaround for when we enable multi-org and some files are being called by the index rule:
if value in ('google_login.png', 'favicon.ico', 'robots.txt', 'views'):
raise ValidationError()
# This is an ugly workaround for when we enable multi-org and some files are being called by the index rule:
for path in settings.STATIC_ASSETS_PATHS:
full_path = safe_join(path, value)
if os.path.isfile(full_path):
raise ValidationError()

return value
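The reworked SlugConverter above stops special-casing a fixed list of filenames and instead rejects any slug that resolves to a real file under the static asset directories. A standalone sketch of the same idea, assuming the era-appropriate flask.safe_join import used in the diff; the directory list here is a placeholder:

import os

from flask import Flask, safe_join
from werkzeug.routing import BaseConverter, ValidationError

STATIC_ASSETS_PATHS = ['./client/app', './client/dist']   # placeholder asset directories

class SlugConverter(BaseConverter):
    def to_python(self, value):
        # If the slug is actually a static file (favicon.ico, robots.txt, ...),
        # refuse to match so the catch-all index rule doesn't swallow it.
        for path in STATIC_ASSETS_PATHS:
            full_path = safe_join(path, value)     # raises NotFound on path traversal
            if os.path.isfile(full_path):
                raise ValidationError()
        return value

app = Flask(__name__)
app.url_map.converters['slug'] = SlugConverter

@app.route('/<slug:slug>')
def show(slug):
    return 'slug: %s' % slug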
@@ -90,7 +95,7 @@ def create_app(load_admin=True):
from redash.metrics.request import provision_app

app = Flask(__name__,
template_folder=settings.STATIC_ASSETS_PATHS[-1],
template_folder=settings.STATIC_ASSETS_PATHS[0],
static_folder=settings.STATIC_ASSETS_PATHS[-1],
static_path='/static')

@@ -102,12 +107,15 @@ def create_app(load_admin=True):
SSLify(app, skips=['ping'])

if settings.SENTRY_DSN:
from raven import Client
from raven.contrib.flask import Sentry
from raven.handlers.logging import SentryHandler
sentry = Sentry(app, dsn=settings.SENTRY_DSN)

client = Client(settings.SENTRY_DSN, release=__version__, install_logging_hook=False)
sentry = Sentry(app, client=client)
sentry.client.release = __version__

sentry_handler = SentryHandler(settings.SENTRY_DSN)
sentry_handler = SentryHandler(client=client)
sentry_handler.setLevel(logging.ERROR)
logging.getLogger().addHandler(sentry_handler)

|
||||
|
||||
|
||||
def get_login_url(external=False, next="/"):
|
||||
if settings.MULTI_ORG:
|
||||
if settings.MULTI_ORG and current_org == None:
|
||||
login_url = '/'
|
||||
elif settings.MULTI_ORG:
|
||||
login_url = url_for('redash.login', org_slug=current_org.slug, next=next, _external=external)
|
||||
else:
|
||||
login_url = url_for('redash.login', next=next, _external=external)
|
||||
@@ -155,5 +157,3 @@ def setup_authentication(app):
|
||||
else:
|
||||
logger.warning("Unknown authentication type ({}). Using default (HMAC).".format(settings.AUTH_TYPE))
|
||||
login_manager.request_loader(hmac_load_user_from_request)
|
||||
|
||||
|
||||
|
||||
@@ -1,13 +1,6 @@
|
||||
"""
|
||||
This module implements different strategies to resolve the current Organization we are using.
|
||||
|
||||
By default we use the simple single_org strategy, which assumes you have a
|
||||
single Organization in your installation.
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from flask import request, g
|
||||
from flask import g, request
|
||||
from werkzeug.local import LocalProxy
|
||||
|
||||
from redash.models import Organization
|
||||
|
||||
@@ -68,3 +68,23 @@ def send_test_mail(email=None):
|
||||
mail.send(Message(subject="Test Message from Redash", recipients=[email],
|
||||
body="Test message."))
|
||||
|
||||
|
||||
@manager.command()
|
||||
def ipython():
|
||||
"""Starts IPython shell instead of the default Python shell."""
|
||||
import sys
|
||||
import IPython
|
||||
from flask.globals import _app_ctx_stack
|
||||
app = _app_ctx_stack.top.app
|
||||
|
||||
banner = 'Python %s on %s\nIPython: %s\nRedash version: %s\n' % (
|
||||
sys.version,
|
||||
sys.platform,
|
||||
IPython.__version__,
|
||||
__version__
|
||||
)
|
||||
|
||||
ctx = {}
|
||||
ctx.update(app.make_shell_context())
|
||||
|
||||
IPython.embed(banner1=banner, user_ns=ctx)
|
||||
|
||||
@@ -40,16 +40,15 @@ class Email(BaseDestination):
|
||||
logging.debug("Notifying: %s", recipients)
|
||||
|
||||
try:
|
||||
with app.app_context():
|
||||
alert_name = alert.name.encode('utf-8', 'ignore')
|
||||
state = new_state.upper()
|
||||
subject_template = options.get('subject_template', settings.ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE)
|
||||
message = Message(
|
||||
recipients=recipients,
|
||||
subject=subject_template.format(alert_name=alert_name, state=state),
|
||||
html=html
|
||||
)
|
||||
mail.send(message)
|
||||
alert_name = alert.name.encode('utf-8', 'ignore')
|
||||
state = new_state.upper()
|
||||
subject_template = options.get('subject_template', settings.ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE)
|
||||
message = Message(
|
||||
recipients=recipients,
|
||||
subject=subject_template.format(alert_name=alert_name, state=state),
|
||||
html=html
|
||||
)
|
||||
mail.send(message)
|
||||
except Exception:
|
||||
logging.exception("Mail send error.")
|
||||
|
||||
|
||||
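The change above drops the app_context wrapper around building and sending the alert e-mail and leaves just the formatting and send call. A minimal sketch of that remaining path with Flask-Mail; the sender address, subject template value, and suppress flag are placeholders for illustration, not Redash's configuration:

from flask import Flask
from flask_mail import Mail, Message

app = Flask(__name__)
app.config['MAIL_SUPPRESS_SEND'] = True      # don't actually hit an SMTP server in this sketch
mail = Mail(app)                             # binding Mail to the app up front lets send() find it

ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE = u"({state}) {alert_name}"   # placeholder template

def notify(recipients, alert_name, new_state, html, options=None):
    options = options or {}
    subject_template = options.get('subject_template', ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE)
    message = Message(
        sender='redash@example.com',         # explicit sender instead of MAIL_DEFAULT_SENDER
        recipients=recipients,
        subject=subject_template.format(alert_name=alert_name, state=new_state.upper()),
        html=html)
    mail.send(message)

notify(['admin@example.com'], 'Daily signups', 'triggered', '<p>Alert body</p>')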
@@ -1,5 +1,6 @@
|
||||
import json
|
||||
|
||||
from flask import request
|
||||
from flask_login import login_required
|
||||
from redash import models, redis_connection
|
||||
from redash.handlers import routes
|
||||
@@ -32,14 +33,19 @@ def outdated_queries():
|
||||
@require_super_admin
|
||||
@login_required
|
||||
def queries_tasks():
|
||||
waiting = QueryTaskTracker.all(QueryTaskTracker.WAITING_LIST)
|
||||
in_progress = QueryTaskTracker.all(QueryTaskTracker.IN_PROGRESS_LIST)
|
||||
done = QueryTaskTracker.all(QueryTaskTracker.DONE_LIST, limit=50)
|
||||
global_limit = int(request.args.get('limit', 50))
|
||||
waiting_limit = int(request.args.get('waiting_limit', global_limit))
|
||||
progress_limit = int(request.args.get('progress_limit', global_limit))
|
||||
done_limit = int(request.args.get('done_limit', global_limit))
|
||||
|
||||
waiting = QueryTaskTracker.all(QueryTaskTracker.WAITING_LIST, limit=waiting_limit)
|
||||
in_progress = QueryTaskTracker.all(QueryTaskTracker.IN_PROGRESS_LIST, limit=progress_limit)
|
||||
done = QueryTaskTracker.all(QueryTaskTracker.DONE_LIST, limit=done_limit)
|
||||
|
||||
response = {
|
||||
'waiting': [t.data for t in waiting],
|
||||
'in_progress': [t.data for t in in_progress],
|
||||
'done': [t.data for t in done]
|
||||
'waiting': [t.data for t in waiting if t is not None],
|
||||
'in_progress': [t.data for t in in_progress if t is not None],
|
||||
'done': [t.data for t in done if t is not None]
|
||||
}
|
||||
|
||||
return json_response(response)
|
||||
|
||||
@@ -4,8 +4,10 @@ from flask import request
|
||||
from funcy import project
|
||||
|
||||
from redash import models
|
||||
from redash.permissions import require_access, require_admin_or_owner, view_only, require_permission
|
||||
from redash.handlers.base import BaseResource, require_fields, get_object_or_404
|
||||
from redash.handlers.base import (BaseResource, get_object_or_404,
|
||||
require_fields)
|
||||
from redash.permissions import (require_access, require_admin_or_owner,
|
||||
require_permission, view_only)
|
||||
|
||||
|
||||
class AlertResource(BaseResource):
|
||||
@@ -52,6 +54,7 @@ class AlertListResource(BaseResource):
|
||||
name=req['name'],
|
||||
query_rel=query,
|
||||
user=self.current_user,
|
||||
rearm=req.get('rearm'),
|
||||
options=req['options']
|
||||
)
|
||||
|
||||
|
||||
@@ -1,11 +1,9 @@
|
||||
import hashlib
|
||||
import logging
|
||||
|
||||
from flask import flash, redirect, render_template, request, url_for
|
||||
from flask import abort, flash, redirect, render_template, request, url_for
|
||||
|
||||
from flask_login import current_user, login_required, login_user, logout_user
|
||||
|
||||
from sqlalchemy.orm.exc import NoResultFound
|
||||
|
||||
from redash import __version__, limiter, models, settings
|
||||
from redash.authentication import current_org, get_login_url
|
||||
from redash.authentication.account import (BadSignature, SignatureExpired,
|
||||
@@ -14,6 +12,7 @@ from redash.authentication.account import (BadSignature, SignatureExpired,
|
||||
from redash.handlers import routes
|
||||
from redash.handlers.base import json_response, org_scoped_rule
|
||||
from redash.version_check import get_latest_version
|
||||
from sqlalchemy.orm.exc import NoResultFound
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -75,6 +74,9 @@ def reset(token, org_slug=None):
|
||||
|
||||
@routes.route(org_scoped_rule('/forgot'), methods=['GET', 'POST'])
|
||||
def forgot_password(org_slug=None):
|
||||
if not settings.PASSWORD_LOGIN_ENABLED:
|
||||
abort(404)
|
||||
|
||||
submitted = False
|
||||
if request.method == 'POST' and request.form['email']:
|
||||
submitted = True
|
||||
@@ -130,7 +132,7 @@ def login(org_slug=None):
|
||||
return render_template("login.html",
|
||||
org_slug=org_slug,
|
||||
next=next_path,
|
||||
username=request.form.get('username', ''),
|
||||
email=request.form.get('email', ''),
|
||||
show_google_openid=settings.GOOGLE_OAUTH_ENABLED,
|
||||
google_auth_url=google_auth_url,
|
||||
show_saml_login=settings.SAML_LOGIN_ENABLED,
|
||||
@@ -153,7 +155,7 @@ def base_href():
|
||||
|
||||
|
||||
def client_config():
|
||||
if not isinstance(current_user._get_current_object(), models.ApiUser) and current_user.is_authenticated:
|
||||
if not current_user.is_api_user() and current_user.is_authenticated:
|
||||
client_config = {
|
||||
'newVersionAvailable': get_latest_version(),
|
||||
'version': __version__
|
||||
@@ -169,7 +171,7 @@ def client_config():
|
||||
return client_config
|
||||
|
||||
|
||||
@routes.route(org_scoped_rule('/api/config'), methods=['GET'])
|
||||
@routes.route('/api/config', methods=['GET'])
|
||||
def config(org_slug=None):
|
||||
return json_response({
|
||||
'org_slug': current_org.slug,
|
||||
@@ -180,7 +182,12 @@ def config(org_slug=None):
|
||||
@routes.route(org_scoped_rule('/api/session'), methods=['GET'])
|
||||
@login_required
|
||||
def session(org_slug=None):
|
||||
if not isinstance(current_user._get_current_object(), models.ApiUser):
|
||||
if current_user.is_api_user():
|
||||
user = {
|
||||
'permissions': [],
|
||||
'apiKey': current_user.id
|
||||
}
|
||||
else:
|
||||
email_md5 = hashlib.md5(current_user.email.lower()).hexdigest()
|
||||
gravatar_url = "https://www.gravatar.com/avatar/%s?s=40" % email_md5
|
||||
|
||||
@@ -192,11 +199,6 @@ def session(org_slug=None):
|
||||
'groups': current_user.group_ids,
|
||||
'permissions': current_user.permissions
|
||||
}
|
||||
else:
|
||||
user = {
|
||||
'permissions': [],
|
||||
'apiKey': current_user.id
|
||||
}
|
||||
|
||||
return json_response({
|
||||
'user': user,
|
||||
|
||||
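The session payload above builds the avatar URL from the user's e-mail in the same way the gravatar_url property in models.py does. The construction itself is just an md5 of the normalized address, for example:

import hashlib

def gravatar_url(email, size=40):
    # Gravatar keys avatars on the md5 hex digest of the trimmed, lower-cased address.
    email_md5 = hashlib.md5(email.strip().lower().encode('utf-8')).hexdigest()
    return "https://www.gravatar.com/avatar/%s?s=%d" % (email_md5, size)

print(gravatar_url('User@Example.com'))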
@@ -1,16 +1,15 @@
|
||||
import time
|
||||
|
||||
from flask import Blueprint, current_app, request
|
||||
|
||||
from flask_login import current_user, login_required
|
||||
from flask_restful import Resource, abort
|
||||
|
||||
from sqlalchemy.orm.exc import NoResultFound
|
||||
|
||||
from redash import settings
|
||||
from redash.authentication import current_org
|
||||
from redash.models import ApiUser
|
||||
from redash.tasks import record_event as record_event_task
|
||||
from redash.utils import json_dumps
|
||||
from sqlalchemy.orm.exc import NoResultFound
|
||||
|
||||
routes = Blueprint('redash', __name__, template_folder=settings.fix_assets_path('templates'))
|
||||
|
||||
|
||||
@@ -1,15 +1,15 @@
|
||||
from itertools import chain
|
||||
|
||||
from flask import request, url_for
|
||||
from flask_restful import abort
|
||||
from funcy import distinct, project, take
|
||||
from sqlalchemy.orm.exc import StaleDataError
|
||||
|
||||
from redash import models, serializers
|
||||
from flask_restful import abort
|
||||
from redash import models, serializers, settings
|
||||
from redash.handlers.base import BaseResource, get_object_or_404
|
||||
from redash.permissions import (can_modify, require_admin_or_owner,
|
||||
require_object_modify_permission,
|
||||
require_permission)
|
||||
from sqlalchemy.orm.exc import StaleDataError
|
||||
|
||||
|
||||
class RecentDashboardsResource(BaseResource):
|
||||
@@ -18,13 +18,19 @@ class RecentDashboardsResource(BaseResource):
|
||||
"""
|
||||
Lists dashboards modified in the last 7 days.
|
||||
"""
|
||||
recent = [d.to_dict() for d in models.Dashboard.recent(self.current_org, self.current_user.group_ids, self.current_user.id, for_user=True)]
|
||||
if settings.FEATURE_DUMB_RECENTS:
|
||||
dashboards = models.Dashboard.all(self.current_org, self.current_user.group_ids, self.current_user.id).order_by(models.Dashboard.updated_at.desc()).limit(10)
|
||||
dashboards = [d.to_dict() for d in dashboards]
|
||||
else:
|
||||
recent = [d.to_dict() for d in models.Dashboard.recent(self.current_org, self.current_user.group_ids, self.current_user.id, for_user=True)]
|
||||
|
||||
global_recent = []
|
||||
if len(recent) < 10:
|
||||
global_recent = [d.to_dict() for d in models.Dashboard.recent(self.current_org, self.current_user.group_ids, self.current_user.id)]
|
||||
global_recent = []
|
||||
if len(recent) < 10:
|
||||
global_recent = [d.to_dict() for d in models.Dashboard.recent(self.current_org, self.current_user.group_ids, self.current_user.id)]
|
||||
|
||||
return take(20, distinct(chain(recent, global_recent), key=lambda d: d['id']))
|
||||
dashboards = take(20, distinct(chain(recent, global_recent), key=lambda d: d['id']))
|
||||
|
||||
return dashboards
|
||||
|
||||
|
||||
class DashboardListResource(BaseResource):
|
||||
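When the FEATURE_DUMB_RECENTS flag is off, the endpoint above still merges the per-user and global lists through the same chain/distinct/take pipeline, it just assigns the result before returning. A tiny self-contained example of what that funcy combination does (the sample dicts are made up):

from itertools import chain

from funcy import distinct, take

recent = [{'id': 1, 'name': 'Weekly revenue'}, {'id': 2, 'name': 'Signups'}]
global_recent = [{'id': 2, 'name': 'Signups'}, {'id': 3, 'name': 'Churn'}]

# chain() concatenates the two lists, distinct() drops later duplicates by id,
# and take() caps the merged result at 20 items.
dashboards = take(20, distinct(chain(recent, global_recent), key=lambda d: d['id']))

print([d['id'] for d in dashboards])   # -> [1, 2, 3]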
@@ -123,7 +129,7 @@ class DashboardResource(BaseResource):
|
||||
require_object_modify_permission(dashboard, self.current_user)
|
||||
|
||||
updates = project(dashboard_properties, ('name', 'layout', 'version',
|
||||
'is_draft'))
|
||||
'is_draft', 'dashboard_filters_enabled'))
|
||||
|
||||
# SQLAlchemy handles the case where a concurrent transaction beats us
|
||||
# to the update. But we still have to make sure that we're not starting
|
||||
|
||||
@@ -1,13 +1,18 @@
|
||||
import logging
|
||||
|
||||
from flask import make_response, request
|
||||
from flask_restful import abort
|
||||
from funcy import project
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
|
||||
from redash import models
|
||||
from redash.utils.configuration import ConfigurationContainer, ValidationError
|
||||
from redash.permissions import require_admin, require_permission, require_access, view_only
|
||||
from redash.query_runner import query_runners, get_configuration_schema_for_query_runner_type
|
||||
from redash.handlers.base import BaseResource, get_object_or_404
|
||||
from redash.permissions import (require_access, require_admin,
|
||||
require_permission, view_only)
|
||||
from redash.query_runner import (get_configuration_schema_for_query_runner_type,
|
||||
query_runners)
|
||||
from redash.utils import filter_none
|
||||
from redash.utils.configuration import ConfigurationContainer, ValidationError
|
||||
|
||||
|
||||
class DataSourceTypeListResource(BaseResource):
|
||||
@@ -32,22 +37,28 @@ class DataSourceResource(BaseResource):
|
||||
abort(400)
|
||||
try:
|
||||
data_source.options.set_schema(schema)
|
||||
data_source.options.update(req['options'])
|
||||
data_source.options.update(filter_none(req['options']))
|
||||
except ValidationError:
|
||||
abort(400)
|
||||
|
||||
data_source.type = req['type']
|
||||
data_source.name = req['name']
|
||||
models.db.session.add(data_source)
|
||||
models.db.session.commit()
|
||||
|
||||
try:
|
||||
models.db.session.commit()
|
||||
except IntegrityError as e:
|
||||
if req['name'] in e.message:
|
||||
abort(400, message="Data source with the name {} already exists.".format(req['name']))
|
||||
|
||||
abort(400)
|
||||
|
||||
return data_source.to_dict(all=True)
|
||||
|
||||
@require_admin
|
||||
def delete(self, data_source_id):
|
||||
data_source = models.DataSource.get_by_id_and_org(data_source_id, self.current_org)
|
||||
models.db.session.delete(data_source)
|
||||
models.db.session.commit()
|
||||
data_source.delete()
|
||||
|
||||
return make_response('', 204)
|
||||
|
||||
@@ -86,16 +97,24 @@ class DataSourceListResource(BaseResource):
|
||||
if schema is None:
|
||||
abort(400)
|
||||
|
||||
config = ConfigurationContainer(req['options'], schema)
|
||||
config = ConfigurationContainer(filter_none(req['options']), schema)
|
||||
# from IPython import embed
|
||||
# embed()
|
||||
if not config.is_valid():
|
||||
abort(400)
|
||||
|
||||
datasource = models.DataSource.create_with_group(org=self.current_org,
|
||||
name=req['name'],
|
||||
type=req['type'],
|
||||
options=config)
|
||||
try:
|
||||
datasource = models.DataSource.create_with_group(org=self.current_org,
|
||||
name=req['name'],
|
||||
type=req['type'],
|
||||
options=config)
|
||||
|
||||
models.db.session.commit()
|
||||
models.db.session.commit()
|
||||
except IntegrityError as e:
|
||||
if req['name'] in e.message:
|
||||
abort(400, message="Data source with the name {} already exists.".format(req['name']))
|
||||
|
||||
abort(400)
|
||||
|
||||
self.record_event({
|
||||
'action': 'create',
|
||||
|
||||
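Both data-source handlers above now commit inside a try block and translate an IntegrityError on a duplicate name into a 400 response. A minimal sketch of the underlying pattern with plain SQLAlchemy; the model and messages are illustrative, and the real handlers call abort(400, ...) instead of raising:

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()

class DataSource(Base):
    __tablename__ = 'data_sources'
    id = Column(Integer, primary_key=True)
    name = Column(String, unique=True, nullable=False)   # duplicate names violate this constraint

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

def create_data_source(name):
    session.add(DataSource(name=name))
    try:
        session.commit()
    except IntegrityError:
        session.rollback()   # clear the failed transaction before the session is reused
        raise ValueError("Data source with the name {} already exists.".format(name))

create_data_source('metrics')
try:
    create_data_source('metrics')
except ValueError as e:
    print(e)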
@@ -2,10 +2,11 @@ from flask import make_response, request
|
||||
from flask_restful import abort
|
||||
|
||||
from redash import models
|
||||
from redash.permissions import require_admin
|
||||
from redash.destinations import destinations, get_configuration_schema_for_destination_type
|
||||
from redash.utils.configuration import ConfigurationContainer, ValidationError
|
||||
from redash.destinations import (destinations,
|
||||
get_configuration_schema_for_destination_type)
|
||||
from redash.handlers.base import BaseResource
|
||||
from redash.permissions import require_admin
|
||||
from redash.utils.configuration import ConfigurationContainer, ValidationError
|
||||
|
||||
|
||||
class DestinationTypeListResource(BaseResource):
|
||||
@@ -30,6 +31,8 @@ class DestinationResource(BaseResource):
|
||||
abort(400)
|
||||
|
||||
try:
|
||||
destination.type = req['type']
|
||||
destination.name = req['name']
|
||||
destination.options.set_schema(schema)
|
||||
destination.options.update(req['options'])
|
||||
models.db.session.add(destination)
|
||||
@@ -37,9 +40,6 @@ class DestinationResource(BaseResource):
|
||||
except ValidationError:
|
||||
abort(400)
|
||||
|
||||
destination.type = req['type']
|
||||
destination.name = req['name']
|
||||
|
||||
return destination.to_dict(all=True)
|
||||
|
||||
@require_admin
|
||||
|
||||
@@ -1,21 +1,19 @@
|
||||
import json
|
||||
import logging
|
||||
import time
|
||||
|
||||
import pystache
|
||||
from flask import request
|
||||
|
||||
from authentication import current_org
|
||||
from flask import current_app, render_template, request, safe_join, send_file
|
||||
from flask_login import current_user, login_required
|
||||
from flask_restful import abort
|
||||
from funcy import project
|
||||
from redash import models, serializers, settings, utils
|
||||
from redash import models, utils
|
||||
from redash.handlers import routes
|
||||
from redash.handlers.base import (get_object_or_404, org_scoped_rule,
|
||||
record_event)
|
||||
from redash.handlers.query_results import collect_query_parameters
|
||||
from redash.permissions import require_access, view_only
|
||||
from redash.utils import (collect_parameters_from_request, gen_query_hash,
|
||||
json_dumps)
|
||||
from redash.handlers.static import render_index
|
||||
from redash.utils import gen_query_hash
|
||||
|
||||
|
||||
#
|
||||
@@ -54,12 +52,12 @@ def run_query_sync(data_source, parameter_values, query_text, max_age=0):
|
||||
if max_age > 0:
|
||||
run_time = time.time() - started_at
|
||||
query_result, updated_query_ids = models.QueryResult.store_result(data_source.org_id, data_source.id,
|
||||
query_hash, query_text, data,
|
||||
run_time, utils.utcnow())
|
||||
query_hash, query_text, data,
|
||||
run_time, utils.utcnow())
|
||||
|
||||
models.db.session.commit()
|
||||
return data
|
||||
except Exception, e:
|
||||
except Exception:
|
||||
if max_age > 0:
|
||||
abort(404, message="Unable to get result from the database, and no cached query result found.")
|
||||
else:
|
||||
@@ -79,23 +77,24 @@ def embed(query_id, visualization_id, org_slug=None):
|
||||
'referer': request.headers.get('Referer')
|
||||
})
|
||||
|
||||
full_path = safe_join(settings.STATIC_ASSETS_PATHS[-2], 'index.html')
|
||||
models.db.session.commit()
|
||||
return send_file(full_path, **dict(cache_timeout=0, conditional=True))
|
||||
return render_index()
|
||||
|
||||
|
||||
@routes.route(org_scoped_rule('/public/dashboards/<token>'), methods=['GET'])
|
||||
@login_required
|
||||
def public_dashboard(token, org_slug=None):
|
||||
# TODO: bring this back.
|
||||
# record_event(current_org, current_user, {
|
||||
# 'action': 'view',
|
||||
# 'object_id': dashboard.id,
|
||||
# 'object_type': 'dashboard',
|
||||
# 'public': True,
|
||||
# 'headless': 'embed' in request.args,
|
||||
# 'referer': request.headers.get('Referer')
|
||||
# })
|
||||
# models.db.session.commit()
|
||||
full_path = safe_join(settings.STATIC_ASSETS_PATHS[-2], 'index.html')
|
||||
return send_file(full_path, **dict(cache_timeout=0, conditional=True))
|
||||
if current_user.is_api_user():
|
||||
dashboard = current_user.object
|
||||
else:
|
||||
api_key = get_object_or_404(models.ApiKey.get_by_api_key, token)
|
||||
dashboard = api_key.object
|
||||
|
||||
record_event(current_org, current_user, {
|
||||
'action': 'view',
|
||||
'object_id': dashboard.id,
|
||||
'object_type': 'dashboard',
|
||||
'public': True,
|
||||
'headless': 'embed' in request.args,
|
||||
'referer': request.headers.get('Referer')
|
||||
})
|
||||
return render_index()
|
||||
|
||||
@@ -7,7 +7,7 @@ from flask_restful import abort
|
||||
from funcy import distinct, take
|
||||
from sqlalchemy.orm.exc import StaleDataError
|
||||
|
||||
from redash import models
|
||||
from redash import models, settings
|
||||
from redash.handlers.base import (BaseResource, get_object_or_404,
|
||||
org_scoped_rule, paginate, routes)
|
||||
from redash.handlers.query_results import run_query
|
||||
@@ -57,14 +57,21 @@ class QueryRecentResource(BaseResource):
|
||||
|
||||
Responds with a list of :ref:`query <query-response-label>` objects.
|
||||
"""
|
||||
queries = models.Query.recent(self.current_user.group_ids, self.current_user.id)
|
||||
recent = [d.to_dict(with_last_modified_by=False) for d in queries]
|
||||
|
||||
global_recent = []
|
||||
if len(recent) < 10:
|
||||
global_recent = [d.to_dict(with_last_modified_by=False) for d in models.Query.recent(self.current_user.group_ids)]
|
||||
if settings.FEATURE_DUMB_RECENTS:
|
||||
results = models.Query.by_user(self.current_user).order_by(models.Query.updated_at.desc()).limit(10)
|
||||
queries = [q.to_dict(with_last_modified_by=False, with_user=False) for q in results]
|
||||
else:
|
||||
queries = models.Query.recent(self.current_user.group_ids, self.current_user.id)
|
||||
recent = [d.to_dict(with_last_modified_by=False, with_user=False) for d in queries]
|
||||
|
||||
return take(20, distinct(chain(recent, global_recent), key=lambda d: d['id']))
|
||||
global_recent = []
|
||||
if len(recent) < 10:
|
||||
global_recent = [d.to_dict(with_last_modified_by=False, with_user=False) for d in models.Query.recent(self.current_user.group_ids)]
|
||||
|
||||
queries = take(20, distinct(chain(recent, global_recent), key=lambda d: d['id']))
|
||||
|
||||
return queries
|
||||
|
||||
|
||||
class QueryListResource(BaseResource):
|
||||
@@ -136,7 +143,7 @@ class QueryListResource(BaseResource):
|
||||
|
||||
Responds with an array of :ref:`query <query-response-label>` objects.
|
||||
"""
|
||||
|
||||
|
||||
results = models.Query.all_queries(self.current_user.group_ids, self.current_user.id)
|
||||
page = request.args.get('page', 1, type=int)
|
||||
page_size = request.args.get('page_size', 25, type=int)
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import logging
|
||||
import json
|
||||
import time
|
||||
|
||||
@@ -9,7 +10,7 @@ from redash import models, settings, utils
|
||||
from redash.tasks import QueryTask, record_event
|
||||
from redash.permissions import require_permission, not_view_only, has_access, require_access, view_only
|
||||
from redash.handlers.base import BaseResource, get_object_or_404
|
||||
from redash.utils import collect_query_parameters, collect_parameters_from_request
|
||||
from redash.utils import collect_query_parameters, collect_parameters_from_request, gen_query_hash
|
||||
from redash.tasks.queries import enqueue_query
|
||||
|
||||
|
||||
@@ -17,6 +18,55 @@ def error_response(message):
|
||||
return {'job': {'status': 4, 'error': message}}, 400
|
||||
|
||||
|
||||
#
|
||||
# Run a parameterized query synchronously and return the result
|
||||
# DISCLAIMER: Temporary solution to support parameters in queries. Should be
|
||||
# removed once we refactor the query results API endpoints and handling
|
||||
# on the client side. Please don't reuse in other API handlers.
|
||||
#
|
||||
def run_query_sync(data_source, parameter_values, query_text, max_age=0):
|
||||
query_parameters = set(collect_query_parameters(query_text))
|
||||
missing_params = set(query_parameters) - set(parameter_values.keys())
|
||||
if missing_params:
|
||||
raise Exception('Missing parameter value for: {}'.format(", ".join(missing_params)))
|
||||
|
||||
if query_parameters:
|
||||
query_text = pystache.render(query_text, parameter_values)
|
||||
|
||||
if max_age <= 0:
|
||||
query_result = None
|
||||
else:
|
||||
query_result = models.QueryResult.get_latest(data_source, query_text, max_age)
|
||||
|
||||
query_hash = gen_query_hash(query_text)
|
||||
|
||||
if query_result:
|
||||
logging.info("Returning cached result for query %s" % query_hash)
|
||||
return query_result
|
||||
|
||||
try:
|
||||
started_at = time.time()
|
||||
data, error = data_source.query_runner.run_query(query_text, current_user)
|
||||
|
||||
if error:
|
||||
logging.info('got bak error')
|
||||
logging.info(error)
|
||||
return None
|
||||
|
||||
run_time = time.time() - started_at
|
||||
query_result, updated_query_ids = models.QueryResult.store_result(data_source.org, data_source,
|
||||
query_hash, query_text, data,
|
||||
run_time, utils.utcnow())
|
||||
|
||||
models.db.session.commit()
|
||||
return query_result
|
||||
except Exception, e:
|
||||
if max_age > 0:
|
||||
abort(404, message="Unable to get result from the database, and no cached query result found.")
|
||||
else:
|
||||
abort(503, message="Unable to get result from the database.")
|
||||
return None
|
||||
|
||||
def run_query(data_source, parameter_values, query_text, query_id, max_age=0):
|
||||
query_parameters = set(collect_query_parameters(query_text))
|
||||
missing_params = set(query_parameters) - set(parameter_values.keys())
|
||||
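run_query_sync above substitutes {{parameter}} placeholders with pystache before hashing and running the text. A small sketch of that substitution step; the regex-based collect_query_parameters here is a simplified stand-in for Redash's own helper:

import re

import pystache

def collect_query_parameters(query_text):
    # Simplified stand-in: find bare {{ name }} placeholders.
    return set(re.findall(r'{{\s*([^}|\s]+)\s*}}', query_text))

query_text = "SELECT count(*) FROM events WHERE org_id = {{ org_id }} AND created_at > '{{ start_date }}'"
parameter_values = {'org_id': 42, 'start_date': '2017-01-01'}

missing_params = collect_query_parameters(query_text) - set(parameter_values)
if missing_params:
    raise Exception('Missing parameter value for: {}'.format(', '.join(missing_params)))

print(pystache.render(query_text, parameter_values))
# SELECT count(*) FROM events WHERE org_id = 42 AND created_at > '2017-01-01'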
@@ -127,15 +177,22 @@ class QueryResultResource(BaseResource):
|
||||
# They need to be split, as they have different logic (for example, retrieving by query id
|
||||
# should check for query parameters and shouldn't cache the result).
|
||||
should_cache = query_result_id is not None
|
||||
if query_result_id is None and query_id is not None:
|
||||
query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
|
||||
if query:
|
||||
query_result_id = query.latest_query_data_id
|
||||
|
||||
parameter_values = collect_parameters_from_request(request.args)
|
||||
max_age = int(request.args.get('maxAge', 0))
|
||||
|
||||
query_result = None
|
||||
|
||||
if query_result_id:
|
||||
query_result = get_object_or_404(models.QueryResult.get_by_id_and_org, query_result_id, self.current_org)
|
||||
else:
|
||||
query_result = None
|
||||
elif query_id is not None:
|
||||
query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
|
||||
|
||||
if query is not None:
|
||||
if settings.ALLOW_PARAMETERS_IN_EMBEDS and parameter_values:
|
||||
query_result = run_query_sync(query.data_source, parameter_values, query.to_dict()['query'], max_age=max_age)
|
||||
elif query.latest_query_data_id is not None:
|
||||
query_result = get_object_or_404(models.QueryResult.get_by_id_and_org, query.latest_query_data_id, self.current_org)
|
||||
|
||||
if query_result:
|
||||
require_access(query_result.data_source.groups, self.current_user, view_only)
|
||||
@@ -209,4 +266,3 @@ class JobResource(BaseResource):
|
||||
"""
|
||||
job = QueryTask(job_id=job_id)
|
||||
job.cancel()
|
||||
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
from flask import redirect, request, render_template, url_for, g
|
||||
from flask_login import login_user
|
||||
from wtforms import Form, PasswordField, StringField, BooleanField, validators
|
||||
from wtforms.fields.html5 import EmailField
|
||||
from flask import g, redirect, render_template, request, url_for
|
||||
|
||||
from flask_login import login_user
|
||||
from redash import settings
|
||||
from redash.tasks.general import subscribe
|
||||
from redash.handlers.base import routes
|
||||
from redash.models import Organization, Group, User, db
|
||||
from redash.authentication.org_resolving import current_org
|
||||
from redash.handlers.base import routes
|
||||
from redash.models import Group, Organization, User, db
|
||||
from redash.tasks.general import subscribe
|
||||
from wtforms import BooleanField, Form, PasswordField, StringField, validators
|
||||
from wtforms.fields.html5 import EmailField
|
||||
|
||||
|
||||
class SetupForm(Form):
|
||||
@@ -19,6 +19,23 @@ class SetupForm(Form):
|
||||
newsletter = BooleanField()
|
||||
|
||||
|
||||
def create_org(org_name, user_name, email, password):
|
||||
default_org = Organization(name=org_name, slug='default', settings={})
|
||||
admin_group = Group(name='admin', permissions=['admin', 'super_admin'], org=default_org, type=Group.BUILTIN_GROUP)
|
||||
default_group = Group(name='default', permissions=Group.DEFAULT_PERMISSIONS, org=default_org, type=Group.BUILTIN_GROUP)
|
||||
|
||||
db.session.add_all([default_org, admin_group, default_group])
|
||||
db.session.commit()
|
||||
|
||||
user = User(org=default_org, name=user_name, email=email, group_ids=[admin_group.id, default_group.id])
|
||||
user.hash_password(password)
|
||||
|
||||
db.session.add(user)
|
||||
db.session.commit()
|
||||
|
||||
return default_org, user
|
||||
|
||||
|
||||
@routes.route('/setup', methods=['GET', 'POST'])
|
||||
def setup():
|
||||
if current_org != None or settings.MULTI_ORG:
|
||||
@@ -29,18 +46,7 @@ def setup():
|
||||
form.security_notifications.data = True
|
||||
|
||||
if request.method == 'POST' and form.validate():
|
||||
default_org = Organization(name=form.org_name.data, slug='default', settings={})
|
||||
admin_group = Group(name='admin', permissions=['admin', 'super_admin'], org=default_org, type=Group.BUILTIN_GROUP)
|
||||
default_group = Group(name='default', permissions=Group.DEFAULT_PERMISSIONS, org=default_org, type=Group.BUILTIN_GROUP)
|
||||
|
||||
db.session.add_all([default_org, admin_group, default_group])
|
||||
db.session.commit()
|
||||
|
||||
user = User(org=default_org, name=form.name.data, email=form.email.data, group_ids=[admin_group.id, default_group.id])
|
||||
user.hash_password(form.password.data)
|
||||
|
||||
db.session.add(user)
|
||||
db.session.commit()
|
||||
default_org, user = create_org(form.org_name.data, form.name.data, form.email.data, form.password.data)
|
||||
|
||||
g.org = default_org
|
||||
login_user(user)
|
||||
@@ -52,5 +58,3 @@ def setup():
|
||||
return redirect(url_for('redash.index', org_slug=None))
|
||||
|
||||
return render_template('setup.html', form=form)
|
||||
|
||||
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
import os
|
||||
|
||||
from flask import current_app, safe_join, send_file
|
||||
from flask import current_app, render_template, safe_join, send_file
|
||||
from werkzeug.exceptions import NotFound
|
||||
|
||||
from flask_login import login_required
|
||||
from redash import settings
|
||||
from redash.handlers import routes
|
||||
from redash.handlers.authentication import base_href
|
||||
from redash.handlers.base import org_scoped_rule
|
||||
from werkzeug.exceptions import NotFound
|
||||
|
||||
|
||||
@routes.route('/<path:filename>')
|
||||
@@ -24,10 +26,19 @@ def send_static(filename):
|
||||
raise NotFound()
|
||||
|
||||
|
||||
def render_index():
|
||||
if settings.MULTI_ORG:
|
||||
response = render_template("multi_org.html", base_href=base_href())
|
||||
else:
|
||||
full_path = safe_join(settings.STATIC_ASSETS_PATHS[-2], 'index.html')
|
||||
response = send_file(full_path, **dict(cache_timeout=0, conditional=True))
|
||||
|
||||
return response
|
||||
|
||||
|
||||
@login_required
|
||||
def index(**kwargs):
|
||||
full_path = safe_join(settings.STATIC_ASSETS_PATHS[-2], 'index.html')
|
||||
return send_file(full_path, **dict(cache_timeout=0, conditional=True))
|
||||
return render_index()
|
||||
|
||||
|
||||
def register_static_routes(rules):
|
||||
|
||||
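render_index above becomes the single place that decides between the multi-org template and the prebuilt single-page index.html, and the embed and public-dashboard handlers reuse it. A condensed sketch of the helper with placeholder paths and a hard-coded flag standing in for settings.MULTI_ORG:

from flask import Flask, render_template, safe_join, send_file

app = Flask(__name__, template_folder='templates')

MULTI_ORG = False                                          # stand-in for settings.MULTI_ORG
STATIC_ASSETS_PATHS = ['./client/app', './client/dist']    # stand-in for the real setting

def render_index():
    if MULTI_ORG:
        # Multi-org installs render a template so the org slug can be baked into <base href>.
        return render_template('multi_org.html', base_href='/default/')   # placeholder base href
    # Single-org installs stream the prebuilt SPA entry point, uncached but with
    # conditional (If-Modified-Since / ETag) support.
    full_path = safe_join(STATIC_ASSETS_PATHS[-2], 'index.html')
    return send_file(full_path, cache_timeout=0, conditional=True)

@app.route('/')
def index():
    return render_index()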
@@ -1,11 +1,12 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
import logging
|
||||
import time
|
||||
import json
|
||||
import logging
|
||||
import socket
|
||||
from celery.signals import task_prerun, task_postrun
|
||||
from redash import statsd_client, settings
|
||||
import time
|
||||
|
||||
from celery.signals import task_postrun, task_prerun
|
||||
from redash import settings, statsd_client
|
||||
|
||||
tasks_start_time = {}
|
||||
|
||||
@@ -33,16 +34,19 @@ def task_postrun_handler(signal, sender, task_id, task, args, kwargs, retval, st
|
||||
try:
|
||||
run_time = 1000 * (time.time() - tasks_start_time.pop(task_id))
|
||||
|
||||
tags = {'name': task.name, 'state': (state or 'unknown').lower(), 'hostname': socket.gethostname()}
|
||||
state = (state or 'unknown').lower()
|
||||
tags = {'state': state, 'hostname': socket.gethostname()}
|
||||
if task.name == 'redash.tasks.execute_query':
|
||||
if isinstance(retval, Exception):
|
||||
tags['state'] = 'exception'
|
||||
state = 'exception'
|
||||
|
||||
tags['data_source_id'] = args[1]
|
||||
|
||||
metric = "celery.task.runtime"
|
||||
normalized_task_name = task.name.replace('redash.tasks.', '').replace('.', '_')
|
||||
metric = "celery.task_runtime.{}".format(normalized_task_name)
|
||||
logging.debug("metric=%s", json.dumps({'metric': metric, 'tags': tags, 'value': run_time}))
|
||||
statsd_client.timing(metric_name(metric, tags), run_time)
|
||||
statsd_client.incr(metric_name('celery.task.count', tags))
|
||||
statsd_client.incr(metric_name('celery.task.{}.{}'.format(normalized_task_name, state), tags))
|
||||
except Exception:
|
||||
logging.exception("Exception during task_postrun handler.")
|
||||
|
||||
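The task_postrun handler above now reports a per-task runtime metric and per-task/per-state counters keyed by a normalized task name. The naming logic on its own, with a made-up task name (Redash's statsd client and metric_name helper are left out):

def normalize_task_name(task_name):
    # 'redash.tasks.execute_query' -> 'execute_query'; remaining dots would add
    # unwanted levels to the statsd key hierarchy, so they become underscores.
    return task_name.replace('redash.tasks.', '').replace('.', '_')

def metric_keys(task_name, state):
    state = (state or 'unknown').lower()
    normalized = normalize_task_name(task_name)
    return [
        'celery.task_runtime.{}'.format(normalized),       # timing metric
        'celery.task.count',                               # overall counter
        'celery.task.{}.{}'.format(normalized, state),     # per-task, per-state counter
    ]

print(metric_keys('redash.tasks.execute_query', 'SUCCESS'))
# ['celery.task_runtime.execute_query', 'celery.task.count', 'celery.task.execute_query.success']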
@@ -1,17 +1,29 @@
|
||||
import time
|
||||
import logging
|
||||
import time
|
||||
|
||||
from sqlalchemy.engine import Engine
|
||||
from sqlalchemy.orm.util import _ORMJoin
|
||||
from sqlalchemy.event import listens_for
|
||||
|
||||
from flask import has_request_context, g
|
||||
from flask import g, has_request_context
|
||||
|
||||
from redash import statsd_client
|
||||
from sqlalchemy.engine import Engine
|
||||
from sqlalchemy.event import listens_for
|
||||
from sqlalchemy.orm.util import _ORMJoin
|
||||
from sqlalchemy.sql.selectable import Alias
|
||||
|
||||
metrics_logger = logging.getLogger("metrics")
|
||||
|
||||
|
||||
def _table_name_from_select_element(elt):
|
||||
t = elt.froms[0]
|
||||
|
||||
if isinstance(t, Alias):
|
||||
t = t.original.froms[0]
|
||||
|
||||
while isinstance(t, _ORMJoin):
|
||||
t = t.left
|
||||
|
||||
return t.name
|
||||
|
||||
|
||||
@listens_for(Engine, "before_execute")
|
||||
def before_execute(conn, elt, multiparams, params):
|
||||
conn.info.setdefault('query_start_time', []).append(time.time())
|
||||
@@ -23,10 +35,11 @@ def after_execute(conn, elt, multiparams, params, result):
|
||||
action = elt.__class__.__name__
|
||||
|
||||
if action == 'Select':
|
||||
t = elt.froms[0]
|
||||
while isinstance(t, _ORMJoin):
|
||||
t = t.left
|
||||
name = t.name
|
||||
name = 'unknown'
|
||||
try:
|
||||
name = _table_name_from_select_element(elt)
|
||||
except Exception:
|
||||
logging.exception('Failed finding table name.')
|
||||
elif action in ['Update', 'Insert', 'Delete']:
|
||||
name = elt.table.name
|
||||
else:
|
||||
|
||||
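The database metrics above time every execution by pushing a start time onto the connection's info dict in before_execute and popping it in after_execute, with the table-name lookup now wrapped so unusual statement shapes can't raise. A runnable miniature of the timing part against an in-memory SQLite engine, using the 1.x-era SQLAlchemy API this code was written for:

import time

from sqlalchemy import Column, Integer, String, create_engine, select
from sqlalchemy.engine import Engine
from sqlalchemy.event import listens_for
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String)

@listens_for(Engine, 'before_execute')
def before_execute(conn, elt, multiparams, params):
    # Stack a start time per execution; nested executes pop in LIFO order.
    conn.info.setdefault('query_start_time', []).append(time.time())

@listens_for(Engine, 'after_execute')
def after_execute(conn, elt, multiparams, params, result):
    duration = 1000 * (time.time() - conn.info['query_start_time'].pop())
    action = elt.__class__.__name__        # e.g. 'Select', 'Insert', 'CreateTable'
    print('%s took %.2fms' % (action, duration))

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
engine.execute(select([User.__table__]))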
@@ -3,6 +3,7 @@ import time
|
||||
from collections import namedtuple
|
||||
|
||||
from flask import g, request
|
||||
|
||||
from redash import statsd_client
|
||||
|
||||
metrics_logger = logging.getLogger("metrics")
|
||||
@@ -19,19 +20,20 @@ def calculate_metrics(response):
|
||||
request_duration = (time.time() - g.start_time) * 1000
|
||||
queries_duration = g.get('queries_duration', 0.0)
|
||||
queries_count = g.get('queries_count', 0.0)
|
||||
endpoint = (request.endpoint or 'unknown').replace('.', '_')
|
||||
|
||||
metrics_logger.info("method=%s path=%s endpoint=%s status=%d content_type=%s content_length=%d duration=%.2f query_count=%d query_duration=%.2f",
|
||||
request.method,
|
||||
request.path,
|
||||
request.endpoint,
|
||||
endpoint,
|
||||
response.status_code,
|
||||
response.content_type,
|
||||
response.content_length,
|
||||
response.content_length or -1,
|
||||
request_duration,
|
||||
queries_count,
|
||||
queries_duration)
|
||||
|
||||
statsd_client.timing('requests.{}.{}'.format(request.endpoint, request.method.lower()), request_duration)
|
||||
statsd_client.timing('requests.{}.{}'.format(endpoint, request.method.lower()), request_duration)
|
||||
|
||||
return response
|
||||
|
||||
|
||||
redash/models.py
@@ -1,41 +1,67 @@
|
||||
import cStringIO
|
||||
import csv
|
||||
import datetime
|
||||
import functools
|
||||
import hashlib
|
||||
import itertools
|
||||
import json
|
||||
import logging
|
||||
import cStringIO
|
||||
import csv
|
||||
import xlsxwriter
|
||||
import time
|
||||
|
||||
from funcy import project
|
||||
|
||||
import xlsxwriter
|
||||
from flask_login import AnonymousUserMixin, UserMixin
|
||||
from flask_sqlalchemy import SQLAlchemy
|
||||
from flask_login import UserMixin, AnonymousUserMixin
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from sqlalchemy.event import listens_for
|
||||
from sqlalchemy.inspection import inspect
|
||||
from sqlalchemy.types import TypeDecorator
|
||||
from sqlalchemy.ext.mutable import Mutable
|
||||
from sqlalchemy.orm import object_session, backref, joinedload, subqueryload
|
||||
# noinspection PyUnresolvedReferences
|
||||
from sqlalchemy.orm.exc import NoResultFound
|
||||
from sqlalchemy import or_
|
||||
|
||||
from passlib.apps import custom_app_context as pwd_context
|
||||
|
||||
from redash import redis_connection, utils
|
||||
from redash.destinations import get_destination, get_configuration_schema_for_destination_type
|
||||
from redash.destinations import (get_configuration_schema_for_destination_type,
|
||||
get_destination)
|
||||
from redash.metrics import database # noqa: F401
|
||||
from redash.permissions import has_access, view_only
|
||||
from redash.query_runner import get_query_runner, get_configuration_schema_for_query_runner_type
|
||||
from redash.query_runner import (get_configuration_schema_for_query_runner_type,
|
||||
get_query_runner)
|
||||
from redash.utils import generate_token, json_dumps
|
||||
from redash.utils.configuration import ConfigurationContainer
|
||||
from redash.metrics import database
|
||||
from sqlalchemy import or_
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from sqlalchemy.event import listens_for
|
||||
from sqlalchemy.ext.mutable import Mutable
|
||||
from sqlalchemy.inspection import inspect
|
||||
from sqlalchemy.orm import backref, joinedload, object_session, subqueryload
|
||||
from sqlalchemy.orm.exc import NoResultFound # noqa: F401
|
||||
from sqlalchemy.types import TypeDecorator
|
||||
|
||||
db = SQLAlchemy(session_options={
|
||||
'expire_on_commit': False
|
||||
})
|
||||
|
||||
Column = functools.partial(db.Column, nullable=False)
|
||||
|
||||
|
||||
class ScheduledQueriesExecutions(object):
KEY_NAME = 'sq:executed_at'

def __init__(self):
self.executions = {}

def refresh(self):
self.executions = redis_connection.hgetall(self.KEY_NAME)

def update(self, query_id):
redis_connection.hmset(self.KEY_NAME, {
query_id: time.time()
})

def get(self, query_id):
timestamp = self.executions.get(str(query_id))
if timestamp:
timestamp = utils.dt_from_timestamp(timestamp)

return timestamp

scheduled_queries_executions = ScheduledQueriesExecutions()

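ScheduledQueriesExecutions above keeps one Redis hash of query id to last execution timestamp so the scheduler can fall back to it when latest_query_data is missing. A compact sketch of the same bookkeeping with redis-py; it assumes a local Redis instance, and the key name mirrors the one in the diff:

import datetime
import time

import redis

redis_connection = redis.StrictRedis()   # assumes Redis on localhost:6379
KEY_NAME = 'sq:executed_at'

def record_execution(query_id):
    # One hash field per query id, valued with a unix timestamp.
    redis_connection.hset(KEY_NAME, query_id, time.time())

def last_execution(query_id):
    executions = redis_connection.hgetall(KEY_NAME)       # what refresh() caches in the real class
    timestamp = executions.get(str(query_id).encode())
    if timestamp:
        return datetime.datetime.utcfromtimestamp(float(timestamp))
    return None

record_execution(42)
print(last_execution(42))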
# AccessPermission and Change use a 'generic foreign key' approach to refer to
|
||||
# either queries or dashboards.
|
||||
# TODO replace this with association tables.
|
||||
@@ -201,6 +227,9 @@ class AnonymousUser(AnonymousUserMixin, PermissionsCheckMixin):
|
||||
def permissions(self):
|
||||
return []
|
||||
|
||||
def is_api_user(self):
|
||||
return False
|
||||
|
||||
|
||||
class ApiUser(UserMixin, PermissionsCheckMixin):
|
||||
def __init__(self, api_key, org, groups, name=None):
|
||||
@@ -218,6 +247,9 @@ class ApiUser(UserMixin, PermissionsCheckMixin):
|
||||
def __repr__(self):
|
||||
return u"<{}>".format(self.name)
|
||||
|
||||
def is_api_user(self):
|
||||
return True
|
||||
|
||||
@property
|
||||
def permissions(self):
|
||||
return ['view_query']
|
||||
@@ -322,7 +354,7 @@ class User(TimestampMixin, db.Model, BelongsToOrgMixin, UserMixin, PermissionsCh
|
||||
name = Column(db.String(320))
|
||||
email = Column(db.String(320))
|
||||
password_hash = Column(db.String(128), nullable=True)
|
||||
#XXX replace with association table
|
||||
# XXX replace with association table
|
||||
group_ids = Column('groups', MutableList.as_mutable(postgresql.ARRAY(db.Integer)), nullable=True)
|
||||
api_key = Column(db.String(40),
|
||||
default=lambda: generate_token(40),
|
||||
@@ -355,6 +387,9 @@ class User(TimestampMixin, db.Model, BelongsToOrgMixin, UserMixin, PermissionsCh
|
||||
|
||||
return d
|
||||
|
||||
def is_api_user(self):
|
||||
return False
|
||||
|
||||
@property
|
||||
def gravatar_url(self):
|
||||
email_md5 = hashlib.md5(self.email.lower()).hexdigest()
|
||||
@@ -469,6 +504,27 @@ class DataSource(BelongsToOrgMixin, db.Model):
|
||||
db.session.add_all([data_source, data_source_group])
|
||||
return data_source
|
||||
|
||||
@classmethod
|
||||
def all(cls, org, group_ids=None):
|
||||
data_sources = cls.query.filter(cls.org == org).order_by(cls.id.asc())
|
||||
|
||||
if group_ids:
|
||||
data_sources = data_sources.join(DataSourceGroup).filter(
|
||||
DataSourceGroup.group_id.in_(group_ids))
|
||||
|
||||
return data_sources
|
||||
|
||||
@classmethod
|
||||
def get_by_id(cls, _id):
|
||||
return cls.query.filter(cls.id == _id).one()
|
||||
|
||||
def delete(self):
|
||||
Query.query.filter(Query.data_source == self).update(dict(data_source_id=None, latest_query_data_id=None))
|
||||
QueryResult.query.filter(QueryResult.data_source == self).delete()
|
||||
res = db.session.delete(self)
|
||||
db.session.commit()
|
||||
return res
|
||||
|
||||
def get_schema(self, refresh=False):
|
||||
key = "data_source:schema:{}".format(self.id)
|
||||
|
||||
@@ -526,25 +582,11 @@ class DataSource(BelongsToOrgMixin, db.Model):
|
||||
def query_runner(self):
|
||||
return get_query_runner(self.type, self.options)
|
||||
|
||||
@classmethod
|
||||
def get_by_id(cls, _id):
|
||||
return cls.query.filter(cls.id == _id).one()
|
||||
|
||||
@classmethod
|
||||
def get_by_name(cls, name):
|
||||
return cls.query.filter(cls.name == name).one()
|
||||
|
||||
@classmethod
|
||||
def all(cls, org, group_ids=None):
|
||||
data_sources = cls.query.filter(cls.org == org).order_by(cls.id.asc())
|
||||
|
||||
if group_ids:
|
||||
data_sources = data_sources.join(DataSourceGroup).filter(
|
||||
DataSourceGroup.group_id.in_(group_ids))
|
||||
|
||||
return data_sources
|
||||
|
||||
#XXX examine call sites to see if a regular SQLA collection would work better
|
||||
# XXX examine call sites to see if a regular SQLA collection would work better
|
||||
@property
|
||||
def groups(self):
|
||||
groups = db.session.query(DataSourceGroup).filter(
|
||||
@@ -553,7 +595,7 @@ class DataSource(BelongsToOrgMixin, db.Model):
|
||||
|
||||
|
||||
class DataSourceGroup(db.Model):
|
||||
#XXX drop id, use datasource/group as PK
|
||||
# XXX drop id, use datasource/group as PK
|
||||
id = Column(db.Integer, primary_key=True)
|
||||
data_source_id = Column(db.Integer, db.ForeignKey("data_sources.id"))
|
||||
data_source = db.relationship(DataSource, back_populates="data_source_groups")
|
||||
@@ -569,7 +611,7 @@ class QueryResult(db.Model, BelongsToOrgMixin):
|
||||
org_id = Column(db.Integer, db.ForeignKey('organizations.id'))
|
||||
org = db.relationship(Organization)
|
||||
data_source_id = Column(db.Integer, db.ForeignKey("data_sources.id"))
|
||||
data_source = db.relationship(DataSource, backref=backref('query_results', cascade="all, delete-orphan"))
|
||||
data_source = db.relationship(DataSource, backref=backref('query_results'))
|
||||
query_hash = Column(db.String(32), index=True)
|
||||
query_text = Column('query', db.Text)
|
||||
data = Column(db.Text)
|
||||
@@ -653,7 +695,7 @@ class QueryResult(db.Model, BelongsToOrgMixin):
|
||||
s = cStringIO.StringIO()
|
||||
|
||||
query_data = json.loads(self.data)
|
||||
writer = csv.DictWriter(s, fieldnames=[col['name'] for col in query_data['columns']])
|
||||
writer = csv.DictWriter(s, extrasaction="ignore", fieldnames=[col['name'] for col in query_data['columns']])
|
||||
writer.writer = utils.UnicodeWriter(s)
|
||||
writer.writeheader()
|
||||
for row in query_data['rows']:
|
||||
@@ -665,7 +707,7 @@ class QueryResult(db.Model, BelongsToOrgMixin):
        s = cStringIO.StringIO()

        query_data = json.loads(self.data)
        book = xlsxwriter.Workbook(s)
        book = xlsxwriter.Workbook(s, {'constant_memory': True})
        sheet = book.add_worksheet("result")

        column_names = []
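The new `{'constant_memory': True}` option makes xlsxwriter stream rows out as they are written instead of keeping the whole worksheet in RAM, which matters for large query results; the trade-off is that rows must be written in ascending order and cannot be revisited. A small sketch of that mode in isolation, writing to a placeholder file path rather than the in-memory buffer used above:

```python
import xlsxwriter

# constant_memory mode: rows are flushed as they are written, so memory stays flat.
book = xlsxwriter.Workbook('/tmp/result.xlsx', {'constant_memory': True})
sheet = book.add_worksheet("result")

columns = ['name', 'count']
rows = [{'name': 'alpha', 'count': 3}, {'name': 'beta', 'count': 5}]

for c, name in enumerate(columns):
    sheet.write(0, c, name)

# Rows must be written top to bottom in this mode.
for r, row in enumerate(rows):
    for c, name in enumerate(columns):
        sheet.write(r + 1, c, row[name])

book.close()
```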
@@ -811,7 +853,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
                joinedload(Query.latest_query_data).load_only('runtime', 'retrieved_at'))
            .join(DataSourceGroup, Query.data_source_id == DataSourceGroup.data_source_id)
            .filter(Query.is_archived == False)
            .filter(DataSourceGroup.group_id.in_(group_ids))\
            .filter(DataSourceGroup.group_id.in_(group_ids))
            .order_by(Query.created_at.desc()))

        if not drafts:
@@ -832,12 +874,16 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):

        now = utils.utcnow()
        outdated_queries = {}
        scheduled_queries_executions.refresh()

        for query in queries:
            if query.latest_query_data:
                retrieved_at = query.latest_query_data.retrieved_at
            else:
                retrieved_at = now

            retrieved_at = scheduled_queries_executions.get(query.id) or retrieved_at

            if should_schedule_next(retrieved_at, now, query.schedule, query.schedule_failures):
                key = "{}:{}".format(query.query_hash, query.data_source_id)
                outdated_queries[key] = query
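The loop above keys outdated queries by `"{query_hash}:{data_source_id}"` so the same SQL scheduled against the same data source is only enqueued once, and it lets an in-flight execution timestamp from `scheduled_queries_executions` stand in for the last stored result. A small self-contained sketch of that selection logic, with a deliberately simplified, hypothetical `should_schedule_next` that only compares an interval in seconds (the real redash helper also handles failure backoff and time-of-day schedules):

```python
from datetime import datetime, timedelta

def should_schedule_next(retrieved_at, now, schedule_seconds):
    # Hypothetical stand-in: re-run once the configured interval has elapsed.
    return now - retrieved_at >= timedelta(seconds=schedule_seconds)

def outdated_queries(queries, in_flight, now):
    outdated = {}
    for q in queries:
        retrieved_at = q['retrieved_at'] or now
        # Prefer the timestamp of an execution that is already running or queued.
        retrieved_at = in_flight.get(q['id']) or retrieved_at
        if should_schedule_next(retrieved_at, now, q['schedule_seconds']):
            key = "{}:{}".format(q['query_hash'], q['data_source_id'])
            outdated[key] = q
    return outdated

now = datetime(2017, 1, 1, 12, 0, 0)
queries = [
    {'id': 1, 'query_hash': 'abc', 'data_source_id': 1,
     'retrieved_at': now - timedelta(minutes=90), 'schedule_seconds': 3600},
    {'id': 2, 'query_hash': 'abc', 'data_source_id': 1,   # same hash + data source: deduplicated
     'retrieved_at': now - timedelta(minutes=120), 'schedule_seconds': 3600},
]
print(outdated_queries(queries, in_flight={}, now=now))   # one entry, keyed 'abc:1'
```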
@@ -869,7 +915,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):

    @classmethod
    def recent(cls, group_ids, user_id=None, limit=20):
        query = (cls.query.options(subqueryload(Query.user))
        query = (cls.query
                 .filter(Event.created_at > (db.func.current_date() - 7))
                 .join(Event, Query.id == Event.object_id.cast(db.Integer))
                 .join(DataSourceGroup, Query.data_source_id == DataSourceGroup.data_source_id)
@@ -956,11 +1002,11 @@ class AccessPermission(GFKBase, db.Model):

    @classmethod
    def grant(cls, obj, access_type, grantee, grantor):
        grant = cls.query.filter(cls.object_type==obj.__tablename__,
                                 cls.object_id==obj.id,
                                 cls.access_type==access_type,
                                 cls.grantee==grantee,
                                 cls.grantor==grantor).one_or_none()
        grant = cls.query.filter(cls.object_type == obj.__tablename__,
                                 cls.object_id == obj.id,
                                 cls.access_type == access_type,
                                 cls.grantee == grantee,
                                 cls.grantor == grantor).one_or_none()

        if not grant:
            grant = cls(object_type=obj.__tablename__,
@@ -1074,12 +1120,12 @@ class Alert(TimestampMixin, db.Model):
        return db.session.query(Alert)\
            .options(joinedload(Alert.user), joinedload(Alert.query_rel))\
            .join(Query)\
            .join(DataSourceGroup, DataSourceGroup.data_source_id==Query.data_source_id)\
            .join(DataSourceGroup, DataSourceGroup.data_source_id == Query.data_source_id)\
            .filter(DataSourceGroup.group_id.in_(group_ids))

    @classmethod
    def get_by_id_and_org(cls, id, org):
        return db.session.query(Alert).join(Query).filter(Alert.id==id, Query.org==org).one()
        return db.session.query(Alert).join(Query).filter(Alert.id == id, Query.org == org).one()

    def to_dict(self, full=True):
        d = {
@@ -1104,18 +1150,20 @@ class Alert(TimestampMixin, db.Model):

    def evaluate(self):
        data = json.loads(self.query_rel.latest_query_data.data)
        # todo: safe guard for empty
        value = data['rows'][0][self.options['column']]
        op = self.options['op']
        if data['rows']:
            value = data['rows'][0][self.options['column']]
            op = self.options['op']

            if op == 'greater than' and value > self.options['value']:
                new_state = self.TRIGGERED_STATE
            elif op == 'less than' and value < self.options['value']:
                new_state = self.TRIGGERED_STATE
            elif op == 'equals' and value == self.options['value']:
                new_state = self.TRIGGERED_STATE
            if op == 'greater than' and value > self.options['value']:
                new_state = self.TRIGGERED_STATE
            elif op == 'less than' and value < self.options['value']:
                new_state = self.TRIGGERED_STATE
            elif op == 'equals' and value == self.options['value']:
                new_state = self.TRIGGERED_STATE
            else:
                new_state = self.OK_STATE
        else:
            new_state = self.OK_STATE
            new_state = self.UNKNOWN_STATE

        return new_state
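The net effect of this hunk is that an alert whose query returned zero rows no longer blows up on `data['rows'][0]`; it now resolves to the unknown state. A standalone sketch of the resulting decision logic, written as a plain function for clarity (the state constants are stand-ins for the Alert class attributes):

```python
TRIGGERED_STATE = 'triggered'
OK_STATE = 'ok'
UNKNOWN_STATE = 'unknown'

def evaluate_alert(rows, options):
    # rows: the 'rows' list from the latest query result
    # options: {'column': ..., 'op': 'greater than'|'less than'|'equals', 'value': ...}
    if not rows:
        return UNKNOWN_STATE          # empty result: we cannot tell either way

    value = rows[0][options['column']]
    op = options['op']

    if op == 'greater than' and value > options['value']:
        return TRIGGERED_STATE
    elif op == 'less than' and value < options['value']:
        return TRIGGERED_STATE
    elif op == 'equals' and value == options['value']:
        return TRIGGERED_STATE
    return OK_STATE

print(evaluate_alert([], {'column': 'cnt', 'op': 'greater than', 'value': 10}))            # unknown
print(evaluate_alert([{'cnt': 42}], {'column': 'cnt', 'op': 'greater than', 'value': 10}))  # triggered
```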
@@ -1261,7 +1309,7 @@ class Dashboard(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model

    @classmethod
    def get_by_slug_and_org(cls, slug, org):
        return cls.query.filter(cls.slug == slug, cls.org==org).one()
        return cls.query.filter(cls.slug == slug, cls.org == org).one()

    def __unicode__(self):
        return u"%s=%s" % (self.id, self.name)
@@ -1350,7 +1398,7 @@ class Widget(TimestampMixin, db.Model):

    @classmethod
    def get_by_id_and_org(cls, widget_id, org):
        return db.session.query(cls).join(Dashboard).filter(cls.id == widget_id, Dashboard.org== org).one()
        return db.session.query(cls).join(Dashboard).filter(cls.id == widget_id, Dashboard.org == org).one()


class Event(db.Model):
@@ -1405,7 +1453,7 @@ class ApiKey(TimestampMixin, GFKBase, db.Model):
    org = db.relationship(Organization)
    api_key = Column(db.String(255), index=True, default=lambda: generate_token(40))
    active = Column(db.Boolean, default=True)
    #'object' provided by GFKBase
    # 'object' provided by GFKBase
    created_by_id = Column(db.Integer, db.ForeignKey("users.id"), nullable=True)
    created_by = db.relationship(User)
@@ -1414,11 +1462,11 @@ class ApiKey(TimestampMixin, GFKBase, db.Model):

    @classmethod
    def get_by_api_key(cls, api_key):
        return cls.query.filter(cls.api_key==api_key, cls.active==True).one()
        return cls.query.filter(cls.api_key == api_key, cls.active == True).one()

    @classmethod
    def get_by_object(cls, object):
        return cls.query.filter(cls.object_type==object.__class__.__tablename__, cls.object_id==object.id, cls.active==True).first()
        return cls.query.filter(cls.object_type == object.__class__.__tablename__, cls.object_id == object.id, cls.active == True).first()

    @classmethod
    def create_for_object(cls, object, user):
@@ -1435,8 +1483,9 @@ class NotificationDestination(BelongsToOrgMixin, db.Model):
    user = db.relationship(User, backref="notification_destinations")
    name = Column(db.String(255))
    type = Column(db.String(255))
    options = Column(Configuration)
    options = Column(ConfigurationContainer.as_mutable(Configuration))
    created_at = Column(db.DateTime(True), default=db.func.now())

    __tablename__ = 'notification_destinations'
    __table_args__ = (db.Index('notification_destinations_org_id_name', 'org_id',
                               'name', unique=True),)
@@ -1465,7 +1514,7 @@ class NotificationDestination(BelongsToOrgMixin, db.Model):

    @classmethod
    def all(cls, org):
        notification_destinations = cls.query.filter(cls.org==org).order_by(cls.id.asc())
        notification_destinations = cls.query.filter(cls.org == org).order_by(cls.id.asc())

        return notification_destinations

@@ -1557,6 +1606,6 @@ def init_db():
    default_group = Group(name='default', permissions=Group.DEFAULT_PERMISSIONS, org=default_org, type=Group.BUILTIN_GROUP)

    db.session.add_all([default_org, admin_group, default_group])
    #XXX remove after fixing User.group_ids
    # XXX remove after fixing User.group_ids
    db.session.commit()
    return default_org, admin_group, default_group
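The switch from `Column(Configuration)` to `Column(ConfigurationContainer.as_mutable(Configuration))` is the standard SQLAlchemy mutation-tracking pattern: wrapping a column type with a `Mutable` subclass makes in-place edits such as `dest.options['url'] = ...` mark the row dirty, where a plain custom type only notices a full reassignment of the attribute. A generic sketch of that pattern with stock SQLAlchemy types standing in for redash's ConfigurationContainer/Configuration (assumes SQLAlchemy 1.3+ so the JSON type works on SQLite):

```python
from sqlalchemy import JSON, Column, Integer, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.mutable import MutableDict
from sqlalchemy.orm import sessionmaker

Base = declarative_base()

class Destination(Base):
    __tablename__ = 'destinations'
    id = Column(Integer, primary_key=True)
    # MutableDict.as_mutable(JSON) tracks in-place mutation of the dict value.
    options = Column(MutableDict.as_mutable(JSON), default=dict)

engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

dest = Destination(options={'url': 'https://example.com/hook'})
session.add(dest)
session.commit()

dest.options['channel'] = '#alerts'   # in-place edit, no reassignment
print(dest in session.dirty)          # True: the wrapper flagged the change
session.commit()
```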
@@ -1,6 +1,8 @@
import sys
import logging
import json

from collections import OrderedDict
from redash import settings

logger = logging.getLogger(__name__)
@@ -138,6 +140,7 @@ class BaseSQLQueryRunner(BaseQueryRunner):
            res = self._run_query_internal('select count(*) as cnt from %s' % t)
            tables_dict[t]['size'] = res[0]['cnt']


query_runners = {}
@@ -147,7 +150,8 @@ def register(query_runner_class):
        logger.debug("Registering %s (%s) query runner.", query_runner_class.name(), query_runner_class.type())
        query_runners[query_runner_class.type()] = query_runner_class
    else:
        logger.debug("%s query runner enabled but not supported, not registering. Either disable or install missing dependencies.", query_runner_class.name())
        logger.debug("%s query runner enabled but not supported, not registering. Either disable or install missing "
                     "dependencies.", query_runner_class.name())


def get_query_runner(query_runner_type, configuration):
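register() implements a simple plugin registry: each query runner class self-reports its type() and whether its optional dependencies imported successfully (enabled()), and only enabled runners land in the query_runners dict that get_query_runner() later reads. A stripped-down sketch of that pattern; the class names here are illustrative, not the real redash runners:

```python
import logging

logger = logging.getLogger(__name__)
query_runners = {}

class BaseQueryRunner(object):
    @classmethod
    def type(cls):
        return cls.__name__.lower()

    @classmethod
    def name(cls):
        return cls.__name__

    @classmethod
    def enabled(cls):
        return True

def register(query_runner_class):
    if query_runner_class.enabled():
        logger.debug("Registering %s (%s) query runner.",
                     query_runner_class.name(), query_runner_class.type())
        query_runners[query_runner_class.type()] = query_runner_class
    else:
        logger.debug("%s query runner enabled but not supported, not registering. Either disable or install missing "
                     "dependencies.", query_runner_class.name())

class SQLite(BaseQueryRunner):
    pass

class NeedsMissingDriver(BaseQueryRunner):
    @classmethod
    def enabled(cls):
        return False   # e.g. an optional import failed

register(SQLite)
register(NeedsMissingDriver)
print(sorted(query_runners))   # ['sqlite']
```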
@@ -1,11 +1,45 @@
import json
import logging
import os

import requests
from redash.query_runner import *
from redash.settings import parse_boolean
from redash.utils import JSONEncoder

from redash.query_runner import BaseQueryRunner, register
logger = logging.getLogger(__name__)
ANNOTATE_QUERY = parse_boolean(os.environ.get('ATHENA_ANNOTATE_QUERY', 'true'))
SHOW_EXTRA_SETTINGS = parse_boolean(os.environ.get('ATHENA_SHOW_EXTRA_SETTINGS', 'true'))
OPTIONAL_CREDENTIALS = parse_boolean(os.environ.get('ATHENA_OPTIONAL_CREDENTIALS', 'true'))

try:
    import pyathena
    enabled = True
except ImportError:
    enabled = False


_TYPE_MAPPINGS = {
    'boolean': TYPE_BOOLEAN,
    'tinyint': TYPE_INTEGER,
    'smallint': TYPE_INTEGER,
    'integer': TYPE_INTEGER,
    'bigint': TYPE_INTEGER,
    'double': TYPE_FLOAT,
    'varchar': TYPE_STRING,
    'timestamp': TYPE_DATETIME,
    'date': TYPE_DATE,
    'varbinary': TYPE_STRING,
    'array': TYPE_STRING,
    'map': TYPE_STRING,
    'row': TYPE_STRING,
    'decimal': TYPE_FLOAT,
}


class SimpleFormatter(object):
    def format(self, operation, parameters=None):
        return operation

PROXY_URL = os.environ.get('ATHENA_PROXY_URL')

class Athena(BaseQueryRunner):
    noop_query = 'SELECT 1'
@@ -16,7 +50,7 @@ class Athena(BaseQueryRunner):

    @classmethod
    def configuration_schema(cls):
        return {
        schema = {
            'type': 'object',
            'properties': {
                'region': {
@@ -34,64 +68,102 @@ class Athena(BaseQueryRunner):
                's3_staging_dir': {
                    'type': 'string',
                    'title': 'S3 Staging Path'
                }
                },
                'schema': {
                    'type': 'string',
                    'title': 'Schema Name',
                    'default': 'default'
                },
            },
            'required': ['region', 'aws_access_key', 'aws_secret_key', 's3_staging_dir'],
            'required': ['region', 's3_staging_dir'],
            'order': ['region', 'aws_access_key', 'aws_secret_key', 's3_staging_dir', 'schema'],
            'secret': ['aws_secret_key']
        }

        if SHOW_EXTRA_SETTINGS:
            schema['properties'].update({
                'encryption_option': {
                    'type': 'string',
                    'title': 'Encryption Option',
                },
                'kms_key': {
                    'type': 'string',
                    'title': 'KMS Key',
                },
            })

        if not OPTIONAL_CREDENTIALS:
            schema['required'] += ['aws_access_key', 'aws_secret_key']

        return schema

    @classmethod
    def enabled(cls):
        return enabled

    @classmethod
    def annotate_query(cls):
        return ANNOTATE_QUERY

    @classmethod
    def type(cls):
        return "athena"

    def __init__(self, configuration):
        super(Athena, self).__init__(configuration)

    def get_schema(self, get_stats=False):
        schema = {}
        query = """
        SELECT table_schema, table_name, column_name
        FROM information_schema.columns
        WHERE table_schema NOT IN ('pg_catalog', 'information_schema')
        WHERE table_schema NOT IN ('information_schema')
        """

        results, error = self.run_query(query, None)

        if error is not None:
            raise Exception("Failed getting schema.")

        results = json.loads(results)

        for row in results['rows']:
            table_name = '{}.{}'.format(row['table_schema'], row['table_name'])

            table_name = '{0}.{1}'.format(row['table_schema'], row['table_name'])
            if table_name not in schema:
                schema[table_name] = {'name': table_name, 'columns': []}

            schema[table_name]['columns'].append(row['column_name'])

        return schema.values()

    def run_query(self, query, user):
        cursor = pyathena.connect(
            s3_staging_dir=self.configuration['s3_staging_dir'],
            region_name=self.configuration['region'],
            aws_access_key_id=self.configuration.get('aws_access_key', None),
            aws_secret_access_key=self.configuration.get('aws_secret_key', None),
            schema_name=self.configuration.get('schema', 'default'),
            encryption_option=self.configuration.get('encryption_option', None),
            kms_key=self.configuration.get('kms_key', None),
            formatter=SimpleFormatter()).cursor()

        try:
            data = {
                'athenaUrl': 'jdbc:awsathena://athena.{}.amazonaws.com:443/'.format(self.configuration['region'].lower()),
                'awsAccessKey': self.configuration['aws_access_key'],
                'awsSecretKey': self.configuration['aws_secret_key'],
                's3StagingDir': self.configuration['s3_staging_dir'],
                'query': query
            }

            response = requests.post(PROXY_URL, json=data)
            response.raise_for_status()

            json_data = response.content.strip()
            cursor.execute(query)
            column_tuples = [(i[0], _TYPE_MAPPINGS.get(i[1], None)) for i in cursor.description]
            columns = self.fetch_columns(column_tuples)
            rows = [dict(zip(([c['name'] for c in columns]), r)) for i, r in enumerate(cursor.fetchall())]
            data = {'columns': columns, 'rows': rows}
            json_data = json.dumps(data, cls=JSONEncoder)
            error = None

            return json_data, error
        except requests.RequestException as e:
            if e.response.status_code == 400:
                return None, response.content

            return None, str(e)
        except KeyboardInterrupt:
            if cursor.query_id:
                cursor.cancel()
            error = "Query cancelled by user."
            json_data = None
        except Exception, ex:
            if cursor.query_id:
                cursor.cancel()
            error = ex.message
            json_data = None

        return json_data, error


register(Athena)

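The rewritten runner drops the HTTP proxy and talks to Athena directly through pyathena's DB-API interface: connect, get a cursor, execute, read cursor.description for column names and type codes, then fetchall. A minimal sketch of that flow outside of Redash; the bucket name is a placeholder, and credentials are assumed to come from the usual boto3 credential chain:

```python
import pyathena

# Placeholders: point these at a real region and staging bucket before running.
connection = pyathena.connect(
    s3_staging_dir='s3://example-athena-results/',   # hypothetical bucket
    region_name='us-east-1',
)
cursor = connection.cursor()

cursor.execute('SELECT 1 AS answer')

# cursor.description is the standard DB-API 7-tuple per column:
# (name, type_code, display_size, internal_size, precision, scale, null_ok)
columns = [(col[0], col[1]) for col in cursor.description]
rows = [dict(zip([name for name, _ in columns], row)) for row in cursor.fetchall()]

print(columns)   # e.g. [('answer', 'integer')]
print(rows)      # [{'answer': 1}]
```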
@@ -1,11 +1,11 @@
from base64 import b64decode
import datetime
import json
import httplib2
import logging
import sys
import time
from base64 import b64decode

import httplib2
import requests

from redash import settings
@@ -18,8 +18,8 @@ try:
    import apiclient.errors
    from apiclient.discovery import build
    from apiclient.errors import HttpError
    from oauth2client.client import SignedJwtAssertionCredentials
    from oauth2client import gce
    from oauth2client.service_account import ServiceAccountCredentials
    from oauth2client.contrib import gce

    enabled = True
except ImportError:
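oauth2client 2.0 removed SignedJwtAssertionCredentials; the replacement shown in this diff builds credentials straight from the parsed service-account JSON and then authorizes an httplib2.Http instance. A condensed sketch of that pattern (the key file path and scope are placeholders; oauth2client itself is long deprecated in favour of google-auth, but this mirrors the code above):

```python
import json

import httplib2
from apiclient.discovery import build
from oauth2client.service_account import ServiceAccountCredentials

scope = ['https://www.googleapis.com/auth/bigquery']

# Placeholder path to a Google service-account key file (JSON).
with open('/path/to/service-account.json') as f:
    key = json.load(f)

creds = ServiceAccountCredentials.from_json_keyfile_dict(key, scope)
http = creds.authorize(httplib2.Http(timeout=30))

service = build('bigquery', 'v2', http=http)
```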
@@ -100,7 +100,7 @@ class BigQuery(BaseQueryRunner):
            },
            'totalMBytesProcessedLimit': {
                "type": "number",
                'title': 'Total MByte Processed Limit'
                'title': 'Scanned Data Limit (MB)'
            },
            'userDefinedFunctionResourceUri': {
                "type": "string",
@@ -113,9 +113,14 @@ class BigQuery(BaseQueryRunner):
            'loadSchema': {
                "type": "boolean",
                "title": "Load Schema"
            },
            'maximumBillingTier': {
                "type": "number",
                "title": "Maximum Billing Tier"
            }
        },
        'required': ['jsonKeyFile', 'projectId'],
        "order": ['projectId', 'jsonKeyFile', 'loadSchema', 'useStandardSql', 'totalMBytesProcessedLimit', 'maximumBillingTier', 'userDefinedFunctionResourceUri'],
        'secret': ['jsonKeyFile']
    }

@@ -134,9 +139,9 @@ class BigQuery(BaseQueryRunner):

        key = json.loads(b64decode(self.configuration['jsonKeyFile']))

        credentials = SignedJwtAssertionCredentials(key['client_email'], key['private_key'], scope=scope)
        creds = ServiceAccountCredentials.from_json_keyfile_dict(key, scope)
        http = httplib2.Http(timeout=settings.BIGQUERY_HTTP_TIMEOUT)
        http = credentials.authorize(http)
        http = creds.authorize(http)

        return build("bigquery", "v2", http=http)

@@ -148,10 +153,10 @@ class BigQuery(BaseQueryRunner):
            "query": query,
            "dryRun": True,
        }

        if self.configuration.get('useStandardSql', False):
            job_data['useLegacySql'] = False

        response = jobs.query(projectId=self._get_project_id(), body=job_data).execute()
        return int(response["totalBytesProcessed"])

@@ -164,7 +169,7 @@ class BigQuery(BaseQueryRunner):
                }
            }
        }

        if self.configuration.get('useStandardSql', False):
            job_data['configuration']['query']['useLegacySql'] = False

@@ -174,6 +179,9 @@ class BigQuery(BaseQueryRunner):
            job_data["configuration"]["query"]["userDefinedFunctionResources"] = map(
                lambda resource_uri: {"resourceUri": resource_uri}, resource_uris)

        if "maximumBillingTier" in self.configuration:
            job_data["configuration"]["query"]["maximumBillingTier"] = self.configuration["maximumBillingTier"]

        insert_response = jobs.insert(projectId=project_id, body=job_data).execute()
        current_row = 0
        query_reply = _get_query_results(jobs, project_id=project_id,
@@ -215,7 +223,6 @@ class BigQuery(BaseQueryRunner):
        tables = service.tables().list(projectId=project_id, datasetId=dataset_id).execute()
        for table in tables.get('tables', []):
            table_data = service.tables().get(projectId=project_id, datasetId=dataset_id, tableId=table['tableReference']['tableId']).execute()
            print table_data

            schema.append({'name': table_data['id'], 'columns': map(lambda r: r['name'], table_data['schema']['fields'])})

@@ -1,5 +1,6 @@
import json
import logging
import uuid

from redash.query_runner import BaseQueryRunner, register
from redash.utils import JSONEncoder
@@ -14,6 +15,13 @@ except ImportError:
    enabled = False


class CassandraJSONEncoder(JSONEncoder):
    def default(self, o):
        if isinstance(o, uuid.UUID):
            return str(o)
        return super(CassandraJSONEncoder, self).default(o)


class Cassandra(BaseQueryRunner):
    noop_query = "SELECT dateof(now()) FROM system.local"

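Cassandra result sets routinely contain uuid.UUID values, which the stdlib json module refuses to serialize; subclassing the encoder and overriding default() is the standard escape hatch, and that is all CassandraJSONEncoder does. A self-contained sketch of the same idea against plain json.JSONEncoder (Redash's own JSONEncoder adds date and decimal handling on top):

```python
import json
import uuid

class UUIDEncoder(json.JSONEncoder):
    def default(self, o):
        # Convert UUIDs to their canonical string form; defer everything else.
        if isinstance(o, uuid.UUID):
            return str(o)
        return super(UUIDEncoder, self).default(o)

row = {'id': uuid.uuid4(), 'name': 'alpha'}
print(json.dumps(row, cls=UUIDEncoder))
# e.g. {"id": "0b4f3c1e-...", "name": "alpha"}
```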
@@ -44,6 +52,11 @@ class Cassandra(BaseQueryRunner):
            'password': {
                'type': 'string',
                'title': 'Password'
            },
            'protocol': {
                'type': 'number',
                'title': 'Protocol Version',
                'default': 3
            }
        },
        'required': ['keyspace', 'host']
@@ -77,10 +90,14 @@ class Cassandra(BaseQueryRunner):
        if self.configuration.get('username', '') and self.configuration.get('password', ''):
            auth_provider = PlainTextAuthProvider(username='{}'.format(self.configuration.get('username', '')),
                                                  password='{}'.format(self.configuration.get('password', '')))
            connection = Cluster([self.configuration.get('host', '')], auth_provider=auth_provider, protocol_version=3)
            connection = Cluster([self.configuration.get('host', '')],
                                 auth_provider=auth_provider,
                                 port=self.configuration.get('port', ''),
                                 protocol_version=self.configuration.get('protocol', 3))
        else:
            connection = Cluster([self.configuration.get('host', '')], protocol_version=3)

            connection = Cluster([self.configuration.get('host', '')],
                                 port=self.configuration.get('port', ''),
                                 protocol_version=self.configuration.get('protocol', 3))
        session = connection.connect()
        session.set_keyspace(self.configuration['keyspace'])
        logger.debug("Cassandra running query: %s", query)
@@ -93,7 +110,7 @@ class Cassandra(BaseQueryRunner):
            rows = [dict(zip(column_names, row)) for row in result]

            data = {'columns': columns, 'rows': rows}
            json_data = json.dumps(data, cls=JSONEncoder)
            json_data = json.dumps(data, cls=CassandraJSONEncoder)

            error = None
        except KeyboardInterrupt:

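The change threads a configurable port and protocol version through to the driver instead of hard-coding protocol_version=3. A minimal cassandra-driver sketch of the same connection setup; host, keyspace and credentials are placeholders:

```python
from cassandra.auth import PlainTextAuthProvider
from cassandra.cluster import Cluster

# Placeholders: point at a reachable Cassandra node before running.
auth_provider = PlainTextAuthProvider(username='redash', password='secret')

cluster = Cluster(
    ['cassandra.example.internal'],
    port=9042,                 # now configurable instead of the driver default only
    protocol_version=3,        # now read from the data-source options, default 3
    auth_provider=auth_provider,
)
session = cluster.connect()
session.set_keyspace('metrics')

rows = session.execute("SELECT dateof(now()) FROM system.local")
for row in rows:
    print(row)

cluster.shutdown()
```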
@@ -1,13 +1,13 @@
import simplejson as json
import logging
import sys
import urllib

import requests
import simplejson as json
from requests.auth import HTTPBasicAuth

from redash.query_runner import *

import requests

try:
    import http.client as http_client
except ImportError:
@@ -42,9 +42,9 @@ PYTHON_TYPES_MAPPING = {
    float: TYPE_FLOAT
}

class BaseElasticSearch(BaseQueryRunner):

    DEBUG_ENABLED = True
class BaseElasticSearch(BaseQueryRunner):
    DEBUG_ENABLED = False

    @classmethod
    def configuration_schema(cls):

redash/query_runner/files/rds-combined-ca-bundle.pem (new file, 360 lines)
@@ -0,0 +1,360 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIID9DCCAtygAwIBAgIBQjANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
|
||||
EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
|
||||
GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
|
||||
GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNTAyMDUwOTExMzFaFw0y
|
||||
MDAzMDUwOTExMzFaMIGKMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
|
||||
bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
|
||||
cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEbMBkGA1UEAwwSQW1hem9uIFJE
|
||||
UyBSb290IENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuD8nrZ8V
|
||||
u+VA8yVlUipCZIKPTDcOILYpUe8Tct0YeQQr0uyl018StdBsa3CjBgvwpDRq1HgF
|
||||
Ji2N3+39+shCNspQeE6aYU+BHXhKhIIStt3r7gl/4NqYiDDMWKHxHq0nsGDFfArf
|
||||
AOcjZdJagOMqb3fF46flc8k2E7THTm9Sz4L7RY1WdABMuurpICLFE3oHcGdapOb9
|
||||
T53pQR+xpHW9atkcf3pf7gbO0rlKVSIoUenBlZipUlp1VZl/OD/E+TtRhDDNdI2J
|
||||
P/DSMM3aEsq6ZQkfbz/Ilml+Lx3tJYXUDmp+ZjzMPLk/+3beT8EhrwtcG3VPpvwp
|
||||
BIOqsqVVTvw/CwIDAQABo2MwYTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUw
|
||||
AwEB/zAdBgNVHQ4EFgQUTgLurD72FchM7Sz1BcGPnIQISYMwHwYDVR0jBBgwFoAU
|
||||
TgLurD72FchM7Sz1BcGPnIQISYMwDQYJKoZIhvcNAQEFBQADggEBAHZcgIio8pAm
|
||||
MjHD5cl6wKjXxScXKtXygWH2BoDMYBJF9yfyKO2jEFxYKbHePpnXB1R04zJSWAw5
|
||||
2EUuDI1pSBh9BA82/5PkuNlNeSTB3dXDD2PEPdzVWbSKvUB8ZdooV+2vngL0Zm4r
|
||||
47QPyd18yPHrRIbtBtHR/6CwKevLZ394zgExqhnekYKIqqEX41xsUV0Gm6x4vpjf
|
||||
2u6O/+YE2U+qyyxHE5Wd5oqde0oo9UUpFETJPVb6Q2cEeQib8PBAyi0i6KnF+kIV
|
||||
A9dY7IHSubtCK/i8wxMVqfd5GtbA8mmpeJFwnDvm9rBEsHybl08qlax9syEwsUYr
|
||||
/40NawZfTUU=
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIEATCCAumgAwIBAgIBRDANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
|
||||
EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
|
||||
GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
|
||||
GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNTAyMDUyMjAzMDZaFw0y
|
||||
MDAzMDUyMjAzMDZaMIGUMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
|
||||
bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
|
||||
cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzElMCMGA1UEAwwcQW1hem9uIFJE
|
||||
UyBhcC1ub3J0aGVhc3QtMSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
|
||||
ggEBAMmM2B4PfTXCZjbZMWiDPyxvk/eeNwIRJAhfzesiGUiLozX6CRy3rwC1ZOPV
|
||||
AcQf0LB+O8wY88C/cV+d4Q2nBDmnk+Vx7o2MyMh343r5rR3Na+4izd89tkQVt0WW
|
||||
vO21KRH5i8EuBjinboOwAwu6IJ+HyiQiM0VjgjrmEr/YzFPL8MgHD/YUHehqjACn
|
||||
C0+B7/gu7W4qJzBL2DOf7ub2qszGtwPE+qQzkCRDwE1A4AJmVE++/FLH2Zx78Egg
|
||||
fV1sUxPtYgjGH76VyyO6GNKM6rAUMD/q5mnPASQVIXgKbupr618bnH+SWHFjBqZq
|
||||
HvDGPMtiiWII41EmGUypyt5AbysCAwEAAaNmMGQwDgYDVR0PAQH/BAQDAgEGMBIG
|
||||
A1UdEwEB/wQIMAYBAf8CAQAwHQYDVR0OBBYEFIiKM0Q6n1K4EmLxs3ZXxINbwEwR
|
||||
MB8GA1UdIwQYMBaAFE4C7qw+9hXITO0s9QXBj5yECEmDMA0GCSqGSIb3DQEBBQUA
|
||||
A4IBAQBezGbE9Rw/k2e25iGjj5n8r+M3dlye8ORfCE/dijHtxqAKasXHgKX8I9Tw
|
||||
JkBiGWiuzqn7gO5MJ0nMMro1+gq29qjZnYX1pDHPgsRjUX8R+juRhgJ3JSHijRbf
|
||||
4qNJrnwga7pj94MhcLq9u0f6dxH6dXbyMv21T4TZMTmcFduf1KgaiVx1PEyJjC6r
|
||||
M+Ru+A0eM+jJ7uCjUoZKcpX8xkj4nmSnz9NMPog3wdOSB9cAW7XIc5mHa656wr7I
|
||||
WJxVcYNHTXIjCcng2zMKd1aCcl2KSFfy56sRfT7J5Wp69QSr+jq8KM55gw8uqAwi
|
||||
VPrXn2899T1rcTtFYFP16WXjGuc0
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIEATCCAumgAwIBAgIBRTANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
|
||||
EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
|
||||
GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
|
||||
GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNTAyMDUyMjAzMTlaFw0y
|
||||
MDAzMDUyMjAzMTlaMIGUMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
|
||||
bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
|
||||
cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzElMCMGA1UEAwwcQW1hem9uIFJE
|
||||
UyBhcC1zb3V0aGVhc3QtMSBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
|
||||
ggEBANaXElmSEYt/UtxHFsARFhSUahTf1KNJzR0Dmay6hqOXQuRVbKRwPd19u5vx
|
||||
DdF1sLT7D69IK3VDnUiQScaCv2Dpu9foZt+rLx+cpx1qiQd1UHrvqq8xPzQOqCdC
|
||||
RFStq6yVYZ69yfpfoI67AjclMOjl2Vph3ftVnqP0IgVKZdzeC7fd+umGgR9xY0Qr
|
||||
Ubhd/lWdsbNvzK3f1TPWcfIKQnpvSt85PIEDJir6/nuJUKMtmJRwTymJf0i+JZ4x
|
||||
7dJa341p2kHKcHMgOPW7nJQklGBA70ytjUV6/qebS3yIugr/28mwReflg3TJzVDl
|
||||
EOvi6pqbqNbkMuEwGDCmEQIVqgkCAwEAAaNmMGQwDgYDVR0PAQH/BAQDAgEGMBIG
|
||||
A1UdEwEB/wQIMAYBAf8CAQAwHQYDVR0OBBYEFAu93/4k5xbWOsgdCdn+/KdiRuit
|
||||
MB8GA1UdIwQYMBaAFE4C7qw+9hXITO0s9QXBj5yECEmDMA0GCSqGSIb3DQEBBQUA
|
||||
A4IBAQBlcjSyscpPjf5+MgzMuAsCxByqUt+WFspwcMCpwdaBeHOPSQrXNqX2Sk6P
|
||||
kth6oCivA64trWo8tFMvPYlUA1FYVD5WpN0kCK+P5pD4KHlaDsXhuhClJzp/OP8t
|
||||
pOyUr5109RHLxqoKB5J5m1XA7rgcFjnMxwBSWFe3/4uMk/+4T53YfCVXuc6QV3i7
|
||||
I/2LAJwFf//pTtt6fZenYfCsahnr2nvrNRNyAxcfvGZ/4Opn/mJtR6R/AjvQZHiR
|
||||
bkRNKF2GW0ueK5W4FkZVZVhhX9xh1Aj2Ollb+lbOqADaVj+AT3PoJPZ3MPQHKCXm
|
||||
xwG0LOLlRr/TfD6li1AfOVTAJXv9
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIEATCCAumgAwIBAgIBRjANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
|
||||
EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
|
||||
GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
|
||||
GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNTAyMDUyMjAzMjRaFw0y
|
||||
MDAzMDUyMjAzMjRaMIGUMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
|
||||
bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
|
||||
cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzElMCMGA1UEAwwcQW1hem9uIFJE
|
||||
UyBhcC1zb3V0aGVhc3QtMiBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
|
||||
ggEBAJqBAJutz69hFOh3BtLHZTbwE8eejGGKayn9hu98YMDPzWzGXWCmW+ZYWELA
|
||||
cY3cNWNF8K4FqKXFr2ssorBYim1UtYFX8yhydT2hMD5zgQ2sCGUpuidijuPA6zaq
|
||||
Z3tdhVR94f0q8mpwpv2zqR9PcqaGDx2VR1x773FupRPRo7mEW1vC3IptHCQlP/zE
|
||||
7jQiLl28bDIH2567xg7e7E9WnZToRnhlYdTaDaJsHTzi5mwILi4cihSok7Shv/ME
|
||||
hnukvxeSPUpaVtFaBhfBqq055ePq9I+Ns4KGreTKMhU0O9fkkaBaBmPaFgmeX/XO
|
||||
n2AX7gMouo3mtv34iDTZ0h6YCGkCAwEAAaNmMGQwDgYDVR0PAQH/BAQDAgEGMBIG
|
||||
A1UdEwEB/wQIMAYBAf8CAQAwHQYDVR0OBBYEFIlQnY0KHYWn1jYumSdJYfwj/Nfw
|
||||
MB8GA1UdIwQYMBaAFE4C7qw+9hXITO0s9QXBj5yECEmDMA0GCSqGSIb3DQEBBQUA
|
||||
A4IBAQA0wVU6/l41cTzHc4azc4CDYY2Wd90DFWiH9C/mw0SgToYfCJ/5Cfi0NT/Y
|
||||
PRnk3GchychCJgoPA/k9d0//IhYEAIiIDjyFVgjbTkKV3sh4RbdldKVOUB9kumz/
|
||||
ZpShplsGt3z4QQiVnKfrAgqxWDjR0I0pQKkxXa6Sjkicos9LQxVtJ0XA4ieG1E7z
|
||||
zJr+6t80wmzxvkInSaWP3xNJK9azVRTrgQZQlvkbpDbExl4mNTG66VD3bAp6t3Wa
|
||||
B49//uDdfZmPkqqbX+hsxp160OH0rxJppwO3Bh869PkDnaPEd/Pxw7PawC+li0gi
|
||||
NRV8iCEx85aFxcyOhqn0WZOasxee
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIID/zCCAuegAwIBAgIBRzANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
|
||||
EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
|
||||
GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
|
||||
GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNTAyMDUyMjAzMzFaFw0y
|
||||
MDAzMDUyMjAzMzFaMIGSMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
|
||||
bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
|
||||
cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEjMCEGA1UEAwwaQW1hem9uIFJE
|
||||
UyBldS1jZW50cmFsLTEgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
|
||||
AQDFtP2dhSLuaPOI4ZrrPWsK4OY9ocQBp3yApH1KJYmI9wpQKZG/KCH2E6Oo7JAw
|
||||
QORU519r033T+FO2Z7pFPlmz1yrxGXyHpJs8ySx3Yo5S8ncDCdZJCLmtPiq/hahg
|
||||
5/0ffexMFUCQaYicFZsrJ/cStdxUV+tSw2JQLD7UxS9J97LQWUPyyG+ZrjYVTVq+
|
||||
zudnFmNSe4QoecXMhAFTGJFQXxP7nhSL9Ao5FGgdXy7/JWeWdQIAj8ku6cBDKPa6
|
||||
Y6kP+ak+In+Lye8z9qsCD/afUozfWjPR2aA4JoIZVF8dNRShIMo8l0XfgfM2q0+n
|
||||
ApZWZ+BjhIO5XuoUgHS3D2YFAgMBAAGjZjBkMA4GA1UdDwEB/wQEAwIBBjASBgNV
|
||||
HRMBAf8ECDAGAQH/AgEAMB0GA1UdDgQWBBRm4GsWIA/M6q+tK8WGHWDGh2gcyTAf
|
||||
BgNVHSMEGDAWgBROAu6sPvYVyEztLPUFwY+chAhJgzANBgkqhkiG9w0BAQUFAAOC
|
||||
AQEAHpMmeVQNqcxgfQdbDIi5UIy+E7zZykmtAygN1XQrvga9nXTis4kOTN6g5/+g
|
||||
HCx7jIXeNJzAbvg8XFqBN84Quqgpl/tQkbpco9Jh1HDs558D5NnZQxNqH5qXQ3Mm
|
||||
uPgCw0pYcPOa7bhs07i+MdVwPBsX27CFDtsgAIru8HvKxY1oTZrWnyIRo93tt/pk
|
||||
WuItVMVHjaQZVfTCow0aDUbte6Vlw82KjUFq+n2NMSCJDiDKsDDHT6BJc4AJHIq3
|
||||
/4Z52MSC9KMr0yAaaoWfW/yMEj9LliQauAgwVjArF4q78rxpfKTG9Rfd8U1BZANP
|
||||
7FrFMN0ThjfA1IvmOYcgskY5bQ==
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIID/DCCAuSgAwIBAgIBSDANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
|
||||
EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
|
||||
GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
|
||||
GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNTAyMDUyMjAzMzVaFw0y
|
||||
MDAzMDUyMjAzMzVaMIGPMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
|
||||
bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
|
||||
cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEgMB4GA1UEAwwXQW1hem9uIFJE
|
||||
UyBldS13ZXN0LTEgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx
|
||||
PdbqQ0HKRj79Pmocxvjc+P6i4Ux24kgFIl+ckiir1vzkmesc3a58gjrMlCksEObt
|
||||
Yihs5IhzEq1ePT0gbfS9GYFp34Uj/MtPwlrfCBWG4d2TcrsKRHr1/EXUYhWqmdrb
|
||||
RhX8XqoRhVkbF/auzFSBhTzcGGvZpQ2KIaxRcQfcXlMVhj/pxxAjh8U4F350Fb0h
|
||||
nX1jw4/KvEreBL0Xb2lnlGTkwVxaKGSgXEnOgIyOFdOQc61vdome0+eeZsP4jqeR
|
||||
TGYJA9izJsRbe2YJxHuazD+548hsPlM3vFzKKEVURCha466rAaYAHy3rKur3HYQx
|
||||
Yt+SoKcEz9PXuSGj96ejAgMBAAGjZjBkMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMB
|
||||
Af8ECDAGAQH/AgEAMB0GA1UdDgQWBBTebg//h2oeXbZjQ4uuoiuLYzuiPDAfBgNV
|
||||
HSMEGDAWgBROAu6sPvYVyEztLPUFwY+chAhJgzANBgkqhkiG9w0BAQUFAAOCAQEA
|
||||
TikPaGeZasTPw+4RBemlsyPAjtFFQLo7ddaFdORLgdEysVf8aBqndvbA6MT/v4lj
|
||||
GtEtUdF59ZcbWOrVm+fBZ2h/jYJ59dYF/xzb09nyRbdMSzB9+mkSsnOMqluq5y8o
|
||||
DY/PfP2vGhEg/2ZncRC7nlQU1Dm8F4lFWEiQ2fi7O1cW852Vmbq61RIfcYsH/9Ma
|
||||
kpgk10VZ75b8m3UhmpZ/2uRY+JEHImH5WpcTJ7wNiPNJsciZMznGtrgOnPzYco8L
|
||||
cDleOASIZifNMQi9PKOJKvi0ITz0B/imr8KBsW0YjZVJ54HMa7W1lwugSM7aMAs+
|
||||
E3Sd5lS+SHwWaOCHwhOEVA==
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIID/DCCAuSgAwIBAgIBSTANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
|
||||
EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
|
||||
GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
|
||||
GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNTAyMDUyMjAzNDBaFw0y
|
||||
MDAzMDUyMjAzNDBaMIGPMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
|
||||
bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
|
||||
cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEgMB4GA1UEAwwXQW1hem9uIFJE
|
||||
UyBzYS1lYXN0LTEgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCU
|
||||
X4OBnQ5xA6TLJAiFEI6l7bUWjoVJBa/VbMdCCSs2i2dOKmqUaXu2ix2zcPILj3lZ
|
||||
GMk3d/2zvTK/cKhcFrewHUBamTeVHdEmynhMQamqNmkM4ptYzFcvEUw1TGxHT4pV
|
||||
Q6gSN7+/AJewQvyHexHo8D0+LDN0/Wa9mRm4ixCYH2CyYYJNKaZt9+EZfNu+PPS4
|
||||
8iB0TWH0DgQkbWMBfCRgolLLitAZklZ4dvdlEBS7evN1/7ttBxUK6SvkeeSx3zBl
|
||||
ww3BlXqc3bvTQL0A+RRysaVyFbvtp9domFaDKZCpMmDFAN/ntx215xmQdrSt+K3F
|
||||
cXdGQYHx5q410CAclGnbAgMBAAGjZjBkMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMB
|
||||
Af8ECDAGAQH/AgEAMB0GA1UdDgQWBBT6iVWnm/uakS+tEX2mzIfw+8JL0zAfBgNV
|
||||
HSMEGDAWgBROAu6sPvYVyEztLPUFwY+chAhJgzANBgkqhkiG9w0BAQUFAAOCAQEA
|
||||
FmDD+QuDklXn2EgShwQxV13+txPRuVdOSrutHhoCgMwFWCMtPPtBAKs6KPY7Guvw
|
||||
DpJoZSehDiOfsgMirjOWjvfkeWSNvKfjWTVneX7pZD9W5WPnsDBvTbCGezm+v87z
|
||||
b+ZM2ZMo98m/wkMcIEAgdSKilR2fuw8rLkAjhYFfs0A7tDgZ9noKwgHvoE4dsrI0
|
||||
KZYco6DlP/brASfHTPa2puBLN9McK3v+h0JaSqqm5Ro2Bh56tZkQh8AWy/miuDuK
|
||||
3+hNEVdxosxlkM1TPa1DGj0EzzK0yoeerXuH2HX7LlCrrxf6/wdKnjR12PMrLQ4A
|
||||
pCqkcWw894z6bV9MAvKe6A==
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIID/DCCAuSgAwIBAgIBQzANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
|
||||
EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
|
||||
GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
|
||||
GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNTAyMDUyMTU0MDRaFw0y
|
||||
MDAzMDUyMTU0MDRaMIGPMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
|
||||
bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
|
||||
cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEgMB4GA1UEAwwXQW1hem9uIFJE
|
||||
UyB1cy1lYXN0LTEgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDI
|
||||
UIuwh8NusKHk1SqPXcP7OqxY3S/M2ZyQWD3w7Bfihpyyy/fc1w0/suIpX3kbMhAV
|
||||
2ESwged2/2zSx4pVnjp/493r4luhSqQYzru78TuPt9bhJIJ51WXunZW2SWkisSaf
|
||||
USYUzVN9ezR/bjXTumSUQaLIouJt3OHLX49s+3NAbUyOI8EdvgBQWD68H1epsC0n
|
||||
CI5s+pIktyOZ59c4DCDLQcXErQ+tNbDC++oct1ANd/q8p9URonYwGCGOBy7sbCYq
|
||||
9eVHh1Iy2M+SNXddVOGw5EuruvHoCIQyOz5Lz4zSuZA9dRbrfztNOpezCNYu6NKM
|
||||
n+hzcvdiyxv77uNm8EaxAgMBAAGjZjBkMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMB
|
||||
Af8ECDAGAQH/AgEAMB0GA1UdDgQWBBQSQG3TmMe6Sa3KufaPBa72v4QFDzAfBgNV
|
||||
HSMEGDAWgBROAu6sPvYVyEztLPUFwY+chAhJgzANBgkqhkiG9w0BAQUFAAOCAQEA
|
||||
L/mOZfB3187xTmjOHMqN2G2oSKHBKiQLM9uv8+97qT+XR+TVsBT6b3yoPpMAGhHA
|
||||
Pc7nxAF5gPpuzatx0OTLPcmYucFmfqT/1qA5WlgCnMNtczyNMH97lKFTNV7Njtek
|
||||
jWEzAEQSyEWrkNpNlC4j6kMYyPzVXQeXUeZTgJ9FNnVZqmvfjip2N22tawMjrCn5
|
||||
7KN/zN65EwY2oO9XsaTwwWmBu3NrDdMbzJnbxoWcFWj4RBwanR1XjQOVNhDwmCOl
|
||||
/1Et13b8CPyj69PC8BOVU6cfTSx8WUVy0qvYOKHNY9Bqa5BDnIL3IVmUkeTlM1mt
|
||||
enRpyBj+Bk9rh/ICdiRKmA==
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIID/DCCAuSgAwIBAgIBSjANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
|
||||
EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
|
||||
GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
|
||||
GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNTAyMDUyMjAzNDVaFw0y
|
||||
MDAzMDUyMjAzNDVaMIGPMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
|
||||
bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
|
||||
cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEgMB4GA1UEAwwXQW1hem9uIFJE
|
||||
UyB1cy13ZXN0LTEgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDE
|
||||
Dhw+uw/ycaiIhhyu2pXFRimq0DlB8cNtIe8hdqndH8TV/TFrljNgR8QdzOgZtZ9C
|
||||
zzQ2GRpInN/qJF6slEd6wO+6TaDBQkPY+07TXNt52POFUhdVkhJXHpE2BS7Xn6J7
|
||||
7RFAOeG1IZmc2DDt+sR1BgXzUqHslQGfFYNS0/MBO4P+ya6W7IhruB1qfa4HiYQS
|
||||
dbe4MvGWnv0UzwAqdR7OF8+8/5c58YXZIXCO9riYF2ql6KNSL5cyDPcYK5VK0+Q9
|
||||
VI6vuJHSMYcF7wLePw8jtBktqAFE/wbdZiIHhZvNyiNWPPNTGUmQbaJ+TzQEHDs5
|
||||
8en+/W7JKnPyBOkxxENbAgMBAAGjZjBkMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMB
|
||||
Af8ECDAGAQH/AgEAMB0GA1UdDgQWBBS0nw/tFR9bCjgqWTPJkyy4oOD8bzAfBgNV
|
||||
HSMEGDAWgBROAu6sPvYVyEztLPUFwY+chAhJgzANBgkqhkiG9w0BAQUFAAOCAQEA
|
||||
CXGAY3feAak6lHdqj6+YWjy6yyUnLK37bRxZDsyDVXrPRQaXRzPTzx79jvDwEb/H
|
||||
Q/bdQ7zQRWqJcbivQlwhuPJ4kWPUZgSt3JUUuqkMsDzsvj/bwIjlrEFDOdHGh0mi
|
||||
eVIngFEjUXjMh+5aHPEF9BlQnB8LfVtKj18e15UDTXFa+xJPFxUR7wDzCfo4WI1m
|
||||
sUMG4q1FkGAZgsoyFPZfF8IVvgCuGdR8z30VWKklFxttlK0eGLlPAyIO0CQxPQlo
|
||||
saNJrHf4tLOgZIWk+LpDhNd9Et5EzvJ3aURUsKY4pISPPF5WdvM9OE59bERwUErd
|
||||
nuOuQWQeeadMceZnauRzJQ==
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIID/DCCAuSgAwIBAgIBSzANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
|
||||
EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
|
||||
GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
|
||||
GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNTAyMDUyMjAzNTBaFw0y
|
||||
MDAzMDUyMjAzNTBaMIGPMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
|
||||
bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
|
||||
cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEgMB4GA1UEAwwXQW1hem9uIFJE
|
||||
UyB1cy13ZXN0LTIgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDM
|
||||
H58SR48U6jyERC1vYTnub34smf5EQVXyzaTmspWGWGzT31NLNZGSDFaa7yef9kdO
|
||||
mzJsgebR5tXq6LdwlIoWkKYQ7ycUaadtVKVYdI40QcI3cHn0qLFlg2iBXmWp/B+i
|
||||
Z34VuVlCh31Uj5WmhaBoz8t/GRqh1V/aCsf3Wc6jCezH3QfuCjBpzxdOOHN6Ie2v
|
||||
xX09O5qmZTvMoRBAvPkxdaPg/Mi7fxueWTbEVk78kuFbF1jHYw8U1BLILIAhcqlq
|
||||
x4u8nl73t3O3l/soNUcIwUDK0/S+Kfqhwn9yQyPlhb4Wy3pfnZLJdkyHldktnQav
|
||||
9TB9u7KH5Lk0aAYslMLxAgMBAAGjZjBkMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMB
|
||||
Af8ECDAGAQH/AgEAMB0GA1UdDgQWBBT8roM4lRnlFHWMPWRz0zkwFZog1jAfBgNV
|
||||
HSMEGDAWgBROAu6sPvYVyEztLPUFwY+chAhJgzANBgkqhkiG9w0BAQUFAAOCAQEA
|
||||
JwrxwgwmPtcdaU7O7WDdYa4hprpOMamI49NDzmE0s10oGrqmLwZygcWU0jT+fJ+Y
|
||||
pJe1w0CVfKaeLYNsOBVW3X4ZPmffYfWBheZiaiEflq/P6t7/Eg81gaKYnZ/x1Dfa
|
||||
sUYkzPvCkXe9wEz5zdUTOCptDt89rBR9CstL9vE7WYUgiVVmBJffWbHQLtfjv6OF
|
||||
NMb0QME981kGRzc2WhgP71YS2hHd1kXtsoYP1yTu4vThSKsoN4bkiHsaC1cRkLoy
|
||||
0fFA4wpB3WloMEvCDaUvvH1LZlBXTNlwi9KtcwD4tDxkkBt4tQczKLGpQ/nF/W9n
|
||||
8YDWk3IIc1sd0bkZqoau2Q==
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIEATCCAumgAwIBAgIBTDANBgkqhkiG9w0BAQUFADCBijELMAkGA1UEBhMCVVMx
|
||||
EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
|
||||
GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
|
||||
GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNTExMDYwMDA1NDZaFw0y
|
||||
MDAzMDUwMDA1NDZaMIGUMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
|
||||
bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
|
||||
cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzElMCMGA1UEAwwcQW1hem9uIFJE
|
||||
UyBhcC1ub3J0aGVhc3QtMiBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC
|
||||
ggEBAKSwd+RVUzTRH0FgnbwoTK8TMm/zMT4+2BvALpAUe6YXbkisg2goycWuuWLg
|
||||
jOpFBB3GtyvXZnkqi7MkDWUmj1a2kf8l2oLyoaZ+Hm9x/sV+IJzOqPvj1XVUGjP6
|
||||
yYYnPJmUYqvZeI7fEkIGdFkP2m4/sgsSGsFvpD9FK1bL1Kx2UDpYX0kHTtr18Zm/
|
||||
1oN6irqWALSmXMDydb8hE0FB2A1VFyeKE6PnoDj/Y5cPHwPPdEi6/3gkDkSaOG30
|
||||
rWeQfL3pOcKqzbHaWTxMphd0DSL/quZ64Nr+Ly65Q5PRcTrtr55ekOUziuqXwk+o
|
||||
9QpACMwcJ7ROqOznZTqTzSFVXFECAwEAAaNmMGQwDgYDVR0PAQH/BAQDAgEGMBIG
|
||||
A1UdEwEB/wQIMAYBAf8CAQAwHQYDVR0OBBYEFM6Nox/QWbhzWVvzoJ/y0kGpNPK+
|
||||
MB8GA1UdIwQYMBaAFE4C7qw+9hXITO0s9QXBj5yECEmDMA0GCSqGSIb3DQEBBQUA
|
||||
A4IBAQCTkWBqNvyRf3Y/W21DwFx3oT/AIWrHt0BdGZO34tavummXemTH9LZ/mqv9
|
||||
aljt6ZuDtf5DEQjdsAwXMsyo03ffnP7doWm8iaF1+Mui77ot0TmTsP/deyGwukvJ
|
||||
tkxX8bZjDh+EaNauWKr+CYnniNxCQLfFtXYJsfOdVBzK3xNL+Z3ucOQRhr2helWc
|
||||
CDQgwfhP1+3pRVKqHvWCPC4R3fT7RZHuRmZ38kndv476GxRntejh+ePffif78bFI
|
||||
3rIZCPBGobrrUMycafSbyXteoGca/kA+/IqrAPlk0pWQ4aEL0yTWN2h2dnjoD7oX
|
||||
byIuL/g9AGRh97+ssn7D6bDRPTbW
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIID/TCCAuWgAwIBAgIBTTANBgkqhkiG9w0BAQsFADCBijELMAkGA1UEBhMCVVMx
|
||||
EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
|
||||
GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
|
||||
GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNjA1MDMyMTI5MjJaFw0y
|
||||
MDAzMDUyMTI5MjJaMIGQMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
|
||||
bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
|
||||
cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEhMB8GA1UEAwwYQW1hem9uIFJE
|
||||
UyBhcC1zb3V0aC0xIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA
|
||||
06eWGLE0TeqL9kyWOLkS8q0fXO97z+xyBV3DKSB2lg2GkgBz3B98MkmkeB0SZy3G
|
||||
Ce4uCpCPbFKiFEdiUclOlhZsrBuCeaimxLM3Ig2wuenElO/7TqgaYHYUbT3d+VQW
|
||||
GUbLn5GRZJZe1OAClYdOWm7A1CKpuo+cVV1vxbY2nGUQSJPpVn2sT9gnwvjdE60U
|
||||
JGYU/RLCTm8zmZBvlWaNIeKDnreIc4rKn6gUnJ2cQn1ryCVleEeyc3xjYDSrjgdn
|
||||
FLYGcp9mphqVT0byeQMOk0c7RHpxrCSA0V5V6/CreFV2LteK50qcDQzDSM18vWP/
|
||||
p09FoN8O7QrtOeZJzH/lmwIDAQABo2YwZDAOBgNVHQ8BAf8EBAMCAQYwEgYDVR0T
|
||||
AQH/BAgwBgEB/wIBADAdBgNVHQ4EFgQU2i83QHuEl/d0keXF+69HNJph7cMwHwYD
|
||||
VR0jBBgwFoAUTgLurD72FchM7Sz1BcGPnIQISYMwDQYJKoZIhvcNAQELBQADggEB
|
||||
ACqnH2VjApoDqoSQOky52QBwsGaj+xWYHW5Gm7EvCqvQuhWMkeBuD6YJmMvNyA9G
|
||||
I2lh6/o+sUk/RIsbYbxPRdhNPTOgDR9zsNRw6qxaHztq/CEC+mxDCLa3O1hHBaDV
|
||||
BmB3nCZb93BvO0EQSEk7aytKq/f+sjyxqOcs385gintdHGU9uM7gTZHnU9vByJsm
|
||||
/TL07Miq67X0NlhIoo3jAk+xHaeKJdxdKATQp0448P5cY20q4b8aMk1twcNaMvCP
|
||||
dG4M5doaoUA8OQ/0ukLLae/LBxLeTw04q1/a2SyFaVUX2Twbb1S3xVWwLA8vsyGr
|
||||
igXx7B5GgP+IHb6DTjPJAi0=
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIID/DCCAuSgAwIBAgIBTjANBgkqhkiG9w0BAQsFADCBijELMAkGA1UEBhMCVVMx
|
||||
EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
|
||||
GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
|
||||
GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNjA4MTExOTU4NDVaFw0y
|
||||
MDAzMDUxOTU4NDVaMIGPMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
|
||||
bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
|
||||
cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEgMB4GA1UEAwwXQW1hem9uIFJE
|
||||
UyB1cy1lYXN0LTIgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCp
|
||||
WnnUX7wM0zzstccX+4iXKJa9GR0a2PpvB1paEX4QRCgfhEdQWDaSqyrWNgdVCKkt
|
||||
1aQkWu5j6VAC2XIG7kKoonm1ZdBVyBLqW5lXNywlaiU9yhJkwo8BR+/OqgE+PLt/
|
||||
EO1mlN0PQudja/XkExCXTO29TG2j7F/O7hox6vTyHNHc0H88zS21uPuBE+jivViS
|
||||
yzj/BkyoQ85hnkues3f9R6gCGdc+J51JbZnmgzUkvXjAEuKhAm9JksVOxcOKUYe5
|
||||
ERhn0U9zjzpfbAITIkul97VVa5IxskFFTHIPJbvRKHJkiF6wTJww/tc9wm+fSCJ1
|
||||
+DbQTGZgkQ3bJrqRN29/AgMBAAGjZjBkMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMB
|
||||
Af8ECDAGAQH/AgEAMB0GA1UdDgQWBBSAHQzUYYZbepwKEMvGdHp8wzHnfDAfBgNV
|
||||
HSMEGDAWgBROAu6sPvYVyEztLPUFwY+chAhJgzANBgkqhkiG9w0BAQsFAAOCAQEA
|
||||
MbaEzSYZ+aZeTBxf8yi0ta8K4RdwEJsEmP6IhFFQHYUtva2Cynl4Q9tZg3RMsybT
|
||||
9mlnSQQlbN/wqIIXbkrcgFcHoXG9Odm/bDtUwwwDaiEhXVfeQom3G77QHOWMTCGK
|
||||
qadwuh5msrb17JdXZoXr4PYHDKP7j0ONfAyFNER2+uecblHfRSpVq5UeF3L6ZJb8
|
||||
fSw/GtAV6an+/0r+Qm+PiI2H5XuZ4GmRJYnGMhqWhBYrY7p3jtVnKcsh39wgfUnW
|
||||
AvZEZG/yhFyAZW0Essa39LiL5VSq14Y1DOj0wgnhSY/9WHxaAo1HB1T9OeZknYbD
|
||||
fl/EGSZ0TEvZkENrXcPlVA==
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIID/zCCAuegAwIBAgIBTzANBgkqhkiG9w0BAQsFADCBijELMAkGA1UEBhMCVVMx
|
||||
EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
|
||||
GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
|
||||
GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNjA5MTUwMDEwMTFaFw0y
|
||||
MDAzMDUwMDEwMTFaMIGSMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
|
||||
bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
|
||||
cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEjMCEGA1UEAwwaQW1hem9uIFJE
|
||||
UyBjYS1jZW50cmFsLTEgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB
|
||||
AQCZYI/iQ6DrS3ny3t1EwX1wAD+3LMgh7Fd01EW5LIuaK2kYIIQpsVKhxLCit/V5
|
||||
AGc/1qiJS1Qz9ODLTh0Na6bZW6EakRzuHJLe32KJtoFYPC7Z09UqzXrpA/XL+1hM
|
||||
P0ZmCWsU7Nn/EmvfBp9zX3dZp6P6ATrvDuYaVFr+SA7aT3FXpBroqBS1fyzUPs+W
|
||||
c6zTR6+yc4zkHX0XQxC5RH6xjgpeRkoOajA/sNo7AQF7KlWmKHbdVF44cvvAhRKZ
|
||||
XaoVs/C4GjkaAEPTCbopYdhzg+KLx9eB2BQnYLRrIOQZtRfbQI2Nbj7p3VsRuOW1
|
||||
tlcks2w1Gb0YC6w6SuIMFkl1AgMBAAGjZjBkMA4GA1UdDwEB/wQEAwIBBjASBgNV
|
||||
HRMBAf8ECDAGAQH/AgEAMB0GA1UdDgQWBBToYWxE1lawl6Ks6NsvpbHQ3GKEtzAf
|
||||
BgNVHSMEGDAWgBROAu6sPvYVyEztLPUFwY+chAhJgzANBgkqhkiG9w0BAQsFAAOC
|
||||
AQEAG/8tQ0ooi3hoQpa5EJz0/E5VYBsAz3YxA2HoIonn0jJyG16bzB4yZt4vNQMA
|
||||
KsNlQ1uwDWYL1nz63axieUUFIxqxl1KmwfhsmLgZ0Hd2mnTPIl2Hw3uj5+wdgGBg
|
||||
agnAZ0bajsBYgD2VGQbqjdk2Qn7Fjy3LEWIvGZx4KyZ99OJ2QxB7JOPdauURAtWA
|
||||
DKYkP4LLJxtj07DSzG8kuRWb9B47uqUD+eKDIyjfjbnzGtd9HqqzYFau7EX3HVD9
|
||||
9Qhnjl7bTZ6YfAEZ3nH2t3Vc0z76XfGh47rd0pNRhMV+xpok75asKf/lNh5mcUrr
|
||||
VKwflyMkQpSbDCmcdJ90N2xEXQ==
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIID/DCCAuSgAwIBAgIBUDANBgkqhkiG9w0BAQsFADCBijELMAkGA1UEBhMCVVMx
|
||||
EzARBgNVBAgMCldhc2hpbmd0b24xEDAOBgNVBAcMB1NlYXR0bGUxIjAgBgNVBAoM
|
||||
GUFtYXpvbiBXZWIgU2VydmljZXMsIEluYy4xEzARBgNVBAsMCkFtYXpvbiBSRFMx
|
||||
GzAZBgNVBAMMEkFtYXpvbiBSRFMgUm9vdCBDQTAeFw0xNjEwMTAxNzQ0NDJaFw0y
|
||||
MDAzMDUxNzQ0NDJaMIGPMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3Rv
|
||||
bjEQMA4GA1UEBwwHU2VhdHRsZTEiMCAGA1UECgwZQW1hem9uIFdlYiBTZXJ2aWNl
|
||||
cywgSW5jLjETMBEGA1UECwwKQW1hem9uIFJEUzEgMB4GA1UEAwwXQW1hem9uIFJE
|
||||
UyBldS13ZXN0LTIgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDO
|
||||
cttLJfubB4XMMIGWNfJISkIdCMGJyOzLiMJaiWB5GYoXKhEl7YGotpy0qklwW3BQ
|
||||
a0fmVdcCLX+dIuVQ9iFK+ZcK7zwm7HtdDTCHOCKeOh2IcnU4c/VIokFi6Gn8udM6
|
||||
N/Zi5M5OGpVwLVALQU7Yctsn3c95el6MdVx6mJiIPVu7tCVZn88Z2koBQ2gq9P4O
|
||||
Sb249SHFqOb03lYDsaqy1NDsznEOhaRBw7DPJFpvmw1lA3/Y6qrExRI06H2VYR2i
|
||||
7qxwDV50N58fs10n7Ye1IOxTVJsgEA7X6EkRRXqYaM39Z76R894548WHfwXWjUsi
|
||||
MEX0RS0/t1GmnUQjvevDAgMBAAGjZjBkMA4GA1UdDwEB/wQEAwIBBjASBgNVHRMB
|
||||
Af8ECDAGAQH/AgEAMB0GA1UdDgQWBBQBxmcuRSxERYCtNnSr5xNfySokHjAfBgNV
|
||||
HSMEGDAWgBROAu6sPvYVyEztLPUFwY+chAhJgzANBgkqhkiG9w0BAQsFAAOCAQEA
|
||||
UyCUQjsF3nUAABjfEZmpksTuUo07aT3KGYt+EMMFdejnBQ0+2lJJFGtT+CDAk1SD
|
||||
RSgfEBon5vvKEtlnTf9a3pv8WXOAkhfxnryr9FH6NiB8obISHNQNPHn0ljT2/T+I
|
||||
Y6ytfRvKHa0cu3V0NXbJm2B4KEOt4QCDiFxUIX9z6eB4Kditwu05OgQh6KcogOiP
|
||||
JesWxBMXXGoDC1rIYTFO7szwDyOHlCcVXJDNsTJhc32oDWYdeIbW7o/5I+aQsrXZ
|
||||
C96HykZcgWzz6sElrQxUaT3IoMw/5nmw4uWKKnZnxgI9bY4fpQwMeBZ96iHfFxvH
|
||||
mqfEEuC7uUoPofXdBp2ObQ==
|
||||
-----END CERTIFICATE-----
|
||||
redash/query_runner/files/redshift-ca-bundle.crt (new file, 145 lines)
@@ -0,0 +1,145 @@
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDeDCCAuGgAwIBAgIJALPHPDcjk979MA0GCSqGSIb3DQEBBQUAMIGFMQswCQYD
|
||||
VQQGEwJVUzETMBEGA1UECBMKV2FzaGluZ3RvbjEQMA4GA1UEBxMHU2VhdHRsZTET
|
||||
MBEGA1UEChMKQW1hem9uLmNvbTELMAkGA1UECxMCQ00xLTArBgkqhkiG9w0BCQEW
|
||||
HmNvb2tpZS1tb25zdGVyLWNvcmVAYW1hem9uLmNvbTAeFw0xMjExMDIyMzI0NDda
|
||||
Fw0xODExMDEyMzI0NDdaMIGFMQswCQYDVQQGEwJVUzETMBEGA1UECBMKV2FzaGlu
|
||||
Z3RvbjEQMA4GA1UEBxMHU2VhdHRsZTETMBEGA1UEChMKQW1hem9uLmNvbTELMAkG
|
||||
A1UECxMCQ00xLTArBgkqhkiG9w0BCQEWHmNvb2tpZS1tb25zdGVyLWNvcmVAYW1h
|
||||
em9uLmNvbTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAw949t4UZ+9n1K8vj
|
||||
PVkyehoV2kWepDmJ8YKl358nkmNwrSAGkslVttdpZS+FrgIcb44UbfVbB4bOSq0J
|
||||
qd39GYVRzSazCwr2tpibFvH87PyAX4VVUBDlCizJToEYsXkAKecs+IRqCDWG2ht/
|
||||
pibO2+T5Wp8jaxUBvDmoHY3BSgkCAwEAAaOB7TCB6jAdBgNVHQ4EFgQUE5KUaWSM
|
||||
Uml+6MZQia7DjmfjvLgwgboGA1UdIwSBsjCBr4AUE5KUaWSMUml+6MZQia7Djmfj
|
||||
vLihgYukgYgwgYUxCzAJBgNVBAYTAlVTMRMwEQYDVQQIEwpXYXNoaW5ndG9uMRAw
|
||||
DgYDVQQHEwdTZWF0dGxlMRMwEQYDVQQKEwpBbWF6b24uY29tMQswCQYDVQQLEwJD
|
||||
TTEtMCsGCSqGSIb3DQEJARYeY29va2llLW1vbnN0ZXItY29yZUBhbWF6b24uY29t
|
||||
ggkAs8c8NyOT3v0wDAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQUFAAOBgQC9l5+L
|
||||
7PaPiF9tsZ20CkyBNEdcM3dWrGT2KR0UBQLWYgPDoBKKkqV56c361kWInOtZ2ucf
|
||||
JHjJpT1Np8j673LRbTrZiFiITMg7CcScq5u2ntMa3BNVCeVYlqVLH3RZ7RiQIBXR
|
||||
M5hUZ03/aJqN3fQKamd3MfGHft42AXFOwvh9xg==
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl
|
||||
MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp
|
||||
U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw
|
||||
NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE
|
||||
ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp
|
||||
ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3
|
||||
DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf
|
||||
8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN
|
||||
+lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0
|
||||
X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa
|
||||
K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA
|
||||
1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G
|
||||
A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR
|
||||
zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0
|
||||
YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD
|
||||
bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w
|
||||
DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3
|
||||
L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D
|
||||
eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl
|
||||
xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp
|
||||
VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY
|
||||
WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q=
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx
|
||||
EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT
|
||||
HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs
|
||||
ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5
|
||||
MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD
|
||||
VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy
|
||||
ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy
|
||||
dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI
|
||||
hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p
|
||||
OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2
|
||||
8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K
|
||||
Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe
|
||||
hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk
|
||||
6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw
|
||||
DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q
|
||||
AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI
|
||||
bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB
|
||||
ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z
|
||||
qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd
|
||||
iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn
|
||||
0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN
|
||||
sSi6
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIDQTCCAimgAwIBAgITBmyfz5m/jAo54vB4ikPmljZbyjANBgkqhkiG9w0BAQsF
|
||||
ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
|
||||
b24gUm9vdCBDQSAxMB4XDTE1MDUyNjAwMDAwMFoXDTM4MDExNzAwMDAwMFowOTEL
|
||||
MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
|
||||
b3QgQ0EgMTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALJ4gHHKeNXj
|
||||
ca9HgFB0fW7Y14h29Jlo91ghYPl0hAEvrAIthtOgQ3pOsqTQNroBvo3bSMgHFzZM
|
||||
9O6II8c+6zf1tRn4SWiw3te5djgdYZ6k/oI2peVKVuRF4fn9tBb6dNqcmzU5L/qw
|
||||
IFAGbHrQgLKm+a/sRxmPUDgH3KKHOVj4utWp+UhnMJbulHheb4mjUcAwhmahRWa6
|
||||
VOujw5H5SNz/0egwLX0tdHA114gk957EWW67c4cX8jJGKLhD+rcdqsq08p8kDi1L
|
||||
93FcXmn/6pUCyziKrlA4b9v7LWIbxcceVOF34GfID5yHI9Y/QCB/IIDEgEw+OyQm
|
||||
jgSubJrIqg0CAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMC
|
||||
AYYwHQYDVR0OBBYEFIQYzIU07LwMlJQuCFmcx7IQTgoIMA0GCSqGSIb3DQEBCwUA
|
||||
A4IBAQCY8jdaQZChGsV2USggNiMOruYou6r4lK5IpDB/G/wkjUu0yKGX9rbxenDI
|
||||
U5PMCCjjmCXPI6T53iHTfIUJrU6adTrCC2qJeHZERxhlbI1Bjjt/msv0tadQ1wUs
|
||||
N+gDS63pYaACbvXy8MWy7Vu33PqUXHeeE6V/Uq2V8viTO96LXFvKWlJbYK8U90vv
|
||||
o/ufQJVtMVT8QtPHRh8jrdkPSHCa2XV4cdFyQzR1bldZwgJcJmApzyMZFo6IQ6XU
|
||||
5MsI+yMRQ+hDKXJioaldXgjUkK642M4UwtBV8ob2xJNDd2ZhwLnoQdeXeGADbkpy
|
||||
rqXRfboQnoZsG4q5WTP468SQvvG5
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIFQTCCAymgAwIBAgITBmyf0pY1hp8KD+WGePhbJruKNzANBgkqhkiG9w0BAQwF
|
||||
ADA5MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6
|
||||
b24gUm9vdCBDQSAyMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTEL
|
||||
MAkGA1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJv
|
||||
b3QgQ0EgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAK2Wny2cSkxK
|
||||
gXlRmeyKy2tgURO8TW0G/LAIjd0ZEGrHJgw12MBvIITplLGbhQPDW9tK6Mj4kHbZ
|
||||
W0/jTOgGNk3Mmqw9DJArktQGGWCsN0R5hYGCrVo34A3MnaZMUnbqQ523BNFQ9lXg
|
||||
1dKmSYXpN+nKfq5clU1Imj+uIFptiJXZNLhSGkOQsL9sBbm2eLfq0OQ6PBJTYv9K
|
||||
8nu+NQWpEjTj82R0Yiw9AElaKP4yRLuH3WUnAnE72kr3H9rN9yFVkE8P7K6C4Z9r
|
||||
2UXTu/Bfh+08LDmG2j/e7HJV63mjrdvdfLC6HM783k81ds8P+HgfajZRRidhW+me
|
||||
z/CiVX18JYpvL7TFz4QuK/0NURBs+18bvBt+xa47mAExkv8LV/SasrlX6avvDXbR
|
||||
8O70zoan4G7ptGmh32n2M8ZpLpcTnqWHsFcQgTfJU7O7f/aS0ZzQGPSSbtqDT6Zj
|
||||
mUyl+17vIWR6IF9sZIUVyzfpYgwLKhbcAS4y2j5L9Z469hdAlO+ekQiG+r5jqFoz
|
||||
7Mt0Q5X5bGlSNscpb/xVA1wf+5+9R+vnSUeVC06JIglJ4PVhHvG/LopyboBZ/1c6
|
||||
+XUyo05f7O0oYtlNc/LMgRdg7c3r3NunysV+Ar3yVAhU/bQtCSwXVEqY0VThUWcI
|
||||
0u1ufm8/0i2BWSlmy5A5lREedCf+3euvAgMBAAGjQjBAMA8GA1UdEwEB/wQFMAMB
|
||||
Af8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSwDPBMMPQFWAJI/TPlUq9LhONm
|
||||
UjANBgkqhkiG9w0BAQwFAAOCAgEAqqiAjw54o+Ci1M3m9Zh6O+oAA7CXDpO8Wqj2
|
||||
LIxyh6mx/H9z/WNxeKWHWc8w4Q0QshNabYL1auaAn6AFC2jkR2vHat+2/XcycuUY
|
||||
+gn0oJMsXdKMdYV2ZZAMA3m3MSNjrXiDCYZohMr/+c8mmpJ5581LxedhpxfL86kS
|
||||
k5Nrp+gvU5LEYFiwzAJRGFuFjWJZY7attN6a+yb3ACfAXVU3dJnJUH/jWS5E4ywl
|
||||
7uxMMne0nxrpS10gxdr9HIcWxkPo1LsmmkVwXqkLN1PiRnsn/eBG8om3zEK2yygm
|
||||
btmlyTrIQRNg91CMFa6ybRoVGld45pIq2WWQgj9sAq+uEjonljYE1x2igGOpm/Hl
|
||||
urR8FLBOybEfdF849lHqm/osohHUqS0nGkWxr7JOcQ3AWEbWaQbLU8uz/mtBzUF+
|
||||
fUwPfHJ5elnNXkoOrJupmHN5fLT0zLm4BwyydFy4x2+IoZCn9Kr5v2c69BoVYh63
|
||||
n749sSmvZ6ES8lgQGVMDMBu4Gon2nL2XA46jCfMdiyHxtN/kHNGfZQIG6lzWE7OE
|
||||
76KlXIx3KadowGuuQNKotOrN8I1LOJwZmhsoVLiJkO/KdYE+HvJkJMcYr07/R54H
|
||||
9jVlpNMKVv/1F2Rs76giJUmTtt8AF9pYfl3uxRuw0dFfIRDH+fO6AgonB8Xx1sfT
|
||||
4PsJYGw=
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIBtjCCAVugAwIBAgITBmyf1XSXNmY/Owua2eiedgPySjAKBggqhkjOPQQDAjA5
|
||||
MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
|
||||
Um9vdCBDQSAzMB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
|
||||
A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
|
||||
Q0EgMzBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABCmXp8ZBf8ANm+gBG1bG8lKl
|
||||
ui2yEujSLtf6ycXYqm0fc4E7O5hrOXwzpcVOho6AF2hiRVd9RFgdszflZwjrZt6j
|
||||
QjBAMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgGGMB0GA1UdDgQWBBSr
|
||||
ttvXBp43rDCGB5Fwx5zEGbF4wDAKBggqhkjOPQQDAgNJADBGAiEA4IWSoxe3jfkr
|
||||
BqWTrBqYaGFy+uGh0PsceGCmQ5nFuMQCIQCcAu/xlJyzlvnrxir4tiz+OpAUFteM
|
||||
YyRIHN8wfdVoOw==
|
||||
-----END CERTIFICATE-----
|
||||
-----BEGIN CERTIFICATE-----
|
||||
MIIB8jCCAXigAwIBAgITBmyf18G7EEwpQ+Vxe3ssyBrBDjAKBggqhkjOPQQDAzA5
|
||||
MQswCQYDVQQGEwJVUzEPMA0GA1UEChMGQW1hem9uMRkwFwYDVQQDExBBbWF6b24g
|
||||
Um9vdCBDQSA0MB4XDTE1MDUyNjAwMDAwMFoXDTQwMDUyNjAwMDAwMFowOTELMAkG
|
||||
A1UEBhMCVVMxDzANBgNVBAoTBkFtYXpvbjEZMBcGA1UEAxMQQW1hem9uIFJvb3Qg
|
||||
Q0EgNDB2MBAGByqGSM49AgEGBSuBBAAiA2IABNKrijdPo1MN/sGKe0uoe0ZLY7Bi
|
||||
9i0b2whxIdIA6GO9mif78DluXeo9pcmBqqNbIJhFXRbb/egQbeOc4OO9X4Ri83Bk
|
||||
M6DLJC9wuoihKqB1+IGuYgbEgds5bimwHvouXKNCMEAwDwYDVR0TAQH/BAUwAwEB
|
||||
/zAOBgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFNPsxzplbszh2naaVvuc84ZtV+WB
|
||||
MAoGCCqGSM49BAMDA2gAMGUCMDqLIfG9fhGt0O9Yli/W651+kI0rz2ZVwyzjKKlw
|
||||
CkcO8DdZEv8tmZQoTipPNU0zWgIxAOp1AE47xDqUEpHJWEadIRNyp4iciuRMStuW
|
||||
1KyLa2tJElMzrdfkviT8tQp21KW8EA==
|
||||
-----END CERTIFICATE-----
|
||||
@@ -1,29 +1,26 @@
# -*- coding: utf-8 -*-

from base64 import b64decode
import json
import logging
from base64 import b64decode
from datetime import datetime
from urlparse import parse_qs, urlparse

from redash.query_runner import *
from redash.utils import JSONEncoder
from urlparse import urlparse, parse_qs
from datetime import datetime

logger = logging.getLogger(__name__)

try:
    from oauth2client.client import SignedJwtAssertionCredentials
    from oauth2client.service_account import ServiceAccountCredentials
    from apiclient.discovery import build
    from apiclient.errors import HttpError
    import httplib2
    enabled = True
except ImportError as e:
    logger.info(str(e))
    enabled = False


def _load_key(filename):
    with open(filename, "rb") as f:
        return json.loads(f.read())


types_conv = dict(
    STRING=TYPE_STRING,
    INTEGER=TYPE_INTEGER,
@@ -33,6 +30,54 @@ types_conv = dict(
|
||||
)
|
||||
|
||||
|
||||
def parse_ga_response(response):
|
||||
columns = []
|
||||
for h in response['columnHeaders']:
|
||||
if h['name'] in ('ga:date', 'mcf:conversionDate'):
|
||||
h['dataType'] = 'DATE'
|
||||
elif h['name'] == 'ga:dateHour':
|
||||
h['dataType'] = 'DATETIME'
|
||||
columns.append({
|
||||
'name': h['name'],
|
||||
'friendly_name': h['name'].split(':', 1)[1],
|
||||
'type': types_conv.get(h['dataType'], 'string')
|
||||
})
|
||||
|
||||
rows = []
|
||||
for r in response['rows']:
|
||||
d = {}
|
||||
for c, value in enumerate(r):
|
||||
column_name = response['columnHeaders'][c]['name']
|
||||
column_type = filter(lambda col: col['name'] == column_name, columns)[0]['type']
|
||||
|
||||
# mcf results come a bit different than ga results:
|
||||
if isinstance(value, dict):
|
||||
if 'primitiveValue' in value:
|
||||
value = value['primitiveValue']
|
||||
elif 'conversionPathValue' in value:
|
||||
steps = []
|
||||
for step in value['conversionPathValue']:
|
||||
steps.append('{}:{}'.format(step['interactionType'], step['nodeValue']))
|
||||
value = ', '.join(steps)
|
||||
else:
|
||||
raise Exception("Results format not supported")
|
||||
|
||||
if column_type == TYPE_DATE:
|
||||
value = datetime.strptime(value, '%Y%m%d')
|
||||
elif column_type == TYPE_DATETIME:
|
||||
if len(value) == 10:
|
||||
value = datetime.strptime(value, '%Y%m%d%H')
|
||||
elif len(value) == 12:
|
||||
value = datetime.strptime(value, '%Y%m%d%H%M')
|
||||
else:
|
||||
raise Exception("Unknown date/time format in results: '{}'".format(value))
|
||||
|
||||
d[column_name] = value
|
||||
rows.append(d)
|
||||
|
||||
return {'columns': columns, 'rows': rows}
|
||||
|
||||
|
||||
class GoogleAnalytics(BaseSQLQueryRunner):
|
||||
@classmethod
|
||||
def annotate_query(cls):
|
||||
@@ -42,6 +87,10 @@ class GoogleAnalytics(BaseSQLQueryRunner):
|
||||
def type(cls):
|
||||
return "google_analytics"
|
||||
|
||||
@classmethod
|
||||
def name(cls):
|
||||
return "Google Analytics"
|
||||
|
||||
@classmethod
|
||||
def enabled(cls):
|
||||
return enabled
|
||||
@@ -62,6 +111,13 @@ class GoogleAnalytics(BaseSQLQueryRunner):
|
||||
|
||||
def __init__(self, configuration):
|
||||
super(GoogleAnalytics, self).__init__(configuration)
|
||||
self.syntax = 'json'
|
||||
|
||||
def _get_analytics_service(self):
|
||||
scope = ['https://www.googleapis.com/auth/analytics.readonly']
|
||||
key = json.loads(b64decode(self.configuration['jsonKeyFile']))
|
||||
creds = ServiceAccountCredentials.from_json_keyfile_dict(key, scope)
|
||||
return build('analytics', 'v3', http=creds.authorize(httplib2.Http()))
|
||||
|
||||
def _get_tables(self, schema):
|
||||
accounts = self._get_analytics_service().management().accounts().list().execute().get('items')
|
||||
@@ -76,13 +132,16 @@ class GoogleAnalytics(BaseSQLQueryRunner):
|
||||
schema[account['name']]['columns'].append(
|
||||
u'{0} (ga:{1})'.format(property_['name'], property_['defaultProfileId'])
|
||||
)
|
||||
|
||||
return schema.values()
|
||||
|
||||
def _get_analytics_service(self):
|
||||
scope = ['https://www.googleapis.com/auth/analytics.readonly']
|
||||
key = json.loads(b64decode(self.configuration['jsonKeyFile']))
|
||||
credentials = SignedJwtAssertionCredentials(key['client_email'], key["private_key"], scope=scope)
|
||||
return build('analytics', 'v3', http=credentials.authorize(httplib2.Http()))
|
||||
def test_connection(self):
|
||||
try:
|
||||
service = self._get_analytics_service()
|
||||
service.management().accounts().list().execute()
|
||||
except HttpError as e:
|
||||
# Make sure we return a more readable error to the end user
|
||||
raise Exception(e._get_reason())
|
||||
|
||||
def run_query(self, query, user):
|
||||
logger.debug("Analytics is about to execute query: %s", query)
|
||||
@@ -94,42 +153,32 @@ class GoogleAnalytics(BaseSQLQueryRunner):
|
||||
params[key] = ','.join(params[key])
|
||||
if '-' in key:
|
||||
params[key.replace('-', '_')] = params.pop(key)
|
||||
if len(params) > 0:
|
||||
response = self._get_analytics_service().data().ga().get(**params).execute()
|
||||
columns = []
|
||||
for h in response['columnHeaders']:
|
||||
if h['name'] == 'ga:date':
|
||||
h['dataType'] = 'DATE'
|
||||
elif h['name'] == 'ga:dateHour':
|
||||
h['dataType'] = 'DATETIME'
|
||||
columns.append({
|
||||
'name': h['name'],
|
||||
'friendly_name': h['name'].split(':', 1)[1],
|
||||
'type': types_conv.get(h['dataType'], 'string')
|
||||
})
|
||||
rows = []
|
||||
for r in response['rows']:
|
||||
d = {}
|
||||
for c, value in enumerate(r):
|
||||
column_name = response['columnHeaders'][c]['name']
|
||||
column_type = filter(lambda col: col['name'] == column_name, columns)[0]['type']
|
||||
if column_type == TYPE_DATE:
|
||||
value = datetime.strptime(value, '%Y%m%d')
|
||||
elif column_type == TYPE_DATETIME:
|
||||
if len(value) == 10:
|
||||
value = datetime.strptime(value, '%Y%m%d%H')
|
||||
elif len(value) == 12:
|
||||
value = datetime.strptime(value, '%Y%m%d%H%M')
|
||||
else:
|
||||
raise Exception("Unknown date/time format in results: '{}'".format(value))
|
||||
d[column_name] = value
|
||||
rows.append(d)
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
error = None
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
|
||||
if 'mcf:' in params['metrics'] and 'ga:' in params['metrics']:
|
||||
raise Exception("Can't mix mcf: and ga: metrics.")
|
||||
|
||||
if 'mcf:' in params.get('dimensions', '') and 'ga:' in params.get('dimensions', ''):
|
||||
raise Exception("Can't mix mcf: and ga: dimensions.")
|
||||
|
||||
if 'mcf:' in params['metrics']:
|
||||
api = self._get_analytics_service().data().mcf()
|
||||
else:
|
||||
error = 'Wrong query format'
|
||||
api = self._get_analytics_service().data().ga()
|
||||
|
||||
if len(params) > 0:
|
||||
try:
|
||||
response = api.get(**params).execute()
|
||||
data = parse_ga_response(response)
|
||||
error = None
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
except HttpError as e:
|
||||
# Make sure we return a more readable error to the end user
|
||||
error = e._get_reason()
|
||||
json_data = None
|
||||
else:
|
||||
error = 'Wrong query format.'
|
||||
json_data = None
|
||||
return json_data, error
|
||||
|
||||
|
||||
register(GoogleAnalytics)
|
||||
|
||||
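The Google Analytics hunk above routes queries whose metrics carry the mcf: prefix to the Multi-Channel Funnels endpoint (data().mcf()) and everything else to Core Reporting (data().ga()), and refuses to mix mcf: with ga:. A minimal sketch of that routing rule on its own, independent of the Google API client; the helper name is illustrative and not part of the diff:

# Illustrative sketch of the mcf:/ga: routing checks from the hunk above (not part of the diff).
def choose_ga_endpoint(params):
    """Return 'mcf' or 'ga' for a parsed query, mirroring the checks in run_query."""
    metrics = params.get('metrics', '')
    dimensions = params.get('dimensions', '')

    if 'mcf:' in metrics and 'ga:' in metrics:
        raise Exception("Can't mix mcf: and ga: metrics.")
    if 'mcf:' in dimensions and 'ga:' in dimensions:
        raise Exception("Can't mix mcf: and ga: dimensions.")

    return 'mcf' if 'mcf:' in metrics else 'ga'

# A Multi-Channel Funnels query goes to data().mcf(), a regular one to data().ga():
print(choose_ga_endpoint({'metrics': 'mcf:totalConversions', 'dimensions': 'mcf:conversionDate'}))  # mcf
print(choose_ga_endpoint({'metrics': 'ga:sessions', 'dimensions': 'ga:date'}))                      # ga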
0 redash/query_runner/google_spanner.py Normal file
@@ -1,7 +1,10 @@
|
||||
from base64 import b64decode
|
||||
import json
|
||||
import logging
|
||||
from base64 import b64decode
|
||||
|
||||
from dateutil import parser
|
||||
from requests import Session
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
|
||||
@@ -9,7 +12,8 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
import gspread
|
||||
from oauth2client.client import SignedJwtAssertionCredentials
|
||||
from gspread.httpsession import HTTPSession
|
||||
from oauth2client.service_account import ServiceAccountCredentials
|
||||
enabled = True
|
||||
except ImportError:
|
||||
enabled = False
|
||||
@@ -38,7 +42,7 @@ def _guess_type(value):
|
||||
try:
|
||||
val = parser.parse(value)
|
||||
return TYPE_DATETIME
|
||||
except ValueError:
|
||||
except (ValueError, OverflowError):
|
||||
pass
|
||||
return TYPE_STRING
|
||||
|
||||
@@ -72,7 +76,7 @@ def _value_eval_list(value):
|
||||
val = parser.parse(member)
|
||||
value_list.append(val)
|
||||
continue
|
||||
except ValueError:
|
||||
except (ValueError, OverflowError):
|
||||
pass
|
||||
value_list.append(member)
|
||||
return value_list
|
||||
@@ -108,7 +112,7 @@ def parse_worksheet(worksheet):
|
||||
})
|
||||
|
||||
if len(worksheet) > 1:
|
||||
for j, value in enumerate(worksheet[HEADER_INDEX+1]):
|
||||
for j, value in enumerate(worksheet[HEADER_INDEX + 1]):
|
||||
columns[j]['type'] = _guess_type(value)
|
||||
|
||||
rows = [dict(zip(column_names, _value_eval_list(row))) for row in worksheet[HEADER_INDEX + 1:]]
|
||||
@@ -128,6 +132,12 @@ def parse_spreadsheet(spreadsheet, worksheet_num):
|
||||
return parse_worksheet(worksheet)
|
||||
|
||||
|
||||
class TimeoutSession(Session):
|
||||
def request(self, *args, **kwargs):
|
||||
kwargs.setdefault('timeout', 300)
|
||||
return super(TimeoutSession, self).request(*args, **kwargs)
|
||||
|
||||
|
||||
class GoogleSpreadsheet(BaseQueryRunner):
|
||||
@classmethod
|
||||
def annotate_query(cls):
|
||||
@@ -164,8 +174,12 @@ class GoogleSpreadsheet(BaseQueryRunner):
|
||||
]
|
||||
|
||||
key = json.loads(b64decode(self.configuration['jsonKeyFile']))
|
||||
credentials = SignedJwtAssertionCredentials(key['client_email'], key["private_key"], scope=scope)
|
||||
spreadsheetservice = gspread.authorize(credentials)
|
||||
creds = ServiceAccountCredentials.from_json_keyfile_dict(key, scope)
|
||||
|
||||
timeout_session = HTTPSession()
|
||||
timeout_session.requests_session = TimeoutSession()
|
||||
spreadsheetservice = gspread.Client(auth=creds, http_session=timeout_session)
|
||||
spreadsheetservice.login()
|
||||
return spreadsheetservice
|
||||
|
||||
def test_connection(self):
|
||||
@@ -174,8 +188,9 @@ class GoogleSpreadsheet(BaseQueryRunner):
|
||||
def run_query(self, query, user):
|
||||
logger.debug("Spreadsheet is about to execute query: %s", query)
|
||||
values = query.split("|")
|
||||
key = values[0] #key of the spreadsheet
|
||||
worksheet_num = 0 if len(values) != 2 else int(values[1])# if spreadsheet contains more than one worksheet - this is the number of it
|
||||
key = values[0] # key of the spreadsheet
|
||||
worksheet_num = 0 if len(values) != 2 else int(values[1]) # if spreadsheet contains more than one worksheet - this is the number of it
|
||||
|
||||
try:
|
||||
spreadsheet_service = self._get_spreadsheet_service()
|
||||
spreadsheet = spreadsheet_service.open_by_key(key)
|
||||
|
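The spreadsheets hunk above wraps gspread's HTTP layer in a requests session that supplies a default timeout, so a hung Google endpoint cannot block a worker indefinitely. A standalone sketch of that session class; the 300-second default mirrors the diff, the example URL is only illustrative:

# Sketch of the default-timeout session used by the Google Spreadsheets runner above.
from requests import Session

class TimeoutSession(Session):
    def request(self, *args, **kwargs):
        # Apply a 5-minute timeout unless the caller passed one explicitly.
        kwargs.setdefault('timeout', 300)
        return super(TimeoutSession, self).request(*args, **kwargs)

session = TimeoutSession()
# session.get('https://spreadsheets.google.com/feeds')  # any request now times out after 300s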
@@ -10,7 +10,7 @@ logger = logging.getLogger(__name__)
try:
    from pyhive import hive
    enabled = True
except ImportError, e:
except ImportError:
    enabled = False

COLUMN_NAME = 0
@@ -125,9 +125,6 @@ class Hive(BaseSQLQueryRunner):
            connection.cancel()
            error = "Query cancelled by user."
            json_data = None
        except Exception as e:
            logging.exception(e)
            raise sys.exc_info()[1], None, sys.exc_info()[2]
        finally:
            if connection:
                connection.close()
|
||||
@@ -125,20 +125,15 @@ class Impala(BaseSQLQueryRunner):
            error = None
            cursor.close()
        except DatabaseError as e:
            logging.exception(e)
            json_data = None
            error = e.message
        except RPCError as e:
            logging.exception(e)
            json_data = None
            error = "Metastore Error [%s]" % e.message
        except KeyboardInterrupt:
            connection.cancel()
            error = "Query cancelled by user."
            json_data = None
        except Exception as e:
            logging.exception(e)
            raise sys.exc_info()[1], None, sys.exc_info()[2]
        finally:
            if connection:
                connection.close()
|
||||
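Several runners in this diff (Hive, Impala, MemSQL, MySQL, Oracle, PostgreSQL, SQLite) end their error handling with raise sys.exc_info()[1], None, sys.exc_info()[2]. A short Python 2 sketch of that idiom, with invented function names: the three-argument raise re-raises the original exception with its original traceback, so the finally-block cleanup does not hide where the query actually failed.

# Python 2 sketch of the re-raise-with-traceback idiom used by the runners above (names invented).
import sys

def flaky():
    raise ValueError("boom")

def run_with_cleanup():
    try:
        flaky()
    except Exception:
        exc_info = sys.exc_info()
        # Three-argument raise: same exception object, same traceback as the original failure.
        raise exc_info[1], None, exc_info[2]
    finally:
        pass  # connection.close() would go here

try:
    run_with_cleanup()
except ValueError:
    pass  # the traceback still points into flaky(), which is the point of the idiom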
@@ -1,8 +1,8 @@
import json
import logging

from redash.utils import JSONEncoder
from redash.query_runner import *
from redash.utils import JSONEncoder

logger = logging.getLogger(__name__)

@@ -49,7 +49,7 @@ def _transform_result(results):


class InfluxDB(BaseQueryRunner):
    noop_query = "show databases"
    noop_query = "show measurements limit 1"

    @classmethod
    def configuration_schema(cls):
|
||||
@@ -54,7 +54,7 @@ def parse_issue(issue, field_mapping):
|
||||
|
||||
if 'watchCount' in v:
|
||||
result[output_name] = v['watchCount']
|
||||
|
||||
|
||||
elif isinstance(v, list):
|
||||
if len(member_names) > 0:
|
||||
# if field mapping with dict member mappings defined get value of each member
|
||||
@@ -104,7 +104,7 @@ class FieldMapping:
|
||||
for k, v in query_field_mapping.iteritems():
|
||||
field_name = k
|
||||
member_name = None
|
||||
|
||||
|
||||
# check for member name contained in field name
|
||||
member_parser = re.search('(\w+)\.(\w+)', k)
|
||||
if (member_parser):
|
||||
@@ -183,6 +183,8 @@ class JiraJQL(BaseQueryRunner):
|
||||
if query_type == 'count':
|
||||
query['maxResults'] = 1
|
||||
query['fields'] = ''
|
||||
else:
|
||||
query['maxResults'] = query.get('maxResults', 1000)
|
||||
|
||||
response = requests.get(jql_url, params=query, auth=(self.configuration.get('username'), self.configuration.get('password')))
|
||||
|
||||
|
||||
153 redash/query_runner/memsql_ds.py Normal file
@@ -0,0 +1,153 @@
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
from memsql.common import database
|
||||
enabled = True
|
||||
except ImportError:
|
||||
enabled = False
|
||||
|
||||
COLUMN_NAME = 0
|
||||
COLUMN_TYPE = 1
|
||||
|
||||
types_map = {
|
||||
'BIGINT': TYPE_INTEGER,
|
||||
'TINYINT': TYPE_INTEGER,
|
||||
'SMALLINT': TYPE_INTEGER,
|
||||
'MEDIUMINT': TYPE_INTEGER,
|
||||
'INT': TYPE_INTEGER,
|
||||
'DOUBLE': TYPE_FLOAT,
|
||||
'DECIMAL': TYPE_FLOAT,
|
||||
'FLOAT': TYPE_FLOAT,
|
||||
'REAL': TYPE_FLOAT,
|
||||
'BOOL': TYPE_BOOLEAN,
|
||||
'BOOLEAN': TYPE_BOOLEAN,
|
||||
'TIMESTAMP': TYPE_DATETIME,
|
||||
'DATETIME': TYPE_DATETIME,
|
||||
'DATE': TYPE_DATETIME,
|
||||
'JSON': TYPE_STRING,
|
||||
'CHAR': TYPE_STRING,
|
||||
'VARCHAR': TYPE_STRING
|
||||
}
|
||||
|
||||
|
||||
class MemSQL(BaseSQLQueryRunner):
|
||||
noop_query = 'SELECT 1'
|
||||
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"host": {
|
||||
"type": "string"
|
||||
},
|
||||
"port": {
|
||||
"type": "number"
|
||||
},
|
||||
"user": {
|
||||
"type": "string"
|
||||
},
|
||||
"password": {
|
||||
"type": "string"
|
||||
}
|
||||
|
||||
},
|
||||
"required": ["host", "port"],
|
||||
"secret": ["password"]
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def annotate_query(cls):
|
||||
return False
|
||||
|
||||
@classmethod
|
||||
def type(cls):
|
||||
return "memsql"
|
||||
|
||||
@classmethod
|
||||
def enabled(cls):
|
||||
return enabled
|
||||
|
||||
def __init__(self, configuration):
|
||||
super(MemSQL, self).__init__(configuration)
|
||||
|
||||
def _get_tables(self, schema):
|
||||
schemas_query = "show schemas"
|
||||
|
||||
tables_query = "show tables in %s"
|
||||
|
||||
columns_query = "show columns in %s"
|
||||
|
||||
for schema_name in filter(lambda a: len(a) > 0,
|
||||
map(lambda a: str(a['Database']), self._run_query_internal(schemas_query))):
|
||||
for table_name in filter(lambda a: len(a) > 0, map(lambda a: str(a['Tables_in_%s' % schema_name]),
|
||||
self._run_query_internal(
|
||||
tables_query % schema_name))):
|
||||
table_name = '.'.join((schema_name, table_name))
|
||||
columns = filter(lambda a: len(a) > 0, map(lambda a: str(a['Field']),
|
||||
self._run_query_internal(columns_query % table_name)))
|
||||
|
||||
schema[table_name] = {'name': table_name, 'columns': columns}
|
||||
return schema.values()
|
||||
|
||||
def run_query(self, query, user):
|
||||
|
||||
cursor = None
|
||||
try:
|
||||
cursor = database.connect(**self.configuration.to_dict())
|
||||
|
||||
res = cursor.query(query)
|
||||
# column_names = []
|
||||
# columns = []
|
||||
#
|
||||
# for column in cursor.description:
|
||||
# column_name = column[COLUMN_NAME]
|
||||
# column_names.append(column_name)
|
||||
#
|
||||
# columns.append({
|
||||
# 'name': column_name,
|
||||
# 'friendly_name': column_name,
|
||||
# 'type': types_map.get(column[COLUMN_TYPE], None)
|
||||
# })
|
||||
|
||||
rows = [dict(zip(list(row.keys()), list(row.values()))) for row in res]
|
||||
|
||||
# ====================================================================================================
|
||||
# temporary - until https://github.com/memsql/memsql-python/pull/8 gets merged
|
||||
# ====================================================================================================
|
||||
columns = []
|
||||
column_names = rows[0].keys() if rows else None
|
||||
|
||||
if column_names:
|
||||
for column in column_names:
|
||||
columns.append({
|
||||
'name': column,
|
||||
'friendly_name': column,
|
||||
'type': TYPE_STRING
|
||||
})
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
error = None
|
||||
except KeyboardInterrupt:
|
||||
cursor.close()
|
||||
error = "Query cancelled by user."
|
||||
json_data = None
|
||||
except Exception as e:
|
||||
logging.exception(e)
|
||||
raise sys.exc_info()[1], None, sys.exc_info()[2]
|
||||
finally:
|
||||
if cursor:
|
||||
cursor.close()
|
||||
|
||||
return json_data, error
|
||||
|
||||
|
||||
register(MemSQL)
|
||||
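Per the configuration_schema above, a MemSQL data source needs host and port, with user and password optional and the password marked secret. A hedged example of an options document that schema would accept; all values are placeholders:

# Example options matching the MemSQL configuration_schema above; values are placeholders.
import json

memsql_options = {
    "host": "memsql.example.internal",  # required
    "port": 3306,                       # required
    "user": "redash",                   # optional
    "password": "secret",               # optional, listed under "secret" in the schema
}
print(json.dumps(memsql_options, indent=2))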
@@ -1,11 +1,12 @@
|
||||
import json
|
||||
import datetime
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
|
||||
from dateutil.parser import parse
|
||||
|
||||
from redash.utils import JSONEncoder, parse_human_time
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder, parse_human_time
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -45,6 +46,13 @@ class MongoDBJSONEncoder(JSONEncoder):
|
||||
date_regex = re.compile("ISODate\(\"(.*)\"\)", re.IGNORECASE)
|
||||
|
||||
|
||||
def parse_oids(oids):
|
||||
if not isinstance(oids, list):
|
||||
raise Exception("$oids takes an array as input.")
|
||||
|
||||
return [bson_object_hook({'$oid': oid}) for oid in oids]
|
||||
|
||||
|
||||
def datetime_parser(dct):
|
||||
for k, v in dct.iteritems():
|
||||
if isinstance(v, basestring):
|
||||
@@ -55,6 +63,9 @@ def datetime_parser(dct):
|
||||
if '$humanTime' in dct:
|
||||
return parse_human_time(dct['$humanTime'])
|
||||
|
||||
if '$oids' in dct:
|
||||
return parse_oids(dct['$oids'])
|
||||
|
||||
return bson_object_hook(dct)
|
||||
|
||||
|
||||
|
||||
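The MongoDB hunk above adds an $oids helper so a query can hand over a list of ObjectId strings in one go. A hedged sketch of the query shape it accepts; the collection and ids are made up:

# Illustrative MongoDB query using the new $oids helper; collection and ids are placeholders.
query = {
    "collection": "events",
    "query": {
        "_id": {"$in": {"$oids": [
            "58c8a1f9d4b0a43f3c2f9f10",
            "58c8a1f9d4b0a43f3c2f9f11",
        ]}}
    },
}
# datetime_parser() spots the {"$oids": [...]} value and parse_oids() turns it into a list
# of bson ObjectIds before the query reaches pymongo.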
@@ -18,7 +18,9 @@ except ImportError:
|
||||
types_map = {
|
||||
1: TYPE_STRING,
|
||||
2: TYPE_BOOLEAN,
|
||||
3: TYPE_INTEGER,
|
||||
# Type #3 supposed to be an integer, but in some cases decimals are returned
|
||||
# with this type. To be on safe side, marking it as float.
|
||||
3: TYPE_FLOAT,
|
||||
4: TYPE_DATETIME,
|
||||
5: TYPE_FLOAT,
|
||||
}
|
||||
@@ -160,7 +162,6 @@ class SqlServer(BaseSQLQueryRunner):
|
||||
|
||||
cursor.close()
|
||||
except pymssql.Error as e:
|
||||
logging.exception(e)
|
||||
try:
|
||||
# Query errors are at `args[1]`
|
||||
error = e.args[1]
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
import sys
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.query_runner import *
|
||||
from redash.settings import parse_boolean
|
||||
from redash.utils import JSONEncoder
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
types_map = {
|
||||
0: TYPE_FLOAT,
|
||||
1: TYPE_INTEGER,
|
||||
@@ -26,12 +26,15 @@ types_map = {
|
||||
254: TYPE_STRING,
|
||||
}
|
||||
|
||||
|
||||
class Mysql(BaseSQLQueryRunner):
|
||||
noop_query = "SELECT 1"
|
||||
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {
|
||||
show_ssl_settings = parse_boolean(os.environ.get('MYSQL_SHOW_SSL_SETTINGS', 'true'))
|
||||
|
||||
schema = {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'host': {
|
||||
@@ -52,7 +55,15 @@ class Mysql(BaseSQLQueryRunner):
|
||||
'port': {
|
||||
'type': 'number',
|
||||
'default': 3306,
|
||||
},
|
||||
}
|
||||
},
|
||||
"order": ['host', 'port', 'user', 'passwd', 'db'],
|
||||
'required': ['db'],
|
||||
'secret': ['passwd']
|
||||
}
|
||||
|
||||
if show_ssl_settings:
|
||||
schema['properties'].update({
|
||||
'use_ssl': {
|
||||
'type': 'boolean',
|
||||
'title': 'Use SSL'
|
||||
@@ -69,10 +80,9 @@ class Mysql(BaseSQLQueryRunner):
|
||||
'type': 'string',
|
||||
'title': 'Path to private key file (SSL)'
|
||||
}
|
||||
},
|
||||
'required': ['db'],
|
||||
'secret': ['passwd']
|
||||
}
|
||||
})
|
||||
|
||||
return schema
|
||||
|
||||
@classmethod
|
||||
def name(cls):
|
||||
@@ -147,14 +157,12 @@ class Mysql(BaseSQLQueryRunner):
|
||||
error = "No data was returned."
|
||||
|
||||
cursor.close()
|
||||
except MySQLdb.Error, e:
|
||||
except MySQLdb.Error as e:
|
||||
json_data = None
|
||||
error = e.args[1]
|
||||
except KeyboardInterrupt:
|
||||
error = "Query cancelled by user."
|
||||
json_data = None
|
||||
except Exception as e:
|
||||
raise sys.exc_info()[1], None, sys.exc_info()[2]
|
||||
finally:
|
||||
if connection:
|
||||
connection.close()
|
||||
@@ -176,4 +184,55 @@ class Mysql(BaseSQLQueryRunner):
|
||||
return ssl_params
|
||||
|
||||
|
||||
class RDSMySQL(Mysql):
|
||||
@classmethod
|
||||
def name(cls):
|
||||
return "MySQL (Amazon RDS)"
|
||||
|
||||
@classmethod
|
||||
def type(cls):
|
||||
return 'rds_mysql'
|
||||
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'host': {
|
||||
'type': 'string',
|
||||
},
|
||||
'user': {
|
||||
'type': 'string'
|
||||
},
|
||||
'passwd': {
|
||||
'type': 'string',
|
||||
'title': 'Password'
|
||||
},
|
||||
'db': {
|
||||
'type': 'string',
|
||||
'title': 'Database name'
|
||||
},
|
||||
'port': {
|
||||
'type': 'number',
|
||||
'default': 3306,
|
||||
},
|
||||
'use_ssl': {
|
||||
'type': 'boolean',
|
||||
'title': 'Use SSL'
|
||||
}
|
||||
},
|
||||
"order": ['host', 'port', 'user', 'passwd', 'db'],
|
||||
'required': ['db', 'user', 'passwd', 'host'],
|
||||
'secret': ['passwd']
|
||||
}
|
||||
|
||||
def _get_ssl_parameters(self):
|
||||
if self.configuration.get('use_ssl'):
|
||||
ca_path = os.path.join(os.path.dirname(__file__), './files/rds-combined-ca-bundle.pem')
|
||||
return {'ca': ca_path}
|
||||
|
||||
return {}
|
||||
|
||||
|
||||
register(Mysql)
|
||||
register(RDSMySQL)
|
||||
|
||||
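The MySQL hunk hides the SSL fields behind a MYSQL_SHOW_SSL_SETTINGS environment variable and gives the RDS flavour a fixed CA bundle path. A small sketch of the toggle on its own; parse_boolean here is a local stand-in for redash.settings.parse_boolean:

# Sketch of the MYSQL_SHOW_SSL_SETTINGS toggle used by the MySQL runner above.
import json
import os

def parse_boolean(s):
    # Stand-in for redash.settings.parse_boolean: "true"/"false" -> True/False.
    return json.loads(s.lower())

show_ssl_settings = parse_boolean(os.environ.get('MYSQL_SHOW_SSL_SETTINGS', 'true'))

schema = {'type': 'object', 'properties': {'host': {'type': 'string'}}}
if show_ssl_settings:
    # The SSL fields only appear in the setup form when the toggle is on.
    schema['properties'].update({'use_ssl': {'type': 'boolean', 'title': 'Use SSL'}})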
@@ -158,15 +158,12 @@ class Oracle(BaseSQLQueryRunner):
                error = 'Query completed but it returned no data.'
                json_data = None
        except cx_Oracle.DatabaseError as err:
            logging.exception(err.message)
            error = "Query failed. {}.".format(err.message)
            error = u"Query failed. {}.".format(err.message)
            json_data = None
        except KeyboardInterrupt:
            connection.cancel()
            error = "Query cancelled by user."
            json_data = None
        except Exception as err:
            raise sys.exc_info()[1], None, sys.exc_info()[2]
        finally:
            connection.close()
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
import os
|
||||
import json
|
||||
import logging
|
||||
import psycopg2
|
||||
import select
|
||||
import sys
|
||||
|
||||
import psycopg2
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
@@ -71,6 +72,7 @@ class PostgreSQL(BaseSQLQueryRunner):
|
||||
"title": "Database Name"
|
||||
}
|
||||
},
|
||||
"order": ['host', 'port', 'user', 'password'],
|
||||
"required": ["dbname"],
|
||||
"secret": ["password"]
|
||||
}
|
||||
@@ -79,22 +81,7 @@ class PostgreSQL(BaseSQLQueryRunner):
|
||||
def type(cls):
|
||||
return "pg"
|
||||
|
||||
def __init__(self, configuration):
|
||||
super(PostgreSQL, self).__init__(configuration)
|
||||
|
||||
values = []
|
||||
for k, v in self.configuration.iteritems():
|
||||
values.append("{}={}".format(k, v))
|
||||
|
||||
self.connection_string = " ".join(values)
|
||||
|
||||
def _get_tables(self, schema):
|
||||
query = """
|
||||
SELECT table_schema, table_name, column_name
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema NOT IN ('pg_catalog', 'information_schema');
|
||||
"""
|
||||
|
||||
def _get_definitions(self, schema, query):
|
||||
results, error = self.run_query(query, None)
|
||||
|
||||
if error is not None:
|
||||
@@ -113,10 +100,45 @@ class PostgreSQL(BaseSQLQueryRunner):
|
||||
|
||||
schema[table_name]['columns'].append(row['column_name'])
|
||||
|
||||
def _get_tables(self, schema):
|
||||
query = """
|
||||
SELECT table_schema, table_name, column_name
|
||||
FROM information_schema.columns
|
||||
WHERE table_schema NOT IN ('pg_catalog', 'information_schema');
|
||||
"""
|
||||
|
||||
self._get_definitions(schema, query)
|
||||
|
||||
materialized_views_query = """
|
||||
SELECT ns.nspname as table_schema,
|
||||
mv.relname as table_name,
|
||||
atr.attname as column_name
|
||||
FROM pg_class mv
|
||||
JOIN pg_namespace ns ON mv.relnamespace = ns.oid
|
||||
JOIN pg_attribute atr
|
||||
ON atr.attrelid = mv.oid
|
||||
AND atr.attnum > 0
|
||||
AND NOT atr.attisdropped
|
||||
WHERE mv.relkind = 'm';
|
||||
"""
|
||||
|
||||
self._get_definitions(schema, materialized_views_query)
|
||||
|
||||
return schema.values()
|
||||
|
||||
def _get_connection(self):
|
||||
connection = psycopg2.connect(user=self.configuration.get('user'),
|
||||
password=self.configuration.get('password'),
|
||||
host=self.configuration.get('host'),
|
||||
port=self.configuration.get('port'),
|
||||
dbname=self.configuration.get('dbname'),
|
||||
sslmode=self.configuration.get('sslmode'),
|
||||
async=True)
|
||||
|
||||
return connection
|
||||
|
||||
def run_query(self, query, user):
|
||||
connection = psycopg2.connect(self.connection_string, async=True)
|
||||
connection = self._get_connection()
|
||||
_wait(connection, timeout=10)
|
||||
|
||||
cursor = connection.cursor()
|
||||
@@ -136,19 +158,15 @@ class PostgreSQL(BaseSQLQueryRunner):
|
||||
error = 'Query completed but it returned no data.'
|
||||
json_data = None
|
||||
except (select.error, OSError) as e:
|
||||
logging.exception(e)
|
||||
error = "Query interrupted. Please retry."
|
||||
json_data = None
|
||||
except psycopg2.DatabaseError as e:
|
||||
logging.exception(e)
|
||||
error = e.message
|
||||
json_data = None
|
||||
except (KeyboardInterrupt, InterruptException):
|
||||
connection.cancel()
|
||||
error = "Query cancelled by user."
|
||||
json_data = None
|
||||
except Exception as e:
|
||||
raise sys.exc_info()[1], None, sys.exc_info()[2]
|
||||
finally:
|
||||
connection.close()
|
||||
|
||||
@@ -160,8 +178,23 @@ class Redshift(PostgreSQL):
|
||||
def type(cls):
|
||||
return "redshift"
|
||||
|
||||
def _get_connection(self):
|
||||
sslrootcert_path = os.path.join(os.path.dirname(__file__), './files/redshift-ca-bundle.crt')
|
||||
|
||||
connection = psycopg2.connect(user=self.configuration.get('user'),
|
||||
password=self.configuration.get('password'),
|
||||
host=self.configuration.get('host'),
|
||||
port=self.configuration.get('port'),
|
||||
dbname=self.configuration.get('dbname'),
|
||||
sslmode='prefer',
|
||||
sslrootcert=sslrootcert_path,
|
||||
async=True)
|
||||
|
||||
return connection
|
||||
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
|
||||
return {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
@@ -182,6 +215,7 @@ class Redshift(PostgreSQL):
|
||||
"title": "Database Name"
|
||||
}
|
||||
},
|
||||
"order": ['host', 'port', 'user', 'password'],
|
||||
"required": ["dbname", "user", "password", "host", "port"],
|
||||
"secret": ["password"]
|
||||
}
|
||||
|
||||
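The PostgreSQL hunk above swaps the hand-built DSN string for an explicit psycopg2.connect(...) call so that Redshift can override _get_connection and pin its own CA bundle. A minimal sketch of the keyword form, following the diff's Python 2 code; the configuration values are placeholders, and later psycopg2 releases spell the last argument async_:

# Sketch of the keyword-style psycopg2 connection introduced above; configuration values are placeholders.
import psycopg2

def get_connection(configuration):
    return psycopg2.connect(user=configuration.get('user'),
                            password=configuration.get('password'),
                            host=configuration.get('host'),
                            port=configuration.get('port'),
                            dbname=configuration.get('dbname'),
                            sslmode=configuration.get('sslmode'),
                            async=True)  # non-blocking; callers poll it with _wait()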
@@ -118,7 +118,11 @@ class Presto(BaseQueryRunner):
            default_message = 'Unspecified DatabaseError: {0}'.format(db.message)
            message = db.message.get('failureInfo', {'message', None}).get('message')
            error = default_message if message is None else message
        except Exception, ex:
        except (KeyboardInterrupt, InterruptException) as e:
            cursor.cancel()
            error = "Query cancelled by user."
            json_data = None
        except Exception as ex:
            json_data = None
            error = ex.message
            if not isinstance(error, basestring):
|
||||
@@ -92,7 +92,7 @@ class Salesforce(BaseQueryRunner):
        sf = SimpleSalesforce(username=self.configuration['username'],
                              password=self.configuration['password'],
                              security_token=self.configuration['token'],
                              sandbox=self.configuration['sandbox'],
                              sandbox=self.configuration.get('sandbox', False),
                              client_id='Redash')
        return sf
|
||||
|
||||
@@ -83,7 +83,11 @@ class Sqlite(BaseSQLQueryRunner):
            error = "Query cancelled by user."
            json_data = None
        except Exception as e:
            raise sys.exc_info()[1], None, sys.exc_info()[2]
            # handle unicode error message
            err_class = sys.exc_info()[1].__class__
            err_args = [arg.decode('utf-8') for arg in sys.exc_info()[1].args]
            unicode_err = err_class(*err_args)
            raise unicode_err, None, sys.exc_info()[2]
        finally:
            connection.close()
        return json_data, error
|
||||
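The SQLite hunk rebuilds the caught exception with UTF-8-decoded arguments before re-raising, so non-ASCII error text survives the round trip. A Python 2 sketch of that decode step in isolation; the sample message is invented:

# Python 2 sketch of rebuilding an exception with unicode arguments, as in the hunk above.
import sys

try:
    raise ValueError('caf\xc3\xa9 not found')  # byte-string message from the driver
except Exception:
    err_class = sys.exc_info()[1].__class__
    err_args = [arg.decode('utf-8') for arg in sys.exc_info()[1].args]
    unicode_err = err_class(*err_args)         # ValueError(u'caf\xe9 not found')
    # the runner then re-raises it: raise unicode_err, None, sys.exc_info()[2]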
@@ -1,9 +1,9 @@
|
||||
import json
|
||||
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.query_runner import *
|
||||
|
||||
import logging
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
@@ -103,21 +103,17 @@ class TreasureData(BaseQueryRunner):
|
||||
|
||||
cursor = connection.cursor()
|
||||
|
||||
try:
|
||||
cursor.execute(query)
|
||||
columns_data = [(row[0], cursor.show_job()['hive_result_schema'][i][1]) for i,row in enumerate(cursor.description)]
|
||||
cursor.execute(query)
|
||||
columns_data = [(row[0], cursor.show_job()['hive_result_schema'][i][1]) for i,row in enumerate(cursor.description)]
|
||||
|
||||
columns = [{'name': col[0],
|
||||
'friendly_name': col[0],
|
||||
'type': TD_TYPES_MAPPING.get(col[1], None)} for col in columns_data]
|
||||
columns = [{'name': col[0],
|
||||
'friendly_name': col[0],
|
||||
'type': TD_TYPES_MAPPING.get(col[1], None)} for col in columns_data]
|
||||
|
||||
rows = [dict(zip(([c[0] for c in columns_data]), r)) for i, r in enumerate(cursor.fetchall())]
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
error = None
|
||||
except Exception, ex:
|
||||
json_data = None
|
||||
error = ex.message
|
||||
rows = [dict(zip(([c[0] for c in columns_data]), r)) for i, r in enumerate(cursor.fetchall())]
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
error = None
|
||||
|
||||
return json_data, error
|
||||
|
||||
|
||||
@@ -42,6 +42,13 @@ def parse_boolean(str):
|
||||
return json.loads(str.lower())
|
||||
|
||||
|
||||
def int_or_none(value):
|
||||
if value is None:
|
||||
return value
|
||||
|
||||
return int(value)
|
||||
|
||||
|
||||
def all_settings():
|
||||
from types import ModuleType
|
||||
|
||||
@@ -66,13 +73,16 @@ STATSD_USE_TAGS = parse_boolean(os.environ.get('REDASH_STATSD_USE_TAGS', "false"
|
||||
|
||||
# Connection settings for Redash's own database (where we store the queries, results, etc)
|
||||
SQLALCHEMY_DATABASE_URI = os.environ.get("REDASH_DATABASE_URL", os.environ.get('DATABASE_URL', "postgresql:///postgres"))
|
||||
SQLALCHEMY_MAX_OVERFLOW = int_or_none(os.environ.get("SQLALCHEMY_MAX_OVERFLOW"))
|
||||
SQLALCHEMY_POOL_SIZE = int_or_none(os.environ.get("SQLALCHEMY_POOL_SIZE"))
|
||||
SQLALCHEMY_DISABLE_POOL = parse_boolean(os.environ.get("SQLALCHEMY_DISABLE_POOL", "false"))
|
||||
SQLALCHEMY_TRACK_MODIFICATIONS = False
|
||||
SQLALCHEMY_ECHO = False
|
||||
|
||||
# Celery related settings
|
||||
CELERY_BROKER = os.environ.get("REDASH_CELERY_BROKER", REDIS_URL)
|
||||
CELERY_BACKEND = os.environ.get("REDASH_CELERY_BACKEND", CELERY_BROKER)
|
||||
CELERY_TASK_RESULT_EXPIRES = int(os.environ.get('REDASH_CELERY_TASK_RESULT_EXPIRES', 3600))
|
||||
CELERY_TASK_RESULT_EXPIRES = int(os.environ.get('REDASH_CELERY_TASK_RESULT_EXPIRES', 3600 * 4))
|
||||
|
||||
# The following enables periodic job (every 5 minutes) of removing unused query results.
|
||||
QUERY_RESULTS_CLEANUP_ENABLED = parse_boolean(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_ENABLED", "true"))
|
||||
@@ -129,11 +139,12 @@ REMOTE_USER_HEADER = os.environ.get("REDASH_REMOTE_USER_HEADER", "X-Forwarded-Re
|
||||
STATIC_ASSETS_PATHS = [fix_assets_path(path) for path in os.environ.get("REDASH_STATIC_ASSETS_PATH", "../client/dist/").split(',')]
|
||||
STATIC_ASSETS_PATHS.append(fix_assets_path('./static/'))
|
||||
|
||||
JOB_EXPIRY_TIME = int(os.environ.get("REDASH_JOB_EXPIRY_TIME", 3600 * 6))
|
||||
JOB_EXPIRY_TIME = int(os.environ.get("REDASH_JOB_EXPIRY_TIME", 3600 * 12))
|
||||
COOKIE_SECRET = os.environ.get("REDASH_COOKIE_SECRET", "c292a0a3aa32397cdb050e233733900f")
|
||||
SESSION_COOKIE_SECURE = parse_boolean(os.environ.get("REDASH_SESSION_COOKIE_SECURE") or str(ENFORCE_HTTPS))
|
||||
|
||||
LOG_LEVEL = os.environ.get("REDASH_LOG_LEVEL", "INFO")
|
||||
LOG_STDOUT = parse_boolean(os.environ.get('REDASH_LOG_STDOUT', 'false'))
|
||||
|
||||
# Mail settings:
|
||||
MAIL_SERVER = os.environ.get('REDASH_MAIL_SERVER', 'localhost')
|
||||
@@ -165,6 +176,7 @@ ACCESS_CONTROL_ALLOW_HEADERS = os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_
|
||||
|
||||
# Query Runners
|
||||
default_query_runners = [
|
||||
'redash.query_runner.athena',
|
||||
'redash.query_runner.big_query',
|
||||
'redash.query_runner.google_spreadsheets',
|
||||
'redash.query_runner.graphite',
|
||||
@@ -183,9 +195,9 @@ default_query_runners = [
|
||||
'redash.query_runner.sqlite',
|
||||
'redash.query_runner.dynamodb_sql',
|
||||
'redash.query_runner.mssql',
|
||||
'redash.query_runner.memsql_ds',
|
||||
'redash.query_runner.jql',
|
||||
'redash.query_runner.google_analytics',
|
||||
'redash.query_runner.snowflake',
|
||||
'redash.query_runner.axibase_tsd',
|
||||
'redash.query_runner.salesforce'
|
||||
]
|
||||
@@ -224,8 +236,9 @@ VERSION_CHECK = parse_boolean(os.environ.get("REDASH_VERSION_CHECK", "true"))
|
||||
FEATURE_DISABLE_REFRESH_QUERIES = parse_boolean(os.environ.get("REDASH_FEATURE_DISABLE_REFRESH_QUERIES", "false"))
|
||||
FEATURE_SHOW_QUERY_RESULTS_COUNT = parse_boolean(os.environ.get("REDASH_FEATURE_SHOW_QUERY_RESULTS_COUNT", "true"))
|
||||
FEATURE_SHOW_PERMISSIONS_CONTROL = parse_boolean(os.environ.get("REDASH_FEATURE_SHOW_PERMISSIONS_CONTROL", "false"))
|
||||
FEATURE_ALLOW_CUSTOM_JS_VISUALIZATIONS = parse_boolean(os.environ.get("REDASH_FEATURE_ALLOW_CUSTOM_JS_VISUALIZATIONS",
|
||||
"false"))
|
||||
FEATURE_ALLOW_CUSTOM_JS_VISUALIZATIONS = parse_boolean(os.environ.get("REDASH_FEATURE_ALLOW_CUSTOM_JS_VISUALIZATIONS", "false"))
|
||||
FEATURE_DUMB_RECENTS = parse_boolean(os.environ.get("REDASH_FEATURE_DUMB_RECENTS", "false"))
|
||||
FEATURE_AUTO_PUBLISH_NAMED_QUERIES = parse_boolean(os.environ.get("REDASH_FEATURE_AUTO_PUBLISH_NAMED_QUERIES", "true"))
|
||||
|
||||
# BigQuery
|
||||
BIGQUERY_HTTP_TIMEOUT = int(os.environ.get("REDASH_BIGQUERY_HTTP_TIMEOUT", "600"))
|
||||
@@ -242,6 +255,7 @@ COMMON_CLIENT_CONFIG = {
|
||||
'allowScriptsInUserInput': ALLOW_SCRIPTS_IN_USER_INPUT,
|
||||
'showPermissionsControl': FEATURE_SHOW_PERMISSIONS_CONTROL,
|
||||
'allowCustomJSVisualizations': FEATURE_ALLOW_CUSTOM_JS_VISUALIZATIONS,
|
||||
'autoPublishNamedQueries': FEATURE_AUTO_PUBLISH_NAMED_QUERIES,
|
||||
'dateFormat': DATE_FORMAT,
|
||||
'dateTimeFormat': "{0} HH:mm".format(DATE_FORMAT),
|
||||
'allowAllToEditQueries': FEATURE_ALLOW_ALL_TO_EDIT_QUERIES,
|
||||
|
||||
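settings.py gains an int_or_none helper so the new optional SQLAlchemy pool knobs stay None when their environment variables are unset instead of crashing int(). A short sketch of how it behaves:

# Sketch of the int_or_none helper added to redash/settings.py above.
import os

def int_or_none(value):
    if value is None:
        return value
    return int(value)

pool_size = int_or_none(os.environ.get("SQLALCHEMY_POOL_SIZE"))  # None when the variable is unset
overflow = int_or_none("5")                                      # 5 when a value is provided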
@@ -1,9 +1,10 @@
|
||||
import requests
|
||||
|
||||
from celery.utils.log import get_task_logger
|
||||
from flask_mail import Message
|
||||
from redash.worker import celery
|
||||
from redash import mail, models, settings
|
||||
from redash.version_check import run_version_check
|
||||
from redash import models, mail, settings
|
||||
from redash.worker import celery
|
||||
|
||||
logger = get_task_logger(__name__)
|
||||
|
||||
@@ -17,8 +18,8 @@ def record_event(raw_event):
|
||||
logger.debug("Forwarding event to: %s", hook)
|
||||
try:
|
||||
data = {
|
||||
"schema": "iglu:io.redash.webhooks/event/jsonschema/1-0-0",
|
||||
"data": event.to_dict()
|
||||
"schema": "iglu:io.redash.webhooks/event/jsonschema/1-0-0",
|
||||
"data": event.to_dict()
|
||||
}
|
||||
response = requests.post(hook, json=data)
|
||||
if response.status_code != 200:
|
||||
@@ -47,15 +48,12 @@ def subscribe(form):
|
||||
|
||||
@celery.task(name="redash.tasks.send_mail")
|
||||
def send_mail(to, subject, html, text):
|
||||
from redash.wsgi import app
|
||||
|
||||
try:
|
||||
with app.app_context():
|
||||
message = Message(recipients=to,
|
||||
subject=subject,
|
||||
html=html,
|
||||
body=text)
|
||||
message = Message(recipients=to,
|
||||
subject=subject,
|
||||
html=html,
|
||||
body=text)
|
||||
|
||||
mail.send(message)
|
||||
mail.send(message)
|
||||
except Exception:
|
||||
logger.exception('Failed sending message: %s', message.subject)
|
||||
|
||||
@@ -1,14 +1,19 @@
|
||||
import json
|
||||
import time
|
||||
import logging
|
||||
import signal
|
||||
import time
|
||||
|
||||
import pystache
|
||||
import redis
|
||||
|
||||
from celery.exceptions import SoftTimeLimitExceeded
|
||||
from celery.result import AsyncResult
|
||||
from celery.utils.log import get_task_logger
|
||||
from redash import redis_connection, models, statsd_client, settings, utils
|
||||
from redash import models, redis_connection, settings, statsd_client, utils
|
||||
from redash.query_runner import InterruptException
|
||||
from redash.utils import gen_query_hash
|
||||
from redash.worker import celery
|
||||
from redash.query_runner import InterruptException
|
||||
|
||||
from .alerts import check_alerts_for_query
|
||||
|
||||
logger = get_task_logger(__name__)
|
||||
@@ -114,16 +119,15 @@ class QueryTaskTracker(object):
|
||||
return tasks
|
||||
|
||||
@classmethod
|
||||
def prune(cls, list_name, keep_count):
|
||||
def prune(cls, list_name, keep_count, max_keys=100):
|
||||
count = redis_connection.zcard(list_name)
|
||||
if count <= keep_count:
|
||||
return 0
|
||||
|
||||
remove_count = count - keep_count
|
||||
remove_count = min(max_keys, count - keep_count)
|
||||
keys = redis_connection.zrange(list_name, 0, remove_count - 1)
|
||||
redis_connection.delete(*keys)
|
||||
redis_connection.zremrangebyrank(list_name, 0, remove_count - 1)
|
||||
|
||||
return remove_count
|
||||
|
||||
def __getattr__(self, item):
|
||||
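QueryTaskTracker.prune now trims at most max_keys entries per call, and cleanup_tasks (further down) keeps calling it until a call removes nothing. A hedged sketch of the same bounded-batch pruning against a redis sorted set; the connection and list name are placeholders:

# Sketch of bounded pruning of a redis sorted set, mirroring QueryTaskTracker.prune above.
import redis

redis_connection = redis.StrictRedis()  # placeholder connection

def prune(list_name, keep_count, max_keys=100):
    count = redis_connection.zcard(list_name)
    if count <= keep_count:
        return 0
    # Never remove more than max_keys per call, so a huge backlog is trimmed in small batches.
    remove_count = min(max_keys, count - keep_count)
    keys = redis_connection.zrange(list_name, 0, remove_count - 1)
    if keys:
        redis_connection.delete(*keys)  # drop the tracker hashes themselves
    redis_connection.zremrangebyrank(list_name, 0, remove_count - 1)
    return remove_count

# The caller loops until a call reports that nothing was left to remove:
removed = 1
while removed > 0:
    removed = prune('query_task_trackers:done', 1000)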
@@ -232,9 +236,7 @@ def enqueue_query(query, data_source, user_id, scheduled_query=None, metadata={}
|
||||
queue_name = data_source.queue_name
|
||||
scheduled_query_id = None
|
||||
|
||||
result = execute_query.apply_async(args=(
|
||||
query, data_source.id, metadata, user_id,
|
||||
scheduled_query_id),
|
||||
result = execute_query.apply_async(args=(query, data_source.id, metadata, user_id, scheduled_query_id),
|
||||
queue=queue_name)
|
||||
job = QueryTask(async_result=result)
|
||||
tracker = QueryTaskTracker.create(
|
||||
@@ -265,17 +267,27 @@ def refresh_queries():
|
||||
|
||||
with statsd_client.timer('manager.outdated_queries_lookup'):
|
||||
for query in models.Query.outdated_queries():
|
||||
if settings.FEATURE_DISABLE_REFRESH_QUERIES:
|
||||
if settings.FEATURE_DISABLE_REFRESH_QUERIES:
|
||||
logging.info("Disabled refresh queries.")
|
||||
elif query.data_source is None:
|
||||
logging.info("Skipping refresh of %s because the datasource is none.", query.id)
|
||||
elif query.data_source.paused:
|
||||
logging.info("Skipping refresh of %s because datasource - %s is paused (%s).", query.id, query.data_source.name, query.data_source.pause_reason)
|
||||
else:
|
||||
enqueue_query(query.query_text, query.data_source, query.user_id,
|
||||
# if query.options and 'parameters' in query.options and len(query.options['parameters']) > 0:
|
||||
if query.options and len(query.options.get('parameters', [])) > 0:
|
||||
query_params = {p['name']: p['value']
|
||||
for p in query.options['parameters']}
|
||||
query_text = pystache.render(query.query_text, query_params)
|
||||
else:
|
||||
query_text = query.query_text
|
||||
|
||||
enqueue_query(query_text, query.data_source, query.user_id,
|
||||
scheduled_query=query,
|
||||
metadata={'Query ID': query.id, 'Username': 'Scheduled'})
|
||||
|
||||
query_ids.append(query.id)
|
||||
outdated_queries_count += 1
|
||||
query_ids.append(query.id)
|
||||
outdated_queries_count += 1
|
||||
|
||||
statsd_client.gauge('manager.outdated_queries', outdated_queries_count)
|
||||
|
||||
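refresh_queries now renders a scheduled query's default parameter values into its text before enqueueing it, so parameterized queries can be refreshed on schedule. A small sketch of that substitution with pystache; the query text and parameters are invented:

# Sketch of rendering default parameter values into query text, as refresh_queries does above.
import pystache

options = {'parameters': [{'name': 'country', 'value': 'US'},
                          {'name': 'limit', 'value': 100}]}
query_text = "SELECT * FROM visits WHERE country = '{{country}}' LIMIT {{limit}}"

if options and len(options.get('parameters', [])) > 0:
    query_params = {p['name']: p['value'] for p in options['parameters']}
    query_text = pystache.render(query_text, query_params)

print(query_text)  # SELECT * FROM visits WHERE country = 'US' LIMIT 100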
@@ -299,14 +311,6 @@ def cleanup_tasks():
|
||||
for tracker in in_progress:
|
||||
result = AsyncResult(tracker.task_id)
|
||||
|
||||
# If the AsyncResult status is PENDING it means there is no celery task object for this tracker, and we can
|
||||
# mark it as "dead":
|
||||
if result.status == 'PENDING':
|
||||
logging.info("In progress tracker for %s is no longer enqueued, cancelling (task: %s).",
|
||||
tracker.query_hash, tracker.task_id)
|
||||
_unlock(tracker.query_hash, tracker.data_source_id)
|
||||
tracker.update(state='cancelled')
|
||||
|
||||
if result.ready():
|
||||
logging.info("in progress tracker %s finished", tracker.query_hash)
|
||||
_unlock(tracker.query_hash, tracker.data_source_id)
|
||||
@@ -322,7 +326,9 @@ def cleanup_tasks():
|
||||
tracker.update(state='finished')
|
||||
|
||||
# Maintain constant size of the finished tasks list:
|
||||
QueryTaskTracker.prune(QueryTaskTracker.DONE_LIST, 1000)
|
||||
removed = 1000
|
||||
while removed > 0:
|
||||
removed = QueryTaskTracker.prune(QueryTaskTracker.DONE_LIST, 1000)
|
||||
|
||||
|
||||
@celery.task(name="redash.tasks.cleanup_query_results")
|
||||
@@ -346,14 +352,30 @@ def cleanup_query_results():
|
||||
logger.info("Deleted %d unused query results.", deleted_count)
|
||||
|
||||
|
||||
@celery.task(name="redash.tasks.refresh_schema", time_limit=90, soft_time_limit=60)
|
||||
def refresh_schema(data_source_id):
|
||||
ds = models.DataSource.get_by_id(data_source_id)
|
||||
logger.info(u"task=refresh_schema state=start ds_id=%s", ds.id)
|
||||
start_time = time.time()
|
||||
try:
|
||||
ds.get_schema(refresh=True)
|
||||
logger.info(u"task=refresh_schema state=finished ds_id=%s runtime=%.2f", ds.id, time.time() - start_time)
|
||||
statsd_client.incr('refresh_schema.success')
|
||||
except SoftTimeLimitExceeded:
|
||||
logger.info(u"task=refresh_schema state=timeout ds_id=%s runtime=%.2f", ds.id, time.time() - start_time)
|
||||
statsd_client.incr('refresh_schema.timeout')
|
||||
except Exception:
|
||||
logger.warning(u"Failed refreshing schema for the data source: %s", ds.name, exc_info=1)
|
||||
statsd_client.incr('refresh_schema.error')
|
||||
logger.info(u"task=refresh_schema state=failed ds_id=%s runtime=%.2f", ds.id, time.time() - start_time)
|
||||
|
||||
|
||||
@celery.task(name="redash.tasks.refresh_schemas")
|
||||
def refresh_schemas():
|
||||
"""
|
||||
Refreshes the data sources schemas.
|
||||
"""
|
||||
|
||||
blacklist = [int(ds_id) for ds_id in redis_connection.smembers('data_sources:schema:blacklist') if ds_id]
|
||||
|
||||
global_start_time = time.time()
|
||||
|
||||
logger.info(u"task=refresh_schemas state=start")
|
||||
@@ -364,14 +386,7 @@ def refresh_schemas():
|
||||
elif ds.id in blacklist:
|
||||
logger.info(u"task=refresh_schema state=skip ds_id=%s reason=blacklist", ds.id)
|
||||
else:
|
||||
logger.info(u"task=refresh_schema state=start ds_id=%s", ds.id)
|
||||
start_time = time.time()
|
||||
try:
|
||||
ds.get_schema(refresh=True)
|
||||
logger.info(u"task=refresh_schema state=finished ds_id=%s runtime=%.2f", ds.id, time.time() - start_time)
|
||||
except Exception:
|
||||
logger.exception(u"Failed refreshing schema for the data source: %s", ds.name)
|
||||
logger.info(u"task=refresh_schema state=failed ds_id=%s runtime=%.2f", ds.id, time.time() - start_time)
|
||||
refresh_schema.apply_async(args=(ds.id,), queue="schemas")
|
||||
|
||||
logger.info(u"task=refresh_schemas state=finish total_runtime=%.2f", time.time() - global_start_time)
|
||||
|
||||
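Schema refresh is split out into its own refresh_schema task with a 90-second hard limit and a 60-second soft limit, fanned out per data source onto a dedicated "schemas" queue. A hedged sketch of that task shape; the celery app, broker URL and load_schema helper are placeholders, not Redash's own objects:

# Sketch of a time-limited schema refresh task in the spirit of the hunk above; app and helper are placeholders.
import time

from celery import Celery
from celery.exceptions import SoftTimeLimitExceeded

celery = Celery('sketch', broker='redis://localhost:6379/0')  # placeholder broker

def load_schema(data_source_id):
    pass  # stand-in for ds.get_schema(refresh=True)

@celery.task(name="sketch.refresh_schema", time_limit=90, soft_time_limit=60)
def refresh_schema(data_source_id):
    start_time = time.time()
    try:
        load_schema(data_source_id)
    except SoftTimeLimitExceeded:
        # Raised inside the task once the 60s soft limit passes; log it and give up cleanly.
        print("refresh timed out after %.2fs" % (time.time() - start_time))

# The periodic refresh_schemas job then dispatches one task per data source:
# refresh_schema.apply_async(args=(ds_id,), queue="schemas")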
@@ -406,6 +421,8 @@ class QueryExecutor(object):
|
||||
self.query_hash,
|
||||
self.data_source_id,
|
||||
False, metadata)
|
||||
if self.tracker.scheduled:
|
||||
models.scheduled_queries_executions.update(self.tracker.query_id)
|
||||
|
||||
def run(self):
|
||||
signal.signal(signal.SIGINT, signal_handler)
|
||||
|
||||
@@ -9,14 +9,16 @@ import re
|
||||
import hashlib
|
||||
import pytz
|
||||
import pystache
|
||||
import os
|
||||
|
||||
from funcy import distinct
|
||||
from funcy import distinct, select_values
|
||||
from sqlalchemy.orm.query import Query
|
||||
|
||||
from .human_time import parse_human_time
|
||||
from redash import settings
|
||||
|
||||
COMMENTS_REGEX = re.compile("/\*.*?\*/")
|
||||
WRITER_ENCODING = os.environ.get('REDASH_CSV_WRITER_ENCODING', 'utf-8')
|
||||
|
||||
|
||||
def utcnow():
|
||||
@@ -28,6 +30,15 @@ def utcnow():
|
||||
return datetime.datetime.now(pytz.utc)
|
||||
|
||||
|
||||
def dt_from_timestamp(timestamp, tz_aware=True):
|
||||
timestamp = datetime.datetime.utcfromtimestamp(float(timestamp))
|
||||
|
||||
if tz_aware:
|
||||
timestamp = timestamp.replace(tzinfo=pytz.utc)
|
||||
|
||||
return timestamp
|
||||
|
||||
|
||||
def slugify(s):
|
||||
return re.sub('[^a-z0-9_\-]+', '-', s.lower())
|
||||
|
||||
@@ -93,7 +104,7 @@ class UnicodeWriter:
|
||||
which is encoded in the given encoding.
|
||||
"""
|
||||
|
||||
def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
|
||||
def __init__(self, f, dialect=csv.excel, encoding=WRITER_ENCODING, **kwds):
|
||||
# Redirect output to a queue
|
||||
self.queue = cStringIO.StringIO()
|
||||
self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
|
||||
@@ -102,7 +113,7 @@ class UnicodeWriter:
|
||||
|
||||
def _encode_utf8(self, val):
|
||||
if isinstance(val, (unicode, str)):
|
||||
return val.encode('utf-8')
|
||||
return val.encode(WRITER_ENCODING)
|
||||
|
||||
return val
|
||||
|
||||
@@ -110,7 +121,7 @@ class UnicodeWriter:
|
||||
self.writer.writerow([self._encode_utf8(s) for s in row])
|
||||
# Fetch UTF-8 output from the queue ...
|
||||
data = self.queue.getvalue()
|
||||
data = data.decode("utf-8")
|
||||
data = data.decode(WRITER_ENCODING)
|
||||
# ... and reencode it into the target encoding
|
||||
data = self.encoder.encode(data)
|
||||
# write to the target stream
|
||||
@@ -158,3 +169,5 @@ def base_url(org):
|
||||
return settings.HOST
|
||||
|
||||
|
||||
def filter_none(d):
|
||||
return select_values(lambda v: v is not None, d)
|
||||
|
||||
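utils gains filter_none, built on funcy's select_values, to drop None-valued keys from a dict before it is passed on (handy for optional connection arguments). A short sketch:

# Sketch of the filter_none helper added to redash/utils above.
from funcy import select_values

def filter_none(d):
    return select_values(lambda v: v is not None, d)

print(filter_none({'host': 'db.example.com', 'port': None, 'user': 'redash'}))
# {'host': 'db.example.com', 'user': 'redash'}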
@@ -1,15 +1,16 @@
|
||||
from __future__ import absolute_import
|
||||
|
||||
from random import randint
|
||||
from celery import Celery
|
||||
from flask import current_app
|
||||
from datetime import timedelta
|
||||
from random import randint
|
||||
|
||||
from flask import current_app
|
||||
|
||||
from celery import Celery
|
||||
from celery.schedules import crontab
|
||||
from celery.signals import worker_process_init
|
||||
from redash import settings, __version__, create_app
|
||||
from redash import __version__, create_app, settings
|
||||
from redash.metrics import celery as celery_metrics
|
||||
|
||||
|
||||
celery = Celery('redash',
|
||||
broker=settings.CELERY_BROKER,
|
||||
include='redash.tasks')
|
||||
@@ -52,7 +53,7 @@ if settings.SENTRY_DSN:
|
||||
from raven import Client
|
||||
from raven.contrib.celery import register_signal
|
||||
|
||||
client = Client(settings.SENTRY_DSN, release=__version__)
|
||||
client = Client(settings.SENTRY_DSN, release=__version__, install_logging_hook=False)
|
||||
register_signal(client)
|
||||
|
||||
|
||||
@@ -75,4 +76,3 @@ celery.Task = ContextTask
|
||||
def init_celery_flask_app(**kwargs):
|
||||
app = create_app()
|
||||
app.app_context().push()
|
||||
|
||||
|
||||
@@ -36,7 +36,7 @@ RestrictedPython==3.6.0
pysaml2==2.4.0
pycrypto==2.6.1
funcy==1.7.1
raven==5.27.1
raven==6.0.0
semver==2.2.1
xlsxwriter==0.9.3
pystache==0.5.4
Some files were not shown because too many files have changed in this diff.