Compare commits


119 Commits

Author SHA1 Message Date
Arik Fraimovich
5e73da1df4 Fix link to tarball (it changes on every build). 2015-08-07 19:34:00 +03:00
Arik Fraimovich
244d25b12c Fix #524: use v0.7.1 in bootstrap.sh. 2015-08-07 19:18:18 +03:00
Arik Fraimovich
2dcf676cf2 Fix #525: make sure we're in right path for requirements_all_ds.txt 2015-08-07 19:14:46 +03:00
Arik Fraimovich
e07af676a5 Fix #526: install latest setuptools in bootstrap.sh. 2015-08-07 19:12:17 +03:00
Arik Fraimovich
3dea6302de Merge pull request #523 from EverythingMe/feature/new_home
Fix: global_recent isn't set if user has enough recents
2015-08-06 16:58:44 +03:00
Arik Fraimovich
b1ceb60360 Fix: global_recent isn't set if user has enough recents 2015-08-06 16:58:15 +03:00
Arik Fraimovich
1ef94b77e9 Merge pull request #522 from EverythingMe/feature/new_home
Feature: "personalized" homepage with recent queries and dashboards
2015-08-06 16:48:29 +03:00
Arik Fraimovich
292d31e490 Improve /personal and use it as default home 2015-08-06 16:42:29 +03:00
Arik Fraimovich
6f0ac1e730 Merge pull request #521 from kataring/update-doc
Update docs about data sources for Presto
2015-08-06 10:16:39 +03:00
Noriaki Katayama
9f82e5850d Update docs about data sources for Presto 2015-08-06 16:02:18 +09:00
Arik Fraimovich
4a18fa07ec Merge pull request #518 from massaru129/feature/bootstrap_amazon_linux
Other: created bootstrap script for amazon linux
2015-08-04 16:17:18 +03:00
Arik Fraimovich
05d1886467 Merge pull request #520 from EverythingMe/docs-datasources
Docs: update documentation about data sources
2015-08-02 10:49:57 +03:00
Arik Fraimovich
6e45706825 Update docs about data sources 2015-08-02 10:15:11 +03:00
Arik Fraimovich
464402a233 Merge pull request #519 from EverythingMe/feature/disable_collaborative_editing
Feature: ability to disable everyone's ability to edit anyone's query
2015-08-01 16:41:08 +03:00
Arik Fraimovich
3a56b9ded7 Don't set last_modified_by if only changing ref to last result 2015-08-01 16:36:56 +03:00
Arik Fraimovich
142295671b Feature flag to control if everyone can edit queries 2015-08-01 16:30:03 +03:00
masaru
0e46a24112 fixed config file paths 2015-07-31 19:21:36 +09:00
masaru
a3cb698be0 fixed file paths
the fixed paths will work if my original files are uploaded
2015-07-31 14:34:25 +09:00
masaru
08730ad113 created bootstrap script for amazon linux 2015-07-31 14:05:48 +09:00
Arik Fraimovich
d155f166d7 Merge pull request #517 from EverythingMe/cleanup
Fix: URL query runner was failing without base URL
2015-07-30 21:27:00 +03:00
Arik Fraimovich
ca95e9252f Fix: URL query runner was failing without base URL 2015-07-30 21:26:14 +03:00
Arik Fraimovich
d078e80e79 Grammar fix. 2015-07-30 14:50:52 +03:00
Arik Fraimovich
8ad1d2672c Clarify about -i. 2015-07-30 14:49:55 +03:00
Arik Fraimovich
735130efc9 Merge pull request #510 from rghose/patch-1
for AWS-based and password-less logins to boxes
2015-07-30 14:46:44 +03:00
Arik Fraimovich
7e6b7398a4 Remove confusing exception logging 2015-07-30 14:36:27 +03:00
Arik Fraimovich
edf8f5b1fd Fix tests post field name change in #515 2015-07-30 14:36:01 +03:00
Arik Fraimovich
08c09d896a Merge pull request #516 from EverythingMe/cleanup
Fix: multi-filter was broken in newer version of angular-ui-select
2015-07-30 13:36:50 +03:00
Arik Fraimovich
58403634cf Fix: multi-filter was broken in newer version of angular-ui-select 2015-07-30 13:35:50 +03:00
Arik Fraimovich
2eb171e40d Merge pull request #515 from moyomot/fix/only_email_authentication
Fix: change wording on login screen "username or email" -> "email"
2015-07-30 12:25:37 +03:00
moyomot
3753f58980 authentication is allowed by e-mail only. 2015-07-30 17:56:57 +09:00
Arik Fraimovich
fe1cc78ab3 Merge pull request #514 from alexanderlz/master
Feature: Support Hive as datasource
2015-07-30 08:48:26 +03:00
Alexander Leibzon
c140668648 minor fixes. working version of Hive datasource. 2015-07-29 23:39:39 +03:00
Arik Fraimovich
41ca1321cf Merge pull request #513 from EverythingMe/cleanup
Cleanup: remove select2 and use ui-select.
2015-07-29 09:36:13 +03:00
Alexander Leibzon
d88340158a add Hive as datasource 2015-07-29 02:01:22 +03:00
Arik Fraimovich
52f335edd5 Cleanup: remove select2 and use ui-select. 2015-07-28 10:03:56 +03:00
Arik Fraimovich
22200ec7b2 Merge pull request #511 from stanhu/add-primary-key-to-flask-admin
Add the primary key to Flask admin to make it possible to lookup queries
2015-07-27 23:40:15 +03:00
Arik Fraimovich
e458ed03c8 Bump version. 2015-07-27 23:38:10 +03:00
Arik Fraimovich
e9f1e3a189 Merge pull request #512 from johnkearney/tidier-requirements
Move datasource requirements from bootstrap to own requirements file
2015-07-27 23:32:44 +03:00
John Kearney
d202570b0d Move datasource requirements from bootstrap to own requirements file 2015-07-27 11:46:53 -07:00
Stan Hu
9b6edde5c8 Add the primary key to Flask admin to make it possible to lookup queries 2015-07-27 10:24:34 -07:00
Rahul Ghose
975c92d40d for AWS-based and password-less logins to boxes 2015-07-27 13:23:38 +05:30
Arik Fraimovich
27639f83c7 Update index.rst 2015-07-26 22:38:37 +03:00
Arik Fraimovich
c08e6791df Remove version info from conf.py -- rtd doesn't use it 2015-07-26 15:54:56 +03:00
Arik Fraimovich
5c7158b6ae Update vagrant instructions 2015-07-26 15:46:26 +03:00
Arik Fraimovich
b886067a9f Merge pull request #509 from EverythingMe/docs
Moving documentation to ReadTheDocs
2015-07-26 15:25:29 +03:00
Arik Fraimovich
2421de8819 Add Sphinx based documentation to the project. 2015-07-26 15:24:16 +03:00
Arik Fraimovich
9e87e42400 Merge pull request #508 from EverythingMe/cleanup
Some cleanup (updated settings and bootstrap script)
2015-07-26 12:30:31 +03:00
Arik Fraimovich
8c750826e3 Install dependencies for new sources 2015-07-26 11:49:23 +03:00
Arik Fraimovich
b14b6d1773 Give permission to read user(id, name) to redash_reader 2015-07-26 11:45:58 +03:00
Arik Fraimovich
76cb73f4ce Add description to the server param of elastic search 2015-07-26 11:45:25 +03:00
Arik Fraimovich
8854a45598 Update to settings:
1. Enable API key auth by default.
2. Enable query results cleanup by default.
3. Add ElasticSearch to the enabled query runners list.
2015-07-26 11:44:11 +03:00
Arik Fraimovich
228b8c7614 Merge pull request #507 from EverythingMe/cleanup
Fix: when editing alerts show correct column
2015-07-26 11:39:55 +03:00
Arik Fraimovich
5de79213ae Fix: when editing alerts show correct column 2015-07-26 11:39:22 +03:00
Arik Fraimovich
c7d30c8b87 Merge pull request #498 from EverythingMe/feature/ds_admin
Feature: datasources web admin (closes #193)
2015-07-26 11:35:52 +03:00
Arik Fraimovich
076710f0c6 Bump version 2015-07-26 10:24:09 +03:00
Arik Fraimovich
a9172dac00 Fix: if connection fails connection isn't set 2015-07-26 10:24:08 +03:00
Arik Fraimovich
accca51f39 Feature: web interface to edit datasources
* Web interface to add and delete data sources, without the need to ssh
into the server.
* Ability to safely delete datasources -- query results from the data source
are deleted, while queries get assigned a null datasource.
* Updated the BigQuery datasource to use the JSON key file from Google Developer
console. Also both BigQuery and the Google Spreadsheets datasource no longer store
their key on the filesystem, but rather in the DB.
* Minor updates to the Flask Admin.
2015-07-26 10:24:08 +03:00
Arik Fraimovich
5f5774d01b Merge pull request #506 from EverythingMe/small_fixes
Fix: makes sure the totals series is sorted
2015-07-23 15:03:36 +03:00
Arik Fraimovich
00e99d858c Fix: makes sure the totals series is sorted 2015-07-23 15:03:18 +03:00
Arik Fraimovich
da56dc883f Merge pull request #505 from EverythingMe/small_fixes
Fix: Update URL after creating an alert
2015-07-22 20:38:52 +03:00
Arik Fraimovich
02582cab65 Update URL after creating an alert 2015-07-22 20:38:22 +03:00
Arik Fraimovich
bff4d31ada Read HOST from env. 2015-07-22 18:19:27 +03:00
Arik Fraimovich
83554207e1 Merge pull request #504 from EverythingMe/fix/python_result_set
Fix: cohort was wrong if values were not sorted
2015-07-22 18:06:41 +03:00
Arik Fraimovich
1c0c3e0b93 Fix: cohort was wrong if values were not sorted 2015-07-22 18:05:54 +03:00
Arik Fraimovich
5feb563dc9 Merge pull request #503 from EverythingMe/fix/python_result_set
Fix: if you change the result object, python runner wouldn't return any results
2015-07-22 18:03:29 +03:00
Arik Fraimovich
07b88d0b53 Fix: log results were lost 2015-07-22 17:56:49 +03:00
Arik Fraimovich
21f33462d5 Another try at removing optipng from build 2015-07-22 17:43:05 +03:00
Arik Fraimovich
6a9d95f1ac Fix: if you change the result object, python runner wouldn't return any results 2015-07-22 17:36:46 +03:00
Arik Fraimovich
36b80fc4ef Remove optipng from build artifacts 2015-07-22 17:21:50 +03:00
Arik Fraimovich
d89dd2c9af Merge pull request #502 from EverythingMe/feature/alerts
Feature: alerts on query results
2015-07-22 17:14:29 +03:00
Arik Fraimovich
658af526c7 Add alerts to menu 2015-07-22 17:05:31 +03:00
Arik Fraimovich
3d859ec5f3 Feature: alerts for query results.
This is a basic implementation of the alerts feature, where you can
define a simple rule on the last query result to send an alert.

As part of the implementation, Flask-Mail was added to the project
to send emails. It should be useful for making re:dash more "self aware"
(notifying users about potential issues, when queries are done executing
and more).
2015-07-22 17:05:31 +03:00
Arik Fraimovich
fdff799d23 ng_smart_table: support for inline templates 2015-07-22 17:05:09 +03:00
Arik Fraimovich
5fc0b88b23 ng_smart_table: support for nested objects 2015-07-22 17:05:09 +03:00
Alexander Leibzon
63de247478 add datasources 2015-07-22 14:55:26 +03:00
Arik Fraimovich
5d3caac1b5 Merge pull request #499 from alexanderlz/master
Feature: Support Impala as DataSource
2015-07-22 14:23:55 +03:00
Alexander Leibzon
e4b9d23dfe minor fixes 2015-07-22 14:21:40 +03:00
Alexander Leibzon
890f59a4c9 add get_schema ability to impala 2015-07-22 13:54:00 +03:00
Arik Fraimovich
d4a18ba611 Merge pull request #501 from johnkearney/all_pg_queries_with_no_results
All pg queries with no results
2015-07-21 06:47:33 +03:00
John Kearney
c4502b2925 Add a more user-friendly message when redshift returns no rows 2015-07-20 14:17:51 -07:00
Alexander Leibzon
1d5efdd93f fixes in accordance with pull req spec 2015-07-20 23:21:02 +03:00
John Kearney
2b95da102e Remove unused exports for env examples 2015-07-20 12:36:10 -07:00
Arik Fraimovich
d512cd0c1d Merge pull request #500 from EverythingMe/feature/login_events
Feature: add event for users logging in
2015-07-20 18:30:57 +03:00
Arik Fraimovich
3dc9c84a98 Feature: add event for users logging in 2015-07-20 18:26:45 +03:00
Alexander Leibzon
4a33b987b8 datasource rename 2015-07-20 02:07:17 +03:00
Alexander Leibzon
f7041977d5 impala datasource fixes 2015-07-20 02:06:15 +03:00
Alexander Leibzon
83bc38579e impala data source 2015-07-19 01:44:48 +03:00
Arik Fraimovich
4b8a94e795 Merge pull request #495 from EverythingMe/fix/bq_param
Fix: Update BigQuery configuration parameter name to avoid confusion.
2015-07-15 19:49:56 +03:00
Arik Fraimovich
406010a7a6 Fix: Update BigQuery configuration parameter name to avoid confusion. 2015-07-15 19:45:55 +03:00
Arik Fraimovich
4f11f28efa Merge pull request #494 from erans/master
MongoDB query runner: cleanup
2015-07-15 18:24:28 +03:00
Eran Sandler
c919602b20 cleanups and shit. 2015-07-15 18:17:55 +03:00
Arik Fraimovich
7702b05635 Merge pull request #493 from erans/master
Fix: a generic way to parse all the input JSON and make sure we replace ISODate to Python date times.
2015-07-15 17:50:21 +03:00
Eran Sandler
5fc7c499a3 stupid auto merge. 2015-07-15 17:48:03 +03:00
Eran Sandler
628240906e Fix: a generic way to parse all the input JSON and make sure we replace ISODate to Python date times. 2015-07-15 17:44:33 +03:00
Arik Fraimovich
41b9b21a20 Merge pull request #492 from erans/master
Fix: MongoDB: Date parsing and dates in aggregation $match
2015-07-15 17:03:17 +03:00
Eran Sandler
dbd3f754ba - Fixed parsing dates in the format of YYYY-MM-DDTHH:mm
- Added handling dates in the aggregate $match
2015-07-15 16:58:10 +03:00
Arik Fraimovich
4ef3c27fe6 Merge pull request #489 from kataring/suport-presto
Feature: Support Presto
2015-07-15 16:50:51 +03:00
Arik Fraimovich
58a005c71b Merge pull request #484 from alexanderlz/master
Feature: Google Spreadsheets support (alpha)
2015-07-14 12:14:57 +03:00
Alexander Leibzon
9d7ff31178 replace camelcase vars 2015-07-14 12:13:04 +03:00
Noriaki Katayama
93d6b01fbf add bigint 2015-07-14 16:59:25 +09:00
Arik Fraimovich
7d57f9d0f1 Merge pull request #488 from EverythingMe/fix/mongo-support-sandbox
Fix: Drop database name check in MongoDB queries  to support sandboxed environments
2015-07-14 08:56:47 +03:00
Arik Fraimovich
e80f470255 Mongo: Drop database name check to support sandboxed environments 2015-07-14 08:51:31 +03:00
Arik Fraimovich
5636cec0eb Merge pull request #487 from erans/master
Fix: Support newer as well as older PyMongo versions
2015-07-14 08:23:58 +03:00
Eran Sandler
912bbc1a4a Added backwards compatibility mode with older versions of PyMongo.
It appears that older versions would return a dictionary from an aggregate operation that had the cursor inside the "result" key.
Newer versions return a new type of cursor called CommandCursor.
2015-07-14 08:19:25 +03:00
Arik Fraimovich
d3bb58167e Merge pull request #486 from EverythingMe/fix/mysql-noerror-onconnect
Fix: no error when failing to connect to a MySQL data source
2015-07-13 19:04:16 +03:00
Arik Fraimovich
2911fa8af7 Bump version. 2015-07-13 18:31:31 +03:00
Arik Fraimovich
4503c6af66 Move the MySQL connect to the try/except block 2015-07-13 18:31:00 +03:00
Arik Fraimovich
7fc2d5ee0b Update bootstrap.sh to use 0.6.3. 2015-07-13 12:00:28 +03:00
Alexander Leibzon
b850da52a2 remove .nojekyll, naming convention 2015-07-13 09:56:11 +03:00
Alexander Leibzon
079fbf33f4 don't execute query if the query string is empty (when changing datasource) 2015-07-12 22:53:08 +03:00
Alexander Leibzon
43edb009d6 safer handling of worksheet num 2015-07-12 01:28:52 +03:00
Alexander Leibzon
81978c5049 jekyll disable 2015-07-11 22:51:31 +03:00
Alexander Leibzon
239813e195 modify google spreadsheed datasource params, only the credentials file needed 2015-07-11 22:43:07 +03:00
Alexander Leibzon
28dd571a03 google spreadsheets working version 2015-07-10 21:27:16 +03:00
Alexander Leibzon
808126cf91 forgot type 2015-07-09 01:22:08 +03:00
Alexander Leibzon
69a8295f4c forgot type 2015-07-09 01:18:31 +03:00
Noriaki Katayama
6338be3811 modified response 2015-07-08 10:33:55 +09:00
Alexander Leibzon
3ee6371250 initial work on google spreadsheets 2015-07-07 01:39:59 +03:00
Noriaki Katayama
4f38d42182 add presto 2015-07-06 18:22:23 +09:00
88 changed files with 4209 additions and 1076 deletions


@@ -1,9 +1,6 @@
-REDASH_CONNECTION_ADAPTER=pg
-REDASH_CONNECTION_STRING="dbname=data"
-REDASH_STATIC_ASSETS_PATH=../rd_ui/app/
-REDASH_GOOGLE_APPS_DOMAIN=
-REDASH_ADMINS=
-REDASH_WORKERS_COUNT=2
-REDASH_COOKIE_SECRET=
-REDASH_DATABASE_URL='postgresql://rd'
-REDASH_LOG_LEVEL = "INFO"
+export REDASH_STATIC_ASSETS_PATH="../rd_ui/app/"
+export REDASH_LOG_LEVEL="INFO"
+export REDASH_REDIS_URL=redis://localhost:6379/1
+export REDASH_DATABASE_URL="postgresql://redash"
+export REDASH_COOKIE_SECRET=veryverysecret
+export REDASH_GOOGLE_APPS_DOMAIN=

.gitignore vendored

@@ -8,6 +8,7 @@ celerybeat-schedule*
 .#*
 \#*#
 *~
+_build
 # Vagrant related
 .vagrant


@@ -13,7 +13,7 @@ deps:
 pack:
 	sed -ri "s/^__version__ = '([0-9.]*)'/__version__ = '$(FULL_VERSION)'/" redash/__init__.py
-	tar -zcv -f $(FILENAME) --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *
+	tar -zcv -f $(FILENAME) --exclude="optipng*" --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *
 upload:
 	python bin/release_manager.py $(CIRCLE_SHA1) $(BASE_VERSION) $(FILENAME)


@@ -10,7 +10,8 @@
 Prior to **_re:dash_**, we tried to use traditional BI suites and discovered a set of bloated, technically challenged and slow tools/flows. What we were looking for was a more hacker'ish way to look at data, so we built one.
 **_re:dash_** was built to allow fast and easy access to billions of records, that we process and collect using Amazon Redshift ("petabyte scale data warehouse" that "speaks" PostgreSQL).
-Today **_re:dash_** has support for querying multiple databases, including: Redshift, Google BigQuery, PostgreSQL, MySQL, Graphite and custom scripts.
+Today **_re:dash_** has support for querying multiple databases, including: Redshift, Google BigQuery, PostgreSQL, MySQL, Graphite,
+Presto, Google Spreadsheets, Cloudera Impala, Hive and custom scripts.
 **_re:dash_** consists of two parts:


@@ -1,63 +0,0 @@
"""
Script to test concurrency (multithreading/multiprocess) issues with the workers. Use with caution.
"""
import json
import atfork
atfork.monkeypatch_os_fork_functions()
import atfork.stdlib_fixer
atfork.stdlib_fixer.fix_logging_module()
import time
from redash.data import worker
from redash import models, data_manager, redis_connection
if __name__ == '__main__':
models.create_db(True, False)
print "Creating data source..."
data_source = models.DataSource.create(name="Concurrency", type="pg", options="dbname=postgres")
print "Clear jobs/hashes:"
redis_connection.delete("jobs")
query_hashes = redis_connection.keys("query_hash_*")
if query_hashes:
redis_connection.delete(*query_hashes)
starting_query_results_count = models.QueryResult.select().count()
jobs_count = 5000
workers_count = 10
print "Creating jobs..."
for i in xrange(jobs_count):
query = "SELECT {}".format(i)
print "Inserting: {}".format(query)
data_manager.add_job(query=query, priority=worker.Job.LOW_PRIORITY,
data_source=data_source)
print "Starting workers..."
workers = data_manager.start_workers(workers_count)
print "Waiting for jobs to be done..."
keep_waiting = True
while keep_waiting:
results_count = models.QueryResult.select().count() - starting_query_results_count
print "QueryResults: {}".format(results_count)
time.sleep(5)
if results_count == jobs_count:
print "Yay done..."
keep_waiting = False
data_manager.stop_workers()
qr_count = 0
for qr in models.QueryResult.select():
number = int(qr.query.split()[1])
data_number = json.loads(qr.data)['rows'][0].values()[0]
if number != data_number:
print "Oops? {} != {} ({})".format(number, data_number, qr.id)
qr_count += 1
print "Verified {} query results.".format(qr_count)
print "Done."


@@ -11,7 +11,7 @@ dependencies:
     - tar xvf optipng-0.7.5.tar.gz
     - cd optipng-0.7.5; ./configure; make; sudo checkinstall -y;
     - make deps
-    - pip install -r dev_requirements.txt
+    - pip install -r requirements_dev.txt
     - pip install -r requirements.txt
 cache_directories:
     - rd_ui/node_modules/

docs/Makefile Normal file

@@ -0,0 +1,192 @@
# Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build
# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext
help:
@echo "Please use \`make <target>' where <target> is one of"
@echo " html to make standalone HTML files"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " applehelp to make an Apple Help Book"
@echo " devhelp to make HTML files and a Devhelp project"
@echo " epub to make an epub"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@echo " latexpdf to make LaTeX files and run them through pdflatex"
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
@echo " text to make text files"
@echo " man to make manual pages"
@echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " xml to make Docutils-native XML files"
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
@echo " coverage to run coverage check of the documentation (if enabled)"
clean:
rm -rf $(BUILDDIR)/*
html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
singlehtml:
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/redash.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/redash.qhc"
applehelp:
$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
@echo
@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
@echo "N.B. You won't be able to view it unless you put it in" \
"~/Library/Documentation/Help or install it in your application" \
"bundle."
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/redash"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/redash"
@echo "# devhelp"
epub:
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
latexpdfja:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
texinfo:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
info:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
gettext:
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
coverage:
$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
@echo "Testing of coverage in the sources finished, look at the " \
"results in $(BUILDDIR)/coverage/python.txt."
xml:
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
@echo
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
pseudoxml:
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."

docs/conf.py Normal file

@@ -0,0 +1,111 @@
# -*- coding: utf-8 -*-
#
# re:dash documentation build configuration file, created by
# sphinx-quickstart on Mon Jul 20 22:40:24 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u're:dash'
copyright = u'2015, EverythingMe'
author = u'EverythingMe'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
exclude_patterns = ['_build']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = False
# Output file base name for HTML help builder.
htmlhelp_basename = 'redashdoc'
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'redash', u're:dash Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'redash', u're:dash Documentation',
author, 'redash', 'One line description of project.',
'Miscellaneous'),
]

docs/datasources.rst Normal file

@@ -0,0 +1,181 @@
Supported Data Sources
######################
re:dash supports several types of data sources, and if you set it up using the provided images, it should already have
the needed dependencies to use them all. Starting from version 0.7, you can manage data sources from the UI
by browsing to ``/data_sources`` on your instance.
If one of the listed data source types isn't available when trying to create a new data source, make sure that:
1. You have installed the required dependencies.
2. If you've set a custom value for the ``REDASH_ENABLED_QUERY_RUNNERS`` setting, the relevant query runner is included in the list (see the example below).
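
For reference, a custom value looks something like this in your ``.env`` file (the module paths below are illustrative examples; keep the query runners you actually need):

.. code:: bash

   # illustrative example -- trim the list to the query runners you need
   export REDASH_ENABLED_QUERY_RUNNERS=redash.query_runner.pg,redash.query_runner.mysql,redash.query_runner.big_query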
PostgreSQL / Redshift
---------------------
- **Options**:
- Database name (mandatory)
- User
- Password
- Host
- Port
- **Additional requirements**:
- None
MySQL
-----
- **Options**:
- Database name (mandatory)
- User
- Password
- Host
- Port
- **Additional requirements**:
- ``MySQL-python`` python package
Google BigQuery
---------------
- **Options**:
- Project ID (mandatory)
- JSON key file, generated when creating a service account (see `instructions <https://developers.google.com/console/help/new/#serviceaccounts>`__).
- **Additional requirements**:
- ``google-api-python-client``, ``oauth2client`` and ``pyopenssl`` python packages (on Ubuntu it might require installing ``libffi-dev`` and ``libssl-dev`` as well).
Graphite
--------
- **Options**:
- Url (mandatory)
- User
- Password
- Verify SSL certificate
MongoDB
-------
- **Options**:
- Connection String (mandatory)
- Database name
- Replica set name
- **Additional requirements**:
- ``pymongo`` python package.
For information on how to write MongoDB queries, see :doc:`documentation </usage/mongodb_querying>`.
ElasticSearch
-------------
...
InfluxDB
--------
...
Presto
------
- **Options**:
- Host (mandatory)
- Address of the Presto coordinator.
- Port
- Port of the Presto coordinator. `8080` is the default.
- Schema
- Default schema name of Presto. You can read other schemas by qualified name like `FROM myschema.table1`.
- Catalog
- Catalog (connector) name of Presto such as `hive-cdh4`, `hive-hadoop1`, etc.
- Username
- User name to connect to Presto.
- **Additional requirements**:
- ``pyhive`` python package.
Hive
----
...
Impala
------
...
URL
---
A URL based data source which requests URLs that return the :doc:`results JSON
format </dev/results_format>`.
Very useful in situations where you want to expose the data without
connecting directly to the database.
The query itself inside re:dash will simply contain the URL to be
executed (e.g. http://myserver/path/myquery).
- **Options**:
- Url - set this if you want to limit queries to a certain base path.
Google Spreadsheets
-------------------
- **Options**:
- JSON key file, generated when creating a service account (see `instructions <https://developers.google.com/console/help/new/#serviceaccounts>`__).
- **Additional requirements**:
- ``gspread`` and ``oauth2client`` python packages.
Notes:
1. To be able to load the spreadsheet in re:dash - share it with
your ServiceAccount's email (it can be found in the credentials json
file, for example
43242343247-fjdfakljr3r2@developer.gserviceaccount.com).
2. The query format is "DOC\_UUID\|SHEET\_NUM" (for example
"kjsdfhkjh4rsEFSDFEWR232jkddsfh\|0")
Python
------
**Execute other queries, manipulate and compute with Python code**
This is a special query runner that executes the provided Python code as the query. Useful for various scenarios such as
merging data from different data sources, doing data transformation/manipulation that isn't trivial with SQL, merging
with remote data or using data analysis libraries such as Pandas (see `example query <https://gist.github.com/arikfr/be7c2888520c44cf4f0f>`__).
While the Python query runner uses a sandbox (RestrictedPython), it's not 100% secure and the security depends on the
modules you allow to import. We recommend enabling the Python query runner only in a trusted environment (meaning: behind
a VPN and with users you trust).
- **Options**:
- Allowed Modules in a comma separated list (optional). **NOTE:**
You MUST make sure these modules are installed on the machine
running the Celery workers.
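
As a rough sketch of what such a query can look like (an assumption based on the
:doc:`results format </dev/results_format>`; the exact variable names and helpers
available inside the sandbox depend on your re:dash version):

.. code:: python

   # hypothetical sketch: build a result in the documented columns/rows format
   rows = [{"country": "DK", "users": 42}, {"country": "IL", "users": 127}]

   result = {
       "columns": [
           {"name": "country", "friendly_name": "Country", "type": "string"},
           {"name": "users", "friendly_name": "Users", "type": "integer"},
       ],
       "rows": rows,
   }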

docs/dev.rst Normal file

@@ -0,0 +1,11 @@
Developer Information
=====================
.. toctree::
:maxdepth: 2
:glob:
dev/vagrant
dev/*


@@ -0,0 +1,94 @@
Query Execution Model
#####################
Introduction
============
The first data source used with re:dash was Redshift. Because
we had billions of records in Redshift, and some queries were costly to
re-run, from the get-go there was the idea of caching query results in
re:dash.
This was to relieve stress from the Redshift cluster and also to improve
the user experience.
How are queries executed and cached in re:dash?
===============================================
Server
------
To make sure each query is executed only once at any given time, we
translate the query to a ``query hash``, using the following code:
.. code:: python

   import hashlib
   import re

   COMMENTS_REGEX = re.compile("/\*.*?\*/")

   def gen_query_hash(sql):
       sql = COMMENTS_REGEX.sub("", sql)
       sql = "".join(sql.split()).lower()
       return hashlib.md5(sql.encode('utf-8')).hexdigest()
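
Since comments, whitespace and letter case are all stripped before hashing, the
following two queries produce the same hash and therefore share a cache entry:

.. code:: python

   gen_query_hash("SELECT 1 /* sanity check */") == gen_query_hash("select   1")  # True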
When query execution is done, the result gets stored in the
``query_results`` table. We also find all queries in the
``queries`` table that have the same query hash and update their
reference to the query result we just saved
(`code <https://github.com/EverythingMe/redash/blob/master/redash/models.py#L235>`__).
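
The update step is roughly equivalent to the following (a sketch of the linked
code, assuming re:dash's peewee models):

.. code:: python

   # sketch: point every query with the same hash (on the same data source)
   # at the query result that was just saved
   Query.update(latest_query_data=query_result).where(
       Query.query_hash == query_hash,
       Query.data_source == data_source).execute()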
Client
------
The client (UI) will execute queries in two scenarios:
1. (automatically) When opening a query page of a query that doesn't
have a result yet.
2. (manually) When the user clicks on "Execute".
In each case the client does a POST request to ``/api/query_results``
with the following parameters: ``query`` (the query text),
``data_source_id`` (data source to execute the query with) and ``ttl``.
When loading a cached result, ``ttl`` will be the one set for the query
(if it was set). This is a relic from previous versions, and I'm not
sure if it's really used anymore, as usually we fetch the query result
using its id.
When loading a non cached result, ``ttl`` will be 0 which will "force"
the server to execute the query.
As a response to ``/api/query_results`` the server will send either the
query results (in case of a cached query) or the job id of the currently
executing query. When a job id is received, the client starts polling on
it until a query result is received (this is encapsulated in the
``Query`` and ``QueryResult`` services).
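
The same flow can be driven from a script; a hypothetical sketch using
``requests`` (the endpoint paths and payload shapes here are assumptions and
vary between versions):

.. code:: python

   import time
   import requests

   API = "http://redash.example.com/api"  # hypothetical instance

   # ttl=0 "forces" execution instead of returning a cached result
   resp = requests.post(API + "/query_results",
                        json={"query": "SELECT 42", "data_source_id": 1, "ttl": 0}).json()

   if "job" in resp:  # got a job id -- poll until the query result is ready
       while True:
           job = requests.get("{}/jobs/{}".format(API, resp["job"]["id"])).json()["job"]
           if job.get("query_result_id"):
               break
           time.sleep(1)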
Ideas on how to implement query parameters
==========================================
Client side only implementation
-------------------------------
(This was actually implemented; see pull request `#363 <https://github.com/EverythingMe/redash/pull/363>`__ for details.)
The basic idea of how to implement parameterized queries is to treat the
query as a template and merge it with parameters taken from the query string
or the UI (or both).
When the caching facility isn't required (with queries that return in a
reasonable time frame) the implementation can be completely client side
and the backend can be "blind" to the parameters - it just receives the
final query to execute and returns the result.
As one improvement over this, we can let the UI/user specify the TTL
value when making the request to ``/api/query_results``, in which case
caching will be available too, while not having to make the server aware
of the parameters.
Hybrid
------
Another option would be to store the list of possible parameters for a
query, along with their default/optional values. In that case, the server can
prefetch all the options and cache them to provide faster results to the
client.
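
As a sketch, the client-side merge can be as simple as template substitution
before the final query text is sent to ``/api/query_results``:

.. code:: python

   # sketch: merge parameters (from the query string and/or UI) into the template
   template = "SELECT * FROM events WHERE created_at > '{start_date}' LIMIT {limit}"
   params = {"start_date": "2015-07-01", "limit": 100}

   final_query = template.format(**params)  # this is all the backend ever sees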


@@ -0,0 +1,30 @@
Data Source Results Format
==========================
All data sources in re:dash return the following results in JSON format:
.. code:: javascript
{
"columns" : [
{
// Required: a unique identifier of the column name in this result
"name" : "COLUMN_NAME",
// Required: friendly name of the column that will appear in the results
"friendly_name" : "FRIENDLY_NAME",
// Optional: If not specified sort might not work well.
// Supported types: integer, float, boolean, string (default), datetime (ISO-8601 text format)
"type" : "VALUE_TYPE"
},
...
],
"rows" : [
{
// name is the column name as it appears in the columns above.
// VALUE is a valid JSON value. For dates it's an ISO-8601 string.
"name" : VALUE,
"name2" : VALUE2
},
...
]
}
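
For example, a concrete two-column result could look like this:

.. code:: javascript

   {
       "columns" : [
           {"name" : "date", "friendly_name" : "Date", "type" : "datetime"},
           {"name" : "signups", "friendly_name" : "Signups", "type" : "integer"}
       ],
       "rows" : [
           {"date" : "2015-07-01T00:00:00", "signups" : 17},
           {"date" : "2015-07-02T00:00:00", "signups" : 23}
       ]
   }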

docs/dev/vagrant.rst Normal file

@@ -0,0 +1,49 @@
Setting up development environment (using Vagrant)
==================================================
To simplify contribution there is a `Vagrant
box <https://vagrantcloud.com/redash/boxes/dev>`__ available with all
the needed software to run re:dash for development (use it only for
development; for demo purposes there is the
`redash/demo <https://vagrantcloud.com/redash/boxes/demo>`__ box and the
AWS/GCE images).
To get started with this box:
1. Make sure you have a recent version of
`Vagrant <https://www.vagrantup.com/>`__ installed.
2. Clone the re:dash repository:
``git clone https://github.com/EverythingMe/redash.git``.
3. Change dir into the repository (``cd redash``) and run
``vagrant up``. This might take some time the first time you run it,
as it downloads the Vagrant virtual box.
4. Once Vagrant is ready, ssh into the instance (``vagrant ssh``), and
change dir to ``/opt/redash/current`` -- this is where your local
repository copy is synced to.
5. Copy the ``.env`` file into this directory (``cp ../.env ./``).
6. From ``/opt/redash/current/rd_ui`` run ``bower install`` to install
frontend packages. This can be done from your host machine as well,
if you have bower installed.
7. Go back to ``/opt/redash/current`` and install python dependencies
``sudo pip install -r requirements.txt``
8. Apply migrations
::
PYTHONPATH=. bin/run python migrations/0001_allow_delete_query.py
PYTHONPATH=. bin/run python migrations/0002_fix_timestamp_fields.py
PYTHONPATH=. bin/run python migrations/0003_update_data_source_config.py
PYTHONPATH=. bin/run python migrations/0004_allow_null_in_event_user.py
PYTHONPATH=. bin/run python migrations/0005_add_updated_at.py
PYTHONPATH=. bin/run python migrations/0006_queries_last_edit_by.py
PYTHONPATH=. bin/run python migrations/0007_add_schedule_to_queries.py
PYTHONPATH=. bin/run python migrations/0008_make_ds_name_unique.py
PYTHONPATH=. bin/run python migrations/0009_add_api_key_to_user.py
PYTHONPATH=. bin/run python migrations/0010_create_alerts.py
PYTHONPATH=. bin/run python migrations/0010_allow_deleting_datasources.py
PYTHONPATH=. bin/run python migrations/0011_migrate_bigquery_to_json.py
9. Start the server and background workers with
``bin/run honcho start -f Procfile.dev``.
10. Now the server should be available on your host on port 9001 and you
can log in with the username admin and password admin.

docs/index.rst Normal file

@@ -0,0 +1,57 @@
.. image:: http://redash.io/static/img/redash_logo.png
:width: 200px
Open Source Data Collaboration and Visualization Platform
============================================================
**re:dash** is our take on freeing the data within our company in a way that will better fit our culture and usage patterns.
Prior to **re:dash**, we tried to use traditional BI suites and discovered a set of bloated, technically challenged and slow tools/flows. What we were looking for was a more hacker'ish way to look at data, so we built one.
**re:dash** was built to allow fast and easy access to billions of records, that we process and collect using Amazon Redshift ("petabyte scale data warehouse" that "speaks" PostgreSQL).
Today **re:dash** has support for querying multiple databases, including: Redshift, Google BigQuery, Google Spreadsheets, PostgreSQL, MySQL, Graphite and custom scripts.
Features
########
1. **Query Editor**: think of `JS Fiddle`_ for SQL queries. It's your way to share data in the organization in an open way, by sharing both the dataset and the query that generated it. This way everyone can peer review not only the resulting dataset but also the process that generated it.
2. **Visualizations**: once you have a dataset, you can create different visualizations out of it. Currently it supports charts, pivot table and cohorts.
3. **Dashboards**: combine several visualizations into a single dashboard.
Demo
####
.. figure:: https://raw.github.com/EverythingMe/redash/screenshots/screenshots.gif
:alt: Screenshots
You can try out the demo instance: `http://demo.redash.io`_ (login with any Google account).
.. _http://demo.redash.io: http://demo.redash.io
.. _JS Fiddle: http://jsfiddle.net
Getting Started
###############
:doc:`Setting up re:dash instance </setup>` (includes links to ready made AWS/GCE images).
Getting Help
############
* Source: https://github.com/everythingme/redash
* Issues: https://github.com/everythingme/redash/issues
* Mailing List: https://groups.google.com/forum/#!forum/redash-users
* Gitter (chat): https://gitter.im/EverythingMe/redash
* Contact Arik, the maintainer, directly: arik@everything.me.
TOC
###
.. toctree::
:maxdepth: 2
setup
upgrade
datasources
usage
dev
misc

docs/misc.rst Normal file

@@ -0,0 +1,10 @@
Miscellaneous
=============
.. toctree::
:maxdepth: 2
:glob:
misc/*


@@ -0,0 +1,50 @@
How To: Create a Google Developers Project
==========================================
1. Go to the `Google Developers
Console <https://console.developers.google.com/>`__.
2. Select a project, or create a new one by clicking Create Project:
1. In the Project name field, type in a name for your project.
2. In the Project ID field, optionally type in a project ID for your
project or use the one that the console has created for you. This
ID must be unique world-wide.
3. Click the **Create** button and wait for the project to be
created.
4. Click on the new project name in the list to start editing the
project.
3. In the left sidebar, select the **APIs** item below "APIs & auth". A
list of Google web services appears.
4. Find the **Google+ API** service and set its status to **ON**—notice
that this action moves the service to the top of the list.
5. In the sidebar under "APIs & auth", select **Consent screen**.
- Choose an Email Address and specify a Product Name.
6. In the sidebar under "APIs & auth", select **Credentials**.
7. Click **Create a new Client ID** — a dialog box appears.
- In the **Application type** section of the dialog, select **Web
application**.
- In the **Authorized JavaScript origins** field, enter the origin
for your app. You can enter multiple origins to use with multiple
re:dash instances. Wildcards are not allowed. In the example below,
we assume your re:dash instance address is *redash.example.com*:
::
http://redash.example.com
https://redash.example.com
- In the Authorized redirect URI field, enter the redirect URI
callback:
::
http://redash.example.com/oauth/google_callback
- Click the ``Create Client ID`` button.
8. In the resulting **Client ID for web application** section, copy the
**Client ID** and **Client secret** to your ``.env`` file.

docs/misc/ssl.rst Normal file

@@ -0,0 +1,59 @@
SSL (HTTPS) Setup
=================
If you used the provided images or the bootstrap script, to start using
SSL with your instance you need to:
1. Update the nginx config file (``/etc/nginx/sites-available/redash``)
with SSL configuration (see below an example). Make sure to upload
the certificate to the server, and set the paths correctly in the new
config.
2. Open port 443 in your security group (if using AWS or GCE).
.. code:: nginx
upstream redash_servers {
server 127.0.0.1:5000;
}
server {
listen 80;
# Allow accessing /ping without https. Useful when placing behind load balancer.
location /ping {
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_pass http://redash_servers;
}
location / {
# Enforce SSL.
return 301 https://$host$request_uri;
}
}
server {
listen 443 ssl;
# Make sure to set paths to your certificate .pem and .key files.
ssl on;
ssl_certificate /path-to/cert.pem; # or crt
ssl_certificate_key /path-to/cert.key;
access_log /var/log/nginx/redash.access.log;
gzip on;
gzip_types *;
gzip_proxied any;
location / {
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_pass http://redash_servers;
proxy_redirect off;
}
}

docs/requirements.txt Normal file

@@ -0,0 +1,3 @@
sphinx
sphinx-autobuild
sphinx_rtd_theme

docs/setup.rst Normal file

@@ -0,0 +1,155 @@
Setting up re:dash instance
###########################
The `provisioning
script <https://github.com/EverythingMe/redash/blob/master/setup/bootstrap.sh>`__
works on Ubuntu 12.04, Ubuntu 14.04 and Debian Wheezy. This script
installs all needed dependencies and creates a basic setup.
To ease the process, there are also images for AWS and Google Compute
Engine. These images were created with the same provisioning script, using Packer.
Create an instance
==================
Google Compute Engine
---------------------
First, you need to add the images to your account:
.. code:: bash
$ gcloud compute images add redash-063-b906 gs://redash-images/redash.0.6.3.b906.tar.gz
Next you need to launch an instance using this image (n1-standard-1
instance type is recommended). If you plan on using re:dash with BigQuery,
you can use a dedicated image which comes with BigQuery preconfigured
(using instance permissions):
.. code:: bash
$ gcloud compute images add redash-063-b906-bq gs://redash-images/redash.0.6.3.b906-bq.tar.gz
Note that you need to launch this instance with BigQuery access:
.. code:: bash
$ gcloud compute instances create <your_instance_name> --image redash-063-b906-bq --scopes storage-ro bigquery
(the same can be done from the web interface, just make sure to enable
BigQuery access)
Now proceed to `"Setup" <#setup>`__.
AWS
---
Launch the instance from the pre-baked AMI (for small deployments
t2.micro should be enough):
- us-east-1:
`ami-47b4612c <https://console.aws.amazon.com/ec2/home?region=us-east-1#LaunchInstanceWizard:ami=ami-47b4612c>`__
- us-west-1:
`ami-a72edde3 <https://console.aws.amazon.com/ec2/home?region=us-west-1#LaunchInstanceWizard:ami=ami-a72edde3>`__
- us-west-2:
`ami-f9d6d5c9 <https://console.aws.amazon.com/ec2/home?region=us-west-2#LaunchInstanceWizard:ami=ami-f9d6d5c9>`__
- eu-central-1:
`ami-72eed46f <https://console.aws.amazon.com/ec2/home?region=eu-central-1#LaunchInstanceWizard:ami=ami-72eed46f>`__
- eu-west-1:
`ami-5a135c2d <https://console.aws.amazon.com/ec2/home?region=eu-west-1#LaunchInstanceWizard:ami=ami-5a135c2d>`__
- sa-east-1:
`ami-2b78f436 <https://console.aws.amazon.com/ec2/home?region=sa-east-1#LaunchInstanceWizard:ami=ami-2b78f436>`__
- ap-northeast-1:
`ami-0a55fd0a <https://console.aws.amazon.com/ec2/home?region=ap-northeast-1#LaunchInstanceWizard:ami=ami-0a55fd0a>`__
- ap-southeast-2:
`ami-9f793ea5 <https://console.aws.amazon.com/ec2/home?region=ap-southeast-2#LaunchInstanceWizard:ami=ami-9f793ea5>`__
- ap-southeast-1:
`ami-12545740 <https://console.aws.amazon.com/ec2/home?region=ap-southeast-1#LaunchInstanceWizard:ami=ami-12545740>`__
Now proceed to `"Setup" <#setup>`__.
Other
-----
Download the provisioning script and run it on your machine. Note that:
1. You need to run the script as root.
2. It was tested only on Ubuntu 12.04, Ubuntu 14.04 and Debian Wheezy.
Setup
=====
Once you've created the instance with either the image or the script, you
should have a running re:dash instance with everything you need to get
started. You can now log in to it with the user "admin" (password:
"admin"). But to make it useful, there are a few more steps that you
need to do manually to complete the setup:
First ssh to your instance and change directory to ``/opt/redash``. If
you're using the GCE image, switch to root (``sudo su``).
Users & Google Authentication setup
-----------------------------------
Most of the settings you need to edit are in the ``/opt/redash/.env``
file.
1. Update the cookie secret (important! otherwise anyone can sign new
cookies and impersonate users): change "veryverysecret" in the line:
``export REDASH_COOKIE_SECRET=veryverysecret`` to something else (you
can use ``pwgen 32 -1`` to generate a random string).
2. By default we create an admin user with the password "admin". You
need to change the password:
- ``cd /opt/redash/current``
- ``sudo -u redash bin/run ./manage.py users password admin {new password}``
3. If you want to use Google OAuth to authenticate users, you need to
create a Google Developers project (see :doc:`instructions </misc/google_developers_project>`)
and then add the needed configuration in the ``.env`` file:
.. code::
export REDASH_GOOGLE_CLIENT_ID=""
export REDASH_GOOGLE_CLIENT_SECRET=""
export REDASH_GOOGLE_APPS_DOMAIN=""
``REDASH_GOOGLE_CLIENT_ID`` and ``REDASH_GOOGLE_CLIENT_SECRET`` are the values you get after registering with Google. ``REDASH_GOOGLE_APPS_DOMAIN`` is used in case you want to limit access to a single Google Apps domain (*if you leave it empty, anyone with a Google account can access your instance*).
4. Restart the web server to apply the configuration changes:
``sudo supervisorctl restart redash_server``.
5. Once you have Google OAuth enabled, you can login using your Google
Apps account. If you want to grant admin permissions to some users,
you can do it with the ``users grant_admin`` command:
``sudo -u redash bin/run ./manage.py users grant_admin {email}``.
6. If you don't use Google OAuth or just need username/password logins,
you can create additional users using the CLI (see :doc:`documentation </usage/users>`).
Datasources
-----------
To make re:dash truly useful, you need to set up your data sources in it. Browse to ``/data_sources`` on your instance
to create a new data source connection.
See
:doc:`documentation </datasources>`
for the different options. Your instance comes ready with the dependencies
needed to set up the supported sources.
How to upgrade?
---------------
It's recommended to upgrade your re:dash instance once in a while to
benefit from bug fixes and new features. See :doc:`here </upgrade>` for full upgrade
instructions (including a Fabric script).
Notes
=====
- If this is a production setup, you should enforce HTTPS and make sure
you set the cookie secret (see :doc:`instructions </misc/ssl>`).

docs/upgrade.rst Normal file

@@ -0,0 +1,36 @@
How to Upgrade
##############
It's recommended to upgrade your re:dash instance once there are new
releases, to benefit from new features and bug fixes. The upgrade
process is relatively simple, and assuming you used one of the base
images we provide, you can just use the
`Fabric <http://www.fabfile.org/>`__ script provided here:
https://gist.github.com/arikfr/440d1403b4aeb76ebaf8.
How to run the Fabric script
============================
1. Install Fabric: ``pip install fabric requests`` (needed only once)
2. Download the ``fabfile.py`` from the gist.
3. Run the script:
``fab -H{your re:dash host} -u{the ssh user for this host} -i{path to key file for passwordless login} deploy_latest_release``
``-i`` is optional and it is only needed in case you're using private-key based authentication (and didn't add the key file to your authentication agent or set its path in your SSH config).
What the Fabric script does
===========================
Even if you didn't use the image, it's very likely you can reuse most of
this script with small modifications. What the script does (a rough sketch follows the list):
1. Find the URL of the latest release tarball (from `GitHub releases
page <https://github.com/everythingme/redash/releases>`__).
2. Download it.
3. Create a new directory for this version (for example:
``/opt/redash/redash.0.5.0.b685``).
4. Unpack it (``tar -C {dir} -xvf {tarball path}``).
5. Link ``/opt/redash/.env`` file into this directory.
6. Apply any new migrations.
7. Link ``/opt/redash/current`` to the new version.
8. Restart web server and celery workers.
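
The gist is the source of truth, but the core of such a script is small. A
rough Fabric 1.x sketch (the release-lookup helper is hypothetical, and the
paths assume the standard image layout):

.. code:: python

   from fabric.api import run, sudo

   def deploy_latest_release():
       # hypothetical helper: query the GitHub releases API for the tarball URL
       url, version = find_latest_release()
       target = "/opt/redash/redash.{}".format(version)

       run("mkdir -p {} && wget -O /tmp/redash.tar.gz {}".format(target, url))
       run("tar -C {} -xvf /tmp/redash.tar.gz".format(target))
       run("ln -nfs /opt/redash/.env {}/.env".format(target))
       # ... apply any new migrations here ...
       run("ln -nfs {} /opt/redash/current".format(target))
       sudo("supervisorctl restart redash_server redash_celery")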

docs/usage.rst Normal file

@@ -0,0 +1,12 @@
Usage
=====
.. toctree::
:maxdepth: 2
:glob:
usage/maintenance.rst
usage/users.rst
usage/*


@@ -0,0 +1,48 @@
ElasticSearch: Querying
#######################
ElasticSearch currently supports only simple Lucene-style queries (like
Kibana, but without aggregations).
Full-blown JSON-based ElasticSearch queries (including aggregations)
will be added later.
Simple query example:
=====================
- Query the index named "twitter"
- Filter by "user:kimchy"
- Return the fields: "@timestamp", "tweet" and "user"
- Return up to 15 results
- Sort by @timestamp ascending
.. code:: json
{
"index" : "twitter",
"query" : "user:kimchy",
"fields" : ["@timestamp", "tweet", "user"],
"size" : 15,
"sort" : "@timestamp:asc"
}
Simple query on a logstash ElasticSearch instance:
==================================================
- Query the index named "logstash-2015.04.\*" (in this case its all of
April 2015)
- Filter by type:events AND eventName:UserUpgrade AND channel:selfserve
- Return fields: "@timestamp", "userId", "channel", "utm\_source",
"utm\_medium", "utm\_campaign", "utm\_content"
- Return up to 250 results
- Sort by @timestamp ascending
.. code:: json
{
"index" : "logstash-2015.04.*",
"query" : "type:events AND eventName:UserUpgrade AND channel:selfserve",
"fields" : ["@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"],
"size" : 250,
"sort" : "@timestamp:asc"
}


@@ -0,0 +1,94 @@
Ongoing Maintenance and Basic Operations
########################################
Configuration and logs
======================
The supervisor config can be found in
``/opt/redash/supervisord/supervisord.conf``.
There you can see the names of its programs (``redash_celery``,
``redash_server``) and the location of their logs.
Restart
=======
Restarting the Web Server
-------------------------
``sudo supervisorctl restart redash_server``
Restarting Celery Workers
-------------------------
``sudo supervisorctl restart redash_celery``
Restarting Celery Workers & the Queries Queue
---------------------------------------------
In case you are handling a problem, and you need to stop the currently
running queries and reset the queue, follow the steps below.
1. Stop celery: ``sudo supervisorctl stop redash_celery`` (celery might
take some time to stop, if it's in the middle of running a query)
2. Flush redis: ``redis-cli flushdb``
3. Start celery: ``sudo supervisorctl start redash_celery``
Changing the Number of Workers
==============================
By default, Celery will start a worker per CPU core. Because most of
re:dash's tasks are IO bound, the real limit on the number of workers you
can use depends on the amount of memory your machine has. It's
recommended to increase the number of workers to support more concurrent
queries.
1. Open the supervisord configuration file:
``/opt/redash/supervisord/supervisord.conf``
2. Edit the ``[program:redash_celery]`` section and add the ``-c`` parameter
with the number of concurrent workers you need to the *command* value (see the example below).
3. Restart supervisord to apply new configuration:
``sudo /etc/init.d/redash_supervisord restart``.
DB
==
Show the Currently Configured Data Source
-----------------------------------------
This varies based on the re:dash version and personal preference. You
can do one of the following:
Using the CLI
~~~~~~~~~~~~~
In ``/opt/redash/current``, run:
``sudo -u redash bin/run ./manage.py ds list``
Using the Admin
~~~~~~~~~~~~~~~
Available from version 0.6b797: browse to ``/admin/datasource``.
View the Definition Directly in the DB
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1. Open psql: ``sudo -u redash psql``
2. Run the query: ``SELECT * from data_sources;``
Back Up re:dash's DB
--------------------
``sudo -u redash pg_dump > backup_filename.sql``
Version
=======
See current version:
``bin/run ./manage.py version``

View File

@@ -0,0 +1,74 @@
MongoDB: Querying
#################
Simple query example:
=====================
.. code:: json

    {
        "collection" : "my_collection",
        "query" : {
            "date" : {
                "$gt" : "ISODate(\"2015-01-15 11:41\")"
            },
            "type" : 1
        },
        "fields" : {
            "_id" : 1,
            "name" : 2
        },
        "sort" : [
            {
                "name" : "date",
                "direction" : -1
            }
        ]
    }
Live example on the demo instance:
http://demo.redash.io/queries/394/source.
Aggregation
===========
Aggregation queries use a syntax similar to PyMongo's. However, to
preserve the order of the sort keys, the "$sort" operation takes a
regular list, which is converted into a SON (sorted dictionary) object
before execution (see the sketch after the example below).
Aggregation query example:
.. code:: json

    {
        "collection" : "things",
        "aggregate" : [
            {
                "$unwind" : "$tags"
            },
            {
                "$group" : {
                    "_id" : "$tags",
                    "count" : { "$sum" : 1 }
                }
            },
            {
                "$sort" : [
                    {
                        "name" : "count",
                        "direction" : -1
                    },
                    {
                        "name" : "_id",
                        "direction" : -1
                    }
                ]
            }
        ]
    }
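As a minimal sketch of the list-to-SON conversion described above
(assuming PyMongo's ``bson`` package; this is an illustration, not
re:dash's actual code):

.. code:: python

    # Convert a re:dash-style "$sort" list into a SON, preserving key order.
    from bson.son import SON

    def sort_list_to_son(sort_list):
        return SON([(item["name"], item["direction"]) for item in sort_list])

    # For the example above this yields SON([("count", -1), ("_id", -1)]),
    # an ordering that regular (unordered) dicts could not guarantee.
    sort_stage = sort_list_to_son([
        {"name": "count", "direction": -1},
        {"name": "_id", "direction": -1}
    ])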
Live examples on the demo instance:
1. http://demo.redash.io/queries/393/source
2. http://demo.redash.io/queries/387/source

docs/usage/users.rst
View File

@@ -0,0 +1,39 @@
User Management
###############
If you use Google OpenID authentication, then each user from the domains
you allowed will automatically be logged in and have the default
permissions.
If you want to give some users different permissions, or you want to
create password-based users (make sure you enabled this option in the
settings first), you need to use the CLI (``manage.py``).
Create a new user
=================
.. code:: bash

    $ bin/run ./manage.py users create --help
    usage: users create [-h] [--permissions PERMISSIONS] [--password PASSWORD]
                        [--google] [--admin]
                        name email

    positional arguments:
      name                  User's full name
      email                 User's email

    optional arguments:
      -h, --help            show this help message and exit
      --permissions PERMISSIONS
                            Comma seperated list of permissions (leave blank for
                            default).
      --password PASSWORD   Password for users who don't use Google Auth (leave
                            blank for prompt).
      --google              user uses Google Auth to login
      --admin               set user as admin
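For example, an illustrative invocation (run from ``/opt/redash/current``;
the name, email and password here are placeholders):
``sudo -u redash bin/run ./manage.py users create --password=s3cret "John Doe" jdoe@example.com``.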
Grant admin permissions
=======================
``sudo -u redash bin/run ./manage.py users grant_admin {email}``

View File

@@ -43,12 +43,15 @@ def make_shell_context():
@manager.command
def check_settings():
"""Show the settings as re:dash sees them (useful for debugging)."""
from types import ModuleType
for name, item in settings.all_settings().iteritems():
print "{} = {}".format(name, item)
for name in dir(settings):
item = getattr(settings, name)
if not callable(item) and not name.startswith("__") and not isinstance(item, ModuleType):
print "{} = {}".format(name, item)
@manager.command
def send_test_mail():
from redash import mail
from flask_mail import Message
mail.send(Message(subject="Test Message from re:dash", recipients=[settings.MAIL_DEFAULT_SENDER], body="Test message."))
if __name__ == '__main__':

View File

@@ -0,0 +1,18 @@
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
if __name__ == '__main__':
db.connect_db()
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
migrate(
migrator.drop_not_null('queries', 'data_source_id'),
)
db.close_db(None)

View File

@@ -0,0 +1,8 @@
from redash.models import db, Alert, AlertSubscription
if __name__ == '__main__':
with db.database.transaction():
Alert.create_table()
AlertSubscription.create_table()
db.close_db(None)

View File

@@ -0,0 +1,44 @@
from base64 import b64encode
import json
from redash.models import DataSource
def convert_p12_to_pem(p12file):
from OpenSSL import crypto
with open(p12file, 'rb') as f:
p12 = crypto.load_pkcs12(f.read(), "notasecret")
return crypto.dump_privatekey(crypto.FILETYPE_PEM, p12.get_privatekey())
if __name__ == '__main__':
for ds in DataSource.all():
if ds.type == 'bigquery':
options = json.loads(ds.options)
if 'jsonKeyFile' in options:
continue
new_options = {
'projectId': options['projectId'],
'jsonKeyFile': b64encode(json.dumps({
'client_email': options['serviceAccount'],
'private_key': convert_p12_to_pem(options['privateKey'])
}))
}
ds.options = json.dumps(new_options)
ds.save()
elif ds.type == 'google_spreadsheets':
options = json.loads(ds.options)
if 'jsonKeyFile' in options:
continue
with open(options['credentialsFilePath']) as f:
new_options = {
'jsonKeyFile': b64encode(f.read())
}
ds.options = json.dumps(new_options)
ds.save()

View File

@@ -19,6 +19,7 @@
"trailing": true,
"smarttabs": true,
"globals": {
"angular": false
"angular": false,
"_": false
}
}

View File

@@ -14,7 +14,6 @@
<link rel="stylesheet" href="/bower_components/gridster/dist/jquery.gridster.css">
<link rel="stylesheet" href="/bower_components/pivottable/dist/pivot.css">
<link rel="stylesheet" href="/bower_components/cornelius/src/cornelius.css">
<link rel="stylesheet" href="/bower_components/select2/select2.css">
<link rel="stylesheet" href="/bower_components/angular-ui-select/dist/select.css">
<link rel="stylesheet" href="/bower_components/pace/themes/pace-theme-minimal.css">
<link rel="stylesheet" href="/bower_components/font-awesome/css/font-awesome.css">
@@ -73,6 +72,12 @@
<li><a href="/queries">Queries</a></li>
</ul>
</li>
<li>
<a href="/alerts">Alerts</a>
</li>
<li ng-show="currentUser.hasPermission('admin')">
<a href="/data_sources">Data Sources</a>
</li>
</ul>
<form class="navbar-form navbar-left" role="search" ng-submit="searchQueries()">
<div class="form-group">
@@ -125,11 +130,10 @@
<script src="/bower_components/cornelius/src/cornelius.js"></script>
<script src="/bower_components/mousetrap/mousetrap.js"></script>
<script src="/bower_components/mousetrap/plugins/global-bind/mousetrap-global-bind.js"></script>
<script src="/bower_components/select2/select2.js"></script>
<script src="/bower_components/angular-ui-select2/src/select2.js"></script>
<script src="/bower_components/angular-ui-select/dist/select.js"></script>
<script src="/bower_components/underscore.string/lib/underscore.string.js"></script>
<script src="/bower_components/marked/lib/marked.js"></script>
<script src="/bower_components/angular-base64-upload/dist/angular-base64-upload.js"></script>
<script src="/scripts/ng_highchart.js"></script>
<script src="/scripts/ng_smart_table.js"></script>
<script src="/bower_components/angular-ui-bootstrap-bower/ui-bootstrap-tpls.js"></script>
@@ -151,6 +155,7 @@
<script src="/scripts/controllers/controllers.js"></script>
<script src="/scripts/controllers/dashboard.js"></script>
<script src="/scripts/controllers/admin_controllers.js"></script>
<script src="/scripts/controllers/data_sources.js"></script>
<script src="/scripts/controllers/query_view.js"></script>
<script src="/scripts/controllers/query_source.js"></script>
<script src="/scripts/visualizations/base.js"></script>
@@ -162,8 +167,10 @@
<script src="/scripts/visualizations/pivot.js"></script>
<script src="/scripts/directives/directives.js"></script>
<script src="/scripts/directives/query_directives.js"></script>
<script src="/scripts/directives/data_source_directives.js"></script>
<script src="/scripts/directives/dashboard_directives.js"></script>
<script src="/scripts/filters.js"></script>
<script src="/scripts/controllers/alerts.js"></script>
<!-- endbuild -->
<script>
@@ -178,7 +185,7 @@
currentUser.hasPermission = function(permission) {
return this.permissions.indexOf(permission) != -1;
}
};
{{ analytics|safe }}
</script>

View File

@@ -74,8 +74,8 @@
<form role="form" method="post" name="login">
<div class="form-group">
<label for="inputUsernameEmail">Username or email</label>
<input type="text" class="form-control" id="inputUsernameEmail" name="username" value="{{username}}">
<label for="inputEmail">Email</label>
<input type="text" class="form-control" id="inputEmail" name="email" value="{{email}}">
</div>
<div class="form-group">
<!--<a class="pull-right" href="#">Forgot password?</a>-->

View File

@@ -7,16 +7,16 @@ angular.module('redash', [
'redash.renderers',
'redash.visualization',
'highchart',
'ui.select2',
'angular-growl',
'angularMoment',
'ui.bootstrap',
'smartTable.table',
'ngResource',
'ngRoute',
'ui.select'
]).config(['$routeProvider', '$locationProvider', '$compileProvider', 'growlProvider',
function ($routeProvider, $locationProvider, $compileProvider, growlProvider) {
'ui.select',
'naif.base64'
]).config(['$routeProvider', '$locationProvider', '$compileProvider', 'growlProvider', 'uiSelectConfig',
function ($routeProvider, $locationProvider, $compileProvider, growlProvider, uiSelectConfig) {
if (featureFlags.clientSideMetrics) {
Bucky.setOptions({
host: '/api/metrics'
@@ -31,6 +31,8 @@ angular.module('redash', [
return query.$promise;
};
uiSelectConfig.theme = "bootstrap";
$compileProvider.aHrefSanitizationWhitelist(/^\s*(https?|http|data):/);
$locationProvider.html5Mode(true);
growlProvider.globalTimeToLive(2000);
@@ -80,15 +82,30 @@ angular.module('redash', [
templateUrl: '/views/admin_status.html',
controller: 'AdminStatusCtrl'
});
$routeProvider.when('/admin/workers', {
templateUrl: '/views/admin_workers.html',
controller: 'AdminWorkersCtrl'
$routeProvider.when('/alerts', {
templateUrl: '/views/alerts/list.html',
controller: 'AlertsCtrl'
});
$routeProvider.when('/alerts/:alertId', {
templateUrl: '/views/alerts/edit.html',
controller: 'AlertCtrl'
});
$routeProvider.when('/data_sources/:dataSourceId', {
templateUrl: '/views/data_sources/edit.html',
controller: 'DataSourceCtrl'
});
$routeProvider.when('/data_sources', {
templateUrl: '/views/data_sources/list.html',
controller: 'DataSourcesCtrl'
});
$routeProvider.when('/', {
templateUrl: '/views/index.html',
controller: 'IndexCtrl'
templateUrl: '/views/personal.html',
controller: 'PersonalIndexCtrl'
});
$routeProvider.when('/personal', {
templateUrl: '/views/personal.html',
controller: 'PersonalIndexCtrl'

View File

@@ -17,7 +17,7 @@
};
refresh();
}
};
angular.module('redash.admin_controllers', [])
.controller('AdminStatusCtrl', ['$scope', 'Events', '$http', '$timeout', AdminStatusCtrl])

View File

@@ -0,0 +1,174 @@
(function() {
var AlertsCtrl = function($scope, Events, Alert) {
Events.record(currentUser, "view", "page", "alerts");
$scope.$parent.pageTitle = "Alerts";
$scope.alerts = []
Alert.query(function(alerts) {
var stateClass = {
'ok': 'label label-success',
'triggered': 'label label-danger',
'unknown': 'label label-warning'
};
_.each(alerts, function(alert) {
alert.class = stateClass[alert.state];
})
$scope.alerts = alerts;
});
$scope.gridConfig = {
isPaginationEnabled: true,
itemsByPage: 50,
maxSize: 8,
};
$scope.gridColumns = [
{
"label": "Name",
"map": "name",
"cellTemplate": '<a href="/alerts/{{dataRow.id}}">{{dataRow.name}}</a> (<a href="/queries/{{dataRow.query.id}}">query</a>)'
},
{
'label': 'Created By',
'map': 'user.name'
},
{
'label': 'State',
'cellTemplate': '<span ng-class="dataRow.class">{{dataRow.state | uppercase}}</span> since <span am-time-ago="dataRow.updated_at"></span>'
},
{
'label': 'Created At',
'cellTemplate': '<span am-time-ago="dataRow.created_at"></span>'
}
];
};
var AlertCtrl = function($scope, $routeParams, $location, growl, Query, Events, Alert) {
$scope.$parent.pageTitle = "Alerts";
$scope.alertId = $routeParams.alertId;
if ($scope.alertId === "new") {
Events.record(currentUser, 'view', 'page', 'alerts/new');
} else {
Events.record(currentUser, 'view', 'alert', $scope.alertId);
}
$scope.onQuerySelected = function(item) {
$scope.selectedQuery = item;
item.getQueryResultPromise().then(function(result) {
$scope.queryResult = result;
$scope.alert.options.column = $scope.alert.options.column || result.getColumnNames()[0];
});
};
if ($scope.alertId === "new") {
$scope.alert = new Alert({options: {}});
} else {
$scope.alert = Alert.get({id: $scope.alertId}, function(alert) {
$scope.onQuerySelected(new Query($scope.alert.query));
});
}
$scope.ops = ['greater than', 'less than', 'equals'];
$scope.selectedQuery = null;
$scope.getDefaultName = function() {
if (!$scope.alert.query) {
return undefined;
}
return _.template("<%= query.name %>: <%= options.column %> <%= options.op %> <%= options.value %>", $scope.alert);
};
$scope.searchQueries = function (term) {
if (!term || term.length < 3) {
return;
}
Query.search({q: term}, function(results) {
$scope.queries = results;
});
};
$scope.saveChanges = function() {
if ($scope.alert.name === undefined || $scope.alert.name === '') {
$scope.alert.name = $scope.getDefaultName();
}
$scope.alert.$save(function(alert) {
growl.addSuccessMessage("Saved.");
if ($scope.alertId === "new") {
$location.path('/alerts/' + alert.id).replace();
}
}, function() {
growl.addErrorMessage("Failed saving alert.");
});
};
};
angular.module('redash.directives').directive('alertSubscribers', ['AlertSubscription', function (AlertSubscription) {
return {
restrict: 'E',
replace: true,
templateUrl: '/views/alerts/subscribers.html',
scope: {
'alertId': '='
},
controller: function ($scope) {
$scope.subscribers = AlertSubscription.query({alertId: $scope.alertId});
}
}
}]);
angular.module('redash.directives').directive('subscribeButton', ['AlertSubscription', 'growl', function (AlertSubscription, growl) {
return {
restrict: 'E',
replace: true,
template: '<button class="btn btn-default btn-xs" ng-click="toggleSubscription()"><i ng-class="class"></i></button>',
controller: function ($scope) {
var updateClass = function() {
if ($scope.subscription) {
$scope.class = "fa fa-eye-slash";
} else {
$scope.class = "fa fa-eye";
}
}
$scope.subscribers.$promise.then(function() {
$scope.subscription = _.find($scope.subscribers, function(subscription) {
return (subscription.user.email == currentUser.email);
});
updateClass();
});
$scope.toggleSubscription = function() {
if ($scope.subscription) {
$scope.subscription.$delete(function() {
$scope.subscribers = _.without($scope.subscribers, $scope.subscription);
$scope.subscription = undefined;
updateClass();
}, function() {
growl.addErrorMessage("Failed saving subscription.");
});
} else {
$scope.subscription = new AlertSubscription({alert_id: $scope.alertId});
$scope.subscription.$save(function() {
$scope.subscribers.push($scope.subscription);
updateClass();
}, function() {
growl.addErrorMessage("Unsubscription failed.");
});
}
}
}
}
}]);
angular.module('redash.controllers')
.controller('AlertsCtrl', ['$scope', 'Events', 'Alert', AlertsCtrl])
.controller('AlertCtrl', ['$scope', '$routeParams', '$location', 'growl', 'Query', 'Events', 'Alert', AlertCtrl])
})();

View File

@@ -23,7 +23,7 @@
},
{
'label': 'Created By',
'map': 'user_name'
'map': 'user.name'
},
{
'label': 'Created At',
@@ -45,7 +45,6 @@
Query.search({q: $scope.term }, function(results) {
$scope.queries = _.map(results, function(query) {
query.created_at = moment(query.created_at);
query.user_name = query.user.name;
return query;
});
});
@@ -93,7 +92,6 @@
$scope.allQueries = _.map(queries, function (query) {
query.created_at = moment(query.created_at);
query.retrieved_at = moment(query.retrieved_at);
query.user_name = query.user.name;
return query;
});
@@ -108,7 +106,7 @@
},
{
'label': 'Created By',
'map': 'user_name'
'map': 'user.name'
},
{
'label': 'Created At',

View File

@@ -0,0 +1,47 @@
(function () {
var DataSourcesCtrl = function ($scope, $location, growl, Events, DataSource) {
Events.record(currentUser, "view", "page", "admin/data_sources");
$scope.$parent.pageTitle = "Data Sources";
$scope.dataSources = DataSource.query();
$scope.openDataSource = function(datasource) {
$location.path('/data_sources/' + datasource.id);
};
$scope.deleteDataSource = function(event, datasource) {
event.stopPropagation();
Events.record(currentUser, "delete", "datasource", datasource.id);
datasource.$delete(function(resource) {
growl.addSuccessMessage("Data source deleted succesfully.");
this.$parent.dataSources = _.without(this.dataSources, resource);
}.bind(this), function(httpResponse) {
console.log("Failed to delete data source: ", httpResponse.status, httpResponse.statusText, httpResponse.data);
growl.addErrorMessage("Failed to delete data source.");
});
}
};
var DataSourceCtrl = function ($scope, $routeParams, $http, $location, Events, DataSource) {
Events.record(currentUser, "view", "page", "admin/data_source");
$scope.$parent.pageTitle = "Data Sources";
$scope.dataSourceId = $routeParams.dataSourceId;
if ($scope.dataSourceId == "new") {
$scope.dataSource = new DataSource({options: {}});
} else {
$scope.dataSource = DataSource.get({id: $routeParams.dataSourceId});
}
$scope.$watch('dataSource.id', function(id) {
if (id != $scope.dataSourceId && id !== undefined) {
$location.path('/data_sources/' + id).replace();
}
});
};
angular.module('redash.controllers')
.controller('DataSourcesCtrl', ['$scope', '$location', 'growl', 'Events', 'DataSource', DataSourcesCtrl])
.controller('DataSourceCtrl', ['$scope', '$routeParams', '$http', '$location', 'Events', 'DataSource', DataSourceCtrl])
})();

View File

@@ -17,7 +17,7 @@
saveQuery = $scope.saveQuery;
$scope.sourceMode = true;
$scope.canEdit = true;
$scope.canEdit = currentUser.canEdit($scope.query) || featureFlags.allowAllToEditQueries;
$scope.isDirty = false;
$scope.newVisualization = undefined;

View File

@@ -49,10 +49,13 @@
$scope.isQueryOwner = (currentUser.id === $scope.query.user.id) || currentUser.hasPermission('admin');
$scope.canViewSource = currentUser.hasPermission('view_source');
$scope.dataSources = DataSource.get(function(dataSources) {
$scope.dataSources = DataSource.query(function(dataSources) {
updateSchema();
$scope.query.data_source_id = $scope.query.data_source_id || dataSources[0].id;
$scope.dataSource = _.find(dataSources, function(ds) { return ds.id == $scope.query.data_source_id; });
if ($scope.query.isNew()) {
$scope.query.data_source_id = $scope.query.data_source_id || dataSources[0].id;
$scope.dataSource = _.find(dataSources, function(ds) { return ds.id == $scope.query.data_source_id; });
}
});
// in view mode, latest dataset is always visible
@@ -101,6 +104,9 @@
};
$scope.executeQuery = function() {
if (!$scope.query.query) {
return;
}
getQueryResult(0);
$scope.lockButton(true);
$scope.cancelling = false;

View File

@@ -0,0 +1,76 @@
(function () {
'use strict';
var directives = angular.module('redash.directives');
// Angular strips data- from the directive, so data-source-form becomes sourceForm...
directives.directive('sourceForm', ['$http', 'growl', function ($http, growl) {
return {
restrict: 'E',
replace: true,
templateUrl: '/views/data_sources/form.html',
scope: {
'dataSource': '='
},
link: function ($scope) {
var setType = function(types) {
if ($scope.dataSource.type === undefined) {
$scope.dataSource.type = types[0].type;
return types[0];
}
$scope.type = _.find(types, function (t) {
return t.type == $scope.dataSource.type;
});
};
$scope.files = {};
$scope.$watchCollection('files', function() {
_.each($scope.files, function(v, k) {
if (v) {
$scope.dataSource.options[k] = v.base64;
}
});
});
$http.get('/api/data_sources/types').success(function (types) {
setType(types);
$scope.dataSourceTypes = types;
_.each(types, function (type) {
_.each(type.configuration_schema.properties, function (prop, name) {
if (name == 'password' || name == 'passwd') {
prop.type = 'password';
}
if (_.string.endsWith(name, "File")) {
prop.type = 'file';
}
prop.required = _.contains(type.configuration_schema.required, name);
});
});
});
$scope.$watch('dataSource.type', function(current, prev) {
if (prev !== current) {
if (prev !== undefined) {
$scope.dataSource.options = {};
}
setType($scope.dataSourceTypes);
}
});
$scope.saveChanges = function() {
$scope.dataSource.$save(function() {
growl.addSuccessMessage("Saved.");
}, function() {
growl.addErrorMessage("Failed saving.");
});
}
}
}
}]);
})();

View File

@@ -145,7 +145,7 @@
if (!hasTotalsAlready) {
this.addSeries({
data: _.values(data),
data: _.sortBy(_.values(data), 'x'),
type: 'line',
name: 'Total'
}, false)

File diff suppressed because it is too large

View File

@@ -1,4 +1,28 @@
(function () {
function QueryResultError(errorMessage) {
this.errorMessage = errorMessage;
}
QueryResultError.prototype.getError = function() {
return this.errorMessage;
};
QueryResultError.prototype.getStatus = function() {
return 'failed';
};
QueryResultError.prototype.getData = function() {
return null;
};
QueryResultError.prototype.getLog = function() {
return null;
};
QueryResultError.prototype.getChartData = function() {
return null;
};
var QueryResult = function ($resource, $timeout, $q) {
var QueryResultResource = $resource('/api/query_results/:id', {id: '@id'}, {'post': {'method': 'POST'}});
var Job = $resource('/api/jobs/:id', {id: '@id'});
@@ -44,7 +68,7 @@
} else {
this.status = undefined;
}
}
};
function QueryResult(props) {
this.deferred = $q.defer();
@@ -414,20 +438,23 @@
return '/queries/' + this.id + '/source';
};
Query.prototype.isNew = function() {
return this.id === undefined;
};
Query.prototype.hasDailySchedule = function() {
return (this.schedule && this.schedule.match(/\d\d:\d\d/) !== null);
}
};
Query.prototype.scheduleInLocalTime = function() {
var parts = this.schedule.split(':');
return moment.utc().hour(parts[0]).minute(parts[1]).local().format('HH:mm');
}
};
Query.prototype.getQueryResult = function (maxAge, parameters) {
// if (ttl == undefined) {
// ttl = this.ttl;
// }
if (!this.query) {
return;
}
var queryText = this.query;
var queryParameters = this.getParameters();
@@ -462,6 +489,8 @@
}
} else if (this.data_source_id) {
this.queryResult = QueryResult.get(this.data_source_id, queryText, maxAge, this.id);
} else {
return new QueryResultError("Please select data source to run this query.");
}
return this.queryResult;
@@ -498,14 +527,41 @@
var DataSource = function ($resource) {
var actions = {
'get': {'method': 'GET', 'cache': true, 'isArray': true},
'get': {'method': 'GET', 'cache': false, 'isArray': false},
'query': {'method': 'GET', 'cache': false, 'isArray': true},
'getSchema': {'method': 'GET', 'cache': true, 'isArray': true, 'url': '/api/data_sources/:id/schema'}
};
var DataSourceResource = $resource('/api/data_sources/:id', {id: '@id'}, actions);
return DataSourceResource;
}
};
var AlertSubscription = function ($resource) {
var resource = $resource('/api/alerts/:alertId/subscriptions/:userId', {alertId: '@alert_id', userId: '@user.id'});
return resource;
};
var Alert = function ($resource, $http) {
var actions = {
save: {
method: 'POST',
transformRequest: [function(data) {
var newData = _.extend({}, data);
if (newData.query_id === undefined) {
newData.query_id = newData.query.id;
delete newData.query;
}
return newData;
}].concat($http.defaults.transformRequest)
}
};
var resource = $resource('/api/alerts/:id', {id: '@id'}, actions);
return resource;
};
var Widget = function ($resource, Query) {
var WidgetResource = $resource('/api/widgets/:id', {id: '@id'});
@@ -532,5 +588,7 @@
.factory('QueryResult', ['$resource', '$timeout', '$q', QueryResult])
.factory('Query', ['$resource', 'QueryResult', 'DataSource', Query])
.factory('DataSource', ['$resource', DataSource])
.factory('Alert', ['$resource', '$http', Alert])
.factory('AlertSubscription', ['$resource', AlertSubscription])
.factory('Widget', ['$resource', 'Query', Widget]);
})();

File diff suppressed because one or more lines are too long

View File

@@ -84,10 +84,6 @@
template: '<filters></filters>\n' + Visualization.renderVisualizationsTemplate,
replace: false,
link: function (scope) {
scope.select2Options = {
width: '50%'
};
scope.$watch('queryResult && queryResult.getFilters()', function (filters) {
if (filters) {
scope.filters = filters;

View File

@@ -26,7 +26,10 @@
if ($scope.queryResult.getData() == null) {
} else {
var sortedData = _.sortBy($scope.queryResult.getData(), "date");
var sortedData = _.sortBy($scope.queryResult.getData(), function(r) {
return r['date'] + r['day_number'];
});
var grouped = _.groupBy(sortedData, "date");
var maxColumns = _.reduce(grouped, function(memo, data){
return (data.length > memo)? data.length : memo;

View File

@@ -100,6 +100,15 @@ a.navbar-brand img {
text-decoration: underline;
}
.list-group-item.clickable {
cursor: pointer;
}
.list-group-item.clickable:focus,
.list-group-item.clickable:hover {
background-color: #f5f5f5;
}
/* angular-growl */
.growl {
position: fixed;
@@ -128,6 +137,23 @@ a.navbar-brand img {
}
/* Visualization Filters */
.filters-container {
display: flex;
flex-wrap: wrap;
}
.filter {
width: 33%;
padding-left: 5px;
padding-bottom: 5px;
}
.filter > div {
width: 100%;
}
/* Gridster */
.gridster ul {
@@ -330,6 +356,11 @@ div.table-name {
cursor: pointer;
}
.blankslate {
text-align: center;
padding: 30px;
}
/*
bootstrap's hidden-xs class adds display:block when not hidden
use this class when you need to keep the original display value

View File

@@ -0,0 +1,58 @@
<div class="container">
<ol class="breadcrumb">
<li><a href="/alerts">Alerts</a></li>
<li class="active">{{alert.name || getDefaultName() || "New"}}</li>
</ol>
<div class="row">
<div class="col-md-8">
<form name="alertForm" ng-submit="saveChanges()" class="form">
<div class="form-group">
<label>Query</label>
<ui-select ng-model="alert.query" reset-search-input="false" on-select="onQuerySelected($item)">
<ui-select-match placeholder="Search a query by name">{{$select.selected.name}}</ui-select-match>
<ui-select-choices repeat="q in queries"
refresh="searchQueries($select.search)"
refresh-delay="0">
<div ng-bind-html="q.name | highlight: $select.search | trustAsHtml"></div>
</ui-select-choices>
</ui-select>
</div>
<div class="form-group" ng-show="selectedQuery">
<label>Name</label>
<input type="string" placeholder="{{getDefaultName()}}" class="form-control" ng-model="alert.name">
</div>
<div ng-show="queryResult" class="form-horizontal">
<div class="form-group">
<label class="control-label col-md-2">Value column</label>
<div class="col-md-4">
<select ng-options="name for name in queryResult.getColumnNames()" ng-model="alert.options.column" class="form-control"></select>
</div>
<label class="control-label col-md-2">Value</label>
<div class="col-md-4">
<p class="form-control-static">{{queryResult.getData()[0][alert.options.column]}}</p>
</div>
</div>
<div class="form-group">
<label class="control-label col-md-2">Op</label>
<div class="col-md-4">
<select ng-options="name for name in ops" ng-model="alert.options.op" class="form-control"></select>
</div>
<label class="control-label col-md-2">Reference</label>
<div class="col-md-4">
<input type="number" class="form-control" ng-model="alert.options.value" placeholder="reference value" required/>
</div>
</div>
</div>
<div class="form-group">
<button class="btn btn-primary" ng-disabled="!alertForm.$valid">Save</button>
</div>
</form>
</div>
<div class="col-md-4" ng-if="alert.id">
<alert-subscribers alert-id="alert.id"></alert-subscribers>
</div>
</div>
</div>

View File

@@ -0,0 +1,16 @@
<div class="container">
<ol class="breadcrumb">
<li class="active">Alerts</li>
</ol>
<div class="row">
<div class="col-md-12">
<p>
<a href="/alerts/new" class="btn btn-default"><i class="fa fa-plus"></i> New Alert</a>
</p>
<smart-table rows="alerts" columns="gridColumns"
config="gridConfig"
class="table table-condensed table-hover"></smart-table>
</div>
</div>
</div>

View File

@@ -0,0 +1,4 @@
<div>
<strong>Subscribers</strong> <subscribe-button alert-id="alertId" subscribers="subscribers"></subscribe-button><br/>
<img ng-src="{{s.user.gravatar_url}}" class="img-circle" alt="{{s.user.name}}" ng-repeat="s in subscribers"/>
</div>

View File

@@ -0,0 +1,11 @@
<div class="container">
<ol class="breadcrumb">
<li><a href="/data_sources">Data Sources</a></li>
<li class="active">{{dataSource.name || "New"}}</li>
</ol>
<div class="row">
<div class="col-md-8">
<data-source-form data-data-source="dataSource" />
</div>
</div>
</div>

View File

@@ -0,0 +1,20 @@
<form name="dataSourceForm" ng-submit="saveChanges()">
<div class="form-group">
<label for="dataSourceName">Name</label>
<input type="string" class="form-control" name="dataSourceName" ng-model="dataSource.name" required>
</div>
<div class="form-group">
<label for="type">Type</label>
<select name="type" class="form-control" ng-options="type.type as type.name for type in dataSourceTypes" ng-model="dataSource.type"></select>
</div>
<div class="form-group" ng-class='{"has-error": !inner.input.$valid}' ng-form="inner" ng-repeat="(name, input) in type.configuration_schema.properties">
<label>{{input.title || name | capitalize}}</label>
<input name="input" type="{{input.type}}" class="form-control" ng-model="dataSource.options[name]" ng-required="input.required"
ng-if="input.type !== 'file'" accesskey="tab">
<input name="input" type="file" class="form-control" ng-model="files[name]" ng-required="input.required"
base-sixty-four-input
ng-if="input.type === 'file'">
</div>
<button class="btn btn-primary" ng-disabled="!dataSourceForm.$valid">Save</button>
</form>

View File

@@ -0,0 +1,18 @@
<div class="container">
<ol class="breadcrumb">
<li class="active">Data Sources</li>
</ol>
<div class="row">
<div class="col-md-4">
<div class="list-group">
<div class="list-group-item clickable" ng-repeat="dataSource in dataSources" ng-click="openDataSource(dataSource)">
<i class="fa fa-database"></i> {{dataSource.name}}
<button class="btn btn-xs btn-danger pull-right" ng-click="deleteDataSource($event, dataSource)">Delete</button>
</div>
<a ng-href="/data_sources/new" class="list-group-item">
<i class="fa fa-plus"></i> Add Data Source
</a>
</div>
</div>
</div>
</div>

View File

@@ -1,9 +1,16 @@
<div class="container">
<div class="row">
<p>
<a href="/queries/new" class="btn btn-default">New Query</a>
<button ng-show="currentUser.hasPermission('create_dashboard')" type="button" class="btn btn-default" data-toggle="modal" href="#new_dashboard_dialog">New Dashboard</button>
<a href="/alerts/new" class="btn btn-default">New Alert</a>
</p>
</div>
<div class="row">
<div class="list-group col-md-6">
<div class="list-group-item active">
Recent Dashboards
<button ng-show="currentUser.hasPermission('create_dashboard')" type="button" class="btn btn-sm btn-link" data-toggle="modal" href="#new_dashboard_dialog" tooltip="New Dashboard"><span class="glyphicon glyphicon-plus-sign"></span></button>
</div>
<div class="list-group-item" ng-repeat="dashboard in recentDashboards" >
<button type="button" class="close delete-button" aria-hidden="true" ng-show="dashboard.canEdit()" ng-click="archiveDashboard(dashboard)" tooltip="Delete Dashboard">&times;</button>

View File

@@ -138,7 +138,7 @@
</p>
<p>
<span class="glyphicon glyphicon-hdd"></span>
<i class="fa fa-database"></i>
<span class="text-muted">Data Source</span>
<select ng-disabled="!isQueryOwner" ng-model="query.data_source_id" ng-change="updateDataSource()" ng-options="ds.id as ds.name for ds in dataSources"></select>
</p>

View File

@@ -1,8 +1,17 @@
<div class="well well-sm" ng-show="filters">
<div ng-repeat="filter in filters">
{{filter.friendlyName}}:
<select ui-select2='select2Options' ng-model="filter.current" ng-multiple="{{filter.multiple}}">
<option ng-repeat="value in filter.values" value="{{value}}">{{value}}</option>
</select>
<div class="well well-sm filters-container" ng-show="filters">
<div class="filter" ng-repeat="filter in filters">
<ui-select ng-model="filter.current" ng-if="!filter.multiple">
<ui-select-match placeholder="Select value for {{filter.friendlyName}}...">{{filter.friendlyName}}: {{$select.selected}}</ui-select-match>
<ui-select-choices repeat="value in filter.values | filter: $select.search track by $index">
{{value}}
</ui-select-choices>
</ui-select>
<ui-select ng-model="filter.current" multiple ng-if="filter.multiple">
<ui-select-match placeholder="Select value for {{filter.friendlyName}}...">{{filter.friendlyName}}: {{$item}}</ui-select-match>
<ui-select-choices repeat="value in filter.values | filter: $select.search track by $index">
{{value}}
</ui-select-choices>
</ui-select>
</div>
</div>

View File

@@ -19,18 +19,18 @@
"cornelius": "https://github.com/restorando/cornelius.git",
"gridster": "0.2.0",
"mousetrap": "~1.4.6",
"angular-ui-select2": "~0.0.5",
"jquery-ui": "~1.10.4",
"underscore.string": "~2.3.3",
"marked": "~0.3.2",
"bucky": "~0.2.6",
"pace": "~0.5.1",
"angular-ui-select": "0.8.2",
"font-awesome": "~4.2.0",
"mustache": "~1.0.0",
"canvg": "gabelerner/canvg",
"angular-ui-bootstrap-bower": "~0.12.1",
"leaflet":"~0.7.3"
"leaflet": "~0.7.3",
"angular-base64-upload": "~0.1.11",
"angular-ui-select": "0.8.2"
},
"devDependencies": {
"angular-mocks": "1.2.18",

View File

@@ -36,6 +36,7 @@
"node": ">=0.10.0"
},
"scripts": {
"test": "grunt test"
"test": "grunt test",
"bower": "bower"
}
}

View File

@@ -2,11 +2,12 @@ import logging
import urlparse
import redis
from statsd import StatsClient
from flask_mail import Mail
from redash import settings
from redash.query_runner import import_query_runners
__version__ = '0.6.3'
__version__ = '0.7.1'
def setup_logging():
@@ -32,6 +33,8 @@ def create_redis_connection():
setup_logging()
redis_connection = create_redis_connection()
mail = Mail()
mail.init_mail(settings.all_settings())
statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX)
import_query_runners(settings.QUERY_RUNNERS)

View File

@@ -53,7 +53,8 @@ class PasswordHashField(fields.PasswordField):
class PgModelConverter(CustomModelConverter):
def __init__(self, view, additional=None):
additional = {ArrayField: self.handle_array_field,
DateTimeTZField: self.handle_datetime_tz_field}
DateTimeTZField: self.handle_datetime_tz_field,
}
super(PgModelConverter, self).__init__(view, additional)
self.view = view
@@ -66,6 +67,7 @@ class PgModelConverter(CustomModelConverter):
class BaseModelView(ModelView):
column_display_pk = True
model_form_converter = PgModelConverter
@require_permission('admin')
@@ -84,33 +86,25 @@ class UserModelView(BaseModelView):
}
def query_runner_type_formatter(view, context, model, name):
qr = query_runner.query_runners.get(model.type, None)
if qr:
return qr.name()
return model.type
class QueryResultModelView(BaseModelView):
column_exclude_list = ('data',)
class DataSourceModelView(BaseModelView):
form_overrides = dict(type=fields.SelectField, options=JSONTextAreaField)
form_args = dict(type={
'choices': [(k, r.name()) for k, r in query_runner.query_runners.iteritems()]
})
column_formatters = dict(type=query_runner_type_formatter)
column_filters = ('type',)
class QueryModelView(BaseModelView):
column_exclude_list = ('latest_query_data',)
class DashboardModelView(BaseModelView):
column_searchable_list = ('name', 'slug')
def init_admin(app):
admin = Admin(app, name='re:dash admin')
admin = Admin(app, name='re:dash admin', template_mode='bootstrap3')
views = {
models.User: UserModelView(models.User),
models.DataSource: DataSourceModelView(models.DataSource)
}
admin.add_view(UserModelView(models.User))
admin.add_view(QueryModelView(models.Query))
admin.add_view(QueryResultModelView(models.QueryResult))
admin.add_view(DashboardModelView(models.Dashboard))
for m in models.all_models:
if m in views:
admin.add_view(views[m])
else:
admin.add_view(BaseModelView(m))
for m in (models.Visualization, models.Widget, models.ActivityLog, models.Group, models.Event):
admin.add_view(BaseModelView(m))

View File

@@ -4,8 +4,10 @@ import time
import logging
from flask.ext.login import LoginManager
from flask.ext.login import user_logged_in
from redash import models, settings, google_oauth, saml_auth
from redash.tasks import record_event
login_manager = LoginManager()
logger = logging.getLogger('authentication')
@@ -73,6 +75,17 @@ def api_key_load_user_from_request(request):
return user
def log_user_logged_in(app, user):
event = {
'user_id': user.id,
'action': 'login',
'object_type': 'redash',
'timestamp': int(time.time()),
}
record_event.delay(event)
def setup_authentication(app):
login_manager.init_app(app)
login_manager.anonymous_user = models.AnonymousUser
@@ -81,6 +94,8 @@ def setup_authentication(app):
app.register_blueprint(google_oauth.blueprint)
app.register_blueprint(saml_auth.blueprint)
user_logged_in.connect(log_user_logged_in)
if settings.AUTH_TYPE == 'hmac':
login_manager.request_loader(hmac_load_user_from_request)
elif settings.AUTH_TYPE == 'api_key':

View File

@@ -13,10 +13,14 @@ import logging
from flask import render_template, send_from_directory, make_response, request, jsonify, redirect, \
session, url_for, current_app, flash
from flask.ext.restful import Resource, abort
from flask.ext.restful import Resource, abort, reqparse
from flask_login import current_user, login_user, logout_user, login_required
from funcy import project
import sqlparse
from itertools import chain
from funcy import distinct
from redash import statsd_client, models, settings, utils
from redash.wsgi import app, api
from redash.tasks import QueryTask, record_event
@@ -31,9 +35,14 @@ def ping():
return 'PONG.'
@app.route('/admin/<anything>/<whatever>')
@app.route('/admin/<anything>')
@app.route('/dashboard/<anything>')
@app.route('/alerts')
@app.route('/alerts/<pk>')
@app.route('/queries')
@app.route('/data_sources')
@app.route('/data_sources/<pk>')
@app.route('/queries/<query_id>')
@app.route('/queries/<query_id>/<anything>')
@app.route('/personal')
@@ -53,7 +62,8 @@ def index(**kwargs):
}
features = {
'clientSideMetrics': settings.CLIENT_SIDE_METRICS
'clientSideMetrics': settings.CLIENT_SIDE_METRICS,
'allowAllToEditQueries': settings.FEATURE_ALLOW_ALL_TO_EDIT_QUERIES
}
return render_template("index.html", user=json.dumps(user), name=settings.NAME,
@@ -74,13 +84,15 @@ def login():
if request.method == 'POST':
try:
user = models.User.get_by_email(request.form['username'])
user = models.User.get_by_email(request.form['email'])
if user and user.verify_password(request.form['password']):
remember = ('remember' in request.form)
login_user(user, remember=remember)
return redirect(request.args.get('next') or '/')
else:
flash("Wrong email or password.")
except models.User.DoesNotExist:
flash("Wrong username or password.")
flash("Wrong email or password.")
return render_template("login.html",
name=settings.NAME,
@@ -179,6 +191,34 @@ class DataSourceTypeListAPI(BaseResource):
api.add_resource(DataSourceTypeListAPI, '/api/data_sources/types', endpoint='data_source_types')
class DataSourceAPI(BaseResource):
@require_permission('admin')
def get(self, data_source_id):
data_source = models.DataSource.get_by_id(data_source_id)
return data_source.to_dict(all=True)
@require_permission('admin')
def post(self, data_source_id):
data_source = models.DataSource.get_by_id(data_source_id)
req = request.get_json(True)
if not validate_configuration(req['type'], req['options']):
abort(400)
data_source.name = req['name']
data_source.options = json.dumps(req['options'])
data_source.save()
return data_source.to_dict(all=True)
@require_permission('admin')
def delete(self, data_source_id):
data_source = models.DataSource.get_by_id(data_source_id)
data_source.delete_instance(recursive=True)
return make_response('', 204)
class DataSourceListAPI(BaseResource):
def get(self):
data_sources = [ds.to_dict() for ds in models.DataSource.all()]
@@ -195,11 +235,12 @@ class DataSourceListAPI(BaseResource):
if not validate_configuration(req['type'], req['options']):
abort(400)
datasource = models.DataSource.create(name=req['name'], type=req['type'], options=req['options'])
datasource = models.DataSource.create(name=req['name'], type=req['type'], options=json.dumps(req['options']))
return datasource.to_dict()
return datasource.to_dict(all=True)
api.add_resource(DataSourceListAPI, '/api/data_sources', endpoint='data_sources')
api.add_resource(DataSourceAPI, '/api/data_sources/<data_source_id>', endpoint='data_source')
class DataSourceSchemaAPI(BaseResource):
@@ -211,9 +252,16 @@ class DataSourceSchemaAPI(BaseResource):
api.add_resource(DataSourceSchemaAPI, '/api/data_sources/<data_source_id>/schema')
class DashboardRecentAPI(BaseResource):
def get(self):
return [d.to_dict() for d in models.Dashboard.recent(current_user.id).limit(20)]
recent = [d.to_dict() for d in models.Dashboard.recent(current_user.id)]
global_recent = []
if len(recent) < 10:
global_recent = [d.to_dict() for d in models.Dashboard.recent()]
return distinct(chain(recent, global_recent), key=lambda d: d['id'])
class DashboardListAPI(BaseResource):
@@ -317,7 +365,13 @@ class QuerySearchAPI(BaseResource):
class QueryRecentAPI(BaseResource):
@require_permission('view_query')
def get(self):
return [q.to_dict() for q in models.Query.recent(current_user.id).limit(20)]
recent = [d.to_dict() for d in models.Query.recent(current_user.id)]
global_recent = []
if len(recent) < 10:
global_recent = [d.to_dict() for d in models.Query.recent()]
return distinct(chain(recent, global_recent), key=lambda d: d['id'])
class QueryListAPI(BaseResource):
@@ -354,7 +408,9 @@ class QueryAPI(BaseResource):
if 'data_source_id' in query_def:
query_def['data_source'] = query_def.pop('data_source_id')
query_def['last_modified_by'] = self.current_user
# Don't set "last_modified_by" if the user only refreshing this query
if not ('latest_query_data' in query_def and len(query_def.keys()) == 1):
query_def['last_modified_by'] = self.current_user
# TODO: use #save() with #dirty_fields.
models.Query.update_instance(query_id, **query_def)
@@ -575,6 +631,105 @@ class JobAPI(BaseResource):
api.add_resource(JobAPI, '/api/jobs/<job_id>', endpoint='job')
class AlertAPI(BaseResource):
def get(self, alert_id):
alert = models.Alert.get_by_id(alert_id)
return alert.to_dict()
def post(self, alert_id):
req = request.get_json(True)
params = project(req, ('options', 'name', 'query_id'))
alert = models.Alert.get_by_id(alert_id)
if 'query_id' in params:
params['query'] = params.pop('query_id')
alert.update_instance(**params)
record_event.delay({
'user_id': self.current_user.id,
'action': 'edit',
'timestamp': int(time.time()),
'object_id': alert.id,
'object_type': 'alert'
})
return alert.to_dict()
class AlertListAPI(BaseResource):
def post(self):
req = request.get_json(True)
required_fields = ('options', 'name', 'query_id')
for f in required_fields:
if f not in req:
abort(400)
alert = models.Alert.create(
name=req['name'],
query=req['query_id'],
user=self.current_user,
options=req['options']
)
record_event.delay({
'user_id': self.current_user.id,
'action': 'create',
'timestamp': int(time.time()),
'object_id': alert.id,
'object_type': 'alert'
})
# TODO: should be in model?
models.AlertSubscription.create(alert=alert, user=self.current_user)
record_event.delay({
'user_id': self.current_user.id,
'action': 'subscribe',
'timestamp': int(time.time()),
'object_id': alert.id,
'object_type': 'alert'
})
return alert.to_dict()
def get(self):
return [alert.to_dict() for alert in models.Alert.all()]
class AlertSubscriptionListResource(BaseResource):
def post(self, alert_id):
subscription = models.AlertSubscription.create(alert=alert_id, user=self.current_user)
record_event.delay({
'user_id': self.current_user.id,
'action': 'subscribe',
'timestamp': int(time.time()),
'object_id': alert_id,
'object_type': 'alert'
})
return subscription.to_dict()
def get(self, alert_id):
subscriptions = models.AlertSubscription.all(alert_id)
return [s.to_dict() for s in subscriptions]
class AlertSubscriptionResource(BaseResource):
def delete(self, alert_id, subscriber_id):
models.AlertSubscription.unsubscribe(alert_id, subscriber_id)
record_event.delay({
'user_id': self.current_user.id,
'action': 'unsubscribe',
'timestamp': int(time.time()),
'object_id': alert_id,
'object_type': 'alert'
})
api.add_resource(AlertAPI, '/api/alerts/<alert_id>', endpoint='alert')
api.add_resource(AlertSubscriptionListResource, '/api/alerts/<alert_id>/subscriptions', endpoint='alert_subscriptions')
api.add_resource(AlertSubscriptionResource, '/api/alerts/<alert_id>/subscriptions/<subscriber_id>', endpoint='alert_subscription')
api.add_resource(AlertListAPI, '/api/alerts', endpoint='alerts')
@app.route('/<path:filename>')
def send_static(filename):
if current_app.debug:
@@ -583,7 +738,3 @@ def send_static(filename):
cache_timeout = None
return send_from_directory(settings.STATIC_ASSETS_PATH, filename, cache_timeout=cache_timeout)
if __name__ == '__main__':
app.run(debug=True)

View File

@@ -77,6 +77,17 @@ class BaseModel(peewee.Model):
super(BaseModel, self).save(*args, **kwargs)
self.post_save(created)
def update_instance(self, **kwargs):
for k, v in kwargs.items():
# setattr(model_instance, field_name, field_obj.python_value(value))
setattr(self, k, v)
dirty_fields = self.dirty_fields
if hasattr(self, 'updated_at'):
dirty_fields = dirty_fields + [self.__class__.updated_at]
self.save(only=dirty_fields)
class ModelTimestampsMixin(BaseModel):
updated_at = DateTimeTZField(default=datetime.datetime.now)
@@ -163,6 +174,7 @@ class User(ModelTimestampsMixin, BaseModel, UserMixin, PermissionsCheckMixin):
'id': self.id,
'name': self.name,
'email': self.email,
'gravatar_url': self.gravatar_url,
'updated_at': self.updated_at,
'created_at': self.created_at
}
@@ -177,6 +189,11 @@ class User(ModelTimestampsMixin, BaseModel, UserMixin, PermissionsCheckMixin):
if not self.api_key:
self.api_key = generate_token(40)
@property
def gravatar_url(self):
email_md5 = hashlib.md5(self.email.lower()).hexdigest()
return "https://www.gravatar.com/avatar/%s?s=40" % email_md5
@property
def permissions(self):
# TODO: this should be cached.
@@ -201,7 +218,7 @@ class User(ModelTimestampsMixin, BaseModel, UserMixin, PermissionsCheckMixin):
return cls.get(cls.api_key == api_key)
def __unicode__(self):
return '%r, %r' % (self.name, self.email)
return u'%s (%s)' % (self.name, self.email)
def hash_password(self, password):
self.password_hash = pwd_context.encrypt(password)
@@ -247,14 +264,24 @@ class DataSource(BaseModel):
class Meta:
db_table = 'data_sources'
def to_dict(self):
return {
def to_dict(self, all=False):
d = {
'id': self.id,
'name': self.name,
'type': self.type,
'syntax': self.query_runner.syntax
}
if all:
d['options'] = json.loads(self.options)
d['queue_name'] = self.queue_name
d['scheduled_queue_name'] = self.scheduled_queue_name
return d
def __unicode__(self):
return self.name
def get_schema(self, refresh=False):
key = "data_source:schema:{}".format(self.id)
@@ -281,6 +308,14 @@ class DataSource(BaseModel):
return cls.select().order_by(cls.id.asc())
class JSONField(peewee.TextField):
def db_value(self, value):
return json.dumps(value)
def python_value(self, value):
return json.loads(value)
class QueryResult(BaseModel):
id = peewee.PrimaryKeyField()
data_source = peewee.ForeignKeyField(DataSource)
@@ -338,13 +373,17 @@ class QueryResult(BaseModel):
logging.info("Inserted query (%s) data; id=%s", query_hash, query_result.id)
updated_count = Query.update(latest_query_data=query_result).\
where(Query.query_hash==query_hash, Query.data_source==data_source_id).\
execute()
sql = "UPDATE queries SET latest_query_data_id = %s WHERE query_hash = %s AND data_source_id = %s RETURNING id"
query_ids = [row[0] for row in db.database.execute_sql(sql, params=(query_result.id, query_hash, data_source_id))]
logging.info("Updated %s queries with result (%s).", updated_count, query_hash)
# TODO: when peewee with update & returning support is released, we can get back to using this code:
# updated_count = Query.update(latest_query_data=query_result).\
# where(Query.query_hash==query_hash, Query.data_source==data_source_id).\
# execute()
return query_result
logging.info("Updated %s queries with result (%s).", len(query_ids), query_hash)
return query_result, query_ids
def __unicode__(self):
return u"%d | %s | %s" % (self.id, self.query_hash, self.retrieved_at)
@@ -373,7 +412,7 @@ def should_schedule_next(previous_iteration, now, schedule):
class Query(ModelTimestampsMixin, BaseModel):
id = peewee.PrimaryKeyField()
data_source = peewee.ForeignKeyField(DataSource)
data_source = peewee.ForeignKeyField(DataSource, null=True)
latest_query_data = peewee.ForeignKeyField(QueryResult, null=True)
name = peewee.CharField(max_length=255)
description = peewee.CharField(max_length=4096, null=True)
@@ -472,18 +511,24 @@ class Query(ModelTimestampsMixin, BaseModel):
return cls.select().where(where).order_by(cls.created_at.desc())
@classmethod
def recent(cls, user_id):
def recent(cls, user_id=None, limit=20):
# TODO: instead of t2 here, we should define table_alias for Query table
return cls.select().where(Event.created_at > peewee.SQL("current_date - 7")).\
query = cls.select().where(Event.created_at > peewee.SQL("current_date - 7")).\
join(Event, on=(Query.id == peewee.SQL("t2.object_id::integer"))).\
where(Event.action << ('edit', 'execute', 'edit_name', 'edit_description', 'view_source')).\
where(Event.user == user_id).\
where(~(Event.object_id >> None)).\
where(Event.object_type == 'query'). \
where(cls.is_archived == False).\
group_by(Event.object_id, Query.id).\
order_by(peewee.SQL("count(0) desc"))
if user_id:
query = query.where(Event.user == user_id)
query = query.limit(limit)
return query
@classmethod
def update_instance(cls, query_id, **kwargs):
if 'query' in kwargs:
@@ -527,6 +572,83 @@ class Query(ModelTimestampsMixin, BaseModel):
return unicode(self.id)
class Alert(ModelTimestampsMixin, BaseModel):
UNKNOWN_STATE = 'unknown'
OK_STATE = 'ok'
TRIGGERED_STATE = 'triggered'
id = peewee.PrimaryKeyField()
name = peewee.CharField()
query = peewee.ForeignKeyField(Query, related_name='alerts')
user = peewee.ForeignKeyField(User, related_name='alerts')
options = JSONField()
state = peewee.CharField(default=UNKNOWN_STATE)
last_triggered_at = DateTimeTZField(null=True)
class Meta:
db_table = 'alerts'
@classmethod
def all(cls):
return cls.select(Alert, User, Query).join(Query).switch(Alert).join(User)
def to_dict(self):
return {
'id': self.id,
'name': self.name,
'query': self.query.to_dict(),
'user': self.user.to_dict(),
'options': self.options,
'state': self.state,
'last_triggered_at': self.last_triggered_at,
'updated_at': self.updated_at,
'created_at': self.created_at
}
def evaluate(self):
data = json.loads(self.query.latest_query_data.data)
# todo: safe guard for empty
value = data['rows'][0][self.options['column']]
op = self.options['op']
if op == 'greater than' and value > self.options['value']:
new_state = self.TRIGGERED_STATE
elif op == 'less than' and value < self.options['value']:
new_state = self.TRIGGERED_STATE
elif op == 'equals' and value == self.options['value']:
new_state = self.TRIGGERED_STATE
else:
new_state = self.OK_STATE
return new_state
def subscribers(self):
return User.select().join(AlertSubscription).where(AlertSubscription.alert==self)
class AlertSubscription(ModelTimestampsMixin, BaseModel):
user = peewee.ForeignKeyField(User)
alert = peewee.ForeignKeyField(Alert)
class Meta:
db_table = 'alert_subscriptions'
def to_dict(self):
return {
'user': self.user.to_dict(),
'alert_id': self._data['alert']
}
@classmethod
def all(cls, alert_id):
return AlertSubscription.select(AlertSubscription, User).join(User).where(AlertSubscription.alert==alert_id)
@classmethod
def unsubscribe(cls, alert_id, user_id):
query = AlertSubscription.delete().where(AlertSubscription.alert==alert_id).where(AlertSubscription.user==user_id)
return query.execute()
class Dashboard(ModelTimestampsMixin, BaseModel):
id = peewee.PrimaryKeyField()
slug = peewee.CharField(max_length=140, index=True)
@@ -587,16 +709,22 @@ class Dashboard(ModelTimestampsMixin, BaseModel):
return cls.get(cls.slug == slug)
@classmethod
def recent(cls, user_id):
return cls.select().where(Event.created_at > peewee.SQL("current_date - 7")). \
def recent(cls, user_id=None, limit=20):
query = cls.select().where(Event.created_at > peewee.SQL("current_date - 7")). \
join(Event, on=(Dashboard.id == peewee.SQL("t2.object_id::integer"))). \
where(Event.action << ('edit', 'view')).\
where(Event.user == user_id). \
where(~(Event.object_id >> None)). \
where(Event.object_type == 'dashboard'). \
group_by(Event.object_id, Dashboard.id). \
order_by(peewee.SQL("count(0) desc"))
if user_id:
query = query.where(Event.user == user_id)
query = query.limit(limit)
return query
def save(self, *args, **kwargs):
if not self.slug:
self.slug = utils.slugify(self.name)
@@ -716,7 +844,7 @@ class Event(BaseModel):
return event
all_models = (DataSource, User, QueryResult, Query, Dashboard, Visualization, Widget, ActivityLog, Group, Event)
all_models = (DataSource, User, QueryResult, Query, Alert, Dashboard, Visualization, Widget, ActivityLog, Group, Event)
def init_db():

View File

@@ -70,6 +70,13 @@ class BaseQueryRunner(object):
def get_schema(self):
return []
def _run_query_internal(self, query):
results, error = self.run_query(query)
if error is not None:
raise Exception("Failed running query [%s]." % query)
return json.loads(results)['rows']
@classmethod
def to_dict(cls):
return {
@@ -105,7 +112,11 @@ def validate_configuration(query_runner_type, configuration_json):
return False
try:
jsonschema.validate(json.loads(configuration_json), query_runner_class.configuration_schema())
if isinstance(configuration_json, basestring):
configuration = json.loads(configuration_json)
else:
configuration = configuration_json
jsonschema.validate(configuration, query_runner_class.configuration_schema())
except (ValidationError, ValueError):
return False

View File

@@ -1,3 +1,4 @@
from base64 import b64decode
import datetime
import json
import httplib2
@@ -89,20 +90,16 @@ class BigQuery(BaseQueryRunner):
return {
'type': 'object',
'properties': {
'serviceAccount': {
'type': 'string',
'title': 'Service Account'
},
'projectId': {
'type': 'string',
'title': 'Project ID'
},
'privateKey': {
'type': 'string',
'title': 'Private Key Path'
'jsonKeyFile': {
"type": "string",
'title': 'JSON Key File'
}
},
'required': ['serviceAccount', 'projectId', 'privateKey']
'required': ['jsonKeyFile', 'projectId']
}
def __init__(self, configuration_json):
@@ -113,8 +110,9 @@ class BigQuery(BaseQueryRunner):
"https://www.googleapis.com/auth/bigquery",
]
private_key = _load_key(self.configuration["privateKey"])
credentials = SignedJwtAssertionCredentials(self.configuration['serviceAccount'], private_key, scope=scope)
key = json.loads(b64decode(self.configuration['jsonKeyFile']))
credentials = SignedJwtAssertionCredentials(key['client_email'], key['private_key'], scope=scope)
http = httplib2.Http()
http = credentials.authorize(http)
@@ -201,4 +199,4 @@ class BigQueryGCE(BigQuery):
register(BigQuery)
register(BigQueryGCE)
register(BigQueryGCE)

View File

@@ -95,7 +95,8 @@ class ElasticSearch(BaseQueryRunner):
'type': 'object',
'properties': {
'server': {
'type': 'string'
'type': 'string',
'title': 'Base URL'
}
},
"required" : ["server"]

View File

@@ -0,0 +1,117 @@
from base64 import b64decode
import json
import logging
import sys
from redash.query_runner import *
from redash.utils import JSONEncoder
logger = logging.getLogger(__name__)
try:
import gspread
from oauth2client.client import SignedJwtAssertionCredentials
from dateutil import parser
enabled = True
except ImportError:
logger.warning("Missing dependencies. Please install gspread, dateutil and oauth2client.")
logger.warning("You can use pip: pip install gspread dateutil oauth2client")
enabled = False
def _load_key(filename):
with open(filename, "rb") as f:
return json.loads(f.read())
def _guess_type(value):
try:
val = int(value)
return TYPE_INTEGER, val
except ValueError:
pass
try:
val = float(value)
return TYPE_FLOAT, val
except ValueError:
pass
if str(value).lower() in ('true', 'false'):
return TYPE_BOOLEAN, str(value).lower() == 'true'
try:
val = parser.parse(value)
return TYPE_DATETIME, val
except ValueError:
pass
return TYPE_STRING, value
class GoogleSpreadsheet(BaseQueryRunner):
HEADER_INDEX = 0
@classmethod
def annotate_query(cls):
return False
@classmethod
def type(cls):
return "google_spreadsheets"
@classmethod
def enabled(cls):
return enabled
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'jsonKeyFile': {
"type": "string",
'title': 'JSON Key File'
}
},
'required': ['jsonKeyFile']
}
def __init__(self, configuration_json):
super(GoogleSpreadsheet, self).__init__(configuration_json)
def _get_spreadsheet_service(self):
scope = [
'https://spreadsheets.google.com/feeds',
]
key = json.loads(b64decode(self.configuration['jsonKeyFile']))
credentials = SignedJwtAssertionCredentials(key['client_email'], key["private_key"], scope=scope)
spreadsheetservice = gspread.authorize(credentials)
return spreadsheetservice
def run_query(self, query):
logger.debug("Spreadsheet is about to execute query: %s", query)
values = query.split("|")
key = values[0]  # key of the spreadsheet
worksheet_num = 0 if len(values) != 2 else int(values[1])  # if the spreadsheet contains more than one worksheet, this is its index
try:
spreadsheet_service = self._get_spreadsheet_service()
spreadsheet = spreadsheet_service.open_by_key(key)
worksheets = spreadsheet.worksheets()
all_data = worksheets[worksheet_num].get_all_values()
column_names = []
columns = []
for j, column_name in enumerate(all_data[self.HEADER_INDEX]):
column_names.append(column_name)
columns.append({
'name': column_name,
'friendly_name': column_name,
'type': _guess_type(all_data[self.HEADER_INDEX+1][j])[0]  # _guess_type returns a (type, converted value) tuple
})
rows = [dict(zip(column_names, row)) for row in all_data[self.HEADER_INDEX+1:]]
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
error = None
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
return json_data, error
register(GoogleSpreadsheet)
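The query string for this runner is the spreadsheet key, optionally followed by "|<worksheet index>". A usage sketch (the key and configuration value are made up):

import json

runner = GoogleSpreadsheet(json.dumps({'jsonKeyFile': '<base64-encoded JSON key>'}))
# Read the second worksheet (index 1) of the given spreadsheet:
json_data, error = runner.run_query("1BxiMVs0XRA5nFMdKvBdB|1")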

View File

@@ -0,0 +1,134 @@
import json
import logging
import sys
from redash.query_runner import *
from redash.utils import JSONEncoder
logger = logging.getLogger(__name__)
try:
from pyhive import hive
enabled = True
except ImportError, e:
logger.warning("Missing dependencies. Please install pyhive.")
logger.warning("You can use pip: pip install pyhive")
enabled = False
COLUMN_NAME = 0
COLUMN_TYPE = 1
types_map = {
'BIGINT': TYPE_INTEGER,
'TINYINT': TYPE_INTEGER,
'SMALLINT': TYPE_INTEGER,
'INT': TYPE_INTEGER,
'DOUBLE': TYPE_FLOAT,
'DECIMAL': TYPE_FLOAT,
'FLOAT': TYPE_FLOAT,
'REAL': TYPE_FLOAT,
'BOOLEAN': TYPE_BOOLEAN,
'TIMESTAMP': TYPE_DATETIME,
'DATE': TYPE_DATETIME,
'CHAR': TYPE_STRING,
'STRING': TYPE_STRING,
'VARCHAR': TYPE_STRING
}
class Hive(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
"type": "object",
"properties": {
"host": {
"type": "string"
},
"port": {
"type": "number"
},
"database": {
"type": "string"
},
"username": {
"type": "string"
}
},
"required": ["host"]
}
@classmethod
def annotate_query(cls):
return False
@classmethod
def type(cls):
return "hive"
def __init__(self, configuration_json):
super(Hive, self).__init__(configuration_json)
def get_schema(self):
try:
schemas_query = "show schemas"
tables_query = "show tables in %s"
columns_query = "show columns in %s"
schema = {}
for schema_name in filter(lambda a: len(a) > 0, map(lambda a: str(a['database_name']), self._run_query_internal(schemas_query))):
for table_name in filter(lambda a: len(a) > 0, map(lambda a: str(a['tab_name']), self._run_query_internal(tables_query % schema_name))):
columns = filter(lambda a: len(a) > 0, map(lambda a: str(a['field']), self._run_query_internal(columns_query % table_name)))
if schema_name != 'default':
table_name = '{}.{}'.format(schema_name, table_name)
schema[table_name] = {'name': table_name, 'columns': columns}
except Exception, e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
return schema.values()
def run_query(self, query):
connection = None
try:
connection = hive.connect(**self.configuration)
cursor = connection.cursor()
cursor.execute(query)
column_names = []
columns = []
for column in cursor.description:
column_name = column[COLUMN_NAME]
column_names.append(column_name)
columns.append({
'name': column_name,
'friendly_name': column_name,
'type': types_map.get(column[COLUMN_TYPE], None)
})
rows = [dict(zip(column_names, row)) for row in cursor]
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
error = None
cursor.close()
except KeyboardInterrupt:
connection.cancel()
error = "Query cancelled by user."
json_data = None
except Exception as e:
logging.exception(e)
raise sys.exc_info()[1], None, sys.exc_info()[2]
finally:
connection.close()
return json_data, error
register(Hive)
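A configuration sketch matching the schema above; only "host" is required, and the keys feed straight into hive.connect(**self.configuration) (values are examples):

import json

hive_runner = Hive(json.dumps({
    "host": "hive.example.com",
    "port": 10000,
    "database": "default",
    "username": "redash"
}))
json_data, error = hive_runner.run_query("SELECT 1")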

View File

@@ -0,0 +1,151 @@
import json
import logging
import sys
from redash.query_runner import *
from redash.utils import JSONEncoder
logger = logging.getLogger(__name__)
try:
from impala.dbapi import connect
from impala.error import DatabaseError, RPCError
enabled = True
except ImportError, e:
logger.warning("Missing dependencies. Please install impyla.")
logger.warning("You can use pip: pip install impyla")
enabled = False
COLUMN_NAME = 0
COLUMN_TYPE = 1
types_map = {
'BIGINT': TYPE_INTEGER,
'TINYINT': TYPE_INTEGER,
'SMALLINT': TYPE_INTEGER,
'INT': TYPE_INTEGER,
'DOUBLE': TYPE_FLOAT,
'DECIMAL': TYPE_FLOAT,
'FLOAT': TYPE_FLOAT,
'REAL': TYPE_FLOAT,
'BOOLEAN': TYPE_BOOLEAN,
'TIMESTAMP': TYPE_DATETIME,
'CHAR': TYPE_STRING,
'STRING': TYPE_STRING,
'VARCHAR': TYPE_STRING
}
class Impala(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
"type": "object",
"properties": {
"host": {
"type": "string"
},
"port": {
"type": "number"
},
"protocol": {
"type": "string",
"title": "Please specify beeswax or hiveserver2"
},
"database": {
"type": "string"
},
"use_ldap": {
"type": "boolean"
},
"ldap_user": {
"type": "string"
},
"ldap_password": {
"type": "string"
},
"timeout": {
"type": "number"
}
},
"required": ["host"]
}
@classmethod
def type(cls):
return "impala"
def __init__(self, configuration_json):
super(Impala, self).__init__(configuration_json)
def get_schema(self):
try:
schemas_query = "show schemas;"
tables_query = "show tables in %s;"
columns_query = "show column stats %s;"
schema = {}
for schema_name in map(lambda a: a['name'], self._run_query_internal(schemas_query)):
for table_name in map(lambda a: a['name'], self._run_query_internal(tables_query % schema_name)):
columns = map(lambda a: a['Column'], self._run_query_internal(columns_query % table_name))
if schema_name != 'default':
table_name = '{}.{}'.format(schema_name, table_name)
schema[table_name] = {'name': table_name, 'columns': columns}
except Exception, e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
return schema.values()
def run_query(self, query):
connection = None
try:
connection = connect(**self.configuration)
cursor = connection.cursor()
cursor.execute(query)
column_names = []
columns = []
for column in cursor.description:
column_name = column[COLUMN_NAME]
column_names.append(column_name)
columns.append({
'name': column_name,
'friendly_name': column_name,
'type': types_map.get(column[COLUMN_TYPE], None)
})
rows = [dict(zip(column_names, row)) for row in cursor]
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
error = None
cursor.close()
except DatabaseError as e:
logging.exception(e)
json_data = None
error = e.message
except RPCError as e:
logging.exception(e)
json_data = None
error = "Metastore Error [%s]" % e.message
except KeyboardInterrupt:
connection.cancel()
error = "Query cancelled by user."
json_data = None
except Exception as e:
logging.exception(e)
raise sys.exc_info()[1], None, sys.exc_info()[2]
finally:
connection.close()
return json_data, error
register(Impala)
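As with Hive, the configuration keys map straight into connect(**self.configuration); a sketch with LDAP enabled (values are examples):

import json

impala_runner = Impala(json.dumps({
    "host": "impala.example.com",
    "protocol": "hiveserver2",
    "use_ldap": True,
    "ldap_user": "redash",
    "ldap_password": "secret"
}))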

View File

@@ -3,6 +3,7 @@ import datetime
import logging
import re
import time
from dateutil.parser import parse
from redash.utils import JSONEncoder
from redash.query_runner import *
@@ -40,7 +41,6 @@ class MongoDBJSONEncoder(JSONEncoder):
return super(MongoDBJSONEncoder, self).default(o)
# Simple query example:
#
# {
@@ -147,14 +147,21 @@ class MongoDB(BaseQueryRunner):
return None
def _fix_dates(self, data):
for k in data:
if isinstance(data[k], list):
for i in range(0, len(data[k])):
self._fix_dates(data[k][i])
elif isinstance(data[k], dict):
self._fix_dates(data[k])
else:
if isinstance(data[k], (str, unicode)):
self._convert_date(data, k)
def _convert_date(self, q, field_name):
m = date_regex.findall(q[field_name])
if len(m) > 0:
if q[field_name].find(":") == -1:
q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d")))
else:
q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d %H:%M")))
q[field_name] = parse(m[0], yearfirst=True)
def run_query(self, query):
if self.is_replica_set:
@@ -162,16 +169,14 @@ class MongoDB(BaseQueryRunner):
else:
db_connection = pymongo.MongoClient(self.configuration["connectionString"])
if self.db_name not in db_connection.database_names():
return None, "Unknown database name '%s'" % self.db_name
db = db_connection[self.db_name ]
db = db_connection[self.db_name]
logger.debug("mongodb connection string: %s", self.configuration['connectionString'])
logger.debug("mongodb got query: %s", query)
try:
query_data = json.loads(query)
self._fix_dates(query_data)
except ValueError:
return None, "Invalid query format. The query is not a valid JSON."
@@ -180,23 +185,11 @@ class MongoDB(BaseQueryRunner):
else:
collection = query_data["collection"]
q = None
if "query" in query_data:
q = query_data["query"]
for k in q:
if q[k] and type(q[k]) in [str, unicode]:
logging.debug(q[k])
self._convert_date(q, k)
elif q[k] and type(q[k]) is dict:
for k2 in q[k]:
if type(q[k][k2]) in [str, unicode]:
self._convert_date(q[k], k2)
q = query_data.get("query", None)
f = None
aggregate = None
if "aggregate" in query_data:
aggregate = query_data["aggregate"]
aggregate = query_data.get("aggregate", None)
if aggregate:
for step in aggregate:
if "$sort" in step:
sort_list = []
@@ -205,9 +198,7 @@ class MongoDB(BaseQueryRunner):
step["$sort"] = SON(sort_list)
if aggregate:
pass
else:
if not aggregate:
s = None
if "sort" in query_data and query_data["sort"]:
s = []
@@ -226,9 +217,6 @@ class MongoDB(BaseQueryRunner):
columns = []
rows = []
error = None
json_data = None
cursor = None
if q or (not q and not aggregate):
if s:
@@ -244,7 +232,16 @@ class MongoDB(BaseQueryRunner):
elif aggregate:
r = db[collection].aggregate(aggregate)
cursor = r["result"]
# Backwards compatibility with older pymongo versions.
#
# Older pymongo version would return a dictionary from an aggregate command.
# The dict would contain a "result" key which would hold the cursor.
# Newer ones return pymongo.command_cursor.CommandCursor.
if isinstance(r, dict):
cursor = r["result"]
else:
cursor = r
for r in cursor:
for k in r:
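With the recursive _fix_dates, date-looking strings anywhere in the query document, including inside nested operators, are parsed via dateutil before the query runs. A sketch of a query this now handles (collection and field names are examples):

{
    "collection": "events",
    "query": {
        "created_at": {"$gte": "2015-07-01", "$lt": "2015-08-01 00:00"}
    }
}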

View File

@@ -103,17 +103,16 @@ class Mysql(BaseQueryRunner):
def run_query(self, query):
import MySQLdb
connection = MySQLdb.connect(host=self.configuration.get('host', ''),
user=self.configuration.get('user', ''),
passwd=self.configuration.get('passwd', ''),
db=self.configuration['db'],
port=self.configuration.get('port', 3306),
charset='utf8', use_unicode=True)
cursor = connection.cursor()
logger.debug("MySQL running query: %s", query)
connection = None
try:
connection = MySQLdb.connect(host=self.configuration.get('host', ''),
user=self.configuration.get('user', ''),
passwd=self.configuration.get('passwd', ''),
db=self.configuration['db'],
port=self.configuration.get('port', 3306),
charset='utf8', use_unicode=True)
cursor = connection.cursor()
logger.debug("MySQL running query: %s", query)
cursor.execute(query)
data = cursor.fetchall()
@@ -145,7 +144,8 @@ class Mysql(BaseQueryRunner):
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
finally:
connection.close()
if connection:
connection.close()
return json_data, error

View File

@@ -93,7 +93,7 @@ class PostgreSQL(BaseQueryRunner):
results, error = self.run_query(query)
if error is not None:
raise Exception("Failed getting schema.")
raise Exception("Failed getting schema.")
results = json.loads(results)
@@ -127,35 +127,38 @@ class PostgreSQL(BaseQueryRunner):
columns = []
duplicates_counter = 1
for column in cursor.description:
# TODO: this deduplication needs to be generalized and reused in all query runners.
column_name = column.name
if column_name in column_names:
column_name += str(duplicates_counter)
duplicates_counter += 1
if cursor.description is not None:
for column in cursor.description:
# TODO: this deduplication needs to be generalized and reused in all query runners.
column_name = column.name
if column_name in column_names:
column_name += str(duplicates_counter)
duplicates_counter += 1
column_names.append(column_name)
column_names.append(column_name)
columns.append({
'name': column_name,
'friendly_name': column_name,
'type': types_map.get(column.type_code, None)
})
columns.append({
'name': column_name,
'friendly_name': column_name,
'type': types_map.get(column.type_code, None)
})
rows = [dict(zip(column_names, row)) for row in cursor]
rows = [dict(zip(column_names, row)) for row in cursor]
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
error = None
cursor.close()
data = {'columns': columns, 'rows': rows}
error = None
json_data = json.dumps(data, cls=JSONEncoder)
else:
error = 'Query completed but it returned no data.'
json_data = None
except (select.error, OSError) as e:
logging.exception(e)
error = "Query interrupted. Please retry."
json_data = None
except psycopg2.DatabaseError as e:
logging.exception(e)
json_data = None
error = e.message
json_data = None
except KeyboardInterrupt:
connection.cancel()
error = "Query cancelled by user."

View File

@@ -0,0 +1,98 @@
import json
from redash.utils import JSONEncoder
from redash.query_runner import *
import logging
logger = logging.getLogger(__name__)
try:
from pyhive import presto
enabled = True
except ImportError:
logger.warning("Missing dependencies. Please install PyHive.")
logger.warning("You can use pip: pip install pyhive")
enabled = False
PRESTO_TYPES_MAPPING = {
"integer" : TYPE_INTEGER,
"long" : TYPE_INTEGER,
"bigint" : TYPE_INTEGER,
"float" : TYPE_FLOAT,
"double" : TYPE_FLOAT,
"boolean" : TYPE_BOOLEAN,
"string" : TYPE_STRING,
"varchar": TYPE_STRING,
"date" : TYPE_DATE,
}
class Presto(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'host': {
'type': 'string'
},
'port': {
'type': 'number'
},
'schema': {
'type': 'string'
},
'catalog': {
'type': 'string'
},
'username': {
'type': 'string'
}
},
'required': ['host']
}
@classmethod
def enabled(cls):
return enabled
@classmethod
def annotate_query(cls):
return False
@classmethod
def type(cls):
return "presto"
def __init__(self, configuration_json):
super(Presto, self).__init__(configuration_json)
def run_query(self, query):
connection = presto.connect(
host=self.configuration.get('host', ''),
port=self.configuration.get('port', 8080),
username=self.configuration.get('username', 'redash'),
catalog=self.configuration.get('catalog', 'hive'),
schema=self.configuration.get('schema', 'default'))
cursor = connection.cursor()
try:
cursor.execute(query)
columns_data = [(row[0], row[1]) for row in cursor.description]
columns = [{'name': col[0],
'friendly_name': col[0],
'type': PRESTO_TYPES_MAPPING.get(col[1], None)} for col in columns_data]
rows = [dict(zip([c[0] for c in columns_data], r)) for r in cursor.fetchall()]
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
error = None
except Exception, ex:
json_data = None
error = ex.message
return json_data, error
register(Presto)
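Only "host" is required; run_query falls back to port 8080, username "redash", catalog "hive" and schema "default". A minimal sketch (the host value is an example):

import json

presto_runner = Presto(json.dumps({'host': 'presto.example.com'}))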

View File

@@ -14,23 +14,30 @@ logger = logging.getLogger(__name__)
from RestrictedPython import compile_restricted
from RestrictedPython.Guards import safe_builtins
class CustomPrint(object):
""" CustomPrint redirect "print" calls to be sent as "log" on the result object """
def __init__(self, python_runner):
self._python_runner = python_runner
def __init__(self):
self.enabled = True
self.lines = []
def write(self, text):
if self._python_runner()._enable_print_log:
if self.enabled:
if text and text.strip():
log_line = "[{0}] {1}".format(datetime.datetime.utcnow().isoformat(), text)
self._python_runner()._result["log"].append(log_line)
self.lines.append(log_line)
def enable(self):
self.enabled = True
def disable(self):
self.enabled = False
def __call__(self):
return self
class Python(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
@@ -52,15 +59,14 @@ class Python(BaseQueryRunner):
return False
def __init__(self, configuration_json):
global ALLOWED_MODULES
super(Python, self).__init__(configuration_json)
self.syntax = "python"
self._allowed_modules = {}
self._result = { "rows" : [], "columns" : [], "log" : [] }
self._script_locals = { "result" : { "rows" : [], "columns" : [], "log" : [] } }
self._enable_print_log = True
self._custom_print = CustomPrint()
if self.configuration.get("allowedImportModules", None):
for item in self.configuration["allowedImportModules"].split(","):
@@ -92,12 +98,6 @@ class Python(BaseQueryRunner):
def custom_get_iter(self, obj):
return iter(obj)
def disable_print_log(self):
self._enable_print_log = False
def enable_print_log(self):
self._enable_print_log = True
def add_result_column(self, result, column_name, friendly_name, column_type):
""" Helper function to add columns inside a Python script running in re:dash in an easier way """
if column_type not in SUPPORTED_COLUMN_TYPES:
@@ -164,17 +164,15 @@ class Python(BaseQueryRunner):
safe_builtins["setattr"] = setattr
safe_builtins["_getitem_"] = self.custom_get_item
safe_builtins["_getiter_"] = self.custom_get_iter
safe_builtins["_print_"] = CustomPrint(weakref.ref(self))
script_locals = { "result" : self._result }
safe_builtins["_print_"] = self._custom_print
restricted_globals = dict(__builtins__=safe_builtins)
restricted_globals["get_query_result"] = self.get_query_result
restricted_globals["execute_query"] = self.execute_query
restricted_globals["add_result_column"] = self.add_result_column
restricted_globals["add_result_row"] = self.add_result_row
restricted_globals["disable_print_log"] = self.disable_print_log
restricted_globals["enable_print_log"] = self.enable_print_log
restricted_globals["disable_print_log"] = self._custom_print.disable
restricted_globals["enable_print_log"] = self._custom_print.enable
restricted_globals["TYPE_DATETIME"] = TYPE_DATETIME
restricted_globals["TYPE_BOOLEAN"] = TYPE_BOOLEAN
@@ -187,9 +185,11 @@ class Python(BaseQueryRunner):
# One option is to use ETA with Celery + timeouts on workers
# And replacement of worker process every X requests handled.
exec(code) in restricted_globals, script_locals
exec(code) in restricted_globals, self._script_locals
json_data = json.dumps(self._result)
result = self._script_locals['result']
result['log'] = self._custom_print.lines
json_data = json.dumps(result)
except KeyboardInterrupt:
error = "Query cancelled by user."
json_data = None
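Under the new CustomPrint, print output is buffered on the runner and attached to result['log'] once the script finishes. A sketch of a data source script using the helpers exposed in restricted_globals (column name and value are examples):

add_result_column(result, 'count', 'Count', TYPE_INTEGER)
add_result_row(result, {'count': 42})
print "captured into the query log"
disable_print_log()
print "dropped while print logging is disabled"
enable_print_log()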

View File

@@ -22,11 +22,10 @@ class Url(BaseQueryRunner):
return False
def run_query(self, query):
base_url = self.configuration["url"]
base_url = self.configuration.get("url", None)
try:
error = None
query = query.strip()
if base_url is not None and base_url != "":

View File

@@ -40,6 +40,17 @@ def parse_boolean(str):
return json.loads(str.lower())
def all_settings():
from types import ModuleType
settings = {}
for name, item in globals().iteritems():
if not callable(item) and not name.startswith("__") and not isinstance(item, ModuleType):
settings[name] = item
return settings
NAME = os.environ.get('REDASH_NAME', 're:dash')
REDIS_URL = os.environ.get('REDASH_REDIS_URL', "redis://localhost:6379/0")
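all_settings() snapshots every plain module-level value, skipping callables, dunder names and imported modules, so the whole settings module can be pushed into Flask's config in one call (see app.config.update(settings.all_settings()) in redash/wsgi.py below). A small sketch:

from redash import settings

config = settings.all_settings()
# e.g. config['NAME'] and config['REDIS_URL'] are present; the os and json modules are not.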
@@ -57,9 +68,9 @@ CELERY_BACKEND = os.environ.get("REDASH_CELERY_BACKEND", REDIS_URL)
# The following enables a periodic job (every 5 minutes) that removes unused query results. Behind this "feature flag" until
# proven to be "safe".
QUERY_RESULTS_CLEANUP_ENABLED = parse_boolean(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_ENABLED", "false"))
QUERY_RESULTS_CLEANUP_ENABLED = parse_boolean(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_ENABLED", "true"))
AUTH_TYPE = os.environ.get("REDASH_AUTH_TYPE", "hmac")
AUTH_TYPE = os.environ.get("REDASH_AUTH_TYPE", "api_key")
PASSWORD_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_PASSWORD_LOGIN_ENABLED", "true"))
# Google Apps domain to allow access from; any user with email in this Google Apps will be allowed
@@ -81,6 +92,19 @@ LOG_LEVEL = os.environ.get("REDASH_LOG_LEVEL", "INFO")
CLIENT_SIDE_METRICS = parse_boolean(os.environ.get("REDASH_CLIENT_SIDE_METRICS", "false"))
ANALYTICS = os.environ.get("REDASH_ANALYTICS", "")
# Mail settings:
MAIL_SERVER = os.environ.get('REDASH_MAIL_SERVER', 'localhost')
MAIL_PORT = int(os.environ.get('REDASH_MAIL_PORT', 25))
MAIL_USE_TLS = parse_boolean(os.environ.get('REDASH_MAIL_USE_TLS', 'false'))
MAIL_USE_SSL = parse_boolean(os.environ.get('REDASH_MAIL_USE_SSL', 'false'))
MAIL_USERNAME = os.environ.get('REDASH_MAIL_USERNAME', None)
MAIL_PASSWORD = os.environ.get('REDASH_MAIL_PASSWORD', None)
MAIL_DEFAULT_SENDER = os.environ.get('REDASH_MAIL_DEFAULT_SENDER', None)
MAIL_MAX_EMAILS = os.environ.get('REDASH_MAIL_MAX_EMAILS', None)
MAIL_ASCII_ATTACHMENTS = parse_boolean(os.environ.get('REDASH_MAIL_ASCII_ATTACHMENTS', 'false'))
HOST = os.environ.get('REDASH_HOST', '')
# CORS settings for the Query Result API (and possibly future external APIs).
# In most cases all you need to do is set REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN
# to the calling domain (or domains in a comma separated list).
@@ -92,14 +116,19 @@ ACCESS_CONTROL_ALLOW_HEADERS = os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_
# Query Runners
QUERY_RUNNERS = array_from_string(os.environ.get("REDASH_ENABLED_QUERY_RUNNERS", ",".join([
'redash.query_runner.big_query',
'redash.query_runner.google_spreadsheets',
'redash.query_runner.graphite',
'redash.query_runner.mongodb',
'redash.query_runner.mysql',
'redash.query_runner.pg',
'redash.query_runner.script',
'redash.query_runner.url',
'redash.query_runner.influx_db',
'redash.query_runner.elasticsearch',
'redash.query_runner.presto',
'redash.query_runner.hive_ds',
'redash.query_runner.impala_ds',
])))
# Features:
FEATURE_ALLOW_ALL_TO_EDIT_QUERIES = parse_boolean(os.environ.get("REDASH_FEATURE_ALLOW_ALL_TO_EDIT", "true"))
FEATURE_TABLES_PERMISSIONS = parse_boolean(os.environ.get("REDASH_FEATURE_TABLES_PERMISSIONS", "false"))
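The default runner list now includes the new Presto, Hive and Impala modules; a deployment can still narrow it through the environment before settings are imported. A sketch:

import os

# Must be set before redash.settings is imported.
os.environ["REDASH_ENABLED_QUERY_RUNNERS"] = "redash.query_runner.pg,redash.query_runner.presto"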

View File

@@ -1,10 +1,11 @@
import time
import logging
from flask.ext.mail import Message
import redis
from celery import Task
from celery.result import AsyncResult
from celery.utils.log import get_task_logger
from redash import redis_connection, models, statsd_client, settings, utils
from redash import redis_connection, models, statsd_client, settings, utils, mail
from redash.utils import gen_query_hash
from redash.worker import celery
from redash.query_runner import get_query_runner
@@ -222,7 +223,7 @@ def cleanup_query_results():
@celery.task(base=BaseTask)
def refresh_schemas():
"""
Refershs the datasources schema.
Refreshes the data sources schema.
"""
for ds in models.DataSource.all():
@@ -230,6 +231,39 @@ def refresh_schemas():
ds.get_schema(refresh=True)
@celery.task(bind=True, base=BaseTask)
def check_alerts_for_query(self, query_id):
from redash.wsgi import app
logger.debug("Checking query %d for alerts", query_id)
query = models.Query.get_by_id(query_id)
for alert in query.alerts:
alert.query = query
new_state = alert.evaluate()
if new_state != alert.state:
logger.info("Alert %d new state: %s", alert.id, new_state)
old_state = alert.state
alert.update_instance(state=new_state)
if old_state == models.Alert.UNKNOWN_STATE and new_state == models.Alert.OK_STATE:
logger.debug("Skipping notification (previous state was unknown and now it's ok).")
continue
# message = Message
recipients = [s.email for s in alert.subscribers()]
logger.debug("Notifying: %s", recipients)
html = """
Check <a href="{host}/alerts/{alert_id}">alert</a> / check <a href="{host}/queries/{query_id}">query</a>.
""".format(host=settings.HOST, alert_id=alert.id, query_id=query.id)
with app.app_context():
message = Message(recipients=recipients,
subject="[{1}] {0}".format(alert.name, new_state.upper()),
html=html)
mail.send(message)
@celery.task(bind=True, base=BaseTask, track_started=True)
def execute_query(self, query, data_source_id, metadata):
start_time = time.time()
@@ -271,7 +305,9 @@ def execute_query(self, query, data_source_id, metadata):
redis_connection.delete(QueryTask._job_lock_id(query_hash, data_source.id))
if not error:
query_result = models.QueryResult.store_result(data_source.id, query_hash, query, data, run_time, utils.utcnow())
query_result, updated_query_ids = models.QueryResult.store_result(data_source.id, query_hash, query, data, run_time, utils.utcnow())
for query_id in updated_query_ids:
check_alerts_for_query.delay(query_id)
else:
raise Exception(error)
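Since store_result now also returns the ids of the queries whose latest result changed, alert evaluation is kicked off only for those. The task can also be queued directly; a sketch (the query id is an example):

check_alerts_for_query.delay(42)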

View File

@@ -3,7 +3,7 @@ from flask import Flask, make_response
from werkzeug.wrappers import Response
from flask.ext.restful import Api
from redash import settings, utils
from redash import settings, utils, mail
from redash.models import db
from redash.admin import init_admin
@@ -22,7 +22,9 @@ init_admin(app)
# configure our database
settings.DATABASE_CONFIG.update({'threadlocals': True})
app.config['DATABASE'] = settings.DATABASE_CONFIG
app.config.update(settings.all_settings())
db.init_app(app)
mail.init_app(app)
from redash.authentication import setup_authentication
setup_authentication(app)

View File

@@ -3,6 +3,7 @@ Flask-Admin==1.1.0
Flask-RESTful==0.2.10
Flask-Login==0.2.11
Flask-OAuth==0.12
flask-mail==0.9.1
passlib==1.6.2
Jinja2==2.7.2
MarkupSafe==0.18
@@ -30,3 +31,4 @@ RestrictedPython==3.6.0
wtf-peewee==0.2.3
pysaml2==2.4.0
pycrypto==2.6.1
funcy==1.5

requirements_all_ds.txt Normal file
View File

@@ -0,0 +1,9 @@
google-api-python-client==1.2
gspread==0.2.5
impyla==0.10.0
influxdb==2.7.1
MySQL-python==1.2.5
oauth2client==1.2
pyhive==0.1.5
pymongo==2.7.2
pyOpenSSL==0.14

View File

@@ -18,6 +18,7 @@ fi
# Base packages
apt-get update
apt-get install -y python-pip python-dev nginx curl build-essential pwgen
pip install -U setuptools
# redash user
# TODO: check user doesn't exist yet?
@@ -98,11 +99,12 @@ if [ ! -f "/opt/redash/.env" ]; then
fi
# Install latest version
REDASH_VERSION=${REDASH_VERSION-0.6.2.b887}
LATEST_URL="https://github.com/EverythingMe/redash/releases/download/v${REDASH_VERSION}/redash.$REDASH_VERSION.tar.gz"
REDASH_VERSION=${REDASH_VERSION-0.7.1.b1015}
#LATEST_URL="https://github.com/EverythingMe/redash/releases/download/v${REDASH_VERSION}/redash.$REDASH_VERSION.tar.gz"
# Use explicit path until we switch to using release version again instead of RC.
LATEST_URL="https://github.com/EverythingMe/redash/releases/download/v0.7.1-rc/redash.0.7.1.b1015.tar.gz"
VERSION_DIR="/opt/redash/redash.$REDASH_VERSION"
REDASH_TARBALL=/tmp/redash.tar.gz
REDASH_TARBALL=/tmp/redash.tar.gz
if [ ! -d "$VERSION_DIR" ]; then
sudo -u redash wget $LATEST_URL -O $REDASH_TARBALL
@@ -143,6 +145,7 @@ if [ $pg_user_exists -ne 0 ]; then
REDASH_READER_PASSWORD=$(pwgen -1)
sudo -u postgres psql -c "CREATE ROLE redash_reader WITH PASSWORD '$REDASH_READER_PASSWORD' NOCREATEROLE NOCREATEDB NOSUPERUSER LOGIN"
sudo -u redash psql -c "grant select(id,name,type) ON data_sources to redash_reader;" redash
sudo -u redash psql -c "grant select(id,name) ON users to redash_reader;" redash
sudo -u redash psql -c "grant select on activity_log, events, queries, dashboards, widgets, visualizations, query_results to redash_reader;" redash
cd /opt/redash/current
@@ -151,14 +154,13 @@ fi
# BigQuery dependencies:
apt-get install -y libffi-dev libssl-dev
pip install google-api-python-client==1.2 pyOpenSSL==0.14 oauth2client==1.2
# MySQL dependencies:
apt-get install -y libmysqlclient-dev
pip install MySQL-python==1.2.5
# Mongo dependencies:
pip install pymongo==2.7.2
# Pip requirements for all data source types
cd /opt/redash/current
pip install -r requirements_all_ds.txt
# Setup supervisord + sysv init startup script
sudo -u redash mkdir -p /opt/redash/supervisord
@@ -175,3 +177,4 @@ rm /etc/nginx/sites-enabled/default
wget -O /etc/nginx/sites-available/redash $FILES_BASE_URL"nginx_redash_site"
ln -nfs /etc/nginx/sites-available/redash /etc/nginx/sites-enabled/redash
service nginx restart

View File

@@ -0,0 +1,199 @@
#!/bin/bash
set -eu
REDASH_BASE_PATH=/opt/redash
FILES_BASE_URL=https://raw.githubusercontent.com/EverythingMe/redash/docs_setup/setup/files/
FILE_BASE_URL_FOR_AMAZON_LINUX=https://raw.githubusercontent.com/EverythingMe/redash/master/setup/files/
# Verify running as root:
if [ "$(id -u)" != "0" ]; then
if [ $# -ne 0 ]; then
echo "Failed running with sudo. Exiting." 1>&2
exit 1
fi
echo "This script must be run as root. Trying to run with sudo."
sudo bash $0 --with-sudo
exit 0
fi
# Base packages
yum update -y
yum install -y python-pip python-devel nginx curl
yes | yum groupinstall -y "Development Tools"
yum install -y libffi-devel openssl-devel
# redash user
# TODO: check user doesn't exist yet?
if ! adduser --system --no-create-home --comment "" redash; then
    echo "redash user has already been registered."
fi
add_service() {
service_name=$1
service_command="/etc/init.d/$service_name"
echo "Adding service: $service_name (/etc/init.d/$service_name)."
chmod +x $service_command
if command -v chkconfig >/dev/null 2>&1; then
# we have chkconfig, so let's add to chkconfig and put in runlevels 345
chkconfig --add $service_name && echo "Successfully added to chkconfig!"
chkconfig --level 345 $service_name on && echo "Successfully added to runlevels 345!"
elif command -v update-rc.d >/dev/null 2>&1; then
# if we're not a chkconfig box, assume we can use update-rc.d
update-rc.d $service_name defaults && echo "Success!"
else
echo "No supported init tool found."
fi
$service_command start
}
# PostgreSQL
pg_available=0
psql --version || pg_available=$?
if [ $pg_available -ne 0 ]; then
# wget $FILES_BASE_URL"postgres_apt.sh" -O /tmp/postgres_apt.sh
# bash /tmp/postgres_apt.sh
yum update -y
yum -y install postgresql93-server postgresql93-devel
service postgresql93 initdb
add_service "postgresql93"
fi
# Redis
redis_available=0
redis-cli --version || redis_available=$?
if [ $redis_available -ne 0 ]; then
wget http://download.redis.io/releases/redis-2.8.17.tar.gz
tar xzf redis-2.8.17.tar.gz
rm redis-2.8.17.tar.gz
cd redis-2.8.17
make
make install
# Setup process init & configuration
REDIS_PORT=6379
REDIS_CONFIG_FILE="/etc/redis/$REDIS_PORT.conf"
REDIS_LOG_FILE="/var/log/redis_$REDIS_PORT.log"
REDIS_DATA_DIR="/var/lib/redis/$REDIS_PORT"
mkdir -p `dirname "$REDIS_CONFIG_FILE"` || { echo "Could not create redis config directory" >&2; exit 1; }
mkdir -p `dirname "$REDIS_LOG_FILE"` || { echo "Could not create redis log dir" >&2; exit 1; }
mkdir -p "$REDIS_DATA_DIR" || { echo "Could not create redis data directory" >&2; exit 1; }
wget -O /etc/init.d/redis_6379 $FILES_BASE_URL"redis_init"
wget -O $REDIS_CONFIG_FILE $FILES_BASE_URL"redis.conf"
add_service "redis_$REDIS_PORT"
cd ..
rm -rf redis-2.8.17
fi
if [ ! -d "$REDASH_BASE_PATH" ]; then
sudo mkdir /opt/redash
sudo chown redash /opt/redash
sudo -u redash mkdir /opt/redash/logs
fi
# Default config file
if [ ! -f "/opt/redash/.env" ]; then
sudo -u redash wget $FILES_BASE_URL"env" -O /opt/redash/.env
fi
# Install latest version
REDASH_VERSION=${REDASH_VERSION-0.6.3.b906}
LATEST_URL="https://github.com/EverythingMe/redash/releases/download/v${REDASH_VERSION}/redash.$REDASH_VERSION.tar.gz"
VERSION_DIR="/opt/redash/redash.$REDASH_VERSION"
REDASH_TARBALL=/tmp/redash.tar.gz
REDASH_TARBALL=/tmp/redash.tar.gz
if [ ! -d "$VERSION_DIR" ]; then
sudo -u redash wget $LATEST_URL -O $REDASH_TARBALL
sudo -u redash mkdir $VERSION_DIR
sudo -u redash tar -C $VERSION_DIR -xvf $REDASH_TARBALL
ln -nfs $VERSION_DIR /opt/redash/current
ln -nfs /opt/redash/.env /opt/redash/current/.env
cd /opt/redash/current
# TODO: venv?
pip install -r requirements.txt
fi
# InfluxDB dependencies:
pip install influxdb==2.6.0
# BigQuery dependencies:
pip install google-api-python-client==1.2 pyOpenSSL==0.14 oauth2client==1.2
# MySQL dependencies:
yum install -y mysql-devel
pip install MySQL-python==1.2.5
# Mongo dependencies:
pip install pymongo==2.7.2
# Setup supervisord + sysv init startup script
sudo -u redash mkdir -p /opt/redash/supervisord
pip install supervisor==3.1.2 # TODO: move to requirements.txt
# Create database / tables
pg_user_exists=0
sudo -u postgres psql postgres -tAc "SELECT 1 FROM pg_roles WHERE rolname='redash'" | grep -q 1 || pg_user_exists=$?
if [ $pg_user_exists -ne 0 ]; then
echo "Creating redash postgres user & database."
sudo -u postgres createuser redash --no-superuser --no-createdb --no-createrole
sudo -u postgres createdb redash --owner=redash
cd /opt/redash/current
sudo -u redash bin/run ./manage.py database create_tables
fi
# Create default admin user
cd /opt/redash/current
# TODO: make sure user created only once
# TODO: generate temp password and print to screen
sudo -u redash bin/run ./manage.py users create --admin --password admin "Admin" "admin"
# Create re:dash read only pg user & setup data source
pg_user_exists=0
sudo -u postgres psql postgres -tAc "SELECT 1 FROM pg_roles WHERE rolname='redash_reader'" | grep -q 1 || pg_user_exists=$?
if [ $pg_user_exists -ne 0 ]; then
echo "Creating redash reader postgres user."
sudo yum install -y expect
REDASH_READER_PASSWORD=$(mkpasswd)
sudo -u postgres psql -c "CREATE ROLE redash_reader WITH PASSWORD '$REDASH_READER_PASSWORD' NOCREATEROLE NOCREATEDB NOSUPERUSER LOGIN"
sudo -u redash psql -c "grant select(id,name,type) ON data_sources to redash_reader;" redash
sudo -u redash psql -c "grant select on activity_log, events, queries, dashboards, widgets, visualizations, query_results to redash_reader;" redash
cd /opt/redash/current
sudo -u redash bin/run ./manage.py ds new -n "re:dash metadata" -t "pg" -o "{\"user\": \"redash_reader\", \"password\": \"$REDASH_READER_PASSWORD\", \"host\": \"localhost\", \"dbname\": \"redash\"}"
fi
# Get supervisord startup script
sudo -u redash wget -O /opt/redash/supervisord/supervisord.conf $FILE_BASE_URL_FOR_AMAZON_LINUX"supervisord_for_amazon_linux.conf"
# install start-stop-daemon
wget http://developer.axis.com/download/distribution/apps-sys-utils-start-stop-daemon-IR1_9_18-2.tar.gz
tar xvzf apps-sys-utils-start-stop-daemon-IR1_9_18-2.tar.gz
cd apps/sys-utils/start-stop-daemon-IR1_9_18-2/
gcc start-stop-daemon.c -o start-stop-daemon
cp start-stop-daemon /sbin/
wget -O /etc/init.d/redash_supervisord $FILE_BASE_URL_FOR_AMAZON_LINUX"redash_supervisord_init_for_amazon_linux"
add_service "redash_supervisord"
# Nginx setup
if ! mkdir /etc/nginx/sites-available 2>/dev/null; then
    echo "/etc/nginx/sites-available already exists."
fi
wget -O /etc/nginx/sites-available/redash $FILES_BASE_URL"nginx_redash_site"
ln -nfs /etc/nginx/sites-available/redash /etc/nginx/conf.d/redash.conf
service nginx restart

View File

@@ -1,9 +1,6 @@
export REDASH_CONNECTION_ADAPTER=pg
export REDASH_CONNECTION_STRING="dbname=redash"
export REDASH_STATIC_ASSETS_PATH="../rd_ui/dist/"
export REDASH_LOG_LEVEL="INFO"
export REDASH_WORKERS_COUNT=6
export REDASH_REDIS_URL=redis://localhost:6379/1
export REDASH_DATABASE_URL="postgresql://redash"
export REDASH_COOKIE_SECRET=veryverysecret
export REDASH_GOOGLE_APPS_DOMAIN=
export REDASH_GOOGLE_APPS_DOMAIN=

View File

@@ -0,0 +1,125 @@
#!/bin/sh
# /etc/init.d/redash_supervisord
### BEGIN INIT INFO
# Provides: supervisord
# Required-Start: $remote_fs $syslog
# Required-Stop: $remote_fs $syslog
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: process supervisor
### END INIT INFO
# Author: Ron DuPlain <ron.duplain@gmail.com>
# Do NOT "set -e"
# PATH should only include /usr/* if it runs after the mountnfs.sh script
PATH=/sbin:/usr/sbin:/usr/local/sbin:/bin:/usr/bin:/usr/local/bin
NAME=supervisord
DESC="process supervisor"
DAEMON=/usr/local/bin/$NAME
DAEMON_ARGS="--configuration /opt/redash/supervisord/supervisord.conf "
PIDFILE=/opt/redash/supervisord/supervisord.pid
SCRIPTNAME=/etc/init.d/redash_supervisord
USER=redash
# Exit if the package is not installed
[ -x "$DAEMON" ] || exit 0
# Read configuration variable file if it is present
[ -r /etc/default/$NAME ] && . /etc/default/$NAME
. /etc/rc.d/init.d/functions
#
# Function that starts the daemon/service
#
do_start()
{
# Return
# 0 if daemon has been started
# 1 if daemon was already running
# 2 if daemon could not be started
start-stop-daemon --start --quiet --pidfile $PIDFILE --user $USER --chuid $USER --exec $DAEMON --test > /dev/null \
|| return 1
start-stop-daemon --start --quiet --pidfile $PIDFILE --user $USER --chuid $USER --exec $DAEMON -- \
$DAEMON_ARGS \
|| return 2
# Add code here, if necessary, that waits for the process to be ready
# to handle requests from services started subsequently which depend
# on this one. As a last resort, sleep for some time.
}
#
# Function that stops the daemon/service
#
do_stop()
{
# Return
# 0 if daemon has been stopped
# 1 if daemon was already stopped
# 2 if daemon could not be stopped
# other if a failure occurred
start-stop-daemon --stop --quiet --retry=TERM/30/KILL/5 --pidfile $PIDFILE --user $USER --chuid $USER --name $NAME
RETVAL="$?"
[ "$RETVAL" = 2 ] && return 2
# Wait for children to finish too if this is a daemon that forks
# and if the daemon is only ever run from this initscript.
# If the above conditions are not satisfied then add some other code
# that waits for the process to drop all resources that could be
# needed by services started subsequently. A last resort is to
# sleep for some time.
start-stop-daemon --stop --quiet --oknodo --retry=0/30/KILL/5 --user $USER --chuid $USER --exec $DAEMON
[ "$?" = 2 ] && return 2
# Many daemons don't delete their pidfiles when they exit.
rm -f $PIDFILE
return "$RETVAL"
}
case "$1" in
start)
[ "$VERBOSE" != no ] && echo "Starting $DESC" "$NAME"
do_start
case "$?" in
0|1) [ "$VERBOSE" != no ] && echo 0 ;;
2) [ "$VERBOSE" != no ] && echo 1 ;;
esac
;;
stop)
[ "$VERBOSE" != no ] && echo "Stopping $DESC" "$NAME"
do_stop
case "$?" in
0|1) [ "$VERBOSE" != no ] && echo 0 ;;
2) [ "$VERBOSE" != no ] && echo 1 ;;
esac
;;
status)
status -p "$STASH_PID" stash
# status_of_proc "$DAEMON" "$NAME" && exit 0 || exit $?
;;
restart)
echo "Restarting $DESC" "$NAME"
do_stop
case "$?" in
0|1)
do_start
case "$?" in
0) echo 0 ;;
1) echo 1 ;; # Old process is still running
*) echo 1 ;; # Failed to start
esac
;;
*)
# Failed to stop
echo 1
;;
esac
;;
*)
echo "Usage: $SCRIPTNAME {start|stop|status|restart}" >&2
exit 3
;;
esac

View File

@@ -0,0 +1,31 @@
[supervisord]
nodaemon=false
logfile=/opt/redash/logs/supervisord.log
pidfile=/opt/redash/supervisord/supervisord.pid
directory=/opt/redash/current
[inet_http_server]
port = 127.0.0.1:9001
[rpcinterface:supervisor]
supervisor.rpcinterface_factory = supervisor.rpcinterface:make_main_rpcinterface
[program:redash_server]
command=/opt/redash/current/bin/run /usr/local/bin/gunicorn -b 127.0.0.1:5000 --name redash -w 4 redash.wsgi:app
process_name=redash_server
numprocs=1
priority=999
autostart=true
autorestart=true
stdout_logfile=/opt/redash/logs/api.log
stderr_logfile=/opt/redash/logs/api_error.log
[program:redash_celery]
command=/opt/redash/current/bin/run /usr/local/bin/celery worker --app=redash.worker --beat -Qqueries,celery,scheduled_queries
process_name=redash_celery
numprocs=1
priority=999
autostart=true
autorestart=true
stdout_logfile=/opt/redash/logs/celery.log

View File

@@ -355,7 +355,7 @@ class TestLogin(BaseTestCase):
def test_submit_non_existing_user(self):
with app.test_client() as c, patch('redash.controllers.login_user') as login_user_mock:
rv = c.post('/login', data={'username': 'arik', 'password': 'password'})
rv = c.post('/login', data={'email': 'arik', 'password': 'password'})
self.assertEquals(rv.status_code, 200)
self.assertFalse(login_user_mock.called)
@@ -366,7 +366,7 @@ class TestLogin(BaseTestCase):
user.save()
with app.test_client() as c, patch('redash.controllers.login_user') as login_user_mock:
rv = c.post('/login', data={'username': user.email, 'password': 'password'})
rv = c.post('/login', data={'email': user.email, 'password': 'password'})
self.assertEquals(rv.status_code, 302)
login_user_mock.assert_called_with(user, remember=False)
@@ -376,7 +376,7 @@ class TestLogin(BaseTestCase):
user.save()
with app.test_client() as c, patch('redash.controllers.login_user') as login_user_mock:
rv = c.post('/login', data={'username': user.email, 'password': 'password', 'remember': True})
rv = c.post('/login', data={'email': user.email, 'password': 'password', 'remember': True})
self.assertEquals(rv.status_code, 302)
login_user_mock.assert_called_with(user, remember=True)
@@ -387,14 +387,14 @@ class TestLogin(BaseTestCase):
with app.test_client() as c, patch('redash.controllers.login_user') as login_user_mock:
rv = c.post('/login?next=/test',
data={'username': user.email, 'password': 'password'})
data={'email': user.email, 'password': 'password'})
self.assertEquals(rv.status_code, 302)
self.assertEquals(rv.location, 'http://localhost/test')
login_user_mock.assert_called_with(user, remember=False)
def test_submit_incorrect_user(self):
with app.test_client() as c, patch('redash.controllers.login_user') as login_user_mock:
rv = c.post('/login', data={'username': 'non-existing', 'password': 'password'})
rv = c.post('/login', data={'email': 'non-existing', 'password': 'password'})
self.assertEquals(rv.status_code, 200)
self.assertFalse(login_user_mock.called)
@@ -404,7 +404,7 @@ class TestLogin(BaseTestCase):
user.save()
with app.test_client() as c, patch('redash.controllers.login_user') as login_user_mock:
rv = c.post('/login', data={'username': user.email, 'password': 'badbadpassword'})
rv = c.post('/login', data={'email': user.email, 'password': 'badbadpassword'})
self.assertEquals(rv.status_code, 200)
self.assertFalse(login_user_mock.called)
@@ -412,7 +412,7 @@ class TestLogin(BaseTestCase):
user = user_factory.create()
with app.test_client() as c, patch('redash.controllers.login_user') as login_user_mock:
rv = c.post('/login', data={'username': user.email, 'password': ''})
rv = c.post('/login', data={'email': user.email, 'password': ''})
self.assertEquals(rv.status_code, 200)
self.assertFalse(login_user_mock.called)
@@ -477,6 +477,6 @@ class DataSourceTest(BaseTestCase):
admin = user_factory.create(groups=['admin', 'default'])
with app.test_client() as c, authenticated_user(c, user=admin):
rv = json_request(c.post, '/api/data_sources',
data={'name': 'DS 1', 'type': 'pg', 'options': '{"dbname": "redash"}'})
data={'name': 'DS 1', 'type': 'pg', 'options': {"dbname": "redash"}})
self.assertEqual(rv.status_code, 200)

View File

@@ -164,7 +164,7 @@ class QueryArchiveTest(BaseTestCase):
def test_archived_query_doesnt_return_in_all(self):
query = query_factory.create(schedule="1")
yesterday = datetime.datetime.now() - datetime.timedelta(days=1)
query_result = models.QueryResult.store_result(query.data_source.id, query.query_hash, query.query, "1",
query_result, _ = models.QueryResult.store_result(query.data_source.id, query.query_hash, query.query, "1",
123, yesterday)
query.latest_query_data = query_result
@@ -329,7 +329,7 @@ class TestQueryResultStoreResult(BaseTestCase):
self.data = "data"
def test_stores_the_result(self):
query_result = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query,
query_result, _ = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query,
self.data, self.runtime, self.utcnow)
self.assertEqual(query_result.data, self.data)
@@ -344,7 +344,7 @@ class TestQueryResultStoreResult(BaseTestCase):
query2 = query_factory.create(query=self.query, data_source=self.data_source)
query3 = query_factory.create(query=self.query, data_source=self.data_source)
query_result = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query, self.data,
query_result, _ = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query, self.data,
self.runtime, self.utcnow)
self.assertEqual(models.Query.get_by_id(query1.id)._data['latest_query_data'], query_result.id)
@@ -356,7 +356,7 @@ class TestQueryResultStoreResult(BaseTestCase):
query2 = query_factory.create(query=self.query, data_source=self.data_source)
query3 = query_factory.create(query=self.query + "123", data_source=self.data_source)
query_result = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query, self.data,
query_result, _ = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query, self.data,
self.runtime, self.utcnow)
self.assertEqual(models.Query.get_by_id(query1.id)._data['latest_query_data'], query_result.id)
@@ -368,7 +368,7 @@ class TestQueryResultStoreResult(BaseTestCase):
query2 = query_factory.create(query=self.query, data_source=self.data_source)
query3 = query_factory.create(query=self.query, data_source=data_source_factory.create())
query_result = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query, self.data,
query_result, _ = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query, self.data,
self.runtime, self.utcnow)
self.assertEqual(models.Query.get_by_id(query1.id)._data['latest_query_data'], query_result.id)