Mirror of https://github.com/getredash/redash.git (synced 2025-12-19 17:37:19 -05:00)

Compare commits: 179 commits
| SHA1 |
|---|
| 27639f83c7 |
| c08e6791df |
| 5c7158b6ae |
| b886067a9f |
| 2421de8819 |
| 9e87e42400 |
| 8c750826e3 |
| b14b6d1773 |
| 76cb73f4ce |
| 8854a45598 |
| 228b8c7614 |
| 5de79213ae |
| c7d30c8b87 |
| 076710f0c6 |
| a9172dac00 |
| accca51f39 |
| 5f5774d01b |
| 00e99d858c |
| da56dc883f |
| 02582cab65 |
| bff4d31ada |
| 83554207e1 |
| 1c0c3e0b93 |
| 5feb563dc9 |
| 07b88d0b53 |
| 21f33462d5 |
| 6a9d95f1ac |
| 36b80fc4ef |
| d89dd2c9af |
| 658af526c7 |
| 3d859ec5f3 |
| fdff799d23 |
| 5fc0b88b23 |
| 63de247478 |
| 5d3caac1b5 |
| e4b9d23dfe |
| 890f59a4c9 |
| d4a18ba611 |
| c4502b2925 |
| 1d5efdd93f |
| 2b95da102e |
| d512cd0c1d |
| 3dc9c84a98 |
| 4a33b987b8 |
| f7041977d5 |
| 83bc38579e |
| 4b8a94e795 |
| 406010a7a6 |
| 4f11f28efa |
| c919602b20 |
| 7702b05635 |
| 5fc7c499a3 |
| 628240906e |
| 41b9b21a20 |
| dbd3f754ba |
| 4ef3c27fe6 |
| 58a005c71b |
| 9d7ff31178 |
| 93d6b01fbf |
| 7d57f9d0f1 |
| e80f470255 |
| 5636cec0eb |
| 912bbc1a4a |
| d3bb58167e |
| 2911fa8af7 |
| 4503c6af66 |
| 7fc2d5ee0b |
| 3c9c1466a3 |
| 4a7c066bf0 |
| b850da52a2 |
| 1a3657572e |
| 666e3281e4 |
| 66084b1a3b |
| 421470666a |
| f8e2bc9eca |
| 079fbf33f4 |
| c195362710 |
| b671dd0431 |
| 7793f3b257 |
| e09aa6f81a |
| 780e0c0418 |
| 43edb009d6 |
| 81978c5049 |
| 239813e195 |
| 28dd571a03 |
| 808126cf91 |
| 69a8295f4c |
| a692e3f664 |
| 6860dde1f7 |
| e183affdd0 |
| 6338be3811 |
| 3ee6371250 |
| 4f38d42182 |
| 39db74ff20 |
| 05c2c21a85 |
| 00edc29e50 |
| 3771af0a8c |
| c32c2d43f7 |
| 4e2e3f9077 |
| 2a27422df9 |
| f9e0ce8e9c |
| a1d49f13d3 |
| 26aa199f9c |
| 4c77f3f914 |
| d6be792595 |
| 59c1ea7f16 |
| 4d24005eff |
| 2dab35b614 |
| 0b61b88f5f |
| e5cb58207c |
| fc17d1af81 |
| e6650e1e2d |
| 3aa1cd0133 |
| e04833c327 |
| b743cceb60 |
| a0e134d3b5 |
| d7fb2d7458 |
| b913ce6022 |
| 1eb7945d16 |
| 37d0026ee4 |
| 9cdc2cb2f7 |
| a9bff9063e |
| 380126ee44 |
| d8377375b8 |
| 98ff701f9a |
| f5ea3e97d3 |
| 719e96dd2f |
| 6c6c0256ba |
| 723df51cdd |
| a0f4e263b2 |
| 4706bf8060 |
| f96a9f659a |
| 63c273f896 |
| 622ac6d781 |
| 8dc564a8bc |
| 3ae5baef22 |
| 8d819068b5 |
| 585e056265 |
| 1914ed7c7c |
| bd216e93e7 |
| 5e351de896 |
| de0e534c77 |
| 5fa1f9440d |
| b3ddc5f8b9 |
| 8cde5f9673 |
| 1bb53ca497 |
| 0a3cd9267f |
| 075d843354 |
| b14e5e8c0e |
| c9da4be422 |
| 276ee7c27a |
| 334040532a |
| 335a3a98b5 |
| b17080a7f5 |
| 8441c12b01 |
| 3b4af1b6fa |
| c3deb8e2fa |
| a60b1686da |
| b56e87ceb2 |
| fc89bcdaf3 |
| 15ec8321bb |
| e6ba62485c |
| 9077b01fb9 |
| f45281be96 |
| a1c8ef9037 |
| f46e8af23f |
| 30a89bfd2c |
| 6312f8738d |
| 9e3d5c10c5 |
| 59b87ec4fd |
| 27ecf5f25c |
| 105971c4c8 |
| 690f8323c3 |
| 20eb110ce3 |
| 571c9d0aee |
| 0ee7292f16 |
| 8c28392dfd |
| 671f1f4478 |
| 557d3748be |
.env.example (15 changes)

```diff
@@ -1,9 +1,6 @@
-REDASH_CONNECTION_ADAPTER=pg
-REDASH_CONNECTION_STRING="dbname=data"
-REDASH_STATIC_ASSETS_PATH=../rd_ui/app/
-REDASH_GOOGLE_APPS_DOMAIN=
-REDASH_ADMINS=
-REDASH_WORKERS_COUNT=2
-REDASH_COOKIE_SECRET=
-REDASH_DATABASE_URL='postgresql://rd'
-REDASH_LOG_LEVEL = "INFO"
+export REDASH_STATIC_ASSETS_PATH="../rd_ui/app/"
+export REDASH_LOG_LEVEL="INFO"
+export REDASH_REDIS_URL=redis://localhost:6379/1
+export REDASH_DATABASE_URL="postgresql://redash"
+export REDASH_COOKIE_SECRET=veryverysecret
+export REDASH_GOOGLE_APPS_DOMAIN=
```
.gitignore (vendored, 1 change)

```diff
@@ -8,6 +8,7 @@ celerybeat-schedule*
 .#*
 \#*#
 *~
 _build
 
 # Vagrant related
+.vagrant
```
Makefile (2 changes)

```diff
@@ -13,7 +13,7 @@ deps:
 
 pack:
 	sed -ri "s/^__version__ = '([0-9.]*)'/__version__ = '$(FULL_VERSION)'/" redash/__init__.py
-	tar -zcv -f $(FILENAME) --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *
+	tar -zcv -f $(FILENAME) --exclude="optipng*" --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *
 
 upload:
 	python bin/release_manager.py $(CIRCLE_SHA1) $(BASE_VERSION) $(FILENAME)
```
README.md

```diff
@@ -10,7 +10,8 @@
 Prior to **_re:dash_**, we tried to use traditional BI suites and discovered a set of bloated, technically challenged and slow tools/flows. What we were looking for was a more hacker'ish way to look at data, so we built one.
 
 **_re:dash_** was built to allow fast and easy access to billions of records, that we process and collect using Amazon Redshift ("petabyte scale data warehouse" that "speaks" PostgreSQL).
-Today **_re:dash_** has support for querying multiple databases, including: Redshift, Google BigQuery, PostgreSQL, MySQL, Graphite and custom scripts.
+Today **_re:dash_** has support for querying multiple databases, including: Redshift, Google BigQuery, PostgreSQL, MySQL, Graphite,
+Presto, Google Spreadsheets, Cloudera Impala and custom scripts.
 
 **_re:dash_** consists of two parts:
 
@@ -27,7 +28,7 @@ You can try out the demo instance: http://demo.redash.io/ (login with any Google
 
 ## Getting Started
 
-* [Setting up re:dash instance](https://github.com/EverythingMe/redash/wiki/Setting-up-re:dash-instance) (includes links to ready made AWS/GCE images).
+* [Setting up re:dash instance](http://redash.io/deployment/setup.html) (includes links to ready made AWS/GCE images).
 * Additional documentation in the [Wiki](https://github.com/everythingme/redash/wiki).
 
```
bin/release_manager.py

```diff
@@ -104,9 +104,26 @@ def get_changelog(commit_sha):
 
     return "\n".join(changes)
 
+
+def update_release_commit_sha(release, commit_sha):
+    params = {
+        'target_commitish': commit_sha,
+    }
+
+    response = _github_request('patch', 'repos/{}/releases/{}'.format(repo, release['id']), params)
+
+    if response.status_code != 200:
+        raise exception_from_error("Failed updating commit sha for existing release", response)
+
+    return response.json()
+
+
 def update_release(version, build_filepath, commit_sha):
     try:
-        release = get_rc_release(version) or create_release(version, commit_sha)
+        release = get_rc_release(version)
+        if release:
+            release = update_release_commit_sha(release, commit_sha)
+        else:
+            release = create_release(version, commit_sha)
 
         print "Using release id: {}".format(release['id'])
 
         remove_previous_builds(release)
```
(deleted file, 63 lines)

```diff
@@ -1,63 +0,0 @@
-"""
-Script to test concurrency (multithreading/multiprocess) issues with the workers. Use with caution.
-"""
-import json
-import atfork
-atfork.monkeypatch_os_fork_functions()
-import atfork.stdlib_fixer
-atfork.stdlib_fixer.fix_logging_module()
-
-import time
-from redash.data import worker
-from redash import models, data_manager, redis_connection
-
-if __name__ == '__main__':
-    models.create_db(True, False)
-
-    print "Creating data source..."
-    data_source = models.DataSource.create(name="Concurrency", type="pg", options="dbname=postgres")
-
-    print "Clear jobs/hashes:"
-    redis_connection.delete("jobs")
-    query_hashes = redis_connection.keys("query_hash_*")
-    if query_hashes:
-        redis_connection.delete(*query_hashes)
-
-    starting_query_results_count = models.QueryResult.select().count()
-    jobs_count = 5000
-    workers_count = 10
-
-    print "Creating jobs..."
-    for i in xrange(jobs_count):
-        query = "SELECT {}".format(i)
-        print "Inserting: {}".format(query)
-        data_manager.add_job(query=query, priority=worker.Job.LOW_PRIORITY,
-                             data_source=data_source)
-
-    print "Starting workers..."
-    workers = data_manager.start_workers(workers_count)
-
-    print "Waiting for jobs to be done..."
-    keep_waiting = True
-    while keep_waiting:
-        results_count = models.QueryResult.select().count() - starting_query_results_count
-        print "QueryResults: {}".format(results_count)
-        time.sleep(5)
-        if results_count == jobs_count:
-            print "Yay done..."
-            keep_waiting = False
-
-    data_manager.stop_workers()
-
-    qr_count = 0
-    for qr in models.QueryResult.select():
-        number = int(qr.query.split()[1])
-        data_number = json.loads(qr.data)['rows'][0].values()[0]
-
-        if number != data_number:
-            print "Oops? {} != {} ({})".format(number, data_number, qr.id)
-        qr_count += 1
-
-    print "Verified {} query results.".format(qr_count)
-
-    print "Done."
```
docs/Makefile (new file, 192 lines)

```makefile
# Makefile for Sphinx documentation
#

# You can set these variables from the command line.
SPHINXOPTS    =
SPHINXBUILD   = sphinx-build
PAPER         =
BUILDDIR      = _build

# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif

# Internal variables.
PAPEROPT_a4     = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .

.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext

help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html       to make standalone HTML files"
	@echo "  dirhtml    to make HTML files named index.html in directories"
	@echo "  singlehtml to make a single large HTML file"
	@echo "  pickle     to make pickle files"
	@echo "  json       to make JSON files"
	@echo "  htmlhelp   to make HTML files and a HTML help project"
	@echo "  qthelp     to make HTML files and a qthelp project"
	@echo "  applehelp  to make an Apple Help Book"
	@echo "  devhelp    to make HTML files and a Devhelp project"
	@echo "  epub       to make an epub"
	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
	@echo "  text       to make text files"
	@echo "  man        to make manual pages"
	@echo "  texinfo    to make Texinfo files"
	@echo "  info       to make Texinfo files and run them through makeinfo"
	@echo "  gettext    to make PO message catalogs"
	@echo "  changes    to make an overview of all changed/added/deprecated items"
	@echo "  xml        to make Docutils-native XML files"
	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
	@echo "  linkcheck  to check all external links for integrity"
	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
	@echo "  coverage   to run coverage check of the documentation (if enabled)"

clean:
	rm -rf $(BUILDDIR)/*

html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

singlehtml:
	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
	@echo
	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."

pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."

json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."

htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	      ".hhp project file in $(BUILDDIR)/htmlhelp."

qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/redash.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/redash.qhc"

applehelp:
	$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
	@echo
	@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
	@echo "N.B. You won't be able to view it unless you put it in" \
	      "~/Library/Documentation/Help or install it in your application" \
	      "bundle."

devhelp:
	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
	@echo
	@echo "Build finished."
	@echo "To view the help file:"
	@echo "# mkdir -p $$HOME/.local/share/devhelp/redash"
	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/redash"
	@echo "# devhelp"

epub:
	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
	@echo
	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."

latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make' in that directory to run these through (pdf)latex" \
	      "(use \`make latexpdf' here to do that automatically)."

latexpdf:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through pdflatex..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

latexpdfja:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through platex and dvipdfmx..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

text:
	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
	@echo
	@echo "Build finished. The text files are in $(BUILDDIR)/text."

man:
	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
	@echo
	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."

texinfo:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo
	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
	@echo "Run \`make' in that directory to run these through makeinfo" \
	      "(use \`make info' here to do that automatically)."

info:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo "Running Texinfo files through makeinfo..."
	make -C $(BUILDDIR)/texinfo info
	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."

gettext:
	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
	@echo
	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."

changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."

linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	      "or in $(BUILDDIR)/linkcheck/output.txt."

doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	      "results in $(BUILDDIR)/doctest/output.txt."

coverage:
	$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
	@echo "Testing of coverage in the sources finished, look at the " \
	      "results in $(BUILDDIR)/coverage/python.txt."

xml:
	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
	@echo
	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."

pseudoxml:
	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
	@echo
	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
```
docs/conf.py (new file, 111 lines)

```python
# -*- coding: utf-8 -*-
#
# re:dash documentation build configuration file, created by
# sphinx-quickstart on Mon Jul 20 22:40:24 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

import sys
import os
import shlex

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []

# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u're:dash'
copyright = u'2015, EverythingMe'
author = u'EverythingMe'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None

exclude_patterns = ['_build']

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False

# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# If true, links to the reST sources are added to the pages.
html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = False

# Output file base name for HTML help builder.
htmlhelp_basename = 'redashdoc'

# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'redash', u're:dash Documentation',
     [author], 1)
]

# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'redash', u're:dash Documentation',
     author, 'redash', 'One line description of project.',
     'Miscellaneous'),
]
```
docs/datasources.rst (new file, 245 lines)

Supported Data Sources
######################

re:dash supports several types of data sources (see the full list below), and they are managed with the CLI (``manage.py``):

Create new data source
======================

.. code:: bash

    $ cd /opt/redash/current
    $ sudo -u redash bin/run ./manage.py ds new -n {name} -t {type} -o {options}

If you omit any of the options (-n, -t, -o), a prompt will ask for it. Options is a JSON string with the connection parameters. Unless you're doing some sort of automation, it's probably easier to leave it empty and fill out the prompt.

See below for the supported data source types and the relevant options string format.

Listing existing data sources
=============================

.. code:: bash

    $ sudo -u redash bin/run ./manage.py ds list

Supported data sources
======================

PostgreSQL / Redshift
---------------------

- **Type**: pg
- **Options**:

  - User (user)
  - Password (password)
  - Host (host)
  - Port (port)
  - Database name (dbname) (mandatory)

- **Options string format (for v0.5 and older)**: "user= password= host= port=5439 dbname="

MySQL
-----

- **Type**: mysql
- **Options**:

  - User (user)
  - Password (passwd)
  - Host (host)
  - Port (port)
  - Database name (db) (mandatory)

- **Options string format (for v0.5 and older)**: "Server=localhost;User=;Pwd=;Database="

Note that you need to install the MySQLdb package, as it is not included in the ``requirements.txt`` file.

Graphite
--------

- **Type**: graphite
- **Options**:

  - Url (url) (mandatory)
  - User (username)
  - Password (password)
  - Verify SSL certificate (verify)

- **Options string format**: '{"url": "https://graphite.yourcompany.com", "auth": ["user", "password"], "verify": true}'

Google BigQuery
---------------

- **Type**: bigquery
- **Options**:

  - Service Account (serviceAccount) (mandatory)
  - Project ID (projectId) (mandatory)
  - Private Key filename (privateKey) (mandatory)

- **Options string format (for v0.5 and older)**: {"serviceAccount" : "43242343247-fjdfakljr3r2@developer.gserviceaccount.com", "privateKey" : "/somewhere/23fjkfjdsfj21312-privatekey.p12", "projectId" : "myproject-123" }

Notes:

1. To obtain BigQuery credentials, follow the guidelines at: https://developers.google.com/bigquery/authorization#service-accounts
2. You need to install the ``google-api-python-client``, ``oauth2client`` and ``pyopenssl`` packages (PyOpenSSL requires the ``libffi-dev`` and ``libssl-dev`` system packages), as they are not included in the ``requirements.txt`` file.

Google Spreadsheets
-------------------

(supported from v0.6.4)

- **Type**: google\_spreadsheets
- **Options**:

  - Credentials filename (credentialsFilePath) (mandatory)

Notes:

1. To obtain Google ServiceAccount credentials, follow the guidelines at: https://developers.google.com/console/help/new/#serviceaccounts (save the JSON version of the credentials file)
2. To be able to load a spreadsheet in re:dash, share it with your ServiceAccount's email (it can be found in the credentials JSON file, for example 43242343247-fjdfakljr3r2@developer.gserviceaccount.com). All the service account details can be seen inside the JSON file you obtain in step #1.
3. The query format is "DOC\_UUID\|SHEET\_NUM" (for example "kjsdfhkjh4rsEFSDFEWR232jkddsfh\|0")
4. You (might) need to install the ``gspread``, ``oauth2client`` and ``dateutil`` packages, as they are not included in the ``requirements.txt`` file.

MongoDB
-------

- **Type**: mongo
- **Options**:

  - Connection String (connectionString) (mandatory)
  - Database name (dbName)
  - Replica set name (replicaSetName)

- **Options string format (for v0.5 and older)**: { "connectionString" : "mongodb://user:password@localhost:27017/mydb", "dbName" : "mydb" }

For ReplicaSet databases use the following connection string:

- **Options string format**: { "connectionString" : "mongodb://user:password@server1:27017,server2:27017/mydb", "dbName" : "mydb", "replicaSetName" : "myreplicaSet" }

Notes:

1. You need to install ``pymongo``, as it is not included in the ``requirements.txt`` file.

URL
---

A URL-based data source which requests URLs that conform to the supported :doc:`results JSON format </dev/results_format>`.

Very useful in situations where you want to expose the data without connecting directly to the database.

The query itself inside re:dash will simply contain the URL to be executed (e.g. http://myserver/path/myquery).

- **Type**: url
- **Options**:

  - Url (url)

- **Options string format (optional) (for v0.5 and older)**: http://myserver/path/

Notes:

1. All URLs must return the supported :doc:`results JSON format </dev/results_format>`.
2. If the Options string is set, only URLs under the supplied path can be executed using this data source. Not setting the options path allows any URL to be executed, as long as it returns the supported :doc:`results JSON format </dev/results_format>`.
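As an illustration, a minimal endpoint such a data source could call might look like the sketch below. This is not part of re:dash; it assumes Flask is installed, and the route and column names are made up. Any HTTP endpoint that returns the results JSON format works the same way.

.. code:: python

    import json

    from flask import Flask

    app = Flask(__name__)

    @app.route("/path/myquery")
    def myquery():
        # Any server-side logic can run here; only the output format matters.
        return json.dumps({
            "columns": [{"name": "value", "friendly_name": "Value", "type": "integer"}],
            "rows": [{"value": 42}],
        })

    if __name__ == "__main__":
        app.run(port=8000)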
Script
------

Allows executing any executable script residing on the server, as long as its standard output conforms to the supported :doc:`results JSON format </dev/results_format>`.

This integration is useful in situations where you need more than just a query and some processing has to happen.

Once the path to the scripts is configured in the data source, the query needs to contain the file name of the script as well as any command line parameters the script requires (e.g. myscript.py param1 param2 --param3=value).

- **Type**: script
- **Options**:

  - Scripts Path (path) (mandatory)

- **Options string format (for v0.5 and older)**: /path/to/scripts/

Notes:

1. You MUST set a path to execute the scripts, otherwise the data source will not work.
2. All scripts must be executable, otherwise results won't return.
3. The script data source does not allow relative paths in the form of "../". You may use a relative sub path such as "./mydir/myscript".
4. All scripts must output to the standard output the supported :doc:`results JSON format </dev/results_format>` and only that, otherwise the data source will not be able to load the data.

Python
------

Execute other queries, manipulate and compute with Python code
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

The Python data source allows running Python code in a secure and safe environment. It won't allow writing files to disk, importing modules that were not pre-approved in the configuration, etc.

One of the benefits of using the Python data source is its ability to execute queries (or saved queries), which you can store in a variable and then manipulate/transform/merge with other data and queries.

You can import data analysis libraries such as Pandas, NumPy and SciPy.

This saves the trouble of having outside scripts do the synthesis of data from multiple sources to create a single data set that can then be used in dashboards.

- **Type**: Python
- **Options**:

  - Allowed Modules in a comma separated list (optional). **NOTE:** You MUST make sure these modules are installed on the machine running the Celery workers. (A sketch of the kind of query this enables follows this list.)
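The sketch below illustrates merging two queries into one result set. The ``execute_query`` helper and the exact ``result`` layout are assumptions here and depend on your re:dash version, so treat this as code typed into the query editor rather than a definitive API reference.

.. code:: python

    # Illustrative only: run two queries on different data sources and
    # join them by country. "execute_query" is assumed to be provided by
    # the Python query runner's sandbox.
    sales = execute_query("postgres", "SELECT country, total FROM sales")
    signups = execute_query("mysql", "SELECT country, cnt FROM signups")

    signups_by_country = dict((row["country"], row["cnt"]) for row in signups["rows"])

    result = {
        "columns": [
            {"name": "country", "friendly_name": "Country", "type": "string"},
            {"name": "total", "friendly_name": "Sales", "type": "float"},
            {"name": "signups", "friendly_name": "Signups", "type": "integer"},
        ],
        "rows": [
            {"country": row["country"],
             "total": row["total"],
             "signups": signups_by_country.get(row["country"], 0)}
            for row in sales["rows"]
        ],
    }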
docs/dev.rst (new file, 11 lines)

Developer Information
=====================

.. toctree::
   :maxdepth: 2
   :glob:

   dev/vagrant
   dev/*
docs/dev/query_execution.rst (new file, 94 lines)

Query Execution Model
#####################

Introduction
============

The first data source used with re:dash was Redshift. Because we had billions of records in Redshift, and some queries were costly to re-run, from the get-go there was the idea of caching query results in re:dash.

This was to relieve stress from the Redshift cluster and also to improve the user experience.

How do queries get executed and cached in re:dash?
==================================================

Server
------

To make sure each query is executed only once at any given time, we translate the query to a ``query hash``, using the following code:

.. code:: python

    COMMENTS_REGEX = re.compile("/\*.*?\*/")

    def gen_query_hash(sql):
        sql = COMMENTS_REGEX.sub("", sql)
        sql = "".join(sql.split()).lower()
        return hashlib.md5(sql.encode('utf-8')).hexdigest()

When query execution is done, the result gets stored in the ``query_results`` table. We also check for all queries in the ``queries`` table that have the same query hash and update their reference to the query result we just saved (`code <https://github.com/EverythingMe/redash/blob/master/redash/models.py#L235>`__).
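As a standalone illustration of the normalization (the same function as above, with imports added; the queries are made up): comments, whitespace and letter case do not affect the hash, so equivalent queries share one cached result.

.. code:: python

    import hashlib
    import re

    COMMENTS_REGEX = re.compile("/\*.*?\*/")

    def gen_query_hash(sql):
        # Strip /* ... */ comments, collapse all whitespace, lowercase.
        sql = COMMENTS_REGEX.sub("", sql)
        sql = "".join(sql.split()).lower()
        return hashlib.md5(sql.encode('utf-8')).hexdigest()

    assert gen_query_hash("SELECT id FROM users /* daily check */") == \
           gen_query_hash("select ID\nfrom users")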
Client
------

The client (UI) will execute queries in two scenarios:

1. (automatically) When opening a query page of a query that doesn't have a result yet.
2. (manually) When the user clicks "Execute".

In each case the client does a POST request to ``/api/query_results`` with the following parameters: ``query`` (the query text), ``data_source_id`` (the data source to execute the query with) and ``ttl``.

When loading a cached result, ``ttl`` will be the one set on the query (if it was set). This is a relic from previous versions, and I'm not sure it's really used anymore, as we usually fetch a query result by its id.

When loading a non-cached result, ``ttl`` will be 0, which "forces" the server to execute the query.

As a response to ``/api/query_results`` the server will send either the query result (in the case of a cached query) or the job id of the currently executing query. When a job id is received, the client starts polling on this id until a query result is received (this is encapsulated in the ``Query`` and ``QueryResult`` services).
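From an API consumer's point of view, that flow can be sketched roughly as below. This is a simplified illustration, not the actual client code; it assumes a job-status endpoint at ``/api/jobs/<id>`` and omits authentication and error handling.

.. code:: python

    import time

    import requests

    def run_query(base_url, query, data_source_id, ttl=0):
        response = requests.post(base_url + "/api/query_results",
                                 json={"query": query,
                                       "data_source_id": data_source_id,
                                       "ttl": ttl})
        payload = response.json()

        if "query_result" in payload:
            # Cached result: the server answered with the result directly.
            return payload["query_result"]

        # Otherwise poll the returned job id until the result is ready.
        job_id = payload["job"]["id"]
        while True:
            job = requests.get("{0}/api/jobs/{1}".format(base_url, job_id)).json()["job"]
            if job.get("query_result_id"):
                result = requests.get("{0}/api/query_results/{1}".format(
                    base_url, job["query_result_id"]))
                return result.json()["query_result"]
            time.sleep(1)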
Ideas on how to implement query parameters
==========================================

Client side only implementation
-------------------------------

(This was actually implemented. See pull request `#363 <https://github.com/EverythingMe/redash/pull/363>`__ for details.)

The basic idea of how to implement parameterized queries is to treat the query as a template and merge it with parameters taken from the query string or the UI (or both).

When the caching facility isn't required (with queries that return in a reasonable time frame), the implementation can be completely client side and the backend can be "blind" to the parameters - it just receives the final query to execute and returns the result.

As one improvement over this, we can let the UI/user specify the TTL value when making the request to ``/api/query_results``, in which case caching will be available too, while still not having to make the server aware of the parameters.
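A toy illustration of the template-merge idea (the query and parameter names are made up; any templating mechanism would do):

.. code:: python

    # The stored query is a template; parameter values come from the query
    # string or the UI. The server only ever sees final_query.
    query_template = "SELECT count(0) FROM events WHERE created_at > '{start_date}'"
    parameters = {"start_date": "2015-04-01"}

    final_query = query_template.format(**parameters)
    # final_query is then POSTed to /api/query_results like any other query.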
Hybrid
------

Another option would be to store the list of possible parameters for a query, with their default/optional values. In such a case, the server can prefetch all the options and cache them to provide faster results to the client.
docs/dev/results_format.rst (new file, 30 lines)

Data Source Results Format
==========================

All data sources in re:dash return results in the following JSON format:

.. code:: javascript

    {
        "columns" : [
            {
                // Required: a unique identifier of the column name in this result
                "name" : "COLUMN_NAME",
                // Required: friendly name of the column that will appear in the results
                "friendly_name" : "FRIENDLY_NAME",
                // Optional: if not specified, sorting might not work well.
                // Supported types: integer, float, boolean, string (default),
                // datetime (ISO-8601 text format)
                "type" : "VALUE_TYPE"
            },
            ...
        ],
        "rows" : [
            {
                // name is the column name as it appears in the columns above.
                // VALUE is a valid JSON value. For dates it's an ISO-8601 string.
                "name" : VALUE,
                "name2" : VALUE2
            },
            ...
        ]
    }
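For example, a script run by the script data source only has to print a document in this format to its standard output. A minimal sketch (the column names and values here are made up):

.. code:: python

    #!/usr/bin/env python
    # Minimal example of a program emitting the results format above.
    import json

    result = {
        "columns": [
            {"name": "day", "friendly_name": "Day", "type": "datetime"},
            {"name": "signups", "friendly_name": "Signups", "type": "integer"},
        ],
        "rows": [
            {"day": "2015-04-01T00:00:00", "signups": 42},
            {"day": "2015-04-02T00:00:00", "signups": 51},
        ],
    }

    print(json.dumps(result))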
docs/dev/vagrant.rst (new file, 49 lines)

Setting up development environment (using Vagrant)
===================================================

To simplify contribution, there is a `Vagrant box <https://vagrantcloud.com/redash/boxes/dev>`__ available with all the software needed to run re:dash for development (use it only for development; for demo purposes there are the `redash/demo <https://vagrantcloud.com/redash/boxes/demo>`__ box and the AWS/GCE images).

To get started with this box:

1. Make sure you have a recent version of `Vagrant <https://www.vagrantup.com/>`__ installed.
2. Clone the re:dash repository: ``git clone https://github.com/EverythingMe/redash.git``.
3. Change dir into the repository (``cd redash``) and run ``vagrant up``. This might take some time the first time you run it, as it downloads the Vagrant virtual box.
4. Once Vagrant is ready, ssh into the instance (``vagrant ssh``) and change dir to ``/opt/redash/current`` -- this is where your local repository copy is synced to.
5. Copy the ``.env`` file into this directory (``cp ../.env ./``).
6. From ``/opt/redash/current/rd_ui`` run ``bower install`` to install the frontend packages. This can be done from your host machine as well, if you have bower installed.
7. Go back to ``/opt/redash/current`` and install the Python dependencies: ``sudo pip install -r requirements.txt``.
8. Apply the migrations:

   ::

       PYTHONPATH=. bin/run python migrations/0001_allow_delete_query.py
       PYTHONPATH=. bin/run python migrations/0002_fix_timestamp_fields.py
       PYTHONPATH=. bin/run python migrations/0003_update_data_source_config.py
       PYTHONPATH=. bin/run python migrations/0004_allow_null_in_event_user.py
       PYTHONPATH=. bin/run python migrations/0005_add_updated_at.py
       PYTHONPATH=. bin/run python migrations/0006_queries_last_edit_by.py
       PYTHONPATH=. bin/run python migrations/0007_add_schedule_to_queries.py
       PYTHONPATH=. bin/run python migrations/0008_make_ds_name_unique.py
       PYTHONPATH=. bin/run python migrations/0009_add_api_key_to_user.py
       PYTHONPATH=. bin/run python migrations/0010_create_alerts.py
       PYTHONPATH=. bin/run python migrations/0010_allow_deleting_datasources.py
       PYTHONPATH=. bin/run python migrations/0011_migrate_bigquery_to_json.py

9. Start the server and background workers with ``bin/run honcho start -f Procfile.dev``.
10. Now the server should be available on your host on port 9001, and you can login with username admin and password admin.
docs/index.rst (new file, 57 lines)

.. image:: http://redash.io/static/img/redash_logo.png
   :width: 200px

Open Source Data Collaboration and Visualization Platform
=========================================================

**re:dash** is our take on freeing the data within our company in a way that will better fit our culture and usage patterns.

Prior to **re:dash**, we tried to use traditional BI suites and discovered a set of bloated, technically challenged and slow tools/flows. What we were looking for was a more hacker'ish way to look at data, so we built one.

**re:dash** was built to allow fast and easy access to billions of records, that we process and collect using Amazon Redshift ("petabyte scale data warehouse" that "speaks" PostgreSQL).
Today **re:dash** has support for querying multiple databases, including: Redshift, Google BigQuery, Google Spreadsheets, PostgreSQL, MySQL, Graphite and custom scripts.

Features
########

1. **Query Editor**: think of `JS Fiddle`_ for SQL queries. It's your way to share data in the organization in an open way, by sharing both the dataset and the query that generated it. This way everyone can peer review not only the resulting dataset but also the process that generated it.
2. **Visualizations**: once you have a dataset, you can create different visualizations out of it. Currently it supports charts, pivot table and cohorts.
3. **Dashboards**: combine several visualizations into a single dashboard.

Demo
####

.. figure:: https://raw.github.com/EverythingMe/redash/screenshots/screenshots.gif
   :alt: Screenshots

You can try out the demo instance: `http://demo.redash.io`_ (login with any Google account).

.. _http://demo.redash.io: http://demo.redash.io
.. _JS Fiddle: http://jsfiddle.net

Getting Started
###############

:doc:`Setting up re:dash instance </setup>` (includes links to ready made AWS/GCE images).

Getting Help
############

* Source: https://github.com/everythingme/redash
* Issues: https://github.com/everythingme/redash/issues
* Mailing List: https://groups.google.com/forum/#!forum/redash-users
* Gitter (chat): https://gitter.im/EverythingMe/redash
* Contact Arik, the maintainer, directly: arik@everything.me.

TOC
###

.. toctree::
   :maxdepth: 2

   setup
   upgrade
   datasources
   usage
   dev
   misc
docs/misc.rst (new file, 10 lines)

Miscellaneous
=============

.. toctree::
   :maxdepth: 2
   :glob:

   misc/*
docs/misc/google_developers_project.rst (new file, 50 lines)

How To: Create a Google Developers Project
==========================================

1. Go to the `Google Developers Console <https://console.developers.google.com/>`__.
2. Select a project, or create a new one by clicking Create Project:

   1. In the Project name field, type in a name for your project.
   2. In the Project ID field, optionally type in a project ID for your project or use the one that the console has created for you. This ID must be unique world-wide.
   3. Click the **Create** button and wait for the project to be created.
   4. Click on the new project name in the list to start editing the project.

3. In the left sidebar, select the **APIs** item below "APIs & auth". A list of Google web services appears.
4. Find the **Google+ API** service and set its status to **ON**; notice that this action moves the service to the top of the list.
5. In the sidebar under "APIs & auth", select **Consent screen**.

   - Choose an Email Address and specify a Product Name.

6. In the sidebar under "APIs & auth", select **Credentials**.
7. Click **Create a new Client ID**; a dialog box appears.

   - In the **Application type** section of the dialog, select **Web application**.
   - In the **Authorized JavaScript origins** field, enter the origin for your app. You can enter multiple origins, to use with multiple re:dash instances. Wildcards are not allowed. In the example below, we assume your re:dash instance address is *redash.example.com*:

     ::

         http://redash.example.com
         https://redash.example.com

   - In the Authorized redirect URI field, enter the redirect URI callback:

     ::

         http://redash.example.com/oauth/google_callback

   - Click the ``Create Client ID`` button.

8. In the resulting **Client ID for web application** section, copy the **Client ID** and **Client secret** to your ``.env`` file.
docs/misc/ssl.rst (new file, 59 lines)

SSL (HTTPS) Setup
=================

If you used the provided images or the bootstrap script, to start using SSL with your instance you need to:

1. Update the nginx config file (``/etc/nginx/sites-available/redash``) with SSL configuration (see the example below). Make sure to upload the certificate to the server, and set the paths correctly in the new config.

2. Open port 443 in your security group (if using AWS or GCE).

.. code:: nginx

    upstream redash_servers {
      server 127.0.0.1:5000;
    }

    server {
      listen 80;

      # Allow accessing /ping without https. Useful when placing behind load balancer.
      location /ping {
        proxy_set_header Host $http_host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_pass http://redash_servers;
      }

      location / {
        # Enforce SSL.
        return 301 https://$host$request_uri;
      }
    }

    server {
      listen 443 ssl;

      # Make sure to set paths to your certificate .pem and .key files.
      ssl on;
      ssl_certificate /path-to/cert.pem; # or crt
      ssl_certificate_key /path-to/cert.key;

      access_log /var/log/nginx/redash.access.log;

      gzip on;
      gzip_types *;
      gzip_proxied any;

      location / {
        proxy_set_header Host $http_host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_pass http://redash_servers;
        proxy_redirect off;
      }
    }
docs/requirements.txt (new file, 3 lines)

```
sphinx
sphinx-autobuild
sphinx_rtd_theme
```
docs/setup.rst (new file, 159 lines)

Setting up re:dash instance
###########################

The `provisioning script <https://github.com/EverythingMe/redash/blob/master/setup/bootstrap.sh>`__ works on Ubuntu 12.04, Ubuntu 14.04 and Debian Wheezy. This script installs all the needed dependencies and creates a basic setup.

To ease the process, there are also images for AWS and Google Compute Engine. These images were created with the same provisioning script, using Packer.

Create an instance
==================

Google Compute Engine
---------------------

First, you need to add the images to your account:

.. code:: bash

    $ gcloud compute images add redash-063-b906 gs://redash-images/redash.0.6.3.b906.tar.gz

Next you need to launch an instance using this image (the n1-standard-1 instance type is recommended). If you plan on using re:dash with BigQuery, you can use a dedicated image which comes with BigQuery preconfigured (using instance permissions):

.. code:: bash

    $ gcloud compute images add redash-063-b906-bq gs://redash-images/redash.0.6.3.b906-bq.tar.gz

Note that you need to launch this instance with BigQuery access:

.. code:: bash

    $ gcloud compute instances create <your_instance_name> --image redash-063-b906-bq --scopes storage-ro bigquery

(The same can be done from the web interface, just make sure to enable BigQuery access.)

Now proceed to `"Setup" <#setup>`__.

AWS
---

Launch the instance from the pre-baked AMI (for small deployments t2.micro should be enough):

- us-east-1: `ami-47b4612c <https://console.aws.amazon.com/ec2/home?region=us-east-1#LaunchInstanceWizard:ami=ami-47b4612c>`__
- us-west-1: `ami-a72edde3 <https://console.aws.amazon.com/ec2/home?region=us-west-1#LaunchInstanceWizard:ami=ami-a72edde3>`__
- us-west-2: `ami-f9d6d5c9 <https://console.aws.amazon.com/ec2/home?region=us-west-2#LaunchInstanceWizard:ami=ami-f9d6d5c9>`__
- eu-central-1: `ami-72eed46f <https://console.aws.amazon.com/ec2/home?region=eu-central-1#LaunchInstanceWizard:ami=ami-72eed46f>`__
- eu-west-1: `ami-5a135c2d <https://console.aws.amazon.com/ec2/home?region=eu-west-1#LaunchInstanceWizard:ami=ami-5a135c2d>`__
- sa-east-1: `ami-2b78f436 <https://console.aws.amazon.com/ec2/home?region=sa-east-1#LaunchInstanceWizard:ami=ami-2b78f436>`__
- ap-northeast-1: `ami-0a55fd0a <https://console.aws.amazon.com/ec2/home?region=ap-northeast-1#LaunchInstanceWizard:ami=ami-0a55fd0a>`__
- ap-southeast-2: `ami-9f793ea5 <https://console.aws.amazon.com/ec2/home?region=ap-southeast-2#LaunchInstanceWizard:ami=ami-9f793ea5>`__
- ap-southeast-1: `ami-12545740 <https://console.aws.amazon.com/ec2/home?region=ap-southeast-1#LaunchInstanceWizard:ami=ami-12545740>`__

Now proceed to `"Setup" <#setup>`__.

Other
-----

Download the provisioning script and run it on your machine. Note that:

1. You need to run the script as root.
2. It was tested only on Ubuntu 12.04, Ubuntu 14.04 and Debian Wheezy.

Setup
=====

Once you've created the instance with either the image or the script, you should have a running re:dash instance with everything you need to get started. You can even login to it with the user "admin" (password: "admin"). But to make it useful, there are a few more steps you need to do manually to complete the setup:

First, ssh to your instance and change directory to ``/opt/redash``. If you're using the GCE image, switch to root (``sudo su``).

Users & Google Authentication setup
-----------------------------------

Most of the settings you need to edit are in the ``/opt/redash/.env`` file.

1. Update the cookie secret (important! otherwise anyone can sign new cookies and impersonate users): change "veryverysecret" in the line ``export REDASH_COOKIE_SECRET=veryverysecret`` to something else (you can use ``pwgen 32 -1`` to generate a random string, or see the sketch after this list).

2. By default we create an admin user with the password "admin". You need to change the password:

   - ``cd /opt/redash/current``
   - ``sudo -u redash bin/run ./manage.py users password admin {new password}``

3. If you want to use Google OAuth to authenticate users, you need to create a Google Developers project (see :doc:`instructions </misc/google_developers_project>`) and then add the needed configuration in the ``.env`` file:

   .. code::

       export REDASH_GOOGLE_CLIENT_ID=""
       export REDASH_GOOGLE_CLIENT_SECRET=""
       export REDASH_GOOGLE_APPS_DOMAIN=""

   ``REDASH_GOOGLE_CLIENT_ID`` and ``REDASH_GOOGLE_CLIENT_SECRET`` are the values you get after registering with Google. ``REDASH_GOOGLE_APPS_DOMAIN`` is used in case you want to limit access to a single Google Apps domain (*if you leave it empty, anyone with a Google account can access your instance*).

4. Restart the web server to apply the configuration changes: ``sudo supervisorctl restart redash_server``.

5. Once you have Google OAuth enabled, you can login using your Google Apps account. If you want to grant admin permissions to some users, you can do it with the ``users grant_admin`` command: ``sudo -u redash bin/run ./manage.py users grant_admin {email}``.

6. If you don't use Google OAuth or just need username/password logins, you can create additional users using the CLI (see :doc:`documentation </usage/users>`).
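If ``pwgen`` is not available, the secret from step 1 can equally be generated with stock Python (an equivalent alternative, not an extra required step):

.. code:: python

    # Prints a 64-character random hex string suitable for
    # REDASH_COOKIE_SECRET.
    import binascii
    import os

    print(binascii.hexlify(os.urandom(32)).decode())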
Datasources
-----------

To make re:dash truly useful, you need to set up your data sources in it. Currently all data source management is done with the CLI.

See the :doc:`documentation </datasources>` for the different options. Your instance comes ready with the dependencies needed to set up the supported sources.

Follow issue `#193 <https://github.com/EverythingMe/redash/issues/193>`__ to know when a UI for managing data sources is implemented.

How to upgrade?
---------------

It's recommended to upgrade your re:dash instance once in a while to benefit from bug fixes and new features. See :doc:`here </upgrade>` for full upgrade instructions (including the Fabric script).

Notes
=====

- If this is a production setup, you should enforce HTTPS and make sure you set the cookie secret (see :doc:`instructions </misc/ssl>`).
docs/upgrade.rst (new file, 34 lines)

How to Upgrade
##############

It's recommended to upgrade your re:dash instance when new releases come out, to benefit from new features and bug fixes. The upgrade process is relatively simple, and assuming you used one of the base images we provide, you can just use the `Fabric <http://www.fabfile.org/>`__ script provided here: https://gist.github.com/arikfr/440d1403b4aeb76ebaf8.

How to run the Fabric script
============================

1. Install Fabric: ``pip install fabric requests`` (needed only once)
2. Download the ``fabfile.py`` from the gist.
3. Run the script: ``fab -H{your re:dash host} -u{the ssh user for this host} deploy_latest_release``

What the Fabric script does
===========================

Even if you didn't use the image, it's very likely you can reuse most of this script with small modifications. What this script does is (a condensed sketch follows the list):

1. Find the URL of the latest release tarball (from the `GitHub releases page <https://github.com/everythingme/redash/releases>`__).
2. Download it.
3. Create a new directory for this version (for example: ``/opt/redash/redash.0.5.0.b685``).
4. Unpack it (``tar -C {dir} -xvf {tarball path}``).
5. Link the ``/opt/redash/.env`` file into this directory.
6. Apply any new migrations.
7. Link ``/opt/redash/current`` to the new version.
8. Restart the web server and celery workers.
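For reference, a condensed sketch of such a deploy task (Fabric 1.x style; this is not the actual gist, and it leaves out release discovery and migrations for brevity; paths follow the layout described above):

.. code:: python

    from fabric.api import run, sudo, task

    @task
    def deploy_version(tarball_url, version):
        # Mirrors steps 2-5, 7 and 8 above.
        version_dir = "/opt/redash/redash.{0}".format(version)
        run("mkdir -p {0}".format(version_dir))
        run("wget -O /tmp/redash.tar.gz {0}".format(tarball_url))
        run("tar -C {0} -xvf /tmp/redash.tar.gz".format(version_dir))
        run("ln -nfs /opt/redash/.env {0}/.env".format(version_dir))
        run("ln -nfs {0} /opt/redash/current".format(version_dir))
        sudo("supervisorctl restart redash_server redash_celery")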
docs/usage.rst (new file, 12 lines)

Usage
=====

.. toctree::
   :maxdepth: 2
   :glob:

   usage/maintenance.rst
   usage/users.rst
   usage/*
48
docs/usage/elasticsearch_querying.rst
Normal file
48
docs/usage/elasticsearch_querying.rst
Normal file
@@ -0,0 +1,48 @@
ElasticSearch: Querying
#######################

ElasticSearch currently supports only simple Lucene-style queries (like
Kibana, but without the aggregations). Full-blown JSON-based
ElasticSearch queries (including aggregations) will be added later.

Simple query example
====================

- Query the index named "twitter"
- Filter by "user:kimchy"
- Return the fields: "@timestamp", "tweet" and "user"
- Return up to 15 results
- Sort by @timestamp ascending

.. code:: json

    {
        "index" : "twitter",
        "query" : "user:kimchy",
        "fields" : ["@timestamp", "tweet", "user"],
        "size" : 15,
        "sort" : "@timestamp:asc"
    }
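For intuition, a query like this maps roughly onto ElasticSearch's URI
search API. A minimal sketch, assuming an ElasticSearch 1.x instance on
``localhost:9200`` and the ``requests`` package; this is only an
illustration, not re:dash's actual query runner:

.. code:: python

    import requests

    # Roughly equivalent to the JSON query above, sent as a URI search.
    params = {
        "q": "user:kimchy",                  # Lucene query string
        "fields": "@timestamp,tweet,user",   # fields to return (ES 1.x)
        "size": 15,
        "sort": "@timestamp:asc",
    }
    resp = requests.get("http://localhost:9200/twitter/_search", params=params)
    for hit in resp.json()["hits"]["hits"]:
        print(hit.get("fields"))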
Simple query on a logstash ElasticSearch instance
=================================================

- Query the index named "logstash-2015.04.\*" (in this case, all of
  April 2015)
- Filter by type:events AND eventName:UserUpgrade AND channel:selfserve
- Return the fields: "@timestamp", "userId", "channel", "utm\_source",
  "utm\_medium", "utm\_campaign", "utm\_content"
- Return up to 250 results
- Sort by @timestamp ascending

.. code:: json

    {
        "index" : "logstash-2015.04.*",
        "query" : "type:events AND eventName:UserUpgrade AND channel:selfserve",
        "fields" : ["@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"],
        "size" : 250,
        "sort" : "@timestamp:asc"
    }
94  docs/usage/maintenance.rst  Normal file
@@ -0,0 +1,94 @@
Ongoing Maintenance and Basic Operations
########################################

Configuration and logs
======================

The supervisor config can be found in
``/opt/redash/supervisord/supervisord.conf``.

There you can see the names of its programs (``redash_celery``,
``redash_server``) and the location of their logs.

Restart
=======

Restarting the Web Server
-------------------------

``sudo supervisorctl restart redash_server``

Restarting Celery Workers
-------------------------

``sudo supervisorctl restart redash_celery``

Restarting Celery Workers & the Queries Queue
---------------------------------------------

In case you are handling a problem and need to stop the currently
running queries and reset the queue, follow the steps below.

1. Stop celery: ``sudo supervisorctl stop redash_celery`` (celery might
   take some time to stop, if it's in the middle of running a query).

2. Flush redis: ``redis-cli flushdb`` (see the note after these steps).

3. Start celery: ``sudo supervisorctl start redash_celery``.
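Step 2 assumes the queue lives in the default local Redis database. If
you prefer to do the same from Python, a minimal equivalent sketch,
assuming the ``redis`` package and the default ``localhost:6379/0``:

.. code:: python

    import redis

    # Equivalent of `redis-cli flushdb`: drop all keys in the queue DB.
    redis.StrictRedis(host="localhost", port=6379, db=0).flushdb()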
Changing the Number of Workers
==============================

By default, Celery will start a worker per CPU core. Because most of
re:dash's tasks are IO bound, the real limit for the number of workers
you can use depends on the amount of memory your machine has. It's
recommended to increase the number of workers, to support more
concurrent queries.

1. Open the supervisord configuration file:
   ``/opt/redash/supervisord/supervisord.conf``.

2. Edit the ``[program:redash_celery]`` section and add the ``-c``
   parameter with the number of concurrent workers you need (for
   example, ``-c 8``) to the *command* value.

3. Restart supervisord to apply the new configuration:
   ``sudo /etc/init.d/redash_supervisord restart``.

DB
==

Show the Currently Configured Data Sources
------------------------------------------

This varies based on the re:dash version and personal preference. You
can do one of the following:

Using the CLI
~~~~~~~~~~~~~

In ``/opt/redash/current``, run:
``sudo -u redash bin/run ./manage.py ds list``

Using the Admin
~~~~~~~~~~~~~~~

(Available from version 0.6b797.) Browse to ``/admin/datasource``.

View the Definition Directly in the DB
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

1. Open psql: ``sudo -u redash psql``

2. Run the query: ``SELECT * FROM data_sources;``

Backup re:dash's DB
-------------------

``sudo -u redash pg_dump > backup_filename.sql``

Version
=======

See the current version:

``bin/run ./manage.py version``
74  docs/usage/mongodb_querying.rst  Normal file
@@ -0,0 +1,74 @@
MongoDB: Querying
#################

Simple query example
====================

.. code:: json

    {
        "collection" : "my_collection",
        "query" : {
            "date" : {
                "$gt" : "ISODate(\"2015-01-15 11:41\")"
            },
            "type" : 1
        },
        "fields" : {
            "_id" : 1,
            "name" : 2
        },
        "sort" : [
            {
                "name" : "date",
                "direction" : -1
            }
        ]
    }

Live example on the demo instance:
http://demo.redash.io/queries/394/source.

Aggregation
===========

Aggregations use a syntax similar to the one used in PyMongo; however,
to support the correct order of sorting, the "$sort" operation takes a
regular list, which is converted into a SON (sorted dictionary) object
before execution (a short sketch of this conversion follows the
examples below).

Aggregation query example:

.. code:: json

    {
        "collection" : "things",
        "aggregate" : [
            {
                "$unwind" : "$tags"
            },
            {
                "$group" : {
                    "_id" : "$tags",
                    "count" : { "$sum" : 1 }
                }
            },
            {
                "$sort" : [
                    {
                        "name" : "count",
                        "direction" : -1
                    },
                    {
                        "name" : "_id",
                        "direction" : -1
                    }
                ]
            }
        ]
    }

Live examples on the demo instance:

1. http://demo.redash.io/queries/393/source
2. http://demo.redash.io/queries/387/source
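To make the SON conversion concrete, here is a minimal sketch of how
such a "$sort" list can be turned into a SON object so that field order
is preserved. It assumes the ``pymongo`` package and is illustrative
only, not re:dash's actual implementation:

.. code:: python

    from bson.son import SON

    def sort_list_to_son(sort_list):
        # [{"name": "count", "direction": -1}, ...] -> SON([("count", -1), ...])
        return SON([(item["name"], item["direction"]) for item in sort_list])

    sort_spec = [{"name": "count", "direction": -1},
                 {"name": "_id", "direction": -1}]
    print(sort_list_to_son(sort_spec))  # SON([('count', -1), ('_id', -1)])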
39  docs/usage/users.rst  Normal file
@@ -0,0 +1,39 @@
Users' Management
#################

If you use Google OpenID authentication, then each user from the
domains you allowed will automatically be logged in and have the
default permissions.

If you want to give some users different permissions, or you want to
create password-based users (make sure you enabled this option in the
settings first), you need to use the CLI (``manage.py``).

Create a new user
=================

.. code:: bash

    $ bin/run ./manage.py users create --help
    usage: users create [-h] [--permissions PERMISSIONS] [--password PASSWORD]
                        [--google] [--admin]
                        name email

    positional arguments:
      name                  User's full name
      email                 User's email

    optional arguments:
      -h, --help            show this help message and exit
      --permissions PERMISSIONS
                            Comma separated list of permissions (leave blank for
                            default).
      --password PASSWORD   Password for users who don't use Google Auth (leave
                            blank for prompt).
      --google              user uses Google Auth to login
      --admin               set user as admin
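For example, to create a password-based admin user (you will be
prompted for the password, since ``--password`` is omitted; the name and
email here are placeholders):
``sudo -u redash bin/run ./manage.py users create --admin "John Doe" john@example.com``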
Grant admin permissions
=======================

``sudo -u redash bin/run ./manage.py users grant_admin {email}``
19  manage.py
@@ -2,12 +2,15 @@
"""
CLI to manage redash.
"""
import json

from flask.ext.script import Manager

from redash import settings, models, __version__
from redash.wsgi import app
from redash.import_export import import_manager
from redash.cli import users, database, data_sources
from redash.monitor import get_status

manager = Manager(app)
manager.add_command("database", database.manager)
@@ -21,6 +24,9 @@ def version():
    """Displays re:dash version."""
    print __version__

@manager.command
def status():
    print json.dumps(get_status(), indent=2)

@manager.command
def runworkers():
@@ -37,12 +43,15 @@ def make_shell_context():
@manager.command
def check_settings():
    """Show the settings as re:dash sees them (useful for debugging)."""
    from types import ModuleType
    for name, item in settings.all_settings().iteritems():
        print "{} = {}".format(name, item)

    for name in dir(settings):
        item = getattr(settings, name)
        if not callable(item) and not name.startswith("__") and not isinstance(item, ModuleType):
            print "{} = {}".format(name, item)

@manager.command
def send_test_mail():
    from redash import mail
    from flask_mail import Message

    mail.send(Message(subject="Test Message from re:dash", recipients=[settings.MAIL_DEFAULT_SENDER], body="Test message."))


if __name__ == '__main__':
27  migrations/0009_add_api_key_to_user.py  Normal file
@@ -0,0 +1,27 @@
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        # Add the column as nullable first, so existing rows don't violate it.
        column = models.User.api_key
        column.null = True
        migrate(
            migrator.add_column('users', 'api_key', models.User.api_key),
        )

        # Saving each user backfills an api_key for the existing rows.
        for user in models.User.select():
            user.save()

        # Now that every row has a value, enforce NOT NULL.
        migrate(
            migrator.add_not_null('users', 'api_key')
        )

    db.close_db(None)
18  migrations/0010_allow_deleting_datasources.py  Normal file
@@ -0,0 +1,18 @@
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(
            migrator.drop_not_null('queries', 'data_source_id'),
        )

    db.close_db(None)
8  migrations/0010_create_alerts.py  Normal file
@@ -0,0 +1,8 @@
from redash.models import db, Alert, AlertSubscription

if __name__ == '__main__':
    with db.database.transaction():
        Alert.create_table()
        AlertSubscription.create_table()

    db.close_db(None)
44  migrations/0011_migrate_bigquery_to_json.py  Normal file
@@ -0,0 +1,44 @@
from base64 import b64encode
import json
from redash.models import DataSource


def convert_p12_to_pem(p12file):
    from OpenSSL import crypto
    with open(p12file, 'rb') as f:
        p12 = crypto.load_pkcs12(f.read(), "notasecret")

    return crypto.dump_privatekey(crypto.FILETYPE_PEM, p12.get_privatekey())

if __name__ == '__main__':

    for ds in DataSource.all():

        if ds.type == 'bigquery':
            options = json.loads(ds.options)

            # Already migrated to the new JSON key file format.
            if 'jsonKeyFile' in options:
                continue

            new_options = {
                'projectId': options['projectId'],
                'jsonKeyFile': b64encode(json.dumps({
                    'client_email': options['serviceAccount'],
                    'private_key': convert_p12_to_pem(options['privateKey'])
                }))
            }

            ds.options = json.dumps(new_options)
            ds.save()
        elif ds.type == 'google_spreadsheets':
            options = json.loads(ds.options)
            if 'jsonKeyFile' in options:
                continue

            with open(options['credentialsFilePath']) as f:
                new_options = {
                    'jsonKeyFile': b64encode(f.read())
                }

            ds.options = json.dumps(new_options)
            ds.save()
@@ -19,6 +19,7 @@
  "trailing": true,
  "smarttabs": true,
  "globals": {
    "angular": false
    "angular": false,
    "_": false
  }
}
@@ -19,6 +19,7 @@
    <link rel="stylesheet" href="/bower_components/pace/themes/pace-theme-minimal.css">
    <link rel="stylesheet" href="/bower_components/font-awesome/css/font-awesome.css">
    <link rel="stylesheet" href="/bower_components/codemirror/addon/hint/show-hint.css">
    <link rel="stylesheet" href="/bower_components/leaflet/dist/leaflet.css">
    <link rel="stylesheet" href="/styles/redash.css">
    <!-- endbuild -->

@@ -72,6 +73,12 @@
        <li><a href="/queries">Queries</a></li>
      </ul>
    </li>
    <li>
      <a href="/alerts">Alerts</a>
    </li>
    <li ng-show="currentUser.hasPermission('admin')">
      <a href="/data_sources">Data Sources</a>
    </li>
  </ul>
  <form class="navbar-form navbar-left" role="search" ng-submit="searchQueries()">
    <div class="form-group">
@@ -129,6 +136,7 @@
    <script src="/bower_components/angular-ui-select/dist/select.js"></script>
    <script src="/bower_components/underscore.string/lib/underscore.string.js"></script>
    <script src="/bower_components/marked/lib/marked.js"></script>
    <script src="/bower_components/angular-base64-upload/dist/angular-base64-upload.js"></script>
    <script src="/scripts/ng_highchart.js"></script>
    <script src="/scripts/ng_smart_table.js"></script>
    <script src="/bower_components/angular-ui-bootstrap-bower/ui-bootstrap-tpls.js"></script>
@@ -137,7 +145,8 @@
    <script src="/bower_components/mustache/mustache.js"></script>
    <script src="/bower_components/canvg/rgbcolor.js"></script>
    <script src="/bower_components/canvg/StackBlur.js"></script>
    <script src="/bower_components/canvg/canvg.js"></script>
    <script src="/bower_components/canvg/canvg.js"></script>
    <script src="/bower_components/leaflet/dist/leaflet.js"></script>
    <!-- endbuild -->

    <!-- build:js({.tmp,app}) /scripts/scripts.js -->
@@ -149,18 +158,22 @@
    <script src="/scripts/controllers/controllers.js"></script>
    <script src="/scripts/controllers/dashboard.js"></script>
    <script src="/scripts/controllers/admin_controllers.js"></script>
    <script src="/scripts/controllers/data_sources.js"></script>
    <script src="/scripts/controllers/query_view.js"></script>
    <script src="/scripts/controllers/query_source.js"></script>
    <script src="/scripts/visualizations/base.js"></script>
    <script src="/scripts/visualizations/chart.js"></script>
    <script src="/scripts/visualizations/cohort.js"></script>
    <script src="/scripts/visualizations/map.js"></script>
    <script src="/scripts/visualizations/counter.js"></script>
    <script src="/scripts/visualizations/table.js"></script>
    <script src="/scripts/visualizations/pivot.js"></script>
    <script src="/scripts/directives/directives.js"></script>
    <script src="/scripts/directives/query_directives.js"></script>
    <script src="/scripts/directives/data_source_directives.js"></script>
    <script src="/scripts/directives/dashboard_directives.js"></script>
    <script src="/scripts/filters.js"></script>
    <script src="/scripts/controllers/alerts.js"></script>
    <!-- endbuild -->

    <script>
@@ -175,7 +188,7 @@

      currentUser.hasPermission = function(permission) {
        return this.permissions.indexOf(permission) != -1;
      }
      };

      {{ analytics|safe }}
    </script>
@@ -13,6 +13,10 @@
    <link rel="stylesheet" href="/styles/redash.css">
    <link rel="stylesheet" href="/styles/login.css">
    <!-- endbuild -->

    <link rel="icon" type="image/png" sizes="32x32" href="/images/favicon-32x32.png">
    <link rel="icon" type="image/png" sizes="96x96" href="/images/favicon-96x96.png">
    <link rel="icon" type="image/png" sizes="16x16" href="/images/favicon-16x16.png">
  </head>
  <body>

@@ -26,13 +30,20 @@
          <span class="icon-bar"></span>
          <span class="icon-bar"></span>
        </button>
        <a class="navbar-brand" href="/"><strong>{{name}}</strong></a>
        <a class="navbar-brand" href="/"><img src="/images/redash_icon_small.png"/></a>
      </div>
    </div>
  </nav>

  <div class="container">
    <div class="row">
      {% with messages = get_flashed_messages() %}
        {% if messages %}
          {% for message in messages %}
            <div class="alert alert-warning" role="alert">{{ message }}</div>
          {% endfor %}
        {% endif %}
      {% endwith %}

      <div class="main">
        {% if show_google_openid %}
@@ -48,6 +59,19 @@

        {% endif %}

        {% if show_saml_login %}

        <div class="row">
          <a href="/saml/login">SAML Login</a>
        </div>

        <div class="login-or">
          <hr class="hr-or">
          <span class="span-or">or</span>
        </div>

        {% endif %}

        <form role="form" method="post" name="login">
          <div class="form-group">
            <label for="inputUsernameEmail">Username or email</label>
@@ -14,7 +14,8 @@ angular.module('redash', [
  'smartTable.table',
  'ngResource',
  'ngRoute',
  'ui.select'
  'ui.select',
  'naif.base64'
]).config(['$routeProvider', '$locationProvider', '$compileProvider', 'growlProvider',
  function ($routeProvider, $locationProvider, $compileProvider, growlProvider) {
    if (featureFlags.clientSideMetrics) {
@@ -80,9 +81,23 @@ angular.module('redash', [
      templateUrl: '/views/admin_status.html',
      controller: 'AdminStatusCtrl'
    });
    $routeProvider.when('/admin/workers', {
      templateUrl: '/views/admin_workers.html',
      controller: 'AdminWorkersCtrl'

    $routeProvider.when('/alerts', {
      templateUrl: '/views/alerts/list.html',
      controller: 'AlertsCtrl'
    });
    $routeProvider.when('/alerts/:alertId', {
      templateUrl: '/views/alerts/edit.html',
      controller: 'AlertCtrl'
    });

    $routeProvider.when('/data_sources/:dataSourceId', {
      templateUrl: '/views/data_sources/edit.html',
      controller: 'DataSourceCtrl'
    });
    $routeProvider.when('/data_sources', {
      templateUrl: '/views/data_sources/list.html',
      controller: 'DataSourcesCtrl'
    });

    $routeProvider.when('/', {

@@ -17,7 +17,7 @@
      };

      refresh();
    }
  };

angular.module('redash.admin_controllers', [])
  .controller('AdminStatusCtrl', ['$scope', 'Events', '$http', '$timeout', AdminStatusCtrl])
174  rd_ui/app/scripts/controllers/alerts.js  Normal file
@@ -0,0 +1,174 @@
(function() {

  var AlertsCtrl = function($scope, Events, Alert) {
    Events.record(currentUser, "view", "page", "alerts");
    $scope.$parent.pageTitle = "Alerts";

    $scope.alerts = [];
    Alert.query(function(alerts) {
      var stateClass = {
        'ok': 'label label-success',
        'triggered': 'label label-danger',
        'unknown': 'label label-warning'
      };
      _.each(alerts, function(alert) {
        alert.class = stateClass[alert.state];
      });
      $scope.alerts = alerts;
    });

    $scope.gridConfig = {
      isPaginationEnabled: true,
      itemsByPage: 50,
      maxSize: 8
    };

    $scope.gridColumns = [
      {
        "label": "Name",
        "map": "name",
        "cellTemplate": '<a href="/alerts/{{dataRow.id}}">{{dataRow.name}}</a> (<a href="/queries/{{dataRow.query.id}}">query</a>)'
      },
      {
        'label': 'Created By',
        'map': 'user.name'
      },
      {
        'label': 'State',
        'cellTemplate': '<span ng-class="dataRow.class">{{dataRow.state | uppercase}}</span> since <span am-time-ago="dataRow.updated_at"></span>'
      },
      {
        'label': 'Created At',
        'cellTemplate': '<span am-time-ago="dataRow.created_at"></span>'
      }
    ];
  };

  var AlertCtrl = function($scope, $routeParams, $location, growl, Query, Events, Alert) {
    $scope.$parent.pageTitle = "Alerts";

    $scope.alertId = $routeParams.alertId;
    if ($scope.alertId === "new") {
      Events.record(currentUser, 'view', 'page', 'alerts/new');
    } else {
      Events.record(currentUser, 'view', 'alert', $scope.alertId);
    }

    $scope.onQuerySelected = function(item) {
      $scope.selectedQuery = item;
      item.getQueryResultPromise().then(function(result) {
        $scope.queryResult = result;
        $scope.alert.options.column = $scope.alert.options.column || result.getColumnNames()[0];
      });
    };

    if ($scope.alertId === "new") {
      $scope.alert = new Alert({options: {}});
    } else {
      $scope.alert = Alert.get({id: $scope.alertId}, function(alert) {
        $scope.onQuerySelected(new Query($scope.alert.query));
      });
    }

    $scope.ops = ['greater than', 'less than', 'equals'];
    $scope.selectedQuery = null;

    $scope.getDefaultName = function() {
      if (!$scope.alert.query) {
        return undefined;
      }
      return _.template("<%= query.name %>: <%= options.column %> <%= options.op %> <%= options.value %>", $scope.alert);
    };

    $scope.searchQueries = function (term) {
      if (!term || term.length < 3) {
        return;
      }

      Query.search({q: term}, function(results) {
        $scope.queries = results;
      });
    };

    $scope.saveChanges = function() {
      if ($scope.alert.name === undefined || $scope.alert.name === '') {
        $scope.alert.name = $scope.getDefaultName();
      }

      $scope.alert.$save(function(alert) {
        growl.addSuccessMessage("Saved.");
        if ($scope.alertId === "new") {
          $location.path('/alerts/' + alert.id).replace();
        }
      }, function() {
        growl.addErrorMessage("Failed saving alert.");
      });
    };
  };

  angular.module('redash.directives').directive('alertSubscribers', ['AlertSubscription', function (AlertSubscription) {
    return {
      restrict: 'E',
      replace: true,
      templateUrl: '/views/alerts/subscribers.html',
      scope: {
        'alertId': '='
      },
      controller: function ($scope) {
        $scope.subscribers = AlertSubscription.query({alertId: $scope.alertId});
      }
    };
  }]);

  angular.module('redash.directives').directive('subscribeButton', ['AlertSubscription', 'growl', function (AlertSubscription, growl) {
    return {
      restrict: 'E',
      replace: true,
      template: '<button class="btn btn-default btn-xs" ng-click="toggleSubscription()"><i ng-class="class"></i></button>',
      controller: function ($scope) {
        var updateClass = function() {
          if ($scope.subscription) {
            $scope.class = "fa fa-eye-slash";
          } else {
            $scope.class = "fa fa-eye";
          }
        };

        $scope.subscribers.$promise.then(function() {
          $scope.subscription = _.find($scope.subscribers, function(subscription) {
            return (subscription.user.email == currentUser.email);
          });

          updateClass();
        });

        $scope.toggleSubscription = function() {
          if ($scope.subscription) {
            $scope.subscription.$delete(function() {
              $scope.subscribers = _.without($scope.subscribers, $scope.subscription);
              $scope.subscription = undefined;
              updateClass();
            }, function() {
              growl.addErrorMessage("Unsubscription failed.");
            });
          } else {
            $scope.subscription = new AlertSubscription({alert_id: $scope.alertId});
            $scope.subscription.$save(function() {
              $scope.subscribers.push($scope.subscription);
              updateClass();
            }, function() {
              growl.addErrorMessage("Failed saving subscription.");
            });
          }
        };
      }
    };
  }]);

  angular.module('redash.controllers')
    .controller('AlertsCtrl', ['$scope', 'Events', 'Alert', AlertsCtrl])
    .controller('AlertCtrl', ['$scope', '$routeParams', '$location', 'growl', 'Query', 'Events', 'Alert', AlertCtrl]);

})();
@@ -23,7 +23,7 @@
      },
      {
        'label': 'Created By',
        'map': 'user_name'
        'map': 'user.name'
      },
      {
        'label': 'Created At',
@@ -45,7 +45,6 @@
      Query.search({q: $scope.term }, function(results) {
        $scope.queries = _.map(results, function(query) {
          query.created_at = moment(query.created_at);
          query.user_name = query.user.name;
          return query;
        });
      });
@@ -93,7 +92,6 @@
      $scope.allQueries = _.map(queries, function (query) {
        query.created_at = moment(query.created_at);
        query.retrieved_at = moment(query.retrieved_at);
        query.user_name = query.user.name;
        return query;
      });

@@ -108,7 +106,7 @@
      },
      {
        'label': 'Created By',
        'map': 'user_name'
        'map': 'user.name'
      },
      {
        'label': 'Created At',
47  rd_ui/app/scripts/controllers/data_sources.js  Normal file
@@ -0,0 +1,47 @@
(function () {
  var DataSourcesCtrl = function ($scope, $location, growl, Events, DataSource) {
    Events.record(currentUser, "view", "page", "admin/data_sources");
    $scope.$parent.pageTitle = "Data Sources";

    $scope.dataSources = DataSource.query();

    $scope.openDataSource = function(datasource) {
      $location.path('/data_sources/' + datasource.id);
    };

    $scope.deleteDataSource = function(event, datasource) {
      event.stopPropagation();
      Events.record(currentUser, "delete", "datasource", datasource.id);
      datasource.$delete(function(resource) {
        growl.addSuccessMessage("Data source deleted successfully.");
        this.$parent.dataSources = _.without(this.dataSources, resource);
      }.bind(this), function(httpResponse) {
        console.log("Failed to delete data source: ", httpResponse.status, httpResponse.statusText, httpResponse.data);
        growl.addErrorMessage("Failed to delete data source.");
      });
    };
  };

  var DataSourceCtrl = function ($scope, $routeParams, $http, $location, Events, DataSource) {
    Events.record(currentUser, "view", "page", "admin/data_source");
    $scope.$parent.pageTitle = "Data Sources";

    $scope.dataSourceId = $routeParams.dataSourceId;

    if ($scope.dataSourceId == "new") {
      $scope.dataSource = new DataSource({options: {}});
    } else {
      $scope.dataSource = DataSource.get({id: $routeParams.dataSourceId});
    }

    $scope.$watch('dataSource.id', function(id) {
      if (id != $scope.dataSourceId && id !== undefined) {
        $location.path('/data_sources/' + id).replace();
      }
    });
  };

  angular.module('redash.controllers')
    .controller('DataSourcesCtrl', ['$scope', '$location', 'growl', 'Events', 'DataSource', DataSourcesCtrl])
    .controller('DataSourceCtrl', ['$scope', '$routeParams', '$http', '$location', 'Events', 'DataSource', DataSourceCtrl]);
})();
@@ -15,6 +15,7 @@
      maxAge = -1;
    }

    $scope.showLog = false;
    $scope.queryResult = $scope.query.getQueryResult(maxAge, parameters);
  }

@@ -48,15 +49,19 @@
    $scope.isQueryOwner = (currentUser.id === $scope.query.user.id) || currentUser.hasPermission('admin');
    $scope.canViewSource = currentUser.hasPermission('view_source');

    $scope.dataSources = DataSource.get(function(dataSources) {
    $scope.dataSources = DataSource.query(function(dataSources) {
      updateSchema();
      $scope.query.data_source_id = $scope.query.data_source_id || dataSources[0].id;
      $scope.dataSource = _.find(dataSources, function(ds) { return ds.id == $scope.query.data_source_id; });

      if ($scope.query.isNew()) {
        $scope.query.data_source_id = $scope.query.data_source_id || dataSources[0].id;
        $scope.dataSource = _.find(dataSources, function(ds) { return ds.id == $scope.query.data_source_id; });
      }
    });

    // in view mode, latest dataset is always visible
    // source mode changes this behavior
    $scope.showDataset = true;
    $scope.showLog = false;

    $scope.lockButton = function(lock) {
      $scope.queryExecuting = lock;
@@ -99,6 +104,9 @@
    };

    $scope.executeQuery = function() {
      if (!$scope.query.query) {
        return;
      }
      getQueryResult(0);
      $scope.lockButton(true);
      $scope.cancelling = false;
@@ -110,21 +118,21 @@
      $scope.queryResult.cancelExecution();
      Events.record(currentUser, 'cancel_execute', 'query', $scope.query.id);
    };

    $scope.archiveQuery = function(options, data) {
      if (data) {
        data.id = $scope.query.id;
      } else {
        data = $scope.query;
      }

      $scope.isDirty = false;

      options = _.extend({}, {
        successMessage: 'Query archived',
        errorMessage: 'Query could not be archived'
      }, options);

      return Query.delete({id: data.id}, function() {
        $scope.query.is_archived = true;
        $scope.query.schedule = null;
@@ -197,6 +205,10 @@
      if (status === 'done' || status === 'failed') {
        $scope.lockButton(false);
      }

      if ($scope.queryResult.getLog() != null) {
        $scope.showLog = true;
      }
    });

    $scope.openScheduleForm = function() {
76  rd_ui/app/scripts/directives/data_source_directives.js  Normal file
@@ -0,0 +1,76 @@
(function () {
  'use strict';

  var directives = angular.module('redash.directives');

  // Angular strips data- from the directive, so data-source-form becomes sourceForm...
  directives.directive('sourceForm', ['$http', 'growl', function ($http, growl) {
    return {
      restrict: 'E',
      replace: true,
      templateUrl: '/views/data_sources/form.html',
      scope: {
        'dataSource': '='
      },
      link: function ($scope) {
        var setType = function(types) {
          if ($scope.dataSource.type === undefined) {
            $scope.dataSource.type = types[0].type;
            return types[0];
          }

          $scope.type = _.find(types, function (t) {
            return t.type == $scope.dataSource.type;
          });
        };

        $scope.files = {};

        $scope.$watchCollection('files', function() {
          _.each($scope.files, function(v, k) {
            if (v) {
              $scope.dataSource.options[k] = v.base64;
            }
          });
        });

        $http.get('/api/data_sources/types').success(function (types) {
          setType(types);

          $scope.dataSourceTypes = types;

          _.each(types, function (type) {
            _.each(type.configuration_schema.properties, function (prop, name) {
              if (name == 'password' || name == 'passwd') {
                prop.type = 'password';
              }

              if (_.string.endsWith(name, "File")) {
                prop.type = 'file';
              }

              prop.required = _.contains(type.configuration_schema.required, name);
            });
          });
        });

        $scope.$watch('dataSource.type', function(current, prev) {
          if (prev !== current) {
            if (prev !== undefined) {
              $scope.dataSource.options = {};
            }
            setType($scope.dataSourceTypes);
          }
        });

        $scope.saveChanges = function() {
          $scope.dataSource.$save(function() {
            growl.addSuccessMessage("Saved.");
          }, function() {
            growl.addErrorMessage("Failed saving.");
          });
        };
      }
    };
  }]);
})();
@@ -8,7 +8,7 @@
      'query': '=',
      'visualization': '=?'
    },
    template: '<a ng-href="{{link}}" class="query-link">{{query.name}}</a>',
    template: '<small><span class="glyphicon glyphicon-link"></span></small> <a ng-href="{{link}}" class="query-link">{{query.name}}</a>',
    link: function(scope, element) {
      scope.link = '/queries/' + scope.query.id;
      if (scope.visualization) {
@@ -139,6 +139,8 @@

        additionalHints = _.unique(keywords);
      }

      codemirror.refresh();
    });

    $scope.$watch('syntax', function(syntax) {
@@ -239,7 +241,14 @@
        value: "60",
        name: 'Every minute'
      }
    ]
    ];

    _.each([5, 10, 15, 30], function(i) {
      $scope.refreshOptions.push({
        value: String(i*60),
        name: "Every " + i + " minutes"
      })
    });

    _.each(_.range(1, 13), function (i) {
      $scope.refreshOptions.push({
@@ -145,7 +145,7 @@

    if (!hasTotalsAlready) {
      this.addSeries({
        data: _.values(data),
        data: _.sortBy(_.values(data), 'x'),
        type: 'line',
        name: 'Total'
      }, false)
@@ -308,22 +308,6 @@
      // We check either for true or undefined for backward compatibility.
      var series = scope.series;

      if (chartOptions['sortX'] === true || chartOptions['sortX'] === undefined) {
        var seriesCopy = [];

        _.each(series, function (s) {
          // make a copy of series data, so we don't override original.
          var fieldName = 'x';
          if (s.data.length > 0 && _.has(s.data[0], 'name')) {
            fieldName = 'name';
          }

          var sorted = _.extend({}, s, {data: _.sortBy(s.data, fieldName)});
          seriesCopy.push(sorted);
        });

        series = seriesCopy;
      }

      // If this is a chart that has just one row for multiple columns, sort
      // by the Y values. For example:
@@ -376,6 +360,23 @@
        });
      }
    }

    if (chartOptions['sortX'] === true || chartOptions['sortX'] === undefined) {
      var seriesCopy = [];

      _.each(series, function (s) {
        // make a copy of series data, so we don't override original.
        var fieldName = 'x';
        if (s.data.length > 0 && _.has(s.data[0], 'name')) {
          fieldName = 'name';
        }

        var sorted = _.extend({}, s, {data: _.sortBy(s.data, fieldName)});
        seriesCopy.push(sorted);
      });

      series = seriesCopy;
    }

    scope.chart.counters.color = 0;
File diff suppressed because it is too large
@@ -1,4 +1,28 @@
(function () {
  function QueryResultError(errorMessage) {
    this.errorMessage = errorMessage;
  }

  QueryResultError.prototype.getError = function() {
    return this.errorMessage;
  };

  QueryResultError.prototype.getStatus = function() {
    return 'failed';
  };

  QueryResultError.prototype.getData = function() {
    return null;
  };

  QueryResultError.prototype.getLog = function() {
    return null;
  };

  QueryResultError.prototype.getChartData = function() {
    return null;
  };

  var QueryResult = function ($resource, $timeout, $q) {
    var QueryResultResource = $resource('/api/query_results/:id', {id: '@id'}, {'post': {'method': 'POST'}});
    var Job = $resource('/api/jobs/:id', {id: '@id'});
@@ -12,6 +36,8 @@

      var columnTypes = {};

      // TODO: we should stop manipulating incoming data, and switch to relying on the column type set by the backend.
      // This logic is prone to errors, and better be removed. Kept for now, for backward compatibility.
      _.each(this.query_result.data.rows, function (row) {
        _.each(row, function (v, k) {
          if (angular.isNumber(v)) {
@@ -30,7 +56,7 @@

      _.each(this.query_result.data.columns, function(column) {
        if (columnTypes[column.name]) {
          if (column.type == null) {
          if (column.type == null || column.type == 'string') {
            column.type = columnTypes[column.name];
          }
        }
@@ -42,7 +68,7 @@
      } else {
        this.status = undefined;
      }
    }
    };

    function QueryResult(props) {
      this.deferred = $q.defer();
@@ -93,6 +119,14 @@
      return this.job.error;
    }

    QueryResult.prototype.getLog = function() {
      if (!this.query_result.data || !this.query_result.data.log || this.query_result.data.log.length == 0) {
        return null;
      }

      return this.query_result.data.log;
    }

    QueryResult.prototype.getUpdatedAt = function () {
      return this.query_result.retrieved_at || this.job.updated_at * 1000.0 || this.updatedAt;
    }
@@ -404,20 +438,23 @@
      return '/queries/' + this.id + '/source';
    };

    Query.prototype.isNew = function() {
      return this.id === undefined;
    };

    Query.prototype.hasDailySchedule = function() {
      return (this.schedule && this.schedule.match(/\d\d:\d\d/) !== null);
    }
    };

    Query.prototype.scheduleInLocalTime = function() {
      var parts = this.schedule.split(':');
      return moment.utc().hour(parts[0]).minute(parts[1]).local().format('HH:mm');
    }
    };

    Query.prototype.getQueryResult = function (maxAge, parameters) {
      // if (ttl == undefined) {
      //   ttl = this.ttl;
      // }

      if (!this.query) {
        return;
      }
      var queryText = this.query;

      var queryParameters = this.getParameters();
@@ -452,6 +489,8 @@
        }
      } else if (this.data_source_id) {
        this.queryResult = QueryResult.get(this.data_source_id, queryText, maxAge, this.id);
      } else {
        return new QueryResultError("Please select data source to run this query.");
      }

      return this.queryResult;
@@ -488,14 +527,41 @@

    var DataSource = function ($resource) {
      var actions = {
        'get': {'method': 'GET', 'cache': true, 'isArray': true},
        'get': {'method': 'GET', 'cache': false, 'isArray': false},
        'query': {'method': 'GET', 'cache': false, 'isArray': true},
        'getSchema': {'method': 'GET', 'cache': true, 'isArray': true, 'url': '/api/data_sources/:id/schema'}
      };

      var DataSourceResource = $resource('/api/data_sources/:id', {id: '@id'}, actions);

      return DataSourceResource;
    }
    };

    var AlertSubscription = function ($resource) {
      var resource = $resource('/api/alerts/:alertId/subscriptions/:userId', {alertId: '@alert_id', userId: '@user.id'});
      return resource;
    };

    var Alert = function ($resource, $http) {
      var actions = {
        save: {
          method: 'POST',
          transformRequest: [function(data) {
            var newData = _.extend({}, data);
            if (newData.query_id === undefined) {
              newData.query_id = newData.query.id;
              delete newData.query;
            }

            return newData;
          }].concat($http.defaults.transformRequest)
        }
      };
      var resource = $resource('/api/alerts/:id', {id: '@id'}, actions);

      return resource;
    };

    var Widget = function ($resource, Query) {
      var WidgetResource = $resource('/api/widgets/:id', {id: '@id'});
@@ -522,5 +588,7 @@
    .factory('QueryResult', ['$resource', '$timeout', '$q', QueryResult])
    .factory('Query', ['$resource', 'QueryResult', 'DataSource', Query])
    .factory('DataSource', ['$resource', DataSource])
    .factory('Alert', ['$resource', '$http', Alert])
    .factory('AlertSubscription', ['$resource', AlertSubscription])
    .factory('Widget', ['$resource', 'Query', Widget]);
})();
File diff suppressed because one or more lines are too long
@@ -121,7 +121,7 @@
    query: '=',
    queryResult: '=',
    visualization: '=?',
    openEditor: '=?',
    openEditor: '@',
    onNewSuccess: '=?'
  },
  link: function (scope, element, attrs) {
@@ -150,9 +150,13 @@
    scope.$watch('visualization.type', function (type, oldType) {
      // if not edited by user, set name to match type
      if (type && oldType != type && scope.visualization && !scope.visForm.name.$dirty) {
        // poor man's titlecase
        scope.visualization.name = scope.visualization.type[0] + scope.visualization.type.slice(1).toLowerCase();
        scope.visualization.name = _.string.titleize(scope.visualization.type);
      }

      if (type && oldType != type && scope.visualization) {
        scope.visualization.options = Visualization.visualizations[scope.visualization.type].defaultOptions;
      }
    });

    scope.submit = function () {
@@ -112,9 +112,6 @@

    scope.columnTypes = {
      "X": "x",
      // "X (Date time)": "x",
      // "X (Linear)": "x-linear",
      // "X (Category)": "x-category",
      "Y": "y",
      "Series": "series",
      "Unused": "unused"
@@ -166,7 +163,7 @@
        scope.visualization.options.seriesOptions[s] = {'type': scope.visualization.options.globalSeriesType, 'yAxis': 0};
      }
      scope.visualization.options.seriesOptions[s].zIndex = scope.visualization.options.seriesOptions[s].zIndex === undefined ? i : scope.visualization.options.seriesOptions[s].zIndex;

      scope.visualization.options.seriesOptions[s].index = scope.visualization.options.seriesOptions[s].index === undefined ? i : scope.visualization.options.seriesOptions[s].index;
    });
    scope.zIndexes = _.range(scope.series.length);
    scope.yAxes = [[0, 'left'], [1, 'right']];
@@ -227,6 +224,12 @@
      }
    });

    scope.visualization.options.xAxis = scope.visualization.options.xAxis || {};
    scope.visualization.options.xAxis.labels = scope.visualization.options.xAxis.labels || {};
    if (scope.visualization.options.xAxis.labels.enabled === undefined) {
      scope.visualization.options.xAxis.labels.enabled = true;
    }

    scope.xAxisType = (scope.visualization.options.xAxis && scope.visualization.options.xAxis.type) || scope.xAxisType;

    xAxisUnwatch = scope.$watch("xAxisType", function (xAxisType) {

@@ -26,7 +26,10 @@
    if ($scope.queryResult.getData() == null) {

    } else {
      var sortedData = _.sortBy($scope.queryResult.getData(), "date");
      var sortedData = _.sortBy($scope.queryResult.getData(), function(r) {
        return r['date'] + r['day_number'];
      });

      var grouped = _.groupBy(sortedData, "date");
      var maxColumns = _.reduce(grouped, function(memo, data){
        return (data.length > memo)? data.length : memo;
238  rd_ui/app/scripts/visualizations/map.js  Normal file
@@ -0,0 +1,238 @@
'use strict';

(function() {
  var module = angular.module('redash.visualization');

  module.config(['VisualizationProvider', function(VisualizationProvider) {
    var renderTemplate =
      '<map-renderer ' +
      'options="visualization.options" query-result="queryResult">' +
      '</map-renderer>';

    var editTemplate = '<map-editor></map-editor>';
    var defaultOptions = {
      'height': 500,
      'draw': 'Marker',
      'classify': 'none'
    };

    VisualizationProvider.registerVisualization({
      type: 'MAP',
      name: 'Map',
      renderTemplate: renderTemplate,
      editorTemplate: editTemplate,
      defaultOptions: defaultOptions
    });
  }]);

  module.directive('mapRenderer', function() {
    return {
      restrict: 'E',
      templateUrl: '/views/visualizations/map.html',
      link: function($scope, elm, attrs) {

        var setBounds = function() {
          var b = $scope.visualization.options.bounds;

          if (b) {
            $scope.map.fitBounds([[b._southWest.lat, b._southWest.lng], [b._northEast.lat, b._northEast.lng]]);
          } else if ($scope.features.length > 0) {
            var group = new L.featureGroup($scope.features);
            $scope.map.fitBounds(group.getBounds());
          }
        };

        $scope.$watch('[queryResult && queryResult.getData(), visualization.options.draw,visualization.options.latColName,' +
                      'visualization.options.lonColName,visualization.options.classify,visualization.options.classify]',
          function() {
            var marker = function(lat, lon) {
              if (lat == null || lon == null) return;

              return L.marker([lat, lon]);
            };

            var heatpoint = function(lat, lon, obj) {
              if (lat == null || lon == null) return;

              var color = 'red';

              if (obj &&
                  obj[$scope.visualization.options.classify] &&
                  $scope.visualization.options.classification) {
                var v = $.grep($scope.visualization.options.classification, function(e) {
                  return e.value == obj[$scope.visualization.options.classify];
                });
                if (v.length > 0) color = v[0].color;
              }

              var style = {
                fillColor: color,
                fillOpacity: 0.5,
                stroke: false
              };

              return L.circleMarker([lat, lon], style);
            };

            var color = function(val) {
              // taken from http://jsfiddle.net/xgJ2e/2/
              var h = Math.floor((100 - val) * 120 / 100);
              var s = Math.abs(val - 50) / 50;
              var v = 1;

              var rgb, i, data = [];
              if (s === 0) {
                rgb = [v, v, v];
              } else {
                h = h / 60;
                i = Math.floor(h);
                data = [v * (1 - s), v * (1 - s * (h - i)), v * (1 - s * (1 - (h - i)))];
                switch (i) {
                  case 0:
                    rgb = [v, data[2], data[0]];
                    break;
                  case 1:
                    rgb = [data[1], v, data[0]];
                    break;
                  case 2:
                    rgb = [data[0], v, data[2]];
                    break;
                  case 3:
                    rgb = [data[0], data[1], v];
                    break;
                  case 4:
                    rgb = [data[2], data[0], v];
                    break;
                  default:
                    rgb = [v, data[0], data[1]];
                    break;
                }
              }
              return '#' + rgb.map(function(x) {
                return ("0" + Math.round(x * 255).toString(16)).slice(-2);
              }).join('');
            };

            // The following line is used to avoid the "Couldn't autodetect L.Icon.Default.imagePath" error
            // https://github.com/Leaflet/Leaflet/issues/766#issuecomment-7741039
            L.Icon.Default.imagePath = L.Icon.Default.imagePath || "//api.tiles.mapbox.com/mapbox.js/v2.2.1/images";

            function getBounds(e) {
              $scope.visualization.options.bounds = $scope.map.getBounds();
            }

            var queryData = $scope.queryResult.getData();
            var classify = $scope.visualization.options.classify;

            if (queryData) {
              $scope.visualization.options.classification = [];

              for (var row in queryData) {
                if (queryData[row][classify] &&
                    $.grep($scope.visualization.options.classification, function (e) {
                      return e.value == queryData[row][classify];
                    }).length == 0) {
                  $scope.visualization.options.classification.push({value: queryData[row][classify], color: null});
                }
              }

              $.each($scope.visualization.options.classification, function (i, c) {
                c.color = color(parseInt((i / $scope.visualization.options.classification.length) * 100));
              });

              if (!$scope.map) {
                $scope.map = L.map(elm[0].children[0].children[0]);
              }

              L.tileLayer('//{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
                attribution: '&copy; <a href="http://osm.org/copyright">OpenStreetMap</a> contributors'
              }).addTo($scope.map);

              $scope.features = $scope.features || [];

              var tmp_features = [];

              var lat_col = $scope.visualization.options.latColName || 'lat';
              var lon_col = $scope.visualization.options.lonColName || 'lon';

              for (var row in queryData) {
                var feature;

                if ($scope.visualization.options.draw == 'Marker') {
                  feature = marker(queryData[row][lat_col], queryData[row][lon_col]);
                } else if ($scope.visualization.options.draw == 'Color') {
                  feature = heatpoint(queryData[row][lat_col], queryData[row][lon_col], queryData[row]);
                }

                if (!feature) continue;

                var obj_description = '<ul style="list-style-type: none;padding-left: 0">';
                for (var k in queryData[row]) {
                  obj_description += "<li>" + k + ": " + queryData[row][k] + "</li>";
                }
                obj_description += '</ul>';
                feature.bindPopup(obj_description);
                tmp_features.push(feature);
              }

              $.each($scope.features, function (i, f) {
                $scope.map.removeLayer(f);
              });

              $scope.features = tmp_features;

              $.each($scope.features, function (i, f) {
                f.addTo($scope.map);
              });

              setBounds();

              $scope.map.on('focus', function() {
                $scope.map.on('moveend', getBounds);
              });

              $scope.map.on('blur', function() {
                $scope.map.off('moveend', getBounds);
              });

              // We redraw the map if it was loaded in a hidden tab
              if ($('a[href="#' + $scope.visualization.id + '"]').length > 0) {
                $('a[href="#' + $scope.visualization.id + '"]').on('click', function () {
                  setTimeout(function() {
                    $scope.map.invalidateSize(false);
                    setBounds();
                  }, 500);
                });
              }
            }
          }, true);

        $scope.$watch('visualization.options.height', function() {
          if (!$scope.map) return;
          $scope.map.invalidateSize(false);
          setBounds();
        });
      }
    };
  });

  module.directive('mapEditor', function() {
    return {
      restrict: 'E',
      templateUrl: '/views/visualizations/map_editor.html',
      link: function($scope, elm, attrs) {
        $scope.draw_options = ['Marker', 'Color'];
        $scope.classify_columns = $scope.queryResult.columnNames.concat('none');
      }
    };
  });

})();
@@ -97,7 +97,16 @@ a.navbar-brand img {
}

.panel-heading .query-link:hover {
  text-decoration: none;
  text-decoration: underline;
}

.list-group-item.clickable {
  cursor: pointer;
}

.list-group-item.clickable:focus,
.list-group-item.clickable:hover {
  background-color: #f5f5f5;
}

/* angular-growl */
@@ -330,6 +339,11 @@ div.table-name {
  cursor: pointer;
}

.blankslate {
  text-align: center;
  padding: 30px;
}

/*
bootstrap's hidden-xs class adds display:block when not hidden
use this class when you need to keep the original display value
@@ -339,3 +353,7 @@ use this class when you need to keep the original display value
    display: none !important;
  }
}

.log-container {
  margin-bottom: 50px;
}
58  rd_ui/app/views/alerts/edit.html  Normal file
@@ -0,0 +1,58 @@
<div class="container">
  <ol class="breadcrumb">
    <li><a href="/alerts">Alerts</a></li>
    <li class="active">{{alert.name || getDefaultName() || "New"}}</li>
  </ol>
  <div class="row">
    <div class="col-md-8">
      <form name="alertForm" ng-submit="saveChanges()" class="form">
        <div class="form-group">
          <label>Query</label>
          <ui-select ng-model="alert.query" theme="bootstrap" reset-search-input="false" on-select="onQuerySelected($item)">
            <ui-select-match placeholder="Search a query by name">{{$select.selected.name}}</ui-select-match>
            <ui-select-choices repeat="q in queries"
                               refresh="searchQueries($select.search)"
                               refresh-delay="0">
              <div ng-bind-html="q.name | highlight: $select.search | trustAsHtml"></div>
            </ui-select-choices>
          </ui-select>
        </div>

        <div class="form-group" ng-show="selectedQuery">
          <label>Name</label>
          <input type="string" placeholder="{{getDefaultName()}}" class="form-control" ng-model="alert.name">
        </div>

        <div ng-show="queryResult" class="form-horizontal">
          <div class="form-group">
            <label class="control-label col-md-2">Value column</label>
            <div class="col-md-4">
              <select ng-options="name for name in queryResult.getColumnNames()" ng-model="alert.options.column" class="form-control"></select>
            </div>
            <label class="control-label col-md-2">Value</label>
            <div class="col-md-4">
              <p class="form-control-static">{{queryResult.getData()[0][alert.options.column]}}</p>
            </div>
          </div>
          <div class="form-group">
            <label class="control-label col-md-2">Op</label>
            <div class="col-md-4">
              <select ng-options="name for name in ops" ng-model="alert.options.op" class="form-control"></select>
            </div>
            <label class="control-label col-md-2">Reference</label>
            <div class="col-md-4">
              <input type="number" class="form-control" ng-model="alert.options.value" placeholder="reference value" required/>
            </div>
          </div>
        </div>

        <div class="form-group">
          <button class="btn btn-primary" ng-disabled="!alertForm.$valid">Save</button>
        </div>
      </form>
    </div>
    <div class="col-md-4" ng-if="alert.id">
      <alert-subscribers alert-id="alert.id"></alert-subscribers>
    </div>
  </div>
</div>
16  rd_ui/app/views/alerts/list.html  Normal file
@@ -0,0 +1,16 @@
<div class="container">
  <ol class="breadcrumb">
    <li class="active">Alerts</li>
  </ol>
  <div class="row">
    <div class="col-md-12">
      <p>
        <a href="/alerts/new" class="btn btn-default"><i class="fa fa-plus"></i> New Alert</a>
      </p>

      <smart-table rows="alerts" columns="gridColumns"
                   config="gridConfig"
                   class="table table-condensed table-hover"></smart-table>
    </div>
  </div>
</div>
4  rd_ui/app/views/alerts/subscribers.html  Normal file
@@ -0,0 +1,4 @@
<div>
  <strong>Subscribers</strong> <subscribe-button alert-id="alertId" subscribers="subscribers"></subscribe-button><br/>
  <img ng-src="{{s.user.gravatar_url}}" class="img-circle" alt="{{s.user.name}}" ng-repeat="s in subscribers"/>
</div>
11  rd_ui/app/views/data_sources/edit.html  Normal file
@@ -0,0 +1,11 @@
<div class="container">
  <ol class="breadcrumb">
    <li><a href="/data_sources">Data Sources</a></li>
    <li class="active">{{dataSource.name || "New"}}</li>
  </ol>
  <div class="row">
    <div class="col-md-8">
      <data-source-form data-data-source="dataSource" />
    </div>
  </div>
</div>
20  rd_ui/app/views/data_sources/form.html  Normal file
@@ -0,0 +1,20 @@
<form name="dataSourceForm" ng-submit="saveChanges()">
  <div class="form-group">
    <label for="dataSourceName">Name</label>
    <input type="string" class="form-control" name="dataSourceName" ng-model="dataSource.name" required>
  </div>
  <div class="form-group">
    <label for="type">Type</label>
    <select name="type" class="form-control" ng-options="type.type as type.name for type in dataSourceTypes" ng-model="dataSource.type"></select>
  </div>
  <div class="form-group" ng-class='{"has-error": !inner.input.$valid}' ng-form="inner" ng-repeat="(name, input) in type.configuration_schema.properties">
    <label>{{input.title || name | capitalize}}</label>
    <input name="input" type="{{input.type}}" class="form-control" ng-model="dataSource.options[name]" ng-required="input.required"
           ng-if="input.type !== 'file'" accesskey="tab">

    <input name="input" type="file" class="form-control" ng-model="files[name]" ng-required="input.required"
           base-sixty-four-input
           ng-if="input.type === 'file'">
  </div>
  <button class="btn btn-primary" ng-disabled="!dataSourceForm.$valid">Save</button>
</form>
rd_ui/app/views/data_sources/list.html (new file, 18 lines)
@@ -0,0 +1,18 @@
<div class="container">
<ol class="breadcrumb">
<li class="active">Data Sources</li>
</ol>
<div class="row">
<div class="col-md-4">
<div class="list-group">
<div class="list-group-item clickable" ng-repeat="dataSource in dataSources" ng-click="openDataSource(dataSource)">
<i class="fa fa-database"></i> {{dataSource.name}}
<button class="btn btn-xs btn-danger pull-right" ng-click="deleteDataSource($event, dataSource)">Delete</button>
</div>
<a ng-href="/data_sources/new" class="list-group-item">
<i class="fa fa-plus"></i> Add Data Source
</a>
</div>
</div>
</div>
</div>
@@ -84,16 +84,16 @@
</div>
</div>
<div class="col-md-3 schema-container" ng-show="hasSchema">
<div>
<div ng-show="schema.length < 200">
<input type="text" placeholder="Search schema..." class="form-control" ng-model="schemaFilter">
</div>
<div class="schema-browser">
<div ng-repeat="table in schema | filter:schemaFilter">
<div ng-repeat="table in schema | filter:schemaFilter track by table.name">
<div class="table-name" ng-click="table.collapsed = !table.collapsed">
<i class="fa fa-table"></i> <strong><span title="{{table.name}}">{{table.name}}</span></strong>
</div>
<div collapse="table.collapsed">
<div ng-repeat="column in table.columns | filter:schemaFilter" style="padding-left:16px;">{{column}}</div>
<div collapse="table.collapsed && !schemaFilter">
<div ng-repeat="column in table.columns track by column" style="padding-left:16px;">{{column}}</div>
</div>
</div>
</div>
@@ -138,7 +138,7 @@
</p>

<p>
<span class="glyphicon glyphicon-hdd"></span>
<i class="fa fa-database"></i>
<span class="text-muted">Data Source</span>
<select ng-disabled="!isQueryOwner" ng-model="query.data_source_id" ng-change="updateDataSource()" ng-options="ds.id as ds.name for ds in dataSources"></select>
</p>
@@ -192,6 +192,16 @@
</div>
<div class="alert alert-danger" ng-show="queryResult.getError()">Error running query: <strong>{{queryResult.getError()}}</strong></div>

<div class="row log-container" ng-show="showLog">
<span ng-show="showLog">Log Information:</span>
<table>
<tbody>
<tr ng-repeat="l in queryResult.getLog()">
<td>{{l}}</td>
</tr>
</tbody>
</table>
</div>
<!-- tabs and data -->
<div ng-show="showDataset">
<div class="row">
@@ -54,6 +54,14 @@
ng-model="visualization.options.sortX">
</div>
</div>
<div class="form-group">
<label class="control-label col-sm-2">Show X Axis Labels</label>

<div class="col-sm-10">
<input name="sortX" type="checkbox" class="form-control"
ng-model="visualization.options.xAxis.labels.enabled">
</div>
</div>
</div>
</div>
@@ -100,6 +108,15 @@
class="form-control"></select>
</div>
</div>
<div class="form-group">
<label class="control-label col-sm-3">Index</label>

<div class="col-sm-9">
<select required ng-model="visualization.options.seriesOptions[seriesName].index"
ng-options="o as o for o in zIndexes"
class="form-control"></select>
</div>
</div>
<div class="form-group">
<label class="control-label col-sm-3">y Axis</label>
@@ -1,7 +1,7 @@
<div>
<span ng-click="openEditor=!openEditor" class="details-toggle" ng-class="{open: openEditor}">Edit</span>

<form ng-if="openEditor" role="form" name="visForm" ng-submit="submit()">
<form ng-show="openEditor" role="form" name="visForm" ng-submit="submit()">
<div class="form-group">
<label class="control-label">Name</label>
<input name="name" type="text" class="form-control" ng-model="visualization.name" placeholder="{{visualization.type | capitalize}}">
rd_ui/app/views/visualizations/map.html (new file, 3 lines)
@@ -0,0 +1,3 @@
<div style='margin:1%;width:98%;height:{{visualization.options.height}}px'>
<div style="width:100%; height:100%;"></div>
</div>
rd_ui/app/views/visualizations/map_editor.html (new file, 55 lines)
@@ -0,0 +1,55 @@
<div class="form-horizontal">
<div class="form-group">
<label class="col-lg-2">Map height (px)</label>
<div class="col-sm-4">
<input class="form-control" type="number" ng-model = "visualization.options.height" />
</div>
</div>

<div class="form-group">
<label class="col-lg-2">Draw option</label>
<div class="col-sm-4">
<select ng-options="opt for opt in draw_options" ng-model="visualization.options.draw" class="form-control"></select>
</div>
</div>
<div class="form-group">
<label class="col-lg-2">Latitude column name</label>
<div class="col-sm-4">
<select ng-options="name for name in queryResult.columnNames" ng-model="visualization.options.latColName" class="form-control"></select>
</div>
</div>
<div class="form-group">
<label class="col-lg-2">Longitude column name</label>
<div class="col-sm-4">
<select ng-options="name for name in queryResult.columnNames" ng-model="visualization.options.lonColName" class="form-control"></select>
</div>
</div>

<div ng-show = "visualization.options.draw == 'Color'">
<div class="form-group">
<label class="col-lg-2">Classify by column</label>
<div class="col-sm-4">
<select ng-options="name for name in classify_columns" ng-model="visualization.options.classify" class="form-control"></select>
</div>
</div>

<div class="row" >
<div class="col-lg-6">
<div ng-repeat="element in visualization.options.classification" class="list-group">
<div class="list-group-item active">
{{element.value}}
</div>

<div class="list-group-item">
<div class="form-group">
<label class="col-lg-4">Color</label>
<div class="col-sm-4">
<input class="form-control" style="background-color:{{element.color}};" type="text" ng-model = "element.color" />
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>
@@ -25,11 +25,13 @@
    "marked": "~0.3.2",
    "bucky": "~0.2.6",
    "pace": "~0.5.1",
    "angular-ui-select": "0.8.2",
    "angular-ui-select": "~0.12.0",
    "font-awesome": "~4.2.0",
    "mustache": "~1.0.0",
    "canvg": "gabelerner/canvg",
    "angular-ui-bootstrap-bower": "~0.12.1"
    "angular-ui-bootstrap-bower": "~0.12.1",
    "leaflet": "~0.7.3",
    "angular-base64-upload": "~0.1.11"
  },
  "devDependencies": {
    "angular-mocks": "1.2.18",
@@ -36,6 +36,7 @@
    "node": ">=0.10.0"
  },
  "scripts": {
    "test": "grunt test"
    "test": "grunt test",
    "bower": "bower"
  }
}
@@ -2,11 +2,12 @@ import logging
import urlparse
import redis
from statsd import StatsClient
from flask_mail import Mail

from redash import settings
from redash.query_runner import import_query_runners

__version__ = '0.6.1'
__version__ = '0.7.0'


def setup_logging():
@@ -32,6 +33,8 @@ def create_redis_connection():

setup_logging()
redis_connection = create_redis_connection()
mail = Mail()
mail.init_mail(settings.all_settings())
statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX)

import_query_runners(settings.QUERY_RUNNERS)
@@ -53,7 +53,8 @@ class PasswordHashField(fields.PasswordField):
class PgModelConverter(CustomModelConverter):
    def __init__(self, view, additional=None):
        additional = {ArrayField: self.handle_array_field,
                      DateTimeTZField: self.handle_datetime_tz_field}
                      DateTimeTZField: self.handle_datetime_tz_field,
                      }
        super(PgModelConverter, self).__init__(view, additional)
        self.view = view
@@ -84,33 +85,25 @@ class UserModelView(BaseModelView):
    }


def query_runner_type_formatter(view, context, model, name):
    qr = query_runner.query_runners.get(model.type, None)
    if qr:
        return qr.name()

    return model.type
class QueryResultModelView(BaseModelView):
    column_exclude_list = ('data',)


class DataSourceModelView(BaseModelView):
    form_overrides = dict(type=fields.SelectField, options=JSONTextAreaField)
    form_args = dict(type={
        'choices': [(k, r.name()) for k, r in query_runner.query_runners.iteritems()]
    })
    column_formatters = dict(type=query_runner_type_formatter)
    column_filters = ('type',)
class QueryModelView(BaseModelView):
    column_exclude_list = ('latest_query_data',)


class DashboardModelView(BaseModelView):
    column_searchable_list = ('name', 'slug')


def init_admin(app):
    admin = Admin(app, name='re:dash admin')
    admin = Admin(app, name='re:dash admin', template_mode='bootstrap3')

    views = {
        models.User: UserModelView(models.User),
        models.DataSource: DataSourceModelView(models.DataSource)
    }
    admin.add_view(UserModelView(models.User))
    admin.add_view(QueryModelView(models.Query))
    admin.add_view(QueryResultModelView(models.QueryResult))
    admin.add_view(DashboardModelView(models.Dashboard))

    for m in models.all_models:
        if m in views:
            admin.add_view(views[m])
        else:
            admin.add_view(BaseModelView(m))
    for m in (models.Visualization, models.Widget, models.ActivityLog, models.Group, models.Event):
        admin.add_view(BaseModelView(m))
@@ -1,13 +1,13 @@
import functools
import hashlib
import hmac
import time
import logging

from flask import request, make_response, redirect, url_for
from flask.ext.login import LoginManager, login_user, current_user, logout_user
from flask.ext.login import LoginManager
from flask.ext.login import user_logged_in

from redash import models, settings, google_oauth
from redash import models, settings, google_oauth, saml_auth
from redash.tasks import record_event

login_manager = LoginManager()
logger = logging.getLogger('authentication')
@@ -23,77 +23,85 @@ def sign(key, path, expires):
    return h.hexdigest()


class Authentication(object):
    def verify_authentication(self):
        return False

    def required(self, fn):
        @functools.wraps(fn)
        def decorated(*args, **kwargs):
            if current_user.is_authenticated() or self.verify_authentication():
                return fn(*args, **kwargs)

            return make_response(redirect(url_for("login", next=request.url)))

        return decorated
@login_manager.user_loader
def load_user(user_id):
    return models.User.get_by_id(user_id)


class ApiKeyAuthentication(Authentication):
    def verify_authentication(self):
        api_key = request.args.get('api_key')
        query_id = request.view_args.get('query_id', None)
def hmac_load_user_from_request(request):
    signature = request.args.get('signature')
    expires = float(request.args.get('expires') or 0)
    query_id = request.view_args.get('query_id', None)
    user_id = request.args.get('user_id', None)

        if query_id and api_key:
            query = models.Query.get(models.Query.id == query_id)
    # TODO: 3600 should be a setting
    if signature and time.time() < expires <= time.time() + 3600:
        if user_id:
            user = models.User.get_by_id(user_id)
            calculated_signature = sign(user.api_key, request.path, expires)

            if query.api_key and api_key == query.api_key:
                login_user(models.ApiUser(query.api_key), remember=False)
                return True
            if user.api_key and signature == calculated_signature:
                return user

        return False


class HMACAuthentication(Authentication):
    def verify_authentication(self):
        signature = request.args.get('signature')
        expires = float(request.args.get('expires') or 0)
        query_id = request.view_args.get('query_id', None)

        # TODO: 3600 should be a setting
        if signature and query_id and time.time() < expires <= time.time() + 3600:
        if query_id:
            query = models.Query.get(models.Query.id == query_id)
            calculated_signature = sign(query.api_key, request.path, expires)

            if query.api_key and signature == calculated_signature:
                login_user(models.ApiUser(query.api_key), remember=False)
                return True
                return models.ApiUser(query.api_key)

        return False
    return None


@login_manager.user_loader
def load_user(user_id):
    # If the user was previously logged in as api user, the user_id will be the api key and will raise an exception as
    # it can't be casted to int.
    if isinstance(user_id, basestring) and not user_id.isdigit():
def get_user_from_api_key(api_key, query_id):
    if not api_key:
        return None

    return models.User.select().where(models.User.id == user_id).first()
    user = None
    try:
        user = models.User.get_by_api_key(api_key)
    except models.User.DoesNotExist:
        if query_id:
            query = models.Query.get_by_id(query_id)
            if query and query.api_key == api_key:
                user = models.ApiUser(api_key)

    return user

def api_key_load_user_from_request(request):
    api_key = request.args.get('api_key', None)
    query_id = request.view_args.get('query_id', None)

    user = get_user_from_api_key(api_key, query_id)
    return user


def log_user_logged_in(app, user):
    event = {
        'user_id': user.id,
        'action': 'login',
        'object_type': 'redash',
        'timestamp': int(time.time()),
    }

    record_event.delay(event)


def setup_authentication(app):
    login_manager.init_app(app)
    login_manager.anonymous_user = models.AnonymousUser
    login_manager.login_view = 'login'
    app.secret_key = settings.COOKIE_SECRET
    app.register_blueprint(google_oauth.blueprint)
    app.register_blueprint(saml_auth.blueprint)

    user_logged_in.connect(log_user_logged_in)

    if settings.AUTH_TYPE == 'hmac':
        auth = HMACAuthentication()
        login_manager.request_loader(hmac_load_user_from_request)
    elif settings.AUTH_TYPE == 'api_key':
        auth = ApiKeyAuthentication()
        login_manager.request_loader(api_key_load_user_from_request)
    else:
        logger.warning("Unknown authentication type ({}). Using default (HMAC).".format(settings.AUTH_TYPE))
        auth = HMACAuthentication()
        login_manager.request_loader(hmac_load_user_from_request)

    return auth
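For reference, a minimal client-side sketch of building a URL that `hmac_load_user_from_request` would accept. The body of `sign()` is not shown in this hunk, so the HMAC construction below (SHA-1 over the path plus the expiry, keyed by the user's `api_key`) is an assumption to illustrate the flow, and the path and user id are hypothetical:

import hashlib
import hmac
import time

def client_sign(api_key, path, expires):
    # Assumed mirror of redash.authentication.sign(); verify against the real source.
    h = hmac.new(str(api_key), msg=path, digestmod=hashlib.sha1)
    h.update(str(expires))
    return h.hexdigest()

expires = time.time() + 3600  # must satisfy: time.time() < expires <= time.time() + 3600
path = "/api/queries/42/results.json"  # hypothetical endpoint path
url = "{}?user_id=7&expires={}&signature={}".format(
    path, expires, client_sign("USER_API_KEY", path, expires))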
@@ -12,17 +12,19 @@ import time
import logging

from flask import render_template, send_from_directory, make_response, request, jsonify, redirect, \
    session, url_for
from flask.ext.restful import Resource, abort
from flask_login import current_user, login_user, logout_user
    session, url_for, current_app, flash
from flask.ext.restful import Resource, abort, reqparse
from flask_login import current_user, login_user, logout_user, login_required
from funcy import project
import sqlparse

from redash import redis_connection, statsd_client, models, settings, utils, __version__
from redash.wsgi import app, auth, api
from redash import statsd_client, models, settings, utils
from redash.wsgi import app, api
from redash.tasks import QueryTask, record_event
from redash.cache import headers as cache_headers
from redash.permissions import require_permission
from redash.query_runner import query_runners, validate_configuration
from redash.monitor import get_status


@app.route('/ping', methods=['GET'])
@@ -30,14 +32,19 @@ def ping():
    return 'PONG.'


@app.route('/admin/<anything>/<whatever>')
@app.route('/admin/<anything>')
@app.route('/dashboard/<anything>')
@app.route('/alerts')
@app.route('/alerts/<pk>')
@app.route('/queries')
@app.route('/data_sources')
@app.route('/data_sources/<pk>')
@app.route('/queries/<query_id>')
@app.route('/queries/<query_id>/<anything>')
@app.route('/personal')
@app.route('/')
@auth.required
@login_required
def index(**kwargs):
    email_md5 = hashlib.md5(current_user.email.lower()).hexdigest()
    gravatar_url = "https://www.gravatar.com/avatar/%s?s=40" % email_md5
@@ -66,22 +73,30 @@ def login():
        return redirect(request.args.get('next') or '/')

    if not settings.PASSWORD_LOGIN_ENABLED:
        return redirect(url_for("google_oauth.authorize", next=request.args.get('next')))
        if settings.SAML_LOGIN_ENABLED:
            return redirect(url_for("saml_auth.sp_initiated", next=request.args.get('next')))
        else:
            return redirect(url_for("google_oauth.authorize", next=request.args.get('next')))

    if request.method == 'POST':
        user = models.User.select().where(models.User.email == request.form['username']).first()
        if user and user.verify_password(request.form['password']):
            remember = ('remember' in request.form)
            login_user(user, remember=remember)
            return redirect(request.args.get('next') or '/')
        try:
            user = models.User.get_by_email(request.form['username'])
            if user and user.verify_password(request.form['password']):
                remember = ('remember' in request.form)
                login_user(user, remember=remember)
                return redirect(request.args.get('next') or '/')
            else:
                flash("Wrong username or password.")
        except models.User.DoesNotExist:
            flash("Wrong username or password.")

    return render_template("login.html",
                           name=settings.NAME,
                           analytics=settings.ANALYTICS,
                           next=request.args.get('next'),
                           username=request.form.get('username', ''),
                           show_google_openid=settings.GOOGLE_OAUTH_ENABLED)

                           show_google_openid=settings.GOOGLE_OAUTH_ENABLED,
                           show_saml_login=settings.SAML_LOGIN_ENABLED)

@app.route('/logout')
def logout():
@@ -91,43 +106,16 @@ def logout():
    return redirect('/login')

@app.route('/status.json')
@auth.required
@login_required
@require_permission('admin')
def status_api():
    status = {}
    info = redis_connection.info()
    status['redis_used_memory'] = info['used_memory_human']
    status['version'] = __version__
    status['queries_count'] = models.Query.select().count()
    status['query_results_count'] = models.QueryResult.select().count()
    status['unused_query_results_count'] = models.QueryResult.unused().count()
    status['dashboards_count'] = models.Dashboard.select().count()
    status['widgets_count'] = models.Widget.select().count()

    status['workers'] = []

    manager_status = redis_connection.hgetall('redash:status')
    status['manager'] = manager_status
    status['manager']['outdated_queries_count'] = len(models.Query.outdated_queries())

    queues = {}
    for ds in models.DataSource.select():
        for queue in (ds.queue_name, ds.scheduled_queue_name):
            queues.setdefault(queue, set())
            queues[queue].add(ds.name)

    status['manager']['queues'] = {}
    for queue, sources in queues.iteritems():
        status['manager']['queues'][queue] = {
            'data_sources': ', '.join(sources),
            'size': redis_connection.llen(queue)
        }
    status = get_status()

    return jsonify(status)


@app.route('/api/queries/format', methods=['POST'])
@auth.required
@login_required
def format_sql_query():
    arguments = request.get_json(force=True)
    query = arguments.get("query", "")
@@ -136,7 +124,7 @@ def format_sql_query():


@app.route('/queries/new', methods=['POST'])
@auth.required
@login_required
def create_query_route():
    query = request.form.get('query', None)
    data_source_id = request.form.get('data_source_id', None)
@@ -154,7 +142,7 @@ def create_query_route():


class BaseResource(Resource):
    decorators = [auth.required]
    decorators = [login_required]

    def __init__(self, *args, **kwargs):
        super(BaseResource, self).__init__(*args, **kwargs)
@@ -199,6 +187,34 @@ class DataSourceTypeListAPI(BaseResource):
api.add_resource(DataSourceTypeListAPI, '/api/data_sources/types', endpoint='data_source_types')


class DataSourceAPI(BaseResource):
    @require_permission('admin')
    def get(self, data_source_id):
        data_source = models.DataSource.get_by_id(data_source_id)
        return data_source.to_dict(all=True)

    @require_permission('admin')
    def post(self, data_source_id):
        data_source = models.DataSource.get_by_id(data_source_id)
        req = request.get_json(True)
        if not validate_configuration(req['type'], req['options']):
            abort(400)

        data_source.name = req['name']
        data_source.options = json.dumps(req['options'])

        data_source.save()

        return data_source.to_dict(all=True)

    @require_permission('admin')
    def delete(self, data_source_id):
        data_source = models.DataSource.get_by_id(data_source_id)
        data_source.delete_instance(recursive=True)

        return make_response('', 204)


class DataSourceListAPI(BaseResource):
    def get(self):
        data_sources = [ds.to_dict() for ds in models.DataSource.all()]
@@ -215,11 +231,12 @@ class DataSourceListAPI(BaseResource):
        if not validate_configuration(req['type'], req['options']):
            abort(400)

        datasource = models.DataSource.create(name=req['name'], type=req['type'], options=req['options'])
        datasource = models.DataSource.create(name=req['name'], type=req['type'], options=json.dumps(req['options']))

        return datasource.to_dict()
        return datasource.to_dict(all=True)

api.add_resource(DataSourceListAPI, '/api/data_sources', endpoint='data_sources')
api.add_resource(DataSourceAPI, '/api/data_sources/<data_source_id>', endpoint='data_source')
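A hedged usage sketch for the new endpoints; the field names come from the handlers above, while the server URL, data source values, and session handling are illustrative assumptions (an authenticated admin session is required):

import json
import requests

payload = {"name": "Metrics DB", "type": "pg", "options": {"dbname": "metrics"}}
# POST /api/data_sources creates a source; POST /api/data_sources/<id> updates one.
resp = requests.post("http://localhost:5000/api/data_sources",
                     data=json.dumps(payload),
                     headers={"Content-Type": "application/json"})
print resp.json()  # with to_dict(all=True): includes parsed options and queue names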
class DataSourceSchemaAPI(BaseResource):
@@ -363,7 +380,7 @@ class QueryAPI(BaseResource):
    @require_permission('edit_query')
    def post(self, query_id):
        query = models.Query.get_by_id(query_id)


        query_def = request.get_json(force=True)
        for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'user', 'last_modified_by']:
            query_def.pop(field, None)
@@ -415,7 +432,7 @@ class VisualizationListAPI(BaseResource):
        kwargs = request.get_json(force=True)
        kwargs['options'] = json.dumps(kwargs['options'])
        kwargs['query'] = kwargs.pop('query_id')


        vis = models.Visualization(**kwargs)
        vis.save()
@@ -450,7 +467,7 @@ api.add_resource(VisualizationAPI, '/api/visualizations/<visualization_id>', end
class QueryResultListAPI(BaseResource):
    @require_permission('execute_query')
    def post(self):
        params = request.json
        params = request.get_json(force=True)

        if settings.FEATURE_TABLES_PERMISSIONS:
            metadata = utils.SQLMetaData(params['query'])
@@ -476,7 +493,7 @@ class QueryResultListAPI(BaseResource):
                activity=params['query']
            ).save()

        max_age = int(params['max_age'])
        max_age = int(params.get('max_age', -1))

        if max_age == 0:
            query_result = None
@@ -508,6 +525,28 @@ class QueryResultAPI(BaseResource):
        headers.update(cache_headers)
        return make_response(s.getvalue(), 200, headers)

    @staticmethod
    def add_cors_headers(headers):
        if 'Origin' in request.headers:
            origin = request.headers['Origin']

            if origin in settings.ACCESS_CONTROL_ALLOW_ORIGIN:
                headers['Access-Control-Allow-Origin'] = origin
                headers['Access-Control-Allow-Credentials'] = str(settings.ACCESS_CONTROL_ALLOW_CREDENTIALS).lower()

    @require_permission('view_query')
    def options(self, query_id=None, query_result_id=None, filetype='json'):
        headers = {}
        self.add_cors_headers(headers)

        if settings.ACCESS_CONTROL_REQUEST_METHOD:
            headers['Access-Control-Request-Method'] = settings.ACCESS_CONTROL_REQUEST_METHOD

        if settings.ACCESS_CONTROL_ALLOW_HEADERS:
            headers['Access-Control-Allow-Headers'] = settings.ACCESS_CONTROL_ALLOW_HEADERS

        return make_response("", 200, headers)

    @require_permission('view_query')
    def get(self, query_id=None, query_result_id=None, filetype='json'):
        if query_result_id is None and query_id is not None:
@@ -537,9 +576,15 @@ class QueryResultAPI(BaseResource):

            record_event.delay(event)

            headers = {}

            if len(settings.ACCESS_CONTROL_ALLOW_ORIGIN) > 0:
                self.add_cors_headers(headers)

            if filetype == 'json':
                data = json.dumps({'query_result': query_result.to_dict()}, cls=utils.JSONEncoder)
                return make_response(data, 200, cache_headers)
                headers.update(cache_headers)
                return make_response(data, 200, headers)
            else:
                return self.csv_response(query_result)
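The CORS behaviour above is driven entirely by settings; a plausible configuration sketch, with setting names taken from the code and values (and their exact types) as illustrative assumptions — consult redash.settings for the real parsing and defaults:

# Illustrative values only, not the shipped defaults.
ACCESS_CONTROL_ALLOW_ORIGIN = set(["https://bi.example.com"])  # origins allowed to read results
ACCESS_CONTROL_ALLOW_CREDENTIALS = False  # rendered lowercase into the response header
ACCESS_CONTROL_REQUEST_METHOD = "GET, POST"  # echoed on OPTIONS preflight
ACCESS_CONTROL_ALLOW_HEADERS = "Content-Type"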
@@ -567,13 +612,110 @@ class JobAPI(BaseResource):

api.add_resource(JobAPI, '/api/jobs/<job_id>', endpoint='job')


class AlertAPI(BaseResource):
    def get(self, alert_id):
        alert = models.Alert.get_by_id(alert_id)
        return alert.to_dict()

    def post(self, alert_id):
        req = request.get_json(True)
        params = project(req, ('options', 'name', 'query_id'))
        alert = models.Alert.get_by_id(alert_id)
        if 'query_id' in params:
            params['query'] = params.pop('query_id')

        alert.update_instance(**params)

        record_event.delay({
            'user_id': self.current_user.id,
            'action': 'edit',
            'timestamp': int(time.time()),
            'object_id': alert.id,
            'object_type': 'alert'
        })

        return alert.to_dict()


class AlertListAPI(BaseResource):
    def post(self):
        req = request.get_json(True)
        required_fields = ('options', 'name', 'query_id')
        for f in required_fields:
            if f not in req:
                abort(400)

        alert = models.Alert.create(
            name=req['name'],
            query=req['query_id'],
            user=self.current_user,
            options=req['options']
        )

        record_event.delay({
            'user_id': self.current_user.id,
            'action': 'create',
            'timestamp': int(time.time()),
            'object_id': alert.id,
            'object_type': 'alert'
        })

        # TODO: should be in model?
        models.AlertSubscription.create(alert=alert, user=self.current_user)

        record_event.delay({
            'user_id': self.current_user.id,
            'action': 'subscribe',
            'timestamp': int(time.time()),
            'object_id': alert.id,
            'object_type': 'alert'
        })

        return alert.to_dict()

    def get(self):
        return [alert.to_dict() for alert in models.Alert.all()]


class AlertSubscriptionListResource(BaseResource):
    def post(self, alert_id):
        subscription = models.AlertSubscription.create(alert=alert_id, user=self.current_user)
        record_event.delay({
            'user_id': self.current_user.id,
            'action': 'subscribe',
            'timestamp': int(time.time()),
            'object_id': alert_id,
            'object_type': 'alert'
        })
        return subscription.to_dict()

    def get(self, alert_id):
        subscriptions = models.AlertSubscription.all(alert_id)
        return [s.to_dict() for s in subscriptions]


class AlertSubscriptionResource(BaseResource):
    def delete(self, alert_id, subscriber_id):
        models.AlertSubscription.unsubscribe(alert_id, subscriber_id)
        record_event.delay({
            'user_id': self.current_user.id,
            'action': 'unsubscribe',
            'timestamp': int(time.time()),
            'object_id': alert_id,
            'object_type': 'alert'
        })

api.add_resource(AlertAPI, '/api/alerts/<alert_id>', endpoint='alert')
api.add_resource(AlertSubscriptionListResource, '/api/alerts/<alert_id>/subscriptions', endpoint='alert_subscriptions')
api.add_resource(AlertSubscriptionResource, '/api/alerts/<alert_id>/subscriptions/<subscriber_id>', endpoint='alert_subscription')
api.add_resource(AlertListAPI, '/api/alerts', endpoint='alerts')

@app.route('/<path:filename>')
def send_static(filename):
    return send_from_directory(settings.STATIC_ASSETS_PATH, filename)


if __name__ == '__main__':
    app.run(debug=True)


    if current_app.debug:
        cache_timeout = 0
    else:
        cache_timeout = None

    return send_from_directory(settings.STATIC_ASSETS_PATH, filename, cache_timeout=cache_timeout)
@@ -1,25 +1,25 @@
import logging
from flask.ext.login import login_user
import requests
from flask import redirect, url_for, Blueprint
from flask import redirect, url_for, Blueprint, flash
from flask_oauth import OAuth
from redash import models, settings

logger = logging.getLogger('google_oauth')
oauth = OAuth()

request_token_params = {'scope': 'https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile', 'response_type': 'code'}

if settings.GOOGLE_APPS_DOMAIN:
    request_token_params['hd'] = settings.GOOGLE_APPS_DOMAIN
else:
if not settings.GOOGLE_APPS_DOMAIN:
    logger.warning("No Google Apps domain defined, all Google accounts allowed.")

google = oauth.remote_app('google',
                          base_url='https://www.google.com/accounts/',
                          authorize_url='https://accounts.google.com/o/oauth2/auth',
                          request_token_url=None,
                          request_token_params=request_token_params,
                          request_token_params={
                              'scope': 'https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile',
                              'response_type': 'code'
                          },
                          access_token_url='https://accounts.google.com/o/oauth2/token',
                          access_token_method='POST',
                          access_token_params={'grant_type': 'authorization_code'},
@@ -31,7 +31,7 @@ blueprint = Blueprint('google_oauth', __name__)


def get_user_profile(access_token):
    headers = {'Authorization': 'OAuth '+access_token}
    headers = {'Authorization': 'OAuth {}'.format(access_token)}
    response = requests.get('https://www.googleapis.com/oauth2/v1/userinfo', headers=headers)

    if response.status_code == 401:
@@ -41,9 +41,17 @@ def get_user_profile(access_token):
    return response.json()


def verify_profile(profile):
    if not settings.GOOGLE_APPS_DOMAIN:
        return True

    domain = profile['email'].split('@')[-1]
    return domain in settings.GOOGLE_APPS_DOMAIN


def create_and_login_user(name, email):
    try:
        user_object = models.User.get(models.User.email == email)
        user_object = models.User.get_by_email(email)
        if user_object.name != name:
            logger.debug("Updating user name (%r -> %r)", user_object.name, name)
            user_object.name = name
@@ -70,10 +78,17 @@ def authorized(resp):

    if access_token is None:
        logger.warning("Access token missing in call back request.")
        flash("Validation error. Please retry.")
        return redirect(url_for('login'))

    profile = get_user_profile(access_token)
    if profile is None:
        flash("Validation error. Please retry.")
        return redirect(url_for('login'))

    if not verify_profile(profile):
        logger.warning("User tried to login with unauthorized domain name: %s", profile['email'])
        flash("Your Google Apps domain name isn't allowed.")
        return redirect(url_for('login'))

    create_and_login_user(profile['name'], profile['email'])
redash/models.py (154 lines changed)
@@ -15,6 +15,7 @@ import psycopg2

from redash import utils, settings, redis_connection
from redash.query_runner import get_query_runner
from utils import generate_token


class Database(object):
@@ -76,6 +77,17 @@ class BaseModel(peewee.Model):
        super(BaseModel, self).save(*args, **kwargs)
        self.post_save(created)

    def update_instance(self, **kwargs):
        for k, v in kwargs.items():
            # setattr(model_instance, field_name, field_obj.python_value(value))
            setattr(self, k, v)

        dirty_fields = self.dirty_fields
        if hasattr(self, 'updated_at'):
            dirty_fields = dirty_fields + [self.__class__.updated_at]

        self.save(only=dirty_fields)


class ModelTimestampsMixin(BaseModel):
    updated_at = DateTimeTZField(default=datetime.datetime.now)
@@ -152,6 +164,7 @@ class User(ModelTimestampsMixin, BaseModel, UserMixin, PermissionsCheckMixin):
    email = peewee.CharField(max_length=320, index=True, unique=True)
    password_hash = peewee.CharField(max_length=128, null=True)
    groups = ArrayField(peewee.CharField, default=DEFAULT_GROUPS)
    api_key = peewee.CharField(max_length=40, unique=True)

    class Meta:
        db_table = 'users'
@@ -161,6 +174,7 @@ class User(ModelTimestampsMixin, BaseModel, UserMixin, PermissionsCheckMixin):
            'id': self.id,
            'name': self.name,
            'email': self.email,
            'gravatar_url': self.gravatar_url,
            'updated_at': self.updated_at,
            'created_at': self.created_at
        }
@@ -169,6 +183,17 @@ class User(ModelTimestampsMixin, BaseModel, UserMixin, PermissionsCheckMixin):
        super(User, self).__init__(*args, **kwargs)
        self._allowed_tables = None

    def pre_save(self, created):
        super(User, self).pre_save(created)

        if not self.api_key:
            self.api_key = generate_token(40)

    @property
    def gravatar_url(self):
        email_md5 = hashlib.md5(self.email.lower()).hexdigest()
        return "https://www.gravatar.com/avatar/%s?s=40" % email_md5

    @property
    def permissions(self):
        # TODO: this should be cached.
@@ -188,8 +213,12 @@ class User(ModelTimestampsMixin, BaseModel, UserMixin, PermissionsCheckMixin):
    def get_by_email(cls, email):
        return cls.get(cls.email == email)

    @classmethod
    def get_by_api_key(cls, api_key):
        return cls.get(cls.api_key == api_key)

    def __unicode__(self):
        return '%r, %r' % (self.name, self.email)
        return u'%s (%s)' % (self.name, self.email)

    def hash_password(self, password):
        self.password_hash = pwd_context.encrypt(password)
@@ -229,20 +258,30 @@ class DataSource(BaseModel):
    type = peewee.CharField()
    options = peewee.TextField()
    queue_name = peewee.CharField(default="queries")
    scheduled_queue_name = peewee.CharField(default="queries")
    scheduled_queue_name = peewee.CharField(default="scheduled_queries")
    created_at = DateTimeTZField(default=datetime.datetime.now)

    class Meta:
        db_table = 'data_sources'

    def to_dict(self):
        return {
    def to_dict(self, all=False):
        d = {
            'id': self.id,
            'name': self.name,
            'type': self.type,
            'syntax': self.query_runner.syntax
        }

        if all:
            d['options'] = json.loads(self.options)
            d['queue_name'] = self.queue_name
            d['scheduled_queue_name'] = self.scheduled_queue_name

        return d

    def __unicode__(self):
        return self.name

    def get_schema(self, refresh=False):
        key = "data_source:schema:{}".format(self.id)
@@ -269,6 +308,14 @@ class DataSource(BaseModel):
        return cls.select().order_by(cls.id.asc())


class JSONField(peewee.TextField):
    def db_value(self, value):
        return json.dumps(value)

    def python_value(self, value):
        return json.loads(value)


class QueryResult(BaseModel):
    id = peewee.PrimaryKeyField()
    data_source = peewee.ForeignKeyField(DataSource)
@@ -326,13 +373,17 @@ class QueryResult(BaseModel):

        logging.info("Inserted query (%s) data; id=%s", query_hash, query_result.id)

        updated_count = Query.update(latest_query_data=query_result).\
            where(Query.query_hash==query_hash, Query.data_source==data_source_id).\
            execute()
        sql = "UPDATE queries SET latest_query_data_id = %s WHERE query_hash = %s AND data_source_id = %s RETURNING id"
        query_ids = [row[0] for row in db.database.execute_sql(sql, params=(query_result.id, query_hash, data_source_id))]

        logging.info("Updated %s queries with result (%s).", updated_count, query_hash)
        # TODO: when peewee with update & returning support is released, we can get back to using this code:
        # updated_count = Query.update(latest_query_data=query_result).\
        #     where(Query.query_hash==query_hash, Query.data_source==data_source_id).\
        #     execute()

        return query_result
        logging.info("Updated %s queries with result (%s).", len(query_ids), query_hash)

        return query_result, query_ids

    def __unicode__(self):
        return u"%d | %s | %s" % (self.id, self.query_hash, self.retrieved_at)
@@ -361,7 +412,7 @@ def should_schedule_next(previous_iteration, now, schedule):

class Query(ModelTimestampsMixin, BaseModel):
    id = peewee.PrimaryKeyField()
    data_source = peewee.ForeignKeyField(DataSource)
    data_source = peewee.ForeignKeyField(DataSource, null=True)
    latest_query_data = peewee.ForeignKeyField(QueryResult, null=True)
    name = peewee.CharField(max_length=255)
    description = peewee.CharField(max_length=4096, null=True)
@@ -395,7 +446,7 @@ class Query(ModelTimestampsMixin, BaseModel):

        if with_user:
            d['user'] = self.user.to_dict()
            d['last_modified_by'] = self.last_modified_by.to_dict()
            d['last_modified_by'] = self.last_modified_by.to_dict() if self.last_modified_by is not None else None
        else:
            d['user_id'] = self._data['user']

@@ -437,7 +488,7 @@ class Query(ModelTimestampsMixin, BaseModel):
            .switch(Query).join(DataSource)\
            .where(cls.schedule != None)

        now = datetime.datetime.utcnow().replace(tzinfo=psycopg2.tz.FixedOffsetTimezone(offset=0, name=None))
        now = utils.utcnow()
        outdated_queries = {}
        for query in queries:
            if should_schedule_next(query.latest_query_data.retrieved_at, now, query.schedule):
@@ -515,6 +566,83 @@ class Query(ModelTimestampsMixin, BaseModel):
        return unicode(self.id)
class Alert(ModelTimestampsMixin, BaseModel):
    UNKNOWN_STATE = 'unknown'
    OK_STATE = 'ok'
    TRIGGERED_STATE = 'triggered'

    id = peewee.PrimaryKeyField()
    name = peewee.CharField()
    query = peewee.ForeignKeyField(Query, related_name='alerts')
    user = peewee.ForeignKeyField(User, related_name='alerts')
    options = JSONField()
    state = peewee.CharField(default=UNKNOWN_STATE)
    last_triggered_at = DateTimeTZField(null=True)

    class Meta:
        db_table = 'alerts'

    @classmethod
    def all(cls):
        return cls.select(Alert, User, Query).join(Query).switch(Alert).join(User)

    def to_dict(self):
        return {
            'id': self.id,
            'name': self.name,
            'query': self.query.to_dict(),
            'user': self.user.to_dict(),
            'options': self.options,
            'state': self.state,
            'last_triggered_at': self.last_triggered_at,
            'updated_at': self.updated_at,
            'created_at': self.created_at
        }

    def evaluate(self):
        data = json.loads(self.query.latest_query_data.data)
        # todo: safe guard for empty
        value = data['rows'][0][self.options['column']]
        op = self.options['op']

        if op == 'greater than' and value > self.options['value']:
            new_state = self.TRIGGERED_STATE
        elif op == 'less than' and value < self.options['value']:
            new_state = self.TRIGGERED_STATE
        elif op == 'equals' and value == self.options['value']:
            new_state = self.TRIGGERED_STATE
        else:
            new_state = self.OK_STATE

        return new_state

    def subscribers(self):
        return User.select().join(AlertSubscription).where(AlertSubscription.alert==self)


class AlertSubscription(ModelTimestampsMixin, BaseModel):
    user = peewee.ForeignKeyField(User)
    alert = peewee.ForeignKeyField(Alert)

    class Meta:
        db_table = 'alert_subscriptions'

    def to_dict(self):
        return {
            'user': self.user.to_dict(),
            'alert_id': self._data['alert']
        }

    @classmethod
    def all(cls, alert_id):
        return AlertSubscription.select(AlertSubscription, User).join(User).where(AlertSubscription.alert==alert_id)

    @classmethod
    def unsubscribe(cls, alert_id, user_id):
        query = AlertSubscription.delete().where(AlertSubscription.alert==alert_id).where(AlertSubscription.user==user_id)
        return query.execute()
class Dashboard(ModelTimestampsMixin, BaseModel):
    id = peewee.PrimaryKeyField()
    slug = peewee.CharField(max_length=140, index=True)
@@ -704,7 +832,7 @@ class Event(BaseModel):
        return event


all_models = (DataSource, User, QueryResult, Query, Dashboard, Visualization, Widget, ActivityLog, Group, Event)
all_models = (DataSource, User, QueryResult, Query, Alert, Dashboard, Visualization, Widget, ActivityLog, Group, Event)


def init_db():
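The `options` JSONField is what ties the Angular alert form to `Alert.evaluate()`: the form's Value column / Op / Reference inputs map to the `column`, `op` and `value` keys read above. A standalone sketch of the same decision logic, with sample data invented purely for illustration:

def evaluate_alert(options, latest_rows):
    # Mirrors Alert.evaluate() above: compare the first row's column to the reference value.
    value = latest_rows[0][options['column']]
    if options['op'] == 'greater than' and value > options['value']:
        return 'triggered'
    if options['op'] == 'less than' and value < options['value']:
        return 'triggered'
    if options['op'] == 'equals' and value == options['value']:
        return 'triggered'
    return 'ok'

print evaluate_alert({'column': 'error_count', 'op': 'greater than', 'value': 100},
                     [{'error_count': 250}])  # -> 'triggered'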
redash/monitor.py (new file, 33 lines)
@@ -0,0 +1,33 @@
from redash import redis_connection, models, __version__

def get_status():
    status = {}
    info = redis_connection.info()
    status['redis_used_memory'] = info['used_memory_human']
    status['version'] = __version__
    status['queries_count'] = models.Query.select().count()
    status['query_results_count'] = models.QueryResult.select().count()
    status['unused_query_results_count'] = models.QueryResult.unused().count()
    status['dashboards_count'] = models.Dashboard.select().count()
    status['widgets_count'] = models.Widget.select().count()

    status['workers'] = []

    manager_status = redis_connection.hgetall('redash:status')
    status['manager'] = manager_status
    status['manager']['outdated_queries_count'] = len(models.Query.outdated_queries())

    queues = {}
    for ds in models.DataSource.select():
        for queue in (ds.queue_name, ds.scheduled_queue_name):
            queues.setdefault(queue, set())
            queues[queue].add(ds.name)

    status['manager']['queues'] = {}
    for queue, sources in queues.iteritems():
        status['manager']['queues'][queue] = {
            'data_sources': ', '.join(sources),
            'size': redis_connection.llen(queue)
        }

    return status
@@ -105,7 +105,11 @@ def validate_configuration(query_runner_type, configuration_json):
        return False

    try:
        jsonschema.validate(json.loads(configuration_json), query_runner_class.configuration_schema())
        if isinstance(configuration_json, basestring):
            configuration = json.loads(configuration_json)
        else:
            configuration = configuration_json
        jsonschema.validate(configuration, query_runner_class.configuration_schema())
    except (ValidationError, ValueError):
        return False
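After this change the validator accepts either a JSON string (the old call sites) or an already-parsed dict (the new REST handlers pass `req['options']` straight through). A quick sketch of the equivalence, using jsonschema directly with a made-up schema:

import json
import jsonschema

schema = {'type': 'object', 'properties': {'dbname': {'type': 'string'}}, 'required': ['dbname']}
for config in ('{"dbname": "metrics"}', {'dbname': 'metrics'}):
    parsed = json.loads(config) if isinstance(config, basestring) else config
    jsonschema.validate(parsed, schema)  # raises ValidationError on mismatch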
@@ -1,3 +1,4 @@
from base64 import b64decode
import datetime
import json
import httplib2
@@ -89,20 +90,16 @@ class BigQuery(BaseQueryRunner):
        return {
            'type': 'object',
            'properties': {
                'serviceAccount': {
                    'type': 'string',
                    'title': 'Service Account'
                },
                'projectId': {
                    'type': 'string',
                    'title': 'Project ID'
                },
                'privateKey': {
                    'type': 'string',
                    'title': 'Private Key Path'
                'jsonKeyFile': {
                    "type": "string",
                    'title': 'JSON Key File'
                }
            },
            'required': ['serviceAccount', 'projectId', 'privateKey']
            'required': ['jsonKeyFile', 'projectId']
        }

    def __init__(self, configuration_json):
@@ -113,8 +110,9 @@ class BigQuery(BaseQueryRunner):
            "https://www.googleapis.com/auth/bigquery",
        ]

        private_key = _load_key(self.configuration["privateKey"])
        credentials = SignedJwtAssertionCredentials(self.configuration['serviceAccount'], private_key, scope=scope)
        key = json.loads(b64decode(self.configuration['jsonKeyFile']))

        credentials = SignedJwtAssertionCredentials(key['client_email'], key['private_key'], scope=scope)
        http = httplib2.Http()
        http = credentials.authorize(http)

@@ -201,4 +199,4 @@ class BigQueryGCE(BigQuery):


register(BigQuery)
register(BigQueryGCE)
register(BigQueryGCE)
redash/query_runner/elasticsearch.py (new file, 259 lines)
@@ -0,0 +1,259 @@
import datetime
import json
import logging
import sys
import urllib

from redash.query_runner import *
from redash import models

import requests
import dateutil
from dateutil.parser import parse

try:
    import http.client as http_client
except ImportError:
    # Python 2
    import httplib as http_client

logger = logging.getLogger(__name__)

ELASTICSEARCH_TYPES_MAPPING = {
    "integer" : TYPE_INTEGER,
    "long" : TYPE_INTEGER,
    "float" : TYPE_FLOAT,
    "double" : TYPE_FLOAT,
    "boolean" : TYPE_BOOLEAN,
    "string" : TYPE_STRING,
    "date" : TYPE_DATE,
    # "geo_point" TODO: Need to split to 2 fields somehow
}

PYTHON_TYPES_MAPPING = {
    str: TYPE_STRING,
    unicode: TYPE_STRING,
    bool : TYPE_BOOLEAN,
    int : TYPE_INTEGER,
    long: TYPE_INTEGER,
    float: TYPE_FLOAT
}

#
# ElasticSearch currently supports only simple Lucene style queries (like Kibana
# but without the aggregation).
#
# Full blown JSON based ElasticSearch queries (including aggregations) will be
# added later
#
# Simple query example:
#
# - Query the index named "twitter"
# - Filter by "user:kimchy"
# - Return the fields: "@timestamp", "tweet" and "user"
# - Return up to 15 results
# - Sort by @timestamp ascending
#
# {
#     "index" : "twitter",
#     "query" : "user:kimchy",
#     "fields" : ["@timestamp", "tweet", "user"],
#     "size" : 15,
#     "sort" : "@timestamp:asc"
# }
#
#
# Simple query on a logstash ElasticSearch instance:
#
# - Query the index named "logstash-2015.04.*" (in this case its all of April 2015)
# - Filter by type:events AND eventName:UserUpgrade AND channel:selfserve
# - Return fields: "@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"
# - Return up to 250 results
# - Sort by @timestamp ascending

# {
#     "index" : "logstash-2015.04.*",
#     "query" : "type:events AND eventName:UserUpgrade AND channel:selfserve",
#     "fields" : ["@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"],
#     "size" : 250,
#     "sort" : "@timestamp:asc"
# }
#
#

class ElasticSearch(BaseQueryRunner):
    DEBUG_ENABLED = False

    """
    ElastichSearch query runner for querying ElasticSearch servers.
    Query can be done using the Lucene Syntax (single line) or the more complex,
    full blown ElasticSearch JSON syntax
    """
    @classmethod
    def configuration_schema(cls):
        return {
            'type': 'object',
            'properties': {
                'server': {
                    'type': 'string',
                    'title': 'Base URL'
                }
            },
            "required" : ["server"]
        }

    @classmethod
    def enabled(cls):
        return True

    @classmethod
    def annotate_query(cls):
        return False

    def __init__(self, configuration_json):
        super(ElasticSearch, self).__init__(configuration_json)

        self.syntax = "json"

        if self.DEBUG_ENABLED:
            http_client.HTTPConnection.debuglevel = 1

            # you need to initialize logging, otherwise you will not see anything from requests
            logging.basicConfig()
            logging.getLogger().setLevel(logging.DEBUG)
            requests_log = logging.getLogger("requests.packages.urllib3")
            requests_log.setLevel(logging.DEBUG)
            requests_log.propagate = True

    def get_mappings(self, url):
        mappings = {}

        r = requests.get(url)
        mappings_data = r.json()
        for index_name in mappings_data:
            index_mappings = mappings_data[index_name]
            for m in index_mappings.get("mappings", {}):
                for property_name in index_mappings["mappings"][m]["properties"]:
                    property_data = index_mappings["mappings"][m]["properties"][property_name]
                    if not property_name in mappings:
                        property_type = property_data.get("type", None)
                        if property_type:
                            if property_type in ELASTICSEARCH_TYPES_MAPPING:
                                mappings[property_name] = property_type
                            else:
                                raise "Unknown property type: {0}".format(property_type)

        return mappings

    def parse_results(self, mappings, result_fields, raw_result, result_columns, result_rows):
        result_columns_index = {}
        for c in result_columns:
            result_columns_index[c["name"]] = c

        result_fields_index = {}
        if result_fields:
            for r in result_fields:
                result_fields_index[r] = None

        for h in raw_result["hits"]["hits"]:
            row = {}
            for column in h["_source"]:
                if result_fields and column not in result_fields_index:
                    continue

                if column not in result_columns_index:
                    result_columns.append({
                        "name" : column,
                        "friendly_name" : column,
                        "type" : mappings.get(column, "string")
                    })
                    result_columns_index[column] = result_columns[-1]

                row[column] = h["_source"][column]

            if row and len(row) > 0:
                result_rows.append(row)

    def execute_simple_query(self, url, _from, mappings, result_fields, result_columns, result_rows):
        url += "&from={0}".format(_from)
        r = requests.get(url)
        if r.status_code != 200:
            raise Exception("Failed to execute query. Return Code: {0} Reason: {1}".format(r.status_code, r.text))

        raw_result = r.json()

        self.parse_results(mappings, result_fields, raw_result, result_columns, result_rows)

        total = raw_result["hits"]["total"]
        result_size = len(raw_result["hits"]["hits"])
        logger.debug("Result Size: {0} Total: {1}".format(result_size, total))

        return raw_result["hits"]["total"]

    def run_query(self, query):
        try:
            error = None

            logger.debug(query)
            query_params = json.loads(query)

            index_name = query_params["index"]
            query_data = query_params["query"]
            size = int(query_params.get("size", 500))
            result_fields = query_params.get("fields", None)
            sort = query_params.get("sort", None)

            server_url = self.configuration["server"]
            if not server_url:
                error = "Missing configuration key 'server'"
                return None, error

            if server_url[-1] == "/":
                server_url = server_url[:-1]

            url = "{0}/{1}/_search?".format(server_url, index_name)
            mapping_url = "{0}/{1}/_mapping".format(server_url, index_name)

            mappings = self.get_mappings(mapping_url)

            logger.debug(json.dumps(mappings, indent=4))

            if size:
                url += "&size={0}".format(size)

            if sort:
                url += "&sort={0}".format(urllib.quote_plus(sort))

            url += "&q={0}".format(urllib.quote_plus(query_data))

            logger.debug("Using URL: {0}".format(url))
            logger.debug("Using Query: {0}".format(query_data))

            result_columns = []
            result_rows = []
            if isinstance(query_data, str) or isinstance(query_data, unicode):
                _from = 0
                while True:
                    total = self.execute_simple_query(url, _from, mappings, result_fields, result_columns, result_rows)
                    _from += size
                    if _from >= total:
                        break
            else:
                # TODO: Handle complete ElasticSearch queries (JSON based sent over HTTP POST)
                raise Exception("Advanced queries are not supported")

            json_data = json.dumps({
                "columns" : result_columns,
                "rows" : result_rows
            })
        except KeyboardInterrupt:
            error = "Query cancelled by user."
            json_data = None
        except Exception as e:
            raise sys.exc_info()[1], None, sys.exc_info()[2]

        return json_data, error


register(ElasticSearch)
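A usage sketch matching the simple-query format documented in the comments above; the server URL is an assumption, and a reachable Elasticsearch instance is required:

import json

runner = ElasticSearch(json.dumps({"server": "http://localhost:9200"}))
# Same shape as the "twitter" example in the module comment block.
json_data, error = runner.run_query(json.dumps({
    "index": "twitter",
    "query": "user:kimchy",
    "fields": ["@timestamp", "tweet", "user"],
    "size": 15,
    "sort": "@timestamp:asc"
}))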
redash/query_runner/google_spreadsheets.py (new file, 117 lines)
@@ -0,0 +1,117 @@
from base64 import b64decode
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
try:
|
||||
import gspread
|
||||
from oauth2client.client import SignedJwtAssertionCredentials
|
||||
from dateutil import parser
|
||||
enabled = True
|
||||
except ImportError:
|
||||
logger.warning("Missing dependencies. Please install gspread, dateutil and oauth2client.")
|
||||
logger.warning("You can use pip: pip install gspread dateutil oauth2client")
|
||||
|
||||
enabled = False
|
||||
|
||||
|
||||
def _load_key(filename):
|
||||
with open(filename, "rb") as f:
|
||||
return json.loads(f.read())
|
||||
|
||||
|
||||
def _guess_type(value):
|
||||
try:
|
||||
val = int(value)
|
||||
return TYPE_INTEGER, val
|
||||
except ValueError:
|
||||
pass
|
||||
try:
|
||||
val = float(value)
|
||||
return TYPE_FLOAT, val
|
||||
except ValueError:
|
||||
pass
|
||||
if str(value).lower() in ('true', 'false'):
|
||||
return TYPE_BOOLEAN, bool(value)
|
||||
try:
|
||||
val = parser.parse(value)
|
||||
return TYPE_DATETIME, val
|
||||
except ValueError:
|
||||
pass
|
||||
return TYPE_STRING, value
|
||||
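To make the cascade above concrete, here is how a few hypothetical cell values would be classified (a sketch; the TYPE_* constants come from redash.query_runner):

    _guess_type("42")          # -> (TYPE_INTEGER, 42)
    _guess_type("3.14")        # -> (TYPE_FLOAT, 3.14)
    _guess_type("false")       # -> (TYPE_BOOLEAN, False)
    _guess_type("2015-01-15")  # -> (TYPE_DATETIME, datetime(2015, 1, 15, 0, 0))
    _guess_type("hello")       # -> (TYPE_STRING, "hello")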

class GoogleSpreadsheet(BaseQueryRunner):
    HEADER_INDEX = 0

    @classmethod
    def annotate_query(cls):
        return False

    @classmethod
    def type(cls):
        return "google_spreadsheets"

    @classmethod
    def enabled(cls):
        return enabled

    @classmethod
    def configuration_schema(cls):
        return {
            'type': 'object',
            'properties': {
                'jsonKeyFile': {
                    'type': 'string',
                    'title': 'JSON Key File'
                }
            },
            'required': ['jsonKeyFile']
        }

    def __init__(self, configuration_json):
        super(GoogleSpreadsheet, self).__init__(configuration_json)

    def _get_spreadsheet_service(self):
        scope = [
            'https://spreadsheets.google.com/feeds',
        ]

        key = json.loads(b64decode(self.configuration['jsonKeyFile']))
        credentials = SignedJwtAssertionCredentials(key['client_email'], key["private_key"], scope=scope)
        spreadsheetservice = gspread.authorize(credentials)
        return spreadsheetservice

    def run_query(self, query):
        logger.debug("Spreadsheet is about to execute query: %s", query)
        values = query.split("|")
        key = values[0]  # key of the spreadsheet
        # If the spreadsheet contains more than one worksheet, this is its index.
        worksheet_num = 0 if len(values) != 2 else int(values[1])
        try:
            spreadsheet_service = self._get_spreadsheet_service()
            spreadsheet = spreadsheet_service.open_by_key(key)
            worksheets = spreadsheet.worksheets()
            all_data = worksheets[worksheet_num].get_all_values()
            column_names = []
            columns = []
            for j, column_name in enumerate(all_data[self.HEADER_INDEX]):
                column_names.append(column_name)
                columns.append({
                    'name': column_name,
                    'friendly_name': column_name,
                    # _guess_type returns a (type, value) tuple; only the type is used here.
                    'type': _guess_type(all_data[self.HEADER_INDEX + 1][j])[0]
                })
            rows = [dict(zip(column_names, row)) for row in all_data[self.HEADER_INDEX + 1:]]
            data = {'columns': columns, 'rows': rows}
            json_data = json.dumps(data, cls=JSONEncoder)
            error = None
        except Exception as e:
            raise sys.exc_info()[1], None, sys.exc_info()[2]

        return json_data, error

register(GoogleSpreadsheet)
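As the split("|") in run_query implies, the query text is just a spreadsheet key with an optional worksheet index — for example (hypothetical key):

    # First worksheet of the spreadsheet:
    query = "1BxiMVs0XRA-hypothetical-key"
    # Second worksheet (index 1) of the same spreadsheet:
    query = "1BxiMVs0XRA-hypothetical-key|1"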
redash/query_runner/impala_ds.py (new file, 159 lines)
@@ -0,0 +1,159 @@
import json
import logging
import sys

from redash.query_runner import *
from redash.utils import JSONEncoder

logger = logging.getLogger(__name__)

try:
    from impala.dbapi import connect
    from impala.error import DatabaseError, RPCError
    enabled = True
except ImportError as e:
    logger.exception(e)
    logger.warning("Missing dependencies. Please install impyla.")
    logger.warning("You can use pip: pip install impyla")
    enabled = False

COLUMN_NAME = 0
COLUMN_TYPE = 1

types_map = {
    'BIGINT': TYPE_INTEGER,
    'TINYINT': TYPE_INTEGER,
    'SMALLINT': TYPE_INTEGER,
    'INT': TYPE_INTEGER,
    'DOUBLE': TYPE_FLOAT,
    'DECIMAL': TYPE_FLOAT,
    'FLOAT': TYPE_FLOAT,
    'REAL': TYPE_FLOAT,
    'BOOLEAN': TYPE_BOOLEAN,
    'TIMESTAMP': TYPE_DATETIME,
    'CHAR': TYPE_STRING,
    'STRING': TYPE_STRING,
    'VARCHAR': TYPE_STRING
}


class Impala(BaseQueryRunner):
    @classmethod
    def configuration_schema(cls):
        return {
            "type": "object",
            "properties": {
                "host": {
                    "type": "string"
                },
                "port": {
                    "type": "number"
                },
                "protocol": {
                    "type": "string",
                    "title": "Please specify beeswax or hiveserver2"
                },
                "database": {
                    "type": "string"
                },
                "use_ldap": {
                    "type": "boolean"
                },
                "ldap_user": {
                    "type": "string"
                },
                "ldap_password": {
                    "type": "string"
                },
                "timeout": {
                    "type": "number"
                }
            },
            "required": ["host"]
        }

    @classmethod
    def type(cls):
        return "impala"

    def __init__(self, configuration_json):
        super(Impala, self).__init__(configuration_json)

    def _run_query_internal(self, query):
        results, error = self.run_query(query)

        if error is not None:
            raise Exception("Failed getting schema.")
        return json.loads(results)['rows']

    def get_schema(self):
        try:
            schemas_query = "show schemas;"

            tables_query = "show tables in %s;"

            columns_query = "show column stats %s;"

            schema = {}
            for schema_name in map(lambda a: a['name'], self._run_query_internal(schemas_query)):
                for table_name in map(lambda a: a['name'], self._run_query_internal(tables_query % schema_name)):
                    columns = map(lambda a: a['Column'], self._run_query_internal(columns_query % table_name))

                    if schema_name != 'default':
                        table_name = '{}.{}'.format(schema_name, table_name)

                    schema[table_name] = {'name': table_name, 'columns': columns}
        except Exception as e:
            raise sys.exc_info()[1], None, sys.exc_info()[2]
        return schema.values()

    def run_query(self, query):
        connection = None
        try:
            connection = connect(**self.configuration)

            cursor = connection.cursor()

            cursor.execute(query)

            column_names = []
            columns = []

            for column in cursor.description:
                column_name = column[COLUMN_NAME]
                column_names.append(column_name)

                columns.append({
                    'name': column_name,
                    'friendly_name': column_name,
                    'type': types_map.get(column[COLUMN_TYPE], None)
                })

            rows = [dict(zip(column_names, row)) for row in cursor]

            data = {'columns': columns, 'rows': rows}
            json_data = json.dumps(data, cls=JSONEncoder)
            error = None
            cursor.close()
        except DatabaseError as e:
            logging.exception(e)
            json_data = None
            error = e.message
        except RPCError as e:
            logging.exception(e)
            json_data = None
            error = "Metastore Error [%s]" % e.message
        except KeyboardInterrupt:
            connection.cancel()
            error = "Query cancelled by user."
            json_data = None
        except Exception as e:
            logging.exception(e)
            raise sys.exc_info()[1], None, sys.exc_info()[2]
        finally:
            # Guard against connect() having failed before assignment.
            if connection:
                connection.close()

        return json_data, error

register(Impala)
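Since run_query passes the configuration straight into impyla's connect(), a data source's options map onto connect() keyword arguments — roughly like this (hypothetical host and values; the exact keyword set depends on your impyla version):

    options = {
        "host": "impalad.example.com",  # the only key required by the schema above
        "port": 21050,
        "protocol": "hiveserver2",
        "database": "analytics",
        "timeout": 3600,
    }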
redash/query_runner/influx_db.py (new file, 83 lines)
@@ -0,0 +1,83 @@
import json
import logging

from redash.utils import JSONEncoder
from redash.query_runner import *

logger = logging.getLogger(__name__)

try:
    from influxdb import InfluxDBClusterClient
    enabled = True

except ImportError:
    logger.warning("Missing dependencies. Please install influxdb.")
    logger.warning("You can use pip: pip install influxdb")
    enabled = False

def _transform_result(results):
    result_columns = []
    result_rows = []

    for result in results:
        if not result_columns:
            for c in result.raw['series'][0]['columns']:
                result_columns.append({"name": c})

        for point in result.get_points():
            result_rows.append(point)

    return json.dumps({
        "columns" : result_columns,
        "rows" : result_rows
    }, cls=JSONEncoder)

class InfluxDB(BaseQueryRunner):
    @classmethod
    def configuration_schema(cls):
        return {
            'type': 'object',
            'properties': {
                'url': {
                    'type': 'string'
                }
            },
            'required': ['url']
        }

    @classmethod
    def enabled(cls):
        return enabled

    @classmethod
    def annotate_query(cls):
        return False

    @classmethod
    def type(cls):
        return "influxdb"

    def __init__(self, configuration_json):
        super(InfluxDB, self).__init__(configuration_json)

    def run_query(self, query):
        client = InfluxDBClusterClient.from_DSN(self.configuration['url'])

        logger.debug("influxdb url: %s", self.configuration['url'])
        logger.debug("influxdb got query: %s", query)

        try:
            results = client.query(query)
            if not isinstance(results, list):
                results = [results]

            json_data = _transform_result(results)
            error = None
        except Exception as ex:
            json_data = None
            error = ex.message

        return json_data, error


register(InfluxDB)
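InfluxDBClusterClient.from_DSN() takes a DSN-style URL, so the single 'url' configuration value would look something like this (hypothetical credentials, host and database):

    # influxdb://<user>:<password>@<host>:<port>/<database>
    url = "influxdb://redash:secret@influx.example.com:8086/metrics"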
redash/query_runner/mongodb.py
@@ -3,6 +3,7 @@ import datetime
 import logging
 import re
 import time
+from dateutil.parser import parse

 from redash.utils import JSONEncoder
 from redash.query_runner import *
@@ -12,6 +13,7 @@ logger = logging.getLogger(__name__)
 try:
     import pymongo
     from bson.objectid import ObjectId
+    from bson.son import SON
     enabled = True

 except ImportError:
@@ -32,24 +34,73 @@ TYPES_MAP = {

 date_regex = re.compile("ISODate\(\"(.*)\"\)", re.IGNORECASE)

+class MongoDBJSONEncoder(JSONEncoder):
+    def default(self, o):
+        if isinstance(o, ObjectId):
+            return str(o)
+
+        return super(MongoDBJSONEncoder, self).default(o)
+
-def _get_column_by_name(columns, column_name):
-    for c in columns:
-        if "name" in c and c["name"] == column_name:
-            return c
-
-    return None
-
-
-def _convert_date(q, field_name):
-    m = date_regex.findall(q[field_name])
-    if len(m) > 0:
-        if q[field_name].find(":") == -1:
-            q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d")))
-        else:
-            q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d %H:%M")))
-
+# Simple query example:
+#
+# {
+#     "collection" : "my_collection",
+#     "query" : {
+#         "date" : {
+#             "$gt" : "ISODate(\"2015-01-15 11:41\")",
+#         },
+#         "type" : 1
+#     },
+#     "fields" : {
+#         "_id" : 1,
+#         "name" : 2
+#     },
+#     "sort" : [
+#         {
+#             "name" : "date",
+#             "direction" : -1
+#         }
+#     ]
+#
+# }
+#
+#
+# Aggregation
+# ===========
+# Uses a syntax similar to the one used in PyMongo, however to support the
+# correct order of sorting, it uses a regular list for the "$sort" operation
+# that converts into a SON (sorted dictionary) object before execution.
+#
+# Aggregation query example:
+#
+# {
+#     "collection" : "things",
+#     "aggregate" : [
+#         {
+#             "$unwind" : "$tags"
+#         },
+#         {
+#             "$group" : {
+#                 "_id" : "$tags",
+#                 "count" : { "$sum" : 1 }
+#             }
+#         },
+#         {
+#             "$sort" : [
+#                 {
+#                     "name" : "count",
+#                     "direction" : -1
+#                 },
+#                 {
+#                     "name" : "_id",
+#                     "direction" : -1
+#                 }
+#             ]
+#         }
+#     ]
+# }
+#
+#
 class MongoDB(BaseQueryRunner):
     @classmethod
     def configuration_schema(cls):
@@ -89,22 +140,43 @@

         self.is_replica_set = True if "replicaSetName" in self.configuration and self.configuration["replicaSetName"] else False

+    def _get_column_by_name(self, columns, column_name):
+        for c in columns:
+            if "name" in c and c["name"] == column_name:
+                return c
+
+        return None
+
+    def _fix_dates(self, data):
+        for k in data:
+            if isinstance(data[k], list):
+                for i in range(0, len(data[k])):
+                    self._fix_dates(data[k][i])
+            elif isinstance(data[k], dict):
+                self._fix_dates(data[k])
+            else:
+                if isinstance(data[k], (str, unicode)):
+                    self._convert_date(data, k)
+
+    def _convert_date(self, q, field_name):
+        m = date_regex.findall(q[field_name])
+        if len(m) > 0:
+            q[field_name] = parse(m[0], yearfirst=True)
+
     def run_query(self, query):
         if self.is_replica_set:
             db_connection = pymongo.MongoReplicaSetClient(self.configuration["connectionString"], replicaSet=self.configuration["replicaSetName"])
         else:
             db_connection = pymongo.MongoClient(self.configuration["connectionString"])

         if self.db_name not in db_connection.database_names():
             return None, "Unknown database name '%s'" % self.db_name

-        db = db_connection[self.db_name ]
+        db = db_connection[self.db_name]

         logger.debug("mongodb connection string: %s", self.configuration['connectionString'])
         logger.debug("mongodb got query: %s", query)

         try:
             query_data = json.loads(query)
+            self._fix_dates(query_data)
         except ValueError:
             return None, "Invalid query format. The query is not a valid JSON."

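A quick sketch of what the new _fix_dates/_convert_date pair does to an incoming query document (hypothetical query; assumes python-dateutil is installed):

    q = {"date": {"$gt": "ISODate(\"2015-01-15 11:41\")"}}
    # After self._fix_dates(q), the string is replaced in place:
    # q == {"date": {"$gt": datetime.datetime(2015, 1, 15, 11, 41)}}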
@@ -113,19 +185,26 @@
         else:
             collection = query_data["collection"]

-        q = None
-        if "query" in query_data:
-            q = query_data["query"]
-            for k in q:
-                if q[k] and type(q[k]) in [str, unicode]:
-                    logging.debug(q[k])
-                    _convert_date(q, k)
-                elif q[k] and type(q[k]) is dict:
-                    for k2 in q[k]:
-                        if type(q[k][k2]) in [str, unicode]:
-                            _convert_date(q[k], k2)
-
+        q = query_data.get("query", None)
         f = None

         aggregate = query_data.get("aggregate", None)
         if aggregate:
+            for step in aggregate:
+                if "$sort" in step:
+                    sort_list = []
+                    for sort_item in step["$sort"]:
+                        sort_list.append((sort_item["name"], sort_item["direction"]))

+                    step["$sort"] = SON(sort_list)
+
+        if not aggregate:
+            s = None
+            if "sort" in query_data and query_data["sort"]:
+                s = []
+                for field in query_data["sort"]:
+                    s.append((field["name"], field["direction"]))
+
+            if "fields" in query_data:
+                f = query_data["fields"]
+
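The list-to-SON conversion above preserves the sort order that a plain JSON object would lose; as a standalone sketch (assumes pymongo's bson package):

    from bson.son import SON

    sort_spec = [{"name": "count", "direction": -1},
                 {"name": "_id", "direction": -1}]
    son_sort = SON([(item["name"], item["direction"]) for item in sort_spec])
    # SON keeps keys in insertion order, which MongoDB's $sort requires.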
@@ -138,36 +217,47 @@
         columns = []
         rows = []

         error = None
         json_data = None
         cursor = None
+        if q or (not q and not aggregate):
-        if s:
-            cursor = db[collection].find(q, f).sort(s)
-        else:
-            cursor = db[collection].find(q, f)
-
+            if s:
+                cursor = db[collection].find(q, f).sort(s)
+            else:
+                cursor = db[collection].find(q, f)
+            if "skip" in query_data:
+                cursor = cursor.skip(query_data["skip"])

-        if "limit" in query_data and query_data["limit"]:
-            cursor = cursor.limit(query_data["limit"])
+            if "limit" in query_data:
+                cursor = cursor.limit(query_data["limit"])

+        elif aggregate:
+            r = db[collection].aggregate(aggregate)
+
+            # Backwards compatibility with older pymongo versions.
+            #
+            # Older pymongo version would return a dictionary from an aggregate command.
+            # The dict would contain a "result" key which would hold the cursor.
+            # Newer ones return pymongo.command_cursor.CommandCursor.
+            if isinstance(r, dict):
+                cursor = r["result"]
+            else:
+                cursor = r
+
         for r in cursor:
             for k in r:
-                if _get_column_by_name(columns, k) is None:
+                if self._get_column_by_name(columns, k) is None:
                     columns.append({
                         "name": k,
                         "friendly_name": k,
                         "type": TYPES_MAP.get(type(r[k]), TYPE_STRING)
                     })

                 # Convert ObjectId to string
                 if type(r[k]) == ObjectId:
                     r[k] = str(r[k])

             rows.append(r)

         if f:
             ordered_columns = []
             for k in sorted(f, key=f.get):
-                ordered_columns.append(_get_column_by_name(columns, k))
+                ordered_columns.append(self._get_column_by_name(columns, k))

             columns = ordered_columns

@@ -176,7 +266,7 @@
             "rows": rows
         }
         error = None
-        json_data = json.dumps(data, cls=JSONEncoder)
+        json_data = json.dumps(data, cls=MongoDBJSONEncoder)

         return json_data, error

redash/query_runner/mysql.py
@@ -7,6 +7,24 @@ from redash.query_runner import *

 logger = logging.getLogger(__name__)

+types_map = {
+    0: TYPE_FLOAT,
+    1: TYPE_INTEGER,
+    2: TYPE_INTEGER,
+    3: TYPE_INTEGER,
+    4: TYPE_FLOAT,
+    5: TYPE_FLOAT,
+    7: TYPE_DATETIME,
+    8: TYPE_INTEGER,
+    9: TYPE_INTEGER,
+    10: TYPE_DATE,
+    12: TYPE_DATETIME,
+    15: TYPE_STRING,
+    16: TYPE_INTEGER,
+    246: TYPE_FLOAT,
+    253: TYPE_STRING,
+    254: TYPE_STRING,
+}
+
 class Mysql(BaseQueryRunner):
     @classmethod
@@ -85,32 +103,29 @@
     def run_query(self, query):
         import MySQLdb

-        connection = MySQLdb.connect(host=self.configuration.get('host', ''),
-                                     user=self.configuration.get('user', ''),
-                                     passwd=self.configuration.get('passwd', ''),
-                                     db=self.configuration['db'],
-                                     port=self.configuration.get('port', 3306),
-                                     charset='utf8', use_unicode=True)
-        cursor = connection.cursor()
-
-        logger.debug("MySQL running query: %s", query)
-
+        connection = None
         try:
+            connection = MySQLdb.connect(host=self.configuration.get('host', ''),
+                                         user=self.configuration.get('user', ''),
+                                         passwd=self.configuration.get('passwd', ''),
+                                         db=self.configuration['db'],
+                                         port=self.configuration.get('port', 3306),
+                                         charset='utf8', use_unicode=True)
+            cursor = connection.cursor()
+            logger.debug("MySQL running query: %s", query)
             cursor.execute(query)

             data = cursor.fetchall()

-            cursor_desc = cursor.description
-            if cursor_desc is not None:
-                num_fields = len(cursor_desc)
-                column_names = [i[0] for i in cursor.description]
+            # TODO - very similar to pg.py
+            if cursor.description is not None:
+                columns_data = [(i[0], i[1]) for i in cursor.description]

-                rows = [dict(zip(column_names, row)) for row in data]
+                rows = [dict(zip((c[0] for c in columns_data), row)) for row in data]

-                # TODO: add types support
-                columns = [{'name': col_name,
-                            'friendly_name': col_name,
-                            'type': None} for col_name in column_names]
+                columns = [{'name': col[0],
+                            'friendly_name': col[0],
+                            'type': types_map.get(col[1], None)} for col in columns_data]

                 data = {'columns': columns, 'rows': rows}
                 json_data = json.dumps(data, cls=JSONEncoder)
@@ -129,8 +144,9 @@
         except Exception as e:
             raise sys.exc_info()[1], None, sys.exc_info()[2]
         finally:
-            connection.close()
+            if connection:
+                connection.close()

         return json_data, error

-register(Mysql)
+register(Mysql)
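The integer keys in the new types_map appear to be MySQLdb field type codes; they can be written symbolically as well (a sketch, assuming the MySQL-python package):

    from MySQLdb.constants import FIELD_TYPE

    FIELD_TYPE.LONG        # 3   -> TYPE_INTEGER
    FIELD_TYPE.DATETIME    # 12  -> TYPE_DATETIME
    FIELD_TYPE.VAR_STRING  # 253 -> TYPE_STRING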
redash/query_runner/pg.py
@@ -93,7 +93,7 @@ class PostgreSQL(BaseQueryRunner):
         results, error = self.run_query(query)

         if error is not None:
-            raise Exception("Failed getting schema.")
+            raise Exception("Failed getting schema.")

         results = json.loads(results)

@@ -127,35 +127,38 @@ class PostgreSQL(BaseQueryRunner):
             columns = []
             duplicates_counter = 1

-            for column in cursor.description:
-                # TODO: this deduplication needs to be generalized and reused in all query runners.
-                column_name = column.name
-                if column_name in column_names:
-                    column_name += str(duplicates_counter)
-                    duplicates_counter += 1
+            if cursor.description is not None:
+                for column in cursor.description:
+                    # TODO: this deduplication needs to be generalized and reused in all query runners.
+                    column_name = column.name
+                    if column_name in column_names:
+                        column_name += str(duplicates_counter)
+                        duplicates_counter += 1

-                column_names.append(column_name)
+                    column_names.append(column_name)

-                columns.append({
-                    'name': column_name,
-                    'friendly_name': column_name,
-                    'type': types_map.get(column.type_code, None)
-                })
+                    columns.append({
+                        'name': column_name,
+                        'friendly_name': column_name,
+                        'type': types_map.get(column.type_code, None)
+                    })

-            rows = [dict(zip(column_names, row)) for row in cursor]
+                rows = [dict(zip(column_names, row)) for row in cursor]

-            data = {'columns': columns, 'rows': rows}
-            json_data = json.dumps(data, cls=JSONEncoder)
-            error = None
-            cursor.close()
+                data = {'columns': columns, 'rows': rows}
+                error = None
+                json_data = json.dumps(data, cls=JSONEncoder)
+            else:
+                error = 'Query completed but it returned no data.'
+                json_data = None
         except (select.error, OSError) as e:
             logging.exception(e)
             error = "Query interrupted. Please retry."
             json_data = None
         except psycopg2.DatabaseError as e:
             logging.exception(e)
-            json_data = None
             error = e.message
+            json_data = None
         except KeyboardInterrupt:
             connection.cancel()
             error = "Query cancelled by user."

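The deduplication loop above renames repeated column headers rather than silently dropping them; for instance (illustrative):

    # SELECT 1 AS x, 2 AS x, 3 AS x
    # produces column names ["x", "x1", "x2"] after the loop above.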
redash/query_runner/presto.py (new file, 98 lines)
@@ -0,0 +1,98 @@
import json

from redash.utils import JSONEncoder
from redash.query_runner import *

import logging
logger = logging.getLogger(__name__)

try:
    from pyhive import presto
    enabled = True

except ImportError:
    logger.warning("Missing dependencies. Please install PyHive.")
    logger.warning("You can use pip: pip install pyhive")
    enabled = False

PRESTO_TYPES_MAPPING = {
    "integer" : TYPE_INTEGER,
    "long" : TYPE_INTEGER,
    "bigint" : TYPE_INTEGER,
    "float" : TYPE_FLOAT,
    "double" : TYPE_FLOAT,
    "boolean" : TYPE_BOOLEAN,
    "string" : TYPE_STRING,
    "varchar": TYPE_STRING,
    "date" : TYPE_DATE,
}

class Presto(BaseQueryRunner):
    @classmethod
    def configuration_schema(cls):
        return {
            'type': 'object',
            'properties': {
                'host': {
                    'type': 'string'
                },
                'port': {
                    'type': 'number'
                },
                'schema': {
                    'type': 'string'
                },
                'catalog': {
                    'type': 'string'
                },
                'username': {
                    'type': 'string'
                }
            },
            'required': ['host']
        }

    @classmethod
    def enabled(cls):
        return enabled

    @classmethod
    def annotate_query(cls):
        return False

    @classmethod
    def type(cls):
        return "presto"

    def __init__(self, configuration_json):
        super(Presto, self).__init__(configuration_json)

    def run_query(self, query):
        connection = presto.connect(
            host=self.configuration.get('host', ''),
            port=self.configuration.get('port', 8080),
            username=self.configuration.get('username', 'redash'),
            catalog=self.configuration.get('catalog', 'hive'),
            schema=self.configuration.get('schema', 'default'))

        cursor = connection.cursor()

        try:
            cursor.execute(query)
            columns_data = [(row[0], row[1]) for row in cursor.description]

            columns = [{'name': col[0],
                        'friendly_name': col[0],
                        'type': PRESTO_TYPES_MAPPING.get(col[1], None)} for col in columns_data]

            rows = [dict(zip([c[0] for c in columns_data], r)) for r in cursor.fetchall()]
            data = {'columns': columns, 'rows': rows}
            json_data = json.dumps(data, cls=JSONEncoder)
            error = None
        except Exception as ex:
            json_data = None
            error = ex.message

        return json_data, error

register(Presto)
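Per the DB-API, each cursor.description entry is a (name, type_code, ...) tuple; with pyhive's Presto driver the type_code is a lowercase type name, which is why PRESTO_TYPES_MAPPING is keyed by strings. A hypothetical illustration:

    # Description for "SELECT user_id, created_at FROM events" might be:
    # [("user_id", "bigint", ...), ("created_at", "date", ...)]
    # -> columns_data == [("user_id", "bigint"), ("created_at", "date")]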
redash/query_runner/python.py
@@ -1,6 +1,8 @@
 import sys
+import datetime
 import json
 import logging
+import weakref

 from redash.query_runner import *
 from redash import models
@@ -12,93 +14,30 @@ logger = logging.getLogger(__name__)
 from RestrictedPython import compile_restricted
 from RestrictedPython.Guards import safe_builtins

-ALLOWED_MODULES = {}
-
+class CustomPrint(object):
+    """ CustomPrint redirect "print" calls to be sent as "log" on the result object """
+    def __init__(self):
+        self.enabled = True
+        self.lines = []

-def custom_write(obj):
-    """
-    Custom hooks which controls the way objects/lists/tuples/dicts behave in
-    RestrictedPython
-    """
-    return obj
+    def write(self, text):
+        if self.enabled:
+            if text and text.strip():
+                log_line = "[{0}] {1}".format(datetime.datetime.utcnow().isoformat(), text)
+                self.lines.append(log_line)

+    def enable(self):
+        self.enabled = True

-def custom_import(name, globals=None, locals=None, fromlist=(), level=0):
-    if name in ALLOWED_MODULES:
-        m = None
-        if ALLOWED_MODULES[name] is None:
-            m = importlib.import_module(name)
-            ALLOWED_MODULES[name] = m
-        else:
-            m = ALLOWED_MODULES[name]
+    def disable(self):
+        self.enabled = False

-        return m
-
-    raise Exception("'{0}' is not configured as a supported import module".format(name))
-
-
-def custom_get_item(obj, key):
-    return obj[key]
-
-
-def get_query_result(query_id):
-    try:
-        query = models.Query.get_by_id(query_id)
-    except models.Query.DoesNotExist:
-        raise Exception("Query id %s does not exist." % query_id)
-
-    if query.latest_query_data is None:
-        raise Exception("Query does not have results yet.")
-
-    if query.latest_query_data.data is None:
-        raise Exception("Query does not have results yet.")
-
-    return json.loads(query.latest_query_data.data)
-
-
-def execute_query(data_source_name_or_id, query):
-    try:
-        if type(data_source_name_or_id) == int:
-            data_source = models.DataSource.get_by_id(data_source_name_or_id)
-        else:
-            data_source = models.DataSource.get(models.DataSource.name==data_source_name_or_id)
-    except models.DataSource.DoesNotExist:
-        raise Exception("Wrong data source name/id: %s." % data_source_name_or_id)
-
-    query_runner = get_query_runner(data_source.type, data_source.options)
-
-    data, error = query_runner.run_query(query)
-    if error is not None:
-        raise Exception(error)
-
-    # TODO: allow avoiding the json.dumps/loads in same process
-    return json.loads(data)
-
-
-def add_result_column(result, column_name, friendly_name, column_type):
-    """ Helper function to add columns inside a Python script running in re:dash in an easier way """
-    if column_type not in SUPPORTED_COLUMN_TYPES:
-        raise Exception("'{0}' is not a supported column type".format(column_type))
-
-    if not "columns" in result:
-        result["columns"] = []
-
-    result["columns"].append({
-        "name" : column_name,
-        "friendly_name" : friendly_name,
-        "type" : column_type
-    })
-
-
-def add_result_row(result, values):
-    if not "rows" in result:
-        result["rows"] = []
-
-    result["rows"].append(values)
+    def __call__(self):
+        return self


 class Python(BaseQueryRunner):
     """
     This is very, very unsafe. Use at your own risk with people you really trust.
     """
     @classmethod
     def configuration_schema(cls):
         return {
@@ -120,15 +59,96 @@
         return False

     def __init__(self, configuration_json):
-        global ALLOWED_MODULES
-
         super(Python, self).__init__(configuration_json)

         self.syntax = "python"

+        self._allowed_modules = {}
+        self._script_locals = { "result" : { "rows" : [], "columns" : [], "log" : [] } }
+        self._enable_print_log = True
+        self._custom_print = CustomPrint()
+
         if self.configuration.get("allowedImportModules", None):
             for item in self.configuration["allowedImportModules"].split(","):
-                ALLOWED_MODULES[item] = None
+                self._allowed_modules[item] = None

+    def custom_import(self, name, globals=None, locals=None, fromlist=(), level=0):
+        if name in self._allowed_modules:
+            m = None
+            if self._allowed_modules[name] is None:
+                m = importlib.import_module(name)
+                self._allowed_modules[name] = m
+            else:
+                m = self._allowed_modules[name]
+
+            return m
+
+        raise Exception("'{0}' is not configured as a supported import module".format(name))
+
+    def custom_write(self, obj):
+        """
+        Custom hooks which controls the way objects/lists/tuples/dicts behave in
+        RestrictedPython
+        """
+        return obj
+
+    def custom_get_item(self, obj, key):
+        return obj[key]
+
+    def custom_get_iter(self, obj):
+        return iter(obj)
+
+    def add_result_column(self, result, column_name, friendly_name, column_type):
+        """ Helper function to add columns inside a Python script running in re:dash in an easier way """
+        if column_type not in SUPPORTED_COLUMN_TYPES:
+            raise Exception("'{0}' is not a supported column type".format(column_type))
+
+        if not "columns" in result:
+            result["columns"] = []
+
+        result["columns"].append({
+            "name" : column_name,
+            "friendly_name" : friendly_name,
+            "type" : column_type
+        })
+
+    def add_result_row(self, result, values):
+        if not "rows" in result:
+            result["rows"] = []
+
+        result["rows"].append(values)
+
+    def execute_query(self, data_source_name_or_id, query):
+        try:
+            if type(data_source_name_or_id) == int:
+                data_source = models.DataSource.get_by_id(data_source_name_or_id)
+            else:
+                data_source = models.DataSource.get(models.DataSource.name==data_source_name_or_id)
+        except models.DataSource.DoesNotExist:
+            raise Exception("Wrong data source name/id: %s." % data_source_name_or_id)
+
+        query_runner = get_query_runner(data_source.type, data_source.options)
+
+        data, error = query_runner.run_query(query)
+        if error is not None:
+            raise Exception(error)
+
+        # TODO: allow avoiding the json.dumps/loads in same process
+        return json.loads(data)
+
+    def get_query_result(self, query_id):
+        try:
+            query = models.Query.get_by_id(query_id)
+        except models.Query.DoesNotExist:
+            raise Exception("Query id %s does not exist." % query_id)
+
+        if query.latest_query_data is None:
+            raise Exception("Query does not have results yet.")
+
+        if query.latest_query_data.data is None:
+            raise Exception("Query does not have results yet.")
+
+        return json.loads(query.latest_query_data.data)
+
     def run_query(self, query):
         try:
@@ -136,21 +156,23 @@

             code = compile_restricted(query, '<string>', 'exec')

-            safe_builtins["_write_"] = custom_write
-            safe_builtins["__import__"] = custom_import
+            safe_builtins["_write_"] = self.custom_write
+            safe_builtins["__import__"] = self.custom_import
             safe_builtins["_getattr_"] = getattr
             safe_builtins["getattr"] = getattr
             safe_builtins["_setattr_"] = setattr
             safe_builtins["setattr"] = setattr
-            safe_builtins["_getitem_"] = custom_get_item
-
-            script_locals = { "result" : { "rows" : [], "columns" : [] } }
+            safe_builtins["_getitem_"] = self.custom_get_item
+            safe_builtins["_getiter_"] = self.custom_get_iter
+            safe_builtins["_print_"] = self._custom_print

             restricted_globals = dict(__builtins__=safe_builtins)
-            restricted_globals["get_query_result"] = get_query_result
-            restricted_globals["execute_query"] = execute_query
-            restricted_globals["add_result_column"] = add_result_column
-            restricted_globals["add_result_row"] = add_result_row
+            restricted_globals["get_query_result"] = self.get_query_result
+            restricted_globals["execute_query"] = self.execute_query
+            restricted_globals["add_result_column"] = self.add_result_column
+            restricted_globals["add_result_row"] = self.add_result_row
+            restricted_globals["disable_print_log"] = self._custom_print.disable
+            restricted_globals["enable_print_log"] = self._custom_print.enable

             restricted_globals["TYPE_DATETIME"] = TYPE_DATETIME
             restricted_globals["TYPE_BOOLEAN"] = TYPE_BOOLEAN
@@ -163,17 +185,17 @@
             # One option is to use ETA with Celery + timeouts on workers
             # And replacement of worker process every X requests handled.

-            exec(code) in restricted_globals, script_locals
+            exec(code) in restricted_globals, self._script_locals

-            if script_locals['result'] is None:
-                raise Exception("result wasn't set to value.")
-
-            json_data = json.dumps(script_locals['result'])
+            result = self._script_locals['result']
+            result['log'] = self._custom_print.lines
+            json_data = json.dumps(result)
         except KeyboardInterrupt:
             error = "Query cancelled by user."
             json_data = None
         except Exception as e:
-            raise sys.exc_info()[1], None, sys.exc_info()[2]
+            error = str(e)
+            json_data = None

         return json_data, error

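With the helpers now registered under the same global names, a user script executed by this runner would look roughly like this (an illustrative sketch; the query id 42 is hypothetical, and "integer" assumes TYPE_INTEGER is the string "integer"):

    result = {}
    add_result_column(result, "count", "Count", "integer")
    add_result_row(result, {"count": len(get_query_result(42)["rows"])})
    print "this line is captured into result['log'] by CustomPrint"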
redash/saml_auth.py (new file, 145 lines)
@@ -0,0 +1,145 @@
# Copyright 2015 Okta, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import logging
from flask.ext.login import login_user
import requests
from flask import redirect, url_for, Blueprint, request
from flask_oauth import OAuth
from redash import models, settings
from saml2 import (
    BINDING_HTTP_POST,
    BINDING_HTTP_REDIRECT,
    entity,
)
from saml2.client import Saml2Client
from saml2.config import Config as Saml2Config


logger = logging.getLogger('saml_auth')

blueprint = Blueprint('saml_auth', __name__)

def get_saml_client():
    '''
    Return saml configuration.

    The configuration is a hash for use by saml2.config.Config
    '''

    if settings.SAML_CALLBACK_SERVER_NAME:
        acs_url = settings.SAML_CALLBACK_SERVER_NAME + url_for("saml_auth.idp_initiated")
    else:
        acs_url = url_for("saml_auth.idp_initiated", _external=True)

    # NOTE:
    # Ideally, this should fetch the metadata and pass it to
    # PySAML2 via the "inline" metadata type.
    # However, this method doesn't seem to work on PySAML2 v2.4.0
    #
    # SAML metadata changes very rarely. On a production system,
    # this data should be cached as appropriate for your production system.
    rv = requests.get(settings.SAML_METADATA_URL)
    import tempfile
    tmp = tempfile.NamedTemporaryFile()
    f = open(tmp.name, 'w')
    f.write(rv.text)
    f.close()

    saml_settings = {
        'metadata': {
            # 'inline': metadata,
            "local": [tmp.name]
        },
        'service': {
            'sp': {
                'endpoints': {
                    'assertion_consumer_service': [
                        (acs_url, BINDING_HTTP_REDIRECT),
                        (acs_url, BINDING_HTTP_POST)
                    ],
                },
                # Don't verify that the incoming requests originate from us via
                # the built-in cache for authn request ids in pysaml2
                'allow_unsolicited': True,
                # Don't sign authn requests, since signed requests only make
                # sense in a situation where you control both the SP and IdP
                'authn_requests_signed': False,
                'logout_requests_signed': True,
                'want_assertions_signed': True,
                'want_response_signed': False,
            },
        },
    }
    spConfig = Saml2Config()
    spConfig.load(saml_settings)
    spConfig.allow_unknown_attributes = True
    saml_client = Saml2Client(config=spConfig)
    tmp.close()
    return saml_client


@blueprint.route("/saml/callback", methods=['POST'])
def idp_initiated():
    saml_client = get_saml_client()
    authn_response = saml_client.parse_authn_request_response(
        request.form['SAMLResponse'],
        entity.BINDING_HTTP_POST)
    authn_response.get_identity()
    user_info = authn_response.get_subject()
    email = user_info.text
    name = "%s %s" % (authn_response.ava['FirstName'][0], authn_response.ava['LastName'][0])

    # This is what is known as "Just In Time (JIT) provisioning".
    # What that means is that, if a user in a SAML assertion
    # isn't in the user store, we create that user first, then log them in
    try:
        user_object = models.User.get(models.User.email == email)
        if user_object.name != name:
            logger.debug("Updating user name (%r -> %r)", user_object.name, name)
            user_object.name = name
            user_object.save()
    except models.User.DoesNotExist:
        logger.debug("Creating user object (%r)", name)
        user_object = models.User.create(name=name, email=email, groups=models.User.DEFAULT_GROUPS)

    login_user(user_object, remember=True)
    url = url_for('index')

    return redirect(url)

@blueprint.route("/saml/login")
def sp_initiated():
    if not settings.SAML_METADATA_URL:
        logger.error("Cannot invoke saml endpoint without metadata url in settings.")
        return redirect(url_for('index'))

    saml_client = get_saml_client()
    reqid, info = saml_client.prepare_for_authenticate()

    redirect_url = None
    # Select the IdP URL to send the AuthN request to
    for key, value in info['headers']:
        if key == 'Location':  # string comparison needs ==, not the identity check "is"
            redirect_url = value
    response = redirect(redirect_url, code=302)
    # NOTE:
    # I realize I _technically_ don't need to set Cache-Control or Pragma:
    #   http://stackoverflow.com/a/5494469
    # However, Section 3.2.3.2 of the SAML spec suggests they are set:
    #   http://docs.oasis-open.org/security/saml/v2.0/saml-bindings-2.0-os.pdf
    # We set those headers here as a "belt and suspenders" approach,
    # since enterprise environments don't always conform to RFCs
    response.headers['Cache-Control'] = 'no-cache, no-store'
    response.headers['Pragma'] = 'no-cache'
    return response
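Hooking this blueprint up relies on the SAML settings added to settings.py below; in environment terms that is roughly (hypothetical IdP and host URLs):

    # REDASH_SAML_METADATA_URL=https://idp.example.com/app/metadata
    # REDASH_SAML_CALLBACK_SERVER_NAME=https://redash.example.com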
redash/settings.py
@@ -32,10 +32,25 @@ def array_from_string(str):
     return array


+def set_from_string(str):
+    return set(array_from_string(str))
+
+
 def parse_boolean(str):
     return json.loads(str.lower())


+def all_settings():
+    from types import ModuleType
+
+    settings = {}
+    for name, item in globals().iteritems():
+        if not callable(item) and not name.startswith("__") and not isinstance(item, ModuleType):
+            settings[name] = item
+
+    return settings
+
+
 NAME = os.environ.get('REDASH_NAME', 're:dash')

 REDIS_URL = os.environ.get('REDASH_REDIS_URL', "redis://localhost:6379/0")
@@ -53,19 +68,23 @@ CELERY_BACKEND = os.environ.get("REDASH_CELERY_BACKEND", REDIS_URL)

 # The following enables periodic job (every 5 minutes) of removing unused query results. Behind this "feature flag" until
 # proved to be "safe".
-QUERY_RESULTS_CLEANUP_ENABLED = parse_boolean(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_ENABLED", "false"))
+QUERY_RESULTS_CLEANUP_ENABLED = parse_boolean(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_ENABLED", "true"))

-AUTH_TYPE = os.environ.get("REDASH_AUTH_TYPE", "hmac")
+AUTH_TYPE = os.environ.get("REDASH_AUTH_TYPE", "api_key")
 PASSWORD_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_PASSWORD_LOGIN_ENABLED", "true"))

 # Google Apps domain to allow access from; any user with email in this Google Apps will be allowed
 # access
-GOOGLE_APPS_DOMAIN = os.environ.get("REDASH_GOOGLE_APPS_DOMAIN", "")
+GOOGLE_APPS_DOMAIN = set_from_string(os.environ.get("REDASH_GOOGLE_APPS_DOMAIN", ""))

 GOOGLE_CLIENT_ID = os.environ.get("REDASH_GOOGLE_CLIENT_ID", "")
 GOOGLE_CLIENT_SECRET = os.environ.get("REDASH_GOOGLE_CLIENT_SECRET", "")
 GOOGLE_OAUTH_ENABLED = GOOGLE_CLIENT_ID and GOOGLE_CLIENT_SECRET

+SAML_METADATA_URL = os.environ.get("REDASH_SAML_METADATA_URL", "")
+SAML_LOGIN_ENABLED = SAML_METADATA_URL != ""
+SAML_CALLBACK_SERVER_NAME = os.environ.get("REDASH_SAML_CALLBACK_SERVER_NAME", "")
+
 STATIC_ASSETS_PATH = fix_assets_path(os.environ.get("REDASH_STATIC_ASSETS_PATH", "../rd_ui/app/"))
 JOB_EXPIRY_TIME = int(os.environ.get("REDASH_JOB_EXPIRY_TIME", 3600 * 6))
 COOKIE_SECRET = os.environ.get("REDASH_COOKIE_SECRET", "c292a0a3aa32397cdb050e233733900f")
@@ -73,15 +92,41 @@ LOG_LEVEL = os.environ.get("REDASH_LOG_LEVEL", "INFO")
 CLIENT_SIDE_METRICS = parse_boolean(os.environ.get("REDASH_CLIENT_SIDE_METRICS", "false"))
 ANALYTICS = os.environ.get("REDASH_ANALYTICS", "")

+# Mail settings:
+MAIL_SERVER = os.environ.get('REDASH_MAIL_SERVER', 'localhost')
+MAIL_PORT = int(os.environ.get('REDASH_MAIL_PORT', 25))
+MAIL_USE_TLS = parse_boolean(os.environ.get('REDASH_MAIL_USE_TLS', 'false'))
+MAIL_USE_SSL = parse_boolean(os.environ.get('REDASH_MAIL_USE_SSL', 'false'))
+MAIL_USERNAME = os.environ.get('REDASH_MAIL_USERNAME', None)
+MAIL_PASSWORD = os.environ.get('REDASH_MAIL_PASSWORD', None)
+MAIL_DEFAULT_SENDER = os.environ.get('REDASH_MAIL_DEFAULT_SENDER', None)
+MAIL_MAX_EMAILS = os.environ.get('REDASH_MAIL_MAX_EMAILS', None)
+MAIL_ASCII_ATTACHMENTS = parse_boolean(os.environ.get('REDASH_MAIL_ASCII_ATTACHMENTS', 'false'))
+
+HOST = os.environ.get('REDASH_HOST', '')
+
+# CORS settings for the Query Result API (and possibly future external APIs).
+# In most cases all you need to do is set REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN
+# to the calling domain (or domains in a comma separated list).
+ACCESS_CONTROL_ALLOW_ORIGIN = set_from_string(os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN", ""))
+ACCESS_CONTROL_ALLOW_CREDENTIALS = parse_boolean(os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_CREDENTIALS", "false"))
+ACCESS_CONTROL_REQUEST_METHOD = os.environ.get("REDASH_CORS_ACCESS_CONTROL_REQUEST_METHOD", "GET, POST, PUT")
+ACCESS_CONTROL_ALLOW_HEADERS = os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_HEADERS", "Content-Type")
+
 # Query Runners
 QUERY_RUNNERS = array_from_string(os.environ.get("REDASH_ENABLED_QUERY_RUNNERS", ",".join([
     'redash.query_runner.big_query',
+    'redash.query_runner.google_spreadsheets',
     'redash.query_runner.graphite',
     'redash.query_runner.mongodb',
     'redash.query_runner.mysql',
     'redash.query_runner.pg',
     'redash.query_runner.script',
     'redash.query_runner.url',
+    'redash.query_runner.influx_db',
+    'redash.query_runner.elasticsearch',
+    'redash.query_runner.presto',
+    'redash.query_runner.impala_ds',
 ])))

 # Features:

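As the CORS comment above says, the common case is a single allowed origin; with set_from_string the comma-separated env var becomes a set (hypothetical domain):

    # REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN=http://bi.example.com
    # yields:
    ACCESS_CONTROL_ALLOW_ORIGIN = {"http://bi.example.com"}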
redash/tasks.py
@@ -1,11 +1,11 @@
 import time
 import datetime
 import logging
+from flask.ext.mail import Message
 import redis
 from celery import Task
 from celery.result import AsyncResult
 from celery.utils.log import get_task_logger
-from redash import redis_connection, models, statsd_client, settings
+from redash import redis_connection, models, statsd_client, settings, utils, mail
 from redash.utils import gen_query_hash
 from redash.worker import celery
 from redash.query_runner import get_query_runner
@@ -223,7 +223,7 @@ def cleanup_query_results():
 @celery.task(base=BaseTask)
 def refresh_schemas():
     """
-    Refershs the datasources schema.
+    Refreshs the datasources schema.
     """

     for ds in models.DataSource.all():
@@ -231,6 +231,39 @@
         ds.get_schema(refresh=True)


+@celery.task(bind=True, base=BaseTask)
+def check_alerts_for_query(self, query_id):
+    from redash.wsgi import app
+
+    logger.debug("Checking query %d for alerts", query_id)
+    query = models.Query.get_by_id(query_id)
+    for alert in query.alerts:
+        alert.query = query
+        new_state = alert.evaluate()
+        if new_state != alert.state:
+            logger.info("Alert %d new state: %s", alert.id, new_state)
+            old_state = alert.state
+            alert.update_instance(state=new_state)
+
+            if old_state == models.Alert.UNKNOWN_STATE and new_state == models.Alert.OK_STATE:
+                logger.debug("Skipping notification (previous state was unknown and now it's ok).")
+                continue
+
+            # message = Message
+            recipients = [s.email for s in alert.subscribers()]
+            logger.debug("Notifying: %s", recipients)
+            html = """
+            Check <a href="{host}/alerts/{alert_id}">alert</a> / check <a href="{host}/queries/{query_id}">query</a>.
+            """.format(host=settings.HOST, alert_id=alert.id, query_id=query.id)
+
+            with app.app_context():
+                message = Message(recipients=recipients,
+                                  subject="[{1}] {0}".format(alert.name, new_state.upper()),
+                                  html=html)
+
+                mail.send(message)
+
+
 @celery.task(bind=True, base=BaseTask, track_started=True)
 def execute_query(self, query, data_source_id, metadata):
     start_time = time.time()
@@ -252,11 +285,11 @@
         metadata['Query Hash'] = query_hash
         metadata['Queue'] = self.request.delivery_info['routing_key']

-        annotation = ", ".join(["{}: {}".format(k, v) for k, v in metadata.iteritems()])
+        annotation = u", ".join([u"{}: {}".format(k, v) for k, v in metadata.iteritems()])

-        logging.debug("Annotation: %s", annotation)
+        logging.debug(u"Annotation: %s", annotation)

-        annotated_query = "/* {} */ {}".format(annotation, query)
+        annotated_query = u"/* {} */ {}".format(annotation, query)
     else:
         annotated_query = query

@@ -272,7 +305,9 @@
         redis_connection.delete(QueryTask._job_lock_id(query_hash, data_source.id))

         if not error:
-            query_result = models.QueryResult.store_result(data_source.id, query_hash, query, data, run_time, datetime.datetime.utcnow())
+            query_result, updated_query_ids = models.QueryResult.store_result(data_source.id, query_hash, query, data, run_time, utils.utcnow())
+            for query_id in updated_query_ids:
+                check_alerts_for_query.delay(query_id)
         else:
             raise Exception(error)

redash/utils.py
@@ -4,9 +4,11 @@ import codecs
 import decimal
 import datetime
 import json
+import random
 import re
 import hashlib
 import sqlparse
+import pytz

 COMMENTS_REGEX = re.compile("/\*.*?\*/")

@@ -62,6 +64,14 @@ class SQLMetaData(object):
         return False


+def utcnow():
+    """Return datetime.now value with timezone specified.
+
+    Without the timezone data, when the timestamp is stored to the database it gets the current timezone of the server,
+    which leads to errors in calculations.
+    """
+    return datetime.datetime.now(pytz.utc)
+
 def slugify(s):
     return re.sub('[^a-z0-9_\-]+', '-', s.lower())

@@ -79,6 +89,14 @@ def gen_query_hash(sql):
     return hashlib.md5(sql.encode('utf-8')).hexdigest()


+def generate_token(length):
+    chars = ('abcdefghijklmnopqrstuvwxyz'
+             'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
+             '0123456789')
+
+    rand = random.SystemRandom()
+    return ''.join(rand.choice(chars) for x in range(length))
+
 class JSONEncoder(json.JSONEncoder):
     """Custom JSON encoding class, to handle Decimal and datetime.date instances.
     """
@@ -86,9 +104,9 @@ class JSONEncoder(json.JSONEncoder):
         if isinstance(o, decimal.Decimal):
             return float(o)

-        if isinstance(o, datetime.date):
+        if isinstance(o, (datetime.date, datetime.time, datetime.timedelta)):
             return o.isoformat()

         super(JSONEncoder, self).default(o)

@@ -128,4 +146,4 @@ class UnicodeWriter:

     def writerows(self, rows):
         for row in rows:
-            self.writerow(row)
+            self.writerow(row)
redash/wsgi.py
@@ -1,8 +1,9 @@
 import json
 from flask import Flask, make_response
+from werkzeug.wrappers import Response
 from flask.ext.restful import Api

-from redash import settings, utils
+from redash import settings, utils, mail
 from redash.models import db
 from redash.admin import init_admin

@@ -21,13 +22,18 @@ init_admin(app)
 # configure our database
 settings.DATABASE_CONFIG.update({'threadlocals': True})
 app.config['DATABASE'] = settings.DATABASE_CONFIG
+app.config.update(settings.all_settings())
 db.init_app(app)
+mail.init_app(app)

 from redash.authentication import setup_authentication
-auth = setup_authentication(app)
+setup_authentication(app)

 @api.representation('application/json')
 def json_representation(data, code, headers=None):
+    # Flask-Restful checks only for flask.Response but flask-login uses werkzeug.wrappers.Response
+    if isinstance(data, Response):
+        return data
     resp = make_response(json.dumps(data, cls=utils.JSONEncoder), code)
     resp.headers.extend(headers or {})
     return resp

requirements.txt
@@ -1,8 +1,9 @@
 Flask==0.10.1
 Flask-Admin==1.1.0
 Flask-RESTful==0.2.10
-Flask-Login==0.2.9
+Flask-Login==0.2.11
 Flask-OAuth==0.12
+flask-mail==0.9.1
 passlib==1.6.2
 Jinja2==2.7.2
 MarkupSafe==0.18
@@ -28,3 +29,6 @@ jsonschema==2.4.0
 click==3.3
 RestrictedPython==3.6.0
 wtf-peewee==0.2.3
+pysaml2==2.4.0
+pycrypto==2.6.1
+funcy==1.5
@@ -98,10 +98,8 @@ if [ ! -f "/opt/redash/.env" ]; then
 fi

 # Install latest version
-# REDASH_VERSION=${REDASH_VERSION-0.4.0.b589}
-# modified by @fedex1 3/15/2015 seems to be the latest version at this point in time.
-REDASH_VERSION=${REDASH_VERSION-0.6.0.b722}
-LATEST_URL="https://github.com/EverythingMe/redash/releases/download/v${REDASH_VERSION/.b/%2Bb}/redash.$REDASH_VERSION.tar.gz"
+REDASH_VERSION=${REDASH_VERSION-0.6.3.b906}
+LATEST_URL="https://github.com/EverythingMe/redash/releases/download/v${REDASH_VERSION}/redash.$REDASH_VERSION.tar.gz"
 VERSION_DIR="/opt/redash/redash.$REDASH_VERSION"
-REDASH_TARBALL=/tmp/redash.tar.gz
+REDASH_TARBALL=/tmp/redash.tar.gz
@@ -145,6 +143,7 @@ if [ $pg_user_exists -ne 0 ]; then
     REDASH_READER_PASSWORD=$(pwgen -1)
     sudo -u postgres psql -c "CREATE ROLE redash_reader WITH PASSWORD '$REDASH_READER_PASSWORD' NOCREATEROLE NOCREATEDB NOSUPERUSER LOGIN"
     sudo -u redash psql -c "grant select(id,name,type) ON data_sources to redash_reader;" redash
+    sudo -u redash psql -c "grant select(id,name) ON users to redash_reader;" redash
     sudo -u redash psql -c "grant select on activity_log, events, queries, dashboards, widgets, visualizations, query_results to redash_reader;" redash

 cd /opt/redash/current
@@ -162,6 +161,18 @@ pip install MySQL-python==1.2.5
 # Mongo dependencies:
 pip install pymongo==2.7.2

+# Google spreadsheets:
+pip install gspread
+
+# InfluxDB
+pip install influxdb
+
+# Presto
+pip install pyhive
+
+# Impala
+pip install impyla
+
 # Setup supervisord + sysv init startup script
 sudo -u redash mkdir -p /opt/redash/supervisord
 pip install supervisor==3.1.2 # TODO: move to requirements.txt

@@ -1,9 +1,6 @@
-export REDASH_CONNECTION_ADAPTER=pg
-export REDASH_CONNECTION_STRING="dbname=redash"
-export REDASH_STATIC_ASSETS_PATH="../rd_ui/dist/"
 export REDASH_LOG_LEVEL="INFO"
-export REDASH_WORKERS_COUNT=6
 export REDASH_REDIS_URL=redis://localhost:6379/1
 export REDASH_DATABASE_URL="postgresql://redash"
 export REDASH_COOKIE_SECRET=veryverysecret
-export REDASH_GOOGLE_APPS_DOMAIN=
+export REDASH_GOOGLE_APPS_DOMAIN=
@@ -20,8 +20,12 @@ autorestart=true
 stdout_logfile=/opt/redash/logs/api.log
 stderr_logfile=/opt/redash/logs/api_error.log

+# There are two queue types here: one for ad-hoc queries, and one for the refresh of scheduled queries
+# (note that "scheduled_queries" appears only in the queue list of "redash_celery_scheduled").
+# The default concurrency level for each is 2 (-c2), you can increase based on your machine's resources.
+
 [program:redash_celery]
-command=/opt/redash/current/bin/run celery worker --app=redash.worker --beat -Qqueries,celery,scheduled_queries
+command=/opt/redash/current/bin/run celery worker --app=redash.worker --beat -c2 -Qqueries,celery
 process_name=redash_celery
 numprocs=1
 priority=999
@@ -29,3 +33,13 @@ autostart=true
|
||||
autorestart=true
|
||||
stdout_logfile=/opt/redash/logs/celery.log
|
||||
stderr_logfile=/opt/redash/logs/celery_error.log
|
||||
|
||||
[program:redash_celery_scheduled]
|
||||
command=/opt/redash/current/bin/run celery worker --app=redash.worker -c2 -Qscheduled_queries
|
||||
process_name=redash_celery_scheduled
|
||||
numprocs=1
|
||||
priority=999
|
||||
autostart=true
|
||||
autorestart=true
|
||||
stdout_logfile=/opt/redash/logs/celery.log
|
||||
stderr_logfile=/opt/redash/logs/celery_error.log
|
||||
|
||||
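The point of the split above is isolation: a pile of slow scheduled refreshes can no longer starve ad-hoc queries, because each worker consumes its own queues. A minimal sketch of how Celery routing puts a task on one queue or the other (illustrative only, not Redash's actual task code; the task name and body are placeholders, the queue names match the -Q flags above):

    # Illustrative sketch -- not Redash's actual task code.
    from celery import Celery

    app = Celery('sketch', broker='redis://localhost:6379/1')  # matches REDASH_REDIS_URL

    @app.task
    def execute_query(sql):
        pass  # placeholder body

    # Picked up by [program:redash_celery] (-Qqueries,celery):
    execute_query.apply_async(args=('SELECT 1',), queue='queries')

    # Picked up by [program:redash_celery_scheduled] (-Qscheduled_queries):
    execute_query.apply_async(args=('SELECT 1',), queue='scheduled_queries')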
@@ -1,6 +1,5 @@
 import datetime
 import redash.models
-from redash.utils import gen_query_hash
+from redash.utils import gen_query_hash, utcnow


 class ModelFactory(object):

@@ -66,7 +65,7 @@ query_factory = ModelFactory(redash.models.Query,
 query_result_factory = ModelFactory(redash.models.QueryResult,
                                     data='{"columns":{}, "rows":[]}',
                                     runtime=1,
-                                    retrieved_at=datetime.datetime.utcnow,
+                                    retrieved_at=utcnow,
                                     query="SELECT 1",
                                     query_hash=gen_query_hash('SELECT 1'),
                                     data_source=data_source_factory.create)
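The datetime.datetime.utcnow() to utcnow() swap that recurs throughout this commit trades naive timestamps for timezone-aware ones, so comparisons against timezone-aware values coming back from PostgreSQL stop failing. The helper's body isn't shown in the diff; a minimal sketch of what redash.utils.utcnow presumably does (assuming pytz, which Redash already depends on):

    import datetime
    import pytz

    def utcnow():
        # Like datetime.datetime.utcnow(), but carries tzinfo (UTC) so it can
        # be compared safely with timezone-aware database timestamps.
        return datetime.datetime.now(pytz.utc)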
@@ -1,9 +1,10 @@
 from flask.ext.login import current_user
+from flask import request
 from mock import patch
 import time
 from tests import BaseTestCase
 from redash import models
 from redash.google_oauth import create_and_login_user
-from redash.authentication import ApiKeyAuthentication
+from redash.authentication import api_key_load_user_from_request, hmac_load_user_from_request, sign
 from tests.factories import user_factory, query_factory
 from redash.wsgi import app
@@ -18,29 +19,72 @@ class TestApiKeyAuthentication(BaseTestCase):
         self.query = query_factory.create(api_key=self.api_key)

     def test_no_api_key(self):
-        auth = ApiKeyAuthentication()
         with app.test_client() as c:
             rv = c.get('/api/queries/{0}'.format(self.query.id))
-            self.assertFalse(auth.verify_authentication())
+            self.assertIsNone(api_key_load_user_from_request(request))

     def test_wrong_api_key(self):
-        auth = ApiKeyAuthentication()
         with app.test_client() as c:
             rv = c.get('/api/queries/{0}'.format(self.query.id), query_string={'api_key': 'whatever'})
-            self.assertFalse(auth.verify_authentication())
+            self.assertIsNone(api_key_load_user_from_request(request))

     def test_correct_api_key(self):
-        auth = ApiKeyAuthentication()
         with app.test_client() as c:
             rv = c.get('/api/queries/{0}'.format(self.query.id), query_string={'api_key': self.api_key})
-            self.assertTrue(auth.verify_authentication())
+            self.assertIsNotNone(api_key_load_user_from_request(request))

     def test_no_query_id(self):
-        auth = ApiKeyAuthentication()
         with app.test_client() as c:
             rv = c.get('/api/queries', query_string={'api_key': self.api_key})
-            self.assertFalse(auth.verify_authentication())
+            self.assertIsNone(api_key_load_user_from_request(request))
+
+    def test_user_api_key(self):
+        user = user_factory.create(api_key="user_key")
+        with app.test_client() as c:
+            rv = c.get('/api/queries/', query_string={'api_key': user.api_key})
+            self.assertEqual(user.id, api_key_load_user_from_request(request).id)
+
+
+class TestHMACAuthentication(BaseTestCase):
+    #
+    # This is a bad way to write these tests, but the way Flask works doesn't make it easy to write them properly...
+    #
+    def setUp(self):
+        super(TestHMACAuthentication, self).setUp()
+        self.api_key = 10
+        self.query = query_factory.create(api_key=self.api_key)
+        self.path = '/api/queries/{0}'.format(self.query.id)
+        self.expires = time.time() + 1800
+
+    def signature(self, expires):
+        return sign(self.query.api_key, self.path, expires)
+
+    def test_no_signature(self):
+        with app.test_client() as c:
+            rv = c.get(self.path)
+            self.assertIsNone(hmac_load_user_from_request(request))
+
+    def test_wrong_signature(self):
+        with app.test_client() as c:
+            rv = c.get(self.path, query_string={'signature': 'whatever', 'expires': self.expires})
+            self.assertIsNone(hmac_load_user_from_request(request))
+
+    def test_correct_signature(self):
+        with app.test_client() as c:
+            rv = c.get('/api/queries/{0}'.format(self.query.id), query_string={'signature': self.signature(self.expires), 'expires': self.expires})
+            self.assertIsNotNone(hmac_load_user_from_request(request))
+
+    def test_no_query_id(self):
+        with app.test_client() as c:
+            rv = c.get('/api/queries', query_string={'api_key': self.api_key})
+            self.assertIsNone(hmac_load_user_from_request(request))
+
+    def test_user_api_key(self):
+        user = user_factory.create(api_key="user_key")
+        path = '/api/queries/'
+        with app.test_client() as c:
+            signature = sign(user.api_key, path, self.expires)
+            rv = c.get(path, query_string={'signature': signature, 'expires': self.expires, 'user_id': user.id})
+            self.assertEqual(user.id, hmac_load_user_from_request(request).id)


 class TestCreateAndLoginUser(BaseTestCase):
     def test_logins_valid_user(self):
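The sign() helper these tests import is not shown in the diff. A plausible sketch, consistent with how the tests call it (an HMAC over the request path plus an expiry timestamp, keyed by the query's or user's API key), is the following; treat the exact digest and argument handling as assumptions, the real implementation lives in redash/authentication.py:

    import hashlib
    import hmac

    def sign(key, path, expires):
        # Sketch: HMAC-SHA1 of the request path and the expiry timestamp,
        # keyed by the API key. Details are assumed, not taken from this diff.
        h = hmac.new(str(key), msg=path, digestmod=hashlib.sha1)
        h.update(str(expires))
        return h.hexdigest()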
@@ -319,6 +319,17 @@ class QueryResultAPITest(BaseTestCase, AuthenticationTestMixin):
         self.paths = []
         super(QueryResultAPITest, self).setUp()

+    def test_post_result_list(self):
+        data_source = data_source_factory.create()
+        query_result = query_result_factory.create()
+        query = query_factory.create()
+
+        with app.test_client() as c, authenticated_user(c):
+            rv = json_request(c.post, '/api/query_results',
+                              data={'data_source_id': data_source.id,
+                                    'query': query.query})
+            self.assertEquals(rv.status_code, 200)
+

 class JobAPITest(BaseTestCase, AuthenticationTestMixin):
     def setUp(self):
@@ -326,58 +337,6 @@ class JobAPITest(BaseTestCase, AuthenticationTestMixin):
         super(JobAPITest, self).setUp()


-class CsvQueryResultAPITest(BaseTestCase, AuthenticationTestMixin):
-    def setUp(self):
-        super(CsvQueryResultAPITest, self).setUp()
-
-        self.paths = []
-        self.query_result = query_result_factory.create()
-        self.query = query_factory.create()
-        self.path = '/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id)
-
-    # TODO: factor out the HMAC authentication tests
-
-    def signature(self, expires):
-        return sign(self.query.api_key, self.path, expires)
-
-    def test_redirect_when_unauthenticated(self):
-        with app.test_client() as c:
-            rv = c.get(self.path)
-            self.assertEquals(rv.status_code, 302)
-
-    def test_redirect_for_wrong_signature(self):
-        with app.test_client() as c:
-            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id), query_string={'signature': 'whatever', 'expires': 0})
-            self.assertEquals(rv.status_code, 302)
-
-    def test_redirect_for_correct_signature_and_wrong_expires(self):
-        with app.test_client() as c:
-            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id), query_string={'signature': self.signature(0), 'expires': 0})
-            self.assertEquals(rv.status_code, 302)
-
-    def test_redirect_for_correct_signature_and_no_expires(self):
-        with app.test_client() as c:
-            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id), query_string={'signature': self.signature(time.time()+3600)})
-            self.assertEquals(rv.status_code, 302)
-
-    def test_redirect_for_correct_signature_and_expires_too_long(self):
-        with app.test_client() as c:
-            expires = time.time()+(10*3600)
-            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id), query_string={'signature': self.signature(expires), 'expires': expires})
-            self.assertEquals(rv.status_code, 302)
-
-    def test_returns_200_for_correct_signature(self):
-        with app.test_client() as c:
-            expires = time.time()+1800
-            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id), query_string={'signature': self.signature(expires), 'expires': expires})
-            self.assertEquals(rv.status_code, 200)
-
-    def test_returns_200_for_authenticated_user(self):
-        with app.test_client() as c, authenticated_user(c):
-            rv = c.get('/api/queries/{0}/results/{1}.csv'.format(self.query.id, self.query_result.id))
-            self.assertEquals(rv.status_code, 200)
-
-
 class TestLogin(BaseTestCase):
     def setUp(self):
         settings.PASSWORD_LOGIN_ENABLED = True
@@ -518,6 +477,6 @@ class DataSourceTest(BaseTestCase):
         admin = user_factory.create(groups=['admin', 'default'])
         with app.test_client() as c, authenticated_user(c, user=admin):
             rv = json_request(c.post, '/api/data_sources',
-                              data={'name': 'DS 1', 'type': 'pg', 'options': '{"dbname": "redash"}'})
+                              data={'name': 'DS 1', 'type': 'pg', 'options': {"dbname": "redash"}})

         self.assertEqual(rv.status_code, 200)
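The only change here is the shape of 'options': it is now posted as a JSON object rather than a JSON-encoded string. A hypothetical client call showing the difference (requests and the URL are assumptions for illustration, not part of the test suite):

    import requests  # assumption: any HTTP client would do

    requests.post('http://localhost:5000/api/data_sources', json={
        'name': 'DS 1',
        'type': 'pg',
        'options': {'dbname': 'redash'},  # previously: '{"dbname": "redash"}'
    })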
@@ -6,8 +6,7 @@ import mock
 from tests import BaseTestCase
 from redash import models
 from factories import dashboard_factory, query_factory, data_source_factory, query_result_factory, user_factory, widget_factory
-from redash.utils import gen_query_hash
-from redash import query_runner
+from redash.utils import gen_query_hash, utcnow


 class DashboardTest(BaseTestCase):
@@ -141,7 +140,7 @@ class QueryOutdatedQueriesTest(BaseTestCase):
         self.assertNotIn(query, queries)

     def test_outdated_queries_works_with_specific_time_schedule(self):
-        half_an_hour_ago = datetime.datetime.utcnow() - datetime.timedelta(minutes=30)
+        half_an_hour_ago = utcnow() - datetime.timedelta(minutes=30)
         query = query_factory.create(schedule=half_an_hour_ago.strftime('%H:%M'))
         query_result = query_result_factory.create(query=query, retrieved_at=half_an_hour_ago-datetime.timedelta(days=1))
         query.latest_query_data = query_result
@@ -165,7 +164,7 @@ class QueryArchiveTest(BaseTestCase):
     def test_archived_query_doesnt_return_in_all(self):
         query = query_factory.create(schedule="1")
         yesterday = datetime.datetime.now() - datetime.timedelta(days=1)
-        query_result = models.QueryResult.store_result(query.data_source.id, query.query_hash, query.query, "1",
+        query_result, _ = models.QueryResult.store_result(query.data_source.id, query.query_hash, query.query, "1",
                                                        123, yesterday)

         query.latest_query_data = query_result
@@ -326,11 +325,11 @@ class TestQueryResultStoreResult(BaseTestCase):
         self.query = "SELECT 1"
         self.query_hash = gen_query_hash(self.query)
         self.runtime = 123
-        self.utcnow = datetime.datetime.utcnow()
+        self.utcnow = utcnow()
         self.data = "data"

     def test_stores_the_result(self):
-        query_result = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query,
+        query_result, _ = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query,
                                                        self.data, self.runtime, self.utcnow)

         self.assertEqual(query_result.data, self.data)
@@ -345,7 +344,7 @@ class TestQueryResultStoreResult(BaseTestCase):
         query2 = query_factory.create(query=self.query, data_source=self.data_source)
         query3 = query_factory.create(query=self.query, data_source=self.data_source)

-        query_result = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query, self.data,
+        query_result, _ = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query, self.data,
                                                        self.runtime, self.utcnow)

         self.assertEqual(models.Query.get_by_id(query1.id)._data['latest_query_data'], query_result.id)
@@ -357,7 +356,7 @@ class TestQueryResultStoreResult(BaseTestCase):
         query2 = query_factory.create(query=self.query, data_source=self.data_source)
         query3 = query_factory.create(query=self.query + "123", data_source=self.data_source)

-        query_result = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query, self.data,
+        query_result, _ = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query, self.data,
                                                        self.runtime, self.utcnow)

         self.assertEqual(models.Query.get_by_id(query1.id)._data['latest_query_data'], query_result.id)
@@ -369,7 +368,7 @@ class TestQueryResultStoreResult(BaseTestCase):
         query2 = query_factory.create(query=self.query, data_source=self.data_source)
         query3 = query_factory.create(query=self.query, data_source=data_source_factory.create())

-        query_result = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query, self.data,
+        query_result, _ = models.QueryResult.store_result(self.data_source.id, self.query_hash, self.query, self.data,
                                                        self.runtime, self.utcnow)

         self.assertEqual(models.Query.get_by_id(query1.id)._data['latest_query_data'], query_result.id)
@@ -2,6 +2,7 @@ import datetime
 from mock import patch, call, ANY
 from tests import BaseTestCase
 from tests.factories import query_factory, query_result_factory
+from redash.utils import utcnow
 from redash.tasks import refresh_queries

@@ -11,7 +12,7 @@ from redash.tasks import refresh_queries
 class TestRefreshQueries(BaseTestCase):
     def test_enqueues_outdated_queries(self):
         query = query_factory.create(schedule="60")
-        retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
+        retrieved_at = utcnow() - datetime.timedelta(minutes=10)
         query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
                                                    query_hash=query.query_hash)
         query.latest_query_data = query_result
@@ -23,7 +24,7 @@ class TestRefreshQueries(BaseTestCase):

     def test_skips_fresh_queries(self):
         query = query_factory.create(schedule="1200")
-        retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
+        retrieved_at = utcnow() - datetime.timedelta(minutes=10)
         query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
                                                    query_hash=query.query_hash)
@@ -33,7 +34,7 @@ class TestRefreshQueries(BaseTestCase):

     def test_skips_queries_with_no_ttl(self):
         query = query_factory.create(schedule=None)
-        retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
+        retrieved_at = utcnow() - datetime.timedelta(minutes=10)
         query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
                                                    query_hash=query.query_hash)
@@ -45,7 +46,7 @@ class TestRefreshQueries(BaseTestCase):
         query = query_factory.create(schedule="60")
         query2 = query_factory.create(schedule="60", query=query.query, query_hash=query.query_hash,
                                       data_source=query.data_source)
-        retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
+        retrieved_at = utcnow() - datetime.timedelta(minutes=10)
         query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
                                                    query_hash=query.query_hash)
         query.latest_query_data = query_result
@@ -60,7 +61,7 @@ class TestRefreshQueries(BaseTestCase):
     def test_enqueues_query_with_correct_data_source(self):
         query = query_factory.create(schedule="60")
         query2 = query_factory.create(schedule="60", query=query.query, query_hash=query.query_hash)
-        retrieved_at = datetime.datetime.utcnow() - datetime.timedelta(minutes=10)
+        retrieved_at = utcnow() - datetime.timedelta(minutes=10)
         query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
                                                    query_hash=query.query_hash)
         query.latest_query_data = query_result
@@ -79,7 +80,7 @@ class TestRefreshQueries(BaseTestCase):
         query = query_factory.create(schedule="60")
         query2 = query_factory.create(schedule="3600", query=query.query, query_hash=query.query_hash)
         import psycopg2
-        retrieved_at = datetime.datetime.utcnow().replace(tzinfo=psycopg2.tz.FixedOffsetTimezone(offset=0, name=None)) - datetime.timedelta(minutes=10)
+        retrieved_at = utcnow().replace(tzinfo=psycopg2.tz.FixedOffsetTimezone(offset=0, name=None)) - datetime.timedelta(minutes=10)
         query_result = query_result_factory.create(retrieved_at=retrieved_at, query=query.query,
                                                    query_hash=query.query_hash)
         query.latest_query_data = query_result
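Taken together, these tests pin down the schedule semantics: a numeric schedule is a time-to-live in seconds for the latest result (a result retrieved 10 minutes ago is outdated under schedule "60" but fresh under "1200"), and schedule=None disables refresh entirely; the "HH:MM" form exercised in QueryOutdatedQueriesTest above is handled separately. A minimal sketch of the TTL half of that check (illustrative only; the real logic lives in redash.models):

    import datetime

    def is_outdated(schedule, retrieved_at, now):
        # Sketch of the TTL check the tests exercise: numeric schedules are
        # seconds-to-live for the last result; None means never refresh.
        if schedule is None:
            return False
        ttl = datetime.timedelta(seconds=int(schedule))
        return retrieved_at + ttl < now

    # With a result retrieved 10 minutes ago:
    #   is_outdated("60", ten_minutes_ago, now)   -> True  (enqueued)
    #   is_outdated("1200", ten_minutes_ago, now) -> False (skipped)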