Compare commits


3 Commits

Author             SHA1         Message                Date
Arik Fraimovich    150fc6dbf0   Fix repo name          2015-04-06 12:39:30 +03:00
Arik Fraimovich    ccff9614d4   New release process    2015-04-06 12:17:47 +03:00
Arik Fraimovich    a7b881874f   Remove unused script   2015-04-06 09:38:01 +03:00
114 changed files with 1427 additions and 6081 deletions


@@ -1,6 +1,9 @@
export REDASH_STATIC_ASSETS_PATH="../rd_ui/app/"
export REDASH_LOG_LEVEL="INFO"
export REDASH_REDIS_URL=redis://localhost:6379/1
export REDASH_DATABASE_URL="postgresql://redash"
export REDASH_COOKIE_SECRET=veryverysecret
export REDASH_GOOGLE_APPS_DOMAIN=
REDASH_CONNECTION_ADAPTER=pg
REDASH_CONNECTION_STRING="dbname=data"
REDASH_STATIC_ASSETS_PATH=../rd_ui/app/
REDASH_GOOGLE_APPS_DOMAIN=
REDASH_ADMINS=
REDASH_WORKERS_COUNT=2
REDASH_COOKIE_SECRET=
REDASH_DATABASE_URL='postgresql://rd'
REDASH_LOG_LEVEL = "INFO"

.gitignore

@@ -8,7 +8,6 @@ celerybeat-schedule*
.#*
\#*#
*~
_build
# Vagrant related
.vagrant


@@ -13,7 +13,7 @@ deps:
pack:
sed -ri "s/^__version__ = '([0-9.]*)'/__version__ = '$(FULL_VERSION)'/" redash/__init__.py
tar -zcv -f $(FILENAME) --exclude="optipng*" --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *
tar -zcv -f $(FILENAME) --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *
upload:
python bin/release_manager.py $(CIRCLE_SHA1) $(BASE_VERSION) $(FILENAME)


@@ -1,5 +1,6 @@
<p align="center">
<img title="re:dash" src='http://redash.io/static/img/redash_logo.png' width="200px"/>
<img title="re:dash" src='https://raw.githubusercontent.com/EverythingMe/redash/screenshots/redash_logo.png' />
</p>
<p align="center">
<img title="Build Status" src='https://circleci.com/gh/EverythingMe/redash.png?circle-token=8a695aa5ec2cbfa89b48c275aea298318016f040'/>
@@ -10,8 +11,7 @@
Prior to **_re:dash_**, we tried to use traditional BI suites and discovered a set of bloated, technically challenged and slow tools/flows. What we were looking for was a more hacker'ish way to look at data, so we built one.
**_re:dash_** was built to allow fast and easy access to billions of records, that we process and collect using Amazon Redshift ("petabyte scale data warehouse" that "speaks" PostgreSQL).
Today **_re:dash_** has support for querying multiple databases, including: Redshift, Google BigQuery, PostgreSQL, MySQL, Graphite,
Presto, Google Spreadsheets, Cloudera Impala, Hive and custom scripts.
Today **_re:dash_** has support for querying multiple databases, including: Redshift, Google BigQuery, PostgreSQL, MySQL, Graphite and custom scripts.
**_re:dash_** consists of two parts:
@@ -28,7 +28,7 @@ You can try out the demo instance: http://demo.redash.io/ (login with any Google
## Getting Started
* [Setting up re:dash instance](http://redash.io/deployment/setup.html) (includes links to ready made AWS/GCE images).
* [Setting up re:dash instance](https://github.com/EverythingMe/redash/wiki/Setting-up-re:dash-instance) (includes links to ready made AWS/GCE images).
* Additional documentation in the [Wiki](https://github.com/everythingme/redash/wiki).


@@ -104,26 +104,9 @@ def get_changelog(commit_sha):
return "\n".join(changes)
def update_release_commit_sha(release, commit_sha):
params = {
'target_commitish': commit_sha,
}
response = _github_request('patch', 'repos/{}/releases/{}'.format(repo, release['id']), params)
if response.status_code != 200:
raise exception_from_error("Failed updating commit sha for existing release", response)
return response.json()
def update_release(version, build_filepath, commit_sha):
try:
release = get_rc_release(version)
if release:
release = update_release_commit_sha(release, commit_sha)
else:
release = create_release(version, commit_sha)
release = get_rc_release(version) or create_release(version, commit_sha)
print "Using release id: {}".format(release['id'])
remove_previous_builds(release)
@@ -132,8 +115,8 @@ def update_release(version, build_filepath, commit_sha):
changelog = get_changelog(commit_sha)
response = _github_request('patch', release['url'], {'body': changelog})
if response.status_code != 200:
raise exception_from_error("Failed updating release description", response)
print response.status_code
print response.text
except Exception, ex:
print ex


@@ -0,0 +1,63 @@
"""
Script to test concurrency (multithreading/multiprocess) issues with the workers. Use with caution.
"""
import json
import atfork
atfork.monkeypatch_os_fork_functions()
import atfork.stdlib_fixer
atfork.stdlib_fixer.fix_logging_module()
import time
from redash.data import worker
from redash import models, data_manager, redis_connection
if __name__ == '__main__':
models.create_db(True, False)
print "Creating data source..."
data_source = models.DataSource.create(name="Concurrency", type="pg", options="dbname=postgres")
print "Clear jobs/hashes:"
redis_connection.delete("jobs")
query_hashes = redis_connection.keys("query_hash_*")
if query_hashes:
redis_connection.delete(*query_hashes)
starting_query_results_count = models.QueryResult.select().count()
jobs_count = 5000
workers_count = 10
print "Creating jobs..."
for i in xrange(jobs_count):
query = "SELECT {}".format(i)
print "Inserting: {}".format(query)
data_manager.add_job(query=query, priority=worker.Job.LOW_PRIORITY,
data_source=data_source)
print "Starting workers..."
workers = data_manager.start_workers(workers_count)
print "Waiting for jobs to be done..."
keep_waiting = True
while keep_waiting:
results_count = models.QueryResult.select().count() - starting_query_results_count
print "QueryResults: {}".format(results_count)
time.sleep(5)
if results_count == jobs_count:
print "Yay done..."
keep_waiting = False
data_manager.stop_workers()
qr_count = 0
for qr in models.QueryResult.select():
number = int(qr.query.split()[1])
data_number = json.loads(qr.data)['rows'][0].values()[0]
if number != data_number:
print "Oops? {} != {} ({})".format(number, data_number, qr.id)
qr_count += 1
print "Verified {} query results.".format(qr_count)
print "Done."


@@ -7,11 +7,8 @@ machine:
2.7.3
dependencies:
pre:
- wget http://downloads.sourceforge.net/project/optipng/OptiPNG/optipng-0.7.5/optipng-0.7.5.tar.gz
- tar xvf optipng-0.7.5.tar.gz
- cd optipng-0.7.5; ./configure; make; sudo checkinstall -y;
- make deps
- pip install -r requirements_dev.txt
- pip install -r dev_requirements.txt
- pip install -r requirements.txt
cache_directories:
- rd_ui/node_modules/
@@ -22,6 +19,10 @@ test:
post:
- make pack
deployment:
test:
branch: chore/release_process
commands:
- make upload
github:
branch: master
commands:


@@ -1,192 +0,0 @@
# Makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXOPTS =
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build
# User-friendly check for sphinx-build
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
endif
# Internal variables.
PAPEROPT_a4 = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext
help:
@echo "Please use \`make <target>' where <target> is one of"
@echo " html to make standalone HTML files"
@echo " dirhtml to make HTML files named index.html in directories"
@echo " singlehtml to make a single large HTML file"
@echo " pickle to make pickle files"
@echo " json to make JSON files"
@echo " htmlhelp to make HTML files and a HTML help project"
@echo " qthelp to make HTML files and a qthelp project"
@echo " applehelp to make an Apple Help Book"
@echo " devhelp to make HTML files and a Devhelp project"
@echo " epub to make an epub"
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
@echo " latexpdf to make LaTeX files and run them through pdflatex"
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
@echo " text to make text files"
@echo " man to make manual pages"
@echo " texinfo to make Texinfo files"
@echo " info to make Texinfo files and run them through makeinfo"
@echo " gettext to make PO message catalogs"
@echo " changes to make an overview of all changed/added/deprecated items"
@echo " xml to make Docutils-native XML files"
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
@echo " linkcheck to check all external links for integrity"
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
@echo " coverage to run coverage check of the documentation (if enabled)"
clean:
rm -rf $(BUILDDIR)/*
html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
dirhtml:
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
singlehtml:
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
@echo
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
pickle:
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
@echo
@echo "Build finished; now you can process the pickle files."
json:
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
@echo
@echo "Build finished; now you can process the JSON files."
htmlhelp:
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
@echo
@echo "Build finished; now you can run HTML Help Workshop with the" \
".hhp project file in $(BUILDDIR)/htmlhelp."
qthelp:
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
@echo
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/redash.qhcp"
@echo "To view the help file:"
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/redash.qhc"
applehelp:
$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
@echo
@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
@echo "N.B. You won't be able to view it unless you put it in" \
"~/Library/Documentation/Help or install it in your application" \
"bundle."
devhelp:
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
@echo
@echo "Build finished."
@echo "To view the help file:"
@echo "# mkdir -p $$HOME/.local/share/devhelp/redash"
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/redash"
@echo "# devhelp"
epub:
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
@echo
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
latex:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
@echo "Run \`make' in that directory to run these through (pdf)latex" \
"(use \`make latexpdf' here to do that automatically)."
latexpdf:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through pdflatex..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
latexpdfja:
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
@echo "Running LaTeX files through platex and dvipdfmx..."
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
text:
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
@echo
@echo "Build finished. The text files are in $(BUILDDIR)/text."
man:
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
@echo
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
texinfo:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
@echo "Run \`make' in that directory to run these through makeinfo" \
"(use \`make info' here to do that automatically)."
info:
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
@echo "Running Texinfo files through makeinfo..."
make -C $(BUILDDIR)/texinfo info
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
gettext:
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
@echo
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
changes:
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
@echo
@echo "The overview file is in $(BUILDDIR)/changes."
linkcheck:
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
@echo
@echo "Link check complete; look for any errors in the above output " \
"or in $(BUILDDIR)/linkcheck/output.txt."
doctest:
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
@echo "Testing of doctests in the sources finished, look at the " \
"results in $(BUILDDIR)/doctest/output.txt."
coverage:
$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
@echo "Testing of coverage in the sources finished, look at the " \
"results in $(BUILDDIR)/coverage/python.txt."
xml:
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
@echo
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
pseudoxml:
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
@echo
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."


@@ -1,111 +0,0 @@
# -*- coding: utf-8 -*-
#
# re:dash documentation build configuration file, created by
# sphinx-quickstart on Mon Jul 20 22:40:24 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = []
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u're:dash'
copyright = u'2015, EverythingMe'
author = u'EverythingMe'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
exclude_patterns = ['_build']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = False
# Output file base name for HTML help builder.
htmlhelp_basename = 'redashdoc'
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'redash', u're:dash Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'redash', u're:dash Documentation',
author, 'redash', 'One line description of project.',
'Miscellaneous'),
]


@@ -1,181 +0,0 @@
Supported Data Sources
######################
re:dash supports several types of data sources, and if you set it up using the provided images, it should already have
the needed dependencies to use them all. Starting from version 0.7, you can manage data sources from the UI
by browsing to ``/data_sources`` on your instance.
If one of the listed data source types isn't available when trying to create a new data source, make sure that:
1. You installed the required dependencies.
2. If you've set a custom value for the ``REDASH_ENABLED_QUERY_RUNNERS`` setting, it's included in the list.
PostgreSQL / Redshift
---------------------
- **Options**:
- Database name (mandatory)
- User
- Password
- Host
- Port
- **Additional requirements**:
- None
MySQL
-----
- **Options**:
- Database name (mandatory)
- User
- Password
- Host
- Port
- **Additional requirements**:
- ``MySQL-python`` python package
Google BigQuery
---------------
- **Options**:
- Project ID (mandatory)
- JSON key file, generated when creating a service account (see `instructions <https://developers.google.com/console/help/new/#serviceaccounts>`__).
- **Additional requirements**:
- ``google-api-python-client``, ``oauth2client`` and ``pyopenssl`` python packages (on Ubuntu it might require installing ``libffi-dev`` and ``libssl-dev`` as well).
Graphite
--------
- **Options**:
- Url (mandatory)
- User
- Password
- Verify SSL certificate
MongoDB
-------
- **Options**:
- Connection String (mandatory)
- Database name
- Replica set name
- **Additional requirements**:
- ``pymongo`` python package.
For information on how to write MongoDB queries, see :doc:`documentation </usage/mongodb_querying>`.
ElasticSearch
-------------
...
InfluxDB
--------
...
Presto
------
- **Options**:
- Host (mandatory)
- Address of the Presto coordinator.
- Port
- Port of the Presto coordinator. `8080` is the default port.
- Schema
- Default schema name of Presto. You can read other schemas by qualified name like `FROM myschema.table1`.
- Catalog
- Catalog (connector) name of Presto such as `hive-cdh4`, `hive-hadoop1`, etc.
- Username
- User name used to connect to Presto.
- **Additional requirements**:
- ``pyhive`` python package.
Hive
----
...
Impala
------
...
URL
---
A URL-based data source which requests URLs that return the :doc:`results JSON
format </dev/results_format>`.
Very useful in situations where you want to expose the data without
connecting directly to the database.
The query itself inside re:dash will simply contain the URL to be
executed (e.g. http://myserver/path/myquery).
- **Options**:
- Url - set this if you want to limit queries to a certain base path.
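As an illustration, the URL only needs to return the documented results JSON; below is a minimal sketch of such an endpoint, using Flask purely as an example (the URL data source does not require any particular framework, and the route and column names are hypothetical):

.. code:: python

    from flask import Flask, jsonify

    app = Flask(__name__)

    # Example endpoint: re:dash would be given http://myserver/path/myquery as the query.
    @app.route("/path/myquery")
    def my_query():
        # Returns data in the documented results JSON format.
        return jsonify({
            "columns": [
                {"name": "user_id", "friendly_name": "User ID", "type": "integer"},
                {"name": "signup_date", "friendly_name": "Signup Date", "type": "datetime"},
            ],
            "rows": [
                {"user_id": 1, "signup_date": "2015-04-06T12:00:00"},
            ],
        })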
Google Spreadsheets
-------------------
- **Options**:
- JSON key file, generated when creating a service account (see `instructions <https://developers.google.com/console/help/new/#serviceaccounts>`__).
- **Additional requirements**:
- ``gspread`` and ``oauth2client`` python packages.
Notes:
1. To be able to load the spreadsheet in re:dash, share it with
your Service Account's email (it can be found in the credentials JSON
file, for example
43242343247-fjdfakljr3r2@developer.gserviceaccount.com).
2. The query format is "DOC\_UUID\|SHEET\_NUM" (for example
"kjsdfhkjh4rsEFSDFEWR232jkddsfh\|0")
Python
------
**Execute other queries, manipulate and compute with Python code**
This is a special query runner that executes the provided Python code as the query. Useful for various scenarios such as
merging data from different data sources, doing data transformation/manipulation that isn't trivial with SQL, merging
with remote data or using data analysis libraries such as Pandas (see `example query <https://gist.github.com/arikfr/be7c2888520c44cf4f0f>`__).
While the Python query runner uses a sandbox (RestrictedPython), it's not 100% secure and the security depends on the
modules you allow to import. We recommend enabling the Python query runner only in a trusted environment (meaning: behind
VPN and with users you trust).
- **Options**:
- Allowed Modules in a comma-separated list (optional). **NOTE:**
You MUST make sure these modules are installed on the machine
running the Celery workers.


@@ -1,11 +0,0 @@
Developer Information
=====================
.. toctree::
:maxdepth: 2
:glob:
dev/vagrant
dev/*


@@ -1,94 +0,0 @@
Query Execution Model
#####################
Introduction
============
The first data source used with re:dash was Redshift. Because
we had billions of records in Redshift, and some queries were costly to
re-run, from the get-go there was the idea of caching query results in
re:dash.
This was to relieve stress from the Redshift cluster and also to improve
user experience.
How do queries get executed and cached in re:dash?
===================================================
Server
------
To make sure each query is executed only once at any given time, we
translate the query to a ``query hash``, using the following code:
.. code:: python
COMMENTS_REGEX = re.compile("/\*.*?\*/")
def gen_query_hash(sql):
sql = COMMENTS_REGEX.sub("", sql)
sql = "".join(sql.split()).lower()
return hashlib.md5(sql.encode('utf-8')).hexdigest()
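A minimal usage sketch of the function above: queries that differ only in whitespace, letter case or comments produce the same hash, so they are treated as the same query.

.. code:: python

    # Whitespace, case and /* comments */ don't affect the hash.
    assert gen_query_hash("SELECT 1") == gen_query_hash("select  1  /* count */")
    # Different SQL produces a different hash.
    assert gen_query_hash("SELECT 1") != gen_query_hash("SELECT 2")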
When query execution is done, the result gets stored in the
``query_results`` table. We also find all queries in the
``queries`` table that have the same query hash and update their
reference to the query result we just saved
(`code <https://github.com/EverythingMe/redash/blob/master/redash/models.py#L235>`__).
Client
------
The client (UI) will execute queries in two scenarios:
1. (automatically) When opening a query page of a query that doesn't
have a result yet.
2. (manually) When the user clicks on "Execute".
In each case the client does a POST request to ``/api/query_results``
with the following parameters: ``query`` (the query text),
``data_source_id`` (data source to execute the query with) and ``ttl``.
When loading a cached result, ``ttl`` will be the one set on the query
(if it was set). This is a relic from previous versions, and I'm not
sure if it's really used anymore, as usually we will fetch the query
result using its id.
When loading a non-cached result, ``ttl`` will be 0, which will "force"
the server to execute the query.
As a response to ``/api/query_results`` the server will send either the
query results (in case of a cached query) or the job id of the currently
executing query. When a job id is received, the client starts polling on
this id until a query result is received (this is encapsulated in the
``Query`` and ``QueryResult`` services).
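A minimal sketch of this flow from an outside client's point of view; the response field names and the polling endpoint below are assumptions for illustration, not taken verbatim from the re:dash API:

.. code:: python

    import time
    import requests

    BASE = "http://redash.example.com"  # hypothetical instance address; auth omitted

    # ttl=0 "forces" execution instead of returning a cached result.
    payload = {"query": "SELECT 1", "data_source_id": 1, "ttl": 0}
    response = requests.post(BASE + "/api/query_results", json=payload).json()

    if "query_result" in response:      # cached result returned directly (assumed key)
        result = response["query_result"]
    else:                               # otherwise a job id is returned; poll until done
        job_id = response["job"]["id"]  # assumed response shape
        while True:
            job = requests.get("{}/api/jobs/{}".format(BASE, job_id)).json()["job"]
            if job.get("query_result_id"):
                result = requests.get("{}/api/query_results/{}".format(
                    BASE, job["query_result_id"])).json()
                break
            time.sleep(1)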
Ideas on how to implement query parameters
==========================================
Client side only implementation
-------------------------------
(This was actually implemented; see pull request `#363 <https://github.com/EverythingMe/redash/pull/363>`__ for details.)
The basic idea of how to implement parameterized queries is to treat the
query as a template and merge it with parameters taken from query string
or UI (or both).
When the caching facility isn't required (with queries that return in a
reasonable time frame) the implementation can be completely client side
and the backend can be "blind" to the parameters - it just receives the
final query to execute and returns the result.
As one improvement over this, we can let the UI/user specify the TTL
value when making the request to ``/api/query_results``, in which case
caching will be available too, while not having to make the server aware
of the parameters.
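A minimal sketch of the client-side idea, assuming a simple ``{{param}}`` placeholder syntax (the actual implementation in #363 may differ):

.. code:: python

    import re

    def merge_parameters(template, parameters):
        """Merge parameter values into the query text before sending it to
        /api/query_results; the server stays "blind" to the parameters."""
        return re.sub(r"\{\{\s*(\w+)\s*\}\}",
                      lambda m: str(parameters[m.group(1)]),
                      template)

    query = merge_parameters(
        "SELECT count(*) FROM events WHERE channel = '{{ channel }}'",
        {"channel": "selfserve"})
    # -> "SELECT count(*) FROM events WHERE channel = 'selfserve'"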
Hybrid
------
Another option is to store the list of possible parameters for a
query, with their default/optional values. In that case, the server can
prefetch all the options and cache them to provide faster results to the
client.


@@ -1,30 +0,0 @@
Data Source Results Format
==========================
All data sources in re:dash return the following results in JSON format:
.. code:: javascript
{
"columns" : [
{
// Required: a unique identifier of the column name in this result
"name" : "COLUMN_NAME",
// Required: friendly name of the column that will appear in the results
"friendly_name" : "FRIENDLY_NAME",
// Optional: If not specified sort might not work well.
// Supported types: integer, float, boolean, string (default), datetime (ISO-8601 text format)
"type" : "VALUE_TYPE"
},
...
],
"rows" : [
{
// name is the column name as it appears in the columns above.
// VALUE is a valid JSON value. For dates it's an ISO-8601 string.
"name" : VALUE,
"name2" : VALUE2
},
...
]
}


@@ -1,49 +0,0 @@
Setting up development environment (using Vagrant)
==================================================
To simplify contribution there is a `Vagrant
box <https://vagrantcloud.com/redash/boxes/dev>`__ available with all
the needed software to run re:dash for development (use it only for
development; for demo purposes there are the
`redash/demo <https://vagrantcloud.com/redash/boxes/demo>`__ box and the
AWS/GCE images).
To get started with this box:
1. Make sure you have a recent version of
`Vagrant <https://www.vagrantup.com/>`__ installed.
2. Clone the re:dash repository:
``git clone https://github.com/EverythingMe/redash.git``.
3. Change dir into the repository (``cd redash``) and run
``vagrant up``. This might take some time the first time you run it,
as it downloads the Vagrant virtual box.
4. Once Vagrant is ready, ssh into the instance (``vagrant ssh``), and
change dir to ``/opt/redash/current`` -- this is where your local
repository copy is synced to.
5. Copy ``.env`` file into this directory (``cp ../.env ./``).
6. From ``/opt/redash/current/rd_ui`` run ``bower install`` to install
frontend packages. This can be done from your host machine as well,
if you have bower installed.
7. Go back to ``/opt/redash/current`` and install python dependencies
``sudo pip install -r requirements.txt``
8. Apply migrations
::
PYTHONPATH=. bin/run python migrations/0001_allow_delete_query.py
PYTHONPATH=. bin/run python migrations/0002_fix_timestamp_fields.py
PYTHONPATH=. bin/run python migrations/0003_update_data_source_config.py
PYTHONPATH=. bin/run python migrations/0004_allow_null_in_event_user.py
PYTHONPATH=. bin/run python migrations/0005_add_updated_at.py
PYTHONPATH=. bin/run python migrations/0006_queries_last_edit_by.py
PYTHONPATH=. bin/run python migrations/0007_add_schedule_to_queries.py
PYTHONPATH=. bin/run python migrations/0008_make_ds_name_unique.py
PYTHONPATH=. bin/run python migrations/0009_add_api_key_to_user.py
PYTHONPATH=. bin/run python migrations/0010_create_alerts.py
PYTHONPATH=. bin/run python migrations/0010_allow_deleting_datasources.py
PYTHONPATH=. bin/run python migrations/0011_migrate_bigquery_to_json.py
9. Start the server and background workers with
``bin/run honcho start -f Procfile.dev``.
10. Now the server should be available on your host on port 9001 and you
can log in with the username admin and password admin.


@@ -1,57 +0,0 @@
.. image:: http://redash.io/static/img/redash_logo.png
:width: 200px
Open Source Data Collaboration and Visualization Platform
==========================================================
**re:dash** is our take on freeing the data within our company in a way that will better fit our culture and usage patterns.
Prior to **re:dash**, we tried to use traditional BI suites and discovered a set of bloated, technically challenged and slow tools/flows. What we were looking for was a more hacker'ish way to look at data, so we built one.
**re:dash** was built to allow fast and easy access to billions of records, that we process and collect using Amazon Redshift ("petabyte scale data warehouse" that "speaks" PostgreSQL).
Today **re:dash** has support for querying multiple databases, including: Redshift, Google BigQuery, Google Spreadsheets, PostgreSQL, MySQL, Graphite and custom scripts.
Features
########
1. **Query Editor**: think of `JS Fiddle`_ for SQL queries. It's your way to share data in the organization in an open way, by sharing both the dataset and the query that generated it. This way everyone can peer review not only the resulting dataset but also the process that generated it.
2. **Visualizations**: once you have a dataset, you can create different visualizations out of it. Currently it supports charts, pivot table and cohorts.
3. **Dashboards**: combine several visualizations into a single dashboard.
Demo
####
.. figure:: https://raw.github.com/EverythingMe/redash/screenshots/screenshots.gif
:alt: Screenshots
You can try out the demo instance: `http://demo.redash.io`_ (login with any Google account).
.. _http://demo.redash.io: http://demo.redash.io
.. _JS Fiddle: http://jsfiddle.net
Getting Started
###############
:doc:`Setting up re:dash instance </setup>` (includes links to ready made AWS/GCE images).
Getting Help
############
* Source: https://github.com/everythingme/redash
* Issues: https://github.com/everythingme/redash/issues
* Mailing List: https://groups.google.com/forum/#!forum/redash-users
* Gitter (chat): https://gitter.im/EverythingMe/redash
* Contact Arik, the maintainer directly: arik@everything.me.
TOC
###
.. toctree::
:maxdepth: 2
setup
upgrade
datasources
usage
dev
misc


@@ -1,10 +0,0 @@
Miscellaneous
=============
.. toctree::
:maxdepth: 2
:glob:
misc/*


@@ -1,50 +0,0 @@
How To: Create a Google Developers Project
==========================================
1. Go to the `Google Developers
Console <https://console.developers.google.com/>`__.
2. Select a project, or create a new one by clicking Create Project:
1. In the Project name field, type in a name for your project.
2. In the Project ID field, optionally type in a project ID for your
project or use the one that the console has created for you. This
ID must be unique world-wide.
3. Click the **Create** button and wait for the project to be
created.
4. Click on the new project name in the list to start editing the
project.
3. In the left sidebar, select the **APIs** item below "APIs & auth". A
list of Google web services appears.
4. Find the **Google+ API** service and set its status to **ON**—notice
that this action moves the service to the top of the list.
5. In the sidebar under "APIs & auth", select **Consent screen**.
- Choose an Email Address and specify a Product Name.
6. In the sidebar under "APIs & auth", select **Credentials**.
7. Click **Create a new Client ID** — a dialog box appears.
- In the **Application type** section of the dialog, select **Web
application**.
- In the **Authorized JavaScript origins** field, enter the origin
for your app. You can enter multiple origins to use with multiple
re:dash instances. Wildcards are not allowed. In the example below,
we assume your re:dash instance address is *redash.example.com*:
::
http://redash.example.com
https://redash.example.com
- In the Authorized redirect URI field, enter the redirect URI
callback:
::
http://redash.example.com/oauth/google_callback
- Click the ``Create Client ID`` button.
8. In the resulting **Client ID for web application** section, copy the
**Client ID** and **Client secret** to your ``.env`` file.


@@ -1,59 +0,0 @@
SSL (HTTPS) Setup
=================
If you used the provided images or the bootstrap script, to start using
SSL with your instance you need to:
1. Update the nginx config file (``/etc/nginx/sites-available/redash``)
with SSL configuration (see below an example). Make sure to upload
the certificate to the server, and set the paths correctly in the new
config.
2. Open port 443 in your security group (if using AWS or GCE).
.. code:: nginx
upstream redash_servers {
server 127.0.0.1:5000;
}
server {
listen 80;
# Allow accessing /ping without https. Useful when placing behind load balancer.
location /ping {
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_pass http://redash_servers;
}
location / {
# Enforce SSL.
return 301 https://$host$request_uri;
}
}
server {
listen 443 ssl;
# Make sure to set paths to your certificate .pem and .key files.
ssl on;
ssl_certificate /path-to/cert.pem; # or crt
ssl_certificate_key /path-to/cert.key;
access_log /var/log/nginx/redash.access.log;
gzip on;
gzip_types *;
gzip_proxied any;
location / {
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_pass http://redash_servers;
proxy_redirect off;
}
}


@@ -1,3 +0,0 @@
sphinx
sphinx-autobuild
sphinx_rtd_theme


@@ -1,155 +0,0 @@
Setting up re:dash instance
###########################
The `provisioning
script <https://github.com/EverythingMe/redash/blob/master/setup/bootstrap.sh>`__
works on Ubuntu 12.04, Ubuntu 14.04 and Debian Wheezy. This script
installs all needed dependencies and creates a basic setup.
To ease the process, there are also images for AWS and Google Compute
Engine. These images were created with the same provisioning script using Packer.
Create an instance
==================
Google Compute Engine
---------------------
First, you need to add the images to your account:
.. code:: bash
$ gcloud compute images add redash-063-b906 gs://redash-images/redash.0.6.3.b906.tar.gz
Next you need to launch an instance using this image (n1-standard-1
instance type is recommended). If you plan on using re:dash with BigQuery,
you can use a dedicated image which comes with BigQuery preconfigured
(using instance permissions):
.. code:: bash
$ gcloud compute images add redash-063-b906-bq gs://redash-images/redash.0.6.3.b906-bq.tar.gz
Note that you need to launch this instance with BigQuery access:
.. code:: bash
$ gcloud compute instances create <your_instance_name> --image redash-063-b906-bq --scopes storage-ro bigquery
(the same can be done from the web interface, just make sure to enable
BigQuery access)
Now proceed to `"Setup" <#setup>`__.
AWS
---
Launch the instance from the pre-baked AMI (for small deployments
t2.micro should be enough):
- us-east-1:
`ami-47b4612c <https://console.aws.amazon.com/ec2/home?region=us-east-1#LaunchInstanceWizard:ami=ami-47b4612c>`__
- us-west-1:
`ami-a72edde3 <https://console.aws.amazon.com/ec2/home?region=us-west-1#LaunchInstanceWizard:ami=ami-a72edde3>`__
- us-west-2:
`ami-f9d6d5c9 <https://console.aws.amazon.com/ec2/home?region=us-west-2#LaunchInstanceWizard:ami=ami-f9d6d5c9>`__
- eu-central-1:
`ami-72eed46f <https://console.aws.amazon.com/ec2/home?region=eu-central-1#LaunchInstanceWizard:ami=ami-72eed46f>`__
- eu-west-1:
`ami-5a135c2d <https://console.aws.amazon.com/ec2/home?region=eu-west-1#LaunchInstanceWizard:ami=ami-5a135c2d>`__
- sa-east-1:
`ami-2b78f436 <https://console.aws.amazon.com/ec2/home?region=sa-east-1#LaunchInstanceWizard:ami=ami-2b78f436>`__
- ap-northeast-1:
`ami-0a55fd0a <https://console.aws.amazon.com/ec2/home?region=ap-northeast-1#LaunchInstanceWizard:ami=ami-0a55fd0a>`__
- ap-southeast-2:
`ami-9f793ea5 <https://console.aws.amazon.com/ec2/home?region=ap-southeast-2#LaunchInstanceWizard:ami=ami-9f793ea5>`__
- ap-southeast-1:
`ami-12545740 <https://console.aws.amazon.com/ec2/home?region=ap-southeast-1#LaunchInstanceWizard:ami=ami-12545740>`__
Now proceed to `"Setup" <#setup>`__.
Other
-----
Download the provisioning script and run it on your machine. Note that:
1. You need to run the script as root.
2. It was tested only on Ubuntu 12.04, Ubuntu 14.04 and Debian Wheezy.
Setup
=====
Once you have created the instance with either the image or the script, you
should have a running re:dash instance with everything you need to get
started. You can now log in to it with the user "admin" (password:
"admin"). But to make it useful, there are a few more steps that you
need to do manually to complete the setup:
First ssh to your instance and change directory to ``/opt/redash``. If
you're using the GCE image, switch to root (``sudo su``).
Users & Google Authentication setup
-----------------------------------
Most of the settings you need to edit are in the ``/opt/redash/.env``
file.
1. Update the cookie secret (important! otherwise anyone can sign new
cookies and impersonate users): change "veryverysecret" in the line:
``export REDASH_COOKIE_SECRET=veryverysecret`` to something else (you
can use ``pwgen 32 -1`` to generate a random string).
2. By default we create an admin user with the password "admin". You
need to change the password:
- ``cd /opt/redash/current``
- ``sudo -u redash bin/run ./manage.py users password admin {new password}``
3. If you want to use Google OAuth to authenticate users, you need to
create a Google Developers project (see :doc:`instructions </misc/google_developers_project>`)
and then add the needed configuration in the ``.env`` file:
.. code::
export REDASH_GOOGLE_CLIENT_ID=""
export REDASH_GOOGLE_CLIENT_SECRET=""
export REDASH_GOOGLE_APPS_DOMAIN=""
``REDASH_GOOGLE_CLIENT_ID`` and ``REDASH_GOOGLE_CLIENT_SECRET`` are the values you get after registering with Google. ``REDASH_GOOGLE_APPS_DOMAIN`` is used in case you want to limit access to a single Google Apps domain (*if you leave it empty, anyone with a Google account can access your instance*).
4. Restart the web server to apply the configuration changes:
``sudo supervisorctl restart redash_server``.
5. Once you have Google OAuth enabled, you can login using your Google
Apps account. If you want to grant admin permissions to some users,
you can do it with the ``users grant_admin`` command:
``sudo -u redash bin/run ./manage.py users grant_admin {email}``.
6. If you don't use Google OAuth or just need username/password logins,
you can create additional users using the CLI (see :doc:`documentation </usage/users>`).
Datasources
-----------
To make re:dash truly useful, you need to set up your data sources in it. Browse to ``/data_sources`` on your instance
to create a new data source connection.
See
:doc:`documentation </datasources>`
for the different options. Your instance comes ready with dependencies
needed to set up the supported sources.
How to upgrade?
---------------
It's recommended to upgrade your re:dash instance once in a while to
benefit from bug fixes and new features. See :doc:`here </upgrade>` for full upgrade
instructions (including Fabric script).
Notes
=====
- If this is a production setup, you should enforce HTTPS and make sure
you set the cookie secret (see :doc:`instructions </misc/ssl>`).


@@ -1,36 +0,0 @@
How to Upgrade
##############
It's recommended to upgrade your re:dash instance once there are new
releases, to benefit from new features and bug fixes. The upgrade
process is relatively simple, and assuming you used one of the base
images we provide, you can just use the
`Fabric <http://www.fabfile.org/>`__ script provided here:
https://gist.github.com/arikfr/440d1403b4aeb76ebaf8.
How to run the Fabric script
============================
1. Install Fabric: ``pip install fabric requests`` (needed only once)
2. Download the ``fabfile.py`` from the gist.
3. Run the script:
``fab -H{your re:dash host} -u{the ssh user for this host} -i{path to key file for passwordless login} deploy_latest_release``
``-i`` is optional and it is only needed in case you're using private-key based authentication (and didn't add the key file to your authentication agent or set its path in your SSH config).
What the Fabric script does
===========================
Even if you didn't use the image, it's very likely you can reuse most of
this script with small modifications. What this script does is:
1. Find the URL of the latest release tarball (from `GitHub releases
page <https://github.com/everythingme/redash/releases>`__).
2. Download it.
3. Create new directory for this version (for example:
``/opt/redash/redash.0.5.0.b685``).
4. Unpack that (``tar -C {dir} -xvf {tarball path}``).
5. Link ``/opt/redash/.env`` file into this directory.
6. Apply any new migrations.
7. Link ``/opt/redash/current`` to new version.
8. Restart web server and celery workers.


@@ -1,12 +0,0 @@
Usage
=====
.. toctree::
:maxdepth: 2
:glob:
usage/maintenance.rst
usage/users.rst
usage/*


@@ -1,48 +0,0 @@
ElasticSearch: Querying
#######################
ElasticSearch currently supports only simple Lucene-style queries (like
Kibana but without the aggregation).
Full-blown JSON-based ElasticSearch queries (including aggregations)
will be added later.
Simple query example:
=====================
- Query the index named "twitter"
- Filter by "user:kimchy"
- Return the fields: "@timestamp", "tweet" and "user"
- Return up to 15 results
- Sort by @timestamp ascending
.. code:: json
{
"index" : "twitter",
"query" : "user:kimchy",
"fields" : ["@timestamp", "tweet", "user"],
"size" : 15,
"sort" : "@timestamp:asc"
}
Simple query on a logstash ElasticSearch instance:
==================================================
- Query the index named "logstash-2015.04.\*" (in this case it's all of
April 2015)
- Filter by type:events AND eventName:UserUpgrade AND channel:selfserve
- Return fields: "@timestamp", "userId", "channel", "utm\_source",
"utm\_medium", "utm\_campaign", "utm\_content"
- Return up to 250 results
- Sort by @timestamp ascending
.. code:: json
{
"index" : "logstash-2015.04.*",
"query" : "type:events AND eventName:UserUpgrade AND channel:selfserve",
"fields" : ["@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"],
"size" : 250,
"sort" : "@timestamp:asc"
}


@@ -1,94 +0,0 @@
Ongoing Maintenance and Basic Operations
########################################
Configuration and logs
======================
The supervisor config can be found in
``/opt/redash/supervisord/supervisord.conf``.
There you can see the names of its programs (``redash_celery``,
``redash_server``) and the location of their logs.
Restart
=======
Restarting the Web Server
-------------------------
``sudo supervisorctl restart redash_server``
Restarting Celery Workers
-------------------------
``sudo supervisorctl restart redash_celery``
Restarting Celery Workers & the Queries Queue
---------------------------------------------
In case you are handling a problem, and you need to stop the currently
running queries and reset the queue, follow the steps below.
1. Stop celery: ``sudo supervisorctl stop redash_celery`` (celery might
take some time to stop, if it's in the middle of running a query)
2. Flush redis: ``redis-cli flushdb``
3. Start celery: ``sudo supervisorctl start redash_celery``
Changing the Number of Workers
==============================
By default, Celery will start a worker per CPU core. Because most of
re:dash's tasks are IO bound, the real limit on the number of workers you
can use depends on the amount of memory your machine has. It's
recommended to increase the number of workers to support more concurrent
queries.
1. Open the supervisord configuration file:
``/opt/redash/supervisord/supervisord.conf``
2. Edit the ``[program:redash_celery]`` section and add the "-c" parameter,
with the number of concurrent workers you need, to the *command* value.
3. Restart supervisord to apply new configuration:
``sudo /etc/init.d/redash_supervisord restart``.
DB
==
Show the Currently Configured Data Source
-----------------------------------------
This varies based on the redash version and personal preferences. You
can do one of the following:
Using the CLI
~~~~~~~~~~~~~
In ``/opt/redash/current``, run:
``sudo -u redash bin/run ./manage.py ds list``
Using the Admin
~~~~~~~~~~~~~~~
(available from version 0.6b797). Browse to ``/admin/datasource``
View the Definition Directly in the DB
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
1. Open psql: ``sudo -u redash psql``
2. Run the query: ``SELECT * from data_sources;``
Backup re:dash's DB:
--------------------
``sudo -u redash pg_dump > backup_filename.sql``
Version
=======
See current version:
``bin/run ./manage.py version``


@@ -1,74 +0,0 @@
MongoDB: Querying
#################
Simple query example:
=====================
.. code:: json
{
"collection" : "my_collection",
"query" : {
"date" : {
"$gt" : "ISODate(\"2015-01-15 11:41\")",
},
"type" : 1
},
"fields" : {
"_id" : 1,
"name" : 2
},
"sort" : [
{
"name" : "date",
"direction" : -1
}
]
}
Live example on the demo instance:
http://demo.redash.io/queries/394/source.
Aggregation
===========
Uses a syntax similar to the one used in PyMongo; however, to preserve the
correct order of sorting, it uses a regular list for the "$sort"
operation, which is converted into a SON (sorted dictionary) object before
execution.
Aggregation query example:
.. code:: json
{
"collection" : "things",
"aggregate" : [
{
"$unwind" : "$tags"
},
{
"$group" : {
"_id" : "$tags",
"count" : { "$sum" : 1 }
}
},
{
"$sort" : [
{
"name" : "count",
"direction" : -1
},
{
"name" : "_id",
"direction" : -1
}
]
}
]
}
Live examples on the demo instance:
1. http://demo.redash.io/queries/393/source
2. http://demo.redash.io/queries/387/source


@@ -1,39 +0,0 @@
Users' Management
#################
If you use Google OpenID authentication, then each user from the domains
you allowed will automatically be logged in and have the default
permissions.
If you want to give some users different permissions or you want to
create password-based users (make sure you enabled this option in the
settings first), you need to use the CLI (``manage.py``).
Create a new user
=================
.. code:: bash
$ bin/run ./manage.py users create --help
usage: users create [-h] [--permissions PERMISSIONS] [--password PASSWORD]
[--google] [--admin]
name email
positional arguments:
name User's full name
email User's email
optional arguments:
-h, --help show this help message and exit
--permissions PERMISSIONS
Comma seperated list of permissions (leave blank for
default).
--password PASSWORD Password for users who don't use Google Auth (leave
blank for prompt).
--google user uses Google Auth to login
--admin set user as admin
Grant admin permissions
=======================
``sudo -u redash bin/run ./manage.py users grant_admin {email}``


@@ -2,15 +2,12 @@
"""
CLI to manage redash.
"""
import json
from flask.ext.script import Manager
from redash import settings, models, __version__
from redash.wsgi import app
from redash.import_export import import_manager
from redash.cli import users, database, data_sources
from redash.monitor import get_status
manager = Manager(app)
manager.add_command("database", database.manager)
@@ -24,9 +21,6 @@ def version():
"""Displays re:dash version."""
print __version__
@manager.command
def status():
print json.dumps(get_status(), indent=2)
@manager.command
def runworkers():
@@ -43,15 +37,12 @@ def make_shell_context():
@manager.command
def check_settings():
"""Show the settings as re:dash sees them (useful for debugging)."""
for name, item in settings.all_settings().iteritems():
print "{} = {}".format(name, item)
from types import ModuleType
@manager.command
def send_test_mail():
from redash import mail
from flask_mail import Message
mail.send(Message(subject="Test Message from re:dash", recipients=[settings.MAIL_DEFAULT_SENDER], body="Test message."))
for name in dir(settings):
item = getattr(settings, name)
if not callable(item) and not name.startswith("__") and not isinstance(item, ModuleType):
print "{} = {}".format(name, item)
if __name__ == '__main__':


@@ -1,20 +0,0 @@
from redash.models import db
if __name__ == '__main__':
db.connect_db()
with db.database.transaction():
# Make sure all data sources names are unique.
db.database.execute_sql("""
UPDATE data_sources
SET name = new_names.name
FROM (
SELECT id, name || ' ' || id as name
FROM (SELECT id, name, rank() OVER (PARTITION BY name ORDER BY created_at ASC) FROM data_sources) ds WHERE rank > 1
) AS new_names
WHERE data_sources.id = new_names.id;
""")
# Add unique constraint on data_sources.name.
db.database.execute_sql("ALTER TABLE data_sources ADD CONSTRAINT unique_name UNIQUE (name);")
db.close_db(None)


@@ -1,27 +0,0 @@
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
from redash import models
if __name__ == '__main__':
db.connect_db()
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
column = models.User.api_key
column.null = True
migrate(
migrator.add_column('users', 'api_key', models.User.api_key),
)
for user in models.User.select():
user.save()
migrate(
migrator.add_not_null('users', 'api_key')
)
db.close_db(None)


@@ -1,18 +0,0 @@
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db
if __name__ == '__main__':
db.connect_db()
migrator = PostgresqlMigrator(db.database)
with db.database.transaction():
migrate(
migrator.drop_not_null('queries', 'data_source_id'),
)
db.close_db(None)


@@ -1,8 +0,0 @@
from redash.models import db, Alert, AlertSubscription
if __name__ == '__main__':
with db.database.transaction():
Alert.create_table()
AlertSubscription.create_table()
db.close_db(None)


@@ -1,44 +0,0 @@
from base64 import b64encode
import json
from redash.models import DataSource
def convert_p12_to_pem(p12file):
from OpenSSL import crypto
with open(p12file, 'rb') as f:
p12 = crypto.load_pkcs12(f.read(), "notasecret")
return crypto.dump_privatekey(crypto.FILETYPE_PEM, p12.get_privatekey())
if __name__ == '__main__':
for ds in DataSource.all():
if ds.type == 'bigquery':
options = json.loads(ds.options)
if 'jsonKeyFile' in options:
continue
new_options = {
'projectId': options['projectId'],
'jsonKeyFile': b64encode(json.dumps({
'client_email': options['serviceAccount'],
'private_key': convert_p12_to_pem(options['privateKey'])
}))
}
ds.options = json.dumps(new_options)
ds.save()
elif ds.type == 'google_spreadsheets':
options = json.loads(ds.options)
if 'jsonKeyFile' in options:
continue
with open(options['credentialsFilePath']) as f:
new_options = {
'jsonKeyFile': b64encode(f.read())
}
ds.options = json.dumps(new_options)
ds.save()


@@ -19,7 +19,6 @@
"trailing": true,
"smarttabs": true,
"globals": {
"angular": false,
"_": false
"angular": false
}
}

4 binary image files were removed (sizes: 1.3 KiB, 2.0 KiB, 3.8 KiB, 6.0 KiB); their contents are not shown.


@@ -14,18 +14,13 @@
<link rel="stylesheet" href="/bower_components/gridster/dist/jquery.gridster.css">
<link rel="stylesheet" href="/bower_components/pivottable/dist/pivot.css">
<link rel="stylesheet" href="/bower_components/cornelius/src/cornelius.css">
<link rel="stylesheet" href="/bower_components/select2/select2.css">
<link rel="stylesheet" href="/bower_components/angular-ui-select/dist/select.css">
<link rel="stylesheet" href="/bower_components/pace/themes/pace-theme-minimal.css">
<link rel="stylesheet" href="/bower_components/font-awesome/css/font-awesome.css">
<link rel="stylesheet" href="/bower_components/codemirror/addon/hint/show-hint.css">
<link rel="stylesheet" href="/bower_components/leaflet/dist/leaflet.css">
<link rel="stylesheet" href="/styles/redash.css">
<!-- endbuild -->
<link rel="icon" type="image/png" sizes="32x32" href="/images/favicon-32x32.png">
<link rel="icon" type="image/png" sizes="96x96" href="/images/favicon-96x96.png">
<link rel="icon" type="image/png" sizes="16x16" href="/images/favicon-16x16.png">
</head>
<body>
<div growl></div>
@@ -39,7 +34,7 @@
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="/"><img src="/images/redash_icon_small.png"/></a>
<a class="navbar-brand" href="/"><strong>{{name}}</strong></a>
</div>
{% raw %}
<div class="collapse navbar-collapse navbar-ex1-collapse">
@@ -72,12 +67,6 @@
<li><a href="/queries">Queries</a></li>
</ul>
</li>
<li>
<a href="/alerts">Alerts</a>
</li>
<li ng-show="currentUser.hasPermission('admin')">
<a href="/data_sources">Data Sources</a>
</li>
</ul>
<form class="navbar-form navbar-left" role="search" ng-submit="searchQueries()">
<div class="form-group">
@@ -120,7 +109,6 @@
<script src="/bower_components/codemirror/addon/hint/show-hint.js"></script>
<script src="/bower_components/codemirror/addon/hint/anyword-hint.js"></script>
<script src="/bower_components/codemirror/mode/sql/sql.js"></script>
<script src="/bower_components/codemirror/mode/python/python.js"></script>
<script src="/bower_components/codemirror/mode/javascript/javascript.js"></script>
<script src="/bower_components/highcharts/highcharts.js"></script>
<script src="/bower_components/highcharts/modules/exporting.js"></script>
@@ -130,10 +118,11 @@
<script src="/bower_components/cornelius/src/cornelius.js"></script>
<script src="/bower_components/mousetrap/mousetrap.js"></script>
<script src="/bower_components/mousetrap/plugins/global-bind/mousetrap-global-bind.js"></script>
<script src="/bower_components/select2/select2.js"></script>
<script src="/bower_components/angular-ui-select2/src/select2.js"></script>
<script src="/bower_components/angular-ui-select/dist/select.js"></script>
<script src="/bower_components/underscore.string/lib/underscore.string.js"></script>
<script src="/bower_components/marked/lib/marked.js"></script>
<script src="/bower_components/angular-base64-upload/dist/angular-base64-upload.js"></script>
<script src="/scripts/ng_highchart.js"></script>
<script src="/scripts/ng_smart_table.js"></script>
<script src="/bower_components/angular-ui-bootstrap-bower/ui-bootstrap-tpls.js"></script>
@@ -142,8 +131,7 @@
<script src="/bower_components/mustache/mustache.js"></script>
<script src="/bower_components/canvg/rgbcolor.js"></script>
<script src="/bower_components/canvg/StackBlur.js"></script>
<script src="/bower_components/canvg/canvg.js"></script>
<script src="/bower_components/leaflet/dist/leaflet.js"></script>
<script src="/bower_components/canvg/canvg.js"></script>
<!-- endbuild -->
<!-- build:js({.tmp,app}) /scripts/scripts.js -->
@@ -155,22 +143,18 @@
<script src="/scripts/controllers/controllers.js"></script>
<script src="/scripts/controllers/dashboard.js"></script>
<script src="/scripts/controllers/admin_controllers.js"></script>
<script src="/scripts/controllers/data_sources.js"></script>
<script src="/scripts/controllers/query_view.js"></script>
<script src="/scripts/controllers/query_source.js"></script>
<script src="/scripts/visualizations/base.js"></script>
<script src="/scripts/visualizations/chart.js"></script>
<script src="/scripts/visualizations/cohort.js"></script>
<script src="/scripts/visualizations/map.js"></script>
<script src="/scripts/visualizations/counter.js"></script>
<script src="/scripts/visualizations/table.js"></script>
<script src="/scripts/visualizations/pivot.js"></script>
<script src="/scripts/directives/directives.js"></script>
<script src="/scripts/directives/query_directives.js"></script>
<script src="/scripts/directives/data_source_directives.js"></script>
<script src="/scripts/directives/dashboard_directives.js"></script>
<script src="/scripts/filters.js"></script>
<script src="/scripts/controllers/alerts.js"></script>
<!-- endbuild -->
<script>
@@ -185,7 +169,7 @@
currentUser.hasPermission = function(permission) {
return this.permissions.indexOf(permission) != -1;
};
}
{{ analytics|safe }}
</script>


@@ -13,10 +13,6 @@
<link rel="stylesheet" href="/styles/redash.css">
<link rel="stylesheet" href="/styles/login.css">
<!-- endbuild -->
<link rel="icon" type="image/png" sizes="32x32" href="/images/favicon-32x32.png">
<link rel="icon" type="image/png" sizes="96x96" href="/images/favicon-96x96.png">
<link rel="icon" type="image/png" sizes="16x16" href="/images/favicon-16x16.png">
</head>
<body>
@@ -30,20 +26,13 @@
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="/"><img src="/images/redash_icon_small.png"/></a>
<a class="navbar-brand" href="/"><strong>{{name}}</strong></a>
</div>
</div>
</nav>
<div class="container">
<div class="row">
{% with messages = get_flashed_messages() %}
{% if messages %}
{% for message in messages %}
<div class="alert alert-warning" role="alert">{{ message }}</div>
{% endfor %}
{% endif %}
{% endwith %}
<div class="main">
{% if show_google_openid %}
@@ -59,23 +48,10 @@
{% endif %}
{% if show_saml_login %}
<div class="row">
<a href="/saml/login">SAML Login</a>
</div>
<div class="login-or">
<hr class="hr-or">
<span class="span-or">or</span>
</div>
{% endif %}
<form role="form" method="post" name="login">
<div class="form-group">
<label for="inputEmail">Email</label>
<input type="text" class="form-control" id="inputEmail" name="email" value="{{email}}">
<label for="inputUsernameEmail">Username or email</label>
<input type="text" class="form-control" id="inputUsernameEmail" name="username" value="{{username}}">
</div>
<div class="form-group">
<!--<a class="pull-right" href="#">Forgot password?</a>-->

View File

@@ -7,16 +7,16 @@ angular.module('redash', [
'redash.renderers',
'redash.visualization',
'highchart',
'ui.select2',
'angular-growl',
'angularMoment',
'ui.bootstrap',
'smartTable.table',
'ngResource',
'ngRoute',
'ui.select',
'naif.base64'
]).config(['$routeProvider', '$locationProvider', '$compileProvider', 'growlProvider', 'uiSelectConfig',
function ($routeProvider, $locationProvider, $compileProvider, growlProvider, uiSelectConfig) {
'ui.select'
]).config(['$routeProvider', '$locationProvider', '$compileProvider', 'growlProvider',
function ($routeProvider, $locationProvider, $compileProvider, growlProvider) {
if (featureFlags.clientSideMetrics) {
Bucky.setOptions({
host: '/api/metrics'
@@ -31,8 +31,6 @@ angular.module('redash', [
return query.$promise;
};
uiSelectConfig.theme = "bootstrap";
$compileProvider.aHrefSanitizationWhitelist(/^\s*(https?|http|data):/);
$locationProvider.html5Mode(true);
growlProvider.globalTimeToLive(2000);
@@ -82,30 +80,15 @@ angular.module('redash', [
templateUrl: '/views/admin_status.html',
controller: 'AdminStatusCtrl'
});
$routeProvider.when('/alerts', {
templateUrl: '/views/alerts/list.html',
controller: 'AlertsCtrl'
});
$routeProvider.when('/alerts/:alertId', {
templateUrl: '/views/alerts/edit.html',
controller: 'AlertCtrl'
});
$routeProvider.when('/data_sources/:dataSourceId', {
templateUrl: '/views/data_sources/edit.html',
controller: 'DataSourceCtrl'
});
$routeProvider.when('/data_sources', {
templateUrl: '/views/data_sources/list.html',
controller: 'DataSourcesCtrl'
$routeProvider.when('/admin/workers', {
templateUrl: '/views/admin_workers.html',
controller: 'AdminWorkersCtrl'
});
$routeProvider.when('/', {
templateUrl: '/views/personal.html',
controller: 'PersonalIndexCtrl'
templateUrl: '/views/index.html',
controller: 'IndexCtrl'
});
$routeProvider.when('/personal', {
templateUrl: '/views/personal.html',
controller: 'PersonalIndexCtrl'

View File

@@ -17,7 +17,7 @@
};
refresh();
};
}
angular.module('redash.admin_controllers', [])
.controller('AdminStatusCtrl', ['$scope', 'Events', '$http', '$timeout', AdminStatusCtrl])

View File

@@ -1,174 +0,0 @@
(function() {
var AlertsCtrl = function($scope, Events, Alert) {
Events.record(currentUser, "view", "page", "alerts");
$scope.$parent.pageTitle = "Alerts";
$scope.alerts = []
Alert.query(function(alerts) {
var stateClass = {
'ok': 'label label-success',
'triggered': 'label label-danger',
'unknown': 'label label-warning'
};
_.each(alerts, function(alert) {
alert.class = stateClass[alert.state];
})
$scope.alerts = alerts;
});
$scope.gridConfig = {
isPaginationEnabled: true,
itemsByPage: 50,
maxSize: 8,
};
$scope.gridColumns = [
{
"label": "Name",
"map": "name",
"cellTemplate": '<a href="/alerts/{{dataRow.id}}">{{dataRow.name}}</a> (<a href="/queries/{{dataRow.query.id}}">query</a>)'
},
{
'label': 'Created By',
'map': 'user.name'
},
{
'label': 'State',
'cellTemplate': '<span ng-class="dataRow.class">{{dataRow.state | uppercase}}</span> since <span am-time-ago="dataRow.updated_at"></span>'
},
{
'label': 'Created At',
'cellTemplate': '<span am-time-ago="dataRow.created_at"></span>'
}
];
};
var AlertCtrl = function($scope, $routeParams, $location, growl, Query, Events, Alert) {
$scope.$parent.pageTitle = "Alerts";
$scope.alertId = $routeParams.alertId;
if ($scope.alertId === "new") {
Events.record(currentUser, 'view', 'page', 'alerts/new');
} else {
Events.record(currentUser, 'view', 'alert', $scope.alertId);
}
$scope.onQuerySelected = function(item) {
$scope.selectedQuery = item;
item.getQueryResultPromise().then(function(result) {
$scope.queryResult = result;
$scope.alert.options.column = $scope.alert.options.column || result.getColumnNames()[0];
});
};
if ($scope.alertId === "new") {
$scope.alert = new Alert({options: {}});
} else {
$scope.alert = Alert.get({id: $scope.alertId}, function(alert) {
$scope.onQuerySelected(new Query($scope.alert.query));
});
}
$scope.ops = ['greater than', 'less than', 'equals'];
$scope.selectedQuery = null;
$scope.getDefaultName = function() {
if (!$scope.alert.query) {
return undefined;
}
return _.template("<%= query.name %>: <%= options.column %> <%= options.op %> <%= options.value %>", $scope.alert);
};
$scope.searchQueries = function (term) {
if (!term || term.length < 3) {
return;
}
Query.search({q: term}, function(results) {
$scope.queries = results;
});
};
$scope.saveChanges = function() {
if ($scope.alert.name === undefined || $scope.alert.name === '') {
$scope.alert.name = $scope.getDefaultName();
}
$scope.alert.$save(function(alert) {
growl.addSuccessMessage("Saved.");
if ($scope.alertId === "new") {
$location.path('/alerts/' + alert.id).replace();
}
}, function() {
growl.addErrorMessage("Failed saving alert.");
});
};
};
angular.module('redash.directives').directive('alertSubscribers', ['AlertSubscription', function (AlertSubscription) {
return {
restrict: 'E',
replace: true,
templateUrl: '/views/alerts/subscribers.html',
scope: {
'alertId': '='
},
controller: function ($scope) {
$scope.subscribers = AlertSubscription.query({alertId: $scope.alertId});
}
}
}]);
angular.module('redash.directives').directive('subscribeButton', ['AlertSubscription', 'growl', function (AlertSubscription, growl) {
return {
restrict: 'E',
replace: true,
template: '<button class="btn btn-default btn-xs" ng-click="toggleSubscription()"><i ng-class="class"></i></button>',
controller: function ($scope) {
var updateClass = function() {
if ($scope.subscription) {
$scope.class = "fa fa-eye-slash";
} else {
$scope.class = "fa fa-eye";
}
}
$scope.subscribers.$promise.then(function() {
$scope.subscription = _.find($scope.subscribers, function(subscription) {
return (subscription.user.email == currentUser.email);
});
updateClass();
});
$scope.toggleSubscription = function() {
if ($scope.subscription) {
$scope.subscription.$delete(function() {
$scope.subscribers = _.without($scope.subscribers, $scope.subscription);
$scope.subscription = undefined;
updateClass();
}, function() {
growl.addErrorMessage("Failed saving subscription.");
});
} else {
$scope.subscription = new AlertSubscription({alert_id: $scope.alertId});
$scope.subscription.$save(function() {
$scope.subscribers.push($scope.subscription);
updateClass();
}, function() {
growl.addErrorMessage("Unsubscription failed.");
});
}
}
}
}
}]);
angular.module('redash.controllers')
.controller('AlertsCtrl', ['$scope', 'Events', 'Alert', AlertsCtrl])
.controller('AlertCtrl', ['$scope', '$routeParams', '$location', 'growl', 'Query', 'Events', 'Alert', AlertCtrl])
})();

View File

@@ -23,7 +23,7 @@
},
{
'label': 'Created By',
'map': 'user.name'
'map': 'user_name'
},
{
'label': 'Created At',
@@ -45,6 +45,7 @@
Query.search({q: $scope.term }, function(results) {
$scope.queries = _.map(results, function(query) {
query.created_at = moment(query.created_at);
query.user_name = query.user.name;
return query;
});
});
@@ -92,6 +93,7 @@
$scope.allQueries = _.map(queries, function (query) {
query.created_at = moment(query.created_at);
query.retrieved_at = moment(query.retrieved_at);
query.user_name = query.user.name;
return query;
});
@@ -106,7 +108,7 @@
},
{
'label': 'Created By',
'map': 'user.name'
'map': 'user_name'
},
{
'label': 'Created At',

View File

@@ -1,47 +0,0 @@
(function () {
var DataSourcesCtrl = function ($scope, $location, growl, Events, DataSource) {
Events.record(currentUser, "view", "page", "admin/data_sources");
$scope.$parent.pageTitle = "Data Sources";
$scope.dataSources = DataSource.query();
$scope.openDataSource = function(datasource) {
$location.path('/data_sources/' + datasource.id);
};
$scope.deleteDataSource = function(event, datasource) {
event.stopPropagation();
Events.record(currentUser, "delete", "datasource", datasource.id);
datasource.$delete(function(resource) {
growl.addSuccessMessage("Data source deleted succesfully.");
this.$parent.dataSources = _.without(this.dataSources, resource);
}.bind(this), function(httpResponse) {
console.log("Failed to delete data source: ", httpResponse.status, httpResponse.statusText, httpResponse.data);
growl.addErrorMessage("Failed to delete data source.");
});
}
};
var DataSourceCtrl = function ($scope, $routeParams, $http, $location, Events, DataSource) {
Events.record(currentUser, "view", "page", "admin/data_source");
$scope.$parent.pageTitle = "Data Sources";
$scope.dataSourceId = $routeParams.dataSourceId;
if ($scope.dataSourceId == "new") {
$scope.dataSource = new DataSource({options: {}});
} else {
$scope.dataSource = DataSource.get({id: $routeParams.dataSourceId});
}
$scope.$watch('dataSource.id', function(id) {
if (id != $scope.dataSourceId && id !== undefined) {
$location.path('/data_sources/' + id).replace();
}
});
};
angular.module('redash.controllers')
.controller('DataSourcesCtrl', ['$scope', '$location', 'growl', 'Events', 'DataSource', DataSourcesCtrl])
.controller('DataSourceCtrl', ['$scope', '$routeParams', '$http', '$location', 'Events', 'DataSource', DataSourceCtrl])
})();

View File

@@ -17,7 +17,7 @@
saveQuery = $scope.saveQuery;
$scope.sourceMode = true;
$scope.canEdit = currentUser.canEdit($scope.query) || featureFlags.allowAllToEditQueries;
$scope.canEdit = true;
$scope.isDirty = false;
$scope.newVisualization = undefined;

View File

@@ -15,11 +15,9 @@
maxAge = -1;
}
$scope.showLog = false;
$scope.queryResult = $scope.query.getQueryResult(maxAge, parameters);
}
$scope.dataSource = {};
$scope.query = $route.current.locals.query;
var updateSchema = function() {
@@ -49,19 +47,14 @@
$scope.isQueryOwner = (currentUser.id === $scope.query.user.id) || currentUser.hasPermission('admin');
$scope.canViewSource = currentUser.hasPermission('view_source');
$scope.dataSources = DataSource.query(function(dataSources) {
$scope.dataSources = DataSource.get(function(dataSources) {
updateSchema();
if ($scope.query.isNew()) {
$scope.query.data_source_id = $scope.query.data_source_id || dataSources[0].id;
$scope.dataSource = _.find(dataSources, function(ds) { return ds.id == $scope.query.data_source_id; });
}
$scope.query.data_source_id = $scope.query.data_source_id || dataSources[0].id;
});
// in view mode, latest dataset is always visible
// source mode changes this behavior
$scope.showDataset = true;
$scope.showLog = false;
$scope.lockButton = function(lock) {
$scope.queryExecuting = lock;
@@ -104,9 +97,6 @@
};
$scope.executeQuery = function() {
if (!$scope.query.query) {
return;
}
getQueryResult(0);
$scope.lockButton(true);
$scope.cancelling = false;
@@ -118,21 +108,21 @@
$scope.queryResult.cancelExecution();
Events.record(currentUser, 'cancel_execute', 'query', $scope.query.id);
};
$scope.archiveQuery = function(options, data) {
if (data) {
data.id = $scope.query.id;
} else {
data = $scope.query;
}
$scope.isDirty = false;
options = _.extend({}, {
successMessage: 'Query archived',
errorMessage: 'Query could not be archived'
}, options);
return Query.delete({id: data.id}, function() {
$scope.query.is_archived = true;
$scope.query.schedule = null;
@@ -159,7 +149,6 @@
}
updateSchema();
$scope.dataSource = _.find($scope.dataSources, function(ds) { return ds.id == $scope.query.data_source_id; });
$scope.executeQuery();
};
@@ -205,10 +194,6 @@
if (status === 'done' || status === 'failed') {
$scope.lockButton(false);
}
if ($scope.queryResult.getLog() != null) {
$scope.showLog = true;
}
});
$scope.openScheduleForm = function() {

View File

@@ -1,76 +0,0 @@
(function () {
'use strict';
var directives = angular.module('redash.directives');
// Angular strips data- from the directive, so data-source-form becomes sourceForm...
directives.directive('sourceForm', ['$http', 'growl', function ($http, growl) {
return {
restrict: 'E',
replace: true,
templateUrl: '/views/data_sources/form.html',
scope: {
'dataSource': '='
},
link: function ($scope) {
var setType = function(types) {
if ($scope.dataSource.type === undefined) {
$scope.dataSource.type = types[0].type;
return types[0];
}
$scope.type = _.find(types, function (t) {
return t.type == $scope.dataSource.type;
});
};
$scope.files = {};
$scope.$watchCollection('files', function() {
_.each($scope.files, function(v, k) {
if (v) {
$scope.dataSource.options[k] = v.base64;
}
});
});
$http.get('/api/data_sources/types').success(function (types) {
setType(types);
$scope.dataSourceTypes = types;
_.each(types, function (type) {
_.each(type.configuration_schema.properties, function (prop, name) {
if (name == 'password' || name == 'passwd') {
prop.type = 'password';
}
if (_.string.endsWith(name, "File")) {
prop.type = 'file';
}
prop.required = _.contains(type.configuration_schema.required, name);
});
});
});
$scope.$watch('dataSource.type', function(current, prev) {
if (prev !== current) {
if (prev !== undefined) {
$scope.dataSource.options = {};
}
setType($scope.dataSourceTypes);
}
});
$scope.saveChanges = function() {
$scope.dataSource.$save(function() {
growl.addSuccessMessage("Saved.");
}, function() {
growl.addErrorMessage("Failed saving.");
});
}
}
}
}]);
})();
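
The removed source-form directive above calls out Angular's directive-name normalization in its first comment. As a minimal, hypothetical sketch (not code from this diff, and assuming AngularJS 1.x is loaded), the `data-` prefix is stripped and the remaining dash-delimited name is camelCased, which is why markup written as `data-source-form` matches a directive registered as `sourceForm`:

```javascript
// Hypothetical standalone example (not from this repository).
// Angular 1.x normalizes element/attribute names: the optional `data-`/`x-`
// prefix is dropped and dashes become camelCase, so <data-source-form>
// matches `sourceForm`, and data-data-source="..." binds to `dataSource`.
angular.module('example', [])
  .directive('sourceForm', function () {
    return {
      restrict: 'E',
      scope: { dataSource: '=' },            // populated from data-data-source
      template: '<pre>{{ dataSource | json }}</pre>'
    };
  });

// In a template:
//   <data-source-form data-data-source="currentDataSource"></data-source-form>
```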

View File

@@ -8,7 +8,7 @@
'query': '=',
'visualization': '=?'
},
template: '<small><span class="glyphicon glyphicon-link"></span></small> <a ng-href="{{link}}" class="query-link">{{query.name}}</a>',
template: '<a ng-href="{{link}}" class="query-link">{{query.name}}</a>',
link: function(scope, element) {
scope.link = '/queries/' + scope.query.id;
if (scope.visualization) {
@@ -64,23 +64,14 @@
scope: {
'query': '=',
'lock': '=',
'schema': '=',
'syntax': '='
'schema': '='
},
template: '<textarea></textarea>',
link: {
pre: function ($scope, element) {
$scope.syntax = $scope.syntax || 'sql';
var modes = {
'sql': 'text/x-sql',
'python': 'text/x-python',
'json': 'application/json'
};
var textarea = element.children()[0];
var editorOptions = {
mode: modes[$scope.syntax],
mode: 'text/x-sql',
lineWrapping: true,
lineNumbers: true,
readOnly: false,
@@ -117,8 +108,6 @@
$scope.query.query = newValue;
});
}
$('.schema-container').css('height', $('.CodeMirror').css('height'));
});
$scope.$watch('query.query', function () {
@@ -139,12 +128,6 @@
additionalHints = _.unique(keywords);
}
codemirror.refresh();
});
$scope.$watch('syntax', function(syntax) {
codemirror.setOption('mode', modes[syntax]);
});
$scope.$watch('lock', function (locked) {
@@ -241,14 +224,7 @@
value: "60",
name: 'Every minute'
}
];
_.each([5, 10, 15, 30], function(i) {
$scope.refreshOptions.push({
value: String(i*60),
name: "Every " + i + " minutes"
})
});
]
_.each(_.range(1, 13), function (i) {
$scope.refreshOptions.push({

View File

@@ -145,7 +145,7 @@
if (!hasTotalsAlready) {
this.addSeries({
data: _.sortBy(_.values(data), 'x'),
data: _.values(data),
type: 'line',
name: 'Total'
}, false)
@@ -308,22 +308,21 @@
// We check either for true or undefined for backward compatibility.
var series = scope.series;
if (chartOptions['sortX'] === true || chartOptions['sortX'] === undefined) {
var seriesCopy = [];
// If this is a chart that has just one row for multiple columns, sort
// by the Y values. For example:
//
// A | B | C
// 20 | 30 | 15
//
// Will be sorted:
// C | A | B
// 15 | 20 | 30
var sortable = _.every(series, function(s) { return s.data.length == 1 });
_.each(series, function (s) {
// make a copy of series data, so we don't override original.
var fieldName = 'x';
if (s.data.length > 0 && _.has(s.data[0], 'name')) {
fieldName = 'name';
};
if (sortable) {
series = _.sortBy(series, function (s) {
return s.data[0].y
var sorted = _.extend({}, s, {data: _.sortBy(s.data, fieldName)});
seriesCopy.push(sorted);
});
series = seriesCopy;
}
if (!('xAxis' in chartOptions && 'type' in chartOptions['xAxis'])) {
@@ -360,23 +359,6 @@
});
}
}
if (chartOptions['sortX'] === true || chartOptions['sortX'] === undefined) {
var seriesCopy = [];
_.each(series, function (s) {
// make a copy of series data, so we don't override original.
var fieldName = 'x';
if (s.data.length > 0 && _.has(s.data[0], 'name')) {
fieldName = 'name';
};
var sorted = _.extend({}, s, {data: _.sortBy(s.data, fieldName)});
seriesCopy.push(sorted);
});
series = seriesCopy;
}
scope.chart.counters.color = 0;
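
The comment in the hunk above explains the special case where a query returns a single row across several columns: each column becomes a one-point series, and the series themselves are sorted by their y values instead of sorting each series by x. A small illustrative sketch of that behaviour, assuming underscore.js is loaded globally as it is in rd_ui (this is not code from the diff):

```javascript
// Illustrative only; assumes underscore.js is available as the global `_`.
var series = [
  { name: 'A', data: [{ x: 0, y: 20 }] },
  { name: 'B', data: [{ x: 0, y: 30 }] },
  { name: 'C', data: [{ x: 0, y: 15 }] }
];

// Every series holds exactly one point, so sort the series by that point's y.
var sortable = _.every(series, function (s) { return s.data.length == 1; });
if (sortable) {
  series = _.sortBy(series, function (s) { return s.data[0].y; });
}

console.log(_.pluck(series, 'name'));  // ["C", "A", "B"], as in the comment's example
```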

File diff suppressed because it is too large.

View File

@@ -1,28 +1,4 @@
(function () {
function QueryResultError(errorMessage) {
this.errorMessage = errorMessage;
}
QueryResultError.prototype.getError = function() {
return this.errorMessage;
};
QueryResultError.prototype.getStatus = function() {
return 'failed';
};
QueryResultError.prototype.getData = function() {
return null;
};
QueryResultError.prototype.getLog = function() {
return null;
};
QueryResultError.prototype.getChartData = function() {
return null;
};
var QueryResult = function ($resource, $timeout, $q) {
var QueryResultResource = $resource('/api/query_results/:id', {id: '@id'}, {'post': {'method': 'POST'}});
var Job = $resource('/api/jobs/:id', {id: '@id'});
@@ -36,8 +12,6 @@
var columnTypes = {};
// TODO: we should stop manipulating incoming data, and switch to relying on the column type set by the backend.
// This logic is prone to errors, and better be removed. Kept for now, for backward compatibility.
_.each(this.query_result.data.rows, function (row) {
_.each(row, function (v, k) {
if (angular.isNumber(v)) {
@@ -56,9 +30,7 @@
_.each(this.query_result.data.columns, function(column) {
if (columnTypes[column.name]) {
if (column.type == null || column.type == 'string') {
column.type = columnTypes[column.name];
}
column.type = columnTypes[column.name];
}
});
@@ -68,7 +40,7 @@
} else {
this.status = undefined;
}
};
}
function QueryResult(props) {
this.deferred = $q.defer();
@@ -119,14 +91,6 @@
return this.job.error;
}
QueryResult.prototype.getLog = function() {
if (!this.query_result.data || !this.query_result.data.log || this.query_result.data.log.length == 0) {
return null;
}
return this.query_result.data.log;
}
QueryResult.prototype.getUpdatedAt = function () {
return this.query_result.retrieved_at || this.job.updated_at * 1000.0 || this.updatedAt;
}
@@ -374,15 +338,10 @@
return this.deferred.promise;
}
QueryResult.get = function (data_source_id, query, maxAge, queryId) {
QueryResult.get = function (data_source_id, query, maxAge) {
var queryResult = new QueryResult();
var params = {'data_source_id': data_source_id, 'query': query, 'max_age': maxAge};
if (queryId !== undefined) {
params['query_id'] = queryId;
};
QueryResultResource.post(params, function (response) {
QueryResultResource.post({'data_source_id': data_source_id, 'query': query, 'max_age': maxAge}, function (response) {
queryResult.update(response);
if ('job' in response) {
@@ -438,23 +397,20 @@
return '/queries/' + this.id + '/source';
};
Query.prototype.isNew = function() {
return this.id === undefined;
};
Query.prototype.hasDailySchedule = function() {
return (this.schedule && this.schedule.match(/\d\d:\d\d/) !== null);
};
}
Query.prototype.scheduleInLocalTime = function() {
var parts = this.schedule.split(':');
return moment.utc().hour(parts[0]).minute(parts[1]).local().format('HH:mm');
};
}
Query.prototype.getQueryResult = function (maxAge, parameters) {
if (!this.query) {
return;
}
// if (ttl == undefined) {
// ttl = this.ttl;
// }
var queryText = this.query;
var queryParameters = this.getParameters();
@@ -488,9 +444,7 @@
this.queryResult = QueryResult.getById(this.latest_query_data_id);
}
} else if (this.data_source_id) {
this.queryResult = QueryResult.get(this.data_source_id, queryText, maxAge, this.id);
} else {
return new QueryResultError("Please select data source to run this query.");
this.queryResult = QueryResult.get(this.data_source_id, queryText, maxAge);
}
return this.queryResult;
@@ -527,41 +481,14 @@
var DataSource = function ($resource) {
var actions = {
'get': {'method': 'GET', 'cache': false, 'isArray': false},
'query': {'method': 'GET', 'cache': false, 'isArray': true},
'get': {'method': 'GET', 'cache': true, 'isArray': true},
'getSchema': {'method': 'GET', 'cache': true, 'isArray': true, 'url': '/api/data_sources/:id/schema'}
};
var DataSourceResource = $resource('/api/data_sources/:id', {id: '@id'}, actions);
return DataSourceResource;
};
var AlertSubscription = function ($resource) {
var resource = $resource('/api/alerts/:alertId/subscriptions/:userId', {alertId: '@alert_id', userId: '@user.id'});
return resource;
};
var Alert = function ($resource, $http) {
var actions = {
save: {
method: 'POST',
transformRequest: [function(data) {
var newData = _.extend({}, data);
if (newData.query_id === undefined) {
newData.query_id = newData.query.id;
delete newData.query;
}
return newData;
}].concat($http.defaults.transformRequest)
}
};
var resource = $resource('/api/alerts/:id', {id: '@id'}, actions);
return resource;
};
}
var Widget = function ($resource, Query) {
var WidgetResource = $resource('/api/widgets/:id', {id: '@id'});
@@ -588,7 +515,5 @@
.factory('QueryResult', ['$resource', '$timeout', '$q', QueryResult])
.factory('Query', ['$resource', 'QueryResult', 'DataSource', Query])
.factory('DataSource', ['$resource', DataSource])
.factory('Alert', ['$resource', '$http', Alert])
.factory('AlertSubscription', ['$resource', AlertSubscription])
.factory('Widget', ['$resource', 'Query', Widget]);
})();

File diff suppressed because one or more lines are too long

View File

@@ -55,22 +55,6 @@
}];
};
var VisualizationName = function(Visualization) {
return {
restrict: 'E',
scope: {
visualization: '='
},
template: '<small>{{name}}</small>',
replace: false,
link: function (scope) {
if (Visualization.visualizations[scope.visualization.type].name != scope.visualization.name) {
scope.name = scope.visualization.name;
}
}
}
}
var VisualizationRenderer = function ($location, Visualization) {
return {
restrict: 'E',
@@ -84,9 +68,46 @@
template: '<filters></filters>\n' + Visualization.renderVisualizationsTemplate,
replace: false,
link: function (scope) {
scope.select2Options = {
width: '50%'
};
function readURL() {
var searchFilters = angular.fromJson($location.search().filters);
if (searchFilters) {
_.forEach(scope.filters, function(filter) {
var value = searchFilters[filter.friendlyName];
if (value) {
filter.current = value;
}
});
}
}
function updateURL(filters) {
var current = {};
_.each(filters, function(filter) {
if (filter.current) {
current[filter.friendlyName] = filter.current;
}
});
var newSearch = angular.extend($location.search(), {
filters: angular.toJson(current)
});
$location.search(newSearch);
}
scope.$watch('queryResult && queryResult.getFilters()', function (filters) {
if (filters) {
scope.filters = filters;
if (filters.length && false) {
readURL();
// start watching for changes and update URL
scope.$watch('filters', updateURL, true);
}
}
});
}
@@ -117,7 +138,7 @@
query: '=',
queryResult: '=',
visualization: '=?',
openEditor: '@',
openEditor: '=?',
onNewSuccess: '=?'
},
link: function (scope, element, attrs) {
@@ -146,13 +167,9 @@
scope.$watch('visualization.type', function (type, oldType) {
// if not edited by user, set name to match type
if (type && oldType != type && scope.visualization && !scope.visForm.name.$dirty) {
scope.visualization.name = _.string.titleize(scope.visualization.type);
// poor man's titlecase
scope.visualization.name = scope.visualization.type[0] + scope.visualization.type.slice(1).toLowerCase();
}
if (type && oldType != type && scope.visualization) {
scope.visualization.options = Visualization.visualizations[scope.visualization.type].defaultOptions;
}
});
scope.submit = function () {
@@ -191,7 +208,6 @@
.provider('Visualization', VisualizationProvider)
.directive('visualizationRenderer', ['$location', 'Visualization', VisualizationRenderer])
.directive('visualizationOptionsEditor', ['Visualization', VisualizationOptionsEditor])
.directive('visualizationName', ['Visualization', VisualizationName])
.directive('filters', Filters)
.directive('editVisulatizationForm', ['Events', 'Visualization', 'growl', EditVisualizationForm])
})();
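
The readURL/updateURL pair in the hunk above persists dashboard filter selections in the page URL: updateURL serializes the current value of each filter, keyed by its friendlyName, into a `filters` search parameter, and readURL restores it on load. Note that in the code shown the sync is guarded by `filters.length && false`, so it is effectively switched off. A short sketch of the round trip, assuming AngularJS for its JSON helpers (not code from this diff):

```javascript
// Illustrative only; assumes AngularJS is loaded for angular.toJson/fromJson.
var filters = [
  { friendlyName: 'Country', current: 'US' },
  { friendlyName: 'Status',  current: null }
];

// updateURL(): keep only filters that currently have a value.
var current = {};
filters.forEach(function (filter) {
  if (filter.current) { current[filter.friendlyName] = filter.current; }
});
var param = angular.toJson(current);  // '{"Country":"US"}'
// $location.search({filters: param}) would produce ...?filters=%7B%22Country%22%3A%22US%22%7D

// readURL(): parse the same JSON back and re-apply it by friendlyName.
var restored = angular.fromJson(param);
filters.forEach(function (filter) {
  if (restored[filter.friendlyName]) { filter.current = restored[filter.friendlyName]; }
});
```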

View File

@@ -112,6 +112,9 @@
scope.columnTypes = {
"X": "x",
// "X (Date time)": "x",
// "X (Linear)": "x-linear",
// "X (Category)": "x-category",
"Y": "y",
"Series": "series",
"Unused": "unused"
@@ -163,7 +166,7 @@
scope.visualization.options.seriesOptions[s] = {'type': scope.visualization.options.globalSeriesType, 'yAxis': 0};
}
scope.visualization.options.seriesOptions[s].zIndex = scope.visualization.options.seriesOptions[s].zIndex === undefined ? i : scope.visualization.options.seriesOptions[s].zIndex;
scope.visualization.options.seriesOptions[s].index = scope.visualization.options.seriesOptions[s].index === undefined ? i : scope.visualization.options.seriesOptions[s].index;
});
scope.zIndexes = _.range(scope.series.length);
scope.yAxes = [[0, 'left'], [1, 'right']];
@@ -224,12 +227,6 @@
}
});
scope.visualization.options.xAxis = scope.visualization.options.xAxis || {};
scope.visualization.options.xAxis.labels = scope.visualization.options.xAxis.labels || {};
if (scope.visualization.options.xAxis.labels.enabled === undefined) {
scope.visualization.options.xAxis.labels.enabled = true;
}
scope.xAxisType = (scope.visualization.options.xAxis && scope.visualization.options.xAxis.type) || scope.xAxisType;
xAxisUnwatch = scope.$watch("xAxisType", function (xAxisType) {

View File

@@ -26,10 +26,7 @@
if ($scope.queryResult.getData() == null) {
} else {
var sortedData = _.sortBy($scope.queryResult.getData(),function(r) {
return r['date'] + r['day_number'] ;
});
var sortedData = _.sortBy($scope.queryResult.getData(), "date");
var grouped = _.groupBy(sortedData, "date");
var maxColumns = _.reduce(grouped, function(memo, data){
return (data.length > memo)? data.length : memo;

View File

@@ -1,238 +0,0 @@
'use strict';
(function() {
var module = angular.module('redash.visualization');
module.config(['VisualizationProvider', function(VisualizationProvider) {
var renderTemplate =
'<map-renderer ' +
'options="visualization.options" query-result="queryResult">' +
'</map-renderer>';
var editTemplate = '<map-editor></map-editor>';
var defaultOptions = {
'height': 500,
'draw': 'Marker',
'classify':'none'
};
VisualizationProvider.registerVisualization({
type: 'MAP',
name: 'Map',
renderTemplate: renderTemplate,
editorTemplate: editTemplate,
defaultOptions: defaultOptions
});
}
]);
module.directive('mapRenderer', function() {
return {
restrict: 'E',
templateUrl: '/views/visualizations/map.html',
link: function($scope, elm, attrs) {
var setBounds = function(){
var b = $scope.visualization.options.bounds;
if(b){
$scope.map.fitBounds([[b._southWest.lat, b._southWest.lng],[b._northEast.lat, b._northEast.lng]]);
} else if ($scope.features.length > 0){
var group= new L.featureGroup($scope.features);
$scope.map.fitBounds(group.getBounds());
}
};
$scope.$watch('[queryResult && queryResult.getData(), visualization.options.draw,visualization.options.latColName,'+
'visualization.options.lonColName,visualization.options.classify,visualization.options.classify]',
function() {
var marker = function(lat,lon){
if (lat == null || lon == null) return;
return L.marker([lat, lon]);
};
var heatpoint = function(lat,lon,obj){
if (lat == null || lon == null) return;
var color = 'red';
if (obj &&
obj[$scope.visualization.options.classify] &&
$scope.visualization.options.classification){
var v = $.grep($scope.visualization.options.classification,function(e){
return e.value == obj[$scope.visualization.options.classify];
});
if (v.length >0) color = v[0].color;
}
var style = {
fillColor:color,
fillOpacity:0.5,
stroke:false
};
return L.circleMarker([lat,lon],style)
};
var color = function(val){
// taken from http://jsfiddle.net/xgJ2e/2/
var h= Math.floor((100 - val) * 120 / 100);
var s = Math.abs(val - 50)/50;
var v = 1;
var rgb, i, data = [];
if (s === 0) {
rgb = [v,v,v];
} else {
h = h / 60;
i = Math.floor(h);
data = [v*(1-s), v*(1-s*(h-i)), v*(1-s*(1-(h-i)))];
switch(i) {
case 0:
rgb = [v, data[2], data[0]];
break;
case 1:
rgb = [data[1], v, data[0]];
break;
case 2:
rgb = [data[0], v, data[2]];
break;
case 3:
rgb = [data[0], data[1], v];
break;
case 4:
rgb = [data[2], data[0], v];
break;
default:
rgb = [v, data[0], data[1]];
break;
}
}
return '#' + rgb.map(function(x){
return ("0" + Math.round(x*255).toString(16)).slice(-2);
}).join('');
};
// Following line is used to avoid "Couldn't autodetect L.Icon.Default.imagePath" error
// https://github.com/Leaflet/Leaflet/issues/766#issuecomment-7741039
L.Icon.Default.imagePath = L.Icon.Default.imagePath || "//api.tiles.mapbox.com/mapbox.js/v2.2.1/images";
function getBounds(e) {
$scope.visualization.options.bounds = $scope.map.getBounds();
}
var queryData = $scope.queryResult.getData();
var classify = $scope.visualization.options.classify;
if (queryData) {
$scope.visualization.options.classification = [];
for (var row in queryData) {
if (queryData[row][classify] &&
$.grep($scope.visualization.options.classification, function (e) {
return e.value == queryData[row][classify]
}).length == 0) {
$scope.visualization.options.classification.push({value: queryData[row][classify], color: null});
}
}
$.each($scope.visualization.options.classification, function (i, c) {
c.color = color(parseInt((i / $scope.visualization.options.classification.length) * 100));
});
if (!$scope.map) {
$scope.map = L.map(elm[0].children[0].children[0])
}
L.tileLayer('//{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', {
attribution: '&copy; <a href="http://osm.org/copyright">OpenStreetMap</a> contributors'
}).addTo($scope.map);
$scope.features = $scope.features || [];
var tmp_features = [];
var lat_col = $scope.visualization.options.latColName || 'lat';
var lon_col = $scope.visualization.options.lonColName || 'lon';
for (var row in queryData) {
var feature;
if ($scope.visualization.options.draw == 'Marker') {
feature = marker(queryData[row][lat_col], queryData[row][lon_col])
} else if ($scope.visualization.options.draw == 'Color') {
feature = heatpoint(queryData[row][lat_col], queryData[row][lon_col], queryData[row])
}
if (!feature) continue;
var obj_description = '<ul style="list-style-type: none;padding-left: 0">';
for (var k in queryData[row]){
obj_description += "<li>" + k + ": " + queryData[row][k] + "</li>";
}
obj_description += '</ul>';
feature.bindPopup(obj_description);
tmp_features.push(feature);
}
$.each($scope.features, function (i, f) {
$scope.map.removeLayer(f);
});
$scope.features = tmp_features;
$.each($scope.features, function (i, f) {
f.addTo($scope.map)
});
setBounds();
$scope.map.on('focus',function(){
$scope.map.on('moveend', getBounds);
});
$scope.map.on('blur',function(){
$scope.map.off('moveend', getBounds);
});
// We redraw the map if it was loaded in a hidden tab
if ($('a[href="#'+$scope.visualization.id+'"]').length > 0) {
$('a[href="#'+$scope.visualization.id+'"]').on('click', function () {
setTimeout(function() {
$scope.map.invalidateSize(false);
setBounds();
},500);
});
}
}
}, true);
$scope.$watch('visualization.options.height', function() {
if (!$scope.map) return;
$scope.map.invalidateSize(false);
setBounds();
});
}
}
});
module.directive('mapEditor', function() {
return {
restrict: 'E',
templateUrl: '/views/visualizations/map_editor.html',
link: function($scope, elm, attrs) {
$scope.draw_options = ['Marker','Color'];
$scope.classify_columns = $scope.queryResult.columnNames.concat('none');
}
}
});
})();

View File

@@ -14,12 +14,7 @@ a.page-title {
}
a.navbar-brand {
padding: 5px 5px 0px 0px;
margin-left: 0px !important;
}
a.navbar-brand img {
height: 40px;
font-style: italic;
}
.graph {
@@ -97,16 +92,7 @@ a.navbar-brand img {
}
.panel-heading .query-link:hover {
text-decoration: underline;
}
.list-group-item.clickable {
cursor: pointer;
}
.list-group-item.clickable:focus,
.list-group-item.clickable:hover {
background-color: #f5f5f5;
text-decoration: none;
}
/* angular-growl */
@@ -137,23 +123,6 @@ a.navbar-brand img {
}
/* Visualization Filters */
.filters-container {
display: flex;
flex-wrap: wrap;
}
.filter {
width: 33%;
padding-left: 5px;
padding-bottom: 5px;
}
.filter > div {
width: 100%;
}
/* Gridster */
.gridster ul {
@@ -187,7 +156,7 @@ li.widget:hover {
/* CodeMirror */
.CodeMirror {
border: 1px solid #eee;
height: auto;
/*height: auto;*/
min-height: 300px;
margin-bottom: 10px;
}
@@ -339,28 +308,18 @@ counter-renderer counter-name {
height: 100%;
}
.schema-container {
height: 300px;
}
.schema-browser {
height: 100%;
overflow-y: auto;
overflow-x: hidden;
height: 300px;
overflow: scroll;
}
div.table-name {
overflow: hidden;
overflow: scroll;
text-overflow: ellipsis;
white-space: nowrap;
cursor: pointer;
}
.blankslate {
text-align: center;
padding: 30px;
}
/*
bootstrap's hidden-xs class adds display:block when not hidden
use this class when you need to keep the original display value
@@ -370,7 +329,3 @@ use this class when you need to keep the original display value
display: none !important;
}
}
.log-container {
margin-bottom: 50px;
}

View File

@@ -1,58 +0,0 @@
<div class="container">
<ol class="breadcrumb">
<li><a href="/alerts">Alerts</a></li>
<li class="active">{{alert.name || getDefaultName() || "New"}}</li>
</ol>
<div class="row">
<div class="col-md-8">
<form name="alertForm" ng-submit="saveChanges()" class="form">
<div class="form-group">
<label>Query</label>
<ui-select ng-model="alert.query" reset-search-input="false" on-select="onQuerySelected($item)">
<ui-select-match placeholder="Search a query by name">{{$select.selected.name}}</ui-select-match>
<ui-select-choices repeat="q in queries"
refresh="searchQueries($select.search)"
refresh-delay="0">
<div ng-bind-html="q.name | highlight: $select.search | trustAsHtml"></div>
</ui-select-choices>
</ui-select>
</div>
<div class="form-group" ng-show="selectedQuery">
<label>Name</label>
<input type="string" placeholder="{{getDefaultName()}}" class="form-control" ng-model="alert.name">
</div>
<div ng-show="queryResult" class="form-horizontal">
<div class="form-group">
<label class="control-label col-md-2">Value column</label>
<div class="col-md-4">
<select ng-options="name for name in queryResult.getColumnNames()" ng-model="alert.options.column" class="form-control"></select>
</div>
<label class="control-label col-md-2">Value</label>
<div class="col-md-4">
<p class="form-control-static">{{queryResult.getData()[0][alert.options.column]}}</p>
</div>
</div>
<div class="form-group">
<label class="control-label col-md-2">Op</label>
<div class="col-md-4">
<select ng-options="name for name in ops" ng-model="alert.options.op" class="form-control"></select>
</div>
<label class="control-label col-md-2">Reference</label>
<div class="col-md-4">
<input type="number" class="form-control" ng-model="alert.options.value" placeholder="reference value" required/>
</div>
</div>
</div>
<div class="form-group">
<button class="btn btn-primary" ng-disabled="!alertForm.$valid">Save</button>
</div>
</form>
</div>
<div class="col-md-4" ng-if="alert.id">
<alert-subscribers alert-id="alert.id"></alert-subscribers>
</div>
</div>
</div>

View File

@@ -1,16 +0,0 @@
<div class="container">
<ol class="breadcrumb">
<li class="active">Alerts</li>
</ol>
<div class="row">
<div class="col-md-12">
<p>
<a href="/alerts/new" class="btn btn-default"><i class="fa fa-plus"></i> New Alert</a>
</p>
<smart-table rows="alerts" columns="gridColumns"
config="gridConfig"
class="table table-condensed table-hover"></smart-table>
</div>
</div>
</div>

View File

@@ -1,4 +0,0 @@
<div>
<strong>Subscribers</strong> <subscribe-button alert-id="alertId" subscribers="subscribers"></subscribe-button><br/>
<img ng-src="{{s.user.gravatar_url}}" class="img-circle" alt="{{s.user.name}}" ng-repeat="s in subscribers"/>
</div>

View File

@@ -28,7 +28,6 @@
<p>
<span ng-hide="currentUser.hasPermission('view_query')">{{query.name}}</span>
<query-link query="query" visualization="widget.visualization" ng-show="currentUser.hasPermission('view_query')"></query-link>
<visualization-name visualization="widget.visualization"/>
</p>
<div class="text-muted" ng-bind-html="query.description | markdown"></div>
</h3>

View File

@@ -1,11 +0,0 @@
<div class="container">
<ol class="breadcrumb">
<li><a href="/data_sources">Data Sources</a></li>
<li class="active">{{dataSource.name || "New"}}</li>
</ol>
<div class="row">
<div class="col-md-8">
<data-source-form data-data-source="dataSource" />
</div>
</div>
</div>

View File

@@ -1,20 +0,0 @@
<form name="dataSourceForm" ng-submit="saveChanges()">
<div class="form-group">
<label for="dataSourceName">Name</label>
<input type="string" class="form-control" name="dataSourceName" ng-model="dataSource.name" required>
</div>
<div class="form-group">
<label for="type">Type</label>
<select name="type" class="form-control" ng-options="type.type as type.name for type in dataSourceTypes" ng-model="dataSource.type"></select>
</div>
<div class="form-group" ng-class='{"has-error": !inner.input.$valid}' ng-form="inner" ng-repeat="(name, input) in type.configuration_schema.properties">
<label>{{input.title || name | capitalize}}</label>
<input name="input" type="{{input.type}}" class="form-control" ng-model="dataSource.options[name]" ng-required="input.required"
ng-if="input.type !== 'file'" accesskey="tab">
<input name="input" type="file" class="form-control" ng-model="files[name]" ng-required="input.required"
base-sixty-four-input
ng-if="input.type === 'file'">
</div>
<button class="btn btn-primary" ng-disabled="!dataSourceForm.$valid">Save</button>
</form>

View File

@@ -1,18 +0,0 @@
<div class="container">
<ol class="breadcrumb">
<li class="active">Data Sources</li>
</ol>
<div class="row">
<div class="col-md-4">
<div class="list-group">
<div class="list-group-item clickable" ng-repeat="dataSource in dataSources" ng-click="openDataSource(dataSource)">
<i class="fa fa-database"></i> {{dataSource.name}}
<button class="btn btn-xs btn-danger pull-right" ng-click="deleteDataSource($event, dataSource)">Delete</button>
</div>
<a ng-href="/data_sources/new" class="list-group-item">
<i class="fa fa-plus"></i> Add Data Source
</a>
</div>
</div>
</div>
</div>

View File

@@ -1,16 +1,9 @@
<div class="container">
<div class="row">
<p>
<a href="/queries/new" class="btn btn-default">New Query</a>
<button ng-show="currentUser.hasPermission('create_dashboard')" type="button" class="btn btn-default" data-toggle="modal" href="#new_dashboard_dialog">New Dashboard</button>
<a href="/alerts/new" class="btn btn-default">New Alert</a>
</p>
</div>
<div class="row">
<div class="list-group col-md-6">
<div class="list-group-item active">
Recent Dashboards
<button ng-show="currentUser.hasPermission('create_dashboard')" type="button" class="btn btn-sm btn-link" data-toggle="modal" href="#new_dashboard_dialog" tooltip="New Dashboard"><span class="glyphicon glyphicon-plus-sign"></span></button>
</div>
<div class="list-group-item" ng-repeat="dashboard in recentDashboards" >
<button type="button" class="close delete-button" aria-hidden="true" ng-show="dashboard.canEdit()" ng-click="archiveDashboard(dashboard)" tooltip="Delete Dashboard">&times;</button>

View File

@@ -79,21 +79,21 @@
</p>
<p>
<query-editor query="query" schema="schema" syntax="dataSource.syntax" lock="queryFormatting"></query-editor>
<query-editor query="query" schema="schema" lock="queryFormatting"></query-editor>
</p>
</div>
</div>
<div class="col-md-3 schema-container" ng-show="hasSchema">
<div ng-show="schema.length < 200">
<div class="col-md-3" ng-show="hasSchema">
<div>
<input type="text" placeholder="Search schema..." class="form-control" ng-model="schemaFilter">
</div>
<div class="schema-browser">
<div ng-repeat="table in schema | filter:schemaFilter track by table.name">
<div ng-repeat="table in schema | filter:schemaFilter">
<div class="table-name" ng-click="table.collapsed = !table.collapsed">
<i class="fa fa-table"></i> <strong><span title="{{table.name}}">{{table.name}}</span></strong>
</div>
<div collapse="table.collapsed && !schemaFilter">
<div ng-repeat="column in table.columns track by column" style="padding-left:16px;">{{column}}</div>
<div collapse="table.collapsed">
<div ng-repeat="column in table.columns | filter:schemaFilter" style="padding-left:16px;">{{column}}</div>
</div>
</div>
</div>
@@ -138,7 +138,7 @@
</p>
<p>
<i class="fa fa-database"></i>
<span class="glyphicon glyphicon-hdd"></span>
<span class="text-muted">Data Source</span>
<select ng-disabled="!isQueryOwner" ng-model="query.data_source_id" ng-change="updateDataSource()" ng-options="ds.id as ds.name for ds in dataSources"></select>
</p>
@@ -192,16 +192,6 @@
</div>
<div class="alert alert-danger" ng-show="queryResult.getError()">Error running query: <strong>{{queryResult.getError()}}</strong></div>
<div class="row log-container" ng-show="showLog">
<span ng-show="showLog">Log Information:</span>
<table>
<tbody>
<tr ng-repeat="l in queryResult.getLog()">
<td>{{l}}</td>
</tr>
</tbody>
</table>
</div>
<!-- tabs and data -->
<div ng-show="showDataset">
<div class="row">

View File

@@ -54,14 +54,6 @@
ng-model="visualization.options.sortX">
</div>
</div>
<div class="form-group">
<label class="control-label col-sm-2">Show X Axis Labels</label>
<div class="col-sm-10">
<input name="sortX" type="checkbox" class="form-control"
ng-model="visualization.options.xAxis.labels.enabled">
</div>
</div>
</div>
</div>
@@ -108,15 +100,6 @@
class="form-control"></select>
</div>
</div>
<div class="form-group">
<label class="control-label col-sm-3">Index</label>
<div class="col-sm-9">
<select required ng-model="visualization.options.seriesOptions[seriesName].index"
ng-options="o as o for o in zIndexes"
class="form-control"></select>
</div>
</div>
<div class="form-group">
<label class="control-label col-sm-3">y Axis</label>

View File

@@ -1,7 +1,7 @@
<div>
<span ng-click="openEditor=!openEditor" class="details-toggle" ng-class="{open: openEditor}">Edit</span>
<form ng-show="openEditor" role="form" name="visForm" ng-submit="submit()">
<form ng-if="openEditor" role="form" name="visForm" ng-submit="submit()">
<div class="form-group">
<label class="control-label">Name</label>
<input name="name" type="text" class="form-control" ng-model="visualization.name" placeholder="{{visualization.type | capitalize}}">

View File

@@ -1,17 +1,8 @@
<div class="well well-sm filters-container" ng-show="filters">
<div class="filter" ng-repeat="filter in filters">
<ui-select ng-model="filter.current" ng-if="!filter.multiple">
<ui-select-match placeholder="Select value for {{filter.friendlyName}}...">{{filter.friendlyName}}: {{$select.selected}}</ui-select-match>
<ui-select-choices repeat="value in filter.values | filter: $select.search track by $index">
{{value}}
</ui-select-choices>
</ui-select>
<ui-select ng-model="filter.current" multiple ng-if="filter.multiple">
<ui-select-match placeholder="Select value for {{filter.friendlyName}}...">{{filter.friendlyName}}: {{$item}}</ui-select-match>
<ui-select-choices repeat="value in filter.values | filter: $select.search track by $index">
{{value}}
</ui-select-choices>
</ui-select>
<div class="well well-sm" ng-show="filters">
<div ng-repeat="filter in filters">
{{filter.friendlyName}}:
<select ui-select2='select2Options' ng-model="filter.current" ng-multiple="{{filter.multiple}}">
<option ng-repeat="value in filter.values" value="{{value}}">{{value}}</option>
</select>
</div>
</div>

View File

@@ -1,3 +0,0 @@
<div style='margin:1%;width:98%;height:{{visualization.options.height}}px'>
<div style="width:100%; height:100%;"></div>
</div>

View File

@@ -1,55 +0,0 @@
<div class="form-horizontal">
<div class="form-group">
<label class="col-lg-2">Map height (px)</label>
<div class="col-sm-4">
<input class="form-control" type="number" ng-model = "visualization.options.height" />
</div>
</div>
<div class="form-group">
<label class="col-lg-2">Draw option</label>
<div class="col-sm-4">
<select ng-options="opt for opt in draw_options" ng-model="visualization.options.draw" class="form-control"></select>
</div>
</div>
<div class="form-group">
<label class="col-lg-2">Latitude column name</label>
<div class="col-sm-4">
<select ng-options="name for name in queryResult.columnNames" ng-model="visualization.options.latColName" class="form-control"></select>
</div>
</div>
<div class="form-group">
<label class="col-lg-2">Longitude column name</label>
<div class="col-sm-4">
<select ng-options="name for name in queryResult.columnNames" ng-model="visualization.options.lonColName" class="form-control"></select>
</div>
</div>
<div ng-show = "visualization.options.draw == 'Color'">
<div class="form-group">
<label class="col-lg-2">Classify by column</label>
<div class="col-sm-4">
<select ng-options="name for name in classify_columns" ng-model="visualization.options.classify" class="form-control"></select>
</div>
</div>
<div class="row" >
<div class="col-lg-6">
<div ng-repeat="element in visualization.options.classification" class="list-group">
<div class="list-group-item active">
{{element.value}}
</div>
<div class="list-group-item">
<div class="form-group">
<label class="col-lg-4">Color</label>
<div class="col-sm-4">
<input class="form-control" style="background-color:{{element.color}};" type="text" ng-model = "element.color" />
</div>
</div>
</div>
</div>
</div>
</div>
</div>
</div>

View File

@@ -19,18 +19,17 @@
"cornelius": "https://github.com/restorando/cornelius.git",
"gridster": "0.2.0",
"mousetrap": "~1.4.6",
"angular-ui-select2": "~0.0.5",
"jquery-ui": "~1.10.4",
"underscore.string": "~2.3.3",
"marked": "~0.3.2",
"bucky": "~0.2.6",
"pace": "~0.5.1",
"angular-ui-select": "0.8.2",
"font-awesome": "~4.2.0",
"mustache": "~1.0.0",
"canvg": "gabelerner/canvg",
"angular-ui-bootstrap-bower": "~0.12.1",
"leaflet": "~0.7.3",
"angular-base64-upload": "~0.1.11",
"angular-ui-select": "0.8.2"
"angular-ui-bootstrap-bower": "~0.12.1"
},
"devDependencies": {
"angular-mocks": "1.2.18",

Binary file not shown (image file; size before: 1.1 KiB).

View File

@@ -36,7 +36,6 @@
"node": ">=0.10.0"
},
"scripts": {
"test": "grunt test",
"bower": "bower"
"test": "grunt test"
}
}

View File

@@ -2,12 +2,11 @@ import logging
import urlparse
import redis
from statsd import StatsClient
from flask_mail import Mail
from redash import settings
from redash.query_runner import import_query_runners
__version__ = '0.7.1'
__version__ = '0.6.0'
def setup_logging():
@@ -33,8 +32,6 @@ def create_redis_connection():
setup_logging()
redis_connection = create_redis_connection()
mail = Mail()
mail.init_mail(settings.all_settings())
statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX)
import_query_runners(settings.QUERY_RUNNERS)

View File

@@ -1,110 +0,0 @@
import json
from flask_admin.contrib.peewee import ModelView
from flask.ext.admin import Admin
from flask_admin.contrib.peewee.form import CustomModelConverter
from flask_admin.form.widgets import DateTimePickerWidget
from playhouse.postgres_ext import ArrayField, DateTimeTZField
from wtforms import fields
from wtforms.widgets import TextInput
from redash import models
from redash import query_runner
from redash.permissions import require_permission
class ArrayListField(fields.Field):
widget = TextInput()
def _value(self):
if self.data:
return u', '.join(self.data)
else:
return u''
def process_formdata(self, valuelist):
if valuelist:
self.data = [x.strip() for x in valuelist[0].split(',')]
else:
self.data = []
class JSONTextAreaField(fields.TextAreaField):
def process_formdata(self, valuelist):
if valuelist:
try:
json.loads(valuelist[0])
except ValueError:
raise ValueError(self.gettext(u'Invalid JSON'))
self.data = valuelist[0]
else:
self.data = ''
class PasswordHashField(fields.PasswordField):
def _value(self):
return u''
def process_formdata(self, valuelist):
if valuelist:
self.data = models.pwd_context.encrypt(valuelist[0])
else:
self.data = u''
class PgModelConverter(CustomModelConverter):
def __init__(self, view, additional=None):
additional = {ArrayField: self.handle_array_field,
DateTimeTZField: self.handle_datetime_tz_field,
}
super(PgModelConverter, self).__init__(view, additional)
self.view = view
def handle_array_field(self, model, field, **kwargs):
return field.name, ArrayListField(**kwargs)
def handle_datetime_tz_field(self, model, field, **kwargs):
kwargs['widget'] = DateTimePickerWidget()
return field.name, fields.DateTimeField(**kwargs)
class BaseModelView(ModelView):
column_display_pk = True
model_form_converter = PgModelConverter
@require_permission('admin')
def is_accessible(self):
return True
class UserModelView(BaseModelView):
column_searchable_list = ('name', 'email')
form_excluded_columns = ('created_at', 'updated_at')
column_exclude_list = ('password_hash',)
form_overrides = dict(password_hash=PasswordHashField)
form_args = {
'password_hash': {'label': 'Password'}
}
class QueryResultModelView(BaseModelView):
column_exclude_list = ('data',)
class QueryModelView(BaseModelView):
column_exclude_list = ('latest_query_data',)
class DashboardModelView(BaseModelView):
column_searchable_list = ('name', 'slug')
def init_admin(app):
admin = Admin(app, name='re:dash admin', template_mode='bootstrap3')
admin.add_view(UserModelView(models.User))
admin.add_view(QueryModelView(models.Query))
admin.add_view(QueryResultModelView(models.QueryResult))
admin.add_view(DashboardModelView(models.Dashboard))
for m in (models.Visualization, models.Widget, models.ActivityLog, models.Group, models.Event):
admin.add_view(BaseModelView(m))

View File

@@ -1,13 +1,13 @@
import functools
import hashlib
import hmac
import time
import logging
from flask.ext.login import LoginManager
from flask.ext.login import user_logged_in
from flask import request, make_response, redirect, url_for
from flask.ext.login import LoginManager, login_user, current_user, logout_user
from redash import models, settings, google_oauth, saml_auth
from redash.tasks import record_event
from redash import models, settings, google_oauth
login_manager = LoginManager()
logger = logging.getLogger('authentication')
@@ -23,85 +23,77 @@ def sign(key, path, expires):
return h.hexdigest()
@login_manager.user_loader
def load_user(user_id):
return models.User.get_by_id(user_id)
class Authentication(object):
def verify_authentication(self):
return False
def required(self, fn):
@functools.wraps(fn)
def decorated(*args, **kwargs):
if current_user.is_authenticated() or self.verify_authentication():
return fn(*args, **kwargs)
return make_response(redirect(url_for("login", next=request.url)))
return decorated
def hmac_load_user_from_request(request):
signature = request.args.get('signature')
expires = float(request.args.get('expires') or 0)
query_id = request.view_args.get('query_id', None)
user_id = request.args.get('user_id', None)
class ApiKeyAuthentication(Authentication):
def verify_authentication(self):
api_key = request.args.get('api_key')
query_id = request.view_args.get('query_id', None)
# TODO: 3600 should be a setting
if signature and time.time() < expires <= time.time() + 3600:
if user_id:
user = models.User.get_by_id(user_id)
calculated_signature = sign(user.api_key, request.path, expires)
if query_id and api_key:
query = models.Query.get(models.Query.id == query_id)
if user.api_key and signature == calculated_signature:
return user
if query.api_key and api_key == query.api_key:
login_user(models.ApiUser(query.api_key), remember=False)
return True
if query_id:
return False
class HMACAuthentication(Authentication):
def verify_authentication(self):
signature = request.args.get('signature')
expires = float(request.args.get('expires') or 0)
query_id = request.view_args.get('query_id', None)
# TODO: 3600 should be a setting
if signature and query_id and time.time() < expires <= time.time() + 3600:
query = models.Query.get(models.Query.id == query_id)
calculated_signature = sign(query.api_key, request.path, expires)
if query.api_key and signature == calculated_signature:
return models.ApiUser(query.api_key)
login_user(models.ApiUser(query.api_key), remember=False)
return True
return None
return False
def get_user_from_api_key(api_key, query_id):
if not api_key:
@login_manager.user_loader
def load_user(user_id):
# If the user was previously logged in as api user, the user_id will be the api key and will raise an exception as
# it can't be cast to int.
if isinstance(user_id, basestring) and not user_id.isdigit():
return None
user = None
try:
user = models.User.get_by_api_key(api_key)
except models.User.DoesNotExist:
if query_id:
query = models.Query.get_by_id(query_id)
if query and query.api_key == api_key:
user = models.ApiUser(api_key)
return user
def api_key_load_user_from_request(request):
api_key = request.args.get('api_key', None)
query_id = request.view_args.get('query_id', None)
user = get_user_from_api_key(api_key, query_id)
return user
def log_user_logged_in(app, user):
event = {
'user_id': user.id,
'action': 'login',
'object_type': 'redash',
'timestamp': int(time.time()),
}
record_event.delay(event)
return models.User.select().where(models.User.id == user_id).first()
def setup_authentication(app):
login_manager.init_app(app)
login_manager.anonymous_user = models.AnonymousUser
login_manager.login_view = 'login'
app.secret_key = settings.COOKIE_SECRET
app.register_blueprint(google_oauth.blueprint)
app.register_blueprint(saml_auth.blueprint)
user_logged_in.connect(log_user_logged_in)
if settings.AUTH_TYPE == 'hmac':
login_manager.request_loader(hmac_load_user_from_request)
auth = HMACAuthentication()
elif settings.AUTH_TYPE == 'api_key':
login_manager.request_loader(api_key_load_user_from_request)
auth = ApiKeyAuthentication()
else:
logger.warning("Unknown authentication type ({}). Using default (HMAC).".format(settings.AUTH_TYPE))
login_manager.request_loader(hmac_load_user_from_request)
auth = HMACAuthentication()
return auth

View File

@@ -12,22 +12,17 @@ import time
import logging
from flask import render_template, send_from_directory, make_response, request, jsonify, redirect, \
session, url_for, current_app, flash
from flask.ext.restful import Resource, abort, reqparse
from flask_login import current_user, login_user, logout_user, login_required
from funcy import project
session, url_for
from flask.ext.restful import Resource, abort
from flask_login import current_user, login_user, logout_user
import sqlparse
from itertools import chain
from funcy import distinct
from redash import statsd_client, models, settings, utils
from redash.wsgi import app, api
from redash import redis_connection, statsd_client, models, settings, utils, __version__
from redash.wsgi import app, auth, api
from redash.tasks import QueryTask, record_event
from redash.cache import headers as cache_headers
from redash.permissions import require_permission
from redash.query_runner import query_runners, validate_configuration
from redash.monitor import get_status
@app.route('/ping', methods=['GET'])
@@ -35,19 +30,14 @@ def ping():
return 'PONG.'
@app.route('/admin/<anything>/<whatever>')
@app.route('/admin/<anything>')
@app.route('/dashboard/<anything>')
@app.route('/alerts')
@app.route('/alerts/<pk>')
@app.route('/queries')
@app.route('/data_sources')
@app.route('/data_sources/<pk>')
@app.route('/queries/<query_id>')
@app.route('/queries/<query_id>/<anything>')
@app.route('/personal')
@app.route('/')
@login_required
@auth.required
def index(**kwargs):
email_md5 = hashlib.md5(current_user.email.lower()).hexdigest()
gravatar_url = "https://www.gravatar.com/avatar/%s?s=40" % email_md5
@@ -62,8 +52,7 @@ def index(**kwargs):
}
features = {
'clientSideMetrics': settings.CLIENT_SIDE_METRICS,
'allowAllToEditQueries': settings.FEATURE_ALLOW_ALL_TO_EDIT_QUERIES
'clientSideMetrics': settings.CLIENT_SIDE_METRICS
}
return render_template("index.html", user=json.dumps(user), name=settings.NAME,
@@ -77,30 +66,22 @@ def login():
return redirect(request.args.get('next') or '/')
if not settings.PASSWORD_LOGIN_ENABLED:
if settings.SAML_LOGIN_ENABLED:
return redirect(url_for("saml_auth.sp_initiated", next=request.args.get('next')))
else:
return redirect(url_for("google_oauth.authorize", next=request.args.get('next')))
return redirect(url_for("google_oauth.authorize", next=request.args.get('next')))
if request.method == 'POST':
try:
user = models.User.get_by_email(request.form['email'])
if user and user.verify_password(request.form['password']):
remember = ('remember' in request.form)
login_user(user, remember=remember)
return redirect(request.args.get('next') or '/')
else:
flash("Wrong email or password.")
except models.User.DoesNotExist:
flash("Wrong email or password.")
user = models.User.select().where(models.User.email == request.form['username']).first()
if user and user.verify_password(request.form['password']):
remember = ('remember' in request.form)
login_user(user, remember=remember)
return redirect(request.args.get('next') or '/')
return render_template("login.html",
name=settings.NAME,
analytics=settings.ANALYTICS,
next=request.args.get('next'),
username=request.form.get('username', ''),
show_google_openid=settings.GOOGLE_OAUTH_ENABLED,
show_saml_login=settings.SAML_LOGIN_ENABLED)
show_google_openid=settings.GOOGLE_OAUTH_ENABLED)
@app.route('/logout')
def logout():
@@ -110,16 +91,43 @@ def logout():
return redirect('/login')
@app.route('/status.json')
@login_required
@auth.required
@require_permission('admin')
def status_api():
status = get_status()
status = {}
info = redis_connection.info()
status['redis_used_memory'] = info['used_memory_human']
status['version'] = __version__
status['queries_count'] = models.Query.select().count()
status['query_results_count'] = models.QueryResult.select().count()
status['unused_query_results_count'] = models.QueryResult.unused().count()
status['dashboards_count'] = models.Dashboard.select().count()
status['widgets_count'] = models.Widget.select().count()
status['workers'] = []
manager_status = redis_connection.hgetall('redash:status')
status['manager'] = manager_status
status['manager']['outdated_queries_count'] = len(models.Query.outdated_queries())
queues = {}
for ds in models.DataSource.select():
for queue in (ds.queue_name, ds.scheduled_queue_name):
queues.setdefault(queue, set())
queues[queue].add(ds.name)
status['manager']['queues'] = {}
for queue, sources in queues.iteritems():
status['manager']['queues'][queue] = {
'data_sources': ', '.join(sources),
'size': redis_connection.llen(queue)
}
return jsonify(status)
@app.route('/api/queries/format', methods=['POST'])
@login_required
@auth.required
def format_sql_query():
arguments = request.get_json(force=True)
query = arguments.get("query", "")
@@ -128,7 +136,7 @@ def format_sql_query():
@app.route('/queries/new', methods=['POST'])
@login_required
@auth.required
def create_query_route():
query = request.form.get('query', None)
data_source_id = request.form.get('data_source_id', None)
@@ -146,7 +154,7 @@ def create_query_route():
class BaseResource(Resource):
decorators = [login_required]
decorators = [auth.required]
def __init__(self, *args, **kwargs):
super(BaseResource, self).__init__(*args, **kwargs)
@@ -191,34 +199,6 @@ class DataSourceTypeListAPI(BaseResource):
api.add_resource(DataSourceTypeListAPI, '/api/data_sources/types', endpoint='data_source_types')
class DataSourceAPI(BaseResource):
@require_permission('admin')
def get(self, data_source_id):
data_source = models.DataSource.get_by_id(data_source_id)
return data_source.to_dict(all=True)
@require_permission('admin')
def post(self, data_source_id):
data_source = models.DataSource.get_by_id(data_source_id)
req = request.get_json(True)
if not validate_configuration(req['type'], req['options']):
abort(400)
data_source.name = req['name']
data_source.options = json.dumps(req['options'])
data_source.save()
return data_source.to_dict(all=True)
@require_permission('admin')
def delete(self, data_source_id):
data_source = models.DataSource.get_by_id(data_source_id)
data_source.delete_instance(recursive=True)
return make_response('', 204)
class DataSourceListAPI(BaseResource):
def get(self):
data_sources = [ds.to_dict() for ds in models.DataSource.all()]
@@ -235,12 +215,11 @@ class DataSourceListAPI(BaseResource):
if not validate_configuration(req['type'], req['options']):
abort(400)
datasource = models.DataSource.create(name=req['name'], type=req['type'], options=json.dumps(req['options']))
datasource = models.DataSource.create(name=req['name'], type=req['type'], options=req['options'])
return datasource.to_dict(all=True)
return datasource.to_dict()
api.add_resource(DataSourceListAPI, '/api/data_sources', endpoint='data_sources')
api.add_resource(DataSourceAPI, '/api/data_sources/<data_source_id>', endpoint='data_source')
class DataSourceSchemaAPI(BaseResource):
@@ -252,16 +231,9 @@ class DataSourceSchemaAPI(BaseResource):
api.add_resource(DataSourceSchemaAPI, '/api/data_sources/<data_source_id>/schema')
class DashboardRecentAPI(BaseResource):
def get(self):
recent = [d.to_dict() for d in models.Dashboard.recent(current_user.id)]
global_recent = []
if len(recent) < 10:
global_recent = [d.to_dict() for d in models.Dashboard.recent()]
return distinct(chain(recent, global_recent), key=lambda d: d['id'])
return [d.to_dict() for d in models.Dashboard.recent(current_user.id).limit(20)]
class DashboardListAPI(BaseResource):
@@ -365,13 +337,7 @@ class QuerySearchAPI(BaseResource):
class QueryRecentAPI(BaseResource):
@require_permission('view_query')
def get(self):
recent = [d.to_dict() for d in models.Query.recent(current_user.id)]
global_recent = []
if len(recent) < 10:
global_recent = [d.to_dict() for d in models.Query.recent()]
return distinct(chain(recent, global_recent), key=lambda d: d['id'])
return [q.to_dict() for q in models.Query.recent(current_user.id).limit(20)]
class QueryListAPI(BaseResource):
@@ -397,7 +363,7 @@ class QueryAPI(BaseResource):
@require_permission('edit_query')
def post(self, query_id):
query = models.Query.get_by_id(query_id)
query_def = request.get_json(force=True)
for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'user', 'last_modified_by']:
query_def.pop(field, None)
@@ -408,9 +374,7 @@ class QueryAPI(BaseResource):
if 'data_source_id' in query_def:
query_def['data_source'] = query_def.pop('data_source_id')
# Don't set "last_modified_by" if the user only refreshing this query
if not ('latest_query_data' in query_def and len(query_def.keys()) == 1):
query_def['last_modified_by'] = self.current_user
query_def['last_modified_by'] = self.current_user
# TODO: use #save() with #dirty_fields.
models.Query.update_instance(query_id, **query_def)
@@ -451,7 +415,7 @@ class VisualizationListAPI(BaseResource):
kwargs = request.get_json(force=True)
kwargs['options'] = json.dumps(kwargs['options'])
kwargs['query'] = kwargs.pop('query_id')
vis = models.Visualization(**kwargs)
vis.save()
@@ -486,7 +450,7 @@ api.add_resource(VisualizationAPI, '/api/visualizations/<visualization_id>', end
class QueryResultListAPI(BaseResource):
@require_permission('execute_query')
def post(self):
params = request.get_json(force=True)
params = request.json
if settings.FEATURE_TABLES_PERMISSIONS:
metadata = utils.SQLMetaData(params['query'])
@@ -512,7 +476,7 @@ class QueryResultListAPI(BaseResource):
activity=params['query']
).save()
max_age = int(params.get('max_age', -1))
max_age = int(params['max_age'])
if max_age == 0:
query_result = None
@@ -523,8 +487,7 @@ class QueryResultListAPI(BaseResource):
return {'query_result': query_result.to_dict()}
else:
data_source = models.DataSource.get_by_id(params['data_source_id'])
query_id = params.get('query_id', 'adhoc')
job = QueryTask.add_task(params['query'], data_source, metadata={"Username": self.current_user.name, "Query ID": query_id})
job = QueryTask.add_task(params['query'], data_source)
return {'job': job.to_dict()}
@@ -544,28 +507,6 @@ class QueryResultAPI(BaseResource):
headers.update(cache_headers)
return make_response(s.getvalue(), 200, headers)
@staticmethod
def add_cors_headers(headers):
if 'Origin' in request.headers:
origin = request.headers['Origin']
if origin in settings.ACCESS_CONTROL_ALLOW_ORIGIN:
headers['Access-Control-Allow-Origin'] = origin
headers['Access-Control-Allow-Credentials'] = str(settings.ACCESS_CONTROL_ALLOW_CREDENTIALS).lower()
@require_permission('view_query')
def options(self, query_id=None, query_result_id=None, filetype='json'):
headers = {}
self.add_cors_headers(headers)
if settings.ACCESS_CONTROL_REQUEST_METHOD:
headers['Access-Control-Request-Method'] = settings.ACCESS_CONTROL_REQUEST_METHOD
if settings.ACCESS_CONTROL_ALLOW_HEADERS:
headers['Access-Control-Allow-Headers'] = settings.ACCESS_CONTROL_ALLOW_HEADERS
return make_response("", 200, headers)
@require_permission('view_query')
def get(self, query_id=None, query_result_id=None, filetype='json'):
if query_result_id is None and query_id is not None:
@@ -595,15 +536,9 @@ class QueryResultAPI(BaseResource):
record_event.delay(event)
headers = {}
if len(settings.ACCESS_CONTROL_ALLOW_ORIGIN) > 0:
self.add_cors_headers(headers)
if filetype == 'json':
data = json.dumps({'query_result': query_result.to_dict()}, cls=utils.JSONEncoder)
headers.update(cache_headers)
return make_response(data, 200, headers)
return make_response(data, 200, cache_headers)
else:
return self.csv_response(query_result)
@@ -631,110 +566,13 @@ class JobAPI(BaseResource):
api.add_resource(JobAPI, '/api/jobs/<job_id>', endpoint='job')
class AlertAPI(BaseResource):
def get(self, alert_id):
alert = models.Alert.get_by_id(alert_id)
return alert.to_dict()
def post(self, alert_id):
req = request.get_json(True)
params = project(req, ('options', 'name', 'query_id'))
alert = models.Alert.get_by_id(alert_id)
if 'query_id' in params:
params['query'] = params.pop('query_id')
alert.update_instance(**params)
record_event.delay({
'user_id': self.current_user.id,
'action': 'edit',
'timestamp': int(time.time()),
'object_id': alert.id,
'object_type': 'alert'
})
return alert.to_dict()
class AlertListAPI(BaseResource):
def post(self):
req = request.get_json(True)
required_fields = ('options', 'name', 'query_id')
for f in required_fields:
if f not in req:
abort(400)
alert = models.Alert.create(
name=req['name'],
query=req['query_id'],
user=self.current_user,
options=req['options']
)
record_event.delay({
'user_id': self.current_user.id,
'action': 'create',
'timestamp': int(time.time()),
'object_id': alert.id,
'object_type': 'alert'
})
# TODO: should be in model?
models.AlertSubscription.create(alert=alert, user=self.current_user)
record_event.delay({
'user_id': self.current_user.id,
'action': 'subscribe',
'timestamp': int(time.time()),
'object_id': alert.id,
'object_type': 'alert'
})
return alert.to_dict()
def get(self):
return [alert.to_dict() for alert in models.Alert.all()]
class AlertSubscriptionListResource(BaseResource):
def post(self, alert_id):
subscription = models.AlertSubscription.create(alert=alert_id, user=self.current_user)
record_event.delay({
'user_id': self.current_user.id,
'action': 'subscribe',
'timestamp': int(time.time()),
'object_id': alert_id,
'object_type': 'alert'
})
return subscription.to_dict()
def get(self, alert_id):
subscriptions = models.AlertSubscription.all(alert_id)
return [s.to_dict() for s in subscriptions]
class AlertSubscriptionResource(BaseResource):
def delete(self, alert_id, subscriber_id):
models.AlertSubscription.unsubscribe(alert_id, subscriber_id)
record_event.delay({
'user_id': self.current_user.id,
'action': 'unsubscribe',
'timestamp': int(time.time()),
'object_id': alert_id,
'object_type': 'alert'
})
api.add_resource(AlertAPI, '/api/alerts/<alert_id>', endpoint='alert')
api.add_resource(AlertSubscriptionListResource, '/api/alerts/<alert_id>/subscriptions', endpoint='alert_subscriptions')
api.add_resource(AlertSubscriptionResource, '/api/alerts/<alert_id>/subscriptions/<subscriber_id>', endpoint='alert_subscription')
api.add_resource(AlertListAPI, '/api/alerts', endpoint='alerts')
@app.route('/<path:filename>')
def send_static(filename):
if current_app.debug:
cache_timeout = 0
else:
cache_timeout = None
return send_from_directory(settings.STATIC_ASSETS_PATH, filename)
if __name__ == '__main__':
app.run(debug=True)
return send_from_directory(settings.STATIC_ASSETS_PATH, filename, cache_timeout=cache_timeout)

View File

@@ -1,25 +1,25 @@
import logging
from flask.ext.login import login_user
import requests
from flask import redirect, url_for, Blueprint, flash
from flask import redirect, url_for, Blueprint
from flask_oauth import OAuth
from redash import models, settings
logger = logging.getLogger('google_oauth')
oauth = OAuth()
request_token_params = {'scope': 'https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile', 'response_type': 'code'}
if not settings.GOOGLE_APPS_DOMAIN:
if settings.GOOGLE_APPS_DOMAIN:
request_token_params['hd'] = settings.GOOGLE_APPS_DOMAIN
else:
logger.warning("No Google Apps domain defined, all Google accounts allowed.")
google = oauth.remote_app('google',
base_url='https://www.google.com/accounts/',
authorize_url='https://accounts.google.com/o/oauth2/auth',
request_token_url=None,
request_token_params={
'scope': 'https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile',
'response_type': 'code'
},
request_token_params=request_token_params,
access_token_url='https://accounts.google.com/o/oauth2/token',
access_token_method='POST',
access_token_params={'grant_type': 'authorization_code'},
@@ -31,7 +31,7 @@ blueprint = Blueprint('google_oauth', __name__)
def get_user_profile(access_token):
headers = {'Authorization': 'OAuth {}'.format(access_token)}
headers = {'Authorization': 'OAuth '+access_token}
response = requests.get('https://www.googleapis.com/oauth2/v1/userinfo', headers=headers)
if response.status_code == 401:
@@ -41,17 +41,9 @@ def get_user_profile(access_token):
return response.json()
def verify_profile(profile):
if not settings.GOOGLE_APPS_DOMAIN:
return True
domain = profile['email'].split('@')[-1]
return domain in settings.GOOGLE_APPS_DOMAIN
def create_and_login_user(name, email):
try:
user_object = models.User.get_by_email(email)
user_object = models.User.get(models.User.email == email)
if user_object.name != name:
logger.debug("Updating user name (%r -> %r)", user_object.name, name)
user_object.name = name
@@ -78,17 +70,10 @@ def authorized(resp):
if access_token is None:
logger.warning("Access token missing in call back request.")
flash("Validation error. Please retry.")
return redirect(url_for('login'))
profile = get_user_profile(access_token)
if profile is None:
flash("Validation error. Please retry.")
return redirect(url_for('login'))
if not verify_profile(profile):
logger.warning("User tried to login with unauthorized domain name: %s", profile['email'])
flash("Your Google Apps domain name isn't allowed.")
return redirect(url_for('login'))
create_and_login_user(profile['name'], profile['email'])

View File

@@ -15,7 +15,6 @@ import psycopg2
from redash import utils, settings, redis_connection
from redash.query_runner import get_query_runner
from utils import generate_token
class Database(object):
@@ -77,17 +76,6 @@ class BaseModel(peewee.Model):
super(BaseModel, self).save(*args, **kwargs)
self.post_save(created)
def update_instance(self, **kwargs):
for k, v in kwargs.items():
# setattr(model_instance, field_name, field_obj.python_value(value))
setattr(self, k, v)
dirty_fields = self.dirty_fields
if hasattr(self, 'updated_at'):
dirty_fields = dirty_fields + [self.__class__.updated_at]
self.save(only=dirty_fields)
class ModelTimestampsMixin(BaseModel):
updated_at = DateTimeTZField(default=datetime.datetime.now)
@@ -164,7 +152,6 @@ class User(ModelTimestampsMixin, BaseModel, UserMixin, PermissionsCheckMixin):
email = peewee.CharField(max_length=320, index=True, unique=True)
password_hash = peewee.CharField(max_length=128, null=True)
groups = ArrayField(peewee.CharField, default=DEFAULT_GROUPS)
api_key = peewee.CharField(max_length=40, unique=True)
class Meta:
db_table = 'users'
@@ -174,7 +161,6 @@ class User(ModelTimestampsMixin, BaseModel, UserMixin, PermissionsCheckMixin):
'id': self.id,
'name': self.name,
'email': self.email,
'gravatar_url': self.gravatar_url,
'updated_at': self.updated_at,
'created_at': self.created_at
}
@@ -183,17 +169,6 @@ class User(ModelTimestampsMixin, BaseModel, UserMixin, PermissionsCheckMixin):
super(User, self).__init__(*args, **kwargs)
self._allowed_tables = None
def pre_save(self, created):
super(User, self).pre_save(created)
if not self.api_key:
self.api_key = generate_token(40)
@property
def gravatar_url(self):
email_md5 = hashlib.md5(self.email.lower()).hexdigest()
return "https://www.gravatar.com/avatar/%s?s=40" % email_md5
@property
def permissions(self):
# TODO: this should be cached.
@@ -213,12 +188,8 @@ class User(ModelTimestampsMixin, BaseModel, UserMixin, PermissionsCheckMixin):
def get_by_email(cls, email):
return cls.get(cls.email == email)
@classmethod
def get_by_api_key(cls, api_key):
return cls.get(cls.api_key == api_key)
def __unicode__(self):
return u'%s (%s)' % (self.name, self.email)
return '%r, %r' % (self.name, self.email)
def hash_password(self, password):
self.password_hash = pwd_context.encrypt(password)
@@ -254,34 +225,23 @@ class ActivityLog(BaseModel):
class DataSource(BaseModel):
id = peewee.PrimaryKeyField()
name = peewee.CharField(unique=True)
name = peewee.CharField()
type = peewee.CharField()
options = peewee.TextField()
queue_name = peewee.CharField(default="queries")
scheduled_queue_name = peewee.CharField(default="scheduled_queries")
scheduled_queue_name = peewee.CharField(default="queries")
created_at = DateTimeTZField(default=datetime.datetime.now)
class Meta:
db_table = 'data_sources'
def to_dict(self, all=False):
d = {
def to_dict(self):
return {
'id': self.id,
'name': self.name,
'type': self.type,
'syntax': self.query_runner.syntax
'type': self.type
}
if all:
d['options'] = json.loads(self.options)
d['queue_name'] = self.queue_name
d['scheduled_queue_name'] = self.scheduled_queue_name
return d
def __unicode__(self):
return self.name
def get_schema(self, refresh=False):
key = "data_source:schema:{}".format(self.id)
@@ -290,7 +250,7 @@ class DataSource(BaseModel):
cache = redis_connection.get(key)
if cache is None:
query_runner = self.query_runner
query_runner = get_query_runner(self.type, self.options)
schema = sorted(query_runner.get_schema(), key=lambda t: t['name'])
redis_connection.set(key, json.dumps(schema))
@@ -299,23 +259,11 @@ class DataSource(BaseModel):
return schema
@property
def query_runner(self):
return get_query_runner(self.type, self.options)
@classmethod
def all(cls):
return cls.select().order_by(cls.id.asc())
class JSONField(peewee.TextField):
def db_value(self, value):
return json.dumps(value)
def python_value(self, value):
return json.loads(value)
class QueryResult(BaseModel):
id = peewee.PrimaryKeyField()
data_source = peewee.ForeignKeyField(DataSource)
@@ -373,17 +321,13 @@ class QueryResult(BaseModel):
logging.info("Inserted query (%s) data; id=%s", query_hash, query_result.id)
sql = "UPDATE queries SET latest_query_data_id = %s WHERE query_hash = %s AND data_source_id = %s RETURNING id"
query_ids = [row[0] for row in db.database.execute_sql(sql, params=(query_result.id, query_hash, data_source_id))]
updated_count = Query.update(latest_query_data=query_result).\
where(Query.query_hash==query_hash, Query.data_source==data_source_id).\
execute()
# TODO: when peewee with update & returning support is released, we can get back to using this code:
# updated_count = Query.update(latest_query_data=query_result).\
# where(Query.query_hash==query_hash, Query.data_source==data_source_id).\
# execute()
logging.info("Updated %s queries with result (%s).", updated_count, query_hash)
logging.info("Updated %s queries with result (%s).", len(query_ids), query_hash)
return query_result, query_ids
return query_result
def __unicode__(self):
return u"%d | %s | %s" % (self.id, self.query_hash, self.retrieved_at)
@@ -412,7 +356,7 @@ def should_schedule_next(previous_iteration, now, schedule):
class Query(ModelTimestampsMixin, BaseModel):
id = peewee.PrimaryKeyField()
data_source = peewee.ForeignKeyField(DataSource, null=True)
data_source = peewee.ForeignKeyField(DataSource)
latest_query_data = peewee.ForeignKeyField(QueryResult, null=True)
name = peewee.CharField(max_length=255)
description = peewee.CharField(max_length=4096, null=True)
@@ -446,7 +390,7 @@ class Query(ModelTimestampsMixin, BaseModel):
if with_user:
d['user'] = self.user.to_dict()
d['last_modified_by'] = self.last_modified_by.to_dict() if self.last_modified_by is not None else None
d['last_modified_by'] = self.last_modified_by.to_dict()
else:
d['user_id'] = self._data['user']
@@ -488,7 +432,7 @@ class Query(ModelTimestampsMixin, BaseModel):
.switch(Query).join(DataSource)\
.where(cls.schedule != None)
now = utils.utcnow()
now = datetime.datetime.utcnow().replace(tzinfo=psycopg2.tz.FixedOffsetTimezone(offset=0, name=None))
outdated_queries = {}
for query in queries:
if should_schedule_next(query.latest_query_data.retrieved_at, now, query.schedule):
@@ -511,24 +455,18 @@ class Query(ModelTimestampsMixin, BaseModel):
return cls.select().where(where).order_by(cls.created_at.desc())
@classmethod
def recent(cls, user_id=None, limit=20):
def recent(cls, user_id):
# TODO: instead of t2 here, we should define table_alias for Query table
query = cls.select().where(Event.created_at > peewee.SQL("current_date - 7")).\
return cls.select().where(Event.created_at > peewee.SQL("current_date - 7")).\
join(Event, on=(Query.id == peewee.SQL("t2.object_id::integer"))).\
where(Event.action << ('edit', 'execute', 'edit_name', 'edit_description', 'view_source')).\
where(Event.user == user_id).\
where(~(Event.object_id >> None)).\
where(Event.object_type == 'query'). \
where(cls.is_archived == False).\
group_by(Event.object_id, Query.id).\
order_by(peewee.SQL("count(0) desc"))
if user_id:
query = query.where(Event.user == user_id)
query = query.limit(limit)
return query
@classmethod
def update_instance(cls, query_id, **kwargs):
if 'query' in kwargs:
@@ -572,83 +510,6 @@ class Query(ModelTimestampsMixin, BaseModel):
return unicode(self.id)
class Alert(ModelTimestampsMixin, BaseModel):
UNKNOWN_STATE = 'unknown'
OK_STATE = 'ok'
TRIGGERED_STATE = 'triggered'
id = peewee.PrimaryKeyField()
name = peewee.CharField()
query = peewee.ForeignKeyField(Query, related_name='alerts')
user = peewee.ForeignKeyField(User, related_name='alerts')
options = JSONField()
state = peewee.CharField(default=UNKNOWN_STATE)
last_triggered_at = DateTimeTZField(null=True)
class Meta:
db_table = 'alerts'
@classmethod
def all(cls):
return cls.select(Alert, User, Query).join(Query).switch(Alert).join(User)
def to_dict(self):
return {
'id': self.id,
'name': self.name,
'query': self.query.to_dict(),
'user': self.user.to_dict(),
'options': self.options,
'state': self.state,
'last_triggered_at': self.last_triggered_at,
'updated_at': self.updated_at,
'created_at': self.created_at
}
def evaluate(self):
data = json.loads(self.query.latest_query_data.data)
# TODO: safeguard against empty result data
value = data['rows'][0][self.options['column']]
op = self.options['op']
if op == 'greater than' and value > self.options['value']:
new_state = self.TRIGGERED_STATE
elif op == 'less than' and value < self.options['value']:
new_state = self.TRIGGERED_STATE
elif op == 'equals' and value == self.options['value']:
new_state = self.TRIGGERED_STATE
else:
new_state = self.OK_STATE
return new_state
def subscribers(self):
return User.select().join(AlertSubscription).where(AlertSubscription.alert==self)
class AlertSubscription(ModelTimestampsMixin, BaseModel):
user = peewee.ForeignKeyField(User)
alert = peewee.ForeignKeyField(Alert)
class Meta:
db_table = 'alert_subscriptions'
def to_dict(self):
return {
'user': self.user.to_dict(),
'alert_id': self._data['alert']
}
@classmethod
def all(cls, alert_id):
return AlertSubscription.select(AlertSubscription, User).join(User).where(AlertSubscription.alert==alert_id)
@classmethod
def unsubscribe(cls, alert_id, user_id):
query = AlertSubscription.delete().where(AlertSubscription.alert==alert_id).where(AlertSubscription.user==user_id)
return query.execute()
class Dashboard(ModelTimestampsMixin, BaseModel):
id = peewee.PrimaryKeyField()
slug = peewee.CharField(max_length=140, index=True)
@@ -709,22 +570,16 @@ class Dashboard(ModelTimestampsMixin, BaseModel):
return cls.get(cls.slug == slug)
@classmethod
def recent(cls, user_id=None, limit=20):
query = cls.select().where(Event.created_at > peewee.SQL("current_date - 7")). \
def recent(cls, user_id):
return cls.select().where(Event.created_at > peewee.SQL("current_date - 7")). \
join(Event, on=(Dashboard.id == peewee.SQL("t2.object_id::integer"))). \
where(Event.action << ('edit', 'view')).\
where(Event.user == user_id). \
where(~(Event.object_id >> None)). \
where(Event.object_type == 'dashboard'). \
group_by(Event.object_id, Dashboard.id). \
order_by(peewee.SQL("count(0) desc"))
if user_id:
query = query.where(Event.user == user_id)
query = query.limit(limit)
return query
def save(self, *args, **kwargs):
if not self.slug:
self.slug = utils.slugify(self.name)
@@ -844,7 +699,7 @@ class Event(BaseModel):
return event
all_models = (DataSource, User, QueryResult, Query, Alert, Dashboard, Visualization, Widget, ActivityLog, Group, Event)
all_models = (DataSource, User, QueryResult, Query, Dashboard, Visualization, Widget, ActivityLog, Group, Event)
def init_db():

View File

@@ -1,33 +0,0 @@
from redash import redis_connection, models, __version__
def get_status():
status = {}
info = redis_connection.info()
status['redis_used_memory'] = info['used_memory_human']
status['version'] = __version__
status['queries_count'] = models.Query.select().count()
status['query_results_count'] = models.QueryResult.select().count()
status['unused_query_results_count'] = models.QueryResult.unused().count()
status['dashboards_count'] = models.Dashboard.select().count()
status['widgets_count'] = models.Widget.select().count()
status['workers'] = []
manager_status = redis_connection.hgetall('redash:status')
status['manager'] = manager_status
status['manager']['outdated_queries_count'] = len(models.Query.outdated_queries())
queues = {}
for ds in models.DataSource.select():
for queue in (ds.queue_name, ds.scheduled_queue_name):
queues.setdefault(queue, set())
queues[queue].add(ds.name)
status['manager']['queues'] = {}
for queue, sources in queues.iteritems():
status['manager']['queues'][queue] = {
'data_sources': ', '.join(sources),
'size': redis_connection.llen(queue)
}
return status

View File

@@ -15,7 +15,6 @@ __all__ = [
'TYPE_STRING',
'TYPE_DATE',
'TYPE_FLOAT',
'SUPPORTED_COLUMN_TYPES',
'register',
'get_query_runner',
'import_query_runners'
@@ -29,19 +28,10 @@ TYPE_STRING = 'string'
TYPE_DATETIME = 'datetime'
TYPE_DATE = 'date'
SUPPORTED_COLUMN_TYPES = set([
TYPE_INTEGER,
TYPE_FLOAT,
TYPE_BOOLEAN,
TYPE_STRING,
TYPE_DATETIME,
TYPE_DATE
])
class BaseQueryRunner(object):
def __init__(self, configuration):
jsonschema.validate(configuration, self.configuration_schema())
self.syntax = 'sql'
self.configuration = configuration
@classmethod
@@ -70,13 +60,6 @@ class BaseQueryRunner(object):
def get_schema(self):
return []
def _run_query_internal(self, query):
results, error = self.run_query(query)
if error is not None:
raise Exception("Failed running query [%s]." % query)
return json.loads(results)['rows']
@classmethod
def to_dict(cls):
return {
@@ -112,11 +95,7 @@ def validate_configuration(query_runner_type, configuration_json):
return False
try:
if isinstance(configuration_json, basestring):
configuration = json.loads(configuration_json)
else:
configuration = configuration_json
jsonschema.validate(configuration, query_runner_class.configuration_schema())
jsonschema.validate(json.loads(configuration_json), query_runner_class.configuration_schema())
except (ValidationError, ValueError):
return False
@@ -125,4 +104,4 @@ def validate_configuration(query_runner_type, configuration_json):
def import_query_runners(query_runner_imports):
for runner_import in query_runner_imports:
__import__(runner_import)
__import__(runner_import)

View File

@@ -1,4 +1,3 @@
from base64 import b64decode
import datetime
import json
import httplib2
@@ -6,8 +5,6 @@ import logging
import sys
import time
import requests
from redash.query_runner import *
from redash.utils import JSONEncoder
@@ -18,7 +15,6 @@ try:
from apiclient.discovery import build
from apiclient.errors import HttpError
from oauth2client.client import SignedJwtAssertionCredentials
from oauth2client import gce
enabled = True
except ImportError:
@@ -70,6 +66,18 @@ def _load_key(filename):
f.close()
def _get_bigquery_service(service_account, private_key):
scope = [
"https://www.googleapis.com/auth/bigquery",
]
credentials = SignedJwtAssertionCredentials(service_account, private_key, scope=scope)
http = httplib2.Http()
http = credentials.authorize(http)
return build("bigquery", "v2", http=http)
def _get_query_results(jobs, project_id, job_id, start_index):
query_reply = jobs.getQueryResults(projectId=project_id, jobId=job_id, startIndex=start_index).execute()
logging.debug('query_reply %s', query_reply)
@@ -90,39 +98,30 @@ class BigQuery(BaseQueryRunner):
return {
'type': 'object',
'properties': {
'serviceAccount': {
'type': 'string',
'title': 'Service Account'
},
'projectId': {
'type': 'string',
'title': 'Project ID'
},
'jsonKeyFile': {
"type": "string",
'title': 'JSON Key File'
'privateKey': {
'type': 'string',
'title': 'Private Key Path'
}
},
'required': ['jsonKeyFile', 'projectId']
'required': ['serviceAccount', 'projectId', 'privateKey']
}
def __init__(self, configuration_json):
super(BigQuery, self).__init__(configuration_json)
def _get_bigquery_service(self):
scope = [
"https://www.googleapis.com/auth/bigquery",
]
key = json.loads(b64decode(self.configuration['jsonKeyFile']))
credentials = SignedJwtAssertionCredentials(key['client_email'], key['private_key'], scope=scope)
http = httplib2.Http()
http = credentials.authorize(http)
return build("bigquery", "v2", http=http)
def _get_project_id(self):
return self.configuration["projectId"]
self.private_key = _load_key(self.configuration["privateKey"])
def run_query(self, query):
bigquery_service = self._get_bigquery_service()
bigquery_service = _get_bigquery_service(self.configuration["serviceAccount"],
self.private_key)
jobs = bigquery_service.jobs()
job_data = {
@@ -135,13 +134,13 @@ class BigQuery(BaseQueryRunner):
logger.debug("BigQuery got query: %s", query)
project_id = self._get_project_id()
project_id = self.configuration["projectId"]
try:
insert_response = jobs.insert(projectId=project_id, body=job_data).execute()
current_row = 0
query_reply = _get_query_results(jobs, project_id=project_id,
job_id=insert_response['jobReference']['jobId'], start_index=current_row)
job_id=insert_response['jobReference']['jobId'], start_index=current_row)
logger.debug("bigquery replied: %s", query_reply)
@@ -177,26 +176,4 @@ class BigQuery(BaseQueryRunner):
return json_data, error
class BigQueryGCE(BigQuery):
@classmethod
def type(cls):
return "bigquery_gce"
@classmethod
def configuration_schema(cls):
return {}
def _get_project_id(self):
return requests.get('http://metadata/computeMetadata/v1/project/project-id', headers={'Metadata-Flavor': 'Google'}).content
def _get_bigquery_service(self):
credentials = gce.AppAssertionCredentials(scope='https://www.googleapis.com/auth/bigquery')
http = httplib2.Http()
http = credentials.authorize(http)
return build("bigquery", "v2", http=http)
register(BigQuery)
register(BigQueryGCE)
register(BigQuery)

View File

@@ -1,259 +0,0 @@
import datetime
import json
import logging
import sys
import urllib
from redash.query_runner import *
from redash import models
import requests
import dateutil
from dateutil.parser import parse
try:
import http.client as http_client
except ImportError:
# Python 2
import httplib as http_client
logger = logging.getLogger(__name__)
ELASTICSEARCH_TYPES_MAPPING = {
"integer" : TYPE_INTEGER,
"long" : TYPE_INTEGER,
"float" : TYPE_FLOAT,
"double" : TYPE_FLOAT,
"boolean" : TYPE_BOOLEAN,
"string" : TYPE_STRING,
"date" : TYPE_DATE,
# "geo_point" TODO: Need to split to 2 fields somehow
}
PYTHON_TYPES_MAPPING = {
str: TYPE_STRING,
unicode: TYPE_STRING,
bool : TYPE_BOOLEAN,
int : TYPE_INTEGER,
long: TYPE_INTEGER,
float: TYPE_FLOAT
}
#
# ElasticSearch currently supports only simple Lucene-style queries (like Kibana,
# but without aggregations).
#
# Full blown JSON based ElasticSearch queries (including aggregations) will be
# added later
#
# Simple query example:
#
# - Query the index named "twitter"
# - Filter by "user:kimchy"
# - Return the fields: "@timestamp", "tweet" and "user"
# - Return up to 15 results
# - Sort by @timestamp ascending
#
# {
# "index" : "twitter",
# "query" : "user:kimchy",
# "fields" : ["@timestamp", "tweet", "user"],
# "size" : 15,
# "sort" : "@timestamp:asc"
# }
#
#
# Simple query on a logstash ElasticSearch instance:
#
# - Query the index named "logstash-2015.04.*" (in this case, all of April 2015)
# - Filter by type:events AND eventName:UserUpgrade AND channel:selfserve
# - Return fields: "@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"
# - Return up to 250 results
# - Sort by @timestamp ascending
# {
# "index" : "logstash-2015.04.*",
# "query" : "type:events AND eventName:UserUpgrade AND channel:selfserve",
# "fields" : ["@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"],
# "size" : 250,
# "sort" : "@timestamp:asc"
# }
#
#
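# For illustration only (a sketch based on run_query() below, not part of the
# original query documentation): the first simple query example above would be
# translated into an HTTP GET roughly equivalent to
#
#   {server}/twitter/_search?&size=15&sort=%40timestamp%3Aasc&q=user%3Akimchy
#
# The "fields" list is not sent to ElasticSearch at all; it is applied
# client-side in parse_results() to limit the columns returned to re:dash.
#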
class ElasticSearch(BaseQueryRunner):
DEBUG_ENABLED = False
"""
ElasticSearch query runner for querying ElasticSearch servers.
Queries can be written using the Lucene syntax (single line) or the more complex,
full-blown ElasticSearch JSON syntax.
"""
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'server': {
'type': 'string',
'title': 'Base URL'
}
},
"required" : ["server"]
}
@classmethod
def enabled(cls):
return True
@classmethod
def annotate_query(cls):
return False
def __init__(self, configuration_json):
super(ElasticSearch, self).__init__(configuration_json)
self.syntax = "json"
if self.DEBUG_ENABLED:
http_client.HTTPConnection.debuglevel = 1
# you need to initialize logging, otherwise you will not see anything from requests
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
requests_log = logging.getLogger("requests.packages.urllib3")
requests_log.setLevel(logging.DEBUG)
requests_log.propagate = True
def get_mappings(self, url):
mappings = {}
r = requests.get(url)
mappings_data = r.json()
for index_name in mappings_data:
index_mappings = mappings_data[index_name]
for m in index_mappings.get("mappings", {}):
for property_name in index_mappings["mappings"][m]["properties"]:
property_data = index_mappings["mappings"][m]["properties"][property_name]
if not property_name in mappings:
property_type = property_data.get("type", None)
if property_type:
if property_type in ELASTICSEARCH_TYPES_MAPPING:
mappings[property_name] = property_type
else:
raise "Unknown property type: {0}".format(property_type)
return mappings
def parse_results(self, mappings, result_fields, raw_result, result_columns, result_rows):
result_columns_index = {}
for c in result_columns:
result_columns_index[c["name"]] = c
result_fields_index = {}
if result_fields:
for r in result_fields:
result_fields_index[r] = None
for h in raw_result["hits"]["hits"]:
row = {}
for column in h["_source"]:
if result_fields and column not in result_fields_index:
continue
if column not in result_columns_index:
result_columns.append({
"name" : column,
"friendly_name" : column,
"type" : mappings.get(column, "string")
})
result_columns_index[column] = result_columns[-1]
row[column] = h["_source"][column]
if row and len(row) > 0:
result_rows.append(row)
def execute_simple_query(self, url, _from, mappings, result_fields, result_columns, result_rows):
url += "&from={0}".format(_from)
r = requests.get(url)
if r.status_code != 200:
raise Exception("Failed to execute query. Return Code: {0} Reason: {1}".format(r.status_code, r.text))
raw_result = r.json()
self.parse_results(mappings, result_fields, raw_result, result_columns, result_rows)
total = raw_result["hits"]["total"]
result_size = len(raw_result["hits"]["hits"])
logger.debug("Result Size: {0} Total: {1}".format(result_size, total))
return raw_result["hits"]["total"]
def run_query(self, query):
try:
error = None
logger.debug(query)
query_params = json.loads(query)
index_name = query_params["index"]
query_data = query_params["query"]
size = int(query_params.get("size", 500))
result_fields = query_params.get("fields", None)
sort = query_params.get("sort", None)
server_url = self.configuration["server"]
if not server_url:
error = "Missing configuration key 'server'"
return None, error
if server_url[-1] == "/":
server_url = server_url[:-1]
url = "{0}/{1}/_search?".format(server_url, index_name)
mapping_url = "{0}/{1}/_mapping".format(server_url, index_name)
mappings = self.get_mappings(mapping_url)
logger.debug(json.dumps(mappings, indent=4))
if size:
url += "&size={0}".format(size)
if sort:
url += "&sort={0}".format(urllib.quote_plus(sort))
url += "&q={0}".format(urllib.quote_plus(query_data))
logger.debug("Using URL: {0}".format(url))
logger.debug("Using Query: {0}".format(query_data))
result_columns = []
result_rows = []
if isinstance(query_data, str) or isinstance(query_data, unicode):
_from = 0
while True:
total = self.execute_simple_query(url, _from, mappings, result_fields, result_columns, result_rows)
_from += size
if _from >= total:
break
else:
# TODO: Handle complete ElasticSearch queries (JSON based sent over HTTP POST)
raise Exception("Advanced queries are not supported")
json_data = json.dumps({
"columns" : result_columns,
"rows" : result_rows
})
except KeyboardInterrupt:
error = "Query cancelled by user."
json_data = None
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
return json_data, error
register(ElasticSearch)

View File

@@ -1,117 +0,0 @@
from base64 import b64decode
import json
import logging
import sys
from redash.query_runner import *
from redash.utils import JSONEncoder
logger = logging.getLogger(__name__)
try:
import gspread
from oauth2client.client import SignedJwtAssertionCredentials
from dateutil import parser
enabled = True
except ImportError:
logger.warning("Missing dependencies. Please install gspread, dateutil and oauth2client.")
logger.warning("You can use pip: pip install gspread dateutil oauth2client")
enabled = False
def _load_key(filename):
with open(filename, "rb") as f:
return json.loads(f.read())
def _guess_type(value):
try:
val = int(value)
return TYPE_INTEGER, val
except ValueError:
pass
try:
val = float(value)
return TYPE_FLOAT, val
except ValueError:
pass
if str(value).lower() in ('true', 'false'):
return TYPE_BOOLEAN, bool(value)
try:
val = parser.parse(value)
return TYPE_DATETIME, val
except ValueError:
pass
return TYPE_STRING, value
class GoogleSpreadsheet(BaseQueryRunner):
HEADER_INDEX = 0
@classmethod
def annotate_query(cls):
return False
@classmethod
def type(cls):
return "google_spreadsheets"
@classmethod
def enabled(cls):
return enabled
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'jsonKeyFile': {
"type": "string",
'title': 'JSON Key File'
}
},
'required': ['jsonKeyFile']
}
def __init__(self, configuration_json):
super(GoogleSpreadsheet, self).__init__(configuration_json)
def _get_spreadsheet_service(self):
scope = [
'https://spreadsheets.google.com/feeds',
]
key = json.loads(b64decode(self.configuration['jsonKeyFile']))
credentials = SignedJwtAssertionCredentials(key['client_email'], key["private_key"], scope=scope)
spreadsheetservice = gspread.authorize(credentials)
return spreadsheetservice
def run_query(self, query):
logger.debug("Spreadsheet is about to execute query: %s", query)
values = query.split("|")
key = values[0]  # key of the spreadsheet
worksheet_num = 0 if len(values) != 2 else int(values[1])  # worksheet index, used when the spreadsheet contains more than one worksheet
try:
spreadsheet_service = self._get_spreadsheet_service()
spreadsheet = spreadsheet_service.open_by_key(key)
worksheets = spreadsheet.worksheets()
all_data = worksheets[worksheet_num].get_all_values()
column_names = []
columns = []
for j, column_name in enumerate(all_data[self.HEADER_INDEX]):
column_names.append(column_name)
columns.append({
'name': column_name,
'friendly_name': column_name,
'type': _guess_type(all_data[self.HEADER_INDEX+1][j])[0]
})
rows = [dict(zip(column_names, row)) for row in all_data[self.HEADER_INDEX+1:]]
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
error = None
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
return json_data, error
register(GoogleSpreadsheet)

View File

@@ -1,134 +0,0 @@
import json
import logging
import sys
from redash.query_runner import *
from redash.utils import JSONEncoder
logger = logging.getLogger(__name__)
try:
from pyhive import hive
enabled = True
except ImportError, e:
logger.warning("Missing dependencies. Please install pyhive.")
logger.warning("You can use pip: pip install pyhive")
enabled = False
COLUMN_NAME = 0
COLUMN_TYPE = 1
types_map = {
'BIGINT': TYPE_INTEGER,
'TINYINT': TYPE_INTEGER,
'SMALLINT': TYPE_INTEGER,
'INT': TYPE_INTEGER,
'DOUBLE': TYPE_FLOAT,
'DECIMAL': TYPE_FLOAT,
'FLOAT': TYPE_FLOAT,
'REAL': TYPE_FLOAT,
'BOOLEAN': TYPE_BOOLEAN,
'TIMESTAMP': TYPE_DATETIME,
'DATE': TYPE_DATETIME,
'CHAR': TYPE_STRING,
'STRING': TYPE_STRING,
'VARCHAR': TYPE_STRING
}
class Hive(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
"type": "object",
"properties": {
"host": {
"type": "string"
},
"port": {
"type": "number"
},
"database": {
"type": "string"
},
"username": {
"type": "string"
}
},
"required": ["host"]
}
@classmethod
def annotate_query(cls):
return False
@classmethod
def type(cls):
return "hive"
def __init__(self, configuration_json):
super(Hive, self).__init__(configuration_json)
def get_schema(self):
try:
schemas_query = "show schemas"
tables_query = "show tables in %s"
columns_query = "show columns in %s"
schema = {}
for schema_name in filter(lambda a: len(a) > 0, map(lambda a: str(a['database_name']), self._run_query_internal(schemas_query))):
for table_name in filter(lambda a: len(a) > 0, map(lambda a: str(a['tab_name']), self._run_query_internal(tables_query % schema_name))):
columns = filter(lambda a: len(a) > 0, map(lambda a: str(a['field']), self._run_query_internal(columns_query % table_name)))
if schema_name != 'default':
table_name = '{}.{}'.format(schema_name, table_name)
schema[table_name] = {'name': table_name, 'columns': columns}
except Exception, e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
return schema.values()
def run_query(self, query):
connection = None
try:
connection = hive.connect(**self.configuration)
cursor = connection.cursor()
cursor.execute(query)
column_names = []
columns = []
for column in cursor.description:
column_name = column[COLUMN_NAME]
column_names.append(column_name)
columns.append({
'name': column_name,
'friendly_name': column_name,
'type': types_map.get(column[COLUMN_TYPE], None)
})
rows = [dict(zip(column_names, row)) for row in cursor]
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
error = None
cursor.close()
except KeyboardInterrupt:
connection.cancel()
error = "Query cancelled by user."
json_data = None
except Exception as e:
logging.exception(e)
raise sys.exc_info()[1], None, sys.exc_info()[2]
finally:
connection.close()
return json_data, error
register(Hive)

View File

@@ -1,151 +0,0 @@
import json
import logging
import sys
from redash.query_runner import *
from redash.utils import JSONEncoder
logger = logging.getLogger(__name__)
try:
from impala.dbapi import connect
from impala.error import DatabaseError, RPCError
enabled = True
except ImportError, e:
logger.warning("Missing dependencies. Please install impyla.")
logger.warning("You can use pip: pip install impyla")
enabled = False
COLUMN_NAME = 0
COLUMN_TYPE = 1
types_map = {
'BIGINT': TYPE_INTEGER,
'TINYINT': TYPE_INTEGER,
'SMALLINT': TYPE_INTEGER,
'INT': TYPE_INTEGER,
'DOUBLE': TYPE_FLOAT,
'DECIMAL': TYPE_FLOAT,
'FLOAT': TYPE_FLOAT,
'REAL': TYPE_FLOAT,
'BOOLEAN': TYPE_BOOLEAN,
'TIMESTAMP': TYPE_DATETIME,
'CHAR': TYPE_STRING,
'STRING': TYPE_STRING,
'VARCHAR': TYPE_STRING
}
class Impala(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
"type": "object",
"properties": {
"host": {
"type": "string"
},
"port": {
"type": "number"
},
"protocol": {
"type": "string",
"title": "Please specify beeswax or hiveserver2"
},
"database": {
"type": "string"
},
"use_ldap": {
"type": "boolean"
},
"ldap_user": {
"type": "string"
},
"ldap_password": {
"type": "string"
},
"timeout": {
"type": "number"
}
},
"required": ["host"]
}
@classmethod
def type(cls):
return "impala"
def __init__(self, configuration_json):
super(Impala, self).__init__(configuration_json)
def get_schema(self):
try:
schemas_query = "show schemas;"
tables_query = "show tables in %s;"
columns_query = "show column stats %s;"
schema = {}
for schema_name in map(lambda a: a['name'], self._run_query_internal(schemas_query)):
for table_name in map(lambda a: a['name'], self._run_query_internal(tables_query % schema_name)):
columns = map(lambda a: a['Column'], self._run_query_internal(columns_query % table_name))
if schema_name != 'default':
table_name = '{}.{}'.format(schema_name, table_name)
schema[table_name] = {'name': table_name, 'columns': columns}
except Exception, e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
return schema.values()
def run_query(self, query):
connection = None
try:
connection = connect(**self.configuration)
cursor = connection.cursor()
cursor.execute(query)
column_names = []
columns = []
for column in cursor.description:
column_name = column[COLUMN_NAME]
column_names.append(column_name)
columns.append({
'name': column_name,
'friendly_name': column_name,
'type': types_map.get(column[COLUMN_TYPE], None)
})
rows = [dict(zip(column_names, row)) for row in cursor]
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
error = None
cursor.close()
except DatabaseError as e:
logging.exception(e)
json_data = None
error = e.message
except RPCError as e:
logging.exception(e)
json_data = None
error = "Metastore Error [%s]" % e.message
except KeyboardInterrupt:
connection.cancel()
error = "Query cancelled by user."
json_data = None
except Exception as e:
logging.exception(e)
raise sys.exc_info()[1], None, sys.exc_info()[2]
finally:
connection.close()
return json_data, error
register(Impala)

View File

@@ -1,83 +0,0 @@
import json
import logging
from redash.utils import JSONEncoder
from redash.query_runner import *
logger = logging.getLogger(__name__)
try:
from influxdb import InfluxDBClusterClient
enabled = True
except ImportError:
logger.warning("Missing dependencies. Please install influxdb.")
logger.warning("You can use pip: pip install influxdb")
enabled = False
def _transform_result(results):
result_columns = []
result_rows = []
for result in results:
if not result_columns:
for c in result.raw['series'][0]['columns']:
result_columns.append({ "name": c })
for point in result.get_points():
result_rows.append(point)
return json.dumps({
"columns" : result_columns,
"rows" : result_rows
}, cls=JSONEncoder)
class InfluxDB(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'url': {
'type': 'string'
}
},
'required': ['url']
}
@classmethod
def enabled(cls):
return enabled
@classmethod
def annotate_query(cls):
return False
@classmethod
def type(cls):
return "influxdb"
def __init__(self, configuration_json):
super(InfluxDB, self).__init__(configuration_json)
def run_query(self, query):
client = InfluxDBClusterClient.from_DSN(self.configuration['url'])
logger.debug("influxdb url: %s", self.configuration['url'])
logger.debug("influxdb got query: %s", query)
try:
results = client.query(query)
if not isinstance(results, list):
results = [results]
json_data = _transform_result(results)
error = None
except Exception, ex:
json_data = None
error = ex.message
return json_data, error
register(InfluxDB)

View File

@@ -3,7 +3,6 @@ import datetime
import logging
import re
import time
from dateutil.parser import parse
from redash.utils import JSONEncoder
from redash.query_runner import *
@@ -13,7 +12,6 @@ logger = logging.getLogger(__name__)
try:
import pymongo
from bson.objectid import ObjectId
from bson.son import SON
enabled = True
except ImportError:
@@ -34,73 +32,24 @@ TYPES_MAP = {
date_regex = re.compile("ISODate\(\"(.*)\"\)", re.IGNORECASE)
class MongoDBJSONEncoder(JSONEncoder):
def default(self, o):
if isinstance(o, ObjectId):
return str(o)
return super(MongoDBJSONEncoder, self).default(o)
def _get_column_by_name(columns, column_name):
for c in columns:
if "name" in c and c["name"] == column_name:
return c
return None
def _convert_date(q, field_name):
m = date_regex.findall(q[field_name])
if len(m) > 0:
if q[field_name].find(":") == -1:
q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d")))
else:
q[field_name] = datetime.datetime.fromtimestamp(time.mktime(time.strptime(m[0], "%Y-%m-%d %H:%M")))
# Simple query example:
#
# {
# "collection" : "my_collection",
# "query" : {
# "date" : {
# "$gt" : "ISODate(\"2015-01-15 11:41\")",
# },
# "type" : 1
# },
# "fields" : {
# "_id" : 1,
# "name" : 2
# },
# "sort" : [
# {
# "name" : "date",
# "direction" : -1
# }
# ]
#
# }
#
#
# Aggregation
# ===========
# Uses a syntax similar to the one used in PyMongo; however, to preserve the
# correct sort order, the "$sort" operation takes a regular list, which is
# converted into a SON (sorted dictionary) object before execution.
#
# Aggregation query example:
#
# {
# "collection" : "things",
# "aggregate" : [
# {
# "$unwind" : "$tags"
# },
# {
# "$group" : {
# "_id" : "$tags",
# "count" : { "$sum" : 1 }
# }
# },
# {
# "$sort" : [
# {
# "name" : "count",
# "direction" : -1
# },
# {
# "name" : "_id",
# "direction" : -1
# }
# ]
# }
# ]
# }
#
#
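# As a minimal sketch of how run_query() below handles this (see the "$sort"
# handling in the aggregate branch): the "$sort" list from the aggregation
# example above is rewritten before execution as, roughly,
#
#   step["$sort"] = SON([("count", -1), ("_id", -1)])
#
# so that pymongo preserves the intended order of the sort keys.
#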
class MongoDB(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
@@ -119,8 +68,8 @@ class MongoDB(BaseQueryRunner):
'type': 'string',
'title': 'Replica Set Name'
},
},
'required': ['connectionString']
'required': ['connectionString']
}
}
@classmethod
@@ -134,49 +83,26 @@ class MongoDB(BaseQueryRunner):
def __init__(self, configuration_json):
super(MongoDB, self).__init__(configuration_json)
self.syntax = 'json'
self.db_name = self.configuration["dbName"]
self.is_replica_set = True if "replicaSetName" in self.configuration and self.configuration["replicaSetName"] else False
def _get_column_by_name(self, columns, column_name):
for c in columns:
if "name" in c and c["name"] == column_name:
return c
return None
def _fix_dates(self, data):
for k in data:
if isinstance(data[k], list):
for i in range(0, len(data[k])):
self._fix_dates(data[k][i])
elif isinstance(data[k], dict):
self._fix_dates(data[k])
else:
if isinstance(data[k], (str, unicode)):
self._convert_date(data, k)
def _convert_date(self, q, field_name):
m = date_regex.findall(q[field_name])
if len(m) > 0:
q[field_name] = parse(m[0], yearfirst=True)
def run_query(self, query):
if self.is_replica_set:
db_connection = pymongo.MongoReplicaSetClient(self.configuration["connectionString"], replicaSet=self.configuration["replicaSetName"])
else:
db_connection = pymongo.MongoClient(self.configuration["connectionString"])
db = db_connection[self.db_name]
if self.db_name not in db_connection.database_names():
return None, "Unknown database name '%s'" % self.db_name
db = db_connection[self.db_name ]
logger.debug("mongodb connection string: %s", self.configuration['connectionString'])
logger.debug("mongodb got query: %s", query)
try:
query_data = json.loads(query)
self._fix_dates(query_data)
except ValueError:
return None, "Invalid query format. The query is not a valid JSON."
@@ -185,79 +111,58 @@ class MongoDB(BaseQueryRunner):
else:
collection = query_data["collection"]
q = query_data.get("query", None)
q = None
if "query" in query_data:
q = query_data["query"]
for k in q:
if q[k] and type(q[k]) in [str, unicode]:
logging.debug(q[k])
_convert_date(q, k)
elif q[k] and type(q[k]) is dict:
for k2 in q[k]:
if type(q[k][k2]) in [str, unicode]:
_convert_date(q[k], k2)
f = None
aggregate = query_data.get("aggregate", None)
if aggregate:
for step in aggregate:
if "$sort" in step:
sort_list = []
for sort_item in step["$sort"]:
sort_list.append((sort_item["name"], sort_item["direction"]))
step["$sort"] = SON(sort_list)
if not aggregate:
s = None
if "sort" in query_data and query_data["sort"]:
s = []
for field in query_data["sort"]:
s.append((field["name"], field["direction"]))
if "fields" in query_data:
f = query_data["fields"]
s = None
if "sort" in query_data and query_data["sort"]:
s = []
for field_data in query_data["sort"]:
s.append((field_data["name"], field_data["direction"]))
for field_name in query_data["sort"]:
s.append((field_name, query_data["sort"][field_name]))
columns = []
rows = []
cursor = None
if q or (not q and not aggregate):
if s:
cursor = db[collection].find(q, f).sort(s)
else:
cursor = db[collection].find(q, f)
error = None
json_data = None
if "skip" in query_data:
cursor = cursor.skip(query_data["skip"])
if "limit" in query_data:
cursor = cursor.limit(query_data["limit"])
elif aggregate:
r = db[collection].aggregate(aggregate)
# Backwards compatibility with older pymongo versions.
#
# Older pymongo versions would return a dictionary from an aggregate command.
# The dict would contain a "result" key which would hold the cursor.
# Newer ones return pymongo.command_cursor.CommandCursor.
if isinstance(r, dict):
cursor = r["result"]
else:
cursor = r
if s:
cursor = db[collection].find(q, f).sort(s)
else:
cursor = db[collection].find(q, f)
for r in cursor:
for k in r:
if self._get_column_by_name(columns, k) is None:
if _get_column_by_name(columns, k) is None:
columns.append({
"name": k,
"friendly_name": k,
"type": TYPES_MAP.get(type(r[k]), TYPE_STRING)
})
# Convert ObjectId to string
if type(r[k]) == ObjectId:
r[k] = str(r[k])
rows.append(r)
if f:
ordered_columns = []
for k in sorted(f, key=f.get):
ordered_columns.append(self._get_column_by_name(columns, k))
ordered_columns.append(_get_column_by_name(columns, k))
columns = ordered_columns
@@ -266,8 +171,8 @@ class MongoDB(BaseQueryRunner):
"rows": rows
}
error = None
json_data = json.dumps(data, cls=MongoDBJSONEncoder)
json_data = json.dumps(data, cls=JSONEncoder)
return json_data, error
register(MongoDB)
register(MongoDB)

View File

@@ -7,24 +7,6 @@ from redash.query_runner import *
logger = logging.getLogger(__name__)
types_map = {
0: TYPE_FLOAT,
1: TYPE_INTEGER,
2: TYPE_INTEGER,
3: TYPE_INTEGER,
4: TYPE_FLOAT,
5: TYPE_FLOAT,
7: TYPE_DATETIME,
8: TYPE_INTEGER,
9: TYPE_INTEGER,
10: TYPE_DATE,
12: TYPE_DATETIME,
15: TYPE_STRING,
16: TYPE_INTEGER,
246: TYPE_FLOAT,
253: TYPE_STRING,
254: TYPE_STRING,
}
class Mysql(BaseQueryRunner):
@classmethod
@@ -45,10 +27,7 @@ class Mysql(BaseQueryRunner):
'db': {
'type': 'string',
'title': 'Database name'
},
"port": {
"type": "number"
},
}
},
'required': ['db']
}
@@ -103,29 +82,31 @@ class Mysql(BaseQueryRunner):
def run_query(self, query):
import MySQLdb
connection = None
connection = MySQLdb.connect(self.configuration.get('host', ''),
self.configuration.get('user', ''),
self.configuration.get('passwd', ''),
self.configuration['db'],
charset='utf8', use_unicode=True)
cursor = connection.cursor()
logger.debug("MySQL running query: %s", query)
try:
connection = MySQLdb.connect(host=self.configuration.get('host', ''),
user=self.configuration.get('user', ''),
passwd=self.configuration.get('passwd', ''),
db=self.configuration['db'],
port=self.configuration.get('port', 3306),
charset='utf8', use_unicode=True)
cursor = connection.cursor()
logger.debug("MySQL running query: %s", query)
cursor.execute(query)
data = cursor.fetchall()
# TODO - very similar to pg.py
if cursor.description is not None:
columns_data = [(i[0], i[1]) for i in cursor.description]
cursor_desc = cursor.description
if cursor_desc is not None:
num_fields = len(cursor_desc)
column_names = [i[0] for i in cursor.description]
rows = [dict(zip((c[0] for c in columns_data), row)) for row in data]
rows = [dict(zip(column_names, row)) for row in data]
columns = [{'name': col[0],
'friendly_name': col[0],
'type': types_map.get(col[1], None)} for col in columns_data]
# TODO: add types support
columns = [{'name': col_name,
'friendly_name': col_name,
'type': None} for col_name in column_names]
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
@@ -144,8 +125,7 @@ class Mysql(BaseQueryRunner):
except Exception as e:
raise sys.exc_info()[1], None, sys.exc_info()[2]
finally:
if connection:
connection.close()
connection.close()
return json_data, error
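
As a hedged illustration of the connection handling this hunk touches, here is a stripped-down connect-and-fetch flow with the optional port honored and the connection always closed. The configuration dict is a placeholder; MySQLdb (the MySQL-python driver) is assumed to be installed.

import MySQLdb

# Placeholder configuration; a real deployment reads these from the data source options.
CONFIG = {"host": "localhost", "user": "rd", "passwd": "", "db": "data", "port": 3306}

def run_mysql_query(query, config=CONFIG):
    connection = None
    try:
        connection = MySQLdb.connect(host=config.get("host", ""),
                                     user=config.get("user", ""),
                                     passwd=config.get("passwd", ""),
                                     db=config["db"],
                                     port=config.get("port", 3306),
                                     charset="utf8", use_unicode=True)
        cursor = connection.cursor()
        cursor.execute(query)
        data = cursor.fetchall()
        if cursor.description is None:
            return None, "Query completed but it returned no data."
        column_names = [col[0] for col in cursor.description]
        rows = [dict(zip(column_names, row)) for row in data]
        return rows, None
    except MySQLdb.Error as e:
        return None, str(e)
    finally:
        if connection:
            connection.close()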

View File

@@ -93,7 +93,7 @@ class PostgreSQL(BaseQueryRunner):
results, error = self.run_query(query)
if error is not None:
raise Exception("Failed getting schema.")
raise Exception("Failed getting schema.")
results = json.loads(results)
@@ -127,38 +127,35 @@ class PostgreSQL(BaseQueryRunner):
columns = []
duplicates_counter = 1
if cursor.description is not None:
for column in cursor.description:
# TODO: this deduplication needs to be generalized and reused in all query runners.
column_name = column.name
if column_name in column_names:
column_name += str(duplicates_counter)
duplicates_counter += 1
for column in cursor.description:
# TODO: this deduplication needs to be generalized and reused in all query runners.
column_name = column.name
if column_name in column_names:
column_name += str(duplicates_counter)
duplicates_counter += 1
column_names.append(column_name)
column_names.append(column_name)
columns.append({
'name': column_name,
'friendly_name': column_name,
'type': types_map.get(column.type_code, None)
})
columns.append({
'name': column_name,
'friendly_name': column_name,
'type': types_map.get(column.type_code, None)
})
rows = [dict(zip(column_names, row)) for row in cursor]
rows = [dict(zip(column_names, row)) for row in cursor]
data = {'columns': columns, 'rows': rows}
error = None
json_data = json.dumps(data, cls=JSONEncoder)
else:
error = 'Query completed but it returned no data.'
json_data = None
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
error = None
cursor.close()
except (select.error, OSError) as e:
logging.exception(e)
error = "Query interrupted. Please retry."
json_data = None
except psycopg2.DatabaseError as e:
logging.exception(e)
error = e.message
json_data = None
error = e.message
except KeyboardInterrupt:
connection.cancel()
error = "Query cancelled by user."

View File

@@ -1,98 +0,0 @@
import json
from redash.utils import JSONEncoder
from redash.query_runner import *
import logging
logger = logging.getLogger(__name__)
try:
from pyhive import presto
enabled = True
except ImportError:
logger.warning("Missing dependencies. Please install PyHive.")
logger.warning("You can use pip: pip install pyhive")
enabled = False
PRESTO_TYPES_MAPPING = {
"integer" : TYPE_INTEGER,
"long" : TYPE_INTEGER,
"bigint" : TYPE_INTEGER,
"float" : TYPE_FLOAT,
"double" : TYPE_FLOAT,
"boolean" : TYPE_BOOLEAN,
"string" : TYPE_STRING,
"varchar": TYPE_STRING,
"date" : TYPE_DATE,
}
class Presto(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'host': {
'type': 'string'
},
'port': {
'type': 'number'
},
'schema': {
'type': 'string'
},
'catalog': {
'type': 'string'
},
'username': {
'type': 'string'
}
},
'required': ['host']
}
@classmethod
def enabled(cls):
return enabled
@classmethod
def annotate_query(cls):
return False
@classmethod
def type(cls):
return "presto"
def __init__(self, configuration_json):
super(Presto, self).__init__(configuration_json)
def run_query(self, query):
connection = presto.connect(
host=self.configuration.get('host', ''),
port=self.configuration.get('port', 8080),
username=self.configuration.get('username', 'redash'),
catalog=self.configuration.get('catalog', 'hive'),
schema=self.configuration.get('schema', 'default'))
cursor = connection.cursor()
try:
cursor.execute(query)
columns_data = [(row[0], row[1]) for row in cursor.description]
columns = [{'name': col[0],
'friendly_name': col[0],
'type': PRESTO_TYPES_MAPPING.get(col[1], None)} for col in columns_data]
rows = [dict(zip(([c[0] for c in columns_data]), r)) for i, r in enumerate(cursor.fetchall())]
data = {'columns': columns, 'rows': rows}
json_data = json.dumps(data, cls=JSONEncoder)
error = None
except Exception, ex:
json_data = None
error = ex.message
return json_data, error
register(Presto)
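
The file removed above is a thin wrapper over PyHive's Presto DB-API client. A minimal sketch of that usage pattern, assuming PyHive is installed and a Presto coordinator is reachable; the host and connection values below are placeholders.

from pyhive import presto

def run_presto_query(sql):
    connection = presto.connect(host="presto.example.com", port=8080,
                                username="redash", catalog="hive",
                                schema="default")
    cursor = connection.cursor()
    cursor.execute(sql)
    # cursor.description holds (name, type, ...) tuples, one per column.
    column_names = [col[0] for col in cursor.description]
    return [dict(zip(column_names, row)) for row in cursor.fetchall()]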

View File

@@ -1,59 +1,53 @@
import sys
import datetime
import json
import logging
import weakref
from redash.query_runner import *
from redash import models
import importlib
logger = logging.getLogger(__name__)
def get_query_result(query_id):
try:
query = models.Query.get_by_id(query_id)
except models.Query.DoesNotExist:
raise Exception("Query id %s does not exist." % query_id)
from RestrictedPython import compile_restricted
from RestrictedPython.Guards import safe_builtins
if query.latest_query_data is None:
raise Exception("Query does not have results yet.")
if query.latest_query_data.data is None:
raise Exception("Query does not have results yet.")
return json.loads(query.latest_query_data.data)
class CustomPrint(object):
""" CustomPrint redirect "print" calls to be sent as "log" on the result object """
def __init__(self):
self.enabled = True
self.lines = []
def execute_query(data_source_name, query):
try:
data_source = models.DataSource.get(models.DataSource.name==data_source_name)
except models.DataSource.DoesNotExist:
raise Exception("Wrong data source name: %s." % data_source_name)
def write(self, text):
if self.enabled:
if text and text.strip():
log_line = "[{0}] {1}".format(datetime.datetime.utcnow().isoformat(), text)
self.lines.append(log_line)
query_runner = get_query_runner(data_source.type, data_source.options)
def enable(self):
self.enabled = True
data, error = query_runner.run_query(query)
if error is not None:
raise Exception(error)
def disable(self):
self.enabled = False
def __call__(self):
return self
# TODO: allow avoiding the json.dumps/loads in same process
return json.loads(data)
class Python(BaseQueryRunner):
"""
This is very, very unsafe. Use at your own risk with people you really trust.
"""
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'allowedImportModules': {
'type': 'string',
'title': 'Modules to import prior to running the script'
}
},
}
}
@classmethod
def enabled(cls):
return True
@classmethod
def annotate_query(cls):
return False
@@ -61,143 +55,25 @@ class Python(BaseQueryRunner):
def __init__(self, configuration_json):
super(Python, self).__init__(configuration_json)
self.syntax = "python"
self._allowed_modules = {}
self._script_locals = { "result" : { "rows" : [], "columns" : [], "log" : [] } }
self._enable_print_log = True
self._custom_print = CustomPrint()
if self.configuration.get("allowedImportModules", None):
for item in self.configuration["allowedImportModules"].split(","):
self._allowed_modules[item] = None
def custom_import(self, name, globals=None, locals=None, fromlist=(), level=0):
if name in self._allowed_modules:
m = None
if self._allowed_modules[name] is None:
m = importlib.import_module(name)
self._allowed_modules[name] = m
else:
m = self._allowed_modules[name]
return m
raise Exception("'{0}' is not configured as a supported import module".format(name))
def custom_write(self, obj):
"""
Custom hook which controls the way objects/lists/tuples/dicts behave in
RestrictedPython
"""
return obj
def custom_get_item(self, obj, key):
return obj[key]
def custom_get_iter(self, obj):
return iter(obj)
def add_result_column(self, result, column_name, friendly_name, column_type):
""" Helper function to add columns inside a Python script running in re:dash in an easier way """
if column_type not in SUPPORTED_COLUMN_TYPES:
raise Exception("'{0}' is not a supported column type".format(column_type))
if not "columns" in result:
result["columns"] = []
result["columns"].append({
"name" : column_name,
"friendly_name" : friendly_name,
"type" : column_type
})
def add_result_row(self, result, values):
if not "rows" in result:
result["rows"] = []
result["rows"].append(values)
def execute_query(self, data_source_name_or_id, query):
try:
if type(data_source_name_or_id) == int:
data_source = models.DataSource.get_by_id(data_source_name_or_id)
else:
data_source = models.DataSource.get(models.DataSource.name==data_source_name_or_id)
except models.DataSource.DoesNotExist:
raise Exception("Wrong data source name/id: %s." % data_source_name_or_id)
query_runner = get_query_runner(data_source.type, data_source.options)
data, error = query_runner.run_query(query)
if error is not None:
raise Exception(error)
# TODO: allow avoiding the json.dumps/loads in same process
return json.loads(data)
def get_query_result(self, query_id):
try:
query = models.Query.get_by_id(query_id)
except models.Query.DoesNotExist:
raise Exception("Query id %s does not exist." % query_id)
if query.latest_query_data is None:
raise Exception("Query does not have results yet.")
if query.latest_query_data.data is None:
raise Exception("Query does not have results yet.")
return json.loads(query.latest_query_data.data)
def run_query(self, query):
try:
error = None
code = compile_restricted(query, '<string>', 'exec')
script_globals = {'get_query_result': get_query_result, 'execute_query': execute_query}
script_locals = {'result': None}
# TODO: timeout, sandboxing
exec query in script_globals, script_locals
safe_builtins["_write_"] = self.custom_write
safe_builtins["__import__"] = self.custom_import
safe_builtins["_getattr_"] = getattr
safe_builtins["getattr"] = getattr
safe_builtins["_setattr_"] = setattr
safe_builtins["setattr"] = setattr
safe_builtins["_getitem_"] = self.custom_get_item
safe_builtins["_getiter_"] = self.custom_get_iter
safe_builtins["_print_"] = self._custom_print
if script_locals['result'] is None:
raise Exception("result wasn't set to value.")
restricted_globals = dict(__builtins__=safe_builtins)
restricted_globals["get_query_result"] = self.get_query_result
restricted_globals["execute_query"] = self.execute_query
restricted_globals["add_result_column"] = self.add_result_column
restricted_globals["add_result_row"] = self.add_result_row
restricted_globals["disable_print_log"] = self._custom_print.disable
restricted_globals["enable_print_log"] = self._custom_print.enable
restricted_globals["TYPE_DATETIME"] = TYPE_DATETIME
restricted_globals["TYPE_BOOLEAN"] = TYPE_BOOLEAN
restricted_globals["TYPE_INTEGER"] = TYPE_INTEGER
restricted_globals["TYPE_STRING"] = TYPE_STRING
restricted_globals["TYPE_DATE"] = TYPE_DATE
restricted_globals["TYPE_FLOAT"] = TYPE_FLOAT
# TODO: Figure out the best way to have a timeout on a script
# One option is to use ETA with Celery + timeouts on workers
# And replacement of worker process every X requests handled.
exec(code) in restricted_globals, self._script_locals
result = self._script_locals['result']
result['log'] = self._custom_print.lines
json_data = json.dumps(result)
json_data = json.dumps(script_locals['result'])
except KeyboardInterrupt:
error = "Query cancelled by user."
json_data = None
except Exception as e:
error = str(e)
json_data = None
raise sys.exc_info()[1], None, sys.exc_info()[2]
return json_data, error
register(Python)
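
The Python runner above builds on RestrictedPython: user code is compiled with compile_restricted and executed against a curated set of safe builtins. A bare-bones sketch of that sandboxing technique, not the full runner (the guard hooks, import whitelist and helper functions are omitted, and the example script is made up):

from RestrictedPython import compile_restricted
from RestrictedPython.Guards import safe_builtins

def run_sandboxed(script):
    # Reject dangerous constructs at compile time, then execute with restricted builtins.
    code = compile_restricted(script, '<user script>', 'exec')
    restricted_globals = {'__builtins__': safe_builtins}
    script_locals = {'result': None}
    exec(code, restricted_globals, script_locals)
    if script_locals['result'] is None:
        raise Exception("result wasn't set to value.")
    return script_locals['result']

# Example: run_sandboxed("result = {'rows': [], 'columns': []}")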

View File

@@ -30,7 +30,7 @@ class Script(BaseQueryRunner):
def __init__(self, configuration_json):
super(Script, self).__init__(configuration_json)
# Poor man's protection against running scripts from outside the scripts directory
# Poor man's protection against running scripts from output the scripts directory
if self.configuration["path"].find("../") > -1:
raise ValidationError("Scripts can only be run from the configured scripts directory")
@@ -41,13 +41,11 @@ class Script(BaseQueryRunner):
query = query.strip()
script = os.path.join(self.configuration["path"], query.split(" ")[0])
script = os.path.join(self.configuration["path"], query)
if not os.path.exists(script):
return None, "Script '%s' not found in script directory" % query
script = os.path.join(self.configuration["path"], query)
output = subprocess.check_output(script.split(" "), shell=False)
output = subprocess.check_output(script, shell=False)
if output is not None:
output = output.strip()
if output != "":
@@ -64,4 +62,4 @@ class Script(BaseQueryRunner):
return json_data, error
register(Script)
register(Script)
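
A compact sketch of the guard-then-execute pattern the Script runner uses: reject path traversal, resolve the script inside the configured directory, and run it without a shell. The scripts directory below is a placeholder.

import os
import subprocess

SCRIPTS_PATH = "/opt/redash/scripts"  # placeholder for the configured scripts directory

def run_script(query):
    query = query.strip()
    # Poor man's protection against running scripts from outside the scripts directory.
    if "../" in query:
        raise ValueError("Scripts can only be run from the configured scripts directory")
    script = os.path.join(SCRIPTS_PATH, query.split(" ")[0])
    if not os.path.exists(script):
        return None, "Script '%s' not found in script directory" % query
    output = subprocess.check_output(os.path.join(SCRIPTS_PATH, query).split(" "), shell=False)
    return output.strip(), None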

View File

@@ -22,10 +22,11 @@ class Url(BaseQueryRunner):
return False
def run_query(self, query):
base_url = self.configuration.get("url", None)
base_url = self.configuration["url"]
try:
error = None
query = query.strip()
if base_url is not None and base_url != "":
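
The Url runner treats the query text as a path appended to the configured base URL. A hedged sketch of that idea using requests (not necessarily the HTTP library the runner itself uses); the base URL is a placeholder and error handling is kept minimal.

import requests

def run_url_query(query, base_url="https://internal-api.example.com"):
    query = query.strip()
    if base_url:
        query = base_url + query
    response = requests.get(query)
    response.raise_for_status()
    return response.text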

View File

@@ -1,145 +0,0 @@
# Copyright 2015 Okta, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from flask.ext.login import login_user
import requests
from flask import redirect, url_for, Blueprint, request
from flask_oauth import OAuth
from redash import models, settings
from saml2 import (
BINDING_HTTP_POST,
BINDING_HTTP_REDIRECT,
entity,
)
from saml2.client import Saml2Client
from saml2.config import Config as Saml2Config
logger = logging.getLogger('saml_auth')
blueprint = Blueprint('saml_auth', __name__)
def get_saml_client():
'''
Return saml configuation.
The configuration is a hash for use by saml2.config.Config
'''
if settings.SAML_CALLBACK_SERVER_NAME:
acs_url=settings.SAML_CALLBACK_SERVER_NAME + url_for("saml_auth.idp_initiated")
else:
acs_url = url_for("saml_auth.idp_initiated",_external=True)
# NOTE:
# Ideally, this should fetch the metadata and pass it to
# PySAML2 via the "inline" metadata type.
# However, this method doesn't seem to work on PySAML2 v2.4.0
#
# SAML metadata changes very rarely. On a production system,
# this data should be cached as appropriate for your production system.
rv = requests.get(settings.SAML_METADATA_URL)
import tempfile
tmp = tempfile.NamedTemporaryFile()
f = open(tmp.name, 'w')
f.write(rv.text)
f.close()
saml_settings = {
'metadata': {
# 'inline': metadata,
"local": [tmp.name]
},
'service': {
'sp': {
'endpoints': {
'assertion_consumer_service': [
(acs_url, BINDING_HTTP_REDIRECT),
(acs_url, BINDING_HTTP_POST)
],
},
# Don't verify that the incoming requests originate from us via
# the built-in cache for authn request ids in pysaml2
'allow_unsolicited': True,
# Don't sign authn requests, since signed requests only make
# sense in a situation where you control both the SP and IdP
'authn_requests_signed': False,
'logout_requests_signed': True,
'want_assertions_signed': True,
'want_response_signed': False,
},
},
}
spConfig = Saml2Config()
spConfig.load(saml_settings)
spConfig.allow_unknown_attributes = True
saml_client = Saml2Client(config=spConfig)
tmp.close()
return saml_client
@blueprint.route("/saml/callback", methods=['POST'])
def idp_initiated():
saml_client = get_saml_client()
authn_response = saml_client.parse_authn_request_response(
request.form['SAMLResponse'],
entity.BINDING_HTTP_POST)
authn_response.get_identity()
user_info = authn_response.get_subject()
email = user_info.text
name = "%s %s" % (authn_response.ava['FirstName'][0], authn_response.ava['LastName'][0])
# This is what is known as "Just In Time (JIT) provisioning".
# What that means is that, if a user in a SAML assertion
# isn't in the user store, we create that user first, then log them in
try:
user_object = models.User.get(models.User.email == email)
if user_object.name != name:
logger.debug("Updating user name (%r -> %r)", user_object.name, name)
user_object.name = name
user_object.save()
except models.User.DoesNotExist:
logger.debug("Creating user object (%r)", name)
user_object = models.User.create(name=name, email=email, groups=models.User.DEFAULT_GROUPS)
login_user(user_object, remember=True)
url = url_for('index')
return redirect(url)
@blueprint.route("/saml/login")
def sp_initiated():
if not settings.SAML_METADATA_URL:
logger.error("Cannot invoke saml endpoint without metadata url in settings.")
return redirect(url_for('index'))
saml_client = get_saml_client()
reqid, info = saml_client.prepare_for_authenticate()
redirect_url = None
# Select the IdP URL to send the AuthN request to
for key, value in info['headers']:
if key is 'Location':
redirect_url = value
response = redirect(redirect_url, code=302)
# NOTE:
# I realize I _technically_ don't need to set Cache-Control or Pragma:
# http://stackoverflow.com/a/5494469
# However, Section 3.2.3.2 of the SAML spec suggests they are set:
# http://docs.oasis-open.org/security/saml/v2.0/saml-bindings-2.0-os.pdf
# We set those headers here as a "belt and suspenders" approach,
# since enterprise environments don't always conform to RFCs
response.headers['Cache-Control'] = 'no-cache, no-store'
response.headers['Pragma'] = 'no-cache'
return response
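
The deleted module above implements "just in time" provisioning: if the asserted email is unknown, a user record is created before login. A simplified sketch of that provisioning step; the peewee-style User model and the login_user callable are passed in as parameters precisely because they are assumptions, not part of this snippet.

def provision_and_login(User, login_user, name, email):
    # Update the stored name if it changed; create the user if it does not exist yet.
    try:
        user = User.get(User.email == email)
        if user.name != name:
            user.name = name
            user.save()
    except User.DoesNotExist:
        user = User.create(name=name, email=email)
    login_user(user, remember=True)
    return user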

View File

@@ -32,25 +32,10 @@ def array_from_string(str):
return array
def set_from_string(str):
return set(array_from_string(str))
def parse_boolean(str):
return json.loads(str.lower())
def all_settings():
from types import ModuleType
settings = {}
for name, item in globals().iteritems():
if not callable(item) and not name.startswith("__") and not isinstance(item, ModuleType):
settings[name] = item
return settings
NAME = os.environ.get('REDASH_NAME', 're:dash')
REDIS_URL = os.environ.get('REDASH_REDIS_URL', "redis://localhost:6379/0")
@@ -68,23 +53,19 @@ CELERY_BACKEND = os.environ.get("REDASH_CELERY_BACKEND", REDIS_URL)
# The following enables a periodic job (every 5 minutes) that removes unused query results. Behind this "feature flag" until
# proved to be "safe".
QUERY_RESULTS_CLEANUP_ENABLED = parse_boolean(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_ENABLED", "true"))
QUERY_RESULTS_CLEANUP_ENABLED = parse_boolean(os.environ.get("REDASH_QUERY_RESULTS_CLEANUP_ENABLED", "false"))
AUTH_TYPE = os.environ.get("REDASH_AUTH_TYPE", "api_key")
AUTH_TYPE = os.environ.get("REDASH_AUTH_TYPE", "hmac")
PASSWORD_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_PASSWORD_LOGIN_ENABLED", "true"))
# Google Apps domain to allow access from; any user with an email in this Google Apps domain will be allowed
# access
GOOGLE_APPS_DOMAIN = set_from_string(os.environ.get("REDASH_GOOGLE_APPS_DOMAIN", ""))
GOOGLE_APPS_DOMAIN = os.environ.get("REDASH_GOOGLE_APPS_DOMAIN", "")
GOOGLE_CLIENT_ID = os.environ.get("REDASH_GOOGLE_CLIENT_ID", "")
GOOGLE_CLIENT_SECRET = os.environ.get("REDASH_GOOGLE_CLIENT_SECRET", "")
GOOGLE_OAUTH_ENABLED = GOOGLE_CLIENT_ID and GOOGLE_CLIENT_SECRET
SAML_METADATA_URL = os.environ.get("REDASH_SAML_METADATA_URL", "")
SAML_LOGIN_ENABLED = SAML_METADATA_URL != ""
SAML_CALLBACK_SERVER_NAME = os.environ.get("REDASH_SAML_CALLBACK_SERVER_NAME", "")
STATIC_ASSETS_PATH = fix_assets_path(os.environ.get("REDASH_STATIC_ASSETS_PATH", "../rd_ui/app/"))
JOB_EXPIRY_TIME = int(os.environ.get("REDASH_JOB_EXPIRY_TIME", 3600 * 6))
COOKIE_SECRET = os.environ.get("REDASH_COOKIE_SECRET", "c292a0a3aa32397cdb050e233733900f")
@@ -92,43 +73,16 @@ LOG_LEVEL = os.environ.get("REDASH_LOG_LEVEL", "INFO")
CLIENT_SIDE_METRICS = parse_boolean(os.environ.get("REDASH_CLIENT_SIDE_METRICS", "false"))
ANALYTICS = os.environ.get("REDASH_ANALYTICS", "")
# Mail settings:
MAIL_SERVER = os.environ.get('REDASH_MAIL_SERVER', 'localhost')
MAIL_PORT = int(os.environ.get('REDASH_MAIL_PORT', 25))
MAIL_USE_TLS = parse_boolean(os.environ.get('REDASH_MAIL_USE_TLS', 'false'))
MAIL_USE_SSL = parse_boolean(os.environ.get('REDASH_MAIL_USE_SSL', 'false'))
MAIL_USERNAME = os.environ.get('REDASH_MAIL_USERNAME', None)
MAIL_PASSWORD = os.environ.get('REDASH_MAIL_PASSWORD', None)
MAIL_DEFAULT_SENDER = os.environ.get('REDASH_MAIL_DEFAULT_SENDER', None)
MAIL_MAX_EMAILS = os.environ.get('REDASH_MAIL_MAX_EMAILS', None)
MAIL_ASCII_ATTACHMENTS = parse_boolean(os.environ.get('REDASH_MAIL_ASCII_ATTACHMENTS', 'false'))
HOST = os.environ.get('REDASH_HOST', '')
# CORS settings for the Query Result API (and possibly future external APIs).
# In most cases all you need to do is set REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN
# to the calling domain (or domains in a comma separated list).
ACCESS_CONTROL_ALLOW_ORIGIN = set_from_string(os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN", ""))
ACCESS_CONTROL_ALLOW_CREDENTIALS = parse_boolean(os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_CREDENTIALS", "false"))
ACCESS_CONTROL_REQUEST_METHOD = os.environ.get("REDASH_CORS_ACCESS_CONTROL_REQUEST_METHOD", "GET, POST, PUT")
ACCESS_CONTROL_ALLOW_HEADERS = os.environ.get("REDASH_CORS_ACCESS_CONTROL_ALLOW_HEADERS", "Content-Type")
# Query Runners
QUERY_RUNNERS = array_from_string(os.environ.get("REDASH_ENABLED_QUERY_RUNNERS", ",".join([
'redash.query_runner.big_query',
'redash.query_runner.google_spreadsheets',
'redash.query_runner.graphite',
'redash.query_runner.mongodb',
'redash.query_runner.mysql',
'redash.query_runner.pg',
'redash.query_runner.script',
'redash.query_runner.url',
'redash.query_runner.influx_db',
'redash.query_runner.elasticsearch',
'redash.query_runner.presto',
'redash.query_runner.hive_ds',
'redash.query_runner.impala_ds',
])))
# Features:
FEATURE_ALLOW_ALL_TO_EDIT_QUERIES = parse_boolean(os.environ.get("REDASH_FEATURE_ALLOW_ALL_TO_EDIT", "true"))
FEATURE_TABLES_PERMISSIONS = parse_boolean(os.environ.get("REDASH_FEATURE_TABLES_PERMISSIONS", "false"))
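
All of the settings above follow the same pattern: read an environment variable, then run it through a tiny parser. A self-contained sketch of that pattern; the parser bodies mirror the helpers referenced above as far as they are shown here, and the defaults are illustrative.

import json
import os

def parse_boolean(value):
    # "true"/"True"/"false" -> bool, via the same json-based trick used above.
    return json.loads(value.lower())

def array_from_string(value):
    array = value.split(",")
    if "" in array:
        array.remove("")
    return array

def set_from_string(value):
    return set(array_from_string(value))

PASSWORD_LOGIN_ENABLED = parse_boolean(os.environ.get("REDASH_PASSWORD_LOGIN_ENABLED", "true"))
GOOGLE_APPS_DOMAIN = set_from_string(os.environ.get("REDASH_GOOGLE_APPS_DOMAIN", ""))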

View File

@@ -1,11 +1,11 @@
import time
import datetime
import logging
from flask.ext.mail import Message
import redis
from celery import Task
from celery.result import AsyncResult
from celery.utils.log import get_task_logger
from redash import redis_connection, models, statsd_client, settings, utils, mail
from redash import redis_connection, models, statsd_client, settings
from redash.utils import gen_query_hash
from redash.worker import celery
from redash.query_runner import get_query_runner
@@ -47,13 +47,12 @@ class QueryTask(object):
return self._async_result.id
@classmethod
def add_task(cls, query, data_source, scheduled=False, metadata={}):
def add_task(cls, query, data_source, scheduled=False):
query_hash = gen_query_hash(query)
logging.info("[Manager][%s] Inserting job", query_hash)
logging.info("[Manager] Metadata: [%s]", metadata)
try_count = 0
job = None
while try_count < cls.MAX_RETRIES:
try_count += 1
@@ -78,9 +77,8 @@ class QueryTask(object):
else:
queue_name = data_source.queue_name
result = execute_query.apply_async(args=(query, data_source.id, metadata), queue=queue_name)
result = execute_query.apply_async(args=(query, data_source.id), queue=queue_name)
job = cls(async_result=result)
logging.info("[Manager][%s] Created new job: %s", query_hash, job.id)
pipe.set(cls._job_lock_id(query_hash, data_source.id), job.id, settings.JOB_EXPIRY_TIME)
pipe.execute()
@@ -148,8 +146,8 @@ def refresh_queries():
outdated_queries_count = 0
for query in models.Query.outdated_queries():
QueryTask.add_task(query.query, query.data_source, scheduled=True,
metadata={'Query ID': query.id, 'Username': 'Scheduled'})
# TODO: this should go into lower priority
QueryTask.add_task(query.query, query.data_source, scheduled=True)
outdated_queries_count += 1
statsd_client.gauge('manager.outdated_queries', outdated_queries_count)
@@ -199,9 +197,9 @@ def cleanup_tasks():
logger.warning("%s is ready (%s), removing lock.", lock_keys[i], t.celery_status)
redis_connection.delete(lock_keys[i])
# if t.celery_status == 'STARTED' and t.id not in all_tasks:
# logger.warning("Couldn't find active job for: %s, removing lock.", lock_keys[i])
# redis_connection.delete(lock_keys[i])
if t.celery_status == 'STARTED' and t.id not in all_tasks:
logger.warning("Couldn't find active job for: %s, removing lock.", lock_keys[i])
redis_connection.delete(lock_keys[i])
@celery.task(base=BaseTask)
@@ -223,7 +221,7 @@ def cleanup_query_results():
@celery.task(base=BaseTask)
def refresh_schemas():
"""
Refreshs the datasources schema.
Refershs the datasources schema.
"""
for ds in models.DataSource.all():
@@ -231,41 +229,9 @@ def refresh_schemas():
ds.get_schema(refresh=True)
@celery.task(bind=True, base=BaseTask)
def check_alerts_for_query(self, query_id):
from redash.wsgi import app
logger.debug("Checking query %d for alerts", query_id)
query = models.Query.get_by_id(query_id)
for alert in query.alerts:
alert.query = query
new_state = alert.evaluate()
if new_state != alert.state:
logger.info("Alert %d new state: %s", alert.id, new_state)
old_state = alert.state
alert.update_instance(state=new_state)
if old_state == models.Alert.UNKNOWN_STATE and new_state == models.Alert.OK_STATE:
logger.debug("Skipping notification (previous state was unknown and now it's ok).")
continue
# message = Message
recipients = [s.email for s in alert.subscribers()]
logger.debug("Notifying: %s", recipients)
html = """
Check <a href="{host}/alerts/{alert_id}">alert</a> / check <a href="{host}/queries/{query_id}">query</a>.
""".format(host=settings.HOST, alert_id=alert.id, query_id=query.id)
with app.app_context():
message = Message(recipients=recipients,
subject="[{1}] {0}".format(alert.name, new_state.upper()),
html=html)
mail.send(message)
@celery.task(bind=True, base=BaseTask, track_started=True)
def execute_query(self, query, data_source_id, metadata):
def execute_query(self, query, data_source_id):
# TODO: maybe this should be a class?
start_time = time.time()
logger.info("Loading data source (%d)...", data_source_id)
@@ -281,15 +247,9 @@ def execute_query(self, query, data_source_id, metadata):
query_runner = get_query_runner(data_source.type, data_source.options)
if query_runner.annotate_query():
metadata['Task ID'] = self.request.id
metadata['Query Hash'] = query_hash
metadata['Queue'] = self.request.delivery_info['routing_key']
annotation = u", ".join([u"{}: {}".format(k, v) for k, v in metadata.iteritems()])
logging.debug(u"Annotation: %s", annotation)
annotated_query = u"/* {} */ {}".format(annotation, query)
# TODO: annotate with queue name
annotated_query = "/* Task Id: %s, Query hash: %s */ %s" % \
(self.request.id, query_hash, query)
else:
annotated_query = query
@@ -305,9 +265,7 @@ def execute_query(self, query, data_source_id, metadata):
redis_connection.delete(QueryTask._job_lock_id(query_hash, data_source.id))
if not error:
query_result, updated_query_ids = models.QueryResult.store_result(data_source.id, query_hash, query, data, run_time, utils.utcnow())
for query_id in updated_query_ids:
check_alerts_for_query.delay(query_id)
query_result = models.QueryResult.store_result(data_source.id, query_hash, query, data, run_time, datetime.datetime.utcnow())
else:
raise Exception(error)
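
The annotation change in this hunk turns the task metadata into a SQL comment and prepends it to the query before execution. A small sketch of that formatting step with made-up metadata values:

def annotate_query(query, metadata):
    # {"Task ID": "abc", "Query Hash": "1f2e"} -> "/* Task ID: abc, Query Hash: 1f2e */ SELECT ..."
    annotation = u", ".join(u"{}: {}".format(k, v) for k, v in metadata.items())
    return u"/* {} */ {}".format(annotation, query)

# Example:
# annotate_query("SELECT 1", {"Task ID": "abc", "Query Hash": "1f2e"})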

View File

@@ -4,11 +4,9 @@ import codecs
import decimal
import datetime
import json
import random
import re
import hashlib
import sqlparse
import pytz
COMMENTS_REGEX = re.compile("/\*.*?\*/")
@@ -64,14 +62,6 @@ class SQLMetaData(object):
return False
def utcnow():
"""Return datetime.now value with timezone specified.
Without the timezone data, when the timestamp stored to the database it gets the current timezone of the server,
which leads to errors in calculations.
"""
return datetime.datetime.now(pytz.utc)
def slugify(s):
return re.sub('[^a-z0-9_\-]+', '-', s.lower())
@@ -89,14 +79,6 @@ def gen_query_hash(sql):
return hashlib.md5(sql.encode('utf-8')).hexdigest()
def generate_token(length):
chars = ('abcdefghijklmnopqrstuvwxyz'
'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
'0123456789')
rand = random.SystemRandom()
return ''.join(rand.choice(chars) for x in range(length))
class JSONEncoder(json.JSONEncoder):
"""Custom JSON encoding class, to handle Decimal and datetime.date instances.
"""
@@ -104,9 +86,9 @@ class JSONEncoder(json.JSONEncoder):
if isinstance(o, decimal.Decimal):
return float(o)
if isinstance(o, (datetime.date, datetime.time, datetime.timedelta)):
if isinstance(o, datetime.date):
return o.isoformat()
super(JSONEncoder, self).default(o)
@@ -146,4 +128,4 @@ class UnicodeWriter:
def writerows(self, rows):
for row in rows:
self.writerow(row)
self.writerow(row)
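
The JSONEncoder change above widens serialization from dates to time values, with Decimals converted to floats. A standalone sketch of that encoder (the timedelta branch from the diff is left out here, since timedelta has no isoformat()):

import datetime
import decimal
import json

class ResultJSONEncoder(json.JSONEncoder):
    # Serialize Decimals as floats and date/time values via isoformat(),
    # deferring to the default behaviour for everything else.
    def default(self, o):
        if isinstance(o, decimal.Decimal):
            return float(o)
        if isinstance(o, (datetime.date, datetime.time)):
            return o.isoformat()
        return super(ResultJSONEncoder, self).default(o)

# Example:
# json.dumps({"price": decimal.Decimal("1.5"), "day": datetime.date(2015, 4, 6)}, cls=ResultJSONEncoder)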

Some files were not shown because too many files have changed in this diff.