mirror of https://github.com/getredash/redash.git
synced 2025-12-25 01:03:20 -05:00

Compare commits: v0.9.2.b15 ... v0.6.0-rc (3 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 150fc6dbf0 | |
| | ccff9614d4 | |
| | a7b881874f | |
@@ -1,4 +0,0 @@
rd_ui/.tmp/
rd_ui/node_modules/
.git/
.vagrant/
.env.example (13 changed lines)
@@ -1,6 +1,9 @@
|
||||
REDASH_STATIC_ASSETS_PATH="../rd_ui/app/"
|
||||
REDASH_LOG_LEVEL="INFO"
|
||||
REDASH_REDIS_URL=redis://localhost:6379/1
|
||||
REDASH_DATABASE_URL="postgresql://redash"
|
||||
REDASH_COOKIE_SECRET=veryverysecret
|
||||
REDASH_CONNECTION_ADAPTER=pg
|
||||
REDASH_CONNECTION_STRING="dbname=data"
|
||||
REDASH_STATIC_ASSETS_PATH=../rd_ui/app/
|
||||
REDASH_GOOGLE_APPS_DOMAIN=
|
||||
REDASH_ADMINS=
|
||||
REDASH_WORKERS_COUNT=2
|
||||
REDASH_COOKIE_SECRET=
|
||||
REDASH_DATABASE_URL='postgresql://rd'
|
||||
REDASH_LOG_LEVEL = "INFO"
|
||||
.gitignore (vendored, 4 changed lines)
@@ -8,7 +8,6 @@ celerybeat-schedule*
|
||||
.#*
|
||||
\#*#
|
||||
*~
|
||||
_build
|
||||
|
||||
# Vagrant related
|
||||
.vagrant
|
||||
@@ -19,6 +18,3 @@ redash/dump.rdb
|
||||
venv
|
||||
|
||||
dump.rdb
|
||||
|
||||
# Docker related
|
||||
docker-compose.yml
|
||||
|
||||
Dockerfile (51 changed lines)
@@ -1,51 +0,0 @@
|
||||
FROM ubuntu:trusty
|
||||
|
||||
# Ubuntu packages
|
||||
RUN apt-get update && \
|
||||
apt-get install -y python-pip python-dev curl build-essential pwgen libffi-dev sudo git-core wget \
|
||||
# Postgres client
|
||||
libpq-dev \
|
||||
# Additional packages required for data sources:
|
||||
libssl-dev libmysqlclient-dev freetds-dev && \
|
||||
apt-get clean && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Users creation
|
||||
RUN useradd --system --comment " " --create-home redash
|
||||
|
||||
# Pip requirements for all data source types
|
||||
RUN pip install -U setuptools && \
|
||||
pip install supervisor==3.1.2
|
||||
|
||||
COPY . /opt/redash/current
|
||||
RUN chown -R redash /opt/redash/current
|
||||
|
||||
# Setting working directory
|
||||
WORKDIR /opt/redash/current
|
||||
|
||||
# Install project specific dependencies
|
||||
RUN pip install -r requirements_all_ds.txt && \
|
||||
pip install -r requirements.txt
|
||||
|
||||
RUN curl https://deb.nodesource.com/setup_4.x | bash - && \
|
||||
apt-get install -y nodejs && \
|
||||
sudo -u redash -H make deps && \
|
||||
rm -rf rd_ui/node_modules /home/redash/.npm /home/redash/.cache && \
|
||||
apt-get purge -y nodejs && \
|
||||
apt-get clean && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Setup supervisord
|
||||
RUN mkdir -p /opt/redash/supervisord && \
|
||||
mkdir -p /opt/redash/logs && \
|
||||
cp /opt/redash/current/setup/docker/supervisord/supervisord.conf /opt/redash/supervisord/supervisord.conf
|
||||
|
||||
# Fix permissions
|
||||
RUN chown -R redash /opt/redash
|
||||
|
||||
# Expose ports
|
||||
EXPOSE 5000
|
||||
EXPOSE 9001
|
||||
|
||||
# Startup script
|
||||
CMD ["supervisord", "-c", "/opt/redash/supervisord/supervisord.conf"]
|
||||
LICENSE (19 changed lines)
@@ -1,5 +1,4 @@
|
||||
Copyright (c) 2013-2016, Arik Fraimovich.
|
||||
All rights reserved.
|
||||
Copyright 2013 DoAT. All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted provided that the following conditions are met:
|
||||
@@ -11,13 +10,17 @@ are permitted provided that the following conditions are met:
|
||||
this list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
||||
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
|
||||
BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
THIS SOFTWARE IS PROVIDED “AS IS” WITHOUT ANY WARRANTIES WHATSOEVER.
|
||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF NON INFRINGEMENT, MERCHANTABILITY AND FITNESS FOR A
|
||||
PARTICULAR PURPOSE ARE HEREBY DISCLAIMED. IN NO EVENT SHALL DoAT OR CONTRIBUTORS
|
||||
BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
|
||||
EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
The views and conclusions contained in the software and documentation are those of
|
||||
the authors and should not be interpreted as representing official policies,
|
||||
either expressed or implied, of DoAT.
|
||||
|
||||
Makefile (11 changed lines)
@@ -6,17 +6,18 @@ BASE_VERSION=$(shell python ./manage.py version | cut -d + -f 1)
|
||||
FILENAME=$(CIRCLE_ARTIFACTS)/$(NAME).$(VERSION).tar.gz
|
||||
|
||||
deps:
|
||||
if [ -d "./rd_ui/app" ]; then cd rd_ui && npm install; fi
|
||||
if [ -d "./rd_ui/app" ]; then cd rd_ui && npm run bower install; fi
|
||||
if [ -d "./rd_ui/app" ]; then cd rd_ui && npm run build; fi
|
||||
cd rd_ui && npm install
|
||||
cd rd_ui && npm install -g bower grunt-cli
|
||||
cd rd_ui && bower install
|
||||
cd rd_ui && grunt build
|
||||
|
||||
pack:
|
||||
sed -ri "s/^__version__ = '([0-9.]*)'/__version__ = '$(FULL_VERSION)'/" redash/__init__.py
|
||||
tar -zcv -f $(FILENAME) --exclude="optipng*" --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *
|
||||
tar -zcv -f $(FILENAME) --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *
|
||||
|
||||
upload:
|
||||
python bin/release_manager.py $(CIRCLE_SHA1) $(BASE_VERSION) $(FILENAME)
|
||||
|
||||
test:
|
||||
nosetests --with-coverage --cover-package=redash tests/
|
||||
nosetests --with-coverage --cover-package=redash tests/*.py
|
||||
#cd rd_ui && grunt test
|
||||
|
||||
README.md (32 changed lines)
@@ -1,12 +1,9 @@
|
||||
More details about the future of re:dash: http://bit.ly/journey-first-step
|
||||
|
||||
---
|
||||
|
||||
<p align="center">
|
||||
<img title="re:dash" src='http://redash.io/static/old_img/redash_logo.png' width="200px"/>
|
||||
<img title="re:dash" src='https://raw.githubusercontent.com/EverythingMe/redash/screenshots/redash_logo.png' />
|
||||
|
||||
</p>
|
||||
<p align="center">
|
||||
<img title="Build Status" src='https://circleci.com/gh/getredash/redash.png?circle-token=8a695aa5ec2cbfa89b48c275aea298318016f040'/>
|
||||
<img title="Build Status" src='https://circleci.com/gh/EverythingMe/redash.png?circle-token=8a695aa5ec2cbfa89b48c275aea298318016f040'/>
|
||||
</p>
|
||||
|
||||
**_re:dash_** is our take on freeing the data within our company in a way that will better fit our culture and usage patterns.
|
||||
@@ -14,8 +11,7 @@ More details about the future of re:dash : http://bit.ly/journey-first-step
|
||||
Prior to **_re:dash_**, we tried to use traditional BI suites and discovered a set of bloated, technically challenged and slow tools/flows. What we were looking for was a more hacker'ish way to look at data, so we built one.
|
||||
|
||||
**_re:dash_** was built to allow fast and easy access to billions of records, that we process and collect using Amazon Redshift ("petabyte scale data warehouse" that "speaks" PostgreSQL).
|
||||
Today **_re:dash_** has support for querying multiple databases, including: Redshift, Google BigQuery, PostgreSQL, MySQL, Graphite,
|
||||
Presto, Google Spreadsheets, Cloudera Impala, Hive and custom scripts.
|
||||
Today **_re:dash_** has support for querying multiple databases, including: Redshift, Google BigQuery, PostgreSQL, MySQL, Graphite and custom scripts.
|
||||
|
||||
**_re:dash_** consists of two parts:
|
||||
|
||||
@@ -26,27 +22,31 @@ Presto, Google Spreadsheets, Cloudera Impala, Hive and custom scripts.
|
||||
|
||||
## Demo
|
||||
|
||||
<img src="https://cloud.githubusercontent.com/assets/71468/12611424/1faf4d6a-c4f5-11e5-89b5-31efc1155d2c.gif" width="60%"/>
|
||||

|
||||
|
||||
You can try out the demo instance: http://demo.redash.io/ (login with any Google account).
|
||||
|
||||
## Getting Started
|
||||
|
||||
* [Setting up re:dash instance](http://redash.io/deployment/setup.html) (includes links to ready made AWS/GCE images).
|
||||
* [Documentation](http://docs.redash.io).
|
||||
* [Setting up re:dash instance](https://github.com/EverythingMe/redash/wiki/Setting-up-re:dash-instance) (includes links to ready made AWS/GCE images).
|
||||
* Additional documentation in the [Wiki](https://github.com/everythingme/redash/wiki).
|
||||
|
||||
|
||||
## Getting help
|
||||
|
||||
* [Google Group (mailing list)](https://groups.google.com/forum/#!forum/redash-users): the best place to get updates about new releases or ask general questions.
|
||||
* Find us [on gitter](https://gitter.im/getredash/redash#) (chat).
|
||||
* Contact Arik, the maintainer directly: arik@redash.io.
|
||||
* Find us [on gitter](https://gitter.im/EverythingMe/redash#) (chat).
|
||||
* Contact Arik, the maintainer directly: arik@everything.me.
|
||||
|
||||
## Roadmap
|
||||
|
||||
TBD.
|
||||
|
||||
## Reporting Bugs and Contributing Code
|
||||
|
||||
* Want to report a bug or request a feature? Please open [an issue](https://github.com/getredash/redash/issues/new).
|
||||
* Want to help us build **_re:dash_**? Fork the project, edit in a [dev environment](http://docs.redash.io/en/latest/dev/vagrant.html), and make a pull request. We need all the help we can get!
|
||||
* Want to report a bug or request a feature? Please open [an issue](https://github.com/everythingme/redash/issues/new).
|
||||
* Want to help us build **_re:dash_**? Fork the project and make a pull request. We need all the help we can get!
|
||||
|
||||
## License
|
||||
|
||||
See [LICENSE](https://github.com/getredash/redash/blob/master/LICENSE) file.
|
||||
See [LICENSE](https://github.com/EverythingMe/redash/blob/master/LICENSE) file.
|
||||
|
||||
@@ -7,7 +7,7 @@ import requests
|
||||
|
||||
github_token = os.environ['GITHUB_TOKEN']
|
||||
auth = (github_token, 'x-oauth-basic')
|
||||
repo = 'getredash/redash'
|
||||
repo = 'EverythingMe/redash'
|
||||
|
||||
def _github_request(method, path, params=None, headers={}):
|
||||
if not path.startswith('https://api.github.com'):
|
||||
@@ -56,14 +56,14 @@ def create_release(version, commit_sha):
|
||||
return response.json()
|
||||
|
||||
def upload_asset(release, filepath):
|
||||
upload_url = release['upload_url'].replace('{?name,label}', '')
|
||||
upload_url = release['upload_url'].replace('{?name}', '')
|
||||
filename = filepath.split('/')[-1]
|
||||
|
||||
with open(filepath) as file_content:
|
||||
headers = {'Content-Type': 'application/gzip'}
|
||||
response = requests.post(upload_url, file_content, params={'name': filename}, headers=headers, auth=auth, verify=False)
|
||||
|
||||
if response.status_code != 201: # not 200/201/...
|
||||
if response.status_code != 201: # not 200/201/...
|
||||
raise exception_from_error('Failed uploading asset', response)
|
||||
|
||||
return response
|
||||
@@ -104,26 +104,9 @@ def get_changelog(commit_sha):
|
||||
|
||||
return "\n".join(changes)
|
||||
|
||||
def update_release_commit_sha(release, commit_sha):
|
||||
params = {
|
||||
'target_commitish': commit_sha,
|
||||
}
|
||||
|
||||
response = _github_request('patch', 'repos/{}/releases/{}'.format(repo, release['id']), params)
|
||||
|
||||
if response.status_code != 200:
|
||||
raise exception_from_error("Failed updating commit sha for existing release", response)
|
||||
|
||||
return response.json()
|
||||
|
||||
def update_release(version, build_filepath, commit_sha):
|
||||
try:
|
||||
release = get_rc_release(version)
|
||||
if release:
|
||||
release = update_release_commit_sha(release, commit_sha)
|
||||
else:
|
||||
release = create_release(version, commit_sha)
|
||||
|
||||
release = get_rc_release(version) or create_release(version, commit_sha)
|
||||
print "Using release id: {}".format(release['id'])
|
||||
|
||||
remove_previous_builds(release)
|
||||
@@ -132,8 +115,8 @@ def update_release(version, build_filepath, commit_sha):
|
||||
changelog = get_changelog(commit_sha)
|
||||
|
||||
response = _github_request('patch', release['url'], {'body': changelog})
|
||||
if response.status_code != 200:
|
||||
raise exception_from_error("Failed updating release description", response)
|
||||
print response.status_code
|
||||
print response.text
|
||||
|
||||
except Exception, ex:
|
||||
print ex
|
||||
|
||||
bin/test_multithreading.py, normal file (63 changed lines)
@@ -0,0 +1,63 @@
|
||||
"""
|
||||
Script to test concurrency (multithreading/multiprocess) issues with the workers. Use with caution.
|
||||
"""
|
||||
import json
|
||||
import atfork
|
||||
atfork.monkeypatch_os_fork_functions()
|
||||
import atfork.stdlib_fixer
|
||||
atfork.stdlib_fixer.fix_logging_module()
|
||||
|
||||
import time
|
||||
from redash.data import worker
|
||||
from redash import models, data_manager, redis_connection
|
||||
|
||||
if __name__ == '__main__':
|
||||
models.create_db(True, False)
|
||||
|
||||
print "Creating data source..."
|
||||
data_source = models.DataSource.create(name="Concurrency", type="pg", options="dbname=postgres")
|
||||
|
||||
print "Clear jobs/hashes:"
|
||||
redis_connection.delete("jobs")
|
||||
query_hashes = redis_connection.keys("query_hash_*")
|
||||
if query_hashes:
|
||||
redis_connection.delete(*query_hashes)
|
||||
|
||||
starting_query_results_count = models.QueryResult.select().count()
|
||||
jobs_count = 5000
|
||||
workers_count = 10
|
||||
|
||||
print "Creating jobs..."
|
||||
for i in xrange(jobs_count):
|
||||
query = "SELECT {}".format(i)
|
||||
print "Inserting: {}".format(query)
|
||||
data_manager.add_job(query=query, priority=worker.Job.LOW_PRIORITY,
|
||||
data_source=data_source)
|
||||
|
||||
print "Starting workers..."
|
||||
workers = data_manager.start_workers(workers_count)
|
||||
|
||||
print "Waiting for jobs to be done..."
|
||||
keep_waiting = True
|
||||
while keep_waiting:
|
||||
results_count = models.QueryResult.select().count() - starting_query_results_count
|
||||
print "QueryResults: {}".format(results_count)
|
||||
time.sleep(5)
|
||||
if results_count == jobs_count:
|
||||
print "Yay done..."
|
||||
keep_waiting = False
|
||||
|
||||
data_manager.stop_workers()
|
||||
|
||||
qr_count = 0
|
||||
for qr in models.QueryResult.select():
|
||||
number = int(qr.query.split()[1])
|
||||
data_number = json.loads(qr.data)['rows'][0].values()[0]
|
||||
|
||||
if number != data_number:
|
||||
print "Oops? {} != {} ({})".format(number, data_number, qr.id)
|
||||
qr_count += 1
|
||||
|
||||
print "Verified {} query results.".format(qr_count)
|
||||
|
||||
print "Done."
|
||||
circle.yml (24 changed lines)
@@ -1,34 +1,32 @@
|
||||
machine:
|
||||
services:
|
||||
- docker
|
||||
node:
|
||||
version:
|
||||
0.12.4
|
||||
0.10.24
|
||||
python:
|
||||
version:
|
||||
2.7.3
|
||||
dependencies:
|
||||
pre:
|
||||
- pip install -r requirements_dev.txt
|
||||
- make deps
|
||||
- pip install -r dev_requirements.txt
|
||||
- pip install -r requirements.txt
|
||||
- if [ "$CIRCLE_BRANCH" = "master" ]; then make deps; fi
|
||||
cache_directories:
|
||||
- rd_ui/node_modules/
|
||||
- rd_ui/app/bower_components/
|
||||
test:
|
||||
override:
|
||||
- nosetests --with-xunit --xunit-file=$CIRCLE_TEST_REPORTS/junit.xml --with-coverage --cover-package=redash tests/
|
||||
- make test
|
||||
post:
|
||||
- make pack
|
||||
deployment:
|
||||
github_and_docker:
|
||||
test:
|
||||
branch: chore/release_process
|
||||
commands:
|
||||
- make upload
|
||||
github:
|
||||
branch: master
|
||||
commands:
|
||||
- make pack
|
||||
- make upload
|
||||
- echo "rd_ui/app" >> .dockerignore
|
||||
- docker pull redash/redash:latest
|
||||
- docker build -t redash/redash:$(./manage.py version | sed -e "s/\+/./") .
|
||||
- docker login -e $DOCKER_EMAIL -u $DOCKER_USER -p $DOCKER_PASS
|
||||
- docker push redash/redash:$(./manage.py version | sed -e "s/\+/./")
|
||||
notify:
|
||||
webhooks:
|
||||
- url: https://webhooks.gitter.im/e/895d09c3165a0913ac2f
|
||||
|
||||
@@ -1,28 +0,0 @@
|
||||
redash:
|
||||
image: redash
|
||||
ports:
|
||||
- "5000:5000"
|
||||
links:
|
||||
- redis
|
||||
- postgres
|
||||
environment:
|
||||
REDASH_STATIC_ASSETS_PATH:"../rd_ui/app/"
|
||||
REDASH_LOG_LEVEL:"INFO"
|
||||
REDASH_REDIS_URL:redis://localhost:6379/0
|
||||
REDASH_DATABASE_URL:"postgresql://redash"
|
||||
REDASH_COOKIE_SECRET:veryverysecret
|
||||
REDASH_GOOGLE_APPS_DOMAIN:
|
||||
redis:
|
||||
image: redis:2.8
|
||||
postgres:
|
||||
image: postgres:9.3
|
||||
volumes:
|
||||
- /opt/postgres-data:/var/lib/postgresql/data
|
||||
redash-nginx:
|
||||
image: redash-nginx:1.0
|
||||
ports:
|
||||
- "80:80"
|
||||
volumes:
|
||||
- "../redash-nginx/nginx.conf:/etc/nginx/nginx.conf"
|
||||
links:
|
||||
- redash
|
||||
docs/Makefile (192 changed lines)
@@ -1,192 +0,0 @@
|
||||
# Makefile for Sphinx documentation
|
||||
#
|
||||
|
||||
# You can set these variables from the command line.
|
||||
SPHINXOPTS =
|
||||
SPHINXBUILD = sphinx-build
|
||||
PAPER =
|
||||
BUILDDIR = _build
|
||||
|
||||
# User-friendly check for sphinx-build
|
||||
ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
|
||||
$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
|
||||
endif
|
||||
|
||||
# Internal variables.
|
||||
PAPEROPT_a4 = -D latex_paper_size=a4
|
||||
PAPEROPT_letter = -D latex_paper_size=letter
|
||||
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
# the i18n builder cannot share the environment and doctrees with the others
|
||||
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
|
||||
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext
|
||||
|
||||
help:
|
||||
@echo "Please use \`make <target>' where <target> is one of"
|
||||
@echo " html to make standalone HTML files"
|
||||
@echo " dirhtml to make HTML files named index.html in directories"
|
||||
@echo " singlehtml to make a single large HTML file"
|
||||
@echo " pickle to make pickle files"
|
||||
@echo " json to make JSON files"
|
||||
@echo " htmlhelp to make HTML files and a HTML help project"
|
||||
@echo " qthelp to make HTML files and a qthelp project"
|
||||
@echo " applehelp to make an Apple Help Book"
|
||||
@echo " devhelp to make HTML files and a Devhelp project"
|
||||
@echo " epub to make an epub"
|
||||
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
|
||||
@echo " latexpdf to make LaTeX files and run them through pdflatex"
|
||||
@echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
|
||||
@echo " text to make text files"
|
||||
@echo " man to make manual pages"
|
||||
@echo " texinfo to make Texinfo files"
|
||||
@echo " info to make Texinfo files and run them through makeinfo"
|
||||
@echo " gettext to make PO message catalogs"
|
||||
@echo " changes to make an overview of all changed/added/deprecated items"
|
||||
@echo " xml to make Docutils-native XML files"
|
||||
@echo " pseudoxml to make pseudoxml-XML files for display purposes"
|
||||
@echo " linkcheck to check all external links for integrity"
|
||||
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
|
||||
@echo " coverage to run coverage check of the documentation (if enabled)"
|
||||
|
||||
clean:
|
||||
rm -rf $(BUILDDIR)/*
|
||||
|
||||
html:
|
||||
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
||||
|
||||
dirhtml:
|
||||
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
|
||||
|
||||
singlehtml:
|
||||
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
|
||||
|
||||
pickle:
|
||||
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
|
||||
@echo
|
||||
@echo "Build finished; now you can process the pickle files."
|
||||
|
||||
json:
|
||||
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
|
||||
@echo
|
||||
@echo "Build finished; now you can process the JSON files."
|
||||
|
||||
htmlhelp:
|
||||
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run HTML Help Workshop with the" \
|
||||
".hhp project file in $(BUILDDIR)/htmlhelp."
|
||||
|
||||
qthelp:
|
||||
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
|
||||
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
|
||||
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/redash.qhcp"
|
||||
@echo "To view the help file:"
|
||||
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/redash.qhc"
|
||||
|
||||
applehelp:
|
||||
$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
|
||||
@echo
|
||||
@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
|
||||
@echo "N.B. You won't be able to view it unless you put it in" \
|
||||
"~/Library/Documentation/Help or install it in your application" \
|
||||
"bundle."
|
||||
|
||||
devhelp:
|
||||
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
|
||||
@echo
|
||||
@echo "Build finished."
|
||||
@echo "To view the help file:"
|
||||
@echo "# mkdir -p $$HOME/.local/share/devhelp/redash"
|
||||
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/redash"
|
||||
@echo "# devhelp"
|
||||
|
||||
epub:
|
||||
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
|
||||
@echo
|
||||
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
|
||||
|
||||
latex:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo
|
||||
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
|
||||
@echo "Run \`make' in that directory to run these through (pdf)latex" \
|
||||
"(use \`make latexpdf' here to do that automatically)."
|
||||
|
||||
latexpdf:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo "Running LaTeX files through pdflatex..."
|
||||
$(MAKE) -C $(BUILDDIR)/latex all-pdf
|
||||
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
latexpdfja:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo "Running LaTeX files through platex and dvipdfmx..."
|
||||
$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
|
||||
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
text:
|
||||
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
|
||||
@echo
|
||||
@echo "Build finished. The text files are in $(BUILDDIR)/text."
|
||||
|
||||
man:
|
||||
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
|
||||
@echo
|
||||
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
|
||||
|
||||
texinfo:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo
|
||||
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
|
||||
@echo "Run \`make' in that directory to run these through makeinfo" \
|
||||
"(use \`make info' here to do that automatically)."
|
||||
|
||||
info:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo "Running Texinfo files through makeinfo..."
|
||||
make -C $(BUILDDIR)/texinfo info
|
||||
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
|
||||
|
||||
gettext:
|
||||
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
|
||||
@echo
|
||||
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
|
||||
|
||||
changes:
|
||||
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
|
||||
@echo
|
||||
@echo "The overview file is in $(BUILDDIR)/changes."
|
||||
|
||||
linkcheck:
|
||||
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
|
||||
@echo
|
||||
@echo "Link check complete; look for any errors in the above output " \
|
||||
"or in $(BUILDDIR)/linkcheck/output.txt."
|
||||
|
||||
doctest:
|
||||
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
|
||||
@echo "Testing of doctests in the sources finished, look at the " \
|
||||
"results in $(BUILDDIR)/doctest/output.txt."
|
||||
|
||||
coverage:
|
||||
$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
|
||||
@echo "Testing of coverage in the sources finished, look at the " \
|
||||
"results in $(BUILDDIR)/coverage/python.txt."
|
||||
|
||||
xml:
|
||||
$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
|
||||
@echo
|
||||
@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
|
||||
|
||||
pseudoxml:
|
||||
$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
|
||||
@echo
|
||||
@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
|
||||
docs/conf.py (110 changed lines)
@@ -1,110 +0,0 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# re:dash documentation build configuration file, created by
|
||||
# sphinx-quickstart on Mon Jul 20 22:40:24 2015.
|
||||
#
|
||||
# This file is execfile()d with the current directory set to its
|
||||
# containing dir.
|
||||
#
|
||||
# Note that not all possible configuration values are present in this
|
||||
# autogenerated file.
|
||||
#
|
||||
# All configuration values have a default; values that are commented out
|
||||
# serve to show the default.
|
||||
|
||||
import sys
|
||||
import os
|
||||
import shlex
|
||||
|
||||
# Add any Sphinx extension module names here, as strings. They can be
|
||||
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
|
||||
# ones.
|
||||
extensions = []
|
||||
|
||||
# The suffix(es) of source filenames.
|
||||
# You can specify multiple suffix as a list of string:
|
||||
# source_suffix = ['.rst', '.md']
|
||||
source_suffix = '.rst'
|
||||
|
||||
# The master toctree document.
|
||||
master_doc = 'index'
|
||||
|
||||
# General information about the project.
|
||||
project = u're:dash'
|
||||
copyright = u'2015, EverythingMe'
|
||||
author = u'EverythingMe'
|
||||
|
||||
# The language for content autogenerated by Sphinx. Refer to documentation
|
||||
# for a list of supported languages.
|
||||
#
|
||||
# This is also used if you do content translation via gettext catalogs.
|
||||
# Usually you set "language" from the command line for these cases.
|
||||
language = None
|
||||
|
||||
exclude_patterns = ['_build']
|
||||
|
||||
# The name of the Pygments (syntax highlighting) style to use.
|
||||
pygments_style = 'sphinx'
|
||||
|
||||
# If true, `todo` and `todoList` produce output, else they produce nothing.
|
||||
todo_include_todos = False
|
||||
|
||||
# -- Options for HTML output ----------------------------------------------
|
||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||
# a list of builtin themes.
|
||||
import sphinx_rtd_theme
|
||||
html_theme = "sphinx_rtd_theme"
|
||||
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
|
||||
|
||||
# The name for this set of Sphinx documents. If None, it defaults to
|
||||
# "<project> v<release> documentation".
|
||||
#html_title = None
|
||||
|
||||
# A shorter title for the navigation bar. Default is the same as html_title.
|
||||
#html_short_title = None
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top
|
||||
# of the sidebar.
|
||||
#html_logo = None
|
||||
|
||||
# The name of an image file (within the static path) to use as favicon of the
|
||||
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
|
||||
# pixels large.
|
||||
#html_favicon = None
|
||||
|
||||
# Add any paths that contain custom static files (such as style sheets) here,
|
||||
# relative to this directory. They are copied after the builtin static files,
|
||||
# so a file named "default.css" will overwrite the builtin "default.css".
|
||||
html_static_path = ['_static']
|
||||
|
||||
# If true, links to the reST sources are added to the pages.
|
||||
html_show_sourcelink = True
|
||||
|
||||
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
|
||||
html_show_sphinx = False
|
||||
|
||||
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
|
||||
html_show_copyright = False
|
||||
|
||||
# Output file base name for HTML help builder.
|
||||
htmlhelp_basename = 'redashdoc'
|
||||
|
||||
# -- Options for manual page output ---------------------------------------
|
||||
|
||||
# One entry per manual page. List of tuples
|
||||
# (source start file, name, description, authors, manual section).
|
||||
man_pages = [
|
||||
(master_doc, 'redash', u're:dash Documentation',
|
||||
[author], 1)
|
||||
]
|
||||
|
||||
# -- Options for Texinfo output -------------------------------------------
|
||||
|
||||
# Grouping the document tree into Texinfo files. List of tuples
|
||||
# (source start file, target name, title, author,
|
||||
# dir menu entry, description, category)
|
||||
texinfo_documents = [
|
||||
(master_doc, 'redash', u're:dash Documentation',
|
||||
author, 'redash', 'One line description of project.',
|
||||
'Miscellaneous'),
|
||||
]
|
||||
@@ -1,261 +0,0 @@
|
||||
Supported Data Sources
|
||||
######################
|
||||
|
||||
re:dash supports several types of data sources, and if you set it up using the provided images, it should already have
|
||||
the needed dependencies to use them all. Starting from version 0.7, you can manage data sources from the UI
|
||||
by browsing to ``/data_sources`` on your instance.
|
||||
|
||||
If one of the listed data source types isn't available when trying to create a new data source, make sure that:
|
||||
|
||||
1. You installed the required dependencies.
2. If you've set a custom value for the ``REDASH_ENABLED_QUERY_RUNNERS`` setting, make sure it's included in the list.
|
||||
|
||||
PostgreSQL / Redshift / Greenplum
|
||||
---------------------------------
|
||||
|
||||
- **Options**:
|
||||
|
||||
- Database name (mandatory)
|
||||
- User
|
||||
- Password
|
||||
- Host
|
||||
- Port
|
||||
|
||||
- **Additional requirements**:
|
||||
|
||||
- None
|
||||
|
||||
|
||||
MySQL
|
||||
-----
|
||||
|
||||
- **Options**:
|
||||
|
||||
- Database name (mandatory)
|
||||
- User
|
||||
- Password
|
||||
- Host
|
||||
- Port
|
||||
|
||||
- **Additional requirements**:
|
||||
|
||||
- ``MySQL-python`` python package
|
||||
|
||||
|
||||
Google BigQuery
|
||||
---------------
|
||||
|
||||
- **Options**:
|
||||
|
||||
- Project ID (mandatory)
|
||||
- JSON key file, generated when creating a service account (see `instructions <https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount>`__).
|
||||
|
||||
|
||||
- **Additional requirements**:
|
||||
|
||||
- ``google-api-python-client``, ``oauth2client`` and ``pyopenssl`` python packages (on Ubuntu it might require installing ``libffi-dev`` and ``libssl-dev`` as well).
|
||||
|
||||
|
||||
Graphite
|
||||
--------
|
||||
|
||||
- **Options**:
|
||||
|
||||
- Url (mandatory)
|
||||
- User
|
||||
- Password
|
||||
- Verify SSL certificate
|
||||
|
||||
|
||||
MongoDB
|
||||
-------
|
||||
|
||||
- **Options**:
|
||||
|
||||
- Connection String (mandatory)
|
||||
- Database name
|
||||
- Replica set name
|
||||
|
||||
- **Additional requirements**:
|
||||
|
||||
- ``pymongo`` python package.
|
||||
|
||||
For information on how to write MongoDB queries, see :doc:`documentation </usage/mongodb_querying>`.
|
||||
|
||||
|
||||
ElasticSearch
|
||||
-------------
|
||||
|
||||
...
|
||||
|
||||
InfluxDB
|
||||
--------
|
||||
|
||||
...
|
||||
|
||||
Presto
|
||||
------
|
||||
|
||||
- **Options**:
|
||||
|
||||
- Host (mandatory)
|
||||
- Address to a Presto coordinator.
|
||||
- Port
|
||||
- Port to a Presto coordinator. `8080` is the default port.
|
||||
- Schema
|
||||
- Default schema name of Presto. You can read other schemas by qualified name like `FROM myschema.table1`.
|
||||
- Catalog
|
||||
- Catalog (connector) name of Presto such as `hive-cdh4`, `hive-hadoop1`, etc.
|
||||
- Username
|
||||
- User name to connect to a Presto.
|
||||
|
||||
- **Additional requirements**:
|
||||
|
||||
- ``pyhive`` python package.
|
||||
|
||||
Hive
|
||||
----
|
||||
|
||||
...
|
||||
|
||||
Impala
|
||||
------
|
||||
|
||||
...
|
||||
|
||||
URL
|
||||
---
|
||||
|
||||
A URL based data source which requests URLs that return the :doc:`results JSON
|
||||
format </dev/results_format>`.
|
||||
|
||||
Very useful in situations where you want to expose the data without
|
||||
connecting directly to the database.
|
||||
|
||||
The query itself inside re:dash will simply contain the URL to be
|
||||
executed (e.g. http://myserver/path/myquery).
|
||||
|
||||
- **Options**:
|
||||
|
||||
- Url - set this if you want to limit queries to certain base path.
|
||||
|
||||
|
||||
Google Spreadsheets
|
||||
-------------------
|
||||
|
||||
- **Options**:
|
||||
|
||||
- JSON key file, generated when creating a service account (see `instructions <https://developers.google.com/identity/protocols/OAuth2ServiceAccount#creatinganaccount>`__).
|
||||
|
||||
- **Additional requirements**:
|
||||
|
||||
- ``gspread`` and ``oauth2client`` python packages.
|
||||
|
||||
Notes:
|
||||
|
||||
1. To be able to load the spreadsheet in re:dash - share your it with
|
||||
your ServiceAccount's email (it can be found in the credentials json
|
||||
file, for example
|
||||
43242343247-fjdfakljr3r2@developer.gserviceaccount.com).
|
||||
2. The query format is "DOC\_UUID\|SHEET\_NUM" (for example
|
||||
"kjsdfhkjh4rsEFSDFEWR232jkddsfh\|0")
|
||||
|
||||
|
||||
Python
|
||||
------
|
||||
|
||||
**Execute other queries, manipulate and compute with Python code**
|
||||
|
||||
This is a special query runner that will execute the provided Python code as the query. Useful for various scenarios such as
|
||||
merging data from different data sources, doing data transformation/manipulation that isn't trivial with SQL, merging
|
||||
with remote data or using data analysis libraries such as Pandas (see `example query <https://gist.github.com/arikfr/be7c2888520c44cf4f0f>`__).
|
||||
|
||||
While the Python query runner uses a sandbox (RestrictedPython), it's not 100% secure and the security depends on the
|
||||
modules you allow to import. We recommend enabling the Python query runner only in a trusted environment (meaning: behind
|
||||
VPN and with users you trust).
|
||||
|
||||
- **Options**:
|
||||
|
||||
- Allowed Modules in a comma separated list (optional). **NOTE:**
|
||||
You MUST make sure these modules are installed on the machine
|
||||
running the Celery workers.
|
||||
|
||||
Notes:
|
||||
|
||||
- For security, the python query runner is disabled by default.
|
||||
To enable, add ``redash.query_runner.python`` to the ``REDASH_ADDITIONAL_QUERY_RUNNERS`` environment variable. If you used
|
||||
the bootstrap script, or one of the provided images, add to ``/opt/redash/.env`` file the line: ``export REDASH_ADDITIONAL_QUERY_RUNNERS=redash.query_runner.python``.
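
A sketch of what such a query might look like. This is illustrative only: it assumes the runner picks up whatever the script leaves in a ``result`` variable, shaped like the :doc:`results JSON format </dev/results_format>`; the data is made up, and the real runner's helper functions are deliberately not used here.

.. code:: python

    # Illustrative Python-runner query: compute values in Python and hand back a result dict.
    counts = {"signups": 42, "purchases": 7}  # pretend this came from another query or an API

    result = {
        "columns": [
            {"name": "metric", "friendly_name": "Metric", "type": "string"},
            {"name": "value", "friendly_name": "Value", "type": "integer"},
        ],
        "rows": [{"metric": k, "value": v} for k, v in counts.items()],
    }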
|
||||
|
||||
|
||||
Vertica
|
||||
-------
|
||||
|
||||
- **Options**:
|
||||
|
||||
- Database (mandatory)
|
||||
- User
|
||||
- Password
|
||||
- Host
|
||||
- Port
|
||||
|
||||
- **Additional requirements**:
|
||||
|
||||
- ``vertica-python`` python package
|
||||
|
||||
Oracle
|
||||
------
|
||||
|
||||
- **Options**
|
||||
|
||||
- DSN Service name
|
||||
- User
|
||||
- Password
|
||||
- Host
|
||||
- Port
|
||||
|
||||
- **Additional requirements**
|
||||
|
||||
- ``cx_Oracle`` python package. This requires the installation of the Oracle `instant client <http://www.oracle.com/technetwork/database/features/instant-client/index-097480.html>`__.
|
||||
|
||||
Treasure Data
|
||||
-------------
|
||||
|
||||
- **Options**
|
||||
|
||||
- Type (TreasureData)
|
||||
- API Key
|
||||
- Database Name
|
||||
- Type (Presto/Hive[default])
|
||||
|
||||
- **Additional requirements**
|
||||
- Must have account on https://console.treasuredata.com
|
||||
|
||||
Documentation: https://docs.treasuredata.com/articles/redash
|
||||
|
||||
|
||||
|
||||
Microsoft SQL Server
|
||||
--------------------
|
||||
|
||||
- **Options**:
|
||||
|
||||
- Database (mandatory)
|
||||
- User #TODO: DB users only? What about domain users?
|
||||
- Password
|
||||
- Server
|
||||
- Port
|
||||
|
||||
- **Notes**:
|
||||
|
||||
- Data type support is currently quite limited.
|
||||
- Complex and new types are converted to strings in ``re:dash``
|
||||
- Coerce into simpler types if needed using ``CAST()``
|
||||
- Known conversion issues for:
|
||||
- DATE
|
||||
- TIME
|
||||
- DATETIMEOFFSET
|
||||
|
||||
- **Additional requirements**:
|
||||
|
||||
- ``freetds-dev`` C library
|
||||
- ``pymsssql`` python package, requires FreeTDS to be installed first
|
||||
docs/dev.rst (11 changed lines)
@@ -1,11 +0,0 @@
|
||||
Developer Information
|
||||
=====================
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:glob:
|
||||
|
||||
dev/vagrant
|
||||
dev/*
|
||||
|
||||
|
||||
@@ -1,94 +0,0 @@
|
||||
Query Execution Model
|
||||
#####################
|
||||
|
||||
Introduction
|
||||
============
|
||||
|
||||
The first data source used with re:dash was Redshift. Because
|
||||
we had billions of records in Redshift, and some queries were costly to
|
||||
re-run, from the get go there was the idea of caching query results in
|
||||
re:dash.
|
||||
|
||||
This was to relieve stress from the Redshift cluster and also to improve
|
||||
user experience.
|
||||
|
||||
How do queries get executed and cached in re:dash?
==================================================
|
||||
|
||||
Server
|
||||
------
|
||||
|
||||
To make sure each query is executed only once at any given time, we
|
||||
translate the query to a ``query hash``, using the following code:
|
||||
|
||||
.. code:: python
|
||||
|
||||
import hashlib
import re

COMMENTS_REGEX = re.compile("/\*.*?\*/")

def gen_query_hash(sql):
    sql = COMMENTS_REGEX.sub("", sql)
    sql = "".join(sql.split()).lower()
    return hashlib.md5(sql.encode('utf-8')).hexdigest()
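
Whitespace, letter case and ``/* ... */`` comments don't affect the hash, so differently formatted copies of the same query share one cache entry. A small illustration, continuing from the snippet above:

.. code:: python

    assert gen_query_hash("SELECT 1") == gen_query_hash("select  1 /* cached */")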
|
||||
|
||||
When query execution is done, the result gets stored in the
``query_results`` table. We also find all queries in the
``queries`` table that have the same query hash and update their
reference to the query result we just saved
|
||||
(`code <https://github.com/getredash/redash/blob/master/redash/models.py#L235>`__).
|
||||
|
||||
Client
|
||||
------
|
||||
|
||||
The client (UI) will execute queries in two scenarios:
|
||||
|
||||
1. (automatically) When opening a query page of a query that doesn't
|
||||
have a result yet.
|
||||
2. (manually) When the user clicks on "Execute".
|
||||
|
||||
In each case the client does a POST request to ``/api/query_results``
|
||||
with the following parameters: ``query`` (the query text),
|
||||
``data_source_id`` (data source to execute the query with) and ``ttl``.
|
||||
|
||||
When loading a cached result, ``ttl`` will be the one set on the query
(if it was set). This is a relic from previous versions, and I'm not
sure if it's really used anymore, as usually we will fetch the query result
using its id.
|
||||
|
||||
When loading a non cached result, ``ttl`` will be 0 which will "force"
|
||||
the server to execute the query.
|
||||
|
||||
As a response to ``/api/query_results`` the server will send either the
query results (in the case of a cached query) or the job id of the currently
executing query. When a job id is received, the client starts polling on
this id until a query result is received (this is encapsulated in the
``Query`` and ``QueryResult`` services).
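
A rough client-side sketch of this flow. Treat it as a hedged illustration: the response field names and the polling endpoint aren't spelled out in this document, so ``poll_job`` is left as a stub and the keys are guesses used for illustration only.

.. code:: python

    import requests

    def poll_job(base_url, job_id):
        # The polling endpoint isn't described here; left as a placeholder stub.
        raise NotImplementedError

    def run_query(base_url, query, data_source_id, ttl=0):
        # ttl=0 "forces" execution; a cached result may still come back directly.
        payload = {"query": query, "data_source_id": data_source_id, "ttl": ttl}
        response = requests.post(base_url + "/api/query_results", data=payload).json()
        if "job" in response:                 # query is executing: poll until a result arrives
            return poll_job(base_url, response["job"])
        return response                       # cached query result returned immediately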
|
||||
|
||||
Ideas on how to implement query parameters
|
||||
==========================================
|
||||
|
||||
Client side only implementation
|
||||
-------------------------------
|
||||
|
||||
(This was actually implemented; see pull request `#363 <https://github.com/getredash/redash/pull/363>`__ for details.)
|
||||
|
||||
The basic idea of how to implement parameterized queries is to treat the
query as a template and merge it with parameters taken from the query string
or the UI (or both).
|
||||
|
||||
When the caching facility isn't required (with queries that return in a
|
||||
reasonable time frame) the implementation can be completely client side
|
||||
and the backend can be "blind" to the parameters - it just receives the
|
||||
final query to execute and returns result.
|
||||
|
||||
As one improvement over this, we can let the UI/user specify the TTL
|
||||
value when making the request to ``/api/query_results``, in which case
|
||||
caching will be available too, while not having to make the server aware
|
||||
of the parameters.
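
A minimal sketch of that client-side merge, using nothing beyond standard-library string templating (the parameter syntax and names here are illustrative, not re:dash's):

.. code:: python

    from string import Template

    # The saved query is treated as a template; the backend stays "blind" to the parameters.
    template = Template("SELECT count(*) FROM events WHERE created_at >= '$start_date'")
    params = {"start_date": "2016-01-01"}  # e.g. taken from the query string or the UI

    final_query = template.safe_substitute(params)
    # final_query is what gets sent to /api/query_results as a plain query.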
|
||||
|
||||
Hybrid
|
||||
------
|
||||
|
||||
Another option would be to store the list of possible parameters for a
query, with their default/optional values. In such a case, the server can
|
||||
prefetch all the options and cache them to provide faster results to the
|
||||
client.
|
||||
@@ -1,30 +0,0 @@
|
||||
Data Source Results Format
|
||||
==========================
|
||||
|
||||
All data sources in re:dash return the following results in JSON format:
|
||||
|
||||
.. code:: javascript
|
||||
|
||||
{
|
||||
"columns" : [
|
||||
{
|
||||
// Required: a unique identifier of the column name in this result
|
||||
"name" : "COLUMN_NAME",
|
||||
// Required: friendly name of the column that will appear in the results
|
||||
"friendly_name" : "FRIENDLY_NAME",
|
||||
// Optional: If not specified sort might not work well.
|
||||
// Supported types: integer, float, boolean, string (default), datetime (ISO-8601 text format)
|
||||
"type" : "VALUE_TYPE"
|
||||
},
|
||||
...
|
||||
],
|
||||
"rows" : [
|
||||
{
|
||||
// name is the column name as it appears in the columns above.
|
||||
// VALUE is a valid JSON value. For dates it's an ISO-8601 string.
|
||||
"name" : VALUE,
|
||||
"name2" : VALUE2
|
||||
},
|
||||
...
|
||||
]
|
||||
}
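
As a sketch of producing this structure, the helper below converts a list of Python row dicts into the documented payload. The type-guessing rule is a simplification for illustration, not re:dash's own logic.

.. code:: python

    def rows_to_redash_result(rows):
        """Build the documented results JSON structure from a list of row dicts."""
        type_names = {int: "integer", float: "float", bool: "boolean"}
        columns = []
        if rows:
            for name, value in rows[0].items():
                columns.append({
                    "name": name,
                    "friendly_name": name.replace("_", " ").title(),
                    "type": type_names.get(type(value), "string"),
                })
        return {"columns": columns, "rows": rows}

    result = rows_to_redash_result([{"user_id": 1, "signup_rate": 0.42}])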
|
||||
@@ -1,38 +0,0 @@
|
||||
Setting up development environment (using Vagrant)
|
||||
==================================================
|
||||
|
||||
To simplify contribution there is a `Vagrant
|
||||
box <https://vagrantcloud.com/redash/boxes/dev>`__ available with all
|
||||
the needed software to run re:dash for development (use it only for
|
||||
development; for demo purposes there is the
|
||||
`redash/demo <https://vagrantcloud.com/redash/boxes/demo>`__ box and the
|
||||
AWS/GCE images).
|
||||
|
||||
To get started with this box:
|
||||
|
||||
1. Make sure you have recent version of
|
||||
`Vagrant <https://www.vagrantup.com/>`__ installed.
|
||||
2. Clone the re:dash repository:
|
||||
``git clone https://github.com/getredash/redash.git``.
|
||||
3. Change dir into the repository (``cd redash``) and run
|
||||
``vagrant up``. This might take some time the first time you run it,
|
||||
as it downloads the Vagrant virtual box.
|
||||
4. Once Vagrant is ready, ssh into the instance (``vagrant ssh``), and
|
||||
change dir to ``/opt/redash/current`` -- this is where your local
|
||||
repository copy is synced to.
|
||||
5. Copy ``.env`` file into this directory (``cp ../.env ./``).
|
||||
6. From ``/opt/redash/current/rd_ui`` run ``bower install`` to install
|
||||
frontend packages. This can be done from your host machine as well,
|
||||
if you have bower installed.
|
||||
7. Go back to ``/opt/redash/current`` and install python dependencies
|
||||
``sudo pip install -r requirements.txt``
|
||||
8. Apply migrations
|
||||
|
||||
::
|
||||
|
||||
export PYTHONPATH=. && find migrations/ -type f | grep 00 --null | xargs -I file bin/run python file
|
||||
|
||||
9. Start the server and background workers with
|
||||
``bin/run honcho start -f Procfile.dev``.
|
||||
10. Now the server should be available on your host on port 9001 and you
|
||||
can login with username admin and password admin.
|
||||
@@ -1,57 +0,0 @@
|
||||
.. image:: http://redash.io/static/old_img/redash_logo.png
|
||||
:width: 200px
|
||||
|
||||
Open Source Data Collaboration and Visualization Platform
|
||||
============================================================
|
||||
|
||||
**re:dash** is our take on freeing the data within our company in a way that will better fit our culture and usage patterns.
|
||||
|
||||
Prior to **re:dash**, we tried to use traditional BI suites and discovered a set of bloated, technically challenged and slow tools/flows. What we were looking for was a more hacker'ish way to look at data, so we built one.
|
||||
|
||||
**re:dash** was built to allow fast and easy access to billions of records, that we process and collect using Amazon Redshift ("petabyte scale data warehouse" that "speaks" PostgreSQL).
|
||||
Today **re:dash** has support for querying multiple databases, including Redshift, Google BigQuery, Google Spreadsheets, PostgreSQL, MySQL, Graphite and custom scripts.
|
||||
|
||||
Features
|
||||
########
|
||||
|
||||
1. **Query Editor**: think of `JS Fiddle`_ for SQL queries. It's your way to share data in the organization in an open way, by sharing both the dataset and the query that generated it. This way everyone can peer review not only the resulting dataset but also the process that generated it.
|
||||
2. **Visualizations**: once you have a dataset, you can create different visualizations out of it. Currently it supports charts, pivot table and cohorts.
|
||||
3. **Dashboards**: combine several visualizations into a single dashboard.
|
||||
|
||||
Demo
|
||||
####
|
||||
|
||||
.. figure:: https://raw.github.com/getredash/redash/screenshots/screenshots.gif
|
||||
:alt: Screenshots
|
||||
|
||||
You can try out the demo instance: `http://demo.redash.io`_ (login with any Google account).
|
||||
|
||||
.. _http://demo.redash.io: http://demo.redash.io
|
||||
.. _JS Fiddle: http://jsfiddle.net
|
||||
|
||||
Getting Started
|
||||
###############
|
||||
|
||||
:doc:`Setting up re:dash instance </setup>` (includes links to ready made AWS/GCE images).
|
||||
|
||||
Getting Help
|
||||
############
|
||||
|
||||
* Source: https://github.com/getredash/redash
|
||||
* Issues: https://github.com/getredash/redash/issues
|
||||
* Mailing List: https://groups.google.com/forum/#!forum/redash-users
|
||||
* Gitter (chat): https://gitter.im/getredash/redash
|
||||
* Contact Arik, the maintainer directly: arik@redash.io.
|
||||
|
||||
TOC
|
||||
###
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
|
||||
setup
|
||||
upgrade
|
||||
datasources
|
||||
usage
|
||||
dev
|
||||
misc
|
||||
@@ -1,10 +0,0 @@
|
||||
Miscellaneous
|
||||
=============
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:glob:
|
||||
|
||||
misc/*
|
||||
|
||||
|
||||
@@ -1,74 +0,0 @@
|
||||
How To: Backup your re:dash database and restore it on a different server
|
||||
================================================================================
|
||||
|
||||
**Note:** This guide assumes that the default database name (redash) has not been changed.
|
||||
|
||||
1. Check the size of your redash database. This can be done by creating a query within redash itself against the 're:dash metadata' data source.
|
||||
|
||||
.. code::
|
||||
|
||||
select t1.datname AS db_name, pg_size_pretty(pg_database_size(t1.datname)) as db_size
|
||||
from pg_database t1
|
||||
where t1.datname = 'redash'
|
||||
|
||||
|
||||
2. Check the amount of available disk space on your existing server.
|
||||
|
||||
.. code::
|
||||
|
||||
df -hT
|
||||
|
||||
|
||||
3. Backup the existing redash database.
|
||||
|
||||
.. code::
|
||||
|
||||
sudo -u redash pg_dump redash | gzip > redash_backup.gz
|
||||
|
||||
|
||||
4. Transfer the backup to the new server.
|
||||
|
||||
5. `Perform a clean install of re:dash <http://docs.redash.io/en/latest/setup.html>`__ on the new server.
|
||||
|
||||
6. Check the amount of available disk space on the new server.
|
||||
|
||||
.. code::
|
||||
|
||||
df -hT
|
||||
|
||||
|
||||
7. Login as postgres user on the new server.
|
||||
|
||||
.. code::
|
||||
|
||||
sudo -u postgres -i
|
||||
|
||||
|
||||
8. Drop the current redash database, create a new database named redash, and then restore the backup into the new database.
|
||||
|
||||
.. code::
|
||||
|
||||
dropdb redash
|
||||
createdb -T template0 redash
|
||||
gunzip -c redash_backup.gz | psql redash
|
||||
|
||||
|
||||
9. Set a new password of your choosing for the 'redash_reader' user (since the new installation generated a random password).
|
||||
|
||||
.. code::
|
||||
|
||||
psql -c "ALTER ROLE redash_reader WITH PASSWORD 'yourpasswordgoeshere';"
|
||||
|
||||
|
||||
**Note:** You must then navigate to the 're:dash metadata' data source (/data_sources/1) in the new re:dash installation and change the password to match the one entered above.
|
||||
|
||||
10. Grant permissions on the redash database to the redash_reader user.
|
||||
|
||||
.. code::
|
||||
|
||||
psql -c "grant select(id,name,type) ON data_sources to redash_reader;" redash
|
||||
psql -c "grant select(id,name) ON users to redash_reader;" redash
|
||||
psql -c "grant select on events, queries, dashboards, widgets, visualizations, query_results to redash_reader;" redash
|
||||
|
||||
|
||||
Create a new query in redash (using re:dash metadata as the data source) to test that everything is working as expected.
|
||||
@@ -1,50 +0,0 @@
|
||||
How To: Create a Google Developers Project
|
||||
==========================================
|
||||
|
||||
1. Go to the `Google Developers
|
||||
Console <https://console.developers.google.com/>`__.
|
||||
2. Select a project, or create a new one by clicking Create Project:
|
||||
|
||||
1. In the Project name field, type in a name for your project.
|
||||
2. In the Project ID field, optionally type in a project ID for your
|
||||
project or use the one that the console has created for you. This
|
||||
ID must be unique world-wide.
|
||||
3. Click the **Create** button and wait for the project to be
|
||||
created.
|
||||
4. Click on the new project name in the list to start editing the
|
||||
project.
|
||||
|
||||
3. In the left sidebar, select the **APIs** item below "APIs & auth". A
|
||||
list of Google web services appears.
|
||||
4. Find the **Google+ API** service and set its status to **ON**—notice
|
||||
that this action moves the service to the top of the list.
|
||||
5. In the sidebar under "APIs & auth", select **Credentials** and in that screen choose the **OAuth consent screen** tab
|
||||
|
||||
- Choose an Email Address and specify a Product Name.
|
||||
|
||||
6. In the sidebar under "APIs & auth", select **Credentials**.
|
||||
7. Click the **Add Credentials** button and choose **OAuth 2.0 Client ID**.
|
||||
|
||||
- In the **Application type** section of the dialog, select **Web
|
||||
application**.
|
||||
- In the **Authorized JavaScript origins** field, enter the origin
|
||||
for your app. You can enter multiple origins to use with multiple
|
||||
re:dash instances. Wildcards are not allowed. In the example below,
|
||||
we assume your re:dash instance address is *redash.example.com*:
|
||||
|
||||
::
|
||||
|
||||
http://redash.example.com
|
||||
https://redash.example.com
|
||||
|
||||
- In the Authorized redirect URI field, enter the redirect URI
|
||||
callback:
|
||||
|
||||
::
|
||||
|
||||
http://redash.example.com/oauth/google_callback
|
||||
|
||||
- Click the ``Create`` button.
|
||||
|
||||
8. In the resulting **Client ID for web application** section, copy the
|
||||
**Client ID** and **Client secret** to your ``.env`` file.
|
||||
@@ -1,141 +0,0 @@
|
||||
How To: Encrypt your re:dash installation with a free SSL certificate from Let's Encrypt
|
||||
==========================================================================================
|
||||
|
||||
**Note:** The steps below were tested on Ubuntu 14.04, but *should* work with any Debian-based distro.
|
||||
|
||||
`Let's Encrypt <https://letsencrypt.org/>`__ is a new certificate authority sponsored by major tech companies including Mozilla, Google, Cisco, and Facebook. Unlike traditional CA authorities, Let's Encrypt allows you to generate and renew an SSL certificate quickly and **at no cost**.
|
||||
|
||||
1. Open port 443 in your security group (if using AWS or GCE).
|
||||
|
||||
2. Update package lists, install git, and clone the letsencrypt repository.
|
||||
|
||||
.. code::
|
||||
|
||||
sudo apt-get update
|
||||
sudo apt-get install git
|
||||
sudo git clone https://github.com/letsencrypt/letsencrypt /opt/letsencrypt
|
||||
|
||||
|
||||
3. Stop nginx and redash, then ensure that no processes are still listening on port 80.
|
||||
|
||||
.. code::
|
||||
|
||||
sudo supervisorctl stop redash_server
|
||||
sudo service nginx stop
|
||||
netstat -na | grep ':80.*LISTEN'
|
||||
|
||||
|
||||
4. Generate your letsencrypt certificate.
|
||||
|
||||
.. code::
|
||||
|
||||
cd /opt/letsencrypt
|
||||
sudo pip install urllib3[secure] --upgrade
|
||||
./letsencrypt-auto certonly --standalone
|
||||
|
||||
|
||||
In most cases you'll want to enter 'example.com www.example.com' when prompted for your domain so that you can use the certificate on https://example.com and https://www.example.com.
|
||||
|
||||
5. Optionally generate a stronger Diffie-Hellman ephemeral parameter. Without this step, you will not achieve higher than a B score on `SSLLabs <https://www.ssllabs.com/ssltest/>`__. Please note that on a low-end server (VPS or micro/small GCE instance) this step can take approximately 20-30 minutes.
|
||||
|
||||
.. code::
|
||||
|
||||
cd /etc/ssl/certs
|
||||
sudo openssl dhparam -out dhparam.pem 3072
|
||||
|
||||
|
||||
6. Backup the existing nginx redash config, delete it, and then create a new version with the code supplied below.
|
||||
|
||||
.. code::
|
||||
|
||||
sudo cp /etc/nginx/sites-available/redash /etc/nginx/sites-available/redash.bak
|
||||
sudo rm /etc/nginx/sites-available/redash
|
||||
sudo nano /etc/nginx/sites-available/redash
|
||||
|
||||
|
||||
.. code:: nginx
|
||||
|
||||
upstream redash_servers {
|
||||
server 127.0.0.1:5000;
|
||||
}
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
|
||||
# Allow accessing /ping without https. Useful when placing behind load balancer.
|
||||
location /ping {
|
||||
proxy_set_header Host $http_host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_pass http://redash_servers;
|
||||
}
|
||||
|
||||
location / {
|
||||
# Enforce SSL.
|
||||
return 301 https://$host$request_uri;
|
||||
}
|
||||
}
|
||||
|
||||
server {
|
||||
listen 443 ssl;
|
||||
ssl on;
|
||||
|
||||
# Make sure to set paths to your certificate .pem and .key files.
|
||||
ssl_certificate /etc/letsencrypt/live/YOURDOMAIN.TLD/fullchain.pem;
|
||||
ssl_certificate_key /etc/letsencrypt/live/YOURDOMAIN.TLD/privkey.pem;
|
||||
ssl_dhparam /etc/ssl/certs/dhparam.pem;
|
||||
|
||||
# Use secure protocols and ciphers which are compatible with modern browsers
|
||||
ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
|
||||
ssl_prefer_server_ciphers on;
|
||||
ssl_ciphers AES256+EECDH:AES256+EDH;
|
||||
ssl_session_cache shared:SSL:20m;
|
||||
|
||||
# Enforce strict transport security
|
||||
add_header Strict-Transport-Security "max-age=31536000; includeSubdomains;";
|
||||
|
||||
access_log /var/log/nginx/redash.access.log;
|
||||
|
||||
gzip on;
|
||||
gzip_types *;
|
||||
gzip_proxied any;
|
||||
|
||||
location / {
|
||||
proxy_set_header Host $http_host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_pass http://redash_servers;
|
||||
proxy_redirect off;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
7. Start the nginx and redash servers again.
|
||||
|
||||
.. code::
|
||||
|
||||
sudo service nginx start
|
||||
sudo supervisorctl start redash_server
|
||||
|
||||
|
||||
8. Verify the installation by running a `SSLLabs test <https://www.ssllabs.com/ssltest/>`__. This guide *should* yield an A+ score. If everything is working as expected, optionally delete the old redash nginx config:
|
||||
|
||||
.. code::
|
||||
|
||||
sudo rm /etc/nginx/sites-available/redash.bak
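Before (or in addition to) the SSLLabs test, you can sanity-check the certificate nginx is now serving with a few lines of Python. This is only a convenience sketch: ``YOURDOMAIN.TLD`` is a placeholder, and it assumes Python 2.7.9+ or 3.x (older versions lack ``ssl.create_default_context``). The ``notAfter`` field it prints is also handy for tracking the 90-day renewal window described below.

.. code:: python

   import socket
   import ssl

   hostname = 'YOURDOMAIN.TLD'  # the domain you issued the certificate for
   context = ssl.create_default_context()
   sock = context.wrap_socket(socket.socket(), server_hostname=hostname)
   sock.connect((hostname, 443))
   cert = sock.getpeercert()
   print(cert['notAfter'])  # expiry date of the certificate being served
   sock.close()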
**Important Note:** letsencrypt certificates only remain valid for 90 days. To renew your certificate, simply follow steps 3 and 4 again:

.. code::

   sudo supervisorctl stop redash_server
   sudo service nginx stop
   netstat -na | grep ':80.*LISTEN'

   cd /opt/letsencrypt
   ./letsencrypt-auto certonly --standalone

   sudo service nginx start
   sudo supervisorctl start redash_server
@@ -1,59 +0,0 @@
SSL (HTTPS) Setup
=================

If you used the provided images or the bootstrap script, to start using
SSL with your instance you need to:

1. Update the nginx config file (``/etc/nginx/sites-available/redash``)
   with SSL configuration (see the example below). Make sure to upload
   the certificate to the server, and set the paths correctly in the new
   config.

2. Open port 443 in your security group (if using AWS or GCE).

.. code:: nginx

   upstream redash_servers {
     server 127.0.0.1:5000;
   }

   server {
     listen 80;

     # Allow accessing /ping without https. Useful when placing behind load balancer.
     location /ping {
       proxy_set_header Host $http_host;
       proxy_set_header X-Real-IP $remote_addr;
       proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
       proxy_pass http://redash_servers;
     }

     location / {
       # Enforce SSL.
       return 301 https://$host$request_uri;
     }
   }

   server {
     listen 443 ssl;

     # Make sure to set paths to your certificate .pem and .key files.
     ssl on;
     ssl_certificate /path-to/cert.pem; # or crt
     ssl_certificate_key /path-to/cert.key;

     access_log /var/log/nginx/redash.access.log;

     gzip on;
     gzip_types *;
     gzip_proxied any;

     location / {
       proxy_set_header Host $http_host;
       proxy_set_header X-Real-IP $remote_addr;
       proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
       proxy_set_header X-Forwarded-Proto $scheme;
       proxy_pass http://redash_servers;
       proxy_redirect off;
     }
   }
@@ -1,3 +0,0 @@
sphinx
sphinx-autobuild
sphinx_rtd_theme
@@ -1,62 +0,0 @@
Settings
########

Much of the functionality of re:dash can be changed with settings. Settings are read by `/redash/settings.py` from environment variables which (for most installs) can be set in `/opt/redash/current/.env`.

The following is a list of settings and what they control (a minimal sketch of how these variables are read follows the list):

- **REDASH_NAME**: name of the site, used in page titles, *default "re:dash"*
- **REDASH_REDIS_URL**: *default "redis://localhost:6379/0"*
- **REDASH_PROXIES_COUNT**: *default "1"*
- **REDASH_STATSD_HOST**: *default "127.0.0.1"*
- **REDASH_STATSD_PORT**: *default "8125"*
- **REDASH_STATSD_PREFIX**: *default "redash"*
- **REDASH_DATABASE_URL**: *default "postgresql://postgres"*
- **REDASH_CELERY_BROKER**: *default REDIS_URL*
- **REDASH_CELERY_BACKEND**: *default CELERY_BROKER*
- **REDASH_QUERY_RESULTS_CLEANUP_ENABLED**: *default "true"*
- **REDASH_QUERY_RESULTS_CLEANUP_COUNT**: *default "100"*
- **REDASH_QUERY_RESULTS_CLEANUP_MAX_AGE**: *default "7"*
- **REDASH_AUTH_TYPE**: *default "api_key"*
- **REDASH_PASSWORD_LOGIN_ENABLED**: *default "true"*
- **REDASH_ENFORCE_HTTPS**: *default "false"*
- **REDASH_MULTI_ORG**: *default "false"*
- **REDASH_GOOGLE_APPS_DOMAIN**: *default ""*
- **REDASH_GOOGLE_CLIENT_ID**: *default ""*
- **REDASH_GOOGLE_CLIENT_SECRET**: *default ""*
- **REDASH_SAML_METADATA_URL**: *default ""*
- **REDASH_SAML_CALLBACK_SERVER_NAME**: *default ""*
- **REDASH_STATIC_ASSETS_PATH**: *default "../rd_ui/app/"*
- **REDASH_JOB_EXPIRY_TIME**: *default 3600 * 6*
- **REDASH_COOKIE_SECRET**: *default "c292a0a3aa32397cdb050e233733900f"*
- **REDASH_LOG_LEVEL**: *default "INFO"*
- **REDASH_ANALYTICS**: *default ""*
- **REDASH_MAIL_SERVER**: *default "localhost"*
- **REDASH_MAIL_PORT**: *default 25*
- **REDASH_MAIL_USE_TLS**: *default "false"*
- **REDASH_MAIL_USE_SSL**: *default "false"*
- **REDASH_MAIL_USERNAME**: *default None*
- **REDASH_MAIL_PASSWORD**: *default None*
- **REDASH_MAIL_DEFAULT_SENDER**: *default None*
- **REDASH_MAIL_MAX_EMAILS**: *default None*
- **REDASH_MAIL_ASCII_ATTACHMENTS**: *default "false"*
- **REDASH_HOST**: *default ""*
- **REDASH_HIPCHAT_API_TOKEN**: *default None*
- **REDASH_HIPCHAT_API_URL**: *default None*
- **REDASH_HIPCHAT_ROOM_ID**: *default None*
- **REDASH_WEBHOOK_ENDPOINT**: *default None*
- **REDASH_WEBHOOK_USERNAME**: *default None*
- **REDASH_CORS_ACCESS_CONTROL_ALLOW_ORIGIN**: *default ""*
- **REDASH_CORS_ACCESS_CONTROL_ALLOW_CREDENTIALS**: *default "false"*
- **REDASH_CORS_ACCESS_CONTROL_REQUEST_METHOD**: *default "GET, POST, PUT"*
- **REDASH_CORS_ACCESS_CONTROL_ALLOW_HEADERS**: *default "Content-Type"*
- **REDASH_ENABLED_QUERY_RUNNERS**: *default ",".join(default_query_runners)*
- **REDASH_ADDITIONAL_QUERY_RUNNERS**: *default ""*
- **REDASH_SENTRY_DSN**: *default ""*
- **REDASH_ALLOW_SCRIPTS_IN_USER_INPUT**: disable sanitization of text input, allowing full HTML, *default "true"*
- **REDASH_DATE_FORMAT**: *default "DD/MM/YY"*
- **REDASH_FEATURE_ALLOW_ALL_TO_EDIT**: *default "true"*
- **REDASH_FEATURE_TABLES_PERMISSIONS**: *default "false"*
- **REDASH_VERSION_CEHCK**: *default "true"*
- **REDASH_BIGQUERY_HTTP_TIMEOUT**: *default "600"*
- **REDASH_SCHEMA_RUN_TABLE_SIZE_CALCULATIONS**: *default "false"*
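As referenced above, each of these variables is read in ``settings.py`` with ``os.environ.get`` and a default value; boolean-like settings go through a small string-to-boolean helper. The snippet below is a minimal sketch of that pattern (illustrative only, not the project's exact code):

.. code:: python

   import os


   def parse_boolean(value):
       # "true"/"false" strings from the .env file become Python booleans.
       return value.lower() in ('true', 'yes', '1')

   NAME = os.environ.get('REDASH_NAME', 're:dash')
   REDIS_URL = os.environ.get('REDASH_REDIS_URL', 'redis://localhost:6379/0')
   PASSWORD_LOGIN_ENABLED = parse_boolean(
       os.environ.get('REDASH_PASSWORD_LOGIN_ENABLED', 'true'))

Anything exported in `/opt/redash/current/.env` before the services start therefore overrides these defaults.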
180
docs/setup.rst
180
docs/setup.rst
@@ -1,180 +0,0 @@
Setting up re:dash instance
###########################

The `provisioning script <https://raw.githubusercontent.com/getredash/redash/master/setup/ubuntu/bootstrap.sh>`__ works on Ubuntu 12.04, Ubuntu 14.04 and Debian Wheezy. This script installs all needed dependencies and creates a basic setup.

To ease the process, there are also images for AWS and Google Compute Engine. These images were created with the same provisioning script using Packer.

Create an instance
==================

AWS
---

Launch the instance from the pre-baked AMI (for small deployments t2.micro should be enough):

- us-east-1: `ami-a7ddfbcd <https://console.aws.amazon.com/ec2/home?region=us-east-1#LaunchInstanceWizard:ami=ami-a7ddfbcd>`__
- us-west-1: `ami-269feb46 <https://console.aws.amazon.com/ec2/home?region=us-west-1#LaunchInstanceWizard:ami=ami-269feb46>`__
- us-west-2: `ami-435fba23 <https://console.aws.amazon.com/ec2/home?region=us-west-2#LaunchInstanceWizard:ami=ami-435fba23>`__
- eu-west-1: `ami-b4c277c7 <https://console.aws.amazon.com/ec2/home?region=eu-west-1#LaunchInstanceWizard:ami=ami-b4c277c7>`__
- eu-central-1: `ami-07ced76b <https://console.aws.amazon.com/ec2/home?region=eu-central-1#LaunchInstanceWizard:ami=ami-07ced76b>`__
- sa-east-1: `ami-6e2eaf02 <https://console.aws.amazon.com/ec2/home?region=sa-east-1#LaunchInstanceWizard:ami=ami-6e2eaf02>`__
- ap-northeast-1: `ami-aa5a64c4 <https://console.aws.amazon.com/ec2/home?region=ap-northeast-1#LaunchInstanceWizard:ami=ami-aa5a64c4>`__
- ap-southeast-1: `ami-1c45897f <https://console.aws.amazon.com/ec2/home?region=ap-southeast-1#LaunchInstanceWizard:ami=ami-1c45897f>`__
- ap-southeast-2: `ami-42b79221 <https://console.aws.amazon.com/ec2/home?region=ap-southeast-2#LaunchInstanceWizard:ami=ami-42b79221>`__

(the above AMIs are of version 0.9.1)

When launching the instance, make sure to use a security group that **only** allows incoming traffic on ports 22 (SSH), 80 (HTTP) and 443 (HTTPS).

Now proceed to `"Setup" <#setup>`__.

Google Compute Engine
---------------------

First, you need to add the images to your account:

.. code:: bash

   $ gcloud compute images create "redash-091-b1377" --source-uri gs://redash-images/redash.0.9.1.b1377.tar.gz

Next you need to launch an instance using this image (the n1-standard-1 instance type is recommended). If you plan on using re:dash with BigQuery, you can use a dedicated image which comes with BigQuery preconfigured (using instance permissions):

.. code:: bash

   $ gcloud compute images create "redash-091-b1377-bq" --source-uri gs://redash-images/redash.0.9.1.b1377-bq.tar.gz

Note that you need to launch this instance with BigQuery access:

.. code:: bash

   $ gcloud compute instances create <your_instance_name> --image redash-091-b1377-bq --scopes storage-ro,bigquery

(the same can be done from the web interface, just make sure to enable BigQuery access)

Now proceed to `"Setup" <#setup>`__.

Other
-----

Download the provisioning script and run it on your machine. Note that:

1. You need to run the script as root.
2. It was tested only on Ubuntu 12.04, Ubuntu 14.04 and Debian Wheezy.
3. It's designed to run on a "clean" machine. If you're running this script on a machine that is used for other purposes, you might want to tweak it to your needs (like removing the ``apt-get dist-upgrade`` call at the beginning of it).

Setup
=====

Once you have created the instance with either the image or the script, you should have a running re:dash instance with everything you need to get started. You can now log in to it with the user "admin" (password: "admin"). But to make it useful, there are a few more steps you need to do manually to complete the setup:

First ssh to your instance and change directory to ``/opt/redash``. If you're using the GCE image, switch to root (``sudo su``).

Users & Google Authentication setup
-----------------------------------

Most of the settings you need to edit are in the ``/opt/redash/.env`` file.

1. Update the cookie secret (important! otherwise anyone can sign new cookies and impersonate users): change "veryverysecret" in the line ``export REDASH_COOKIE_SECRET=veryverysecret`` to something else (you can run the command ``pwgen 32 -1`` to generate a random string; a Python alternative is sketched below).
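   If ``pwgen`` isn't installed, a quick alternative is the short Python snippet below (a sketch only; it simply prints random hex characters you can paste into the ``.env`` file):

   .. code:: python

      # Print a random 64-character hex string to use as REDASH_COOKIE_SECRET.
      import os
      import binascii

      print(binascii.hexlify(os.urandom(32)))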
2. By default we create an admin user with the password "admin". You can change this password by opening the ``/users/me#password`` page after logging in as admin.

3. If you want to use Google OAuth to authenticate users, you need to create a Google Developers project (see :doc:`instructions </misc/google_developers_project>`) and then add the needed configuration to the ``.env`` file:

.. code::

   export REDASH_GOOGLE_CLIENT_ID=""
   export REDASH_GOOGLE_CLIENT_SECRET=""

4. Configure the domain(s) you want to allow to log in with Google Apps, by running the command:

.. code::

   cd /opt/redash/current
   sudo -u redash bin/run ./manage.py org set_google_apps_domains {{domains}}

If you're passing multiple domains, separate them with commas.

5. Restart the web server to apply the configuration changes: ``sudo supervisorctl restart redash_server``.

6. Once you have Google OAuth enabled, you can login using your Google Apps account. If you want to grant admin permissions to some users, you can do this by adding them to the admin group (from the ``/groups`` page).

7. If you don't use Google OAuth or just need username/password logins, you can create additional users by opening the ``/users/new`` page.

Datasources
-----------

To make re:dash truly useful, you need to set up your data sources in it. Browse to ``/data_sources`` on your instance to create a new data source connection.

See the :doc:`documentation </datasources>` for the different options. Your instance comes ready with the dependencies needed to set up the supported sources.

Mail Configuration
------------------

For the system to be able to send emails (for example when alerts trigger), you need to set the mail server to use and the host name of your re:dash server. If you're using one of our images, you can do this by editing the `.env` file:

.. code::

   # Note that not all values are required, as they have default values.

   export REDASH_MAIL_SERVER="" # default: localhost
   export REDASH_MAIL_PORT="" # default: 25
   export REDASH_MAIL_USE_TLS="" # default: False
   export REDASH_MAIL_USE_SSL="" # default: False
   export REDASH_MAIL_USERNAME="" # default: None
   export REDASH_MAIL_PASSWORD="" # default: None
   export REDASH_MAIL_DEFAULT_SENDER="" # Email address to send from

   export REDASH_HOST="" # base address of your re:dash instance, for example: "https://demo.redash.io"

- Note that not all values are required, as there are default values.
- It's recommended to use a mail service, like `Amazon SES <https://aws.amazon.com/ses/>`__, `Mailgun <http://www.mailgun.com/>`__ or `Mandrill <http://mandrillapp.com>`__, to send emails to ensure deliverability.

To test the email configuration, you can run `bin/run ./manage.py send_test_mail` (from `/opt/redash/current`).
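Under the hood this command is a thin wrapper around Flask-Mail; conceptually it does roughly the following (a sketch based on the ``manage.py`` command that appears later in this diff):

.. code:: python

   from flask_mail import Message
   from redash import mail, settings

   # Send a test message to the configured default sender address.
   mail.send(Message(subject="Test Message from re:dash",
                     recipients=[settings.MAIL_DEFAULT_SENDER],
                     body="Test message."))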
How to upgrade?
---------------

It's recommended to upgrade your re:dash instance once in a while to benefit from bug fixes and new features. See :doc:`here </upgrade>` for full upgrade instructions (including a Fabric script).

Notes
=====

- If this is a production setup, you should enforce HTTPS and make sure you set the cookie secret (see :doc:`instructions </misc/ssl>`).
@@ -1,36 +0,0 @@
How to Upgrade
##############

It's recommended to upgrade your re:dash instance once there are new releases, to benefit from new features and bug fixes. The upgrade process is relatively simple, and assuming you used one of the base images we provide, you can just use the `Fabric <http://www.fabfile.org/>`__ script provided here: https://gist.github.com/arikfr/440d1403b4aeb76ebaf8.

How to run the Fabric script
============================

1. Install Fabric: ``pip install fabric requests`` (needed only once).
2. Download the ``fabfile.py`` from the gist.
3. Run the script: ``fab -H{your re:dash host} -u{the ssh user for this host} -i{path to key file for passwordless login} deploy_latest_release``

``-i`` is optional and only needed in case you're using private-key based authentication (and didn't add the key file to your authentication agent or set its path in your SSH config).

What the Fabric script does
===========================

Even if you didn't use the image, it's very likely you can reuse most of this script with small modifications. What this script does is (a compressed sketch follows the list):

1. Find the URL of the latest release tarball (from the `GitHub releases page <https://github.com/getredash/redash/releases>`__).
2. Download it.
3. Create a new directory for this version (for example: ``/opt/redash/redash.0.5.0.b685``).
4. Unpack it (``tar -C {dir} -xvf {tarball path}``).
5. Link the ``/opt/redash/.env`` file into this directory.
6. Apply any new migrations.
7. Link ``/opt/redash/current`` to the new version.
8. Restart the web server and celery workers.
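A heavily compressed sketch of such a task is shown below. It is *not* the actual gist: the GitHub API call, the tarball handling and the paths are assumptions that simply mirror the steps above.

.. code:: python

   # fabfile.py -- illustrative only; the real script lives in the gist linked above.
   import requests
   from fabric.api import run, sudo, task


   @task
   def deploy_latest_release():
       # 1. Find the latest release tarball URL (assumes the GitHub releases API).
       release = requests.get(
           'https://api.github.com/repos/getredash/redash/releases/latest').json()
       url = release['assets'][0]['browser_download_url']
       filename = url.split('/')[-1]
       version_dir = '/opt/redash/' + filename.replace('.tar.gz', '')

       # 2-4. Download and unpack into a fresh directory for this version.
       run('wget -O /tmp/{0} {1}'.format(filename, url))
       sudo('mkdir -p ' + version_dir)
       sudo('tar -C {0} -xvf /tmp/{1}'.format(version_dir, filename))

       # 5-7. Share the .env file, apply migrations, switch the `current` link.
       sudo('ln -nfs /opt/redash/.env {0}/.env'.format(version_dir))
       # (the real script runs any new migration scripts at this point)
       sudo('ln -nfs {0} /opt/redash/current'.format(version_dir))

       # 8. Restart the web server and celery workers.
       sudo('supervisorctl restart redash_server redash_celery')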
@@ -1,11 +0,0 @@
Usage
=====

.. toctree::
   :maxdepth: 2
   :glob:

   usage/maintenance.rst
   usage/*
@@ -1,72 +0,0 @@
ElasticSearch: Querying
#######################

ElasticSearch currently supports only simple Lucene style queries (like Kibana, but without the aggregations).

Full blown JSON based ElasticSearch queries (including aggregations) will be added later.

Simple query example:
=====================

- Query the index named "twitter"
- Filter by "user:kimchy"
- Return the fields: "@timestamp", "tweet" and "user"
- Return up to 15 results
- Sort by @timestamp ascending

.. code:: json

    {
        "index" : "twitter",
        "query" : "user:kimchy",
        "fields" : ["@timestamp", "tweet", "user"],
        "size" : 15,
        "sort" : "@timestamp:asc"
    }

Simple query on a logstash ElasticSearch instance:
==================================================

- Query the index named "logstash-2015.04.\*" (in this case it's all of April 2015)
- Filter by type:events AND eventName:UserUpgrade AND channel:selfserve
- Return fields: "@timestamp", "userId", "channel", "utm\_source", "utm\_medium", "utm\_campaign", "utm\_content"
- Return up to 250 results
- Sort by @timestamp ascending

.. code:: json

    {
        "index" : "logstash-2015.04.*",
        "query" : "type:events AND eventName:UserUpgrade AND channel:selfserve",
        "fields" : ["@timestamp", "userId", "channel", "utm_source", "utm_medium", "utm_campaign", "utm_content"],
        "size" : 250,
        "sort" : "@timestamp:asc"
    }

Simple query on an ElasticSearch instance:
==========================================

- Query the index named "twitter"
- Filter by "user" equal to "kimchy"
- Return the fields: "@timestamp", "tweet" and "user"
- Return up to 15 results
- Sort by @timestamp ascending

.. code:: json

    {
        "index" : "twitter",
        "query" : {
            "match": {
                "user" : "kimchy"
            }
        },
        "fields" : ["@timestamp", "tweet", "user"],
        "size" : 15,
        "sort" : "@timestamp:asc"
    }
@@ -1,72 +0,0 @@
Ongoing Maintenance and Basic Operations
########################################

Configuration and logs
======================

The supervisor config can be found in
``/opt/redash/supervisord/supervisord.conf``.

There you can see the names of its programs (``redash_celery``,
``redash_server``) and the location of their logs.

Restart
=======

Restarting the Web Server
-------------------------

``sudo supervisorctl restart redash_server``

Restarting Celery Workers
-------------------------

``sudo supervisorctl restart redash_celery``

Restarting Celery Workers & the Queries Queue
---------------------------------------------

In case you are handling a problem and you need to stop the currently
running queries and reset the queue, follow the steps below.

1. Stop celery: ``sudo supervisorctl stop redash_celery`` (celery might
   take some time to stop, if it's in the middle of running a query).

2. Flush redis: ``redis-cli flushall``.

3. Start celery: ``sudo supervisorctl start redash_celery``

Changing the Number of Workers
==============================

By default, Celery will start a worker per CPU core. Because most of
re:dash's tasks are IO bound, the real limit for the number of workers you
can use depends on the amount of memory your machine has. It's
recommended to increase the number of workers to support more concurrent
queries.

1. Open the supervisord configuration file:
   ``/opt/redash/supervisord/supervisord.conf``

2. Edit the ``[program:redash_celery]`` section and add the ``-c`` parameter
   with the number of concurrent workers you need to the *command* value.

3. Restart supervisord to apply the new configuration:
   ``sudo /etc/init.d/redash_supervisord restart``.

DB
==

Backup re:dash's DB:
--------------------

Uncompressed backup: ``sudo -u redash pg_dump > backup_filename.sql``

Compressed backup: ``sudo -u redash pg_dump redash | gzip > backup_filename.gz``

Version
=======

See the current version:

``bin/run ./manage.py version``
@@ -1,74 +0,0 @@
MongoDB: Querying
#################

Simple query example:
=====================

.. code:: json

    {
        "collection" : "my_collection",
        "query" : {
            "date" : {
                "$gt" : "ISODate(\"2015-01-15 11:41\")"
            },
            "type" : 1
        },
        "fields" : {
            "_id" : 1,
            "name" : 2
        },
        "sort" : [
            {
                "name" : "date",
                "direction" : -1
            }
        ]
    }

Live example on the demo instance:
http://demo.redash.io/queries/394/source.

Aggregation
===========

Aggregation queries use a syntax similar to the one used in PyMongo. However, to preserve the order of the sort keys, the "$sort" operation takes a regular list, which is converted into a SON (sorted dictionary) object before execution.
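A minimal sketch of that conversion (an illustration of the idea, not the query runner's actual code) looks roughly like this:

.. code:: python

   # Convert re:dash's list-style "$sort" into the ordered form PyMongo expects.
   from bson.son import SON


   def convert_sort(sort_list):
       # [{"name": "count", "direction": -1}, ...] -> SON([("count", -1), ...])
       return SON([(item["name"], item["direction"]) for item in sort_list])


   pipeline_stage = {"$sort": convert_sort([
       {"name": "count", "direction": -1},
       {"name": "_id", "direction": -1},
   ])}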
Aggregation query example:

.. code:: json

    {
        "collection" : "things",
        "aggregate" : [
            {
                "$unwind" : "$tags"
            },
            {
                "$group" : {
                    "_id" : "$tags",
                    "count" : { "$sum" : 1 }
                }
            },
            {
                "$sort" : [
                    {
                        "name" : "count",
                        "direction" : -1
                    },
                    {
                        "name" : "_id",
                        "direction" : -1
                    }
                ]
            }
        ]
    }

Live examples on the demo instance:

1. http://demo.redash.io/queries/393/source
2. http://demo.redash.io/queries/387/source
27
manage.py
27
manage.py
@@ -2,21 +2,18 @@
|
||||
"""
|
||||
CLI to manage redash.
|
||||
"""
|
||||
import json
|
||||
|
||||
from flask_script import Manager
|
||||
from flask.ext.script import Manager
|
||||
|
||||
from redash import settings, models, __version__
|
||||
from redash.wsgi import app
|
||||
from redash.cli import users, database, data_sources, organization
|
||||
from redash.monitor import get_status
|
||||
from redash.import_export import import_manager
|
||||
from redash.cli import users, database, data_sources
|
||||
|
||||
manager = Manager(app)
|
||||
manager.add_command("database", database.manager)
|
||||
manager.add_command("users", users.manager)
|
||||
manager.add_command("import", import_manager)
|
||||
manager.add_command("ds", data_sources.manager)
|
||||
manager.add_command("org", organization.manager)
|
||||
|
||||
|
||||
|
||||
@manager.command
|
||||
@@ -24,9 +21,6 @@ def version():
|
||||
"""Displays re:dash version."""
|
||||
print __version__
|
||||
|
||||
@manager.command
|
||||
def status():
|
||||
print json.dumps(get_status(), indent=2)
|
||||
|
||||
@manager.command
|
||||
def runworkers():
|
||||
@@ -43,15 +37,12 @@ def make_shell_context():
|
||||
@manager.command
|
||||
def check_settings():
|
||||
"""Show the settings as re:dash sees them (useful for debugging)."""
|
||||
for name, item in settings.all_settings().iteritems():
|
||||
print "{} = {}".format(name, item)
|
||||
from types import ModuleType
|
||||
|
||||
@manager.command
|
||||
def send_test_mail():
|
||||
from redash import mail
|
||||
from flask_mail import Message
|
||||
|
||||
mail.send(Message(subject="Test Message from re:dash", recipients=[settings.MAIL_DEFAULT_SENDER], body="Test message."))
|
||||
for name in dir(settings):
|
||||
item = getattr(settings, name)
|
||||
if not callable(item) and not name.startswith("__") and not isinstance(item, ModuleType):
|
||||
print "{} = {}".format(name, item)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
@@ -18,3 +18,4 @@ if __name__ == '__main__':
|
||||
db.database.execute_sql("ALTER TABLE {} ALTER COLUMN {} TYPE timestamp with time zone;".format(*column))
|
||||
|
||||
db.close_db(None)
|
||||
|
||||
|
||||
@@ -1,31 +1,13 @@
|
||||
import json
|
||||
import jsonschema
|
||||
from jsonschema import ValidationError
|
||||
|
||||
from redash import query_runner
|
||||
from redash.models import DataSource
|
||||
|
||||
|
||||
def validate_configuration(query_runner_type, configuration_json):
|
||||
query_runner_class = query_runner.query_runners.get(query_runner_type, None)
|
||||
if query_runner_class is None:
|
||||
return False
|
||||
|
||||
try:
|
||||
if isinstance(configuration_json, basestring):
|
||||
configuration = json.loads(configuration_json)
|
||||
else:
|
||||
configuration = configuration_json
|
||||
jsonschema.validate(configuration, query_runner_class.configuration_schema())
|
||||
except (ValidationError, ValueError):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
def update(data_source):
|
||||
print "[%s] Old options: %s" % (data_source.name, data_source.options)
|
||||
|
||||
if validate_configuration(data_source.type, data_source.options):
|
||||
if query_runner.validate_configuration(data_source.type, data_source.options):
|
||||
print "[%s] configuration already valid. skipping." % data_source.name
|
||||
return
|
||||
|
||||
@@ -83,9 +65,9 @@ def update(data_source):
|
||||
print "[%s] No need to convert type of: %s" % (data_source.name, data_source.type)
|
||||
|
||||
print "[%s] New options: %s" % (data_source.name, data_source.options)
|
||||
data_source.save(only=data_source.dirty_fields)
|
||||
data_source.save()
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
for data_source in DataSource.select(DataSource.id, DataSource.name, DataSource.type, DataSource.options):
|
||||
update(data_source)
|
||||
for data_source in DataSource.all():
|
||||
update(data_source)
|
||||
@@ -23,3 +23,4 @@ if __name__ == '__main__':
|
||||
db.database.execute_sql("UPDATE widgets SET updated_at = created_at;")
|
||||
|
||||
db.close_db(None)
|
||||
|
||||
|
||||
@@ -15,3 +15,5 @@ if __name__ == '__main__':
|
||||
db.database.execute_sql("UPDATE queries SET last_modified_by_id = user_id;")
|
||||
|
||||
db.close_db(None)
|
||||
|
||||
|
||||
|
||||
@@ -19,3 +19,5 @@ if __name__ == '__main__':
|
||||
)
|
||||
|
||||
db.close_db(None)
|
||||
|
||||
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
from redash.models import db
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
|
||||
with db.database.transaction():
|
||||
# Make sure all data sources names are unique.
|
||||
db.database.execute_sql("""
|
||||
UPDATE data_sources
|
||||
SET name = new_names.name
|
||||
FROM (
|
||||
SELECT id, name || ' ' || id as name
|
||||
FROM (SELECT id, name, rank() OVER (PARTITION BY name ORDER BY created_at ASC) FROM data_sources) ds WHERE rank > 1
|
||||
) AS new_names
|
||||
WHERE data_sources.id = new_names.id;
|
||||
""")
|
||||
# Add unique constraint on data_sources.name.
|
||||
db.database.execute_sql("ALTER TABLE data_sources ADD CONSTRAINT unique_name UNIQUE (name);")
|
||||
|
||||
db.close_db(None)
|
||||
@@ -1,24 +0,0 @@
|
||||
from playhouse.migrate import PostgresqlMigrator, migrate
|
||||
|
||||
from redash.models import db
|
||||
from redash import models
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = PostgresqlMigrator(db.database)
|
||||
|
||||
with db.database.transaction():
|
||||
column = models.User.api_key
|
||||
column.null = True
|
||||
migrate(
|
||||
migrator.add_column('users', 'api_key', models.User.api_key),
|
||||
)
|
||||
|
||||
for user in models.User.select(models.User.id, models.User.api_key):
|
||||
user.save(only=user.dirty_fields)
|
||||
|
||||
migrate(
|
||||
migrator.add_not_null('users', 'api_key')
|
||||
)
|
||||
|
||||
db.close_db(None)
|
||||
@@ -1,14 +0,0 @@
|
||||
from playhouse.migrate import PostgresqlMigrator, migrate
|
||||
|
||||
from redash.models import db
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = PostgresqlMigrator(db.database)
|
||||
|
||||
with db.database.transaction():
|
||||
migrate(
|
||||
migrator.drop_not_null('queries', 'data_source_id'),
|
||||
)
|
||||
|
||||
db.close_db(None)
|
||||
@@ -1,8 +0,0 @@
|
||||
from redash.models import db, Alert, AlertSubscription
|
||||
|
||||
if __name__ == '__main__':
|
||||
with db.database.transaction():
|
||||
Alert.create_table()
|
||||
AlertSubscription.create_table()
|
||||
|
||||
db.close_db(None)
|
||||
@@ -1,44 +0,0 @@
|
||||
from base64 import b64encode
|
||||
import json
|
||||
from redash.models import DataSource
|
||||
|
||||
|
||||
def convert_p12_to_pem(p12file):
|
||||
from OpenSSL import crypto
|
||||
with open(p12file, 'rb') as f:
|
||||
p12 = crypto.load_pkcs12(f.read(), "notasecret")
|
||||
|
||||
return crypto.dump_privatekey(crypto.FILETYPE_PEM, p12.get_privatekey())
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
for ds in DataSource.select(DataSource.id, DataSource.type, DataSource.options):
|
||||
|
||||
if ds.type == 'bigquery':
|
||||
options = json.loads(ds.options)
|
||||
|
||||
if 'jsonKeyFile' in options:
|
||||
continue
|
||||
|
||||
new_options = {
|
||||
'projectId': options['projectId'],
|
||||
'jsonKeyFile': b64encode(json.dumps({
|
||||
'client_email': options['serviceAccount'],
|
||||
'private_key': convert_p12_to_pem(options['privateKey'])
|
||||
}))
|
||||
}
|
||||
|
||||
ds.options = json.dumps(new_options)
|
||||
ds.save(only=ds.dirty_fields)
|
||||
elif ds.type == 'google_spreadsheets':
|
||||
options = json.loads(ds.options)
|
||||
if 'jsonKeyFile' in options:
|
||||
continue
|
||||
|
||||
with open(options['credentialsFilePath']) as f:
|
||||
new_options = {
|
||||
'jsonKeyFile': b64encode(f.read())
|
||||
}
|
||||
|
||||
ds.options = json.dumps(new_options)
|
||||
ds.save(only=ds.dirty_fields)
|
||||
@@ -1,7 +0,0 @@
|
||||
from redash import models
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
default_group = models.Group.select(models.Group.id, models.Group.permissions).where(models.Group.name=='default').first()
|
||||
default_group.permissions.append('list_users')
|
||||
default_group.save(only=[models.Group.permissions])
|
||||
@@ -1,23 +0,0 @@
|
||||
import json
|
||||
from redash import models
|
||||
|
||||
if __name__ == '__main__':
|
||||
for vis in models.Visualization.select():
|
||||
if vis.type == 'COUNTER':
|
||||
options = json.loads(vis.options)
|
||||
print "Before: ", options
|
||||
if 'rowNumber' in options and options['rowNumber'] is not None:
|
||||
options['rowNumber'] += 1
|
||||
else:
|
||||
options['rowNumber'] = 1
|
||||
|
||||
if 'counterColName' not in options:
|
||||
options['counterColName'] = 'counter'
|
||||
|
||||
if 'targetColName' not in options:
|
||||
options['targetColName'] = 'target'
|
||||
options['targetRowNumber'] = options['rowNumber']
|
||||
|
||||
print "After: ", options
|
||||
vis.options = json.dumps(options)
|
||||
vis.save()
|
||||
@@ -1,21 +0,0 @@
|
||||
import peewee
|
||||
from playhouse.migrate import PostgresqlMigrator, migrate
|
||||
|
||||
from redash.models import db
|
||||
from redash import models
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = PostgresqlMigrator(db.database)
|
||||
|
||||
cursor = db.database.execute_sql("SELECT column_name FROM information_schema.columns WHERE table_name='alerts' and column_name='rearm';")
|
||||
if cursor.rowcount > 0:
|
||||
print "Column exists. Skipping."
|
||||
exit()
|
||||
|
||||
with db.database.transaction():
|
||||
migrate(
|
||||
migrator.add_column('alerts', 'rearm', models.Alert.rearm),
|
||||
)
|
||||
|
||||
db.close_db(None)
|
||||
@@ -1,10 +0,0 @@
|
||||
__author__ = 'lior'
|
||||
|
||||
from redash.models import DataSource
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
for ds in DataSource.select(DataSource.id, DataSource.type):
|
||||
if ds.type == 'elasticsearch':
|
||||
ds.type = 'kibana'
|
||||
ds.save(only=ds.dirty_fields)
|
||||
@@ -1,6 +0,0 @@
|
||||
from redash import models
|
||||
|
||||
if __name__ == '__main__':
|
||||
default_group = models.Group.select(models.Group.id, models.Group.permissions).where(models.Group.name=='default').first()
|
||||
default_group.permissions.append('schedule_query')
|
||||
default_group.save(only=[models.Group.permissions])
|
||||
@@ -1,9 +0,0 @@
|
||||
from redash.models import db, Alert, AlertSubscription
|
||||
|
||||
if __name__ == '__main__':
|
||||
with db.database.transaction():
|
||||
# There was an AWS/GCE image created without this table, to make sure this exists we run this migration.
|
||||
if not AlertSubscription.table_exists():
|
||||
AlertSubscription.create_table()
|
||||
|
||||
db.close_db(None)
|
||||
@@ -1,14 +0,0 @@
|
||||
from playhouse.migrate import PostgresqlMigrator, migrate
|
||||
|
||||
from redash.models import db
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = PostgresqlMigrator(db.database)
|
||||
|
||||
with db.database.transaction():
|
||||
migrate(
|
||||
migrator.drop_column('groups', 'tables')
|
||||
)
|
||||
|
||||
db.close_db(None)
|
||||
@@ -1,34 +0,0 @@
|
||||
from redash.models import db, Organization, Group
|
||||
from redash import settings
|
||||
from playhouse.migrate import PostgresqlMigrator, migrate
|
||||
|
||||
if __name__ == '__main__':
|
||||
migrator = PostgresqlMigrator(db.database)
|
||||
|
||||
with db.database.transaction():
|
||||
Organization.create_table()
|
||||
|
||||
default_org = Organization.create(name="Default", slug='default', settings={
|
||||
Organization.SETTING_GOOGLE_APPS_DOMAINS: list(settings.GOOGLE_APPS_DOMAIN)
|
||||
})
|
||||
|
||||
column = Group.org
|
||||
column.default = default_org
|
||||
|
||||
migrate(
|
||||
migrator.add_column('groups', 'org_id', column),
|
||||
migrator.add_column('events', 'org_id', column),
|
||||
migrator.add_column('data_sources', 'org_id', column),
|
||||
migrator.add_column('users', 'org_id', column),
|
||||
migrator.add_column('dashboards', 'org_id', column),
|
||||
migrator.add_column('queries', 'org_id', column),
|
||||
migrator.add_column('query_results', 'org_id', column),
|
||||
)
|
||||
|
||||
# Change the uniqueness constraint on user email to be (org, email):
|
||||
migrate(
|
||||
migrator.drop_index('users', 'users_email'),
|
||||
migrator.add_index('users', ('org_id', 'email'), unique=True)
|
||||
)
|
||||
|
||||
db.close_db(None)
|
||||
@@ -1,44 +0,0 @@
|
||||
from collections import defaultdict
|
||||
from redash.models import db, DataSourceGroup, DataSource, Group, Organization, User
|
||||
from playhouse.migrate import PostgresqlMigrator, migrate
|
||||
import peewee
|
||||
|
||||
if __name__ == '__main__':
|
||||
migrator = PostgresqlMigrator(db.database)
|
||||
|
||||
with db.database.transaction():
|
||||
# Add type to groups
|
||||
migrate(
|
||||
migrator.add_column('groups', 'type', Group.type)
|
||||
)
|
||||
|
||||
for name in ['default', 'admin']:
|
||||
group = Group.get(Group.name==name)
|
||||
group.type = Group.BUILTIN_GROUP
|
||||
group.save()
|
||||
|
||||
# Create association table between data sources and groups
|
||||
DataSourceGroup.create_table()
|
||||
|
||||
# add default to existing data source:
|
||||
default_org = Organization.get_by_id(1)
|
||||
default_group = Group.get(Group.name=="default")
|
||||
for ds in DataSource.all(default_org):
|
||||
DataSourceGroup.create(data_source=ds, group=default_group)
|
||||
|
||||
# change the groups list on a user object to be an ids list
|
||||
migrate(
|
||||
migrator.rename_column('users', 'groups', 'old_groups'),
|
||||
)
|
||||
|
||||
migrate(migrator.add_column('users', 'groups', User.groups))
|
||||
|
||||
group_map = dict(map(lambda g: (g.name, g.id), Group.select()))
|
||||
user_map = defaultdict(list)
|
||||
for user in User.select(User, peewee.SQL('old_groups')):
|
||||
group_ids = [group_map[group] for group in user.old_groups]
|
||||
user.update_instance(groups=group_ids)
|
||||
|
||||
migrate(migrator.drop_column('users', 'old_groups'))
|
||||
|
||||
db.close_db(None)
|
||||
@@ -1,6 +0,0 @@
|
||||
from redash import models
|
||||
|
||||
if __name__ == '__main__':
|
||||
admin_group = models.Group.get(models.Group.name=='admin')
|
||||
admin_group.permissions.append('super_admin')
|
||||
admin_group.save()
|
||||
@@ -1,19 +0,0 @@
|
||||
from redash.models import db
|
||||
import peewee
|
||||
from playhouse.migrate import PostgresqlMigrator, migrate
|
||||
|
||||
if __name__ == '__main__':
|
||||
migrator = PostgresqlMigrator(db.database)
|
||||
|
||||
with db.database.transaction():
|
||||
# Change the uniqueness constraint on data source name to be (org, name):
|
||||
# In some cases it's a constraint:
|
||||
db.database.execute_sql('ALTER TABLE data_sources DROP CONSTRAINT IF EXISTS unique_name')
|
||||
# In others only an index:
|
||||
db.database.execute_sql('DROP INDEX IF EXISTS data_sources_name')
|
||||
|
||||
migrate(
|
||||
migrator.add_index('data_sources', ('org_id', 'name'), unique=True)
|
||||
)
|
||||
|
||||
db.close_db(None)
|
||||
13
migrations/add_created_at_field.py
Normal file
13
migrations/add_created_at_field.py
Normal file
@@ -0,0 +1,13 @@
|
||||
from playhouse.migrate import Migrator
|
||||
from redash import db
|
||||
from redash import models
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = Migrator(db.database)
|
||||
with db.database.transaction():
|
||||
migrator.add_column(models.Dashboard, models.Dashboard.created_at, 'created_at')
|
||||
migrator.add_column(models.Widget, models.Widget.created_at, 'created_at')
|
||||
|
||||
db.close_db(None)
|
||||
12
migrations/add_global_filters_to_dashboard.py
Normal file
12
migrations/add_global_filters_to_dashboard.py
Normal file
@@ -0,0 +1,12 @@
|
||||
from playhouse.migrate import Migrator
|
||||
from redash import models
|
||||
from redash.models import db
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = Migrator(db.database)
|
||||
with db.database.transaction():
|
||||
migrator.add_column(models.Dashboard, models.Dashboard.dashboard_filters_enabled, 'dashboard_filters_enabled')
|
||||
|
||||
db.close_db(None)
|
||||
12
migrations/add_password_to_users.py
Normal file
12
migrations/add_password_to_users.py
Normal file
@@ -0,0 +1,12 @@
|
||||
from playhouse.migrate import Migrator
|
||||
from redash import db
|
||||
from redash import models
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = Migrator(db.database)
|
||||
with db.database.transaction():
|
||||
migrator.add_column(models.User, models.User.password_hash, 'password_hash')
|
||||
|
||||
db.close_db(None)
|
||||
13
migrations/add_permissions_to_user.py
Normal file
13
migrations/add_permissions_to_user.py
Normal file
@@ -0,0 +1,13 @@
|
||||
from playhouse.migrate import Migrator
|
||||
from redash import db
|
||||
from redash import models
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = Migrator(db.database)
|
||||
with db.database.transaction():
|
||||
migrator.add_column(models.User, models.User.permissions, 'permissions')
|
||||
models.User.update(permissions=['admin'] + models.User.DEFAULT_PERMISSIONS).where(models.User.is_admin == True).execute()
|
||||
|
||||
db.close_db(None)
|
||||
13
migrations/add_queue_name_to_data_source.py
Normal file
13
migrations/add_queue_name_to_data_source.py
Normal file
@@ -0,0 +1,13 @@
|
||||
from playhouse.migrate import Migrator
|
||||
from redash.models import db
|
||||
from redash import models
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = Migrator(db.database)
|
||||
with db.database.transaction():
|
||||
migrator.add_column(models.DataSource, models.DataSource.queue_name, 'queue_name')
|
||||
migrator.add_column(models.DataSource, models.DataSource.scheduled_queue_name, 'scheduled_queue_name')
|
||||
|
||||
db.close_db(None)
|
||||
13
migrations/add_text_to_widgets.py
Normal file
13
migrations/add_text_to_widgets.py
Normal file
@@ -0,0 +1,13 @@
|
||||
from playhouse.migrate import Migrator
|
||||
from redash.models import db
|
||||
from redash import models
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = Migrator(db.database)
|
||||
with db.database.transaction():
|
||||
migrator.add_column(models.Widget, models.Widget.text, 'text')
|
||||
migrator.set_nullable(models.Widget, models.Widget.visualization, True)
|
||||
|
||||
db.close_db(None)
|
||||
13
migrations/add_view_query_permission.py
Normal file
13
migrations/add_view_query_permission.py
Normal file
@@ -0,0 +1,13 @@
|
||||
import peewee
|
||||
from redash import db
|
||||
from redash import models
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
|
||||
previous_default_permissions = models.User.DEFAULT_PERMISSIONS[:]
|
||||
previous_default_permissions.remove('view_query')
|
||||
models.User.update(permissions=peewee.fn.array_append(models.User.permissions, 'view_query')).where(peewee.SQL("'view_source' = any(permissions)")).execute()
|
||||
|
||||
db.close_db(None)
|
||||
12
migrations/change_queries_description_to_nullable.py
Normal file
12
migrations/change_queries_description_to_nullable.py
Normal file
@@ -0,0 +1,12 @@
|
||||
from playhouse.migrate import Migrator
|
||||
from redash import db
|
||||
from redash import models
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = Migrator(db.database)
|
||||
with db.database.transaction():
|
||||
migrator.set_nullable(models.Query, models.Query.description, True)
|
||||
|
||||
db.close_db(None)
|
||||
13
migrations/change_query_id_on_widgets_to_null.py
Normal file
13
migrations/change_query_id_on_widgets_to_null.py
Normal file
@@ -0,0 +1,13 @@
|
||||
from playhouse.migrate import Migrator
|
||||
from redash import db
|
||||
from redash import models
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = Migrator(db.database)
|
||||
with db.database.transaction():
|
||||
migrator.set_nullable(models.Widget, models.Widget.query_id, True)
|
||||
migrator.set_nullable(models.Widget, models.Widget.type, True)
|
||||
|
||||
db.close_db(None)
|
||||
11
migrations/create_activity_log.py
Normal file
11
migrations/create_activity_log.py
Normal file
@@ -0,0 +1,11 @@
|
||||
from redash import db
|
||||
from redash import models
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
|
||||
if not models.ActivityLog.table_exists():
|
||||
print "Creating activity_log table..."
|
||||
models.ActivityLog.create_table()
|
||||
|
||||
db.close_db(None)
|
||||
48
migrations/create_data_sources.py
Normal file
48
migrations/create_data_sources.py
Normal file
@@ -0,0 +1,48 @@
|
||||
import logging
|
||||
import peewee
|
||||
from playhouse.migrate import Migrator
|
||||
from redash import db
|
||||
from redash import models
|
||||
from redash import settings
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
|
||||
if not models.DataSource.table_exists():
|
||||
print "Creating data_sources table..."
|
||||
models.DataSource.create_table()
|
||||
|
||||
default_data_source = models.DataSource.create(name="Default",
|
||||
type=settings.CONNECTION_ADAPTER,
|
||||
options=settings.CONNECTION_STRING)
|
||||
else:
|
||||
default_data_source = models.DataSource.select().first()
|
||||
|
||||
migrator = Migrator(db.database)
|
||||
models.Query.data_source.null = True
|
||||
models.QueryResult.data_source.null = True
|
||||
try:
|
||||
with db.database.transaction():
|
||||
migrator.add_column(models.Query, models.Query.data_source, "data_source_id")
|
||||
except peewee.ProgrammingError:
|
||||
print "Failed to create data_source_id column -- assuming it already exists"
|
||||
|
||||
try:
|
||||
with db.database.transaction():
|
||||
migrator.add_column(models.QueryResult, models.QueryResult.data_source, "data_source_id")
|
||||
except peewee.ProgrammingError:
|
||||
print "Failed to create data_source_id column -- assuming it already exists"
|
||||
|
||||
print "Updating data source to existing one..."
|
||||
models.Query.update(data_source=default_data_source.id).execute()
|
||||
models.QueryResult.update(data_source=default_data_source.id).execute()
|
||||
|
||||
with db.database.transaction():
|
||||
print "Setting data source to non nullable..."
|
||||
migrator.set_nullable(models.Query, models.Query.data_source, False)
|
||||
|
||||
with db.database.transaction():
|
||||
print "Setting data source to non nullable..."
|
||||
migrator.set_nullable(models.QueryResult, models.QueryResult.data_source, False)
|
||||
|
||||
db.close_db(None)
|
||||
12
migrations/create_events.py
Normal file
12
migrations/create_events.py
Normal file
@@ -0,0 +1,12 @@
|
||||
from redash.models import db
|
||||
from redash import models
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
|
||||
if not models.Event.table_exists():
|
||||
print "Creating events table..."
|
||||
models.Event.create_table()
|
||||
|
||||
db.close_db(None)
|
||||
56
migrations/create_users.py
Normal file
56
migrations/create_users.py
Normal file
@@ -0,0 +1,56 @@
|
||||
import json
|
||||
import itertools
|
||||
import peewee
|
||||
from playhouse.migrate import Migrator
|
||||
from redash import db, settings
|
||||
from redash import models
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
|
||||
if not models.User.table_exists():
|
||||
print "Creating user table..."
|
||||
models.User.create_table()
|
||||
|
||||
migrator = Migrator(db.database)
|
||||
with db.database.transaction():
|
||||
print "Creating user field on dashboard and queries..."
|
||||
try:
|
||||
migrator.rename_column(models.Query, '"user"', "user_email")
|
||||
migrator.rename_column(models.Dashboard, '"user"', "user_email")
|
||||
except peewee.ProgrammingError:
|
||||
print "Failed to rename user column -- assuming it already exists"
|
||||
|
||||
with db.database.transaction():
|
||||
models.Query.user.null = True
|
||||
models.Dashboard.user.null = True
|
||||
|
||||
try:
|
||||
migrator.add_column(models.Query, models.Query.user, "user_id")
|
||||
migrator.add_column(models.Dashboard, models.Dashboard.user, "user_id")
|
||||
except peewee.ProgrammingError:
|
||||
print "Failed to create user_id column -- assuming it already exists"
|
||||
|
||||
print "Creating user for all queries and dashboards..."
|
||||
for obj in itertools.chain(models.Query.select(), models.Dashboard.select()):
|
||||
# Some old databases might have queries with empty string as user email:
|
||||
email = obj.user_email or settings.ADMINS[0]
|
||||
email = email.split(',')[0]
|
||||
|
||||
print ".. {} , {}, {}".format(type(obj), obj.id, email)
|
||||
|
||||
try:
|
||||
user = models.User.get(models.User.email == email)
|
||||
except models.User.DoesNotExist:
|
||||
is_admin = email in settings.ADMINS
|
||||
user = models.User.create(email=email, name=email, is_admin=is_admin)
|
||||
|
||||
obj.user = user
|
||||
obj.save()
|
||||
|
||||
print "Set user_id to non null..."
|
||||
with db.database.transaction():
|
||||
migrator.set_nullable(models.Query, models.Query.user, False)
|
||||
migrator.set_nullable(models.Dashboard, models.Dashboard.user, False)
|
||||
migrator.set_nullable(models.Query, models.Query.user_email, True)
|
||||
migrator.set_nullable(models.Dashboard, models.Dashboard.user_email, True)
|
||||
70
migrations/create_visualizations.py
Normal file
70
migrations/create_visualizations.py
Normal file
@@ -0,0 +1,70 @@
|
||||
import json
|
||||
from playhouse.migrate import Migrator
|
||||
from redash import db
|
||||
from redash import models
|
||||
|
||||
if __name__ == '__main__':
|
||||
default_options = {"series": {"type": "column"}}
|
||||
|
||||
db.connect_db()
|
||||
|
||||
if not models.Visualization.table_exists():
|
||||
print "Creating visualization table..."
|
||||
models.Visualization.create_table()
|
||||
|
||||
with db.database.transaction():
|
||||
migrator = Migrator(db.database)
|
||||
print "Adding visualization_id to widgets:"
|
||||
field = models.Widget.visualization
|
||||
field.null = True
|
||||
migrator.add_column(models.Widget, models.Widget.visualization, 'visualization_id')
|
||||
|
||||
print 'Creating TABLE visualizations for all queries...'
|
||||
for query in models.Query.select():
|
||||
vis = models.Visualization(query=query, name="Table",
|
||||
description=query.description or "",
|
||||
type="TABLE", options="{}")
|
||||
vis.save()
|
||||
|
||||
print 'Creating COHORT visualizations for all queries named like %cohort%...'
|
||||
for query in models.Query.select().where(models.Query.name ** "%cohort%"):
|
||||
vis = models.Visualization(query=query, name="Cohort",
|
||||
description=query.description or "",
|
||||
type="COHORT", options="{}")
|
||||
vis.save()
|
||||
|
||||
print 'Create visualization for all widgets (unless exists already):'
|
||||
for widget in models.Widget.select():
|
||||
print 'Processing widget id: %d:' % widget.id
|
||||
vis_type = widget.type.upper()
|
||||
if vis_type == 'GRID':
|
||||
vis_type = 'TABLE'
|
||||
|
||||
query = models.Query.get_by_id(widget.query_id)
|
||||
vis = query.visualizations.where(models.Visualization.type == vis_type).first()
|
||||
if vis:
|
||||
print '... visualization type (%s) found.' % vis_type
|
||||
widget.visualization = vis
|
||||
widget.save()
|
||||
else:
|
||||
vis_name = vis_type.title()
|
||||
|
||||
options = json.loads(widget.options)
|
||||
vis_options = {"series": options} if options else default_options
|
||||
vis_options = json.dumps(vis_options)
|
||||
|
||||
vis = models.Visualization(query=query, name=vis_name,
|
||||
description=query.description or "",
|
||||
type=vis_type, options=vis_options)
|
||||
|
||||
print '... Created visualization for type: %s' % vis_type
|
||||
vis.save()
|
||||
widget.visualization = vis
|
||||
widget.save()
|
||||
|
||||
with db.database.transaction():
|
||||
migrator = Migrator(db.database)
|
||||
print "Setting visualization_id as not null..."
|
||||
migrator.set_nullable(models.Widget, models.Widget.visualization, False)
|
||||
|
||||
db.close_db(None)
|
||||
29
migrations/permissions_migration.py
Normal file
29
migrations/permissions_migration.py
Normal file
@@ -0,0 +1,29 @@
|
||||
import peewee
|
||||
from playhouse.migrate import Migrator
|
||||
from redash import models
|
||||
from redash.models import db
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
db.connect_db()
|
||||
migrator = Migrator(db.database)
|
||||
|
||||
if not models.Group.table_exists():
|
||||
print "Creating groups table..."
|
||||
models.Group.create_table()
|
||||
|
||||
with db.database.transaction():
|
||||
models.Group.insert(name='admin', permissions=['admin'], tables=['*']).execute()
|
||||
models.Group.insert(name='api', permissions=['view_query'], tables=['*']).execute()
|
||||
models.Group.insert(name='default', permissions=models.Group.DEFAULT_PERMISSIONS, tables=['*']).execute()
|
||||
|
||||
migrator.add_column(models.User, models.User.groups, 'groups')
|
||||
|
||||
models.User.update(groups=['admin', 'default']).where(peewee.SQL("is_admin = true")).execute()
|
||||
models.User.update(groups=['admin', 'default']).where(peewee.SQL("'admin' = any(permissions)")).execute()
|
||||
models.User.update(groups=['default']).where(peewee.SQL("is_admin = false")).execute()
|
||||
|
||||
migrator.drop_column(models.User, 'permissions')
|
||||
migrator.drop_column(models.User, 'is_admin')
|
||||
|
||||
db.close_db(None)
|
||||
@@ -11,17 +11,14 @@
|
||||
"latedef": true,
|
||||
"newcap": true,
|
||||
"noarg": true,
|
||||
"quotmark": false,
|
||||
"quotmark": "single",
|
||||
"regexp": true,
|
||||
"undef": true,
|
||||
"unused": true,
|
||||
"strict": false,
|
||||
"strict": true,
|
||||
"trailing": true,
|
||||
"smarttabs": true,
|
||||
"globals": {
|
||||
"angular": false,
|
||||
"_": false,
|
||||
"$": false,
|
||||
"currentUser": false
|
||||
"angular": false
|
||||
}
|
||||
}
|
||||
|
||||
416
rd_ui/Gruntfile.js
Normal file
416
rd_ui/Gruntfile.js
Normal file
@@ -0,0 +1,416 @@
|
||||
// Generated on 2014-07-30 using generator-angular 0.9.2
|
||||
'use strict';
|
||||
|
||||
// # Globbing
|
||||
// for performance reasons we're only matching one level down:
|
||||
// 'test/spec/{,*/}*.js'
|
||||
// use this if you want to recursively match all subfolders:
|
||||
// 'test/spec/**/*.js'
|
||||
|
||||
module.exports = function (grunt) {
|
||||
|
||||
// Load grunt tasks automatically
|
||||
require('load-grunt-tasks')(grunt);
|
||||
|
||||
// Time how long tasks take. Can help when optimizing build times
|
||||
require('time-grunt')(grunt);
|
||||
|
||||
// Configurable paths for the application
|
||||
var appConfig = {
|
||||
app: require('./bower.json').appPath || 'app',
|
||||
dist: 'dist'
|
||||
};
|
||||
|
||||
// Define the configuration for all the tasks
|
||||
grunt.initConfig({
|
||||
|
||||
// Project settings
|
||||
yeoman: appConfig,
|
||||
|
||||
// Watches files for changes and runs tasks based on the changed files
|
||||
watch: {
|
||||
bower: {
|
||||
files: ['bower.json'],
|
||||
tasks: ['wiredep']
|
||||
},
|
||||
js: {
|
||||
files: ['<%= yeoman.app %>/scripts/{,*/}*.js'],
|
||||
tasks: ['newer:jshint:all'],
|
||||
options: {
|
||||
livereload: '<%= connect.options.livereload %>'
|
||||
}
|
||||
},
|
||||
jsTest: {
|
||||
files: ['test/spec/{,*/}*.js'],
|
||||
tasks: ['newer:jshint:test', 'karma']
|
||||
},
|
||||
styles: {
|
||||
files: ['<%= yeoman.app %>/styles/{,*/}*.css'],
|
||||
tasks: ['newer:copy:styles', 'autoprefixer']
|
||||
},
|
||||
gruntfile: {
|
||||
files: ['Gruntfile.js']
|
||||
},
|
||||
livereload: {
|
||||
options: {
|
||||
livereload: '<%= connect.options.livereload %>'
|
||||
},
|
||||
files: [
|
||||
'<%= yeoman.app %>/{,*/}*.html',
|
||||
'.tmp/styles/{,*/}*.css',
|
||||
'<%= yeoman.app %>/images/{,*/}*.{png,jpg,jpeg,gif,webp,svg}'
|
||||
]
|
||||
}
|
||||
},
|
||||
|
||||
// The actual grunt server settings
|
||||
connect: {
|
||||
options: {
|
||||
port: 9000,
|
||||
// Change this to '0.0.0.0' to access the server from outside.
|
||||
hostname: 'localhost',
|
||||
livereload: 35729
|
||||
},
|
||||
livereload: {
|
||||
options: {
|
||||
open: true,
|
||||
middleware: function (connect) {
|
||||
return [
|
||||
connect.static('.tmp'),
|
||||
connect().use(
|
||||
'/bower_components',
|
||||
connect.static('./bower_components')
|
||||
),
|
||||
connect.static(appConfig.app)
|
||||
];
|
||||
}
|
||||
}
|
||||
},
|
||||
test: {
|
||||
options: {
|
||||
port: 9001,
|
||||
middleware: function (connect) {
|
||||
return [
|
||||
connect.static('.tmp'),
|
||||
connect.static('test'),
|
||||
connect().use(
|
||||
'/bower_components',
|
||||
connect.static('./bower_components')
|
||||
),
|
||||
connect.static(appConfig.app)
|
||||
];
|
||||
}
|
||||
}
|
||||
},
|
||||
dist: {
|
||||
options: {
|
||||
open: true,
|
||||
base: '<%= yeoman.dist %>'
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
// Make sure code styles are up to par and there are no obvious mistakes
|
||||
jshint: {
|
||||
options: {
|
||||
jshintrc: '.jshintrc',
|
||||
reporter: require('jshint-stylish')
|
||||
},
|
||||
all: {
|
||||
src: [
|
||||
'Gruntfile.js',
|
||||
'<%= yeoman.app %>/scripts/{,*/}*.js'
|
||||
]
|
||||
},
|
||||
test: {
|
||||
options: {
|
||||
jshintrc: 'test/.jshintrc'
|
||||
},
|
||||
src: ['test/spec/{,*/}*.js']
|
||||
}
|
||||
},
|
||||
|
||||
// Empties folders to start fresh
|
||||
clean: {
|
||||
dist: {
|
||||
files: [{
|
||||
dot: true,
|
||||
src: [
|
||||
'.tmp',
|
||||
'<%= yeoman.dist %>/{,*/}*',
|
||||
'!<%= yeoman.dist %>/.git*'
|
||||
]
|
||||
}]
|
||||
},
|
||||
server: '.tmp'
|
||||
},
|
||||
|
||||
// Add vendor prefixed styles
|
||||
autoprefixer: {
|
||||
options: {
|
||||
browsers: ['last 1 version']
|
||||
},
|
||||
dist: {
|
||||
files: [{
|
||||
expand: true,
|
||||
cwd: '.tmp/styles/',
|
||||
src: '{,*/}*.css',
|
||||
dest: '.tmp/styles/'
|
||||
}]
|
||||
}
|
||||
},
|
||||
|
||||
// Automatically inject Bower components into the app
|
||||
wiredep: {
|
||||
options: {
|
||||
},
|
||||
app: {
|
||||
src: ['<%= yeoman.app %>/index.html'],
|
||||
ignorePath: /\.\.\//
|
||||
}
|
||||
},
|
||||
|
||||
// Renames files for browser caching purposes
|
||||
filerev: {
|
||||
dist: {
|
||||
src: [
|
||||
'<%= yeoman.dist %>/scripts/{,*/}*.js',
|
||||
'<%= yeoman.dist %>/styles/{,*/}*.css',
|
||||
'<%= yeoman.dist %>/images/{,*/}*.{png,jpg,jpeg,gif,webp,svg}',
|
||||
'<%= yeoman.dist %>/styles/fonts/*'
|
||||
]
|
||||
}
|
||||
},
|
||||
|
||||
// Reads HTML for usemin blocks to enable smart builds that automatically
|
||||
// concat, minify and revision files. Creates configurations in memory so
|
||||
// additional tasks can operate on them
|
||||
useminPrepare: {
|
||||
html: ['<%= yeoman.app %>/index.html', '<%= yeoman.app %>/login.html'],
|
||||
options: {
|
||||
dest: '<%= yeoman.dist %>',
|
||||
flow: {
|
||||
html: {
|
||||
steps: {
|
||||
js: ['concat', 'uglifyjs'],
|
||||
css: ['cssmin']
|
||||
},
|
||||
post: {}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
// Performs rewrites based on filerev and the useminPrepare configuration
|
||||
usemin: {
|
||||
html: ['<%= yeoman.dist %>/{,*/}*.html'],
|
||||
css: ['<%= yeoman.dist %>/styles/{,*/}*.css'],
|
||||
options: {
|
||||
assetsDirs: ['<%= yeoman.dist %>','<%= yeoman.dist %>/images']
|
||||
}
|
||||
},
|
||||
|
||||
// The following *-min tasks will produce minified files in the dist folder
|
||||
// By default, your `index.html`'s <!-- Usemin block --> will take care of
|
||||
// minification. These next options are pre-configured if you do not wish
|
||||
// to use the Usemin blocks.
|
||||
// cssmin: {
|
||||
// dist: {
|
||||
// files: {
|
||||
// '<%= yeoman.dist %>/styles/main.css': [
|
||||
// '.tmp/styles/{,*/}*.css'
|
||||
// ]
|
||||
// }
|
||||
// }
|
||||
// },
|
||||
// uglify: {
|
||||
// dist: {
|
||||
// files: {
|
||||
// '<%= yeoman.dist %>/scripts/scripts.js': [
|
||||
// '<%= yeoman.dist %>/scripts/scripts.js'
|
||||
// ]
|
||||
// }
|
||||
// }
|
||||
// },
|
||||
// concat: {
|
||||
// dist: {}
|
||||
// },
|
||||
|
||||
imagemin: {
|
||||
dist: {
|
||||
files: [{
|
||||
expand: true,
|
||||
cwd: '<%= yeoman.app %>/images',
|
||||
src: '{,*/}*.{png,jpg,jpeg,gif}',
|
||||
dest: '<%= yeoman.dist %>/images'
|
||||
}]
|
||||
}
|
||||
},
|
||||
|
||||
svgmin: {
|
||||
dist: {
|
||||
files: [{
|
||||
expand: true,
|
||||
cwd: '<%= yeoman.app %>/images',
|
||||
src: '{,*/}*.svg',
|
||||
dest: '<%= yeoman.dist %>/images'
|
||||
}]
|
||||
}
|
||||
},
|
||||
|
||||
htmlmin: {
|
||||
dist: {
|
||||
options: {
|
||||
collapseWhitespace: true,
|
||||
conservativeCollapse: true,
|
||||
collapseBooleanAttributes: true,
|
||||
removeCommentsFromCDATA: true,
|
||||
removeOptionalTags: true
|
||||
},
|
||||
files: [{
|
||||
expand: true,
|
||||
cwd: '<%= yeoman.dist %>',
|
||||
src: ['*.html', 'views/{,*/}*.html'],
|
||||
dest: '<%= yeoman.dist %>'
|
||||
}]
|
||||
}
|
||||
},
|
||||
|
||||
// ngmin tries to make the code safe for minification automatically by
|
||||
// using the Angular long form for dependency injection. It doesn't work on
|
||||
// things like resolve or inject so those have to be done manually.
|
||||
ngmin: {
|
||||
dist: {
|
||||
files: [{
|
||||
expand: true,
|
||||
cwd: '.tmp/concat/scripts',
|
||||
src: '*.js',
|
||||
dest: '.tmp/concat/scripts'
|
||||
}]
|
||||
}
|
||||
},
|
||||
|
||||
// Replace Google CDN references
|
||||
cdnify: {
|
||||
dist: {
|
||||
html: ['<%= yeoman.dist %>/*.html']
|
||||
}
|
||||
},
|
||||
|
||||
// Copies remaining files to places other tasks can use
|
||||
copy: {
|
||||
dist: {
|
||||
files: [{
|
||||
expand: true,
|
||||
dot: true,
|
||||
cwd: '<%= yeoman.app %>',
|
||||
dest: '<%= yeoman.dist %>',
|
||||
src: [
|
||||
'*.{ico,png,txt}',
|
||||
'.htaccess',
|
||||
'*.html',
|
||||
'views/{,*/}*.html',
|
||||
'images/{,*/}*.{webp}',
|
||||
'fonts/*'
|
||||
]
|
||||
}, {
|
||||
expand: true,
|
||||
cwd: '.tmp/images',
|
||||
dest: '<%= yeoman.dist %>/images',
|
||||
src: ['generated/*']
|
||||
}, {
|
||||
expand: true,
|
||||
cwd: '<%= yeoman.app %>/bower_components/bootstrap/dist',
|
||||
src: 'fonts/*',
|
||||
dest: '<%= yeoman.dist %>'
|
||||
}, {
|
||||
expand: true,
|
||||
cwd: '<%= yeoman.app %>/bower_components/font-awesome',
|
||||
src: 'fonts/*',
|
||||
dest: '<%= yeoman.dist %>'
|
||||
}]
|
||||
},
|
||||
styles: {
|
||||
expand: true,
|
||||
cwd: '<%= yeoman.app %>/styles',
|
||||
dest: '.tmp/styles/',
|
||||
src: '{,*/}*.css'
|
||||
}
|
||||
},
|
||||
|
||||
// Run some tasks in parallel to speed up the build process
|
||||
concurrent: {
|
||||
server: [
|
||||
'copy:styles'
|
||||
],
|
||||
test: [
|
||||
'copy:styles'
|
||||
],
|
||||
dist: [
|
||||
'copy:styles',
|
||||
'imagemin',
|
||||
'svgmin'
|
||||
]
|
||||
},
|
||||
|
||||
// Test settings
|
||||
karma: {
|
||||
unit: {
|
||||
configFile: 'test/karma.conf.js',
|
||||
singleRun: true
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
grunt.registerTask('serve', 'Compile then start a connect web server', function (target) {
|
||||
if (target === 'dist') {
|
||||
return grunt.task.run(['build', 'connect:dist:keepalive']);
|
||||
}
|
||||
|
||||
grunt.task.run([
|
||||
'clean:server',
|
||||
'wiredep',
|
||||
'concurrent:server',
|
||||
'autoprefixer',
|
||||
'connect:livereload',
|
||||
'watch'
|
||||
]);
|
||||
});
|
||||
|
||||
grunt.registerTask('server', 'DEPRECATED TASK. Use the "serve" task instead', function (target) {
|
||||
grunt.log.warn('The `server` task has been deprecated. Use `grunt serve` to start a server.');
|
||||
grunt.task.run(['serve:' + target]);
|
||||
});
|
||||
|
||||
grunt.registerTask('test', [
|
||||
'clean:server',
|
||||
'concurrent:test',
|
||||
'autoprefixer',
|
||||
'connect:test',
|
||||
'karma'
|
||||
]);
|
||||
|
||||
grunt.registerTask('build', [
|
||||
'clean:dist',
|
||||
'wiredep',
|
||||
'useminPrepare',
|
||||
'concurrent:dist',
|
||||
'autoprefixer',
|
||||
'concat',
|
||||
'ngmin',
|
||||
'copy:dist',
|
||||
'cdnify',
|
||||
'cssmin',
|
||||
'uglify',
|
||||
'filerev',
|
||||
'usemin',
|
||||
'htmlmin'
|
||||
]);
|
||||
|
||||
grunt.registerTask('default', [
|
||||
'newer:jshint',
|
||||
'test',
|
||||
'build'
|
||||
]);
|
||||
};
|
||||
@@ -1,82 +0,0 @@
|
||||
<!DOCTYPE html>
|
||||
<!--[if lt IE 7]> <html class="no-js lt-ie9 lt-ie8 lt-ie7" ng-app="redash" ng-controller='MainCtrl'> <![endif]-->
|
||||
<!--[if IE 7]> <html class="no-js lt-ie9 lt-ie8" ng-app="redash" ng-controller='MainCtrl'> <![endif]-->
|
||||
<!--[if IE 8]> <html class="no-js lt-ie9" ng-app="redash" ng-controller='MainCtrl'> <![endif]-->
|
||||
<!--[if gt IE 8]><!--> <html class="no-js" ng-app="redash" ng-controller='EmbedCtrl'> <!--<![endif]-->
|
||||
<head>
|
||||
<base href="{{base_href}}">
|
||||
<title ng-bind="'{{name}} | ' + pageTitle"></title>
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
|
||||
<!-- build:css /styles/embed.css -->
|
||||
<link rel="stylesheet" href="/bower_components/bootstrap/dist/css/bootstrap.css">
|
||||
<link rel="stylesheet" href="/bower_components/codemirror/lib/codemirror.css">
|
||||
<link rel="stylesheet" href="/bower_components/gridster/dist/jquery.gridster.css">
|
||||
<link rel="stylesheet" href="/bower_components/pivottable/dist/pivot.css">
|
||||
<link rel="stylesheet" href="/bower_components/cornelius/src/cornelius.css">
|
||||
<link rel="stylesheet" href="/bower_components/angular-ui-select/dist/select.css">
|
||||
<link rel="stylesheet" href="/bower_components/pace/themes/pace-theme-minimal.css">
|
||||
<link rel="stylesheet" href="/bower_components/font-awesome/css/font-awesome.css">
|
||||
<link rel="stylesheet" href="/bower_components/codemirror/addon/hint/show-hint.css">
|
||||
<link rel="stylesheet" href="/bower_components/leaflet/dist/leaflet.css">
|
||||
<link rel="stylesheet" href="/styles/redash.css">
|
||||
<!-- endbuild -->
|
||||
|
||||
<link rel="icon" type="image/png" sizes="32x32" href="/images/favicon-32x32.png">
|
||||
<link rel="icon" type="image/png" sizes="96x96" href="/images/favicon-96x96.png">
|
||||
<link rel="icon" type="image/png" sizes="16x16" href="/images/favicon-16x16.png">
|
||||
|
||||
<style>
|
||||
body { padding:0; }
|
||||
.col-lg-12, .row, .container, .panel { margin:0; padding:0; }
|
||||
.container::after, .row::after { display:none; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<div growl></div>
|
||||
<div ng-view></div>
|
||||
|
||||
{% include 'vendor_scripts.html' %}
|
||||
|
||||
<!-- build:js({.tmp,app}) /scripts/embed-scripts.js -->
|
||||
<script src="/scripts/embed.js"></script>
|
||||
<script src="/scripts/services/services.js"></script>
|
||||
<script src="/scripts/services/resources.js"></script>
|
||||
<script src="/scripts/services/notifications.js"></script>
|
||||
<script src="/scripts/services/dashboards.js"></script>
|
||||
<script src="/scripts/controllers/controllers.js"></script>
|
||||
<script src="/scripts/controllers/dashboard.js"></script>
|
||||
<script src="/scripts/controllers/admin_controllers.js"></script>
|
||||
<script src="/scripts/controllers/data_sources.js"></script>
|
||||
<script src="/scripts/controllers/query_view.js"></script>
|
||||
<script src="/scripts/controllers/query_source.js"></script>
|
||||
<script src="/scripts/controllers/users.js"></script>
|
||||
<script src="/scripts/visualizations/base.js"></script>
|
||||
<script src="/scripts/visualizations/chart.js"></script>
|
||||
<script src="/scripts/visualizations/cohort.js"></script>
|
||||
<script src="/scripts/visualizations/map.js"></script>
|
||||
<script src="/scripts/visualizations/counter.js"></script>
|
||||
<script src="/scripts/visualizations/boxplot.js"></script>
|
||||
<script src="/scripts/visualizations/box.js"></script>
|
||||
<script src="/scripts/visualizations/table.js"></script>
|
||||
<script src="/scripts/visualizations/pivot.js"></script>
|
||||
<script src="/scripts/visualizations/date_range_selector.js"></script>
|
||||
<script src="/scripts/directives/directives.js"></script>
|
||||
<script src="/scripts/directives/query_directives.js"></script>
|
||||
<script src="/scripts/directives/data_source_directives.js"></script>
|
||||
<script src="/scripts/directives/dashboard_directives.js"></script>
|
||||
<script src="/scripts/filters.js"></script>
|
||||
<script src="/scripts/controllers/alerts.js"></script>
|
||||
<!-- endbuild -->
|
||||
|
||||
<script>
|
||||
var clientConfig = {{ client_config|safe }};
|
||||
var visualization = {{ visualization|safe }};
|
||||
var query_result = {{ query_result|safe }};
|
||||
|
||||
{{ analytics|safe }}
|
||||
</script>
|
||||
|
||||
</body>
|
||||
</html>
|
||||
Binary file not shown.
Before Width: | Height: | Size: 1.3 KiB |
Binary file not shown.
Before Width: | Height: | Size: 2.0 KiB |
Binary file not shown.
Before Width: | Height: | Size: 3.8 KiB |
Binary file not shown.
Before Width: | Height: | Size: 6.0 KiB |
@@ -4,7 +4,6 @@
<!--[if IE 8]> <html class="no-js lt-ie9" ng-app="redash" ng-controller='MainCtrl'> <![endif]-->
<!--[if gt IE 8]><!--> <html class="no-js" ng-app="redash" ng-controller='MainCtrl'> <!--<![endif]-->
<head>
<base href="{{base_href}}">
<title ng-bind="'{{name}} | ' + pageTitle"></title>
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
@@ -15,18 +14,13 @@
<link rel="stylesheet" href="/bower_components/gridster/dist/jquery.gridster.css">
<link rel="stylesheet" href="/bower_components/pivottable/dist/pivot.css">
<link rel="stylesheet" href="/bower_components/cornelius/src/cornelius.css">
<link rel="stylesheet" href="/bower_components/select2/select2.css">
<link rel="stylesheet" href="/bower_components/angular-ui-select/dist/select.css">
<link rel="stylesheet" href="/bower_components/pace/themes/pace-theme-minimal.css">
<link rel="stylesheet" href="/bower_components/font-awesome/css/font-awesome.css">
<link rel="stylesheet" href="/bower_components/codemirror/addon/hint/show-hint.css">
<link rel="stylesheet" href="/bower_components/leaflet/dist/leaflet.css">
<link rel="stylesheet" href="/styles/redash.css">
<!-- endbuild -->

<link rel="icon" type="image/png" sizes="32x32" href="/images/favicon-32x32.png">
<link rel="icon" type="image/png" sizes="96x96" href="/images/favicon-96x96.png">
<link rel="icon" type="image/png" sizes="16x16" href="/images/favicon-16x16.png">

</head>
<body>
<div growl></div>
@@ -40,26 +34,27 @@
|
||||
<span class="icon-bar"></span>
|
||||
<span class="icon-bar"></span>
|
||||
</button>
|
||||
<a class="navbar-brand" href="{{base_href}}"><img src="/images/redash_icon_small.png"/></a>
|
||||
<a class="navbar-brand" href="/"><strong>{{name}}</strong></a>
|
||||
</div>
|
||||
{% raw %}
|
||||
<div class="collapse navbar-collapse navbar-ex1-collapse">
|
||||
<ul class="nav navbar-nav">
|
||||
<li class="active" ng-show="pageTitle"><a class="page-title" ng-bind="pageTitle"></a></li>
|
||||
<li class="dropdown" ng-show="groupedDashboards.length > 0 || otherDashboards.length > 0 || currentUser.hasPermission('create_dashboard')" dropdown>
|
||||
<a href="#" class="dropdown-toggle" dropdown-toggle><span class="fa fa-tachometer"></span> <b class="caret"></b></a>
|
||||
<a href="#" class="dropdown-toggle" dropdown-toggle><span class="glyphicon glyphicon-th-large"></span> <b class="caret"></b></a>
|
||||
<ul class="dropdown-menu" dropdown-menu>
|
||||
<span ng-repeat="(name, group) in groupedDashboards">
|
||||
<li class="dropdown-submenu">
|
||||
<a href="#" ng-bind="name"></a>
|
||||
<ul class="dropdown-menu">
|
||||
<li ng-repeat="dashboard in group" role="presentation">
|
||||
<a role="menu-item" ng-href="dashboard/{{dashboard.slug}}" ng-bind="dashboard.name"></a>
|
||||
<a role="menu-item" ng-href="/dashboard/{{dashboard.slug}}" ng-bind="dashboard.name"></a>
|
||||
</li>
|
||||
</ul>
|
||||
</li>
|
||||
</span>
|
||||
<li ng-repeat="dashboard in otherDashboards">
|
||||
<a role="menu-item" ng-href="dashboard/{{dashboard.slug}}" ng-bind="dashboard.name"></a>
|
||||
<a role="menu-item" ng-href="/dashboard/{{dashboard.slug}}" ng-bind="dashboard.name"></a>
|
||||
</li>
|
||||
<li class="divider" ng-show="currentUser.hasPermission('create_dashboard') && (groupedDashboards.length > 0 || otherDashboards.length > 0)"></li>
|
||||
<li><a data-toggle="modal" href="#new_dashboard_dialog" ng-show="currentUser.hasPermission('create_dashboard')">New Dashboard</a></li>
|
||||
@@ -68,13 +63,10 @@
|
||||
<li class="dropdown" ng-show="currentUser.hasPermission('view_query')" dropdown>
|
||||
<a href="#" class="dropdown-toggle" dropdown-toggle>Queries <b class="caret"></b></a>
|
||||
<ul class="dropdown-menu" dropdown-menu>
|
||||
<li ng-show="currentUser.hasPermission('create_query')"><a href="queries/new">New Query</a></li>
|
||||
<li><a href="queries">Queries</a></li>
|
||||
<li ng-show="currentUser.hasPermission('create_query')"><a href="/queries/new">New Query</a></li>
|
||||
<li><a href="/queries">Queries</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<li>
|
||||
<a href="alerts">Alerts</a>
|
||||
</li>
|
||||
</ul>
|
||||
<form class="navbar-form navbar-left" role="search" ng-submit="searchQueries()">
|
||||
<div class="form-group">
|
||||
@@ -83,34 +75,12 @@
|
||||
<button type="submit" class="btn btn-default"><span class="glyphicon glyphicon-search"></span></button>
|
||||
</form>
|
||||
<ul class="nav navbar-nav navbar-right">
|
||||
<li ng-show="currentUser.hasPermission('admin')">
|
||||
<a href="data_sources" title="Data Sources"><i class="fa fa-database"></i></a>
|
||||
</li>
|
||||
<li ng-show="currentUser.hasPermission('list_users')">
|
||||
<a href="users" title="Users"><i class="fa fa-users"></i></a>
|
||||
</li>
|
||||
<li class="dropdown" dropdown>
|
||||
<a href="#" class="dropdown-toggle" dropdown-toggle><span ng-bind="currentUser.name"></span> <span class="caret"></span></a>
|
||||
<ul class="dropdown-menu" dropdown-menu>
|
||||
<li style="width:300px">
|
||||
<a ng-href="users/{{currentUser.id}}">
|
||||
<div class="row">
|
||||
<div class="col-sm-2">
|
||||
<img ng-src="{{currentUser.gravatar_url}}" size="40px" class="img-circle"/>
|
||||
</div>
|
||||
<div class="col-sm-10">
|
||||
<p><strong>{{currentUser.name}}</strong></p>
|
||||
</div>
|
||||
</div>
|
||||
</a>
|
||||
</li>
|
||||
<li class="divider">
|
||||
</li>
|
||||
<li>
|
||||
<a href="logout" target="_self">Log out</a>
|
||||
</li>
|
||||
</ul>
|
||||
</li>
|
||||
<p class="navbar-text avatar" ng-show="currentUser.id" ng-cloak>
|
||||
<img ng-src="{{currentUser.gravatar_url}}" class="img-circle" alt="{{currentUser.name}}"/>
|
||||
<a target="_self" href="/logout" id="logout" title="Logout">
|
||||
<span class="glyphicon glyphicon-log-out"></span>
|
||||
</a>
|
||||
</p>
|
||||
</ul>
|
||||
</div>
|
||||
{% endraw %}
|
||||
@@ -120,38 +90,49 @@
|
||||
|
||||
<edit-dashboard-form dashboard="newDashboard" id="new_dashboard_dialog"></edit-dashboard-form>
|
||||
<div ng-view></div>
|
||||
<div ng-if="showPermissionError" class="ng-cloak container" ng-cloak>
|
||||
<div class="row">
|
||||
<div class="text-center">
|
||||
<h1><span class="glyphicon glyphicon-lock"></span></h1>
|
||||
<p class="text-muted">
|
||||
You do not have permission to view the requested page.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{% raw %}
|
||||
<div class="visible-print">
|
||||
<hr>
|
||||
Source: {{location}}
|
||||
</div>
|
||||
<div class="container-fluid footer hidden-print">
|
||||
<hr/>
|
||||
<div class="container">
|
||||
<div class="row">
|
||||
<a href="http://redash.io">re:dash</a> <span ng-bind="version"></span>
|
||||
<small ng-if="newVersionAvailable" ng-cloak class="ng-cloak"><a href="http://version.redash.io/">(new re:dash version available)</a></small>
|
||||
<div class="pull-right">
|
||||
<a href="http://docs.redash.io/">Docs</a>
|
||||
<a href="http://github.com/getredash/redash">Contribute</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
{% endraw %}
|
||||
<script src="/bower_components/jquery/jquery.js"></script>
|
||||
|
||||
{% include 'vendor_scripts.html' %}
|
||||
<!-- build:js /scripts/plugins.js -->
|
||||
<script src="/bower_components/angular/angular.js"></script>
|
||||
<script src="/bower_components/jquery-ui/ui/jquery-ui.js"></script>
|
||||
<script src="/bower_components/bootstrap/js/collapse.js"></script>
|
||||
<script src="/bower_components/bootstrap/js/modal.js"></script>
|
||||
<script src="/bower_components/angular-resource/angular-resource.js"></script>
|
||||
<script src="/bower_components/angular-route/angular-route.js"></script>
|
||||
<script src="/bower_components/underscore/underscore.js"></script>
|
||||
<script src="/bower_components/moment/moment.js"></script>
|
||||
<script src="/bower_components/angular-moment/angular-moment.js"></script>
|
||||
<script src="/bower_components/codemirror/lib/codemirror.js"></script>
|
||||
<script src="/bower_components/codemirror/addon/edit/matchbrackets.js"></script>
|
||||
<script src="/bower_components/codemirror/addon/edit/closebrackets.js"></script>
|
||||
<script src="/bower_components/codemirror/addon/hint/show-hint.js"></script>
|
||||
<script src="/bower_components/codemirror/addon/hint/anyword-hint.js"></script>
|
||||
<script src="/bower_components/codemirror/mode/sql/sql.js"></script>
|
||||
<script src="/bower_components/codemirror/mode/javascript/javascript.js"></script>
|
||||
<script src="/bower_components/highcharts/highcharts.js"></script>
|
||||
<script src="/bower_components/highcharts/modules/exporting.js"></script>
|
||||
<script src="/bower_components/gridster/dist/jquery.gridster.js"></script>
|
||||
<script src="/bower_components/angular-growl/build/angular-growl.js"></script>
|
||||
<script src="/bower_components/pivottable/dist/pivot.js"></script>
|
||||
<script src="/bower_components/cornelius/src/cornelius.js"></script>
|
||||
<script src="/bower_components/mousetrap/mousetrap.js"></script>
|
||||
<script src="/bower_components/mousetrap/plugins/global-bind/mousetrap-global-bind.js"></script>
|
||||
<script src="/bower_components/select2/select2.js"></script>
|
||||
<script src="/bower_components/angular-ui-select2/src/select2.js"></script>
|
||||
<script src="/bower_components/angular-ui-select/dist/select.js"></script>
|
||||
<script src="/bower_components/underscore.string/lib/underscore.string.js"></script>
|
||||
<script src="/bower_components/marked/lib/marked.js"></script>
|
||||
<script src="/scripts/ng_highchart.js"></script>
|
||||
<script src="/scripts/ng_smart_table.js"></script>
|
||||
<script src="/bower_components/angular-ui-bootstrap-bower/ui-bootstrap-tpls.js"></script>
|
||||
<script src="/bower_components/bucky/bucky.js"></script>
|
||||
<script src="/bower_components/pace/pace.js"></script>
|
||||
<script src="/bower_components/mustache/mustache.js"></script>
|
||||
<script src="/bower_components/canvg/rgbcolor.js"></script>
|
||||
<script src="/bower_components/canvg/StackBlur.js"></script>
|
||||
<script src="/bower_components/canvg/canvg.js"></script>
|
||||
<!-- endbuild -->
|
||||
|
||||
<!-- build:js({.tmp,app}) /scripts/scripts.js -->
|
||||
<script src="/scripts/app.js"></script>
|
||||
@@ -162,46 +143,33 @@
|
||||
<script src="/scripts/controllers/controllers.js"></script>
|
||||
<script src="/scripts/controllers/dashboard.js"></script>
|
||||
<script src="/scripts/controllers/admin_controllers.js"></script>
|
||||
<script src="/scripts/controllers/data_sources.js"></script>
|
||||
<script src="/scripts/controllers/query_view.js"></script>
|
||||
<script src="/scripts/controllers/query_source.js"></script>
|
||||
<script src="/scripts/controllers/users.js"></script>
|
||||
<script src="/scripts/visualizations/base.js"></script>
|
||||
<script src="/scripts/visualizations/chart.js"></script>
|
||||
<script src="/scripts/visualizations/cohort.js"></script>
|
||||
<script src="/scripts/visualizations/map.js"></script>
|
||||
<script src="/scripts/visualizations/counter.js"></script>
|
||||
<script src="/scripts/visualizations/boxplot.js"></script>
|
||||
<script src="/scripts/visualizations/box.js"></script>
|
||||
<script src="/scripts/visualizations/table.js"></script>
|
||||
<script src="/scripts/visualizations/pivot.js"></script>
|
||||
<script src="/scripts/visualizations/date_range_selector.js"></script>
|
||||
<script src="/scripts/directives/directives.js"></script>
|
||||
<script src="/scripts/directives/query_directives.js"></script>
|
||||
<script src="/scripts/directives/data_source_directives.js"></script>
|
||||
<script src="/scripts/directives/dashboard_directives.js"></script>
|
||||
<script src="/scripts/filters.js"></script>
|
||||
<script src="/scripts/controllers/alerts.js"></script>
|
||||
<!-- endbuild -->
|
||||
|
||||
<script>
|
||||
// TODO: move currentUser & features to be an Angular service
|
||||
var clientConfig = {{ client_config|safe }};
|
||||
var basePath = "{{base_href}}";
|
||||
var featureFlags = {{ features|safe }};
|
||||
var currentUser = {{ user|safe }};
|
||||
var currentOrgSlug = "{{ org_slug }}";
|
||||
|
||||
currentUser.canEdit = function(object) {
|
||||
var user_id = object.user_id || (object.user && object.user.id);
|
||||
return this.hasPermission('admin') || (user_id && (user_id == currentUser.id));
|
||||
return user_id && (user_id == currentUser.id);
|
||||
};
|
||||
|
||||
currentUser.hasPermission = function(permission) {
|
||||
return this.permissions.indexOf(permission) != -1;
|
||||
};
|
||||
|
||||
currentUser.isAdmin = currentUser.hasPermission('admin');
|
||||
|
||||
}
|
||||
|
||||
{{ analytics|safe }}
|
||||
</script>
|
||||
|
||||
@@ -13,10 +13,6 @@
|
||||
<link rel="stylesheet" href="/styles/redash.css">
|
||||
<link rel="stylesheet" href="/styles/login.css">
|
||||
<!-- endbuild -->
|
||||
|
||||
<link rel="icon" type="image/png" sizes="32x32" href="/images/favicon-32x32.png">
|
||||
<link rel="icon" type="image/png" sizes="96x96" href="/images/favicon-96x96.png">
|
||||
<link rel="icon" type="image/png" sizes="16x16" href="/images/favicon-16x16.png">
|
||||
</head>
|
||||
<body>
|
||||
|
||||
@@ -30,39 +26,19 @@
|
||||
<span class="icon-bar"></span>
|
||||
<span class="icon-bar"></span>
|
||||
</button>
|
||||
<a class="navbar-brand" href="/"><img src="/images/redash_icon_small.png"/></a>
|
||||
<a class="navbar-brand" href="/"><strong>{{name}}</strong></a>
|
||||
</div>
|
||||
</div>
|
||||
</nav>
|
||||
|
||||
<div class="container">
|
||||
<div class="row">
|
||||
{% with messages = get_flashed_messages() %}
|
||||
{% if messages %}
|
||||
{% for message in messages %}
|
||||
<div class="alert alert-warning" role="alert">{{ message }}</div>
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{% endwith %}
|
||||
|
||||
<div class="main">
|
||||
{% if show_google_openid %}
|
||||
|
||||
<div class="row">
|
||||
<a href="{{ google_auth_url }}"><img src="/google_login.png" class="login-button"/></a>
|
||||
</div>
|
||||
|
||||
<div class="login-or">
|
||||
<hr class="hr-or">
|
||||
<span class="span-or">or</span>
|
||||
</div>
|
||||
|
||||
{% endif %}
|
||||
|
||||
{% if show_saml_login %}
|
||||
|
||||
<div class="row">
|
||||
<a href="/saml/login">SAML Login</a>
|
||||
<a href="/oauth/google?next={{next}}"><img src="/google_login.png" class="login-button"/></a>
|
||||
</div>
|
||||
|
||||
<div class="login-or">
|
||||
@@ -74,8 +50,8 @@
|
||||
|
||||
<form role="form" method="post" name="login">
|
||||
<div class="form-group">
|
||||
<label for="inputEmail">Email</label>
|
||||
<input type="text" class="form-control" id="inputEmail" name="email" value="{{email}}">
|
||||
<label for="inputUsernameEmail">Username or email</label>
|
||||
<input type="text" class="form-control" id="inputUsernameEmail" name="username" value="{{username}}">
|
||||
</div>
|
||||
<div class="form-group">
|
||||
<!--<a class="pull-right" href="#">Forgot password?</a>-->
|
||||
|
||||
@@ -4,29 +4,33 @@ angular.module('redash', [
|
||||
'redash.controllers',
|
||||
'redash.filters',
|
||||
'redash.services',
|
||||
'redash.renderers',
|
||||
'redash.visualization',
|
||||
'plotly',
|
||||
'plotly-chart',
|
||||
'highchart',
|
||||
'ui.select2',
|
||||
'angular-growl',
|
||||
'angularMoment',
|
||||
'ui.bootstrap',
|
||||
'ui.sortable',
|
||||
'smartTable.table',
|
||||
'ngResource',
|
||||
'ngRoute',
|
||||
'ui.select',
|
||||
'naif.base64',
|
||||
'ui.bootstrap.showErrors',
|
||||
'ngSanitize'
|
||||
]).config(['$routeProvider', '$locationProvider', '$compileProvider', 'growlProvider', 'uiSelectConfig',
|
||||
function ($routeProvider, $locationProvider, $compileProvider, growlProvider, uiSelectConfig) {
|
||||
'ui.select'
|
||||
]).config(['$routeProvider', '$locationProvider', '$compileProvider', 'growlProvider',
|
||||
function ($routeProvider, $locationProvider, $compileProvider, growlProvider) {
|
||||
if (featureFlags.clientSideMetrics) {
|
||||
Bucky.setOptions({
|
||||
host: '/api/metrics'
|
||||
});
|
||||
|
||||
Bucky.requests.monitor('ajax_requsts');
|
||||
Bucky.requests.transforms.enable('dashboards', /dashboard\/[\w-]+/ig, '/dashboard');
|
||||
}
|
||||
|
||||
function getQuery(Query, $route) {
|
||||
var query = Query.get({'id': $route.current.params.queryId });
|
||||
return query.$promise;
|
||||
};
|
||||
|
||||
uiSelectConfig.theme = "bootstrap";
|
||||
|
||||
$compileProvider.aHrefSanitizationWhitelist(/^\s*(https?|http|data):/);
|
||||
$locationProvider.html5Mode(true);
|
||||
growlProvider.globalTimeToLive(2000);
|
||||
@@ -48,8 +52,7 @@ angular.module('redash', [
|
||||
resolve: {
|
||||
'query': ['Query', function newQuery(Query) {
|
||||
return Query.newQuery();
|
||||
}],
|
||||
'dataSources': ['DataSource', function (DataSource) { return DataSource.query().$promise }]
|
||||
}]
|
||||
}
|
||||
});
|
||||
$routeProvider.when('/queries/search', {
|
||||
@@ -77,56 +80,18 @@ angular.module('redash', [
|
||||
templateUrl: '/views/admin_status.html',
|
||||
controller: 'AdminStatusCtrl'
|
||||
});
|
||||
|
||||
$routeProvider.when('/alerts', {
|
||||
templateUrl: '/views/alerts/list.html',
|
||||
controller: 'AlertsCtrl'
|
||||
});
|
||||
$routeProvider.when('/alerts/:alertId', {
|
||||
templateUrl: '/views/alerts/edit.html',
|
||||
controller: 'AlertCtrl'
|
||||
$routeProvider.when('/admin/workers', {
|
||||
templateUrl: '/views/admin_workers.html',
|
||||
controller: 'AdminWorkersCtrl'
|
||||
});
|
||||
|
||||
$routeProvider.when('/data_sources/:dataSourceId', {
|
||||
templateUrl: '/views/data_sources/edit.html',
|
||||
controller: 'DataSourceCtrl'
|
||||
});
|
||||
$routeProvider.when('/data_sources', {
|
||||
templateUrl: '/views/data_sources/list.html',
|
||||
controller: 'DataSourcesCtrl'
|
||||
});
|
||||
|
||||
$routeProvider.when('/users/new', {
|
||||
templateUrl: '/views/users/new.html',
|
||||
controller: 'NewUserCtrl'
|
||||
});
|
||||
$routeProvider.when('/users/:userId', {
|
||||
templateUrl: '/views/users/show.html',
|
||||
reloadOnSearch: false,
|
||||
controller: 'UserCtrl'
|
||||
});
|
||||
$routeProvider.when('/users', {
|
||||
templateUrl: '/views/users/list.html',
|
||||
controller: 'UsersCtrl'
|
||||
});
|
||||
$routeProvider.when('/groups/:groupId/data_sources', {
|
||||
templateUrl: '/views/groups/show_data_sources.html',
|
||||
controller: 'GroupDataSourcesCtrl'
|
||||
});
|
||||
$routeProvider.when('/groups/:groupId', {
|
||||
templateUrl: '/views/groups/show.html',
|
||||
controller: 'GroupCtrl'
|
||||
});
|
||||
$routeProvider.when('/groups', {
|
||||
templateUrl: '/views/groups/list.html',
|
||||
controller: 'GroupsCtrl'
|
||||
})
|
||||
$routeProvider.when('/', {
|
||||
templateUrl: '/views/index.html',
|
||||
controller: 'IndexCtrl'
|
||||
});
|
||||
$routeProvider.when('/personal', {
|
||||
redirectTo: '/'
|
||||
templateUrl: '/views/personal.html',
|
||||
controller: 'PersonalIndexCtrl'
|
||||
});
|
||||
$routeProvider.otherwise({
|
||||
redirectTo: '/'
|
||||
|
||||
@@ -17,7 +17,7 @@
};

refresh();
};
}

angular.module('redash.admin_controllers', [])
.controller('AdminStatusCtrl', ['$scope', 'Events', '$http', '$timeout', AdminStatusCtrl])

@@ -1,176 +0,0 @@
|
||||
(function() {
|
||||
|
||||
var AlertsCtrl = function($scope, Events, Alert) {
|
||||
Events.record(currentUser, "view", "page", "alerts");
|
||||
$scope.$parent.pageTitle = "Alerts";
|
||||
|
||||
$scope.alerts = []
|
||||
Alert.query(function(alerts) {
|
||||
var stateClass = {
|
||||
'ok': 'label label-success',
|
||||
'triggered': 'label label-danger',
|
||||
'unknown': 'label label-warning'
|
||||
};
|
||||
_.each(alerts, function(alert) {
|
||||
alert.class = stateClass[alert.state];
|
||||
})
|
||||
$scope.alerts = alerts;
|
||||
|
||||
});
|
||||
|
||||
$scope.gridConfig = {
|
||||
isPaginationEnabled: true,
|
||||
itemsByPage: 50,
|
||||
maxSize: 8,
|
||||
};
|
||||
|
||||
|
||||
$scope.gridColumns = [
|
||||
{
|
||||
"label": "Name",
|
||||
"map": "name",
|
||||
"cellTemplate": '<a href="alerts/{{dataRow.id}}">{{dataRow.name}}</a> (<a href="queries/{{dataRow.query.id}}">query</a>)'
|
||||
},
|
||||
{
|
||||
'label': 'Created By',
|
||||
'map': 'user.name'
|
||||
},
|
||||
{
|
||||
'label': 'State',
|
||||
'cellTemplate': '<span ng-class="dataRow.class">{{dataRow.state | uppercase}}</span> since <span am-time-ago="dataRow.updated_at"></span>'
|
||||
},
|
||||
{
|
||||
'label': 'Created At',
|
||||
'cellTemplate': '<span am-time-ago="dataRow.created_at"></span>'
|
||||
}
|
||||
];
|
||||
};
|
||||
|
||||
var AlertCtrl = function($scope, $routeParams, $location, growl, Query, Events, Alert) {
|
||||
$scope.$parent.pageTitle = "Alerts";
|
||||
|
||||
$scope.alertId = $routeParams.alertId;
|
||||
if ($scope.alertId === "new") {
|
||||
Events.record(currentUser, 'view', 'page', 'alerts/new');
|
||||
} else {
|
||||
Events.record(currentUser, 'view', 'alert', $scope.alertId);
|
||||
}
|
||||
|
||||
$scope.onQuerySelected = function(item) {
|
||||
$scope.selectedQuery = item;
|
||||
item.getQueryResultPromise().then(function(result) {
|
||||
$scope.queryResult = result;
|
||||
$scope.alert.options.column = $scope.alert.options.column || result.getColumnNames()[0];
|
||||
});
|
||||
};
|
||||
|
||||
if ($scope.alertId === "new") {
|
||||
$scope.alert = new Alert({options: {}});
|
||||
} else {
|
||||
$scope.alert = Alert.get({id: $scope.alertId}, function(alert) {
|
||||
$scope.onQuerySelected(new Query($scope.alert.query));
|
||||
});
|
||||
}
|
||||
|
||||
$scope.ops = ['greater than', 'less than', 'equals'];
|
||||
$scope.selectedQuery = null;
|
||||
|
||||
$scope.getDefaultName = function() {
|
||||
if (!$scope.alert.query) {
|
||||
return undefined;
|
||||
}
|
||||
return _.template("<%= query.name %>: <%= options.column %> <%= options.op %> <%= options.value %>", $scope.alert);
|
||||
};
|
||||
|
||||
$scope.searchQueries = function (term) {
|
||||
if (!term || term.length < 3) {
|
||||
return;
|
||||
}
|
||||
|
||||
Query.search({q: term}, function(results) {
|
||||
$scope.queries = results;
|
||||
});
|
||||
};
|
||||
|
||||
$scope.saveChanges = function() {
|
||||
if ($scope.alert.name === undefined || $scope.alert.name === '') {
|
||||
$scope.alert.name = $scope.getDefaultName();
|
||||
}
|
||||
if ($scope.alert.rearm === '' || $scope.alert.rearm === 0) {
|
||||
$scope.alert.rearm = null;
|
||||
}
|
||||
$scope.alert.$save(function(alert) {
|
||||
growl.addSuccessMessage("Saved.");
|
||||
if ($scope.alertId === "new") {
|
||||
$location.path('/alerts/' + alert.id).replace();
|
||||
}
|
||||
}, function() {
|
||||
growl.addErrorMessage("Failed saving alert.");
|
||||
});
|
||||
};
|
||||
};
|
||||
|
||||
angular.module('redash.directives').directive('alertSubscribers', ['AlertSubscription', function (AlertSubscription) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
replace: true,
|
||||
templateUrl: '/views/alerts/subscribers.html',
|
||||
scope: {
|
||||
'alertId': '='
|
||||
},
|
||||
controller: function ($scope) {
|
||||
$scope.subscribers = AlertSubscription.query({alertId: $scope.alertId});
|
||||
}
|
||||
}
|
||||
}]);
|
||||
|
||||
angular.module('redash.directives').directive('subscribeButton', ['AlertSubscription', 'growl', function (AlertSubscription, growl) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
replace: true,
|
||||
template: '<button class="btn btn-default btn-xs" ng-click="toggleSubscription()"><i ng-class="class"></i></button>',
|
||||
controller: function ($scope) {
|
||||
var updateClass = function() {
|
||||
if ($scope.subscription) {
|
||||
$scope.class = "fa fa-eye-slash";
|
||||
} else {
|
||||
$scope.class = "fa fa-eye";
|
||||
}
|
||||
}
|
||||
|
||||
$scope.subscribers.$promise.then(function() {
|
||||
$scope.subscription = _.find($scope.subscribers, function(subscription) {
|
||||
return (subscription.user.email == currentUser.email);
|
||||
});
|
||||
|
||||
updateClass();
|
||||
});
|
||||
|
||||
$scope.toggleSubscription = function() {
|
||||
if ($scope.subscription) {
|
||||
$scope.subscription.$delete(function() {
|
||||
$scope.subscribers = _.without($scope.subscribers, $scope.subscription);
|
||||
$scope.subscription = undefined;
|
||||
updateClass();
|
||||
}, function() {
|
||||
growl.addErrorMessage("Failed saving subscription.");
|
||||
});
|
||||
} else {
|
||||
$scope.subscription = new AlertSubscription({alert_id: $scope.alertId});
|
||||
$scope.subscription.$save(function() {
|
||||
$scope.subscribers.push($scope.subscription);
|
||||
updateClass();
|
||||
}, function() {
|
||||
growl.addErrorMessage("Unsubscription failed.");
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}]);
|
||||
|
||||
angular.module('redash.controllers')
|
||||
.controller('AlertsCtrl', ['$scope', 'Events', 'Alert', AlertsCtrl])
|
||||
.controller('AlertCtrl', ['$scope', '$routeParams', '$location', 'growl', 'Query', 'Events', 'Alert', AlertCtrl])
|
||||
|
||||
})();
|
||||
@@ -3,8 +3,7 @@
if (!value) {
return "-";
}

return value.format(clientConfig.dateTimeFormat);
return value.toDate().toLocaleString();
};

var QuerySearchCtrl = function($scope, $location, $filter, Events, Query) {
@@ -24,7 +23,7 @@
|
||||
},
|
||||
{
|
||||
'label': 'Created By',
|
||||
'map': 'user.name'
|
||||
'map': 'user_name'
|
||||
},
|
||||
{
|
||||
'label': 'Created At',
|
||||
@@ -46,6 +45,7 @@
|
||||
Query.search({q: $scope.term }, function(results) {
|
||||
$scope.queries = _.map(results, function(query) {
|
||||
query.created_at = moment(query.created_at);
|
||||
query.user_name = query.user.name;
|
||||
return query;
|
||||
});
|
||||
});
|
||||
@@ -93,6 +93,7 @@
|
||||
$scope.allQueries = _.map(queries, function (query) {
|
||||
query.created_at = moment(query.created_at);
|
||||
query.retrieved_at = moment(query.retrieved_at);
|
||||
query.user_name = query.user.name;
|
||||
return query;
|
||||
});
|
||||
|
||||
@@ -107,7 +108,7 @@
|
||||
},
|
||||
{
|
||||
'label': 'Created By',
|
||||
'map': 'user.name'
|
||||
'map': 'user_name'
|
||||
},
|
||||
{
|
||||
'label': 'Created At',
|
||||
@@ -151,21 +152,13 @@
|
||||
}
|
||||
|
||||
var MainCtrl = function ($scope, $location, Dashboard, notifications) {
|
||||
$scope.$on("$routeChangeSuccess", function (event, current, previous, rejection) {
|
||||
if ($scope.showPermissionError) {
|
||||
$scope.showPermissionError = false;
|
||||
}
|
||||
});
|
||||
if (featureFlags.clientSideMetrics) {
|
||||
$scope.$on('$locationChangeSuccess', function(event, newLocation, oldLocation) {
|
||||
// This will be called once per actual page load.
|
||||
Bucky.sendPagePerformance();
|
||||
});
|
||||
}
|
||||
|
||||
$scope.$on("$routeChangeError", function (event, current, previous, rejection) {
|
||||
if (rejection.status === 403) {
|
||||
$scope.showPermissionError = true;
|
||||
}
|
||||
});
|
||||
|
||||
$scope.location = String(document.location);
|
||||
$scope.version = clientConfig.version;
|
||||
$scope.newVersionAvailable = clientConfig.newVersionAvailable && currentUser.hasPermission("admin");
|
||||
|
||||
$scope.dashboards = [];
|
||||
$scope.reloadDashboards = function () {
|
||||
@@ -200,17 +193,41 @@
|
||||
});
|
||||
};
|
||||
|
||||
var IndexCtrl = function ($scope, Events, Dashboard, Query) {
|
||||
var IndexCtrl = function ($scope, Events, Dashboard) {
|
||||
Events.record(currentUser, "view", "page", "homepage");
|
||||
$scope.$parent.pageTitle = "Home";
|
||||
|
||||
$scope.archiveDashboard = function (dashboard) {
|
||||
if (confirm('Are you sure you want to delete "' + dashboard.name + '" dashboard?')) {
|
||||
Events.record(currentUser, "archive", "dashboard", dashboard.id);
|
||||
dashboard.$delete(function () {
|
||||
$scope.$parent.reloadDashboards();
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
var PersonalIndexCtrl = function ($scope, Events, Dashboard, Query) {
|
||||
Events.record(currentUser, "view", "page", "personal_homepage");
|
||||
$scope.$parent.pageTitle = "Home";
|
||||
|
||||
$scope.recentQueries = Query.recent();
|
||||
$scope.recentDashboards = Dashboard.recent();
|
||||
|
||||
$scope.archiveDashboard = function (dashboard) {
|
||||
if (confirm('Are you sure you want to delete "' + dashboard.name + '" dashboard?')) {
|
||||
Events.record(currentUser, "archive", "dashboard", dashboard.id);
|
||||
dashboard.$delete(function () {
|
||||
$scope.$parent.reloadDashboards();
|
||||
});
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
angular.module('redash.controllers', [])
|
||||
.controller('QueriesCtrl', ['$scope', '$http', '$location', '$filter', 'Query', QueriesCtrl])
|
||||
.controller('IndexCtrl', ['$scope', 'Events', 'Dashboard', 'Query', IndexCtrl])
|
||||
.controller('IndexCtrl', ['$scope', 'Events', 'Dashboard', IndexCtrl])
|
||||
.controller('PersonalIndexCtrl', ['$scope', 'Events', 'Dashboard', 'Query', PersonalIndexCtrl])
|
||||
.controller('MainCtrl', ['$scope', '$location', 'Dashboard', 'notifications', MainCtrl])
|
||||
.controller('QuerySearchCtrl', ['$scope', '$location', '$filter', 'Events', 'Query', QuerySearchCtrl]);
|
||||
})();
|
||||
|
||||
@@ -1,72 +1,71 @@
|
||||
(function() {
|
||||
var DashboardCtrl = function($scope, Events, Widget, $routeParams, $location, $http, $timeout, $q, Dashboard) {
|
||||
$scope.refreshEnabled = false;
|
||||
$scope.isFullscreen = false;
|
||||
$scope.refreshRate = 60;
|
||||
|
||||
var renderDashboard = function (dashboard) {
|
||||
$scope.$parent.pageTitle = dashboard.name;
|
||||
var loadDashboard = _.throttle(function() {
|
||||
$scope.dashboard = Dashboard.get({ slug: $routeParams.dashboardSlug }, function (dashboard) {
|
||||
Events.record(currentUser, "view", "dashboard", dashboard.id);
|
||||
|
||||
var promises = [];
|
||||
$scope.$parent.pageTitle = dashboard.name;
|
||||
|
||||
_.each($scope.dashboard.widgets, function (row) {
|
||||
return _.each(row, function (widget) {
|
||||
if (widget.visualization) {
|
||||
var queryResult = widget.getQuery().getQueryResult();
|
||||
if (angular.isDefined(queryResult))
|
||||
promises.push(queryResult.toPromise());
|
||||
}
|
||||
});
|
||||
});
|
||||
var promises = [];
|
||||
|
||||
$q.all(promises).then(function(queryResults) {
|
||||
var filters = {};
|
||||
_.each(queryResults, function(queryResult) {
|
||||
var queryFilters = queryResult.getFilters();
|
||||
_.each(queryFilters, function (queryFilter) {
|
||||
var hasQueryStringValue = _.has($location.search(), queryFilter.name);
|
||||
$scope.dashboard.widgets = _.map($scope.dashboard.widgets, function (row) {
|
||||
return _.map(row, function (widget) {
|
||||
var w = new Widget(widget);
|
||||
|
||||
if (!(hasQueryStringValue || dashboard.dashboard_filters_enabled)) {
|
||||
// If dashboard filters not enabled, or no query string value given, skip filters linking.
|
||||
return;
|
||||
if (w.visualization) {
|
||||
promises.push(w.getQuery().getQueryResult().toPromise());
|
||||
}
|
||||
|
||||
if (!_.has(filters, queryFilter.name)) {
|
||||
var filter = _.extend({}, queryFilter);
|
||||
filters[filter.name] = filter;
|
||||
filters[filter.name].originFilters = [];
|
||||
if (hasQueryStringValue) {
|
||||
filter.current = $location.search()[filter.name];
|
||||
}
|
||||
|
||||
$scope.$watch(function () { return filter.current }, function (value) {
|
||||
_.each(filter.originFilters, function (originFilter) {
|
||||
originFilter.current = value;
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// TODO: merge values.
|
||||
filters[queryFilter.name].originFilters.push(queryFilter);
|
||||
return w;
|
||||
});
|
||||
});
|
||||
|
||||
$scope.filters = _.values(filters);
|
||||
});
|
||||
}
|
||||
$q.all(promises).then(function(queryResults) {
|
||||
var filters = {};
|
||||
_.each(queryResults, function(queryResult) {
|
||||
var queryFilters = queryResult.getFilters();
|
||||
_.each(queryFilters, function (queryFilter) {
|
||||
var hasQueryStringValue = _.has($location.search(), queryFilter.name);
|
||||
|
||||
var loadDashboard = _.throttle(function () {
|
||||
$scope.dashboard = Dashboard.get({slug: $routeParams.dashboardSlug}, function (dashboard) {
|
||||
Events.record(currentUser, "view", "dashboard", dashboard.id);
|
||||
renderDashboard(dashboard);
|
||||
}, function () {
|
||||
// error...
|
||||
// try again. we wrap loadDashboard with throttle so it doesn't happen too often.\
|
||||
// we might want to consider exponential backoff and also move this as a general solution in $http/$resource for
|
||||
// all AJAX calls.
|
||||
loadDashboard();
|
||||
}
|
||||
);
|
||||
if (!(hasQueryStringValue || dashboard.dashboard_filters_enabled)) {
|
||||
// If dashboard filters not enabled, or no query string value given, skip filters linking.
|
||||
return;
|
||||
}
|
||||
|
||||
if (!_.has(filters, queryFilter.name)) {
|
||||
var filter = _.extend({}, queryFilter);
|
||||
filters[filter.name] = filter;
|
||||
filters[filter.name].originFilters = [];
|
||||
if (hasQueryStringValue) {
|
||||
filter.current = $location.search()[filter.name];
|
||||
}
|
||||
|
||||
$scope.$watch(function () { return filter.current }, function (value) {
|
||||
_.each(filter.originFilters, function (originFilter) {
|
||||
originFilter.current = value;
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
// TODO: merge values.
|
||||
filters[queryFilter.name].originFilters.push(queryFilter);
|
||||
});
|
||||
});
|
||||
|
||||
$scope.filters = _.values(filters);
|
||||
});
|
||||
|
||||
|
||||
}, function () {
|
||||
// error...
|
||||
// try again. we wrap loadDashboard with throttle so it doesn't happen too often.\
|
||||
// we might want to consider exponential backoff and also move this as a general solution in $http/$resource for
|
||||
// all AJAX calls.
|
||||
loadDashboard();
|
||||
});
|
||||
}, 1000);
|
||||
|
||||
loadDashboard();
|
||||
@@ -95,19 +94,6 @@
|
||||
}
|
||||
};
|
||||
|
||||
$scope.archiveDashboard = function () {
|
||||
if (confirm('Are you sure you want to archive the "' + $scope.dashboard.name + '" dashboard?')) {
|
||||
Events.record(currentUser, "archive", "dashboard", $scope.dashboard.id);
|
||||
$scope.dashboard.$delete(function () {
|
||||
$scope.$parent.reloadDashboards();
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
$scope.toggleFullscreen = function() {
|
||||
$scope.isFullscreen = !$scope.isFullscreen;
|
||||
};
|
||||
|
||||
$scope.triggerRefresh = function() {
|
||||
$scope.refreshEnabled = !$scope.refreshEnabled;
|
||||
|
||||
@@ -137,16 +123,12 @@
|
||||
|
||||
Events.record(currentUser, "delete", "widget", $scope.widget.id);
|
||||
|
||||
$scope.widget.$delete(function(response) {
|
||||
$scope.widget.$delete(function() {
|
||||
$scope.dashboard.widgets = _.map($scope.dashboard.widgets, function(row) {
|
||||
return _.filter(row, function(widget) {
|
||||
return widget.id != undefined;
|
||||
})
|
||||
});
|
||||
|
||||
$scope.dashboard.widgets = _.filter($scope.dashboard.widgets, function(row) { return row.length > 0 });
|
||||
|
||||
$scope.dashboard.layout = response.layout;
|
||||
});
|
||||
};
|
||||
|
||||
@@ -162,8 +144,6 @@
|
||||
$scope.queryResult = $scope.query.getQueryResult(maxAge, parameters);
|
||||
|
||||
$scope.type = 'visualization';
|
||||
} else if ($scope.widget.restricted) {
|
||||
$scope.type = 'restricted';
|
||||
} else {
|
||||
$scope.type = 'textbox';
|
||||
}
|
||||
|
||||
@@ -1,47 +0,0 @@
|
||||
(function () {
|
||||
var DataSourcesCtrl = function ($scope, $location, growl, Events, DataSource) {
|
||||
Events.record(currentUser, "view", "page", "admin/data_sources");
|
||||
$scope.$parent.pageTitle = "Data Sources";
|
||||
|
||||
$scope.dataSources = DataSource.query();
|
||||
|
||||
$scope.openDataSource = function(datasource) {
|
||||
$location.path('/data_sources/' + datasource.id);
|
||||
};
|
||||
|
||||
$scope.deleteDataSource = function(event, datasource) {
|
||||
event.stopPropagation();
|
||||
Events.record(currentUser, "delete", "datasource", datasource.id);
|
||||
datasource.$delete(function(resource) {
|
||||
growl.addSuccessMessage("Data source deleted successfully.");
|
||||
this.$parent.dataSources = _.without(this.dataSources, resource);
|
||||
}.bind(this), function(httpResponse) {
|
||||
console.log("Failed to delete data source: ", httpResponse.status, httpResponse.statusText, httpResponse.data);
|
||||
growl.addErrorMessage("Failed to delete data source.");
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
var DataSourceCtrl = function ($scope, $routeParams, $http, $location, Events, DataSource) {
|
||||
Events.record(currentUser, "view", "page", "admin/data_source");
|
||||
$scope.$parent.pageTitle = "Data Sources";
|
||||
|
||||
$scope.dataSourceId = $routeParams.dataSourceId;
|
||||
|
||||
if ($scope.dataSourceId == "new") {
|
||||
$scope.dataSource = new DataSource({options: {}});
|
||||
} else {
|
||||
$scope.dataSource = DataSource.get({id: $routeParams.dataSourceId});
|
||||
}
|
||||
|
||||
$scope.$watch('dataSource.id', function(id) {
|
||||
if (id != $scope.dataSourceId && id !== undefined) {
|
||||
$location.path('/data_sources/' + id).replace();
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
angular.module('redash.controllers')
|
||||
.controller('DataSourcesCtrl', ['$scope', '$location', 'growl', 'Events', 'DataSource', DataSourcesCtrl])
|
||||
.controller('DataSourceCtrl', ['$scope', '$routeParams', '$http', '$location', 'Events', 'DataSource', DataSourceCtrl])
|
||||
})();
|
||||
@@ -17,9 +17,8 @@
saveQuery = $scope.saveQuery;

$scope.sourceMode = true;
$scope.canEdit = currentUser.canEdit($scope.query);// TODO: bring this back? || clientConfig.allowAllToEditQueries;
$scope.canEdit = true;
$scope.isDirty = false;
$scope.base_url = $location.protocol()+"://"+$location.host()+":"+$location.port();

$scope.newVisualization = undefined;

@@ -68,9 +67,9 @@
|
||||
|
||||
$scope.duplicateQuery = function() {
|
||||
Events.record(currentUser, 'fork', 'query', $scope.query.id);
|
||||
$scope.query.name = 'Copy of (#'+$scope.query.id+') '+$scope.query.name;
|
||||
$scope.query.id = null;
|
||||
$scope.query.schedule = null;
|
||||
|
||||
$scope.saveQuery({
|
||||
successMessage: 'Query forked',
|
||||
errorMessage: 'Query could not be forked'
|
||||
|
||||
@@ -4,8 +4,6 @@
|
||||
function QueryViewCtrl($scope, Events, $route, $location, notifications, growl, $modal, Query, DataSource) {
|
||||
var DEFAULT_TAB = 'table';
|
||||
|
||||
$scope.base_url = $location.protocol()+"://"+$location.host()+":"+$location.port();
|
||||
|
||||
var getQueryResult = function(maxAge) {
|
||||
// Collect params, and getQueryResult with params; getQueryResult merges it into the query
|
||||
var parameters = Query.collectParamsFromQueryString($location, $scope.query);
|
||||
@@ -17,64 +15,16 @@
|
||||
maxAge = -1;
|
||||
}
|
||||
|
||||
$scope.showLog = false;
|
||||
$scope.queryResult = $scope.query.getQueryResult(maxAge, parameters);
|
||||
}
|
||||
|
||||
var getDataSourceId = function() {
|
||||
// Try to get the query's data source id
|
||||
var dataSourceId = $scope.query.data_source_id;
|
||||
|
||||
// If there is no source yet, then parse what we have in localStorage
|
||||
// e.g. `null` -> `NaN`, malformed data -> `NaN`, "1" -> 1
|
||||
if (dataSourceId === undefined) {
|
||||
dataSourceId = parseInt(localStorage.lastSelectedDataSourceId, 10);
|
||||
}
|
||||
|
||||
// If we had an invalid value in localStorage (e.g. nothing, deleted source), then use the first data source
|
||||
var isValidDataSourceId = !isNaN(dataSourceId) && _.some($scope.dataSources, function(ds) {
|
||||
return ds.id == dataSourceId;
|
||||
});
|
||||
|
||||
if (!isValidDataSourceId) {
|
||||
dataSourceId = $scope.dataSources[0].id;
|
||||
}
|
||||
|
||||
// Return our data source id
|
||||
return dataSourceId;
|
||||
}
|
||||
|
||||
var updateDataSources = function(dataSources) {
|
||||
// Filter out data sources the user can't query (or used by current query):
|
||||
$scope.dataSources = _.filter(dataSources, function(dataSource) {
|
||||
return !dataSource.view_only || dataSource.id === $scope.query.data_source_id;
|
||||
});
|
||||
|
||||
if ($scope.dataSources.length == 0) {
|
||||
$scope.noDataSources = true;
|
||||
return;
|
||||
}
|
||||
|
||||
if ($scope.query.isNew()) {
|
||||
$scope.query.data_source_id = getDataSourceId();
|
||||
}
|
||||
|
||||
$scope.dataSource = _.find(dataSources, function(ds) { return ds.id == $scope.query.data_source_id; });
|
||||
|
||||
//$scope.canExecuteQuery = $scope.canExecuteQuery && _.some(dataSources, function(ds) { return !ds.view_only });
|
||||
$scope.canCreateQuery = _.any(dataSources, function(ds) { return !ds.view_only });
|
||||
|
||||
updateSchema();
|
||||
}
|
||||
|
||||
|
||||
$scope.dataSource = {};
|
||||
$scope.query = $route.current.locals.query;
|
||||
|
||||
var updateSchema = function() {
|
||||
$scope.hasSchema = false;
|
||||
$scope.editorSize = "col-md-12";
|
||||
DataSource.getSchema({id: $scope.query.data_source_id}, function(data) {
|
||||
var dataSourceId = $scope.query.data_source_id || $scope.dataSources[0].id;
|
||||
DataSource.getSchema({id: dataSourceId}, function(data) {
|
||||
if (data && data.length > 0) {
|
||||
$scope.schema = data;
|
||||
_.each(data, function(table) {
|
||||
@@ -97,23 +47,14 @@
|
||||
$scope.isQueryOwner = (currentUser.id === $scope.query.user.id) || currentUser.hasPermission('admin');
|
||||
$scope.canViewSource = currentUser.hasPermission('view_source');
|
||||
|
||||
$scope.canExecuteQuery = function() {
|
||||
return currentUser.hasPermission('execute_query') && !$scope.dataSource.view_only;
|
||||
}
|
||||
|
||||
$scope.canScheduleQuery = currentUser.hasPermission('schedule_query');
|
||||
|
||||
if ($route.current.locals.dataSources) {
|
||||
$scope.dataSources = $route.current.locals.dataSources;
|
||||
updateDataSources($route.current.locals.dataSources);
|
||||
} else {
|
||||
$scope.dataSources = DataSource.query(updateDataSources);
|
||||
}
|
||||
$scope.dataSources = DataSource.get(function(dataSources) {
|
||||
updateSchema();
|
||||
$scope.query.data_source_id = $scope.query.data_source_id || dataSources[0].id;
|
||||
});
|
||||
|
||||
// in view mode, latest dataset is always visible
|
||||
// source mode changes this behavior
|
||||
$scope.showDataset = true;
|
||||
$scope.showLog = false;
|
||||
|
||||
$scope.lockButton = function(lock) {
|
||||
$scope.queryExecuting = lock;
|
||||
@@ -156,14 +97,6 @@
|
||||
};
|
||||
|
||||
$scope.executeQuery = function() {
|
||||
if (!$scope.canExecuteQuery()) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!$scope.query.query) {
|
||||
return;
|
||||
}
|
||||
|
||||
getQueryResult(0);
|
||||
$scope.lockButton(true);
|
||||
$scope.cancelling = false;
|
||||
@@ -175,21 +108,21 @@
|
||||
$scope.queryResult.cancelExecution();
|
||||
Events.record(currentUser, 'cancel_execute', 'query', $scope.query.id);
|
||||
};
|
||||
|
||||
|
||||
$scope.archiveQuery = function(options, data) {
|
||||
if (data) {
|
||||
data.id = $scope.query.id;
|
||||
} else {
|
||||
data = $scope.query;
|
||||
}
|
||||
|
||||
|
||||
$scope.isDirty = false;
|
||||
|
||||
|
||||
options = _.extend({}, {
|
||||
successMessage: 'Query archived',
|
||||
errorMessage: 'Query could not be archived'
|
||||
}, options);
|
||||
|
||||
|
||||
return Query.delete({id: data.id}, function() {
|
||||
$scope.query.is_archived = true;
|
||||
$scope.query.schedule = null;
|
||||
@@ -203,7 +136,6 @@
|
||||
|
||||
$scope.updateDataSource = function() {
|
||||
Events.record(currentUser, 'update_data_source', 'query', $scope.query.id);
|
||||
localStorage.lastSelectedDataSourceId = $scope.query.data_source_id;
|
||||
|
||||
$scope.query.latest_query_data = null;
|
||||
$scope.query.latest_query_data_id = null;
|
||||
@@ -217,7 +149,6 @@
|
||||
}
|
||||
|
||||
updateSchema();
|
||||
$scope.dataSource = _.find($scope.dataSources, function(ds) { return ds.id == $scope.query.data_source_id; });
|
||||
$scope.executeQuery();
|
||||
};
|
||||
|
||||
@@ -263,14 +194,10 @@
|
||||
if (status === 'done' || status === 'failed') {
|
||||
$scope.lockButton(false);
|
||||
}
|
||||
|
||||
if ($scope.queryResult.getLog() != null) {
|
||||
$scope.showLog = true;
|
||||
}
|
||||
});
|
||||
|
||||
$scope.openScheduleForm = function() {
|
||||
if (!$scope.isQueryOwner || !$scope.canScheduleQuery) {
|
||||
if (!$scope.isQueryOwner) {
|
||||
return;
|
||||
};
|
||||
|
||||
|
||||
@@ -1,349 +0,0 @@
|
||||
(function () {
|
||||
var GroupsCtrl = function ($scope, $location, $modal, growl, Events, Group) {
|
||||
Events.record(currentUser, "view", "page", "groups");
|
||||
$scope.$parent.pageTitle = "Groups";
|
||||
|
||||
$scope.gridConfig = {
|
||||
isPaginationEnabled: true,
|
||||
itemsByPage: 20,
|
||||
maxSize: 8,
|
||||
};
|
||||
|
||||
$scope.gridColumns = [
|
||||
{
|
||||
"label": "Name",
|
||||
"map": "name",
|
||||
"cellTemplate": '<a href="groups/{{dataRow.id}}">{{dataRow.name}}</a>'
|
||||
}
|
||||
];
|
||||
|
||||
$scope.groups = [];
|
||||
Group.query(function(groups) {
|
||||
$scope.groups = groups;
|
||||
});
|
||||
|
||||
$scope.newGroup = function() {
|
||||
$modal.open({
|
||||
templateUrl: '/views/groups/edit_group_form.html',
|
||||
size: 'sm',
|
||||
resolve: {
|
||||
group: function() { return new Group({}); }
|
||||
},
|
||||
controller: ['$scope', '$modalInstance', 'group', function($scope, $modalInstance, group) {
|
||||
$scope.group = group;
|
||||
var newGroup = group.id === undefined;
|
||||
|
||||
if (newGroup) {
|
||||
$scope.saveButtonText = "Create";
|
||||
$scope.title = "Create a New Group";
|
||||
} else {
|
||||
$scope.saveButtonText = "Save";
|
||||
$scope.title = "Edit Group";
|
||||
}
|
||||
|
||||
$scope.ok = function() {
|
||||
$scope.group.$save(function(group) {
|
||||
if (newGroup) {
|
||||
$location.path('/groups/' + group.id).replace();
|
||||
$modalInstance.close();
|
||||
} else {
|
||||
$modalInstance.close();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
$scope.cancel = function() {
|
||||
$modalInstance.close();
|
||||
}
|
||||
}]
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
var usersNav = function($location) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
replace: true,
|
||||
template:
|
||||
'<ul class="nav nav-tabs">' +
|
||||
'<li role="presentation" ng-class="{\'active\': usersPage }"><a href="users">Users</a></li>' +
|
||||
'<li role="presentation" ng-class="{\'active\': groupsPage }" ng-if="showGroupsLink"><a href="groups">Groups</a></li>' +
|
||||
'</ul>',
|
||||
controller: ['$scope', function ($scope) {
|
||||
$scope.usersPage = _.string.startsWith($location.path(), '/users');
|
||||
$scope.groupsPage = _.string.startsWith($location.path(), '/groups');
|
||||
$scope.showGroupsLink = currentUser.hasPermission('list_users');
|
||||
}]
|
||||
}
|
||||
}
|
||||
|
||||
var groupName = function ($location, growl) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
'group': '='
|
||||
},
|
||||
transclude: true,
|
||||
template:
|
||||
'<h2>'+
|
||||
'<edit-in-place editable="canEdit()" done="saveName" ignore-blanks=\'true\' value="group.name"></edit-in-place> ' +
|
||||
'<button class="btn btn-xs btn-danger" ng-if="canEdit()" ng-click="deleteGroup()">Delete this group</button>' +
|
||||
'</h2>',
|
||||
replace: true,
|
||||
controller: ['$scope', function ($scope) {
|
||||
$scope.canEdit = function() {
|
||||
return currentUser.isAdmin && $scope.group.type != 'builtin';
|
||||
};
|
||||
|
||||
$scope.saveName = function() {
|
||||
$scope.group.$save();
|
||||
};
|
||||
|
||||
$scope.deleteGroup = function() {
|
||||
if (confirm("Are you sure you want to delete this group?")) {
|
||||
$scope.group.$delete(function() {
|
||||
$location.path('/groups').replace();
|
||||
growl.addSuccessMessage("Group deleted successfully.");
|
||||
})
|
||||
}
|
||||
}
|
||||
}]
|
||||
}
|
||||
};
|
||||
|
||||
var GroupDataSourcesCtrl = function($scope, $routeParams, $http, $location, growl, Events, Group, DataSource) {
|
||||
Events.record(currentUser, "view", "group_data_sources", $scope.groupId);
|
||||
$scope.group = Group.get({id: $routeParams.groupId});
|
||||
$scope.dataSources = Group.dataSources({id: $routeParams.groupId});
|
||||
$scope.newDataSource = {};
|
||||
|
||||
$scope.findDataSource = function(search) {
|
||||
if ($scope.foundDataSources === undefined) {
|
||||
DataSource.query(function(dataSources) {
|
||||
var existingIds = _.map($scope.dataSources, function(m) { return m.id; });
|
||||
$scope.foundDataSources = _.filter(dataSources, function(ds) { return !_.contains(existingIds, ds.id); });
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
$scope.addDataSource = function(dataSource) {
|
||||
// Clear selection, to clear up the input control.
|
||||
$scope.newDataSource.selected = undefined;
|
||||
|
||||
$http.post('api/groups/' + $routeParams.groupId + '/data_sources', {'data_source_id': dataSource.id}).success(function(user) {
|
||||
dataSource.view_only = false;
|
||||
$scope.dataSources.unshift(dataSource);
|
||||
|
||||
if ($scope.foundDataSources) {
|
||||
$scope.foundDataSources = _.filter($scope.foundDataSources, function(ds) { return ds != dataSource; });
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
$scope.changePermission = function(dataSource, viewOnly) {
|
||||
$http.post('api/groups/' + $routeParams.groupId + '/data_sources/' + dataSource.id, {view_only: viewOnly}).success(function() {
|
||||
dataSource.view_only = viewOnly;
|
||||
});
|
||||
};
|
||||
|
||||
$scope.removeDataSource = function(dataSource) {
|
||||
$http.delete('api/groups/' + $routeParams.groupId + '/data_sources/' + dataSource.id).success(function() {
|
||||
$scope.dataSources = _.filter($scope.dataSources, function(ds) { return dataSource != ds; });
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
var GroupCtrl = function($scope, $routeParams, $http, $location, growl, Events, Group, User) {
|
||||
Events.record(currentUser, "view", "group", $scope.groupId);
|
||||
$scope.group = Group.get({id: $routeParams.groupId});
|
||||
$scope.members = Group.members({id: $routeParams.groupId});
|
||||
$scope.newMember = {};
|
||||
|
||||
$scope.findUser = function(search) {
|
||||
if (search == "") {
|
||||
return;
|
||||
}
|
||||
|
||||
if ($scope.foundUsers === undefined) {
|
||||
User.query(function(users) {
|
||||
var existingIds = _.map($scope.members, function(m) { return m.id; });
|
||||
_.each(users, function(user) { user.alreadyMember = _.contains(existingIds, user.id); });
|
||||
$scope.foundUsers = users;
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
$scope.addMember = function(user) {
|
||||
// Clear selection, to clear up the input control.
|
||||
$scope.newMember.selected = undefined;
|
||||
|
||||
$http.post('api/groups/' + $routeParams.groupId + '/members', {'user_id': user.id}).success(function() {
|
||||
$scope.members.unshift(user);
|
||||
user.alreadyMember = true;
|
||||
});
|
||||
};
|
||||
|
||||
$scope.removeMember = function(member) {
|
||||
$http.delete('api/groups/' + $routeParams.groupId + '/members/' + member.id).success(function() {
|
||||
$scope.members = _.filter($scope.members, function(m) { return m != member });
|
||||
|
||||
if ($scope.foundUsers) {
|
||||
_.each($scope.foundUsers, function(user) { if (user.id == member.id) { user.alreadyMember = false }; });
|
||||
}
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
var UsersCtrl = function ($scope, $location, growl, Events, User) {
|
||||
Events.record(currentUser, "view", "page", "users");
|
||||
$scope.$parent.pageTitle = "Users";
|
||||
|
||||
$scope.gridConfig = {
|
||||
isPaginationEnabled: true,
|
||||
itemsByPage: 20,
|
||||
maxSize: 8,
|
||||
};
|
||||
|
||||
$scope.gridColumns = [
|
||||
{
|
||||
"label": "Name",
|
||||
"map": "name",
|
||||
"cellTemplate": '<img src="{{dataRow.gravatar_url}}" height="40px"/> <a href="users/{{dataRow.id}}">{{dataRow.name}}</a>'
|
||||
},
|
||||
{
|
||||
'label': 'Joined',
|
||||
'cellTemplate': '<span am-time-ago="dataRow.created_at"></span>'
|
||||
}
|
||||
];
|
||||
|
||||
$scope.users = [];
|
||||
User.query(function(users) {
|
||||
$scope.users = users;
|
||||
});
|
||||
};
|
||||
|
||||
var UserCtrl = function ($scope, $routeParams, $http, $location, growl, Events, User) {
|
||||
$scope.$parent.pageTitle = "Users";
|
||||
|
||||
$scope.userId = $routeParams.userId;
|
||||
|
||||
if ($scope.userId === 'me') {
|
||||
$scope.userId = currentUser.id;
|
||||
}
|
||||
Events.record(currentUser, "view", "user", $scope.userId);
|
||||
$scope.canEdit = currentUser.hasPermission("admin") || currentUser.id === parseInt($scope.userId);
|
||||
$scope.showSettings = false;
|
||||
$scope.showPasswordSettings = false;
|
||||
|
||||
$scope.selectTab = function(tab) {
|
||||
_.each($scope.tabs, function(v, k) {
|
||||
$scope.tabs[k] = (k === tab);
|
||||
});
|
||||
};
|
||||
|
||||
$scope.setTab = function(tab) {
|
||||
$location.hash(tab);
|
||||
}
|
||||
|
||||
$scope.tabs = {
|
||||
profile: false,
|
||||
apiKey: false,
|
||||
settings: false,
|
||||
password: false
|
||||
};
|
||||
|
||||
$scope.selectTab($location.hash() || 'profile');
|
||||
|
||||
$scope.user = User.get({id: $scope.userId}, function(user) {
|
||||
if (user.auth_type == 'password') {
|
||||
$scope.showSettings = $scope.canEdit;
|
||||
$scope.showPasswordSettings = $scope.canEdit;
|
||||
}
|
||||
});
|
||||
|
||||
$scope.password = {
|
||||
current: '',
|
||||
new: '',
|
||||
newRepeat: ''
|
||||
};
|
||||
|
||||
$scope.savePassword = function(form) {
|
||||
$scope.$broadcast('show-errors-check-validity');
|
||||
|
||||
if (!form.$valid) {
|
||||
return;
|
||||
}
|
||||
|
||||
var data = {
|
||||
id: $scope.user.id,
|
||||
password: $scope.password.new,
|
||||
old_password: $scope.password.current
|
||||
};
|
||||
|
||||
User.save(data, function() {
|
||||
growl.addSuccessMessage("Password Saved.")
|
||||
$scope.password = {
|
||||
current: '',
|
||||
new: '',
|
||||
newRepeat: ''
|
||||
};
|
||||
}, function(error) {
|
||||
var message = error.data.message || "Failed saving password.";
|
||||
growl.addErrorMessage(message);
|
||||
});
|
||||
};
|
||||
|
||||
$scope.updateUser = function(form) {
|
||||
$scope.$broadcast('show-errors-check-validity');
|
||||
|
||||
if (!form.$valid) {
|
||||
return;
|
||||
}
|
||||
|
||||
var data = {
|
||||
id: $scope.user.id,
|
||||
name: $scope.user.name,
|
||||
email: $scope.user.email
|
||||
};
|
||||
|
||||
User.save(data, function(user) {
|
||||
growl.addSuccessMessage("Saved.")
|
||||
$scope.user = user;
|
||||
}, function(error) {
|
||||
var message = error.data.message || "Failed saving.";
|
||||
growl.addErrorMessage(message);
|
||||
});
|
||||
};
|
||||
};
|
||||
|
||||
var NewUserCtrl = function ($scope, $location, growl, Events, User) {
|
||||
Events.record(currentUser, "view", "page", "users/new");
|
||||
|
||||
$scope.user = new User({});
|
||||
$scope.saveUser = function() {
|
||||
$scope.$broadcast('show-errors-check-validity');
|
||||
|
||||
if (!$scope.userForm.$valid) {
|
||||
return;
|
||||
}
|
||||
|
||||
$scope.user.$save(function(user) {
|
||||
growl.addSuccessMessage("Saved.")
|
||||
$location.path('/users/' + user.id).replace();
|
||||
}, function(error) {
|
||||
var message = error.data.message || "Failed saving.";
|
||||
growl.addErrorMessage(message);
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
angular.module('redash.controllers')
|
||||
.controller('GroupsCtrl', ['$scope', '$location', '$modal', 'growl', 'Events', 'Group', GroupsCtrl])
|
||||
.directive('groupName', ['$location', 'growl', groupName])
|
||||
.directive('usersNav', ['$location', usersNav])
|
||||
.controller('GroupCtrl', ['$scope', '$routeParams', '$http', '$location', 'growl', 'Events', 'Group', 'User', GroupCtrl])
|
||||
.controller('GroupDataSourcesCtrl', ['$scope', '$routeParams', '$http', '$location', 'growl', 'Events', 'Group', 'DataSource', GroupDataSourcesCtrl])
|
||||
.controller('UsersCtrl', ['$scope', '$location', 'growl', 'Events', 'User', UsersCtrl])
|
||||
.controller('UserCtrl', ['$scope', '$routeParams', '$http', '$location', 'growl', 'Events', 'User', UserCtrl])
|
||||
.controller('NewUserCtrl', ['$scope', '$location', 'growl', 'Events', 'User', NewUserCtrl])
|
||||
})();
|
||||
@@ -31,7 +31,7 @@
|
||||
'<div class="panel-heading">{name}' +
|
||||
'</div></li>';
|
||||
|
||||
$scope.$watch('dashboard.layout', function() {
|
||||
$scope.$watch('dashboard.widgets && dashboard.widgets.length', function(widgets_length) {
|
||||
$timeout(function() {
|
||||
gridster.remove_all_widgets();
|
||||
|
||||
@@ -57,7 +57,7 @@
|
||||
});
|
||||
}
|
||||
});
|
||||
}, true);
|
||||
});
|
||||
|
||||
$scope.saveDashboard = function() {
|
||||
$scope.saveInProgress = true;
|
||||
@@ -81,15 +81,18 @@
|
||||
$scope.dashboard.layout = layout;
|
||||
|
||||
layout = JSON.stringify(layout);
|
||||
Dashboard.save({slug: $scope.dashboard.id, name: $scope.dashboard.name, layout: layout}, function(dashboard) {
|
||||
$scope.dashboard = dashboard;
|
||||
$http.post('/api/dashboards/' + $scope.dashboard.id, {
|
||||
'name': $scope.dashboard.name,
|
||||
'layout': layout
|
||||
}).success(function(response) {
|
||||
$scope.dashboard = new Dashboard(response);
|
||||
$scope.saveInProgress = false;
|
||||
$(element).modal('hide');
|
||||
});
|
||||
Events.record(currentUser, 'edit', 'dashboard', $scope.dashboard.id);
|
||||
} else {
|
||||
|
||||
$http.post('api/dashboards', {
|
||||
$http.post('/api/dashboards', {
|
||||
'name': $scope.dashboard.name
|
||||
}).success(function(response) {
|
||||
$(element).modal('hide');
|
||||
@@ -139,11 +142,6 @@
|
||||
|
||||
$scope.setType = function (type) {
|
||||
$scope.type = type;
|
||||
if (type == 'textbox') {
|
||||
$scope.widgetSizes.push({name: 'Hidden', value: 0});
|
||||
} else if ($scope.widgetSizes.length > 2) {
|
||||
$scope.widgetSizes.pop();
|
||||
}
|
||||
};
|
||||
|
||||
var reset = function() {
|
||||
@@ -188,6 +186,7 @@
|
||||
|
||||
$scope.saveWidget = function() {
|
||||
$scope.saveInProgress = true;
|
||||
|
||||
var widget = new Widget({
|
||||
'visualization_id': $scope.selectedVis && $scope.selectedVis.id,
|
||||
'dashboard_id': $scope.dashboard.id,
|
||||
@@ -220,4 +219,4 @@
|
||||
}
|
||||
}
|
||||
])
|
||||
})();
|
||||
})();
|
||||
@@ -1,83 +0,0 @@
|
||||
(function () {
|
||||
'use strict';
|
||||
|
||||
var directives = angular.module('redash.directives');
|
||||
|
||||
// Angular strips data- from the directive, so data-source-form becomes sourceForm...
|
||||
directives.directive('sourceForm', ['$http', 'growl', '$q', function ($http, growl, $q) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
replace: true,
|
||||
templateUrl: '/views/data_sources/form.html',
|
||||
scope: {
|
||||
'dataSource': '='
|
||||
},
|
||||
link: function ($scope) {
|
||||
var setType = function(types) {
|
||||
if ($scope.dataSource.type === undefined) {
|
||||
$scope.dataSource.type = types[0].type;
|
||||
return types[0];
|
||||
}
|
||||
|
||||
$scope.type = _.find(types, function (t) {
|
||||
return t.type == $scope.dataSource.type;
|
||||
});
|
||||
};
|
||||
|
||||
$scope.files = {};
|
||||
|
||||
$scope.$watchCollection('files', function() {
|
||||
_.each($scope.files, function(v, k) {
|
||||
if (v) {
|
||||
$scope.dataSource.options[k] = v.base64;
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
var typesPromise = $http.get('api/data_sources/types');
|
||||
|
||||
$q.all([typesPromise, $scope.dataSource.$promise]).then(function(responses) {
|
||||
var types = responses[0].data;
|
||||
setType(types);
|
||||
|
||||
$scope.dataSourceTypes = types;
|
||||
|
||||
_.each(types, function (type) {
|
||||
_.each(type.configuration_schema.properties, function (prop, name) {
|
||||
if (name == 'password' || name == 'passwd') {
|
||||
prop.type = 'password';
|
||||
}
|
||||
|
||||
if (_.string.endsWith(name, "File")) {
|
||||
prop.type = 'file';
|
||||
}
|
||||
|
||||
if (prop.type == 'boolean') {
|
||||
prop.type = 'checkbox';
|
||||
}
|
||||
|
||||
prop.required = _.contains(type.configuration_schema.required, name);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
$scope.$watch('dataSource.type', function(current, prev) {
|
||||
if (prev !== current) {
|
||||
if (prev !== undefined) {
|
||||
$scope.dataSource.options = {};
|
||||
}
|
||||
setType($scope.dataSourceTypes);
|
||||
}
|
||||
});
|
||||
|
||||
$scope.saveChanges = function() {
|
||||
$scope.dataSource.$save(function() {
|
||||
growl.addSuccessMessage("Saved.");
|
||||
}, function() {
|
||||
growl.addErrorMessage("Failed saving.");
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}]);
|
||||
})();
|
||||
@@ -40,20 +40,7 @@
|
||||
}
|
||||
}]);
|
||||
|
||||
directives.directive('hashLink', ['$location', function($location) {
|
||||
return {
|
||||
restrict: 'A',
|
||||
scope: {
|
||||
'hash': '@'
|
||||
},
|
||||
link: function (scope, element) {
|
||||
var basePath = $location.path().substring(1);
|
||||
element[0].href = basePath + "#" + scope.hash;
|
||||
}
|
||||
};
|
||||
}]);
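// A plausible usage sketch for hashLink (markup assumed, not taken from the templates): on /users/1 the element
//   <a hash-link hash="apiKey">API Key</a>
// gets href "users/1#apiKey", matching the tab keys UserCtrl sets up above.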
|
||||
|
||||
directives.directive('rdTab', ['$location', function ($location) {
|
||||
directives.directive('rdTab', function () {
|
||||
return {
|
||||
restrict: 'E',
|
||||
scope: {
|
||||
@@ -61,10 +48,9 @@
|
||||
'name': '@'
|
||||
},
|
||||
transclude: true,
|
||||
template: '<li class="rd-tab" ng-class="{active: tabId==selectedTab}"><a href="{{basePath}}#{{tabId}}">{{name}}<span ng-transclude></span></a></li>',
|
||||
template: '<li class="rd-tab" ng-class="{active: tabId==selectedTab}"><a href="#{{tabId}}">{{name}}<span ng-transclude></span></a></li>',
|
||||
replace: true,
|
||||
link: function (scope) {
|
||||
scope.basePath = $location.path().substring(1);
|
||||
scope.$watch(function () {
|
||||
return scope.$parent.selectedTab
|
||||
}, function (tab) {
|
||||
@@ -72,7 +58,7 @@
|
||||
});
|
||||
}
|
||||
}
|
||||
}]);
|
||||
});
|
||||
|
||||
directives.directive('rdTabs', ['$location', function ($location) {
|
||||
return {
|
||||
@@ -81,10 +67,9 @@
|
||||
tabsCollection: '=',
|
||||
selectedTab: '='
|
||||
},
|
||||
template: '<ul class="nav nav-tabs"><li ng-class="{active: tab==selectedTab}" ng-repeat="tab in tabsCollection"><a href="{{basePath}}#{{tab.key}}">{{tab.name}}</a></li></ul>',
|
||||
template: '<ul class="nav nav-tabs"><li ng-class="{active: tab==selectedTab}" ng-repeat="tab in tabsCollection"><a href="#{{tab.key}}">{{tab.name}}</a></li></ul>',
|
||||
replace: true,
|
||||
link: function ($scope, element, attrs) {
|
||||
$scope.basePath = $location.path().substring(1);
|
||||
$scope.selectTab = function (tabKey) {
|
||||
$scope.selectedTab = _.find($scope.tabsCollection, function (tab) {
|
||||
return tab.key == tabKey;
|
||||
@@ -262,82 +247,4 @@
|
||||
};
|
||||
}]
|
||||
);
|
||||
|
||||
directives.directive('compareTo', function () {
|
||||
return {
|
||||
require: "ngModel",
|
||||
scope: {
|
||||
otherModelValue: "=compareTo"
|
||||
},
|
||||
link: function (scope, element, attributes, ngModel) {
|
||||
var validate = function(value) {
|
||||
ngModel.$setValidity("compareTo", value === scope.otherModelValue);
|
||||
};
|
||||
|
||||
scope.$watch("otherModelValue", function() {
|
||||
validate(ngModel.$modelValue);
|
||||
});
|
||||
|
||||
ngModel.$parsers.push(function(value) {
|
||||
validate(value);
|
||||
return value;
|
||||
});
|
||||
}
|
||||
};
|
||||
});
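// A plausible usage sketch for the compareTo validator (markup assumed, not taken from the templates):
//   <input type="password" name="repeat" ng-model="password.newRepeat" compare-to="password.new">
// When the two values differ, the field's "compareTo" validity flag goes false, which the
// show-errors / input-errors helpers used elsewhere in this diff can then surface.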
|
||||
|
||||
directives.directive('inputErrors', function () {
|
||||
return {
|
||||
restrict: "E",
|
||||
templateUrl: "/views/directives/input_errors.html",
|
||||
replace: true,
|
||||
scope: {
|
||||
errors: "="
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
directives.directive('onDestroy', function () {
|
||||
/* This directive can be used to invoke a callback when an element is destroyed,
|
||||
A useful example is the following:
|
||||
<div ng-if="includeText" on-destroy="form.text = null;">
|
||||
<input type="text" ng-model="form.text">
|
||||
</div>
|
||||
*/
|
||||
return {
|
||||
restrict: "A",
|
||||
scope: {
|
||||
onDestroy: "&",
|
||||
},
|
||||
link: function(scope, elem, attrs) {
|
||||
scope.$on('$destroy', function() {
|
||||
scope.onDestroy();
|
||||
});
|
||||
}
|
||||
};
|
||||
});
|
||||
|
||||
directives.directive('colorBox', function () {
|
||||
return {
|
||||
restrict: "E",
|
||||
scope: {color: "="},
|
||||
template: "<span style='width: 12px; height: 12px; background-color: {{color}}; display: inline-block; margin-right: 5px;'></span>"
|
||||
};
|
||||
});
|
||||
|
||||
directives.directive('overlay', function() {
|
||||
return {
|
||||
restrict: "E",
|
||||
transclude: true,
|
||||
template: "" +
|
||||
'<div>' +
|
||||
'<div class="overlay"></div>' +
|
||||
'<div style="width: 100%; position:absolute; top:50px; z-index:2000">' +
|
||||
'<div class="well well-lg" style="width: 70%; margin: auto;" ng-transclude>' +
|
||||
'</div>' +
|
||||
'</div>' +
|
||||
'</div>'
|
||||
}
|
||||
})
|
||||
|
||||
})();
|
||||
|
||||
@@ -1,286 +0,0 @@
|
||||
(function () {
|
||||
'use strict';
|
||||
|
||||
var ColorPalette = {
|
||||
'Blue': '#4572A7',
|
||||
'Red': '#AA4643',
|
||||
'Green': '#89A54E',
|
||||
'Purple': '#80699B',
|
||||
'Cyan': '#3D96AE',
|
||||
'Orange': '#DB843D',
|
||||
'Light Blue': '#92A8CD',
|
||||
'Lilac': '#A47D7C',
|
||||
'Light Green': '#B5CA92',
|
||||
'Brown': '#A52A2A',
|
||||
'Black': '#000000',
|
||||
'Gray': '#808080',
|
||||
'Pink': '#FFC0CB',
|
||||
'Dark Blue': '#00008b'
|
||||
};
|
||||
|
||||
var ColorPaletteArray = _.values(ColorPalette);
|
||||
|
||||
var fillXValues = function(seriesList) {
|
||||
var xValues = _.sortBy(_.union.apply(_, _.pluck(seriesList, 'x')), _.identity);
|
||||
_.each(seriesList, function(series) {
|
||||
series.x = _.sortBy(series.x, _.identity);
|
||||
|
||||
_.each(xValues, function(value, index) {
|
||||
if (series.x[index] !== value) {
|
||||
series.x.splice(index, 0, value);
|
||||
series.y.splice(index, 0, null);
|
||||
}
|
||||
});
|
||||
});
|
||||
};
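// For illustration (values assumed): given
//   seriesA = {x: [1, 3], y: [10, 30]}  and  seriesB = {x: [2, 3], y: [20, 35]},
// the union of x values is [1, 2, 3], so after fillXValues
//   seriesA becomes {x: [1, 2, 3], y: [10, null, 30]}
//   seriesB becomes {x: [1, 2, 3], y: [null, 20, 35]}
// i.e. every series is padded with nulls so all of them share the same sorted x axis.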
|
||||
|
||||
var normalAreaStacking = function(seriesList) {
|
||||
fillXValues(seriesList);
|
||||
_.each(seriesList, function(series) {
|
||||
series.text = [];
|
||||
series.hoverinfo = 'text+name';
|
||||
});
|
||||
for (var i = 0; i < seriesList.length; i++) {
|
||||
for (var j = 0; j < seriesList[i].y.length; j++) {
|
||||
var sum = i > 0 ? seriesList[i-1].y[j] : 0;
|
||||
seriesList[i].text.push('Value: ' + seriesList[i].y[j] + '<br>Sum: ' + (sum + seriesList[i].y[j]));
|
||||
seriesList[i].y[j] += sum;
|
||||
}
|
||||
}
|
||||
};
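// A small worked example of the cumulative stacking above (numbers assumed): with two
// already-aligned series whose y values are [10, 30] and [20, 35], the second series is
// re-plotted as [10 + 20, 30 + 35] = [30, 65], while its hover text keeps the raw values
// ('Value: 20<br>Sum: 30', 'Value: 35<br>Sum: 65').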
|
||||
|
||||
var percentAreaStacking = function(seriesList) {
|
||||
if (seriesList.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
fillXValues(seriesList);
|
||||
_.each(seriesList, function(series) {
|
||||
series.text = [];
|
||||
series.hoverinfo = 'text+name';
|
||||
});
|
||||
for (var i = 0; i < seriesList[0].y.length; i++) {
|
||||
var sum = 0;
|
||||
for(var j = 0; j < seriesList.length; j++) {
|
||||
sum += seriesList[j].y[i];
|
||||
}
|
||||
|
||||
for(var j = 0; j < seriesList.length; j++) {
|
||||
var value = seriesList[j].y[i] / sum * 100;
|
||||
seriesList[j].text.push('Value: ' + seriesList[j].y[i] + '<br>Relative: ' + value.toFixed(2) + '%');
|
||||
|
||||
seriesList[j].y[i] = value;
|
||||
if (j > 0) {
|
||||
seriesList[j].y[i] += seriesList[j-1].y[i];
|
||||
}
|
||||
}
|
||||
}
|
||||
};
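// Worked example of the percent stacking above (numbers assumed): for one x value with
// series y values 10 and 30, the column sum is 40, so the first series plots at
// 10 / 40 * 100 = 25 and the second at 30 / 40 * 100 = 75, cumulated to 25 + 75 = 100;
// the hover text keeps the raw value and its share ('Value: 30<br>Relative: 75.00%').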
|
||||
|
||||
var percentBarStacking = function(seriesList) {
|
||||
if (seriesList.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
fillXValues(seriesList);
|
||||
_.each(seriesList, function(series) {
|
||||
series.text = [];
|
||||
series.hoverinfo = 'text+name';
|
||||
});
|
||||
for (var i = 0; i < seriesList[0].y.length; i++) {
|
||||
var sum = 0;
|
||||
for(var j = 0; j < seriesList.length; j++) {
|
||||
sum += seriesList[j].y[i];
|
||||
}
|
||||
for(var j = 0; j < seriesList.length; j++) {
|
||||
var value = seriesList[j].y[i] / sum * 100;
|
||||
seriesList[j].text.push('Value: ' + seriesList[j].y[i] + '<br>Relative: ' + value.toFixed(2) + '%');
|
||||
seriesList[j].y[i] = value;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var normalizeValue = function(value) {
|
||||
if (moment.isMoment(value)) {
|
||||
return value.format("YYYY-MM-DD HH:mm:ss");
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
angular.module('plotly-chart', [])
|
||||
.constant('ColorPalette', ColorPalette)
|
||||
.directive('plotlyChart', function () {
|
||||
return {
|
||||
restrict: 'E',
|
||||
template: '<plotly data="data" layout="layout" options="plotlyOptions"></plotly>',
|
||||
scope: {
|
||||
options: "=",
|
||||
series: "=",
|
||||
minHeight: "="
|
||||
},
|
||||
link: function (scope) {
|
||||
var getScaleType = function(scale) {
|
||||
if (scale === 'datetime') {
|
||||
return 'date';
|
||||
}
|
||||
if (scale === 'logarithmic') {
|
||||
return 'log';
|
||||
}
|
||||
return scale;
|
||||
};
|
||||
|
||||
var setType = function(series, type) {
|
||||
if (type === 'column') {
|
||||
series.type = 'bar';
|
||||
} else if (type === 'line') {
|
||||
series.mode = 'lines';
|
||||
} else if (type === 'area') {
|
||||
series.fill = scope.options.series.stacking === null ? 'tozeroy' : 'tonexty';
|
||||
series.mode = 'lines';
|
||||
} else if (type === 'scatter') {
|
||||
series.type = 'scatter';
|
||||
series.mode = 'markers';
|
||||
}
|
||||
};
|
||||
|
||||
var getColor = function(index) {
|
||||
return ColorPaletteArray[index % ColorPaletteArray.length];
|
||||
};
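// Series colors cycle through the palette: with the 14 entries above, getColor(0) and
// getColor(14) both return '#4572A7' (Blue), getColor(15) returns '#AA4643' (Red), and so on.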
|
||||
|
||||
var bottomMargin = 50,
|
||||
pixelsPerLegendRow = 21;
|
||||
var redraw = function() {
|
||||
scope.data.length = 0;
|
||||
scope.layout.showlegend = _.has(scope.options, 'legend') ? scope.options.legend.enabled : true;
|
||||
delete scope.layout.barmode;
|
||||
delete scope.layout.xaxis;
|
||||
delete scope.layout.yaxis;
|
||||
delete scope.layout.yaxis2;
|
||||
|
||||
if (scope.options.globalSeriesType === 'pie') {
|
||||
var hasX = _.contains(_.values(scope.options.columnMapping), 'x');
|
||||
var rows = scope.series.length > 2 ? 2 : 1;
|
||||
var cellsInRow = Math.ceil(scope.series.length / rows);
|
||||
var cellWidth = 1 / cellsInRow;
|
||||
var cellHeight = 1 / rows;
|
||||
var xPadding = 0.02;
|
||||
var yPadding = 0.05;
|
||||
var largestXCount = 0;
|
||||
_.each(scope.series, function(series, index) {
|
||||
var xPosition = (index % cellsInRow) * cellWidth;
|
||||
var yPosition = Math.floor(index / cellsInRow) * cellHeight;
|
||||
var plotlySeries = {values: [], labels: [], type: 'pie', hole: .4,
|
||||
marker: {colors: ColorPaletteArray},
|
||||
text: series.name, textposition: 'inside', name: series.name,
|
||||
domain: {x: [xPosition, xPosition + cellWidth - xPadding],
|
||||
y: [yPosition, yPosition + cellHeight - yPadding]}};
|
||||
_.each(series.data, function(row, index) {
|
||||
plotlySeries.values.push(row.y);
|
||||
plotlySeries.labels.push(hasX ? row.x : 'Slice ' + index);
|
||||
});
|
||||
scope.data.push(plotlySeries);
|
||||
largestXCount = Math.max(largestXCount, plotlySeries.labels.length);
|
||||
});
|
||||
scope.layout.height = Math.max(scope.minHeight, pixelsPerLegendRow * largestXCount);
|
||||
scope.layout.margin.b = scope.layout.height - (scope.minHeight - bottomMargin);
|
||||
return;
|
||||
}
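// A rough illustration of the pie grid math above (series count assumed): with 3 pie series,
// rows = 2 and cellsInRow = 2, so cellWidth = cellHeight = 0.5; the third pie (index 2) gets
// domain {x: [0, 0.48], y: [0.5, 0.95]} once the 0.02 / 0.05 padding is subtracted.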
|
||||
|
||||
scope.layout.height = Math.max(scope.minHeight, pixelsPerLegendRow * scope.series.length);
|
||||
scope.layout.margin.b = scope.layout.height - (scope.minHeight - bottomMargin);
|
||||
var hasY2 = false;
|
||||
var sortX = scope.options.sortX === true || scope.options.sortX === undefined;
|
||||
var useUnifiedXaxis = sortX && scope.options.xAxis.type === 'category';
|
||||
|
||||
var unifiedX = null;
|
||||
if (useUnifiedXaxis) {
|
||||
unifiedX = _.sortBy(_.union.apply(_, _.map(scope.series, function(s) { return _.pluck(s.data, 'x'); })), _.identity);
|
||||
}
|
||||
|
||||
_.each(scope.series, function(series, index) {
|
||||
var seriesOptions = scope.options.seriesOptions[series.name] || {};
|
||||
var plotlySeries = {x: [],
|
||||
y: [],
|
||||
name: seriesOptions.name || series.name,
|
||||
marker: {color: seriesOptions.color ? seriesOptions.color : getColor(index)}};
|
||||
|
||||
if (seriesOptions.yAxis === 1 && (scope.options.series.stacking === null || seriesOptions.type === 'line')) {
|
||||
hasY2 = true;
|
||||
plotlySeries.yaxis = 'y2';
|
||||
}
|
||||
|
||||
setType(plotlySeries, seriesOptions.type);
|
||||
var data = series.data;
|
||||
if (sortX) {
|
||||
data = _.sortBy(data, 'x');
|
||||
}
|
||||
|
||||
if (useUnifiedXaxis && index === 0) {
|
||||
var values = {};
|
||||
_.each(data, function(row) {
|
||||
values[row.x] = row.y;
|
||||
});
|
||||
|
||||
_.each(unifiedX, function(x) {
|
||||
plotlySeries.x.push(normalizeValue(x));
|
||||
plotlySeries.y.push(normalizeValue(values[x] || null));
|
||||
});
|
||||
} else {
|
||||
_.each(data, function(row) {
|
||||
plotlySeries.x.push(normalizeValue(row.x));
|
||||
plotlySeries.y.push(normalizeValue(row.y));
|
||||
});
|
||||
}
|
||||
|
||||
scope.data.push(plotlySeries);
|
||||
});
|
||||
|
||||
var getTitle = function(axis) {
|
||||
if (angular.isDefined(axis) && angular.isDefined(axis.title)) {
|
||||
return axis.title.text;
|
||||
}
|
||||
return null;
|
||||
};
|
||||
|
||||
scope.layout.xaxis = {title: getTitle(scope.options.xAxis),
|
||||
type: getScaleType(scope.options.xAxis.type)};
|
||||
if (angular.isDefined(scope.options.xAxis.labels)) {
|
||||
scope.layout.xaxis.showticklabels = scope.options.xAxis.labels.enabled;
|
||||
}
|
||||
if (angular.isArray(scope.options.yAxis)) {
|
||||
scope.layout.yaxis = {title: getTitle(scope.options.yAxis[0]),
|
||||
type: getScaleType(scope.options.yAxis[0].type)};
|
||||
}
|
||||
if (hasY2 && angular.isDefined(scope.options.yAxis)) {
|
||||
scope.layout.yaxis2 = {title: getTitle(scope.options.yAxis[1]),
|
||||
type: getScaleType(scope.options.yAxis[1].type),
|
||||
overlaying: 'y',
|
||||
side: 'right'};
|
||||
} else {
|
||||
delete scope.layout.yaxis2;
|
||||
}
|
||||
|
||||
if (scope.options.series.stacking === 'normal') {
|
||||
scope.layout.barmode = 'stack';
|
||||
if (scope.options.globalSeriesType === 'area') {
|
||||
normalAreaStacking(scope.data);
|
||||
}
|
||||
} else if (scope.options.series.stacking === 'percent') {
|
||||
scope.layout.barmode = 'stack';
|
||||
if (scope.options.globalSeriesType === 'area') {
|
||||
percentAreaStacking(scope.data);
|
||||
} else if (scope.options.globalSeriesType === 'column') {
|
||||
percentBarStacking(scope.data);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
scope.$watch('series', redraw);
|
||||
scope.$watch('options', redraw, true);
|
||||
scope.layout = {margin: {l: 50, r: 50, b: 50, t: 20, pad: 4}, hovermode: 'closest'};
|
||||
scope.plotlyOptions = {showLink: false, displaylogo: false};
|
||||
scope.data = [];
|
||||
}
|
||||
};
|
||||
});
|
||||
})();
|
||||
@@ -10,7 +10,7 @@
|
||||
},
|
||||
template: '<a ng-href="{{link}}" class="query-link">{{query.name}}</a>',
|
||||
link: function(scope, element) {
|
||||
scope.link = 'queries/' + scope.query.id;
|
||||
scope.link = '/queries/' + scope.query.id;
|
||||
if (scope.visualization) {
|
||||
if (scope.visualization.type === 'TABLE') {
|
||||
// link to hard-coded table tab instead of the (hidden) visualization tab
|
||||
@@ -29,21 +29,19 @@
|
||||
restrict: 'E',
|
||||
template: '<span ng-show="query.id && canViewSource">\
|
||||
<a ng-show="!sourceMode"\
|
||||
ng-href="queries/{{query.id}}/source#{{selectedTab}}">Show Source\
|
||||
ng-href="/queries/{{query.id}}/source#{{selectedTab}}">Show Source\
|
||||
</a>\
|
||||
<a ng-show="sourceMode"\
|
||||
ng-href="queries/{{query.id}}#{{selectedTab}}">Hide Source\
|
||||
ng-href="/queries/{{query.id}}#{{selectedTab}}">Hide Source\
|
||||
</a>\
|
||||
</span>'
|
||||
}
|
||||
}
|
||||
|
||||
function queryResultLink() {
|
||||
function queryResultCSVLink() {
|
||||
return {
|
||||
restrict: 'A',
|
||||
link: function (scope, element, attrs) {
|
||||
|
||||
var fileType = attrs.fileType ? attrs.fileType : "csv";
|
||||
link: function (scope, element) {
|
||||
scope.$watch('queryResult && queryResult.getData()', function(data) {
|
||||
if (!data) {
|
||||
return;
|
||||
@@ -52,8 +50,8 @@
|
||||
if (scope.queryResult.getId() == null) {
|
||||
element.attr('href', '');
|
||||
} else {
|
||||
element.attr('href', 'api/queries/' + scope.query.id + '/results/' + scope.queryResult.getId() + '.' + fileType);
|
||||
element.attr('download', scope.query.name.replace(" ", "_") + moment(scope.queryResult.getUpdatedAt()).format("_YYYY_MM_DD") + "." + fileType);
|
||||
element.attr('href', '/api/queries/' + scope.query.id + '/results/' + scope.queryResult.getId() + '.csv');
|
||||
element.attr('download', scope.query.name.replace(" ", "_") + moment(scope.queryResult.getUpdatedAt()).format("_YYYY_MM_DD") + ".csv");
|
||||
}
|
||||
});
|
||||
}
|
||||
@@ -66,23 +64,14 @@
|
||||
scope: {
|
||||
'query': '=',
|
||||
'lock': '=',
|
||||
'schema': '=',
|
||||
'syntax': '='
|
||||
'schema': '='
|
||||
},
|
||||
template: '<textarea></textarea>',
|
||||
link: {
|
||||
pre: function ($scope, element) {
|
||||
$scope.syntax = $scope.syntax || 'sql';
|
||||
|
||||
var modes = {
|
||||
'sql': 'text/x-sql',
|
||||
'python': 'text/x-python',
|
||||
'json': 'application/json'
|
||||
};
|
||||
|
||||
var textarea = element.children()[0];
|
||||
var editorOptions = {
|
||||
mode: modes[$scope.syntax],
|
||||
mode: 'text/x-sql',
|
||||
lineWrapping: true,
|
||||
lineNumbers: true,
|
||||
readOnly: false,
|
||||
@@ -119,8 +108,6 @@
|
||||
$scope.query.query = newValue;
|
||||
});
|
||||
}
|
||||
|
||||
$('.schema-container').css('height', $('.CodeMirror').css('height'));
|
||||
});
|
||||
|
||||
$scope.$watch('query.query', function () {
|
||||
@@ -141,12 +128,6 @@
|
||||
|
||||
additionalHints = _.unique(keywords);
|
||||
}
|
||||
|
||||
codemirror.refresh();
|
||||
});
|
||||
|
||||
$scope.$watch('syntax', function(syntax) {
|
||||
codemirror.setOption('mode', modes[syntax]);
|
||||
});
|
||||
|
||||
$scope.$watch('lock', function (locked) {
|
||||
@@ -243,14 +224,7 @@
|
||||
value: "60",
|
||||
name: 'Every minute'
|
||||
}
|
||||
];
|
||||
|
||||
_.each([5, 10, 15, 30], function(i) {
|
||||
$scope.refreshOptions.push({
|
||||
value: String(i*60),
|
||||
name: "Every " + i + " minutes"
|
||||
})
|
||||
});
|
||||
]
|
||||
|
||||
_.each(_.range(1, 13), function (i) {
|
||||
$scope.refreshOptions.push({
|
||||
@@ -267,10 +241,6 @@
|
||||
value: String(7 * 24 * 3600),
|
||||
name: 'Once a week'
|
||||
});
|
||||
$scope.refreshOptions.push({
|
||||
value: String(30 * 24 * 3600),
|
||||
name: 'Every 30d'
|
||||
});
|
||||
|
||||
$scope.$watch('refreshType', function() {
|
||||
if ($scope.refreshType == 'periodic') {
|
||||
@@ -288,9 +258,9 @@
|
||||
angular.module('redash.directives')
|
||||
.directive('queryLink', queryLink)
|
||||
.directive('querySourceLink', querySourceLink)
|
||||
.directive('queryResultLink', queryResultLink)
|
||||
.directive('queryResultLink', queryResultCSVLink)
|
||||
.directive('queryEditor', queryEditor)
|
||||
.directive('queryRefreshSelect', queryRefreshSelect)
|
||||
.directive('queryTimePicker', queryTimePicker)
|
||||
.directive('queryFormatter', ['$http', queryFormatter]);
|
||||
})();
|
||||
})();
|
||||
@@ -1,55 +0,0 @@
|
||||
angular.module('redash', [
|
||||
'redash.directives',
|
||||
'redash.admin_controllers',
|
||||
'redash.controllers',
|
||||
'redash.filters',
|
||||
'redash.services',
|
||||
'redash.visualization',
|
||||
'plotly',
|
||||
'plotly-chart',
|
||||
'angular-growl',
|
||||
'angularMoment',
|
||||
'ui.bootstrap',
|
||||
'ui.sortable',
|
||||
'smartTable.table',
|
||||
'ngResource',
|
||||
'ngRoute',
|
||||
'ui.select',
|
||||
'naif.base64',
|
||||
'ui.bootstrap.showErrors',
|
||||
'ngSanitize'
|
||||
]).config(['$routeProvider', '$locationProvider', '$compileProvider', 'growlProvider', 'uiSelectConfig',
|
||||
function ($routeProvider, $locationProvider, $compileProvider, growlProvider, uiSelectConfig) {
|
||||
function getQuery(Query, $route) {
|
||||
var query = Query.get({'id': $route.current.params.queryId });
|
||||
return query.$promise;
|
||||
};
|
||||
|
||||
uiSelectConfig.theme = "bootstrap";
|
||||
|
||||
$compileProvider.aHrefSanitizationWhitelist(/^\s*(https?|http|data):/);
|
||||
$locationProvider.html5Mode(true);
|
||||
growlProvider.globalTimeToLive(2000);
|
||||
|
||||
$routeProvider.when('/embed/query/:queryId/visualization/:visualizationId', {
|
||||
templateUrl: '/views/visualization-embed.html',
|
||||
controller: 'EmbedCtrl',
|
||||
reloadOnSearch: false
|
||||
});
|
||||
$routeProvider.otherwise({
|
||||
redirectTo: '/embed'
|
||||
});
|
||||
|
||||
|
||||
}
|
||||
])
|
||||
.controller('EmbedCtrl', ['$scope', function ($scope) {} ])
|
||||
.controller('EmbeddedVisualizationCtrl', ['$scope', 'Query', 'QueryResult',
|
||||
function ($scope, Query, QueryResult) {
|
||||
$scope.embed = true;
|
||||
$scope.visualization = visualization;
|
||||
$scope.query = visualization.query;
|
||||
query = new Query(visualization.query);
|
||||
$scope.queryResult = new QueryResult({query_result:query_result});
|
||||
} ])
|
||||
;
|
||||
@@ -48,9 +48,6 @@ angular.module('redash.filters', []).
|
||||
|
||||
.filter('colWidth', function () {
|
||||
return function (widgetWidth) {
|
||||
if (widgetWidth == 0) {
|
||||
return 0;
|
||||
}
|
||||
if (widgetWidth == 1) {
|
||||
return 6;
|
||||
}
|
||||
@@ -69,12 +66,6 @@ angular.module('redash.filters', []).
|
||||
}
|
||||
})
|
||||
|
||||
.filter('dateTime', function() {
|
||||
return function(value) {
|
||||
return moment(value).format(clientConfig.dateTimeFormat);
|
||||
}
|
||||
})
|
||||
|
||||
.filter('linkify', function () {
|
||||
return function (text) {
|
||||
return text.replace(urlPattern, "$1<a href='$2' target='_blank'>$2</a>");
|
||||
@@ -86,13 +77,7 @@ angular.module('redash.filters', []).
|
||||
if (!text) {
|
||||
return "";
|
||||
}
|
||||
|
||||
var html = marked(text);
|
||||
if (clientConfig.allowScriptsInUserInput) {
|
||||
html = $sce.trustAsHtml(html);
|
||||
}
|
||||
|
||||
return html;
|
||||
return $sce.trustAsHtml(marked(text));
|
||||
}
|
||||
}])
|
||||
|
||||
@@ -103,21 +88,4 @@ angular.module('redash.filters', []).
|
||||
}
|
||||
return $sce.trustAsHtml(text);
|
||||
}
|
||||
}])
|
||||
|
||||
.filter('remove', function() {
|
||||
return function(items, item) {
|
||||
if (items == undefined)
|
||||
return items;
|
||||
if (item instanceof Array) {
|
||||
var notEquals = function(other) { return item.indexOf(other) == -1; }
|
||||
} else {
|
||||
var notEquals = function(other) { return item != other; }
|
||||
}
|
||||
var filtered = [];
|
||||
for (var i = 0; i < items.length; i++)
|
||||
if (notEquals(items[i]))
|
||||
filtered.push(items[i])
|
||||
return filtered;
|
||||
};
|
||||
});
|
||||
}]);
|
||||
|
||||
rd_ui/app/scripts/ng_highchart.js (new file, 396 lines)
@@ -0,0 +1,396 @@
|
||||
(function () {
|
||||
'use strict';
|
||||
|
||||
var ColorPalette = {
|
||||
'Blue':'#4572A7',
|
||||
'Red':'#AA4643',
|
||||
'Green': '#89A54E',
|
||||
'Purple': '#80699B',
|
||||
'Cyan': '#3D96AE',
|
||||
'Orange': '#DB843D',
|
||||
'Light Blue': '#92A8CD',
|
||||
'Lilac': '#A47D7C',
|
||||
'Light Green': '#B5CA92',
|
||||
};
|
||||
|
||||
Highcharts.setOptions({
|
||||
colors: _.values(ColorPalette)
|
||||
});
|
||||
|
||||
var defaultOptions = {
|
||||
title: {
|
||||
"text": null
|
||||
},
|
||||
xAxis: {
|
||||
type: 'datetime'
|
||||
},
|
||||
yAxis: [
|
||||
{
|
||||
title: {
|
||||
text: null
|
||||
},
|
||||
// showEmpty: true // by default
|
||||
},
|
||||
{
|
||||
title: {
|
||||
text: null
|
||||
},
|
||||
opposite: true,
|
||||
showEmpty: false
|
||||
}
|
||||
],
|
||||
|
||||
|
||||
tooltip: {
|
||||
valueDecimals: 2,
|
||||
formatter: function () {
|
||||
if (!this.points) {
|
||||
this.points = [this.point];
|
||||
}
|
||||
;
|
||||
|
||||
if (moment.isMoment(this.x)) {
|
||||
var s = '<b>' + this.x.toDate().toLocaleString() + '</b>',
|
||||
pointsCount = this.points.length;
|
||||
|
||||
$.each(this.points, function (i, point) {
|
||||
s += '<br/><span style="color:' + point.series.color + '">' + point.series.name + '</span>: ' +
|
||||
Highcharts.numberFormat(point.y);
|
||||
|
||||
if (pointsCount > 1 && point.percentage) {
|
||||
s += " (" + Highcharts.numberFormat(point.percentage) + "%)";
|
||||
}
|
||||
});
|
||||
} else {
|
||||
var points = this.points;
|
||||
var name = points[0].key || points[0].name;
|
||||
|
||||
var s = "<b>" + name + "</b>";
|
||||
|
||||
$.each(points, function (i, point) {
|
||||
if (points.length > 1) {
|
||||
s += '<br/><span style="color:' + point.series.color + '">' + point.series.name + '</span>: ' + Highcharts.numberFormat(point.y);
|
||||
} else {
|
||||
s += ": " + Highcharts.numberFormat(point.y);
|
||||
if (point.percentage < 100) {
|
||||
s += ' (' + Highcharts.numberFormat(point.percentage) + '%)';
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return s;
|
||||
},
|
||||
shared: true
|
||||
},
|
||||
exporting: {
|
||||
chartOptions: {
|
||||
title: {
|
||||
text: ''
|
||||
}
|
||||
},
|
||||
buttons: {
|
||||
contextButton: {
|
||||
menuItems: [
|
||||
{
|
||||
text: 'Toggle % Stacking',
|
||||
onclick: function () {
|
||||
var newStacking = "normal";
|
||||
if (this.series[0].options.stacking == "normal") {
|
||||
newStacking = "percent";
|
||||
}
|
||||
|
||||
_.each(this.series, function (series) {
|
||||
series.update({stacking: newStacking}, true);
|
||||
});
|
||||
}
|
||||
},
|
||||
{
|
||||
text: 'Select All',
|
||||
onclick: function () {
|
||||
_.each(this.series, function (s) {
|
||||
s.setVisible(true, false);
|
||||
});
|
||||
this.redraw();
|
||||
}
|
||||
},
|
||||
{
|
||||
text: 'Unselect All',
|
||||
onclick: function () {
|
||||
_.each(this.series, function (s) {
|
||||
s.setVisible(false, false);
|
||||
});
|
||||
this.redraw();
|
||||
}
|
||||
},
|
||||
{
|
||||
text: 'Show Total',
|
||||
onclick: function () {
|
||||
var hasTotalsAlready = _.some(this.series, function (s) {
|
||||
var res = (s.name == 'Total');
|
||||
//if 'Total' already exists - just make it visible
|
||||
if (res) s.setVisible(true, false);
|
||||
return res;
|
||||
})
|
||||
var data = {};
|
||||
_.each(this.series, function (s) {
|
||||
if (s.name != 'Total') s.setVisible(false, false);
|
||||
if (!hasTotalsAlready) {
|
||||
_.each(s.data, function (p) {
|
||||
data[p.x] = data[p.x] || {'x': p.x, 'y': 0};
|
||||
data[p.x].y = data[p.x].y + p.y;
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
if (!hasTotalsAlready) {
|
||||
this.addSeries({
|
||||
data: _.values(data),
|
||||
type: 'line',
|
||||
name: 'Total'
|
||||
}, false)
|
||||
}
|
||||
|
||||
this.redraw();
|
||||
}
|
||||
},
|
||||
{
|
||||
text: 'Save Image',
|
||||
onclick: function () {
|
||||
var canvas = document.createElement('canvas');
|
||||
window.canvg(canvas, this.getSVG());
|
||||
var href = canvas.toDataURL('image/png');
|
||||
var a = document.createElement('a');
|
||||
a.href = href;
|
||||
var filenameSuffix = new Date().toISOString().replace(/:/g,'_').replace('Z', '');
|
||||
if (this.title) {
|
||||
filenameSuffix = this.title.text;
|
||||
}
|
||||
a.download = 'redash_charts_'+filenameSuffix+'.png';
|
||||
document.body.appendChild(a);
|
||||
a.click();
|
||||
a.remove();
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
credits: {
|
||||
enabled: false
|
||||
},
|
||||
plotOptions: {
|
||||
area: {
|
||||
marker: {
|
||||
enabled: false,
|
||||
symbol: 'circle',
|
||||
radius: 2,
|
||||
states: {
|
||||
hover: {
|
||||
enabled: true
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
column: {
|
||||
stacking: "normal",
|
||||
pointPadding: 0,
|
||||
borderWidth: 1,
|
||||
groupPadding: 0,
|
||||
shadow: false
|
||||
},
|
||||
line: {
|
||||
marker: {
|
||||
radius: 1
|
||||
},
|
||||
lineWidth: 2,
|
||||
states: {
|
||||
hover: {
|
||||
lineWidth: 2,
|
||||
marker: {
|
||||
radius: 3
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
pie: {
|
||||
allowPointSelect: true,
|
||||
cursor: 'pointer',
|
||||
dataLabels: {
|
||||
enabled: true,
|
||||
color: '#000000',
|
||||
connectorColor: '#000000',
|
||||
format: '<b>{point.name}</b>: {point.y} ({point.percentage:.1f} %)'
|
||||
}
|
||||
},
|
||||
scatter: {
|
||||
marker: {
|
||||
radius: 5,
|
||||
states: {
|
||||
hover: {
|
||||
enabled: true,
|
||||
lineColor: 'rgb(100,100,100)'
|
||||
}
|
||||
}
|
||||
},
|
||||
tooltip: {
|
||||
headerFormat: '<b>{series.name}</b><br>',
|
||||
pointFormat: '{point.x}, {point.y}'
|
||||
}
|
||||
}
|
||||
},
|
||||
series: []
|
||||
};
|
||||
|
||||
angular.module('highchart', [])
|
||||
.constant('ColorPalette', ColorPalette)
|
||||
.directive('chart', ['$timeout', function ($timeout) {
|
||||
return {
|
||||
restrict: 'E',
|
||||
template: '<div></div>',
|
||||
scope: {
|
||||
options: "=options",
|
||||
series: "=series"
|
||||
},
|
||||
transclude: true,
|
||||
replace: true,
|
||||
|
||||
link: function (scope, element, attrs) {
|
||||
var chartsDefaults = {
|
||||
chart: {
|
||||
renderTo: element[0],
|
||||
type: attrs.type || null,
|
||||
height: attrs.height || null,
|
||||
width: attrs.width || null
|
||||
}
|
||||
};
|
||||
|
||||
var chartOptions = $.extend(true, {}, defaultOptions, chartsDefaults);
|
||||
|
||||
// $timeout makes sure this function is invoked after the DOM is ready. When draw/init
// run after the DOM is ready, we first see empty Highcharts objects that later get
// filled up, which gives the feeling that the charts load faster (otherwise we would
// stare at an empty screen until the Highcharts object is ready).
|
||||
$timeout(function () {
|
||||
// Update when options change
|
||||
scope.$watch('options', function (newOptions) {
|
||||
initChart(newOptions);
|
||||
}, true);
|
||||
|
||||
//Update when charts data changes
|
||||
scope.$watchCollection('series', function (series) {
|
||||
if (!series || series.length == 0) {
|
||||
scope.chart.showLoading();
|
||||
} else {
|
||||
drawChart();
|
||||
}
|
||||
;
|
||||
});
|
||||
});
|
||||
|
||||
function initChart(options) {
|
||||
if (scope.chart) {
|
||||
scope.chart.destroy();
|
||||
}
|
||||
;
|
||||
|
||||
$.extend(true, chartOptions, options);
|
||||
|
||||
scope.chart = new Highcharts.Chart(chartOptions);
|
||||
drawChart();
|
||||
}
|
||||
|
||||
function drawChart() {
|
||||
while (scope.chart.series.length > 0) {
|
||||
scope.chart.series[0].remove(false);
|
||||
};
|
||||
|
||||
// We check either for true or undefined for backward compatibility.
|
||||
var series = scope.series;
|
||||
|
||||
if (chartOptions['sortX'] === true || chartOptions['sortX'] === undefined) {
|
||||
var seriesCopy = [];
|
||||
|
||||
_.each(series, function (s) {
|
||||
// make a copy of series data, so we don't override original.
|
||||
var fieldName = 'x';
|
||||
if (s.data.length > 0 && _.has(s.data[0], 'name')) {
|
||||
fieldName = 'name';
|
||||
};
|
||||
|
||||
var sorted = _.extend({}, s, {data: _.sortBy(s.data, fieldName)});
|
||||
seriesCopy.push(sorted);
|
||||
});
|
||||
|
||||
series = seriesCopy;
|
||||
}
|
||||
|
||||
if (!('xAxis' in chartOptions && 'type' in chartOptions['xAxis'])) {
|
||||
if (series.length > 0 && _.some(series[0].data, function (p) {
|
||||
return (angular.isString(p.x) || angular.isDefined(p.name));
|
||||
})) {
|
||||
chartOptions['xAxis'] = chartOptions['xAxis'] || {};
|
||||
chartOptions['xAxis']['type'] = 'category';
|
||||
} else {
|
||||
chartOptions['xAxis'] = chartOptions['xAxis'] || {};
|
||||
chartOptions['xAxis']['type'] = 'datetime';
|
||||
}
|
||||
}
|
||||
|
||||
if (chartOptions['xAxis']['type'] == 'category' || chartOptions['series']['type']=='pie') {
|
||||
if (!angular.isDefined(series[0].data[0].name)) {
|
||||
// We need to make sure that for each category, each series has a value.
|
||||
var categories = _.union.apply(this, _.map(series, function (s) {
|
||||
return _.pluck(s.data, 'x')
|
||||
}));
|
||||
|
||||
_.each(series, function (s) {
|
||||
// TODO: move this logic to Query#getChartData
|
||||
var yValues = _.groupBy(s.data, 'x');
|
||||
|
||||
var newData = _.map(categories, function (category) {
|
||||
return {
|
||||
name: category,
|
||||
y: (yValues[category] && yValues[category][0].y) || 0
|
||||
}
|
||||
});
|
||||
|
||||
s.data = newData;
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
scope.chart.counters.color = 0;
|
||||
|
||||
_.each(series, function (s) {
|
||||
// here we override the series with the visualization config
|
||||
s = _.extend(s, chartOptions['series']);
|
||||
|
||||
if (s.type == 'area') {
|
||||
_.each(s.data, function (p) {
|
||||
// This is an insane hack: somewhere deep in Highcharts' code, when you stack areas,
// it tries to convert the string representation of a point's x into a number, and with
// moment's default toString implementation that fails; the override below returns the
// epoch milliseconds as a string instead.
|
||||
|
||||
if (moment.isMoment(p.x)) {
|
||||
p.x.toString = function () {
|
||||
return String(this.toDate().getTime());
|
||||
};
|
||||
}
|
||||
});
|
||||
}
|
||||
;
|
||||
|
||||
scope.chart.addSeries(s, false);
|
||||
});
|
||||
|
||||
scope.chart.redraw();
|
||||
scope.chart.hideLoading();
|
||||
}
|
||||
|
||||
}
|
||||
};
|
||||
|
||||
}]);
|
||||
})();
|
||||
File diff suppressed because it is too large
@@ -1,32 +1,10 @@
|
||||
(function () {
|
||||
var Dashboard = function($resource, $http, Widget) {
|
||||
var transformSingle = function(dashboard) {
|
||||
dashboard.widgets = _.map(dashboard.widgets, function (row) {
|
||||
return _.map(row, function (widget) {
|
||||
return new Widget(widget);
|
||||
});
|
||||
});
|
||||
};
|
||||
|
||||
var transform = $http.defaults.transformResponse.concat(function(data, headers) {
|
||||
if (_.isArray(data)) {
|
||||
_.each(data, transformSingle);
|
||||
} else {
|
||||
transformSingle(data);
|
||||
}
|
||||
return data;
|
||||
});
|
||||
|
||||
var resource = $resource('api/dashboards/:slug', {slug: '@slug'}, {
|
||||
'get': {method: 'GET', transformResponse: transform},
|
||||
'save': {method: 'POST', transformResponse: transform},
|
||||
'query': {method: 'GET', isArray: true, transformResponse: transform},
|
||||
var Dashboard = function($resource) {
|
||||
var resource = $resource('/api/dashboards/:slug', {slug: '@slug'}, {
|
||||
recent: {
|
||||
method: 'get',
|
||||
isArray: true,
|
||||
url: "api/dashboards/recent",
|
||||
transformResponse: transform
|
||||
|
||||
url: "/api/dashboards/recent"
|
||||
}});
|
||||
|
||||
resource.prototype.canEdit = function() {
|
||||
@@ -36,5 +14,5 @@
|
||||
}
|
||||
|
||||
angular.module('redash.services')
|
||||
.factory('Dashboard', ['$resource', '$http', 'Widget', Dashboard])
|
||||
.factory('Dashboard', ['$resource', Dashboard])
|
||||
})();
|
||||
|
||||
@@ -1,31 +1,7 @@
|
||||
(function () {
|
||||
function QueryResultError(errorMessage) {
|
||||
this.errorMessage = errorMessage;
|
||||
}
|
||||
|
||||
QueryResultError.prototype.getError = function() {
|
||||
return this.errorMessage;
|
||||
};
|
||||
|
||||
QueryResultError.prototype.getStatus = function() {
|
||||
return 'failed';
|
||||
};
|
||||
|
||||
QueryResultError.prototype.getData = function() {
|
||||
return null;
|
||||
};
|
||||
|
||||
QueryResultError.prototype.getLog = function() {
|
||||
return null;
|
||||
};
|
||||
|
||||
QueryResultError.prototype.getChartData = function() {
|
||||
return null;
|
||||
};
|
||||
|
||||
var QueryResult = function ($resource, $timeout, $q) {
|
||||
var QueryResultResource = $resource('api/query_results/:id', {id: '@id'}, {'post': {'method': 'POST'}});
|
||||
var Job = $resource('api/jobs/:id', {id: '@id'});
|
||||
var QueryResultResource = $resource('/api/query_results/:id', {id: '@id'}, {'post': {'method': 'POST'}});
|
||||
var Job = $resource('/api/jobs/:id', {id: '@id'});
|
||||
|
||||
var updateFunction = function (props) {
|
||||
angular.extend(this, props);
|
||||
@@ -36,17 +12,15 @@
|
||||
|
||||
var columnTypes = {};
|
||||
|
||||
// TODO: we should stop manipulating incoming data, and switch to relying on the column type set by the backend.
|
||||
// This logic is prone to errors and is better removed. Kept for now for backward compatibility.
|
||||
_.each(this.query_result.data.rows, function (row) {
|
||||
_.each(row, function (v, k) {
|
||||
if (angular.isNumber(v)) {
|
||||
columnTypes[k] = 'float';
|
||||
} else if (_.isString(v) && v.match(/^\d{4}-\d{2}-\d{2}T/)) {
|
||||
row[k] = moment.utc(v);
|
||||
row[k] = moment(v);
|
||||
columnTypes[k] = 'datetime';
|
||||
} else if (_.isString(v) && v.match(/^\d{4}-\d{2}-\d{2}$/)) {
|
||||
row[k] = moment.utc(v);
|
||||
} else if (_.isString(v) && v.match(/^\d{4}-\d{2}-\d{2}/)) {
|
||||
row[k] = moment(v);
|
||||
columnTypes[k] = 'date';
|
||||
} else if (typeof(v) == 'object' && v !== null) {
|
||||
row[k] = JSON.stringify(v);
|
||||
@@ -56,9 +30,7 @@
|
||||
|
||||
_.each(this.query_result.data.columns, function(column) {
|
||||
if (columnTypes[column.name]) {
|
||||
if (column.type == null || column.type == 'string') {
|
||||
column.type = columnTypes[column.name];
|
||||
}
|
||||
column.type = columnTypes[column.name];
|
||||
}
|
||||
});
|
||||
|
||||
@@ -68,7 +40,7 @@
|
||||
} else {
|
||||
this.status = undefined;
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
function QueryResult(props) {
|
||||
this.deferred = $q.defer();
|
||||
@@ -119,14 +91,6 @@
|
||||
return this.job.error;
|
||||
}
|
||||
|
||||
QueryResult.prototype.getLog = function() {
|
||||
if (!this.query_result.data || !this.query_result.data.log || this.query_result.data.log.length == 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return this.query_result.data.log;
|
||||
}
|
||||
|
||||
QueryResult.prototype.getUpdatedAt = function () {
|
||||
return this.query_result.retrieved_at || this.job.updated_at * 1000.0 || this.updatedAt;
|
||||
}
|
||||
@@ -186,22 +150,7 @@
|
||||
}
|
||||
|
||||
return this.filteredData;
|
||||
};
|
||||
|
||||
/**
|
||||
* Helper function to add a point into a series
|
||||
*/
|
||||
QueryResult.prototype._addPointToSeries = function (point, seriesCollection, seriesName) {
|
||||
if (seriesCollection[seriesName] == undefined) {
|
||||
seriesCollection[seriesName] = {
|
||||
name: seriesName,
|
||||
type: 'column',
|
||||
data: []
|
||||
};
|
||||
}
|
||||
|
||||
seriesCollection[seriesName]['data'].push(point);
|
||||
};
|
||||
}
|
||||
|
||||
QueryResult.prototype.getChartData = function (mapping) {
|
||||
var series = {};
|
||||
@@ -213,8 +162,8 @@
|
||||
var yValues = {};
|
||||
|
||||
_.each(row, function (value, definition) {
|
||||
var name = definition.split("::")[0] || definition.split("__")[0];
|
||||
var type = definition.split("::")[1] || definition.split("__")[1];
|
||||
var name = definition.split("::")[0];
|
||||
var type = definition.split("::")[1];
|
||||
if (mapping) {
|
||||
type = mapping[definition];
|
||||
}
|
||||
@@ -239,20 +188,31 @@
|
||||
seriesName = String(value);
|
||||
}
|
||||
|
||||
if (type == 'multiFilter' || type == 'multi-filter') {
|
||||
if (type == 'multi-filter') {
|
||||
seriesName = String(value);
|
||||
}
|
||||
});
|
||||
|
||||
var addPointToSeries = function (seriesName, point) {
|
||||
if (series[seriesName] == undefined) {
|
||||
series[seriesName] = {
|
||||
name: seriesName,
|
||||
type: 'column',
|
||||
data: []
|
||||
}
|
||||
}
|
||||
|
||||
series[seriesName]['data'].push(point);
|
||||
}
|
||||
|
||||
if (seriesName === undefined) {
|
||||
_.each(yValues, function (yValue, seriesName) {
|
||||
this._addPointToSeries({'x': xValue, 'y': yValue}, series, seriesName);
|
||||
}.bind(this));
|
||||
addPointToSeries(seriesName, {'x': xValue, 'y': yValue});
|
||||
});
|
||||
} else {
|
||||
addPointToSeries(seriesName, point);
|
||||
}
|
||||
else {
|
||||
this._addPointToSeries(point, series, seriesName);
|
||||
}
|
||||
}.bind(this));
|
||||
});
|
||||
|
||||
return _.values(series);
|
||||
};
|
||||
@@ -276,16 +236,7 @@
|
||||
}
|
||||
|
||||
QueryResult.prototype.getColumnNameWithoutType = function (column) {
|
||||
var typeSplit;
|
||||
if (column.indexOf("::") != -1) {
|
||||
typeSplit = "::";
|
||||
} else if (column.indexOf("__") != -1) {
|
||||
typeSplit = "__";
|
||||
} else {
|
||||
return column;
|
||||
}
|
||||
|
||||
var parts = column.split(typeSplit);
|
||||
var parts = column.split('::');
|
||||
if (parts[0] == "" && parts.length == 2) {
|
||||
return parts[1];
|
||||
}
|
||||
@@ -326,18 +277,16 @@
|
||||
|
||||
QueryResult.prototype.prepareFilters = function () {
|
||||
var filters = [];
|
||||
var filterTypes = ['filter', 'multi-filter', 'multiFilter'];
|
||||
_.each(this.getColumns(), function (col) {
|
||||
var name = col.name;
|
||||
var type = name.split('::')[1] || name.split('__')[1];
|
||||
var filterTypes = ['filter', 'multi-filter'];
|
||||
_.each(this.getColumnNames(), function (col) {
|
||||
var type = col.split('::')[1]
|
||||
if (_.contains(filterTypes, type)) {
|
||||
// filter found
|
||||
var filter = {
|
||||
name: name,
|
||||
friendlyName: this.getColumnFriendlyName(name),
|
||||
column: col,
|
||||
name: col,
|
||||
friendlyName: this.getColumnFriendlyName(col),
|
||||
values: [],
|
||||
multiple: (type=='multiFilter') || (type=='multi-filter')
|
||||
multiple: (type=='multi-filter')
|
||||
}
|
||||
filters.push(filter);
|
||||
}
@@ -389,24 +338,15 @@
    return this.deferred.promise;
  }

  QueryResult.get = function (data_source_id, query, maxAge, queryId) {
  QueryResult.get = function (data_source_id, query, maxAge) {
    var queryResult = new QueryResult();

    var params = {'data_source_id': data_source_id, 'query': query, 'max_age': maxAge};
    if (queryId !== undefined) {
      params['query_id'] = queryId;
    };

    QueryResultResource.post(params, function (response) {
    QueryResultResource.post({'data_source_id': data_source_id, 'query': query, 'max_age': maxAge}, function (response) {
      queryResult.update(response);

      if ('job' in response) {
        refreshStatus(queryResult, query);
      }
    }, function(error) {
      if (error.status === 403) {
        queryResult.update(error.data);
      }
    });

    return queryResult;
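An illustrative call against the newer signature shown above (the IDs and query text are made up): the optional fourth argument is forwarded to the backend as query_id, so the server can associate the execution with a saved query.

// Illustrative only — values are invented.
var result = QueryResult.get(1, 'SELECT count(*) FROM events', 0, 42);
// posts {'data_source_id': 1, 'query': 'SELECT ...', 'max_age': 0, 'query_id': 42}
// via QueryResultResource; the returned QueryResult updates itself once the job response arrives.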
@@ -416,17 +356,17 @@
  };

var Query = function ($resource, QueryResult, DataSource) {
  var Query = $resource('api/queries/:id', {id: '@id'},
  var Query = $resource('/api/queries/:id', {id: '@id'},
    {
      search: {
        method: 'get',
        isArray: true,
        url: "api/queries/search"
        url: "/api/queries/search"
      },
      recent: {
        method: 'get',
        isArray: true,
        url: "api/queries/recent"
        url: "/api/queries/recent"
      }});

  Query.newQuery = function () {
@@ -457,23 +397,20 @@
    return '/queries/' + this.id + '/source';
  };

  Query.prototype.isNew = function() {
    return this.id === undefined;
  };

  Query.prototype.hasDailySchedule = function() {
    return (this.schedule && this.schedule.match(/\d\d:\d\d/) !== null);
  };
  }

  Query.prototype.scheduleInLocalTime = function() {
    var parts = this.schedule.split(':');
    return moment.utc().hour(parts[0]).minute(parts[1]).local().format('HH:mm');
  };
  }
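A small worked example of the two schedule helpers above (the values are invented and the conversion result depends on the browser's timezone): a daily schedule is stored as a UTC "HH:mm" string, which hasDailySchedule recognises and scheduleInLocalTime converts for display.

// Illustrative only:
query.schedule = '13:30';
query.hasDailySchedule();      // true — the value matches /\d\d:\d\d/
query.scheduleInLocalTime();   // e.g. '15:30' when the browser runs in UTC+2

query.schedule = '3600';       // presumably an interval in seconds rather than a time of day
query.hasDailySchedule();      // false — no 'HH:mm' pattern to convert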
  Query.prototype.getQueryResult = function (maxAge, parameters) {
    if (!this.query) {
      return;
    }
    // if (ttl == undefined) {
    //   ttl = this.ttl;
    // }

    var queryText = this.query;

    var queryParameters = this.getParameters();
@@ -507,9 +444,7 @@
        this.queryResult = QueryResult.getById(this.latest_query_data_id);
      }
    } else if (this.data_source_id) {
      this.queryResult = QueryResult.get(this.data_source_id, queryText, maxAge, this.id);
    } else {
      return new QueryResultError("Please select data source to run this query.");
      this.queryResult = QueryResult.get(this.data_source_id, queryText, maxAge);
    }

    return this.queryResult;
@@ -546,82 +481,17 @@

var DataSource = function ($resource) {
  var actions = {
    'get': {'method': 'GET', 'cache': false, 'isArray': false},
    'query': {'method': 'GET', 'cache': false, 'isArray': true},
    'getSchema': {'method': 'GET', 'cache': true, 'isArray': true, 'url': 'api/data_sources/:id/schema'}
    'get': {'method': 'GET', 'cache': true, 'isArray': true},
    'getSchema': {'method': 'GET', 'cache': true, 'isArray': true, 'url': '/api/data_sources/:id/schema'}
  };

  var DataSourceResource = $resource('api/data_sources/:id', {id: '@id'}, actions);
  var DataSourceResource = $resource('/api/data_sources/:id', {id: '@id'}, actions);

  return DataSourceResource;
};

var User = function ($resource, $http) {
  var transformSingle = function(user) {
    if (user.groups !== undefined) {
      user.admin = user.groups.indexOf("admin") != -1;
    }
  };

  var transform = $http.defaults.transformResponse.concat(function(data, headers) {
    if (_.isArray(data)) {
      _.each(data, transformSingle);
    } else {
      transformSingle(data);
    }
    return data;
  });

  var actions = {
    'get': {method: 'GET', transformResponse: transform},
    'save': {method: 'POST', transformResponse: transform},
    'query': {method: 'GET', isArray: true, transformResponse: transform},
    'delete': {method: 'DELETE', transformResponse: transform}
  };

  var UserResource = $resource('api/users/:id', {id: '@id'}, actions);

  return UserResource;
};

var Group = function ($resource) {
  var actions = {
    'get': {'method': 'GET', 'cache': false, 'isArray': false},
    'query': {'method': 'GET', 'cache': false, 'isArray': true},
    'members': {'method': 'GET', 'cache': true, 'isArray': true, 'url': 'api/groups/:id/members'},
    'dataSources': {'method': 'GET', 'cache': true, 'isArray': true, 'url': 'api/groups/:id/data_sources'}
  };
  var resource = $resource('api/groups/:id', {id: '@id'}, actions);
  return resource;
};

var AlertSubscription = function ($resource) {
  var resource = $resource('api/alerts/:alertId/subscriptions/:userId', {alertId: '@alert_id', userId: '@user.id'});
  return resource;
};

var Alert = function ($resource, $http) {
  var actions = {
    save: {
      method: 'POST',
      transformRequest: [function(data) {
        var newData = _.extend({}, data);
        if (newData.query_id === undefined) {
          newData.query_id = newData.query.id;
          delete newData.query;
        }

        return newData;
      }].concat($http.defaults.transformRequest)
    }
  };
  var resource = $resource('api/alerts/:id', {id: '@id'}, actions);

  return resource;
};
}

var Widget = function ($resource, Query) {
  var WidgetResource = $resource('api/widgets/:id', {id: '@id'});
  var WidgetResource = $resource('/api/widgets/:id', {id: '@id'});

  WidgetResource.prototype.getQuery = function () {
    if (!this.query && this.visualization) {
@@ -645,9 +515,5 @@
    .factory('QueryResult', ['$resource', '$timeout', '$q', QueryResult])
    .factory('Query', ['$resource', 'QueryResult', 'DataSource', Query])
    .factory('DataSource', ['$resource', DataSource])
    .factory('Alert', ['$resource', '$http', Alert])
    .factory('AlertSubscription', ['$resource', AlertSubscription])
    .factory('Widget', ['$resource', 'Query', Widget])
    .factory('User', ['$resource', '$http', User])
    .factory('Group', ['$resource', Group]);
    .factory('Widget', ['$resource', 'Query', Widget]);
})();
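The .factory registrations above are what expose these constructors to the rest of the Angular app; a minimal consumption sketch follows (the module and controller names are invented, only the factory names come from the diff).

// Illustrative only — not part of the repository; the query ID is made up.
angular.module('exampleApp')
  .controller('ExampleQueryCtrl', ['$scope', 'Query', function ($scope, Query) {
    Query.get({id: 123}, function (query) {
      $scope.query = query;
      $scope.queryResult = query.getQueryResult(0);   // maxAge is forwarded to QueryResult.get as max_age
    });
  }]);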
@@ -26,7 +26,7 @@
      var events = this.events;
      this.events = [];

      $http.post('api/events', events);
      $http.post('/api/events', events);

    }, 1000);