Black support for the Python codebase (#4297)

* Apply black formatting
* Add auto formatting when committing to master
* Update CONTRIBUTING.md re. Black & Prettier
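Most of the diff that follows is mechanical. Black normalizes string literals to double quotes, adds a trailing comma to anything it splits across lines, and explodes calls that overflow its default 88-character line length. A minimal before/after sketch of those three transformations (the values and field names are illustrative, echoing the schedule migration further down; both snippets are valid, runnable Python):

    # Before Black: single quotes, the whole call on one overlong line.
    schedule = dict(interval=86400, until=None, day_of_week=None, time='00:00', description='daily refresh of the dashboards')

    # After Black: double quotes, the call exploded one argument per line,
    # with a "magic" trailing comma after the last argument.
    schedule = dict(
        interval=86400,
        until=None,
        day_of_week=None,
        time="00:00",
        description="daily refresh of the dashboards",
    )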

.github/workflows/black.yml (new file, vendored, 31 lines)
@@ -0,0 +1,31 @@
+name: Python Code Formatting (Black)
+
+on:
+  push:
+    branches:
+      - master
+
+jobs:
+  format:
+    runs-on: ubuntu-latest
+
+    container:
+      image: python:3.7.4-alpine
+
+    steps:
+      - uses: actions/checkout@v1
+      - name: Install Black
+        run: apk add gcc musl-dev && pip install black
+      - name: Run Black
+        run: black redash tests migrations/versions
+      - name: Commit formatted code
+        uses: EndBug/add-and-commit@v2.1.0
+        with:
+          author_name: Black
+          author_email: team@redash.io
+          message: "Autoformatted Python code with Black"
+          path: "."
+          pattern: "*.py"
+          force: false
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
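The run step above, black redash tests migrations/versions, is the same command a contributor can run locally after pip install black. Black can also be driven from Python for a quick spot check; a minimal sketch, assuming black.format_str keeps its documented keyword-only mode argument (it is a semi-public API, so treat this as illustrative):

    import black

    # A line written in this codebase's pre-Black style.
    source = "revision = '0f740a081d20'\n"

    # FileMode() carries Black's defaults: 88-character lines, double quotes.
    print(black.format_str(source, mode=black.FileMode()))
    # Output: revision = "0f740a081d20"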

CONTRIBUTING.md
@@ -46,8 +46,8 @@ When creating a new bug report, please make sure to:

 If you would like to suggest an enhancement or ask for a new feature:

-- Please check [the roadmap](https://trello.com/b/b2LUHU7A/redash-roadmap) for an existing Trello card for what you want to suggest/ask. If there is one, feel free to upvote it to signal interest or add your comments.
-- If there is no existing card, open a thread in [the forum](https://discuss.redash.io/c/feature-requests) to start a discussion about what you want to suggest. Try to provide as much detail and context as possible and include information about *the problem you want to solve* rather than only *your proposed solution*.
+- Please check [the forum](https://discuss.redash.io/c/feature-requests/5) for existing threads about what you want to suggest/ask. If there is one, feel free to upvote it to signal interest or add your comments.
+- If there is no open thread, you're welcome to start one to have a discussion about what you want to suggest. Try to provide as much detail and context as possible and include information about *the problem you want to solve* rather than only *your proposed solution*.

 ### Pull Requests

@@ -55,9 +55,9 @@ If you would like to suggest an enhancement or ask for a new feature:
 - Include screenshots and animated GIFs in your pull request whenever possible.
 - Please add [documentation](#documentation) for new features or changes in functionality along with the code.
 - Please follow existing code style:
-  - Python: we use PEP8 for Python.
-  - Javascript: we use Airbnb's style guides for [JavaScript](https://github.com/airbnb/javascript#naming-conventions) and [React](https://github.com/airbnb/javascript/blob/master/react) (currently we don't follow Airbnb's convention for naming files, but we're gradually fixing this). To make it automatic and easy, we recommend using [Prettier](https://github.com/prettier/prettier).
+  - Python: we use [Black](https://github.com/psf/black) to auto-format the code.
+  - Javascript: we use [Prettier](https://github.com/prettier/prettier) to auto-format the code.

 ### Documentation

 The project's documentation can be found at [https://redash.io/help/](https://redash.io/help/). The [documentation sources](https://github.com/getredash/website/tree/master/src/pages/kb) are hosted on GitHub. To contribute edits / new pages, you can use GitHub's interface. Click the "Edit on GitHub" link on the documentation page to quickly open the edit interface.
@@ -66,9 +66,9 @@ The project's documentation can be found at [https://redash.io/help/](https://re

 ### Release Method

-We publish a stable release every ~2 months, although the goal is to get to a stable release every month. You can see the change log on the [GitHub releases page](https://github.com/getredash/redash/releases).
+We publish a stable release every ~3-4 months, although the goal is to get to a stable release every month.

-Every build of the master branch updates the latest *RC release*. These releases are usually stable, but might contain regressions and are therefore recommended for "advanced users" only.
+Every build of the master branch updates the *redash/redash:preview* Docker image. These releases are usually stable, but might contain regressions and are therefore recommended for "advanced users" only.

 When we release a new stable release, we also update the *latest* Docker image tag, the EC2 AMIs and GCE images.

migrations/versions/0f740a081d20_*.py
@@ -14,20 +14,20 @@ from redash import models


 # revision identifiers, used by Alembic.
-revision = '0f740a081d20'
-down_revision = 'a92d92aa678e'
+revision = "0f740a081d20"
+down_revision = "a92d92aa678e"
 branch_labels = None
 depends_on = None


 def upgrade():
-    tags_regex = re.compile('^([\w\s]+):|#([\w-]+)', re.I | re.U)
+    tags_regex = re.compile("^([\w\s]+):|#([\w-]+)", re.I | re.U)
     connection = op.get_bind()

     dashboards = connection.execute("SELECT id, name FROM dashboards")

     update_query = text("UPDATE dashboards SET tags = :tags WHERE id = :id")


     for dashboard in dashboards:
         tags = compact(flatten(tags_regex.findall(dashboard[1])))
         if tags:

migrations/versions/1daa601d3ae5_*.py
@@ -10,18 +10,15 @@ import sqlalchemy as sa


 # revision identifiers, used by Alembic.
-revision = '1daa601d3ae5'
-down_revision = '969126bd800f'
+revision = "1daa601d3ae5"
+down_revision = "969126bd800f"
 branch_labels = None
 depends_on = None


 def upgrade():
-    op.add_column(
-        'users',
-        sa.Column('disabled_at', sa.DateTime(True), nullable=True)
-    )
+    op.add_column("users", sa.Column("disabled_at", sa.DateTime(True), nullable=True))


 def downgrade():
-    op.drop_column('users', 'disabled_at')
+    op.drop_column("users", "disabled_at")

migrations/versions/5ec5c84ba61e_*.py
@@ -12,24 +12,28 @@ import sqlalchemy_searchable as ss


 # revision identifiers, used by Alembic.
-revision = '5ec5c84ba61e'
-down_revision = '7671dca4e604'
+revision = "5ec5c84ba61e"
+down_revision = "7671dca4e604"
 branch_labels = None
 depends_on = None


 def upgrade():
     conn = op.get_bind()
-    op.add_column('queries', sa.Column('search_vector', su.TSVectorType()))
-    op.create_index('ix_queries_search_vector', 'queries', ['search_vector'],
-                    unique=False, postgresql_using='gin')
-    ss.sync_trigger(conn, 'queries', 'search_vector',
-                    ['name', 'description', 'query'])
+    op.add_column("queries", sa.Column("search_vector", su.TSVectorType()))
+    op.create_index(
+        "ix_queries_search_vector",
+        "queries",
+        ["search_vector"],
+        unique=False,
+        postgresql_using="gin",
+    )
+    ss.sync_trigger(conn, "queries", "search_vector", ["name", "description", "query"])


 def downgrade():
     conn = op.get_bind()

-    ss.drop_trigger(conn, 'queries', 'search_vector')
-    op.drop_index('ix_queries_search_vector', table_name='queries')
-    op.drop_column('queries', 'search_vector')
+    ss.drop_trigger(conn, "queries", "search_vector")
+    op.drop_index("ix_queries_search_vector", table_name="queries")
+    op.drop_column("queries", "search_vector")

migrations/versions/640888ce445d_*.py
@@ -15,93 +15,110 @@ from redash.models import MutableDict, PseudoJSON


 # revision identifiers, used by Alembic.
-revision = '640888ce445d'
-down_revision = '71477dadd6ef'
+revision = "640888ce445d"
+down_revision = "71477dadd6ef"
 branch_labels = None
 depends_on = None


 def upgrade():
     # Copy "schedule" column into "old_schedule" column
-    op.add_column('queries', sa.Column('old_schedule', sa.String(length=10), nullable=True))
+    op.add_column(
+        "queries", sa.Column("old_schedule", sa.String(length=10), nullable=True)
+    )

     queries = table(
-        'queries',
-        sa.Column('schedule', sa.String(length=10)),
-        sa.Column('old_schedule', sa.String(length=10)))
+        "queries",
+        sa.Column("schedule", sa.String(length=10)),
+        sa.Column("old_schedule", sa.String(length=10)),
+    )

-    op.execute(
-        queries
-        .update()
-        .values({'old_schedule': queries.c.schedule}))
+    op.execute(queries.update().values({"old_schedule": queries.c.schedule}))

     # Recreate "schedule" column as a dict type
-    op.drop_column('queries', 'schedule')
-    op.add_column('queries', sa.Column('schedule', MutableDict.as_mutable(PseudoJSON), nullable=False, server_default=json.dumps({})))
+    op.drop_column("queries", "schedule")
+    op.add_column(
+        "queries",
+        sa.Column(
+            "schedule",
+            MutableDict.as_mutable(PseudoJSON),
+            nullable=False,
+            server_default=json.dumps({}),
+        ),
+    )

     # Move over values from old_schedule
     queries = table(
-        'queries',
-        sa.Column('id', sa.Integer, primary_key=True),
-        sa.Column('schedule', MutableDict.as_mutable(PseudoJSON)),
-        sa.Column('old_schedule', sa.String(length=10)))
+        "queries",
+        sa.Column("id", sa.Integer, primary_key=True),
+        sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)),
+        sa.Column("old_schedule", sa.String(length=10)),
+    )

     conn = op.get_bind()
     for query in conn.execute(queries.select()):
         schedule_json = {
-            'interval': None,
-            'until': None,
-            'day_of_week': None,
-            'time': None
+            "interval": None,
+            "until": None,
+            "day_of_week": None,
+            "time": None,
         }

         if query.old_schedule is not None:
             if ":" in query.old_schedule:
-                schedule_json['interval'] = 86400
-                schedule_json['time'] = query.old_schedule
+                schedule_json["interval"] = 86400
+                schedule_json["time"] = query.old_schedule
             else:
-                schedule_json['interval'] = query.old_schedule
+                schedule_json["interval"] = query.old_schedule

         conn.execute(
-            queries
-            .update()
-            .where(queries.c.id == query.id)
-            .values(schedule=MutableDict(schedule_json)))
+            queries.update()
+            .where(queries.c.id == query.id)
+            .values(schedule=MutableDict(schedule_json))
+        )

-    op.drop_column('queries', 'old_schedule')
+    op.drop_column("queries", "old_schedule")


 def downgrade():
-    op.add_column('queries', sa.Column('old_schedule', MutableDict.as_mutable(PseudoJSON), nullable=False, server_default=json.dumps({})))
+    op.add_column(
+        "queries",
+        sa.Column(
+            "old_schedule",
+            MutableDict.as_mutable(PseudoJSON),
+            nullable=False,
+            server_default=json.dumps({}),
+        ),
+    )

     queries = table(
-        'queries',
-        sa.Column('schedule', MutableDict.as_mutable(PseudoJSON)),
-        sa.Column('old_schedule', MutableDict.as_mutable(PseudoJSON)))
+        "queries",
+        sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)),
+        sa.Column("old_schedule", MutableDict.as_mutable(PseudoJSON)),
+    )

-    op.execute(
-        queries
-        .update()
-        .values({'old_schedule': queries.c.schedule}))
+    op.execute(queries.update().values({"old_schedule": queries.c.schedule}))

-    op.drop_column('queries', 'schedule')
-    op.add_column('queries', sa.Column('schedule', sa.String(length=10), nullable=True))
+    op.drop_column("queries", "schedule")
+    op.add_column("queries", sa.Column("schedule", sa.String(length=10), nullable=True))

     queries = table(
-        'queries',
-        sa.Column('id', sa.Integer, primary_key=True),
-        sa.Column('schedule', sa.String(length=10)),
-        sa.Column('old_schedule', MutableDict.as_mutable(PseudoJSON)))
+        "queries",
+        sa.Column("id", sa.Integer, primary_key=True),
+        sa.Column("schedule", sa.String(length=10)),
+        sa.Column("old_schedule", MutableDict.as_mutable(PseudoJSON)),
+    )

     conn = op.get_bind()
     for query in conn.execute(queries.select()):
-        scheduleValue = query.old_schedule['interval']
+        scheduleValue = query.old_schedule["interval"]
         if scheduleValue <= 86400:
-            scheduleValue = query.old_schedule['time']
+            scheduleValue = query.old_schedule["time"]

         conn.execute(
-            queries
-            .update()
-            .where(queries.c.id == query.id)
-            .values(schedule=scheduleValue))
+            queries.update()
+            .where(queries.c.id == query.id)
+            .values(schedule=scheduleValue)
+        )

-    op.drop_column('queries', 'old_schedule')
+    op.drop_column("queries", "old_schedule")

migrations/versions/65fc9ede4746_*.py
@@ -12,7 +12,7 @@ import sqlalchemy as sa
 # revision identifiers, used by Alembic.
 from sqlalchemy.exc import ProgrammingError

-revision = '65fc9ede4746'
+revision = "65fc9ede4746"
 down_revision = None
 branch_labels = None
 depends_on = None
@@ -20,18 +20,28 @@ depends_on = None

 def upgrade():
     try:
-        op.add_column('queries', sa.Column('is_draft', sa.Boolean, default=True, index=True))
-        op.add_column('dashboards', sa.Column('is_draft', sa.Boolean, default=True, index=True))
+        op.add_column(
+            "queries", sa.Column("is_draft", sa.Boolean, default=True, index=True)
+        )
+        op.add_column(
+            "dashboards", sa.Column("is_draft", sa.Boolean, default=True, index=True)
+        )
         op.execute("UPDATE queries SET is_draft = (name = 'New Query')")
         op.execute("UPDATE dashboards SET is_draft = false")
     except ProgrammingError as e:
         # The columns might exist if you ran the old migrations.
         if 'column "is_draft" of relation "queries" already exists' in str(e):
-            print("Can't run this migration as you already have is_draft columns, please run:")
-            print("./manage.py db stamp {} # you might need to alter the command to match your environment.".format(revision))
+            print(
+                "Can't run this migration as you already have is_draft columns, please run:"
+            )
+            print(
+                "./manage.py db stamp {} # you might need to alter the command to match your environment.".format(
+                    revision
+                )
+            )
             exit()


 def downgrade():
-    op.drop_column('queries', 'is_draft')
-    op.drop_column('dashboards', 'is_draft')
+    op.drop_column("queries", "is_draft")
+    op.drop_column("dashboards", "is_draft")

migrations/versions/6b5be7e0a0ef_*.py
@@ -11,8 +11,8 @@ import sqlalchemy_searchable as ss


 # revision identifiers, used by Alembic.
-revision = '6b5be7e0a0ef'
-down_revision = '5ec5c84ba61e'
+revision = "6b5be7e0a0ef"
+down_revision = "5ec5c84ba61e"
 branch_labels = None
 depends_on = None

@@ -23,7 +23,7 @@ def upgrade():
     conn = op.get_bind()

     metadata = sa.MetaData(bind=conn)
-    queries = sa.Table('queries', metadata, autoload=True)
+    queries = sa.Table("queries", metadata, autoload=True)

     @ss.vectorizer(queries.c.id)
     def integer_vectorizer(column):
@@ -31,18 +31,22 @@ def upgrade():

     ss.sync_trigger(
         conn,
-        'queries',
-        'search_vector',
-        ['id', 'name', 'description', 'query'],
-        metadata=metadata
+        "queries",
+        "search_vector",
+        ["id", "name", "description", "query"],
+        metadata=metadata,
     )


 def downgrade():
     conn = op.get_bind()
-    ss.drop_trigger(conn, 'queries', 'search_vector')
-    op.drop_index('ix_queries_search_vector', table_name='queries')
-    op.create_index('ix_queries_search_vector', 'queries', ['search_vector'],
-                    unique=False, postgresql_using='gin')
-    ss.sync_trigger(conn, 'queries', 'search_vector',
-                    ['name', 'description', 'query'])
+    ss.drop_trigger(conn, "queries", "search_vector")
+    op.drop_index("ix_queries_search_vector", table_name="queries")
+    op.create_index(
+        "ix_queries_search_vector",
+        "queries",
+        ["search_vector"],
+        unique=False,
+        postgresql_using="gin",
+    )
+    ss.sync_trigger(conn, "queries", "search_vector", ["name", "description", "query"])

migrations/versions/71477dadd6ef_*.py
@@ -10,15 +10,17 @@ import sqlalchemy as sa


 # revision identifiers, used by Alembic.
-revision = '71477dadd6ef'
-down_revision = '0f740a081d20'
+revision = "71477dadd6ef"
+down_revision = "0f740a081d20"
 branch_labels = None
 depends_on = None


 def upgrade():
-    op.create_unique_constraint('unique_favorite', 'favorites', ['object_type', 'object_id', 'user_id'])
+    op.create_unique_constraint(
+        "unique_favorite", "favorites", ["object_type", "object_id", "user_id"]
+    )


 def downgrade():
-    op.drop_constraint('unique_favorite', 'favorites', type_='unique')
+    op.drop_constraint("unique_favorite", "favorites", type_="unique")

migrations/versions/73beceabb948_*.py
@@ -13,8 +13,8 @@ from sqlalchemy.sql import table
 from redash.models import MutableDict, PseudoJSON

 # revision identifiers, used by Alembic.
-revision = '73beceabb948'
-down_revision = 'e7f8a917aa8e'
+revision = "73beceabb948"
+down_revision = "e7f8a917aa8e"
 branch_labels = None
 depends_on = None

@@ -26,30 +26,32 @@ def is_empty_schedule(schedule):
     if schedule == {}:
         return True

-    if schedule.get('interval') is None and schedule.get('until') is None and schedule.get('day_of_week') is None and schedule.get('time') is None:
+    if (
+        schedule.get("interval") is None
+        and schedule.get("until") is None
+        and schedule.get("day_of_week") is None
+        and schedule.get("time") is None
+    ):
         return True

     return False


 def upgrade():
-    op.alter_column('queries', 'schedule',
-                    nullable=True,
-                    server_default=None)
+    op.alter_column("queries", "schedule", nullable=True, server_default=None)

     queries = table(
-        'queries',
-        sa.Column('id', sa.Integer, primary_key=True),
-        sa.Column('schedule', MutableDict.as_mutable(PseudoJSON)))
+        "queries",
+        sa.Column("id", sa.Integer, primary_key=True),
+        sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)),
+    )

     conn = op.get_bind()
     for query in conn.execute(queries.select()):
         if is_empty_schedule(query.schedule):
             conn.execute(
-                queries
-                .update()
-                .where(queries.c.id == query.id)
-                .values(schedule=None))
+                queries.update().where(queries.c.id == query.id).values(schedule=None)
+            )


 def downgrade():

migrations/versions/7671dca4e604_*.py
@@ -10,16 +10,18 @@ import sqlalchemy as sa


 # revision identifiers, used by Alembic.
-revision = '7671dca4e604'
-down_revision = 'd1eae8b9893e'
+revision = "7671dca4e604"
+down_revision = "d1eae8b9893e"
 branch_labels = None
 depends_on = None


 def upgrade():
-    op.add_column('users', sa.Column('profile_image_url', sa.String(),
-                                     nullable=True, server_default=None))
+    op.add_column(
+        "users",
+        sa.Column("profile_image_url", sa.String(), nullable=True, server_default=None),
+    )


 def downgrade():
-    op.drop_column('users', 'profile_image_url')
+    op.drop_column("users", "profile_image_url")

migrations/versions/969126bd800f_*.py
@@ -14,8 +14,8 @@ from redash.models import Dashboard, Widget, db


 # revision identifiers, used by Alembic.
-revision = '969126bd800f'
-down_revision = '6b5be7e0a0ef'
+revision = "969126bd800f"
+down_revision = "6b5be7e0a0ef"
 branch_labels = None
 depends_on = None

@@ -26,17 +26,18 @@ def upgrade():
     print("Updating dashboards position data:")
     dashboard_result = db.session.execute("SELECT id, layout FROM dashboards")
     for dashboard in dashboard_result:
-        print("  Updating dashboard: {}".format(dashboard['id']))
-        layout = simplejson.loads(dashboard['layout'])
+        print("  Updating dashboard: {}".format(dashboard["id"]))
+        layout = simplejson.loads(dashboard["layout"])

         print("  Building widgets map:")
         widgets = {}
         widget_result = db.session.execute(
-            "SELECT id, options, width FROM widgets WHERE dashboard_id=:dashboard_id",
-            {"dashboard_id" : dashboard['id']})
+            "SELECT id, options, width FROM widgets WHERE dashboard_id=:dashboard_id",
+            {"dashboard_id": dashboard["id"]},
+        )
         for w in widget_result:
-            print("    Widget: {}".format(w['id']))
-            widgets[w['id']] = w
+            print("    Widget: {}".format(w["id"]))
+            widgets[w["id"]] = w
         widget_result.close()

         print("  Iterating over layout:")
@@ -52,25 +53,32 @@ def upgrade():
                 if widget is None:
                     continue

-                options = simplejson.loads(widget['options']) or {}
-                options['position'] = {
+                options = simplejson.loads(widget["options"]) or {}
+                options["position"] = {
                     "row": row_index,
                     "col": column_index * column_size,
-                    "sizeX": column_size * widget.width
+                    "sizeX": column_size * widget.width,
                 }

                 db.session.execute(
                     "UPDATE widgets SET options=:options WHERE id=:id",
-                    {"options" : simplejson.dumps(options), "id" : widget_id})
+                    {"options": simplejson.dumps(options), "id": widget_id},
+                )

     dashboard_result.close()
     db.session.commit()

     # Remove legacy columns no longer in use.
-    op.drop_column('widgets', 'type')
-    op.drop_column('widgets', 'query_id')
+    op.drop_column("widgets", "type")
+    op.drop_column("widgets", "query_id")


 def downgrade():
-    op.add_column('widgets', sa.Column('query_id', sa.INTEGER(), autoincrement=False, nullable=True))
-    op.add_column('widgets', sa.Column('type', sa.VARCHAR(length=100), autoincrement=False, nullable=True))
+    op.add_column(
+        "widgets",
+        sa.Column("query_id", sa.INTEGER(), autoincrement=False, nullable=True),
+    )
+    op.add_column(
+        "widgets",
+        sa.Column("type", sa.VARCHAR(length=100), autoincrement=False, nullable=True),
+    )

migrations/versions/98af61feea92_*.py
@@ -13,36 +13,52 @@ from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine

 from redash import settings
 from redash.utils.configuration import ConfigurationContainer
-from redash.models.types import EncryptedConfiguration, Configuration, MutableDict, MutableList, PseudoJSON
+from redash.models.types import (
+    EncryptedConfiguration,
+    Configuration,
+    MutableDict,
+    MutableList,
+    PseudoJSON,
+)

 # revision identifiers, used by Alembic.
-revision = '98af61feea92'
-down_revision = '73beceabb948'
+revision = "98af61feea92"
+down_revision = "73beceabb948"
 branch_labels = None
 depends_on = None


 def upgrade():
-    op.add_column('data_sources', sa.Column('encrypted_options', postgresql.BYTEA(), nullable=True))
+    op.add_column(
+        "data_sources",
+        sa.Column("encrypted_options", postgresql.BYTEA(), nullable=True),
+    )

     # copy values
     data_sources = table(
-        'data_sources',
-        sa.Column('id', sa.Integer, primary_key=True),
-        sa.Column('encrypted_options', ConfigurationContainer.as_mutable(EncryptedConfiguration(sa.Text, settings.DATASOURCE_SECRET_KEY, FernetEngine))),
-        sa.Column('options', ConfigurationContainer.as_mutable(Configuration)))
+        "data_sources",
+        sa.Column("id", sa.Integer, primary_key=True),
+        sa.Column(
+            "encrypted_options",
+            ConfigurationContainer.as_mutable(
+                EncryptedConfiguration(
+                    sa.Text, settings.DATASOURCE_SECRET_KEY, FernetEngine
+                )
+            ),
+        ),
+        sa.Column("options", ConfigurationContainer.as_mutable(Configuration)),
+    )

     conn = op.get_bind()
     for ds in conn.execute(data_sources.select()):
         conn.execute(
-            data_sources
-            .update()
+            data_sources.update()
             .where(data_sources.c.id == ds.id)
-            .values(encrypted_options=ds.options))
+            .values(encrypted_options=ds.options)
+        )

-    op.drop_column('data_sources', 'options')
-    op.alter_column('data_sources', 'encrypted_options',
-                    nullable=False)
+    op.drop_column("data_sources", "options")
+    op.alter_column("data_sources", "encrypted_options", nullable=False)


 def downgrade():

migrations/versions/a92d92aa678e_*.py
@@ -13,17 +13,21 @@ from sqlalchemy.dialects import postgresql
 from redash import models

 # revision identifiers, used by Alembic.
-revision = 'a92d92aa678e'
-down_revision = 'e7004224f284'
+revision = "a92d92aa678e"
+down_revision = "e7004224f284"
 branch_labels = None
 depends_on = None


 def upgrade():
-    op.add_column('dashboards', sa.Column('tags', postgresql.ARRAY(sa.Unicode()), nullable=True))
-    op.add_column('queries', sa.Column('tags', postgresql.ARRAY(sa.Unicode()), nullable=True))
+    op.add_column(
+        "dashboards", sa.Column("tags", postgresql.ARRAY(sa.Unicode()), nullable=True)
+    )
+    op.add_column(
+        "queries", sa.Column("tags", postgresql.ARRAY(sa.Unicode()), nullable=True)
+    )


 def downgrade():
-    op.drop_column('queries', 'tags')
-    op.drop_column('dashboards', 'tags')
+    op.drop_column("queries", "tags")
+    op.drop_column("dashboards", "tags")

migrations/versions/d1eae8b9893e_*.py
@@ -10,16 +10,20 @@ import sqlalchemy as sa


 # revision identifiers, used by Alembic.
-revision = 'd1eae8b9893e'
-down_revision = '65fc9ede4746'
+revision = "d1eae8b9893e"
+down_revision = "65fc9ede4746"
 branch_labels = None
 depends_on = None


 def upgrade():
-    op.add_column('queries', sa.Column('schedule_failures', sa.Integer(),
-                                       nullable=False, server_default='0'))
+    op.add_column(
+        "queries",
+        sa.Column(
+            "schedule_failures", sa.Integer(), nullable=False, server_default="0"
+        ),
+    )


 def downgrade():
-    op.drop_column('queries', 'schedule_failures')
+    op.drop_column("queries", "schedule_failures")

migrations/versions/d4c798575877_*.py
@@ -10,24 +10,25 @@ import sqlalchemy as sa


 # revision identifiers, used by Alembic.
-revision = 'd4c798575877'
-down_revision = '1daa601d3ae5'
+revision = "d4c798575877"
+down_revision = "1daa601d3ae5"
 branch_labels = None
 depends_on = None


 def upgrade():
-    op.create_table('favorites',
-        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=False),
-        sa.Column('created_at', sa.DateTime(timezone=True), nullable=False),
-        sa.Column('id', sa.Integer(), nullable=False),
-        sa.Column('object_type', sa.Unicode(length=255), nullable=False),
-        sa.Column('object_id', sa.Integer(), nullable=False),
-        sa.Column('user_id', sa.Integer(), nullable=False),
-        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
-        sa.PrimaryKeyConstraint('id')
-    )
+    op.create_table(
+        "favorites",
+        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
+        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
+        sa.Column("id", sa.Integer(), nullable=False),
+        sa.Column("object_type", sa.Unicode(length=255), nullable=False),
+        sa.Column("object_id", sa.Integer(), nullable=False),
+        sa.Column("user_id", sa.Integer(), nullable=False),
+        sa.ForeignKeyConstraint(["user_id"], ["users.id"]),
+        sa.PrimaryKeyConstraint("id"),
+    )


 def downgrade():
-    op.drop_table('favorites')
+    op.drop_table("favorites")

migrations/versions/e5c7a4e2df4d_*.py
@@ -12,15 +12,15 @@ from redash import redis_connection


 # revision identifiers, used by Alembic.
-revision = 'e5c7a4e2df4d'
-down_revision = '98af61feea92'
+revision = "e5c7a4e2df4d"
+down_revision = "98af61feea92"
 branch_labels = None
 depends_on = None


-DONE_LIST = 'query_task_trackers:done'
-WAITING_LIST = 'query_task_trackers:waiting'
-IN_PROGRESS_LIST = 'query_task_trackers:in_progress'
+DONE_LIST = "query_task_trackers:done"
+WAITING_LIST = "query_task_trackers:waiting"
+IN_PROGRESS_LIST = "query_task_trackers:in_progress"


 def prune(list_name, keep_count, max_keys=100):

migrations/versions/e7004224f284_*.py
@@ -10,17 +10,17 @@ import sqlalchemy as sa


 # revision identifiers, used by Alembic.
-revision = 'e7004224f284'
-down_revision = 'd4c798575877'
+revision = "e7004224f284"
+down_revision = "d4c798575877"
 branch_labels = None
 depends_on = None


 def upgrade():
-    op.add_column('favorites', sa.Column('org_id', sa.Integer(), nullable=False))
-    op.create_foreign_key(None, 'favorites', 'organizations', ['org_id'], ['id'])
+    op.add_column("favorites", sa.Column("org_id", sa.Integer(), nullable=False))
+    op.create_foreign_key(None, "favorites", "organizations", ["org_id"], ["id"])


 def downgrade():
-    op.drop_constraint(None, 'favorites', type_='foreignkey')
-    op.drop_column('favorites', 'org_id')
+    op.drop_constraint(None, "favorites", type_="foreignkey")
+    op.drop_column("favorites", "org_id")

migrations/versions/e7f8a917aa8e_*.py
@@ -10,15 +10,23 @@ import sqlalchemy as sa
 from sqlalchemy.dialects import postgresql

 # revision identifiers, used by Alembic.
-revision = 'e7f8a917aa8e'
-down_revision = '640888ce445d'
+revision = "e7f8a917aa8e"
+down_revision = "640888ce445d"
 branch_labels = None
 depends_on = None


 def upgrade():
-    op.add_column('users', sa.Column('details', postgresql.JSON(astext_type=sa.Text()), server_default='{}', nullable=True))
+    op.add_column(
+        "users",
+        sa.Column(
+            "details",
+            postgresql.JSON(astext_type=sa.Text()),
+            server_default="{}",
+            nullable=True,
+        ),
+    )


 def downgrade():
-    op.drop_column('users', 'details')
+    op.drop_column("users", "details")

redash/__init__.py
@@ -15,12 +15,13 @@ from .app import create_app # noqa
 from .query_runner import import_query_runners
 from .destinations import import_destinations

-__version__ = '9.0.0-alpha'
+__version__ = "9.0.0-alpha"


 if os.environ.get("REMOTE_DEBUG"):
     import ptvsd
-    ptvsd.enable_attach(address=('0.0.0.0', 5678))
+
+    ptvsd.enable_attach(address=("0.0.0.0", 5678))


 def setup_logging():
@@ -32,7 +33,12 @@ def setup_logging():

     # Make noisy libraries less noisy
     if settings.LOG_LEVEL != "DEBUG":
-        for name in ["passlib", "requests.packages.urllib3", "snowflake.connector", "apiclient"]:
+        for name in [
+            "passlib",
+            "requests.packages.urllib3",
+            "snowflake.connector",
+            "apiclient",
+        ]:
             logging.getLogger(name).setLevel("ERROR")

@@ -42,7 +48,9 @@ redis_connection = redis.from_url(settings.REDIS_URL)
 rq_redis_connection = redis.from_url(settings.RQ_REDIS_URL)
 mail = Mail()
 migrate = Migrate()
-statsd_client = StatsClient(host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX)
+statsd_client = StatsClient(
+    host=settings.STATSD_HOST, port=settings.STATSD_PORT, prefix=settings.STATSD_PREFIX
+)
 limiter = Limiter(key_func=get_ipaddr, storage_uri=settings.LIMITER_STORAGE)

 import_query_runners(settings.QUERY_RUNNERS)

redash/app.py
@@ -6,17 +6,20 @@ from . import settings

 class Redash(Flask):
     """A custom Flask app for Redash"""

     def __init__(self, *args, **kwargs):
-        kwargs.update({
-            'template_folder': settings.STATIC_ASSETS_PATH,
-            'static_folder': settings.STATIC_ASSETS_PATH,
-            'static_url_path': '/static',
-        })
+        kwargs.update(
+            {
+                "template_folder": settings.STATIC_ASSETS_PATH,
+                "static_folder": settings.STATIC_ASSETS_PATH,
+                "static_url_path": "/static",
+            }
+        )
         super(Redash, self).__init__(__name__, *args, **kwargs)
         # Make sure we get the right referral address even behind proxies like nginx.
         self.wsgi_app = ProxyFix(self.wsgi_app, settings.PROXIES_COUNT)
         # Configure Redash using our settings
-        self.config.from_object('redash.settings')
+        self.config.from_object("redash.settings")


 def create_app():

redash/authentication/__init__.py
@@ -15,16 +15,18 @@ from sqlalchemy.orm.exc import NoResultFound
 from werkzeug.exceptions import Unauthorized

 login_manager = LoginManager()
-logger = logging.getLogger('authentication')
+logger = logging.getLogger("authentication")


 def get_login_url(external=False, next="/"):
     if settings.MULTI_ORG and current_org == None:
-        login_url = '/'
+        login_url = "/"
     elif settings.MULTI_ORG:
-        login_url = url_for('redash.login', org_slug=current_org.slug, next=next, _external=external)
+        login_url = url_for(
+            "redash.login", org_slug=current_org.slug, next=next, _external=external
+        )
     else:
-        login_url = url_for('redash.login', next=next, _external=external)
+        login_url = url_for("redash.login", next=next, _external=external)

     return login_url

@@ -60,24 +62,28 @@ def load_user(user_id_with_identity):

 def request_loader(request):
     user = None
-    if settings.AUTH_TYPE == 'hmac':
+    if settings.AUTH_TYPE == "hmac":
         user = hmac_load_user_from_request(request)
-    elif settings.AUTH_TYPE == 'api_key':
+    elif settings.AUTH_TYPE == "api_key":
         user = api_key_load_user_from_request(request)
     else:
-        logger.warning("Unknown authentication type ({}). Using default (HMAC).".format(settings.AUTH_TYPE))
+        logger.warning(
+            "Unknown authentication type ({}). Using default (HMAC).".format(
+                settings.AUTH_TYPE
+            )
+        )
         user = hmac_load_user_from_request(request)

-    if org_settings['auth_jwt_login_enabled'] and user is None:
+    if org_settings["auth_jwt_login_enabled"] and user is None:
         user = jwt_token_load_user_from_request(request)
     return user


 def hmac_load_user_from_request(request):
-    signature = request.args.get('signature')
-    expires = float(request.args.get('expires') or 0)
-    query_id = request.view_args.get('query_id', None)
-    user_id = request.args.get('user_id', None)
+    signature = request.args.get("signature")
+    expires = float(request.args.get("expires") or 0)
+    query_id = request.view_args.get("query_id", None)
+    user_id = request.args.get("user_id", None)

     # TODO: 3600 should be a setting
     if signature and time.time() < expires <= time.time() + 3600:
@@ -93,7 +99,12 @@ def hmac_load_user_from_request(request):
             calculated_signature = sign(query.api_key, request.path, expires)

             if query.api_key and signature == calculated_signature:
-                return models.ApiUser(query.api_key, query.org, list(query.groups.keys()), name="ApiKey: Query {}".format(query.id))
+                return models.ApiUser(
+                    query.api_key,
+                    query.org,
+                    list(query.groups.keys()),
+                    name="ApiKey: Query {}".format(query.id),
+                )

     return None

@@ -118,22 +129,27 @@ def get_user_from_api_key(api_key, query_id):
     if query_id:
         query = models.Query.get_by_id_and_org(query_id, org)
         if query and query.api_key == api_key:
-            user = models.ApiUser(api_key, query.org, list(query.groups.keys()), name="ApiKey: Query {}".format(query.id))
+            user = models.ApiUser(
+                api_key,
+                query.org,
+                list(query.groups.keys()),
+                name="ApiKey: Query {}".format(query.id),
+            )

     return user


 def get_api_key_from_request(request):
-    api_key = request.args.get('api_key', None)
+    api_key = request.args.get("api_key", None)

     if api_key is not None:
         return api_key

-    if request.headers.get('Authorization'):
-        auth_header = request.headers.get('Authorization')
-        api_key = auth_header.replace('Key ', '', 1)
-    elif request.view_args is not None and request.view_args.get('token'):
-        api_key = request.view_args['token']
+    if request.headers.get("Authorization"):
+        auth_header = request.headers.get("Authorization")
+        api_key = auth_header.replace("Key ", "", 1)
+    elif request.view_args is not None and request.view_args.get("token"):
+        api_key = request.view_args["token"]

     return api_key

@@ -141,7 +157,7 @@ def get_api_key_from_request(request):
 def api_key_load_user_from_request(request):
     api_key = get_api_key_from_request(request)
     if request.view_args is not None:
-        query_id = request.view_args.get('query_id', None)
+        query_id = request.view_args.get("query_id", None)
         user = get_user_from_api_key(api_key, query_id)
     else:
         user = None
@@ -154,44 +170,44 @@ def jwt_token_load_user_from_request(request):

     payload = None

-    if org_settings['auth_jwt_auth_cookie_name']:
-        jwt_token = request.cookies.get(org_settings['auth_jwt_auth_cookie_name'], None)
-    elif org_settings['auth_jwt_auth_header_name']:
-        jwt_token = request.headers.get(org_settings['auth_jwt_auth_header_name'], None)
+    if org_settings["auth_jwt_auth_cookie_name"]:
+        jwt_token = request.cookies.get(org_settings["auth_jwt_auth_cookie_name"], None)
+    elif org_settings["auth_jwt_auth_header_name"]:
+        jwt_token = request.headers.get(org_settings["auth_jwt_auth_header_name"], None)
     else:
         return None

     if jwt_token:
         payload, token_is_valid = jwt_auth.verify_jwt_token(
             jwt_token,
-            expected_issuer=org_settings['auth_jwt_auth_issuer'],
-            expected_audience=org_settings['auth_jwt_auth_audience'],
-            algorithms=org_settings['auth_jwt_auth_algorithms'],
-            public_certs_url=org_settings['auth_jwt_auth_public_certs_url'],
+            expected_issuer=org_settings["auth_jwt_auth_issuer"],
+            expected_audience=org_settings["auth_jwt_auth_audience"],
+            algorithms=org_settings["auth_jwt_auth_algorithms"],
+            public_certs_url=org_settings["auth_jwt_auth_public_certs_url"],
         )
         if not token_is_valid:
-            raise Unauthorized('Invalid JWT token')
+            raise Unauthorized("Invalid JWT token")

     if not payload:
         return

     try:
-        user = models.User.get_by_email_and_org(payload['email'], org)
+        user = models.User.get_by_email_and_org(payload["email"], org)
     except models.NoResultFound:
-        user = create_and_login_user(current_org, payload['email'], payload['email'])
+        user = create_and_login_user(current_org, payload["email"], payload["email"])

     return user


 def log_user_logged_in(app, user):
     event = {
-        'org_id': user.org_id,
-        'user_id': user.id,
-        'action': 'login',
-        'object_type': 'redash',
-        'timestamp': int(time.time()),
-        'user_agent': request.user_agent.string,
-        'ip': request.remote_addr
+        "org_id": user.org_id,
+        "user_id": user.id,
+        "action": "login",
+        "object_type": "redash",
+        "timestamp": int(time.time()),
+        "user_agent": request.user_agent.string,
+        "ip": request.remote_addr,
     }

     record_event.delay(event)
@@ -199,8 +215,10 @@ def log_user_logged_in(app, user):

 @login_manager.unauthorized_handler
 def redirect_to_login():
-    if request.is_xhr or '/api/' in request.path:
-        response = jsonify({'message': "Couldn't find resource. Please login and try again."})
+    if request.is_xhr or "/api/" in request.path:
+        response = jsonify(
+            {"message": "Couldn't find resource. Please login and try again."}
+        )
         response.status_code = 404
         return response

@@ -213,17 +231,22 @@ def logout_and_redirect_to_index():
     logout_user()

     if settings.MULTI_ORG and current_org == None:
-        index_url = '/'
+        index_url = "/"
     elif settings.MULTI_ORG:
-        index_url = url_for('redash.index', org_slug=current_org.slug, _external=False)
+        index_url = url_for("redash.index", org_slug=current_org.slug, _external=False)
     else:
-        index_url = url_for('redash.index', _external=False)
+        index_url = url_for("redash.index", _external=False)

     return redirect(index_url)


 def init_app(app):
-    from redash.authentication import google_oauth, saml_auth, remote_user_auth, ldap_auth
+    from redash.authentication import (
+        google_oauth,
+        saml_auth,
+        remote_user_auth,
+        ldap_auth,
+    )

     login_manager.init_app(app)
     login_manager.anonymous_user = models.AnonymousUser
@@ -251,8 +274,14 @@ def create_and_login_user(org, name, email, picture=None):
         models.db.session.commit()
     except NoResultFound:
         logger.debug("Creating user object (%r)", name)
-        user_object = models.User(org=org, name=name, email=email, is_invitation_pending=False,
-                                  _profile_image_url=picture, group_ids=[org.default_group.id])
+        user_object = models.User(
+            org=org,
+            name=name,
+            email=email,
+            is_invitation_pending=False,
+            _profile_image_url=picture,
+            group_ids=[org.default_group.id],
+        )
         models.db.session.add(user_object)
         models.db.session.commit()

@@ -263,18 +292,18 @@ def create_and_login_user(org, name, email, picture=None):

 def get_next_path(unsafe_next_path):
     if not unsafe_next_path:
-        return ''
+        return ""

     # Preventing open redirection attacks
     parts = list(urlsplit(unsafe_next_path))
-    parts[0] = ''  # clear scheme
-    parts[1] = ''  # clear netloc
+    parts[0] = ""  # clear scheme
+    parts[1] = ""  # clear netloc
     safe_next_path = urlunsplit(parts)

     # If the original path was a URL, we might end up with an empty
     # safe url, which will redirect to the login page. Changing to
     # relative root to redirect to the app root after login.
     if not safe_next_path:
-        safe_next_path = './'
+        safe_next_path = "./"

     return safe_next_path

redash/authentication/account.py
@@ -4,6 +4,7 @@ from flask import render_template
 from redash import settings
 from redash.tasks import send_mail
 from redash.utils import base_url
+
 # noinspection PyUnresolvedReferences
 from itsdangerous import URLSafeTimedSerializer, SignatureExpired, BadSignature

@@ -42,12 +43,9 @@ def validate_token(token):


 def send_verify_email(user, org):
-    context = {
-        'user': user,
-        'verify_url': verify_link_for_user(user),
-    }
-    html_content = render_template('emails/verify.html', **context)
-    text_content = render_template('emails/verify.txt', **context)
+    context = {"user": user, "verify_url": verify_link_for_user(user)}
+    html_content = render_template("emails/verify.html", **context)
+    text_content = render_template("emails/verify.txt", **context)
     subject = "{}, please verify your email address".format(user.name)

     send_mail.delay([user.email], subject, html_content, text_content)
@@ -55,8 +53,8 @@ def send_verify_email(user, org):

 def send_invite_email(inviter, invited, invite_url, org):
     context = dict(inviter=inviter, invited=invited, org=org, invite_url=invite_url)
-    html_content = render_template('emails/invite.html', **context)
-    text_content = render_template('emails/invite.txt', **context)
+    html_content = render_template("emails/invite.html", **context)
+    text_content = render_template("emails/invite.txt", **context)
     subject = "{} invited you to join Redash".format(inviter.name)

     send_mail.delay([invited.email], subject, html_content, text_content)
@@ -65,17 +63,17 @@ def send_invite_email(inviter, invited, invite_url, org):
 def send_password_reset_email(user):
     reset_link = reset_link_for_user(user)
     context = dict(user=user, reset_link=reset_link)
-    html_content = render_template('emails/reset.html', **context)
-    text_content = render_template('emails/reset.txt', **context)
+    html_content = render_template("emails/reset.html", **context)
+    text_content = render_template("emails/reset.txt", **context)
     subject = "Reset your password"

     send_mail.delay([user.email], subject, html_content, text_content)
     return reset_link


 def send_user_disabled_email(user):
-    html_content = render_template('emails/reset_disabled.html', user=user)
-    text_content = render_template('emails/reset_disabled.txt', user=user)
+    html_content = render_template("emails/reset_disabled.html", user=user)
+    text_content = render_template("emails/reset_disabled.txt", user=user)
     subject = "Your Redash account is disabled"

     send_mail.delay([user.email], subject, html_content, text_content)

redash/authentication/google_oauth.py
@@ -4,35 +4,43 @@ from flask import redirect, url_for, Blueprint, flash, request, session
 from flask_oauthlib.client import OAuth

 from redash import models, settings
-from redash.authentication import create_and_login_user, logout_and_redirect_to_index, get_next_path
+from redash.authentication import (
+    create_and_login_user,
+    logout_and_redirect_to_index,
+    get_next_path,
+)
 from redash.authentication.org_resolving import current_org

-logger = logging.getLogger('google_oauth')
+logger = logging.getLogger("google_oauth")

 oauth = OAuth()
-blueprint = Blueprint('google_oauth', __name__)
+blueprint = Blueprint("google_oauth", __name__)


 def google_remote_app():
-    if 'google' not in oauth.remote_apps:
-        oauth.remote_app('google',
-                         base_url='https://www.google.com/accounts/',
-                         authorize_url='https://accounts.google.com/o/oauth2/auth?prompt=select_account+consent',
-                         request_token_url=None,
-                         request_token_params={
-                             'scope': 'https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile',
-                         },
-                         access_token_url='https://accounts.google.com/o/oauth2/token',
-                         access_token_method='POST',
-                         consumer_key=settings.GOOGLE_CLIENT_ID,
-                         consumer_secret=settings.GOOGLE_CLIENT_SECRET)
+    if "google" not in oauth.remote_apps:
+        oauth.remote_app(
+            "google",
+            base_url="https://www.google.com/accounts/",
+            authorize_url="https://accounts.google.com/o/oauth2/auth?prompt=select_account+consent",
+            request_token_url=None,
+            request_token_params={
+                "scope": "https://www.googleapis.com/auth/userinfo.email https://www.googleapis.com/auth/userinfo.profile"
+            },
+            access_token_url="https://accounts.google.com/o/oauth2/token",
+            access_token_method="POST",
+            consumer_key=settings.GOOGLE_CLIENT_ID,
+            consumer_secret=settings.GOOGLE_CLIENT_SECRET,
+        )

     return oauth.google


 def get_user_profile(access_token):
-    headers = {'Authorization': 'OAuth {}'.format(access_token)}
-    response = requests.get('https://www.googleapis.com/oauth2/v1/userinfo', headers=headers)
+    headers = {"Authorization": "OAuth {}".format(access_token)}
+    response = requests.get(
+        "https://www.googleapis.com/oauth2/v1/userinfo", headers=headers
+    )

     if response.status_code == 401:
         logger.warning("Failed getting user profile (response code 401).")
@@ -45,8 +53,8 @@ def verify_profile(org, profile):
     if org.is_public:
         return True

-    email = profile['email']
-    domain = email.split('@')[-1]
+    email = profile["email"]
+    domain = email.split("@")[-1]

     if domain in org.google_apps_domains:
         return True
@@ -57,52 +65,60 @@ def verify_profile(org, profile):
     return False


-@blueprint.route('/<org_slug>/oauth/google', endpoint="authorize_org")
+@blueprint.route("/<org_slug>/oauth/google", endpoint="authorize_org")
 def org_login(org_slug):
-    session['org_slug'] = current_org.slug
-    return redirect(url_for(".authorize", next=request.args.get('next', None)))
+    session["org_slug"] = current_org.slug
+    return redirect(url_for(".authorize", next=request.args.get("next", None)))


-@blueprint.route('/oauth/google', endpoint="authorize")
+@blueprint.route("/oauth/google", endpoint="authorize")
 def login():
-    callback = url_for('.callback', _external=True)
-    next_path = request.args.get('next', url_for("redash.index", org_slug=session.get('org_slug')))
+    callback = url_for(".callback", _external=True)
+    next_path = request.args.get(
+        "next", url_for("redash.index", org_slug=session.get("org_slug"))
+    )
     logger.debug("Callback url: %s", callback)
     logger.debug("Next is: %s", next_path)
     return google_remote_app().authorize(callback=callback, state=next_path)


-@blueprint.route('/oauth/google_callback', endpoint="callback")
+@blueprint.route("/oauth/google_callback", endpoint="callback")
 def authorized():
     resp = google_remote_app().authorized_response()
-    access_token = resp['access_token']
+    access_token = resp["access_token"]

     if access_token is None:
         logger.warning("Access token missing in call back request.")
         flash("Validation error. Please retry.")
-        return redirect(url_for('redash.login'))
+        return redirect(url_for("redash.login"))

     profile = get_user_profile(access_token)
     if profile is None:
         flash("Validation error. Please retry.")
-        return redirect(url_for('redash.login'))
+        return redirect(url_for("redash.login"))

-    if 'org_slug' in session:
-        org = models.Organization.get_by_slug(session.pop('org_slug'))
+    if "org_slug" in session:
+        org = models.Organization.get_by_slug(session.pop("org_slug"))
     else:
         org = current_org

     if not verify_profile(org, profile):
-        logger.warning("User tried to login with unauthorized domain name: %s (org: %s)", profile['email'], org)
-        flash("Your Google Apps account ({}) isn't allowed.".format(profile['email']))
-        return redirect(url_for('redash.login', org_slug=org.slug))
+        logger.warning(
+            "User tried to login with unauthorized domain name: %s (org: %s)",
+            profile["email"],
+            org,
+        )
+        flash("Your Google Apps account ({}) isn't allowed.".format(profile["email"]))
+        return redirect(url_for("redash.login", org_slug=org.slug))

-    picture_url = "%s?sz=40" % profile['picture']
-    user = create_and_login_user(org, profile['name'], profile['email'], picture_url)
+    picture_url = "%s?sz=40" % profile["picture"]
+    user = create_and_login_user(org, profile["name"], profile["email"], picture_url)
     if user is None:
         return logout_and_redirect_to_index()

-    unsafe_next_path = request.args.get('state') or url_for("redash.index", org_slug=org.slug)
+    unsafe_next_path = request.args.get("state") or url_for(
+        "redash.index", org_slug=org.slug
+    )
     next_path = get_next_path(unsafe_next_path)

     return redirect(next_path)

redash/authentication/jwt_auth.py
@@ -3,7 +3,7 @@ import jwt
 import requests
 import simplejson

-logger = logging.getLogger('jwt_auth')
+logger = logging.getLogger("jwt_auth")


 def get_public_keys(url):
@@ -18,10 +18,12 @@ def get_public_keys(url):
     r = requests.get(url)
     r.raise_for_status()
     data = r.json()
-    if 'keys' in data:
+    if "keys" in data:
         public_keys = []
-        for key_dict in data['keys']:
-            public_key = jwt.algorithms.RSAAlgorithm.from_jwk(simplejson.dumps(key_dict))
+        for key_dict in data["keys"]:
+            public_key = jwt.algorithms.RSAAlgorithm.from_jwk(
+                simplejson.dumps(key_dict)
+            )
             public_keys.append(public_key)

         get_public_keys.key_cache[url] = public_keys
@@ -34,13 +36,15 @@ def get_public_keys(url):
 get_public_keys.key_cache = {}


-def verify_jwt_token(jwt_token, expected_issuer, expected_audience, algorithms, public_certs_url):
+def verify_jwt_token(
+    jwt_token, expected_issuer, expected_audience, algorithms, public_certs_url
+):
     # https://developers.cloudflare.com/access/setting-up-access/validate-jwt-tokens/
     # https://cloud.google.com/iap/docs/signed-headers-howto
     # Loop through the keys since we can't pass the key set to the decoder
     keys = get_public_keys(public_certs_url)

-    key_id = jwt.get_unverified_header(jwt_token).get('kid', '')
+    key_id = jwt.get_unverified_header(jwt_token).get("kid", "")
     if key_id and isinstance(keys, dict):
         keys = [keys.get(key_id)]

@@ -50,14 +54,11 @@ def verify_jwt_token(jwt_token, expected_issuer, expected_audience, algorithms,
         try:
             # decode returns the claims which has the email if you need it
             payload = jwt.decode(
-                jwt_token,
-                key=key,
-                audience=expected_audience,
-                algorithms=algorithms
+                jwt_token, key=key, audience=expected_audience, algorithms=algorithms
             )
-            issuer = payload['iss']
+            issuer = payload["iss"]
             if issuer != expected_issuer:
-                raise Exception('Wrong issuer: {}'.format(issuer))
+                raise Exception("Wrong issuer: {}".format(issuer))
             valid_token = True
             break
         except Exception as e:
@@ -10,54 +10,62 @@ try:
    from ldap3 import Server, Connection
except ImportError:
    if settings.LDAP_LOGIN_ENABLED:
        sys.exit("The ldap3 library was not found. This is required to use LDAP authentication (see requirements.txt).")
        sys.exit(
            "The ldap3 library was not found. This is required to use LDAP authentication (see requirements.txt)."
        )

from redash.authentication import create_and_login_user, logout_and_redirect_to_index, get_next_path
from redash.authentication import (
    create_and_login_user,
    logout_and_redirect_to_index,
    get_next_path,
)
from redash.authentication.org_resolving import current_org
from redash.handlers.base import org_scoped_rule

logger = logging.getLogger('ldap_auth')
logger = logging.getLogger("ldap_auth")


blueprint = Blueprint('ldap_auth', __name__)
blueprint = Blueprint("ldap_auth", __name__)


@blueprint.route(org_scoped_rule("/ldap/login"), methods=['GET', 'POST'])
@blueprint.route(org_scoped_rule("/ldap/login"), methods=["GET", "POST"])
def login(org_slug=None):
    index_url = url_for("redash.index", org_slug=org_slug)
    unsafe_next_path = request.args.get('next', index_url)
    unsafe_next_path = request.args.get("next", index_url)
    next_path = get_next_path(unsafe_next_path)

    if not settings.LDAP_LOGIN_ENABLED:
        logger.error("Cannot use LDAP for login without being enabled in settings")
        return redirect(url_for('redash.index', next=next_path))
        return redirect(url_for("redash.index", next=next_path))

    if current_user.is_authenticated:
        return redirect(next_path)

    if request.method == 'POST':
        ldap_user = auth_ldap_user(request.form['email'], request.form['password'])
    if request.method == "POST":
        ldap_user = auth_ldap_user(request.form["email"], request.form["password"])

        if ldap_user is not None:
            user = create_and_login_user(
                current_org,
                ldap_user[settings.LDAP_DISPLAY_NAME_KEY][0],
                ldap_user[settings.LDAP_EMAIL_KEY][0]
                ldap_user[settings.LDAP_EMAIL_KEY][0],
            )
            if user is None:
                return logout_and_redirect_to_index()

            return redirect(next_path or url_for('redash.index'))
            return redirect(next_path or url_for("redash.index"))
        else:
            flash("Incorrect credentials.")

    return render_template("login.html",
                           org_slug=org_slug,
                           next=next_path,
                           email=request.form.get('email', ''),
                           show_password_login=True,
                           username_prompt=settings.LDAP_CUSTOM_USERNAME_PROMPT,
                           hide_forgot_password=True)
    return render_template(
        "login.html",
        org_slug=org_slug,
        next=next_path,
        email=request.form.get("email", ""),
        show_password_login=True,
        username_prompt=settings.LDAP_CUSTOM_USERNAME_PROMPT,
        hide_forgot_password=True,
    )


def auth_ldap_user(username, password):
@@ -68,12 +76,16 @@ def auth_ldap_user(username, password):
            settings.LDAP_BIND_DN,
            password=settings.LDAP_BIND_DN_PASSWORD,
            authentication=settings.LDAP_AUTH_METHOD,
            auto_bind=True
            auto_bind=True,
        )
    else:
        conn = Connection(server, auto_bind=True)

    conn.search(settings.LDAP_SEARCH_DN, settings.LDAP_SEARCH_TEMPLATE % {"username": username}, attributes=[settings.LDAP_DISPLAY_NAME_KEY, settings.LDAP_EMAIL_KEY])
    conn.search(
        settings.LDAP_SEARCH_DN,
        settings.LDAP_SEARCH_TEMPLATE % {"username": username},
        attributes=[settings.LDAP_DISPLAY_NAME_KEY, settings.LDAP_EMAIL_KEY],
    )

    if len(conn.entries) == 0:
        return None

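For context on the reflowed `conn.search(...)` call, here is a minimal, self-contained sketch of the same ldap3 bind-and-search flow; the host, DNs, and filter are placeholders, not values from the commit:

from ldap3 import Server, Connection

server = Server("ldap.example.com")  # placeholder host
conn = Connection(server, "cn=bind,dc=example,dc=com", password="secret", auto_bind=True)

# Same shape as the reformatted call above: base DN, an interpolated
# filter template, and the attributes to fetch for the user record.
conn.search(
    "ou=users,dc=example,dc=com",
    "(cn=%s)" % "alice",
    attributes=["displayName", "mail"],
)
print(conn.entries)  # empty list means authentication should fail
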
@@ -7,13 +7,13 @@ from redash.models import Organization


def _get_current_org():
    if 'org' in g:
    if "org" in g:
        return g.org

    if request.view_args is None:
        slug = g.get('org_slug', 'default')
        slug = g.get("org_slug", "default")
    else:
        slug = request.view_args.get('org_slug', g.get('org_slug', 'default'))
        slug = request.view_args.get("org_slug", g.get("org_slug", "default"))

    g.org = Organization.get_by_slug(slug)
    logging.debug("Current organization: %s (slug: %s)", g.org, slug)

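`_get_current_org` is presumably exposed through a Werkzeug `LocalProxy`, which is how `current_org` is imported elsewhere in this diff; a sketch of that wiring:

from werkzeug.local import LocalProxy

# Each attribute access on current_org re-invokes _get_current_org, so the
# lookup always reflects the request currently being handled.
current_org = LocalProxy(_get_current_org)
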
@@ -1,23 +1,29 @@
import logging
from flask import redirect, url_for, Blueprint, request
from redash.authentication import create_and_login_user, logout_and_redirect_to_index, get_next_path
from redash.authentication import (
    create_and_login_user,
    logout_and_redirect_to_index,
    get_next_path,
)
from redash.authentication.org_resolving import current_org
from redash.handlers.base import org_scoped_rule
from redash import settings

logger = logging.getLogger('remote_user_auth')
logger = logging.getLogger("remote_user_auth")

blueprint = Blueprint('remote_user_auth', __name__)
blueprint = Blueprint("remote_user_auth", __name__)


@blueprint.route(org_scoped_rule("/remote_user/login"))
def login(org_slug=None):
    unsafe_next_path = request.args.get('next')
    unsafe_next_path = request.args.get("next")
    next_path = get_next_path(unsafe_next_path)

    if not settings.REMOTE_USER_LOGIN_ENABLED:
        logger.error("Cannot use remote user for login without being enabled in settings")
        return redirect(url_for('redash.index', next=next_path, org_slug=org_slug))
        logger.error(
            "Cannot use remote user for login without being enabled in settings"
        )
        return redirect(url_for("redash.index", next=next_path, org_slug=org_slug))

    email = request.headers.get(settings.REMOTE_USER_HEADER)

@@ -25,12 +31,16 @@ def login(org_slug=None):
    # falsey value. Special case that here so it Just Works for more installs.
    # '(null)' should never really be a value that anyone wants to legitimately
    # use as a redash user email.
    if email == '(null)':
    if email == "(null)":
        email = None

    if not email:
        logger.error("Cannot use remote user for login when it's not provided in the request (looked in headers['" + settings.REMOTE_USER_HEADER + "'])")
        return redirect(url_for('redash.index', next=next_path, org_slug=org_slug))
        logger.error(
            "Cannot use remote user for login when it's not provided in the request (looked in headers['"
            + settings.REMOTE_USER_HEADER
            + "'])"
        )
        return redirect(url_for("redash.index", next=next_path, org_slug=org_slug))

    logger.info("Logging in " + email + " via remote user")

@@ -38,4 +48,4 @@ def login(org_slug=None):
    if user is None:
        return logout_and_redirect_to_index()

    return redirect(next_path or url_for('redash.index', org_slug=org_slug), code=302)
    return redirect(next_path or url_for("redash.index", org_slug=org_slug), code=302)

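To exercise this endpoint, the trusted reverse proxy injects the configured header on every request; a minimal sketch with `requests`, where both the URL and the header name are placeholders (the header is whatever `REMOTE_USER_HEADER` is set to in a given deployment):

import requests

# Simulates what an authenticating reverse proxy would send along.
resp = requests.get(
    "https://redash.example.com/remote_user/login",
    headers={"X-Forwarded-Remote-User": "alice@example.com"},
)
print(resp.status_code)
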
@@ -8,8 +8,8 @@ from saml2.client import Saml2Client
from saml2.config import Config as Saml2Config
from saml2.saml import NAMEID_FORMAT_TRANSIENT

logger = logging.getLogger('saml_auth')
blueprint = Blueprint('saml_auth', __name__)
logger = logging.getLogger("saml_auth")
blueprint = Blueprint("saml_auth", __name__)


def get_saml_client(org):
@@ -23,34 +23,30 @@ def get_saml_client(org):
    acs_url = url_for("saml_auth.idp_initiated", org_slug=org.slug, _external=True)

    saml_settings = {
        'metadata': {
            "remote": [{
                "url": metadata_url
            }]
        },
        'service': {
            'sp': {
                'endpoints': {
                    'assertion_consumer_service': [
        "metadata": {"remote": [{"url": metadata_url}]},
        "service": {
            "sp": {
                "endpoints": {
                    "assertion_consumer_service": [
                        (acs_url, BINDING_HTTP_REDIRECT),
                        (acs_url, BINDING_HTTP_POST)
                    ],
                        (acs_url, BINDING_HTTP_POST),
                    ]
                },
                # Don't verify that the incoming requests originate from us via
                # the built-in cache for authn request ids in pysaml2
                'allow_unsolicited': True,
                "allow_unsolicited": True,
                # Don't sign authn requests, since signed requests only make
                # sense in a situation where you control both the SP and IdP
                'authn_requests_signed': False,
                'logout_requests_signed': True,
                'want_assertions_signed': True,
                'want_response_signed': False,
            },
                "authn_requests_signed": False,
                "logout_requests_signed": True,
                "want_assertions_signed": True,
                "want_response_signed": False,
            }
        },
    }

    if entity_id is not None and entity_id != "":
        saml_settings['entityid'] = entity_id
        saml_settings["entityid"] = entity_id

    sp_config = Saml2Config()
    sp_config.load(saml_settings)
@@ -60,26 +56,29 @@ def get_saml_client(org):
    return saml_client


@blueprint.route(org_scoped_rule('/saml/callback'), methods=['POST'])
@blueprint.route(org_scoped_rule("/saml/callback"), methods=["POST"])
def idp_initiated(org_slug=None):
    if not current_org.get_setting("auth_saml_enabled"):
        logger.error("SAML Login is not enabled")
        return redirect(url_for('redash.index', org_slug=org_slug))
        return redirect(url_for("redash.index", org_slug=org_slug))

    saml_client = get_saml_client(current_org)
    try:
        authn_response = saml_client.parse_authn_request_response(
            request.form['SAMLResponse'],
            entity.BINDING_HTTP_POST)
            request.form["SAMLResponse"], entity.BINDING_HTTP_POST
        )
    except Exception:
        logger.error('Failed to parse SAML response', exc_info=True)
        flash('SAML login failed. Please try again later.')
        return redirect(url_for('redash.login', org_slug=org_slug))
        logger.error("Failed to parse SAML response", exc_info=True)
        flash("SAML login failed. Please try again later.")
        return redirect(url_for("redash.login", org_slug=org_slug))

    authn_response.get_identity()
    user_info = authn_response.get_subject()
    email = user_info.text
    name = "%s %s" % (authn_response.ava['FirstName'][0], authn_response.ava['LastName'][0])
    name = "%s %s" % (
        authn_response.ava["FirstName"][0],
        authn_response.ava["LastName"][0],
    )

    # This is what is known as "Just In Time (JIT) provisioning".
    # What that means is that, if a user in a SAML assertion
@@ -88,11 +87,11 @@ def idp_initiated(org_slug=None):
    if user is None:
        return logout_and_redirect_to_index()

    if 'RedashGroups' in authn_response.ava:
        group_names = authn_response.ava.get('RedashGroups')
    if "RedashGroups" in authn_response.ava:
        group_names = authn_response.ava.get("RedashGroups")
        user.update_group_assignments(group_names)

    url = url_for('redash.index', org_slug=org_slug)
    url = url_for("redash.index", org_slug=org_slug)

    return redirect(url)

@@ -101,10 +100,10 @@ def idp_initiated(org_slug=None):
def sp_initiated(org_slug=None):
    if not current_org.get_setting("auth_saml_enabled"):
        logger.error("SAML Login is not enabled")
        return redirect(url_for('redash.index', org_slug=org_slug))
        return redirect(url_for("redash.index", org_slug=org_slug))

    saml_client = get_saml_client(current_org)
    nameid_format = current_org.get_setting('auth_saml_nameid_format')
    nameid_format = current_org.get_setting("auth_saml_nameid_format")
    if nameid_format is None or nameid_format == "":
        nameid_format = NAMEID_FORMAT_TRANSIENT

@@ -112,8 +111,8 @@ def sp_initiated(org_slug=None):

    redirect_url = None
    # Select the IdP URL to send the AuthN request to
    for key, value in info['headers']:
        if key == 'Location':
    for key, value in info["headers"]:
        if key == "Location":
            redirect_url = value
    response = redirect(redirect_url, code=302)

@@ -124,6 +123,6 @@ def sp_initiated(org_slug=None):
    # http://docs.oasis-open.org/security/saml/v2.0/saml-bindings-2.0-os.pdf
    # We set those headers here as a "belt and suspenders" approach,
    # since enterprise environments don't always conform to RFCs
    response.headers['Cache-Control'] = 'no-cache, no-store'
    response.headers['Pragma'] = 'no-cache'
    response.headers["Cache-Control"] = "no-cache, no-store"
    response.headers["Pragma"] = "no-cache"
    return response

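The `RedashGroups` handling above boils down to a simple attribute-to-groups mapping; a sketch of the same logic in isolation, where the `ava` dict stands in for a parsed SAML assertion's attribute values:

def groups_from_assertion(ava):
    # Mirrors the hunk above: only sync group membership when the IdP
    # actually sent the attribute; otherwise leave assignments untouched.
    if "RedashGroups" in ava:
        return ava.get("RedashGroups")
    return None

print(groups_from_assertion({"RedashGroups": ["admin", "default"]}))  # ['admin', 'default']
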
@@ -1,5 +1,3 @@


import click
import simplejson
from flask import current_app
@@ -17,10 +15,9 @@ def create(group):
    @app.shell_context_processor
    def shell_context():
        from redash import models, settings
        return {
            'models': models,
            'settings': settings,
        }

        return {"models": models, "settings": settings}

    return app


@@ -58,7 +55,7 @@ def check_settings():


@manager.command()
@click.argument('email', default=settings.MAIL_DEFAULT_SENDER, required=False)
@click.argument("email", default=settings.MAIL_DEFAULT_SENDER, required=False)
def send_test_mail(email=None):
    """
    Send test message to EMAIL (default: the address you defined in MAIL_DEFAULT_SENDER)
@@ -69,8 +66,11 @@ def send_test_mail(email=None):
    if email is None:
        email = settings.MAIL_DEFAULT_SENDER

    mail.send(Message(subject="Test Message from Redash", recipients=[email],
                      body="Test message."))
    mail.send(
        Message(
            subject="Test Message from Redash", recipients=[email], body="Test message."
        )
    )


@manager.command()
@@ -79,13 +79,14 @@ def ipython():
    import sys
    import IPython
    from flask.globals import _app_ctx_stack

    app = _app_ctx_stack.top.app

    banner = 'Python %s on %s\nIPython: %s\nRedash version: %s\n' % (
    banner = "Python %s on %s\nIPython: %s\nRedash version: %s\n" % (
        sys.version,
        sys.platform,
        IPython.__version__,
        __version__
        __version__,
    )

    ctx = {}

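For reference, `shell_context_processor` is a standard Flask hook: whatever the decorated function returns becomes part of the `flask shell` namespace. A free-standing sketch, independent of Redash:

from flask import Flask

app = Flask(__name__)

@app.shell_context_processor
def shell_context():
    # `flask shell` will have `answer` predefined, just as the Redash shell
    # gets `models` and `settings` from the hunk above.
    return {"answer": 42}
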
@@ -1,4 +1,3 @@

from sys import exit

import click
@@ -7,32 +6,39 @@ from six import text_type
from sqlalchemy.orm.exc import NoResultFound

from redash import models
from redash.query_runner import (get_configuration_schema_for_query_runner_type,
                                 query_runners)
from redash.query_runner import (
    get_configuration_schema_for_query_runner_type,
    query_runners,
)
from redash.utils import json_loads
from redash.utils.configuration import ConfigurationContainer

manager = AppGroup(help="Data sources management commands.")


@manager.command(name='list')
@click.option('--org', 'organization', default=None,
              help="The organization the user belongs to (leave blank for "
              "all organizations).")
@manager.command(name="list")
@click.option(
    "--org",
    "organization",
    default=None,
    help="The organization the user belongs to (leave blank for " "all organizations).",
)
def list_command(organization=None):
    """List currently configured data sources."""
    if organization:
        org = models.Organization.get_by_slug(organization)
        data_sources = models.DataSource.query.filter(
            models.DataSource.org == org)
        data_sources = models.DataSource.query.filter(models.DataSource.org == org)
    else:
        data_sources = models.DataSource.query
    for i, ds in enumerate(data_sources.order_by(models.DataSource.name)):
        if i > 0:
            print("-" * 20)

        print("Id: {}\nName: {}\nType: {}\nOptions: {}".format(
            ds.id, ds.name, ds.type, ds.options.to_json()))
        print(
            "Id: {}\nName: {}\nType: {}\nOptions: {}".format(
                ds.id, ds.name, ds.type, ds.options.to_json()
            )
        )


@manager.command()
@@ -46,26 +52,33 @@ def list_types():

def validate_data_source_type(type):
    if type not in query_runners.keys():
        print("Error: the type \"{}\" is not supported (supported types: {})."
              .format(type, ", ".join(query_runners.keys())))
        print(
            'Error: the type "{}" is not supported (supported types: {}).'.format(
                type, ", ".join(query_runners.keys())
            )
        )
        exit(1)


@manager.command()
@click.argument('name')
@click.option('--org', 'organization', default='default',
              help="The organization the user belongs to "
              "(leave blank for 'default').")
def test(name, organization='default'):
@click.argument("name")
@click.option(
    "--org",
    "organization",
    default="default",
    help="The organization the user belongs to " "(leave blank for 'default').",
)
def test(name, organization="default"):
    """Test connection to data source by issuing a trivial query."""
    try:
        org = models.Organization.get_by_slug(organization)
        data_source = models.DataSource.query.filter(
            models.DataSource.name == name,
            models.DataSource.org == org).one()
        print("Testing connection to data source: {} (id={})".format(
            name, data_source.id))
            models.DataSource.name == name, models.DataSource.org == org
        ).one()
        print(
            "Testing connection to data source: {} (id={})".format(name, data_source.id)
        )
        try:
            data_source.query_runner.test_connection()
        except Exception as e:
@@ -79,15 +92,16 @@ def test(name, organization='default'):


@manager.command()
@click.argument('name', default=None, required=False)
@click.option('--type', default=None,
              help="new type for the data source")
@click.option('--options', default=None,
              help="updated options for the data source")
@click.option('--org', 'organization', default='default',
              help="The organization the user belongs to (leave blank for "
              "'default').")
def new(name=None, type=None, options=None, organization='default'):
@click.argument("name", default=None, required=False)
@click.option("--type", default=None, help="new type for the data source")
@click.option("--options", default=None, help="updated options for the data source")
@click.option(
    "--org",
    "organization",
    default="default",
    help="The organization the user belongs to (leave blank for " "'default').",
)
def new(name=None, type=None, options=None, organization="default"):
    """Create new data source."""

    if name is None:
@@ -100,8 +114,7 @@ def new(name=None, type=None, options=None, organization='default'):

    idx = 0
    while idx < 1 or idx > len(list(query_runners.keys())):
        idx = click.prompt("[{}-{}]".format(1, len(query_runners.keys())),
                           type=int)
        idx = click.prompt("[{}-{}]".format(1, len(query_runners.keys())), type=int)

    type = list(query_runners.keys())[idx - 1]
    else:
@@ -111,28 +124,28 @@ def new(name=None, type=None, options=None, organization='default'):
    schema = query_runner.configuration_schema()

    if options is None:
        types = {
            'string': text_type,
            'number': int,
            'boolean': bool
        }
        types = {"string": text_type, "number": int, "boolean": bool}

        options_obj = {}

        for k, prop in schema['properties'].items():
            required = k in schema.get('required', [])
        for k, prop in schema["properties"].items():
            required = k in schema.get("required", [])
            default_value = "<<DEFAULT_VALUE>>"
            if required:
                default_value = None

            prompt = prop.get('title', k.capitalize())
            prompt = prop.get("title", k.capitalize())
            if required:
                prompt = "{} (required)".format(prompt)
            else:
                prompt = "{} (optional)".format(prompt)

            value = click.prompt(prompt, default=default_value,
                                 type=types[prop['type']], show_default=False)
            value = click.prompt(
                prompt,
                default=default_value,
                type=types[prop["type"]],
                show_default=False,
            )
            if value != default_value:
                options_obj[k] = value

@@ -144,28 +157,37 @@ def new(name=None, type=None, options=None, organization='default'):
        print("Error: invalid configuration.")
        exit()

    print("Creating {} data source ({}) with options:\n{}".format(
        type, name, options.to_json()))
    print(
        "Creating {} data source ({}) with options:\n{}".format(
            type, name, options.to_json()
        )
    )

    data_source = models.DataSource.create_with_group(
        name=name, type=type, options=options,
        org=models.Organization.get_by_slug(organization))
        name=name,
        type=type,
        options=options,
        org=models.Organization.get_by_slug(organization),
    )
    models.db.session.commit()
    print("Id: {}".format(data_source.id))


@manager.command()
@click.argument('name')
@click.option('--org', 'organization', default='default',
              help="The organization the user belongs to (leave blank for "
              "'default').")
def delete(name, organization='default'):
@click.argument("name")
@click.option(
    "--org",
    "organization",
    default="default",
    help="The organization the user belongs to (leave blank for " "'default').",
)
def delete(name, organization="default"):
    """Delete data source by name."""
    try:
        org = models.Organization.get_by_slug(organization)
        data_source = models.DataSource.query.filter(
            models.DataSource.name == name,
            models.DataSource.org == org).one()
            models.DataSource.name == name, models.DataSource.org == org
        ).one()
        print("Deleting data source: {} (id={})".format(name, data_source.id))
        models.db.session.delete(data_source)
        models.db.session.commit()
@@ -182,31 +204,30 @@ def update_attr(obj, attr, new_value):


@manager.command()
@click.argument('name')
@click.option('--name', 'new_name', default=None,
              help="new name for the data source")
@click.option('--options', default=None,
              help="updated options for the data source")
@click.option('--type', default=None,
              help="new type for the data source")
@click.option('--org', 'organization', default='default',
              help="The organization the user belongs to (leave blank for "
              "'default').")
def edit(name, new_name=None, options=None, type=None, organization='default'):
@click.argument("name")
@click.option("--name", "new_name", default=None, help="new name for the data source")
@click.option("--options", default=None, help="updated options for the data source")
@click.option("--type", default=None, help="new type for the data source")
@click.option(
    "--org",
    "organization",
    default="default",
    help="The organization the user belongs to (leave blank for " "'default').",
)
def edit(name, new_name=None, options=None, type=None, organization="default"):
    """Edit data source settings (name, options, type)."""
    try:
        if type is not None:
            validate_data_source_type(type)
        org = models.Organization.get_by_slug(organization)
        data_source = models.DataSource.query.filter(
            models.DataSource.name == name,
            models.DataSource.org == org).one()
            models.DataSource.name == name, models.DataSource.org == org
        ).one()
        update_attr(data_source, "name", new_name)
        update_attr(data_source, "type", type)

        if options is not None:
            schema = get_configuration_schema_for_query_runner_type(
                data_source.type)
            schema = get_configuration_schema_for_query_runner_type(data_source.type)
            options = json_loads(options)
            data_source.options.set_schema(schema)
            data_source.options.update(options)

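A recurring artifact in the reformatted `help=` strings above is Python's implicit concatenation of adjacent string literals, which Black preserves rather than merging; a two-line demonstration:

# Adjacent literals are joined at compile time, so these two are identical:
a = "The organization the user belongs to (leave blank for " "'default')."
b = "The organization the user belongs to (leave blank for 'default')."
assert a == b
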
@@ -19,7 +19,7 @@ def _wait_for_db_connection(db):
    retried = False
    while not retried:
        try:
            db.engine.execute('SELECT 1;')
            db.engine.execute("SELECT 1;")
            return
        except DatabaseError:
            time.sleep(30)
@@ -51,10 +51,9 @@ def drop_tables():


@manager.command()
@argument('old_secret')
@argument('new_secret')
@option('--show-sql/--no-show-sql', default=False,
        help="show sql for debug")
@argument("old_secret")
@argument("new_secret")
@option("--show-sql/--no-show-sql", default=False, help="show sql for debug")
def reencrypt(old_secret, new_secret, show_sql):
    """Reencrypt data encrypted by OLD_SECRET with NEW_SECRET."""
    from redash.models import db
@@ -63,26 +62,39 @@ def reencrypt(old_secret, new_secret, show_sql):

    if show_sql:
        import logging
        logging.basicConfig()
        logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)

    table_for_select = sqlalchemy.Table('data_sources', sqlalchemy.MetaData(),
                                        Column('id', db.Integer, primary_key=True),
                                        Column('encrypted_options',
                                               ConfigurationContainer.as_mutable(
                                                   EncryptedConfiguration(
                                                       db.Text, old_secret, FernetEngine))))
    table_for_update = sqlalchemy.Table('data_sources', sqlalchemy.MetaData(),
                                        Column('id', db.Integer, primary_key=True),
                                        Column('encrypted_options',
                                               ConfigurationContainer.as_mutable(
                                                   EncryptedConfiguration(
                                                       db.Text, new_secret, FernetEngine))))
        logging.basicConfig()
        logging.getLogger("sqlalchemy.engine").setLevel(logging.INFO)

    table_for_select = sqlalchemy.Table(
        "data_sources",
        sqlalchemy.MetaData(),
        Column("id", db.Integer, primary_key=True),
        Column(
            "encrypted_options",
            ConfigurationContainer.as_mutable(
                EncryptedConfiguration(db.Text, old_secret, FernetEngine)
            ),
        ),
    )
    table_for_update = sqlalchemy.Table(
        "data_sources",
        sqlalchemy.MetaData(),
        Column("id", db.Integer, primary_key=True),
        Column(
            "encrypted_options",
            ConfigurationContainer.as_mutable(
                EncryptedConfiguration(db.Text, new_secret, FernetEngine)
            ),
        ),
    )

    update = table_for_update.update()
    data_sources = db.session.execute(select([table_for_select]))
    for ds in data_sources:
        stmt = update.where(table_for_update.c.id == ds['id']).values(encrypted_options=ds['encrypted_options'])
        stmt = update.where(table_for_update.c.id == ds["id"]).values(
            encrypted_options=ds["encrypted_options"]
        )
        db.session.execute(stmt)

    data_sources.close()

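The reencryption trick above is worth spelling out: the same table is mapped twice, once with a column type that decrypts with the old key and once with a type that encrypts with the new key, and each row is read through one mapping and written back through the other. A stripped-down sketch of that select-then-update pattern in plain SQLAlchemy Core (the engine URL and plain-text column are placeholders standing in for the encrypted types):

import sqlalchemy
from sqlalchemy import Column, Integer, Text, select

engine = sqlalchemy.create_engine("postgresql:///redash")  # placeholder
table = sqlalchemy.Table(
    "data_sources",
    sqlalchemy.MetaData(),
    Column("id", Integer, primary_key=True),
    Column("encrypted_options", Text),
)

with engine.connect() as conn:
    update = table.update()
    for row in conn.execute(select([table])):
        # With two differently keyed column types as in the command above,
        # reading decrypts with the old secret and writing re-encrypts
        # with the new one; here the round trip is a no-op.
        conn.execute(
            update.where(table.c.id == row["id"]).values(
                encrypted_options=row["encrypted_options"]
            )
        )
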
@@ -1,4 +1,3 @@

from sys import exit

from sqlalchemy.orm.exc import NoResultFound
@@ -11,17 +10,23 @@ manager = AppGroup(help="Groups management commands.")


@manager.command()
@argument('name')
@option('--org', 'organization', default='default',
        help="The organization the user belongs to (leave blank for "
        "'default').")
@option('--permissions', default=None,
        help="Comma separated list of permissions ('create_dashboard',"
        " 'create_query', 'edit_dashboard', 'edit_query', "
        "'view_query', 'view_source', 'execute_query', 'list_users',"
        " 'schedule_query', 'list_dashboards', 'list_alerts',"
        " 'list_data_sources') (leave blank for default).")
def create(name, permissions=None, organization='default'):
@argument("name")
@option(
    "--org",
    "organization",
    default="default",
    help="The organization the user belongs to (leave blank for " "'default').",
)
@option(
    "--permissions",
    default=None,
    help="Comma separated list of permissions ('create_dashboard',"
    " 'create_query', 'edit_dashboard', 'edit_query', "
    "'view_query', 'view_source', 'execute_query', 'list_users',"
    " 'schedule_query', 'list_dashboards', 'list_alerts',"
    " 'list_data_sources') (leave blank for default).",
)
def create(name, permissions=None, organization="default"):
    print("Creating group (%s)..." % (name))

    org = models.Organization.get_by_slug(organization)
@@ -31,9 +36,7 @@ def create(name, permissions=None, organization='default'):
    print("permissions: [%s]" % ",".join(permissions))

    try:
        models.db.session.add(models.Group(
            name=name, org=org,
            permissions=permissions))
        models.db.session.add(models.Group(name=name, org=org, permissions=permissions))
        models.db.session.commit()
    except Exception as e:
        print("Failed to create group: %s" % e)
@@ -41,13 +44,16 @@ def create(name, permissions=None, organization='default'):


@manager.command()
@argument('group_id')
@option('--permissions', default=None,
        help="Comma separated list of permissions ('create_dashboard',"
        " 'create_query', 'edit_dashboard', 'edit_query',"
        " 'view_query', 'view_source', 'execute_query', 'list_users',"
        " 'schedule_query', 'list_dashboards', 'list_alerts',"
        " 'list_data_sources') (leave blank for default).")
@argument("group_id")
@option(
    "--permissions",
    default=None,
    help="Comma separated list of permissions ('create_dashboard',"
    " 'create_query', 'edit_dashboard', 'edit_query',"
    " 'view_query', 'view_source', 'execute_query', 'list_users',"
    " 'schedule_query', 'list_dashboards', 'list_alerts',"
    " 'list_data_sources') (leave blank for default).",
)
def change_permissions(group_id, permissions=None):
    print("Change permissions of group %s ..." % group_id)

@@ -58,8 +64,10 @@ def change_permissions(group_id, permissions=None):
        exit(1)

    permissions = extract_permissions_string(permissions)
    print("current permissions [%s] will be modified to [%s]" % (
        ",".join(group.permissions), ",".join(permissions)))
    print(
        "current permissions [%s] will be modified to [%s]"
        % (",".join(group.permissions), ",".join(permissions))
    )

    group.permissions = permissions

@@ -75,14 +83,18 @@ def extract_permissions_string(permissions):
    if permissions is None:
        permissions = models.Group.DEFAULT_PERMISSIONS
    else:
        permissions = permissions.split(',')
        permissions = permissions.split(",")
        permissions = [p.strip() for p in permissions]
    return permissions


@manager.command(name='list')
@option('--org', 'organization', default=None,
        help="The organization to limit to (leave blank for all).")
@manager.command(name="list")
@option(
    "--org",
    "organization",
    default=None,
    help="The organization to limit to (leave blank for all).",
)
def list_command(organization=None):
    """List all groups"""
    if organization:
@@ -95,8 +107,15 @@ def list_command(organization=None):
        if i > 0:
            print("-" * 20)

        print("Id: {}\nName: {}\nType: {}\nOrganization: {}\nPermissions: [{}]".format(
            group.id, group.name, group.type, group.org.slug, ",".join(group.permissions)))
        print(
            "Id: {}\nName: {}\nType: {}\nOrganization: {}\nPermissions: [{}]".format(
                group.id,
                group.name,
                group.type,
                group.org.slug,
                ",".join(group.permissions),
            )
        )

        members = models.Group.members(group.id)
        user_names = [m.name for m in members]

@@ -1,4 +1,3 @@

from click import argument
from flask.cli import AppGroup

@@ -8,28 +7,34 @@ manager = AppGroup(help="Organization management commands.")


@manager.command()
@argument('domains')
@argument("domains")
def set_google_apps_domains(domains):
    """
    Sets the allowable domains to the comma separated list DOMAINS.
    """
    organization = models.Organization.query.first()
    k = models.Organization.SETTING_GOOGLE_APPS_DOMAINS
    organization.settings[k] = domains.split(',')
    organization.settings[k] = domains.split(",")
    models.db.session.add(organization)
    models.db.session.commit()
    print("Updated list of allowed domains to: {}".format(
        organization.google_apps_domains))
    print(
        "Updated list of allowed domains to: {}".format(
            organization.google_apps_domains
        )
    )


@manager.command()
def show_google_apps_domains():
    organization = models.Organization.query.first()
    print("Current list of Google Apps domains: {}".format(
        ', '.join(organization.google_apps_domains)))
    print(
        "Current list of Google Apps domains: {}".format(
            ", ".join(organization.google_apps_domains)
        )
    )


@manager.command(name='list')
@manager.command(name="list")
def list_command():
    """List all organizations"""
    orgs = models.Organization.query

@@ -6,8 +6,8 @@ manager = AppGroup(help="Queries management commands.")


@manager.command()
@argument('query_id')
@argument('tag')
@argument("query_id")
@argument("tag")
def add_tag(query_id, tag):
    from redash import models

@@ -32,8 +32,8 @@ def add_tag(query_id, tag):


@manager.command()
@argument('query_id')
@argument('tag')
@argument("query_id")
@argument("tag")
def remove_tag(query_id, tag):
    from redash import models

@@ -9,7 +9,11 @@ from rq import Connection, Worker
from sqlalchemy.orm import configure_mappers

from redash import rq_redis_connection
from redash.schedule import rq_scheduler, schedule_periodic_jobs, periodic_job_definitions
from redash.schedule import (
    rq_scheduler,
    schedule_periodic_jobs,
    periodic_job_definitions,
)

manager = AppGroup(help="RQ management commands.")

@@ -22,15 +26,15 @@ def scheduler():


@manager.command()
@argument('queues', nargs=-1)
@argument("queues", nargs=-1)
def worker(queues):
    # Configure any SQLAlchemy mappers loaded until now so that the mapping configuration
    # will already be available to the forked work horses and they won't need
    # to spend valuable time re-doing that on every fork.
    configure_mappers()

    if not queues:
        queues = ['periodic', 'emails', 'default', 'schemas']
        queues = ["periodic", "emails", "default", "schemas"]

    with Connection(rq_redis_connection):
        w = Worker(queues, log_job_description=False)

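Outside of the Redash CLI wrapper, the same worker bootstrapping looks like this with stock `rq` (the Redis connection is a placeholder):

from redis import Redis
from rq import Connection, Worker

with Connection(Redis()):  # placeholder connection
    # Mirrors worker(queues) above, including the default queue list.
    w = Worker(["periodic", "emails", "default", "schemas"], log_job_description=False)
    w.work()
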
@@ -1,4 +1,3 @@

from sys import exit

from click import BOOL, argument, option, prompt
@@ -15,8 +14,8 @@ manager = AppGroup(help="Users management commands.")

def build_groups(org, groups, is_admin):
    if isinstance(groups, string_types):
        groups = groups.split(',')
        groups.remove('')  # in case it was empty string
        groups = groups.split(",")
        groups.remove("")  # in case it was empty string
        groups = [int(g) for g in groups]

    if groups is None:
@@ -29,11 +28,14 @@ def build_groups(org, groups, is_admin):


@manager.command()
@argument('email')
@option('--org', 'organization', default='default',
        help="the organization the user belongs to (leave blank for "
        "'default').")
def grant_admin(email, organization='default'):
@argument("email")
@option(
    "--org",
    "organization",
    default="default",
    help="the organization the user belongs to (leave blank for " "'default').",
)
def grant_admin(email, organization="default"):
    """
    Grant admin access to user EMAIL.
    """
@@ -54,28 +56,47 @@ def grant_admin(email, organization='default'):


@manager.command()
@argument('email')
@argument('name')
@option('--org', 'organization', default='default',
        help="The organization the user belongs to (leave blank for "
        "'default').")
@option('--admin', 'is_admin', is_flag=True, default=False,
        help="set user as admin")
@option('--google', 'google_auth', is_flag=True,
        default=False, help="user uses Google Auth to login")
@option('--password', 'password', default=None,
        help="Password for users who don't use Google Auth "
        "(leave blank for prompt).")
@option('--groups', 'groups', default=None,
        help="Comma separated list of groups (leave blank for "
        "default).")
def create(email, name, groups, is_admin=False, google_auth=False,
           password=None, organization='default'):
@argument("email")
@argument("name")
@option(
    "--org",
    "organization",
    default="default",
    help="The organization the user belongs to (leave blank for " "'default').",
)
@option("--admin", "is_admin", is_flag=True, default=False, help="set user as admin")
@option(
    "--google",
    "google_auth",
    is_flag=True,
    default=False,
    help="user uses Google Auth to login",
)
@option(
    "--password",
    "password",
    default=None,
    help="Password for users who don't use Google Auth " "(leave blank for prompt).",
)
@option(
    "--groups",
    "groups",
    default=None,
    help="Comma separated list of groups (leave blank for " "default).",
)
def create(
    email,
    name,
    groups,
    is_admin=False,
    google_auth=False,
    password=None,
    organization="default",
):
    """
    Create user EMAIL with display name NAME.
    """
    print("Creating user (%s, %s) in organization %s..." % (email, name,
                                                            organization))
    print("Creating user (%s, %s) in organization %s..." % (email, name, organization))
    print("Admin: %r" % is_admin)
    print("Login with Google Auth: %r\n" % google_auth)

@@ -84,8 +105,7 @@ def create(email, name, groups, is_admin=False, google_auth=False,

    user = models.User(org=org, email=email, name=name, group_ids=groups)
    if not password and not google_auth:
        password = prompt("Password", hide_input=True,
                          confirmation_prompt=True)
        password = prompt("Password", hide_input=True, confirmation_prompt=True)
    if not google_auth:
        user.hash_password(password)

@@ -98,20 +118,36 @@ def create(email, name, groups, is_admin=False, google_auth=False,


@manager.command()
@argument('email')
@argument('name')
@option('--org', 'organization', default='default',
        help="The organization the root user belongs to (leave blank for 'default').")
@option('--google', 'google_auth', is_flag=True,
        default=False, help="user uses Google Auth to login")
@option('--password', 'password', default=None,
        help="Password for root user who doesn't use Google Auth "
        "(leave blank for prompt).")
def create_root(email, name, google_auth=False, password=None, organization='default'):
@argument("email")
@argument("name")
@option(
    "--org",
    "organization",
    default="default",
    help="The organization the root user belongs to (leave blank for 'default').",
)
@option(
    "--google",
    "google_auth",
    is_flag=True,
    default=False,
    help="user uses Google Auth to login",
)
@option(
    "--password",
    "password",
    default=None,
    help="Password for root user who doesn't use Google Auth "
    "(leave blank for prompt).",
)
def create_root(email, name, google_auth=False, password=None, organization="default"):
    """
    Create root user.
    """
    print("Creating root user (%s, %s) in organization %s..." % (email, name, organization))
    print(
        "Creating root user (%s, %s) in organization %s..."
        % (email, name, organization)
    )
    print("Login with Google Auth: %r\n" % google_auth)

    user = models.User.query.filter(models.User.email == email).first()
@@ -119,21 +155,35 @@ def create_root(email, name, google_auth=False, password=None, organization='def
        print("User [%s] already exists." % email)
        exit(1)

    slug = 'default'
    default_org = models.Organization.query.filter(models.Organization.slug == slug).first()
    slug = "default"
    default_org = models.Organization.query.filter(
        models.Organization.slug == slug
    ).first()
    if default_org is None:
        default_org = models.Organization(name=organization, slug=slug, settings={})

    admin_group = models.Group(name='admin', permissions=['admin', 'super_admin'],
                               org=default_org, type=models.Group.BUILTIN_GROUP)
    default_group = models.Group(name='default', permissions=models.Group.DEFAULT_PERMISSIONS,
                                 org=default_org, type=models.Group.BUILTIN_GROUP)
    admin_group = models.Group(
        name="admin",
        permissions=["admin", "super_admin"],
        org=default_org,
        type=models.Group.BUILTIN_GROUP,
    )
    default_group = models.Group(
        name="default",
        permissions=models.Group.DEFAULT_PERMISSIONS,
        org=default_org,
        type=models.Group.BUILTIN_GROUP,
    )

    models.db.session.add_all([default_org, admin_group, default_group])
    models.db.session.commit()

    user = models.User(org=default_org, email=email, name=name,
                       group_ids=[admin_group.id, default_group.id])
    user = models.User(
        org=default_org,
        email=email,
        name=name,
        group_ids=[admin_group.id, default_group.id],
    )
    if not google_auth:
        user.hash_password(password)

@@ -146,10 +196,13 @@ def create_root(email, name, google_auth=False, password=None, organization='def


@manager.command()
@argument('email')
@option('--org', 'organization', default=None,
        help="The organization the user belongs to (leave blank for all"
        " organizations).")
@argument("email")
@option(
    "--org",
    "organization",
    default=None,
    help="The organization the user belongs to (leave blank for all" " organizations).",
)
def delete(email, organization=None):
    """
    Delete user EMAIL.
@@ -157,22 +210,25 @@ def delete(email, organization=None):
    if organization:
        org = models.Organization.get_by_slug(organization)
        deleted_count = models.User.query.filter(
            models.User.email == email,
            models.User.org == org.id,
            models.User.email == email, models.User.org == org.id
        ).delete()
    else:
        deleted_count = models.User.query.filter(models.User.email == email).delete(
            synchronize_session=False)
            synchronize_session=False
        )
    models.db.session.commit()
    print("Deleted %d users." % deleted_count)


@manager.command()
@argument('email')
@argument('password')
@option('--org', 'organization', default=None,
        help="The organization the user belongs to (leave blank for all "
        "organizations).")
@argument("email")
@argument("password")
@option(
    "--org",
    "organization",
    default=None,
    help="The organization the user belongs to (leave blank for all " "organizations).",
)
def password(email, password, organization=None):
    """
    Resets password for EMAIL to PASSWORD.
@@ -180,8 +236,7 @@ def password(email, password, organization=None):
    if organization:
        org = models.Organization.get_by_slug(organization)
        user = models.User.query.filter(
            models.User.email == email,
            models.User.org == org,
            models.User.email == email, models.User.org == org
        ).first()
    else:
        user = models.User.query.filter(models.User.email == email).first()
@@ -197,17 +252,23 @@


@manager.command()
@argument('email')
@argument('name')
@argument('inviter_email')
@option('--org', 'organization', default='default',
        help="The organization the user belongs to (leave blank for 'default')")
@option('--admin', 'is_admin', type=BOOL, default=False,
        help="set user as admin")
@option('--groups', 'groups', default=None,
        help="Comma separated list of groups (leave blank for default).")
def invite(email, name, inviter_email, groups, is_admin=False,
           organization='default'):
@argument("email")
@argument("name")
@argument("inviter_email")
@option(
    "--org",
    "organization",
    default="default",
    help="The organization the user belongs to (leave blank for 'default')",
)
@option("--admin", "is_admin", type=BOOL, default=False, help="set user as admin")
@option(
    "--groups",
    "groups",
    default=None,
    help="Comma separated list of groups (leave blank for default).",
)
def invite(email, name, inviter_email, groups, is_admin=False, organization="default"):
    """
    Sends an invitation to the given NAME and EMAIL from INVITER_EMAIL.
    """
@@ -230,10 +291,13 @@ def invite(email, name, inviter_email, groups, is_admin=False,
    print("The inviter [%s] was not found." % inviter_email)


@manager.command(name='list')
@option('--org', 'organization', default=None,
        help="The organization the user belongs to (leave blank for all"
        " organizations)")
@manager.command(name="list")
@option(
    "--org",
    "organization",
    default=None,
    help="The organization the user belongs to (leave blank for all" " organizations)",
)
def list_command(organization=None):
    """List all users"""
    if organization:
@@ -245,8 +309,11 @@ def list_command(organization=None):
        if i > 0:
            print("-" * 20)

        print("Id: {}\nName: {}\nEmail: {}\nOrganization: {}\nActive: {}".format(
            user.id, user.name, user.email, user.org.name, not(user.is_disabled)))
        print(
            "Id: {}\nName: {}\nEmail: {}\nOrganization: {}\nActive: {}".format(
                user.id, user.name, user.email, user.org.name, not (user.is_disabled)
            )
        )

        groups = models.Group.query.filter(models.Group.id.in_(user.group_ids)).all()
        group_names = [group.name for group in groups]

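The `build_groups` change above keeps a subtle edge case: splitting an empty `--groups` string yields `['']`, which is why the empty element is removed before the `int` conversion. In isolation:

groups = "".split(",")  # ['']
groups.remove("")       # now [], so int() is never called on ""
print([int(g) for g in "1,2,3".split(",")])  # [1, 2, 3]
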
@@ -2,12 +2,7 @@ import logging

logger = logging.getLogger(__name__)

__all__ = [
    'BaseDestination',
    'register',
    'get_destination',
    'import_destinations'
]
__all__ = ["BaseDestination", "register", "get_destination", "import_destinations"]


class BaseDestination(object):
@@ -26,7 +21,7 @@ class BaseDestination(object):

    @classmethod
    def icon(cls):
        return 'fa-bullseye'
        return "fa-bullseye"

    @classmethod
    def enabled(cls):
@@ -42,10 +37,10 @@ class BaseDestination(object):
    @classmethod
    def to_dict(cls):
        return {
            'name': cls.name(),
            'type': cls.type(),
            'icon': cls.icon(),
            'configuration_schema': cls.configuration_schema()
            "name": cls.name(),
            "type": cls.type(),
            "icon": cls.icon(),
            "configuration_schema": cls.configuration_schema(),
        }


@@ -55,10 +50,17 @@ destinations = {}

def register(destination_class):
    global destinations
    if destination_class.enabled():
        logger.debug("Registering %s (%s) destinations.", destination_class.name(), destination_class.type())
        logger.debug(
            "Registering %s (%s) destinations.",
            destination_class.name(),
            destination_class.type(),
        )
        destinations[destination_class.type()] = destination_class
    else:
        logger.warning("%s destination enabled but not supported, not registering. Either disable or install missing dependencies.", destination_class.name())
        logger.warning(
            "%s destination enabled but not supported, not registering. Either disable or install missing dependencies.",
            destination_class.name(),
        )


def get_destination(destination_type, configuration):

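To see the registry above in action, a destination only needs to subclass `BaseDestination` and call `register`; a sketch using the module's own names, where the class itself and its do-nothing behavior are illustrative, not part of Redash:

class NullDestination(BaseDestination):
    @classmethod
    def enabled(cls):
        return True

    def notify(self, alert, query, user, new_state, app, host, options):
        # A do-nothing sink; real destinations post to an external service.
        pass

register(NullDestination)  # lands in destinations[NullDestination.type()]
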
@@ -5,64 +5,72 @@ from redash.destinations import *


class ChatWork(BaseDestination):
    ALERTS_DEFAULT_MESSAGE_TEMPLATE = '{alert_name} changed state to {new_state}.\\n{alert_url}\\n{query_url}'
    ALERTS_DEFAULT_MESSAGE_TEMPLATE = (
        "{alert_name} changed state to {new_state}.\\n{alert_url}\\n{query_url}"
    )

    @classmethod
    def configuration_schema(cls):
        return {
            'type': 'object',
            'properties': {
                'api_token': {
                    'type': 'string',
                    'title': 'API Token'
            "type": "object",
            "properties": {
                "api_token": {"type": "string", "title": "API Token"},
                "room_id": {"type": "string", "title": "Room ID"},
                "message_template": {
                    "type": "string",
                    "default": ChatWork.ALERTS_DEFAULT_MESSAGE_TEMPLATE,
                    "title": "Message Template",
                },
                'room_id': {
                    'type': 'string',
                    'title': 'Room ID'
                },
                'message_template': {
                    'type': 'string',
                    'default': ChatWork.ALERTS_DEFAULT_MESSAGE_TEMPLATE,
                    'title': 'Message Template'
                }
            },
            'required': ['message_template', 'api_token', 'room_id']
            "required": ["message_template", "api_token", "room_id"],
        }

    @classmethod
    def icon(cls):
        return 'fa-comment'
        return "fa-comment"

    def notify(self, alert, query, user, new_state, app, host, options):
        try:
            # Documentation: http://developer.chatwork.com/ja/endpoint_rooms.html#POST-rooms-room_id-messages
            url = 'https://api.chatwork.com/v2/rooms/{room_id}/messages'.format(room_id=options.get('room_id'))
            url = "https://api.chatwork.com/v2/rooms/{room_id}/messages".format(
                room_id=options.get("room_id")
            )

            message = ''
            message = ""
            if alert.custom_subject:
                message = alert.custom_subject + '\n'
                message = alert.custom_subject + "\n"
            if alert.custom_body:
                message += alert.custom_body
            else:
                alert_url = '{host}/alerts/{alert_id}'.format(host=host, alert_id=alert.id)
                query_url = '{host}/queries/{query_id}'.format(host=host, query_id=query.id)
                message_template = options.get('message_template', ChatWork.ALERTS_DEFAULT_MESSAGE_TEMPLATE)
                message += message_template.replace('\\n', '\n').format(
                alert_url = "{host}/alerts/{alert_id}".format(
                    host=host, alert_id=alert.id
                )
                query_url = "{host}/queries/{query_id}".format(
                    host=host, query_id=query.id
                )
                message_template = options.get(
                    "message_template", ChatWork.ALERTS_DEFAULT_MESSAGE_TEMPLATE
                )
                message += message_template.replace("\\n", "\n").format(
                    alert_name=alert.name,
                    new_state=new_state.upper(),
                    alert_url=alert_url,
                    query_url=query_url
                    query_url=query_url,
                )

            headers = {'X-ChatWorkToken': options.get('api_token')}
            payload = {'body': message}
            headers = {"X-ChatWorkToken": options.get("api_token")}
            payload = {"body": message}

            resp = requests.post(url, headers=headers, data=payload, timeout=5.0)
            logging.warning(resp.text)
            if resp.status_code != 200:
                logging.error('ChatWork send ERROR. status_code => {status}'.format(status=resp.status_code))
                logging.error(
                    "ChatWork send ERROR. status_code => {status}".format(
                        status=resp.status_code
                    )
                )
        except Exception:
            logging.exception('ChatWork send ERROR.')
            logging.exception("ChatWork send ERROR.")


register(ChatWork)

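One detail of the ChatWork template worth noting: the default stores a literal backslash-n (written `\\n` in the source), and the `replace` call turns it into real newlines only at send time. A compact demonstration, with placeholder alert values:

template = "{alert_name} changed state to {new_state}.\\n{alert_url}"
rendered = template.replace("\\n", "\n").format(
    alert_name="Nightly rowcount",
    new_state="TRIGGERED",
    alert_url="https://redash.example.com/alerts/1",  # placeholder
)
print(rendered)  # two lines of output
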
@@ -6,31 +6,30 @@ from redash.destinations import *


class Email(BaseDestination):

    @classmethod
    def configuration_schema(cls):
        return {
            "type": "object",
            "properties": {
                "addresses": {
                    "type": "string"
                },
                "addresses": {"type": "string"},
                "subject_template": {
                    "type": "string",
                    "default": settings.ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE,
                    "title": "Subject Template"
                }
                    "title": "Subject Template",
                },
            },
            "required": ["addresses"],
            "extra_options": ["subject_template"]
            "extra_options": ["subject_template"],
        }

    @classmethod
    def icon(cls):
        return 'fa-envelope'
        return "fa-envelope"

    def notify(self, alert, query, user, new_state, app, host, options):
        recipients = [email for email in options.get('addresses', '').split(',') if email]
        recipients = [
            email for email in options.get("addresses", "").split(",") if email
        ]

        if not recipients:
            logging.warning("No emails given. Skipping send.")
@@ -41,23 +40,23 @@ class Email(BaseDestination):
        html = """
        Check <a href="{host}/alerts/{alert_id}">alert</a> / check
        <a href="{host}/queries/{query_id}">query</a> </br>.
        """.format(host=host, alert_id=alert.id, query_id=query.id)
        """.format(
            host=host, alert_id=alert.id, query_id=query.id
        )
        logging.debug("Notifying: %s", recipients)

        try:
            alert_name = alert.name.encode('utf-8', 'ignore')
            alert_name = alert.name.encode("utf-8", "ignore")
            state = new_state.upper()
            if alert.custom_subject:
                subject = alert.custom_subject
            else:
                subject_template = options.get('subject_template', settings.ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE)
                subject_template = options.get(
                    "subject_template", settings.ALERTS_DEFAULT_MAIL_SUBJECT_TEMPLATE
                )
                subject = subject_template.format(alert_name=alert_name, state=state)

            message = Message(
                recipients=recipients,
                subject=subject,
                html=html
            )
            message = Message(recipients=recipients, subject=subject, html=html)
            mail.send(message)
        except Exception:
            logging.exception("Mail send error.")

@@ -21,28 +21,30 @@ class HangoutsChat(BaseDestination):
            "properties": {
                "url": {
                    "type": "string",
                    "title": "Webhook URL (get it from the room settings)"
                    "title": "Webhook URL (get it from the room settings)",
                },
                "icon_url": {
                    "type": "string",
                    "title": "Icon URL (32x32 or multiple, png format)"
                }
                    "title": "Icon URL (32x32 or multiple, png format)",
                },
            },
            "required": ["url"]
            "required": ["url"],
        }

    @classmethod
    def icon(cls):
        return 'fa-bolt'
        return "fa-bolt"

    def notify(self, alert, query, user, new_state, app, host, options):
        try:
            if new_state == "triggered":
                message = "<b><font color=\"#c0392b\">Triggered</font></b>"
                message = '<b><font color="#c0392b">Triggered</font></b>'
            elif new_state == "ok":
                message = "<font color=\"#27ae60\">Went back to normal</font>"
                message = '<font color="#27ae60">Went back to normal</font>'
            else:
                message = "Unable to determine status. Check Query and Alert configuration."
                message = (
                    "Unable to determine status. Check Query and Alert configuration."
                )

            if alert.custom_subject:
                title = alert.custom_subject
@@ -52,59 +54,53 @@ class HangoutsChat(BaseDestination):
            data = {
                "cards": [
                    {
                        "header": {
                            "title": title
                        },
                        "header": {"title": title},
                        "sections": [
                            {
                                "widgets": [
                                    {
                                        "textParagraph": {
                                            "text": message
                                        }
                                    }
                                ]
                            }
                        ]
                            {"widgets": [{"textParagraph": {"text": message}}]}
                        ],
                    }
                ]
            }

            if alert.custom_body:
                data["cards"][0]["sections"].append({
                    "widgets": [
                        {
                            "textParagraph": {
                                "text": alert.custom_body
                            }
                        }
                    ]
                })
                data["cards"][0]["sections"].append(
                    {"widgets": [{"textParagraph": {"text": alert.custom_body}}]}
                )

            if options.get("icon_url"):
                data["cards"][0]["header"]["imageUrl"] = options.get("icon_url")

            # Hangouts Chat will create a blank card if an invalid URL (no hostname) is posted.
            if host:
                data["cards"][0]["sections"][0]["widgets"].append({
                    "buttons": [
                        {
                            "textButton": {
                                "text": "OPEN QUERY",
                                "onClick": {
                                    "openLink": {
                                        "url": "{host}/queries/{query_id}".format(host=host, query_id=query.id)
                                    }
                data["cards"][0]["sections"][0]["widgets"].append(
                    {
                        "buttons": [
                            {
                                "textButton": {
                                    "text": "OPEN QUERY",
                                    "onClick": {
                                        "openLink": {
                                            "url": "{host}/queries/{query_id}".format(
                                                host=host, query_id=query.id
                                            )
                                        }
                                    },
                                }
                            }
                        }
                    ]
                })
                        ]
                    }
                )

            headers = {"Content-Type": "application/json; charset=UTF-8"}
            resp = requests.post(options.get("url"), data=json_dumps(data), headers=headers, timeout=5.0)
            resp = requests.post(
                options.get("url"), data=json_dumps(data), headers=headers, timeout=5.0
            )
            if resp.status_code != 200:
                logging.error("webhook send ERROR. status_code => {status}".format(status=resp.status_code))
                logging.error(
                    "webhook send ERROR. status_code => {status}".format(
                        status=resp.status_code
                    )
                )
        except Exception:
            logging.exception("webhook send ERROR.")

@@ -7,9 +7,9 @@ from redash.utils import json_dumps, deprecated


colors = {
Alert.OK_STATE: 'green',
Alert.TRIGGERED_STATE: 'red',
Alert.UNKNOWN_STATE: 'yellow'
Alert.OK_STATE: "green",
Alert.TRIGGERED_STATE: "red",
Alert.UNKNOWN_STATE: "yellow",
}


@@ -22,35 +22,38 @@ class HipChat(BaseDestination):
"properties": {
"url": {
"type": "string",
"title": "HipChat Notification URL (get it from the Integrations page)"
},
"title": "HipChat Notification URL (get it from the Integrations page)",
}
},
"required": ["url"]
"required": ["url"],
}

@classmethod
def icon(cls):
return 'fa-comment-o'
return "fa-comment-o"

def notify(self, alert, query, user, new_state, app, host, options):
try:
alert_url = '{host}/alerts/{alert_id}'.format(host=host, alert_id=alert.id)
query_url = '{host}/queries/{query_id}'.format(host=host, query_id=query.id)
alert_url = "{host}/alerts/{alert_id}".format(host=host, alert_id=alert.id)
query_url = "{host}/queries/{query_id}".format(host=host, query_id=query.id)

message = '<a href="{alert_url}">{alert_name}</a> changed state to {new_state} (based on <a href="{query_url}">this query</a>).'.format(
alert_name=alert.name, new_state=new_state.upper(),
alert_name=alert.name,
new_state=new_state.upper(),
alert_url=alert_url,
query_url=query_url)
query_url=query_url,
)

data = {
'message': message,
'color': colors.get(new_state, 'green')
}
headers = {'Content-Type': 'application/json'}
response = requests.post(options['url'], data=json_dumps(data), headers=headers, timeout=5.0)
data = {"message": message, "color": colors.get(new_state, "green")}
headers = {"Content-Type": "application/json"}
response = requests.post(
options["url"], data=json_dumps(data), headers=headers, timeout=5.0
)

if response.status_code != 204:
logging.error('Bad status code received from HipChat: %d', response.status_code)
logging.error(
"Bad status code received from HipChat: %d", response.status_code
)
except Exception:
logging.exception("HipChat Send ERROR.")


@@ -9,30 +9,18 @@ class Mattermost(BaseDestination):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'url': {
'type': 'string',
'title': 'Mattermost Webhook URL'
},
'username': {
'type': 'string',
'title': 'Username'
},
'icon_url': {
'type': 'string',
'title': 'Icon (URL)'
},
'channel': {
'type': 'string',
'title': 'Channel'
}
}
"type": "object",
"properties": {
"url": {"type": "string", "title": "Mattermost Webhook URL"},
"username": {"type": "string", "title": "Username"},
"icon_url": {"type": "string", "title": "Icon (URL)"},
"channel": {"type": "string", "title": "Channel"},
},
}

@classmethod
def icon(cls):
return 'fa-bolt'
return "fa-bolt"

def notify(self, alert, query, user, new_state, app, host, options):
if alert.custom_subject:
@@ -41,27 +29,32 @@ class Mattermost(BaseDestination):
text = "#### " + alert.name + " just triggered"
else:
text = "#### " + alert.name + " went back to normal"
payload = {'text': text}
payload = {"text": text}

if alert.custom_body:
payload['attachments'] = [{'fields': [{
"title": "Description",
"value": alert.custom_body
}]}]
payload["attachments"] = [
{"fields": [{"title": "Description", "value": alert.custom_body}]}
]

if options.get('username'):
payload['username'] = options.get('username')
if options.get('icon_url'):
payload['icon_url'] = options.get('icon_url')
if options.get('channel'):
payload['channel'] = options.get('channel')
if options.get("username"):
payload["username"] = options.get("username")
if options.get("icon_url"):
payload["icon_url"] = options.get("icon_url")
if options.get("channel"):
payload["channel"] = options.get("channel")

try:
resp = requests.post(options.get('url'), data=json_dumps(payload), timeout=5.0)
resp = requests.post(
options.get("url"), data=json_dumps(payload), timeout=5.0
)
logging.warning(resp.text)

if resp.status_code != 200:
logging.error("Mattermost webhook send ERROR. status_code => {status}".format(status=resp.status_code))
logging.error(
"Mattermost webhook send ERROR. status_code => {status}".format(
status=resp.status_code
)
)
except Exception:
logging.exception("Mattermost webhook send ERROR.")


@@ -11,8 +11,8 @@ except ImportError:

class PagerDuty(BaseDestination):

KEY_STRING = '{alert_id}_{query_id}'
DESCRIPTION_STR = 'Alert: {alert_name}'
KEY_STRING = "{alert_id}_{query_id}"
DESCRIPTION_STR = "Alert: {alert_name}"

@classmethod
def enabled(cls):
@@ -21,55 +21,55 @@ class PagerDuty(BaseDestination):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'integration_key': {
'type': 'string',
'title': 'PagerDuty Service Integration Key'
"type": "object",
"properties": {
"integration_key": {
"type": "string",
"title": "PagerDuty Service Integration Key",
},
"description": {
"type": "string",
"title": "Description for the event, defaults to alert name",
},
'description': {
'type': 'string',
'title': 'Description for the event, defaults to alert name',
}
},
"required": ["integration_key"]
"required": ["integration_key"],
}

@classmethod
def icon(cls):
return 'creative-commons-pd-alt'
return "creative-commons-pd-alt"

def notify(self, alert, query, user, new_state, app, host, options):

if alert.custom_subject:
default_desc = alert.custom_subject
elif options.get('description'):
default_desc = options.get('description')
elif options.get("description"):
default_desc = options.get("description")
else:
default_desc = self.DESCRIPTION_STR.format(alert_name=alert.name)

incident_key = self.KEY_STRING.format(alert_id=alert.id, query_id=query.id)
data = {
'routing_key': options.get('integration_key'),
'incident_key': incident_key,
'dedup_key': incident_key,
'payload': {
'summary': default_desc,
'severity': 'error',
'source': 'redash',
}
"routing_key": options.get("integration_key"),
"incident_key": incident_key,
"dedup_key": incident_key,
"payload": {
"summary": default_desc,
"severity": "error",
"source": "redash",
},
}

if alert.custom_body:
data['payload']['custom_details'] = alert.custom_body
data["payload"]["custom_details"] = alert.custom_body

if new_state == 'triggered':
data['event_action'] = 'trigger'
if new_state == "triggered":
data["event_action"] = "trigger"
elif new_state == "unknown":
logging.info('Unknown state, doing nothing')
logging.info("Unknown state, doing nothing")
return
else:
data['event_action'] = 'resolve'
data["event_action"] = "resolve"

try:


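Stepping back from the formatting: Email, HangoutsChat, HipChat, Mattermost, and PagerDuty above all implement the same three-method destination interface, which is why the hunks look so alike. A rough sketch of a minimal destination in the post-Black style (the register helper and module layout are assumptions inferred from the code in this diff, not something this commit adds):

import logging

import requests

from redash.destinations import BaseDestination, register


class ExampleWebhook(BaseDestination):
    @classmethod
    def configuration_schema(cls):
        return {
            "type": "object",
            "properties": {"url": {"type": "string", "title": "Webhook URL"}},
            "required": ["url"],
        }

    @classmethod
    def icon(cls):
        return "fa-bolt"

    def notify(self, alert, query, user, new_state, app, host, options):
        # Same defensive pattern as the destinations above: short timeout,
        # log on non-200, never let an exception escape.
        try:
            resp = requests.post(
                options.get("url"),
                json={"alert": alert.name, "state": new_state},
                timeout=5.0,
            )
            if resp.status_code != 200:
                logging.error("ExampleWebhook send ERROR. status_code => %d", resp.status_code)
        except Exception:
            logging.exception("ExampleWebhook send ERROR.")


register(ExampleWebhook)
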
@@ -9,54 +9,40 @@ class Slack(BaseDestination):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'url': {
'type': 'string',
'title': 'Slack Webhook URL'
},
'username': {
'type': 'string',
'title': 'Username'
},
'icon_emoji': {
'type': 'string',
'title': 'Icon (Emoji)'
},
'icon_url': {
'type': 'string',
'title': 'Icon (URL)'
},
'channel': {
'type': 'string',
'title': 'Channel'
}
}
"type": "object",
"properties": {
"url": {"type": "string", "title": "Slack Webhook URL"},
"username": {"type": "string", "title": "Username"},
"icon_emoji": {"type": "string", "title": "Icon (Emoji)"},
"icon_url": {"type": "string", "title": "Icon (URL)"},
"channel": {"type": "string", "title": "Channel"},
},
}

@classmethod
def icon(cls):
return 'fa-slack'
return "fa-slack"

def notify(self, alert, query, user, new_state, app, host, options):
# Documentation: https://api.slack.com/docs/attachments
fields = [
{
"title": "Query",
"value": "{host}/queries/{query_id}".format(host=host, query_id=query.id),
"short": True
"value": "{host}/queries/{query_id}".format(
host=host, query_id=query.id
),
"short": True,
},
{
"title": "Alert",
"value": "{host}/alerts/{alert_id}".format(host=host, alert_id=alert.id),
"short": True
}
"value": "{host}/alerts/{alert_id}".format(
host=host, alert_id=alert.id
),
"short": True,
},
]
if alert.custom_body:
fields.append({
"title": "Description",
"value": alert.custom_body
})
fields.append({"title": "Description", "value": alert.custom_body})
if new_state == "triggered":
if alert.custom_subject:
text = alert.custom_subject
@@ -67,18 +53,28 @@ class Slack(BaseDestination):
text = alert.name + " went back to normal"
color = "#27ae60"

payload = {'attachments': [{'text': text, 'color': color, 'fields': fields}]}
payload = {"attachments": [{"text": text, "color": color, "fields": fields}]}

if options.get('username'): payload['username'] = options.get('username')
if options.get('icon_emoji'): payload['icon_emoji'] = options.get('icon_emoji')
if options.get('icon_url'): payload['icon_url'] = options.get('icon_url')
if options.get('channel'): payload['channel'] = options.get('channel')
if options.get("username"):
payload["username"] = options.get("username")
if options.get("icon_emoji"):
payload["icon_emoji"] = options.get("icon_emoji")
if options.get("icon_url"):
payload["icon_url"] = options.get("icon_url")
if options.get("channel"):
payload["channel"] = options.get("channel")

try:
resp = requests.post(options.get('url'), data=json_dumps(payload), timeout=5.0)
resp = requests.post(
options.get("url"), data=json_dumps(payload), timeout=5.0
)
logging.warning(resp.text)
if resp.status_code != 200:
logging.error("Slack send ERROR. status_code => {status}".format(status=resp.status_code))
logging.error(
"Slack send ERROR. status_code => {status}".format(
status=resp.status_code
)
)
except Exception:
logging.exception("Slack send ERROR.")


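One more rule shows up in the Slack hunk: Black never leaves a compound statement on one line, so each `if options.get(...): payload[...] = ...` one-liner becomes a two-line block. The same illustrative approach as before (assuming black is installed; not part of the commit):

import black

src = "if options.get('username'): payload['username'] = options.get('username')\n"
print(black.format_str(src, mode=black.FileMode()))
# if options.get("username"):
#     payload["username"] = options.get("username")
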
@@ -13,40 +13,48 @@ class Webhook(BaseDestination):
return {
"type": "object",
"properties": {
"url": {
"type": "string",
},
"username": {
"type": "string"
},
"password": {
"type": "string"
}
"url": {"type": "string"},
"username": {"type": "string"},
"password": {"type": "string"},
},
"required": ["url"],
"secret": ["password"]
"secret": ["password"],
}

@classmethod
def icon(cls):
return 'fa-bolt'
return "fa-bolt"

def notify(self, alert, query, user, new_state, app, host, options):
try:
data = {
'event': 'alert_state_change',
'alert': serialize_alert(alert, full=False),
'url_base': host,
"event": "alert_state_change",
"alert": serialize_alert(alert, full=False),
"url_base": host,
}

data['alert']['description'] = alert.custom_body
data['alert']['title'] = alert.custom_subject
data["alert"]["description"] = alert.custom_body
data["alert"]["title"] = alert.custom_subject

headers = {'Content-Type': 'application/json'}
auth = HTTPBasicAuth(options.get('username'), options.get('password')) if options.get('username') else None
resp = requests.post(options.get('url'), data=json_dumps(data), auth=auth, headers=headers, timeout=5.0)
headers = {"Content-Type": "application/json"}
auth = (
HTTPBasicAuth(options.get("username"), options.get("password"))
if options.get("username")
else None
)
resp = requests.post(
options.get("url"),
data=json_dumps(data),
auth=auth,
headers=headers,
timeout=5.0,
)
if resp.status_code != 200:
logging.error("webhook send ERROR. status_code => {status}".format(status=resp.status_code))
logging.error(
"webhook send ERROR. status_code => {status}".format(
status=resp.status_code
)
)
except Exception:
logging.exception("webhook send ERROR.")


@@ -8,13 +8,13 @@ from redash.permissions import require_super_admin
from redash.security import talisman


@routes.route('/ping', methods=['GET'])
@routes.route("/ping", methods=["GET"])
@talisman(force_https=False)
def ping():
return 'PONG.'
return "PONG."


@routes.route('/status.json')
@routes.route("/status.json")
@login_required
@require_super_admin
def status_api():
@@ -23,6 +23,15 @@ def status_api():


def init_app(app):
from redash.handlers import embed, queries, static, authentication, admin, setup, organization
from redash.handlers import (
embed,
queries,
static,
authentication,
admin,
setup,
organization,
)

app.register_blueprint(routes)
api.init_app(app)

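The init_app hunk applies the same line-length rule to imports: once a `from ... import ...` line exceeds the default 88 characters, Black wraps the names in parentheses, one per line, with a trailing comma. Running the original line through Black reproduces the block above (illustrative snippet, assuming black is installed):

import black

src = (
    "from redash.handlers import embed, queries, static, "
    "authentication, admin, setup, organization\n"
)
# Produces the parenthesized, one-name-per-line form shown in the hunk above.
print(black.format_str(src, mode=black.FileMode()))
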
@@ -11,56 +11,59 @@ from redash.utils import json_loads
from redash.monitor import celery_tasks, rq_status


@routes.route('/api/admin/queries/outdated', methods=['GET'])
@routes.route("/api/admin/queries/outdated", methods=["GET"])
@require_super_admin
@login_required
def outdated_queries():
manager_status = redis_connection.hgetall('redash:status')
query_ids = json_loads(manager_status.get('query_ids', '[]'))
manager_status = redis_connection.hgetall("redash:status")
query_ids = json_loads(manager_status.get("query_ids", "[]"))
if query_ids:
outdated_queries = (
models.Query.query.outerjoin(models.QueryResult)
.filter(models.Query.id.in_(query_ids))
.order_by(models.Query.created_at.desc())
.filter(models.Query.id.in_(query_ids))
.order_by(models.Query.created_at.desc())
)
else:
outdated_queries = []

record_event(current_org, current_user._get_current_object(), {
'action': 'list',
'object_type': 'outdated_queries',
})
record_event(
current_org,
current_user._get_current_object(),
{"action": "list", "object_type": "outdated_queries"},
)

response = {
'queries': QuerySerializer(outdated_queries, with_stats=True, with_last_modified_by=False).serialize(),
'updated_at': manager_status['last_refresh_at'],
"queries": QuerySerializer(
outdated_queries, with_stats=True, with_last_modified_by=False
).serialize(),
"updated_at": manager_status["last_refresh_at"],
}
return json_response(response)


@routes.route('/api/admin/queries/tasks', methods=['GET'])
@routes.route("/api/admin/queries/tasks", methods=["GET"])
@require_super_admin
@login_required
def queries_tasks():
record_event(current_org, current_user._get_current_object(), {
'action': 'list',
'object_type': 'celery_tasks'
})
record_event(
current_org,
current_user._get_current_object(),
{"action": "list", "object_type": "celery_tasks"},
)

response = {
'tasks': celery_tasks(),
}
response = {"tasks": celery_tasks()}

return json_response(response)


@routes.route('/api/admin/queries/rq_status', methods=['GET'])
@routes.route("/api/admin/queries/rq_status", methods=["GET"])
@require_super_admin
@login_required
def queries_rq_status():
record_event(current_org, current_user._get_current_object(), {
'action': 'list',
'object_type': 'rq_status'
})
record_event(
current_org,
current_user._get_current_object(),
{"action": "list", "object_type": "rq_status"},
)

return json_response(rq_status())

@@ -5,43 +5,48 @@ from funcy import project

from redash import models
from redash.serializers import serialize_alert
from redash.handlers.base import (BaseResource, get_object_or_404,
require_fields)
from redash.permissions import (require_access, require_admin_or_owner,
require_permission, view_only)
from redash.handlers.base import BaseResource, get_object_or_404, require_fields
from redash.permissions import (
require_access,
require_admin_or_owner,
require_permission,
view_only,
)
from redash.utils import json_dumps


class AlertResource(BaseResource):
def get(self, alert_id):
alert = get_object_or_404(models.Alert.get_by_id_and_org, alert_id, self.current_org)
alert = get_object_or_404(
models.Alert.get_by_id_and_org, alert_id, self.current_org
)
require_access(alert, self.current_user, view_only)
self.record_event({
'action': 'view',
'object_id': alert.id,
'object_type': 'alert'
})
self.record_event(
{"action": "view", "object_id": alert.id, "object_type": "alert"}
)
return serialize_alert(alert)

def post(self, alert_id):
req = request.get_json(True)
params = project(req, ('options', 'name', 'query_id', 'rearm'))
alert = get_object_or_404(models.Alert.get_by_id_and_org, alert_id, self.current_org)
params = project(req, ("options", "name", "query_id", "rearm"))
alert = get_object_or_404(
models.Alert.get_by_id_and_org, alert_id, self.current_org
)
require_admin_or_owner(alert.user.id)

self.update_model(alert, params)
models.db.session.commit()

self.record_event({
'action': 'edit',
'object_id': alert.id,
'object_type': 'alert'
})
self.record_event(
{"action": "edit", "object_id": alert.id, "object_type": "alert"}
)

return serialize_alert(alert)

def delete(self, alert_id):
alert = get_object_or_404(models.Alert.get_by_id_and_org, alert_id, self.current_org)
alert = get_object_or_404(
models.Alert.get_by_id_and_org, alert_id, self.current_org
)
require_admin_or_owner(alert.user_id)
models.db.session.delete(alert)
models.db.session.commit()
@@ -49,68 +54,65 @@ class AlertResource(BaseResource):

class AlertMuteResource(BaseResource):
def post(self, alert_id):
alert = get_object_or_404(models.Alert.get_by_id_and_org, alert_id, self.current_org)
alert = get_object_or_404(
models.Alert.get_by_id_and_org, alert_id, self.current_org
)
require_admin_or_owner(alert.user.id)

alert.options['muted'] = True
alert.options["muted"] = True
models.db.session.commit()

self.record_event({
'action': 'mute',
'object_id': alert.id,
'object_type': 'alert'
})
self.record_event(
{"action": "mute", "object_id": alert.id, "object_type": "alert"}
)

def delete(self, alert_id):
alert = get_object_or_404(models.Alert.get_by_id_and_org, alert_id, self.current_org)
alert = get_object_or_404(
models.Alert.get_by_id_and_org, alert_id, self.current_org
)
require_admin_or_owner(alert.user.id)

alert.options['muted'] = False
alert.options["muted"] = False
models.db.session.commit()

self.record_event({
'action': 'unmute',
'object_id': alert.id,
'object_type': 'alert'
})
self.record_event(
{"action": "unmute", "object_id": alert.id, "object_type": "alert"}
)


class AlertListResource(BaseResource):
def post(self):
req = request.get_json(True)
require_fields(req, ('options', 'name', 'query_id'))
require_fields(req, ("options", "name", "query_id"))

query = models.Query.get_by_id_and_org(req['query_id'],
self.current_org)
query = models.Query.get_by_id_and_org(req["query_id"], self.current_org)
require_access(query, self.current_user, view_only)

alert = models.Alert(
name=req['name'],
name=req["name"],
query_rel=query,
user=self.current_user,
rearm=req.get('rearm'),
options=req['options'],
rearm=req.get("rearm"),
options=req["options"],
)

models.db.session.add(alert)
models.db.session.flush()
models.db.session.commit()

self.record_event({
'action': 'create',
'object_id': alert.id,
'object_type': 'alert'
})
self.record_event(
{"action": "create", "object_id": alert.id, "object_type": "alert"}
)

return serialize_alert(alert)

@require_permission('list_alerts')
@require_permission("list_alerts")
def get(self):
self.record_event({
'action': 'list',
'object_type': 'alert'
})
return [serialize_alert(alert) for alert in models.Alert.all(group_ids=self.current_user.group_ids)]
self.record_event({"action": "list", "object_type": "alert"})
return [
serialize_alert(alert)
for alert in models.Alert.all(group_ids=self.current_user.group_ids)
]


class AlertSubscriptionListResource(BaseResource):
@@ -119,22 +121,26 @@ class AlertSubscriptionListResource(BaseResource):

alert = models.Alert.get_by_id_and_org(alert_id, self.current_org)
require_access(alert, self.current_user, view_only)
kwargs = {'alert': alert, 'user': self.current_user}
kwargs = {"alert": alert, "user": self.current_user}

if 'destination_id' in req:
destination = models.NotificationDestination.get_by_id_and_org(req['destination_id'], self.current_org)
kwargs['destination'] = destination
if "destination_id" in req:
destination = models.NotificationDestination.get_by_id_and_org(
req["destination_id"], self.current_org
)
kwargs["destination"] = destination

subscription = models.AlertSubscription(**kwargs)
models.db.session.add(subscription)
models.db.session.commit()

self.record_event({
'action': 'subscribe',
'object_id': alert_id,
'object_type': 'alert',
'destination': req.get('destination_id')
})
self.record_event(
{
"action": "subscribe",
"object_id": alert_id,
"object_type": "alert",
"destination": req.get("destination_id"),
}
)

d = subscription.to_dict()
return d
@@ -155,8 +161,6 @@ class AlertSubscriptionResource(BaseResource):
models.db.session.delete(subscription)
models.db.session.commit()

self.record_event({
'action': 'unsubscribe',
'object_id': alert_id,
'object_type': 'alert'
})
self.record_event(
{"action": "unsubscribe", "object_id": alert_id, "object_type": "alert"}
)

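Worth noting before the large route table that follows: Black is not all-or-nothing. Hand-formatted regions can be fenced with fmt: off / fmt: on comments and Black will leave them untouched; this commit does not use them, but a hypothetical example looks like:

# fmt: off
IDENTITY_2X2 = [
    1, 0,
    0, 1,
]  # hand-aligned layout that Black would otherwise reflow
# fmt: on
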
@@ -2,56 +2,86 @@ from flask import make_response
from flask_restful import Api
from werkzeug.wrappers import Response

from redash.handlers.alerts import (AlertListResource,
AlertResource, AlertMuteResource,
AlertSubscriptionListResource,
AlertSubscriptionResource)
from redash.handlers.alerts import (
AlertListResource,
AlertResource,
AlertMuteResource,
AlertSubscriptionListResource,
AlertSubscriptionResource,
)
from redash.handlers.base import org_scoped_rule
from redash.handlers.dashboards import (DashboardFavoriteListResource,
DashboardListResource,
DashboardResource,
DashboardShareResource,
DashboardTagsResource,
PublicDashboardResource)
from redash.handlers.data_sources import (DataSourceListResource,
DataSourcePauseResource,
DataSourceResource,
DataSourceSchemaResource,
DataSourceTestResource,
DataSourceTypeListResource)
from redash.handlers.destinations import (DestinationListResource,
DestinationResource,
DestinationTypeListResource)
from redash.handlers.dashboards import (
DashboardFavoriteListResource,
DashboardListResource,
DashboardResource,
DashboardShareResource,
DashboardTagsResource,
PublicDashboardResource,
)
from redash.handlers.data_sources import (
DataSourceListResource,
DataSourcePauseResource,
DataSourceResource,
DataSourceSchemaResource,
DataSourceTestResource,
DataSourceTypeListResource,
)
from redash.handlers.destinations import (
DestinationListResource,
DestinationResource,
DestinationTypeListResource,
)
from redash.handlers.events import EventsResource
from redash.handlers.favorites import (DashboardFavoriteResource,
QueryFavoriteResource)
from redash.handlers.groups import (GroupDataSourceListResource,
GroupDataSourceResource, GroupListResource,
GroupMemberListResource,
GroupMemberResource, GroupResource)
from redash.handlers.permissions import (CheckPermissionResource,
ObjectPermissionsListResource)
from redash.handlers.queries import (MyQueriesResource, QueryArchiveResource,
QueryFavoriteListResource,
QueryForkResource, QueryListResource,
QueryRecentResource, QueryRefreshResource,
QueryResource, QuerySearchResource,
QueryTagsResource,
QueryRegenerateApiKeyResource)
from redash.handlers.query_results import (JobResource,
QueryResultDropdownResource,
QueryDropdownsResource,
QueryResultListResource,
QueryResultResource)
from redash.handlers.query_snippets import (QuerySnippetListResource,
QuerySnippetResource)
from redash.handlers.favorites import DashboardFavoriteResource, QueryFavoriteResource
from redash.handlers.groups import (
GroupDataSourceListResource,
GroupDataSourceResource,
GroupListResource,
GroupMemberListResource,
GroupMemberResource,
GroupResource,
)
from redash.handlers.permissions import (
CheckPermissionResource,
ObjectPermissionsListResource,
)
from redash.handlers.queries import (
MyQueriesResource,
QueryArchiveResource,
QueryFavoriteListResource,
QueryForkResource,
QueryListResource,
QueryRecentResource,
QueryRefreshResource,
QueryResource,
QuerySearchResource,
QueryTagsResource,
QueryRegenerateApiKeyResource,
)
from redash.handlers.query_results import (
JobResource,
QueryResultDropdownResource,
QueryDropdownsResource,
QueryResultListResource,
QueryResultResource,
)
from redash.handlers.query_snippets import (
QuerySnippetListResource,
QuerySnippetResource,
)
from redash.handlers.settings import OrganizationSettings
from redash.handlers.users import (UserDisableResource, UserInviteResource,
UserListResource,
UserRegenerateApiKeyResource,
UserResetPasswordResource, UserResource)
from redash.handlers.visualizations import (VisualizationListResource,
VisualizationResource)
from redash.handlers.users import (
UserDisableResource,
UserInviteResource,
UserListResource,
UserRegenerateApiKeyResource,
UserResetPasswordResource,
UserResource,
)
from redash.handlers.visualizations import (
VisualizationListResource,
VisualizationResource,
)
from redash.handlers.widgets import WidgetListResource, WidgetResource
from redash.utils import json_dumps

@@ -65,7 +95,7 @@ class ApiExt(Api):
api = ApiExt()


@api.representation('application/json')
@api.representation("application/json")
def json_representation(data, code, headers=None):
# Flask-Restful checks only for flask.Response but flask-login uses werkzeug.wrappers.Response
if isinstance(data, Response):
@@ -75,91 +105,211 @@ def json_representation(data, code, headers=None):
return resp


api.add_org_resource(AlertResource, '/api/alerts/<alert_id>', endpoint='alert')
api.add_org_resource(AlertMuteResource, '/api/alerts/<alert_id>/mute', endpoint='alert_mute')
api.add_org_resource(AlertSubscriptionListResource, '/api/alerts/<alert_id>/subscriptions', endpoint='alert_subscriptions')
api.add_org_resource(AlertSubscriptionResource, '/api/alerts/<alert_id>/subscriptions/<subscriber_id>', endpoint='alert_subscription')
api.add_org_resource(AlertListResource, '/api/alerts', endpoint='alerts')
api.add_org_resource(AlertResource, "/api/alerts/<alert_id>", endpoint="alert")
api.add_org_resource(
AlertMuteResource, "/api/alerts/<alert_id>/mute", endpoint="alert_mute"
)
api.add_org_resource(
AlertSubscriptionListResource,
"/api/alerts/<alert_id>/subscriptions",
endpoint="alert_subscriptions",
)
api.add_org_resource(
AlertSubscriptionResource,
"/api/alerts/<alert_id>/subscriptions/<subscriber_id>",
endpoint="alert_subscription",
)
api.add_org_resource(AlertListResource, "/api/alerts", endpoint="alerts")

api.add_org_resource(DashboardListResource, '/api/dashboards', endpoint='dashboards')
api.add_org_resource(DashboardResource, '/api/dashboards/<dashboard_slug>', endpoint='dashboard')
api.add_org_resource(PublicDashboardResource, '/api/dashboards/public/<token>', endpoint='public_dashboard')
api.add_org_resource(DashboardShareResource, '/api/dashboards/<dashboard_id>/share', endpoint='dashboard_share')
api.add_org_resource(DashboardListResource, "/api/dashboards", endpoint="dashboards")
api.add_org_resource(
DashboardResource, "/api/dashboards/<dashboard_slug>", endpoint="dashboard"
)
api.add_org_resource(
PublicDashboardResource,
"/api/dashboards/public/<token>",
endpoint="public_dashboard",
)
api.add_org_resource(
DashboardShareResource,
"/api/dashboards/<dashboard_id>/share",
endpoint="dashboard_share",
)

api.add_org_resource(DataSourceTypeListResource, '/api/data_sources/types', endpoint='data_source_types')
api.add_org_resource(DataSourceListResource, '/api/data_sources', endpoint='data_sources')
api.add_org_resource(DataSourceSchemaResource, '/api/data_sources/<data_source_id>/schema')
api.add_org_resource(DataSourcePauseResource, '/api/data_sources/<data_source_id>/pause')
api.add_org_resource(DataSourceTestResource, '/api/data_sources/<data_source_id>/test')
api.add_org_resource(DataSourceResource, '/api/data_sources/<data_source_id>', endpoint='data_source')
api.add_org_resource(
DataSourceTypeListResource, "/api/data_sources/types", endpoint="data_source_types"
)
api.add_org_resource(
DataSourceListResource, "/api/data_sources", endpoint="data_sources"
)
api.add_org_resource(
DataSourceSchemaResource, "/api/data_sources/<data_source_id>/schema"
)
api.add_org_resource(
DataSourcePauseResource, "/api/data_sources/<data_source_id>/pause"
)
api.add_org_resource(DataSourceTestResource, "/api/data_sources/<data_source_id>/test")
api.add_org_resource(
DataSourceResource, "/api/data_sources/<data_source_id>", endpoint="data_source"
)

api.add_org_resource(GroupListResource, '/api/groups', endpoint='groups')
api.add_org_resource(GroupResource, '/api/groups/<group_id>', endpoint='group')
api.add_org_resource(GroupMemberListResource, '/api/groups/<group_id>/members', endpoint='group_members')
api.add_org_resource(GroupMemberResource, '/api/groups/<group_id>/members/<user_id>', endpoint='group_member')
api.add_org_resource(GroupDataSourceListResource, '/api/groups/<group_id>/data_sources', endpoint='group_data_sources')
api.add_org_resource(GroupDataSourceResource, '/api/groups/<group_id>/data_sources/<data_source_id>', endpoint='group_data_source')
api.add_org_resource(GroupListResource, "/api/groups", endpoint="groups")
api.add_org_resource(GroupResource, "/api/groups/<group_id>", endpoint="group")
api.add_org_resource(
GroupMemberListResource, "/api/groups/<group_id>/members", endpoint="group_members"
)
api.add_org_resource(
GroupMemberResource,
"/api/groups/<group_id>/members/<user_id>",
endpoint="group_member",
)
api.add_org_resource(
GroupDataSourceListResource,
"/api/groups/<group_id>/data_sources",
endpoint="group_data_sources",
)
api.add_org_resource(
GroupDataSourceResource,
"/api/groups/<group_id>/data_sources/<data_source_id>",
endpoint="group_data_source",
)

api.add_org_resource(EventsResource, '/api/events', endpoint='events')
api.add_org_resource(EventsResource, "/api/events", endpoint="events")

api.add_org_resource(QueryFavoriteListResource, '/api/queries/favorites', endpoint='query_favorites')
api.add_org_resource(QueryFavoriteResource, '/api/queries/<query_id>/favorite', endpoint='query_favorite')
api.add_org_resource(DashboardFavoriteListResource, '/api/dashboards/favorites', endpoint='dashboard_favorites')
api.add_org_resource(DashboardFavoriteResource, '/api/dashboards/<object_id>/favorite', endpoint='dashboard_favorite')
api.add_org_resource(
QueryFavoriteListResource, "/api/queries/favorites", endpoint="query_favorites"
)
api.add_org_resource(
QueryFavoriteResource, "/api/queries/<query_id>/favorite", endpoint="query_favorite"
)
api.add_org_resource(
DashboardFavoriteListResource,
"/api/dashboards/favorites",
endpoint="dashboard_favorites",
)
api.add_org_resource(
DashboardFavoriteResource,
"/api/dashboards/<object_id>/favorite",
endpoint="dashboard_favorite",
)

api.add_org_resource(QueryTagsResource, '/api/queries/tags', endpoint='query_tags')
api.add_org_resource(DashboardTagsResource, '/api/dashboards/tags', endpoint='dashboard_tags')
api.add_org_resource(QueryTagsResource, "/api/queries/tags", endpoint="query_tags")
api.add_org_resource(
DashboardTagsResource, "/api/dashboards/tags", endpoint="dashboard_tags"
)

api.add_org_resource(QuerySearchResource, '/api/queries/search', endpoint='queries_search')
api.add_org_resource(QueryRecentResource, '/api/queries/recent', endpoint='recent_queries')
api.add_org_resource(QueryArchiveResource, '/api/queries/archive', endpoint='queries_archive')
api.add_org_resource(QueryListResource, '/api/queries', endpoint='queries')
api.add_org_resource(MyQueriesResource, '/api/queries/my', endpoint='my_queries')
api.add_org_resource(QueryRefreshResource, '/api/queries/<query_id>/refresh', endpoint='query_refresh')
api.add_org_resource(QueryResource, '/api/queries/<query_id>', endpoint='query')
api.add_org_resource(QueryForkResource, '/api/queries/<query_id>/fork', endpoint='query_fork')
api.add_org_resource(QueryRegenerateApiKeyResource,
'/api/queries/<query_id>/regenerate_api_key',
endpoint='query_regenerate_api_key')
api.add_org_resource(
QuerySearchResource, "/api/queries/search", endpoint="queries_search"
)
api.add_org_resource(
QueryRecentResource, "/api/queries/recent", endpoint="recent_queries"
)
api.add_org_resource(
QueryArchiveResource, "/api/queries/archive", endpoint="queries_archive"
)
api.add_org_resource(QueryListResource, "/api/queries", endpoint="queries")
api.add_org_resource(MyQueriesResource, "/api/queries/my", endpoint="my_queries")
api.add_org_resource(
QueryRefreshResource, "/api/queries/<query_id>/refresh", endpoint="query_refresh"
)
api.add_org_resource(QueryResource, "/api/queries/<query_id>", endpoint="query")
api.add_org_resource(
QueryForkResource, "/api/queries/<query_id>/fork", endpoint="query_fork"
)
api.add_org_resource(
QueryRegenerateApiKeyResource,
"/api/queries/<query_id>/regenerate_api_key",
endpoint="query_regenerate_api_key",
)

api.add_org_resource(ObjectPermissionsListResource, '/api/<object_type>/<object_id>/acl', endpoint='object_permissions')
api.add_org_resource(CheckPermissionResource, '/api/<object_type>/<object_id>/acl/<access_type>', endpoint='check_permissions')
api.add_org_resource(
ObjectPermissionsListResource,
"/api/<object_type>/<object_id>/acl",
endpoint="object_permissions",
)
api.add_org_resource(
CheckPermissionResource,
"/api/<object_type>/<object_id>/acl/<access_type>",
endpoint="check_permissions",
)

api.add_org_resource(QueryResultListResource, '/api/query_results', endpoint='query_results')
api.add_org_resource(QueryResultDropdownResource, '/api/queries/<query_id>/dropdown', endpoint='query_result_dropdown')
api.add_org_resource(QueryDropdownsResource, '/api/queries/<query_id>/dropdowns/<dropdown_query_id>', endpoint='query_result_dropdowns')
api.add_org_resource(QueryResultResource,
'/api/query_results/<query_result_id>.<filetype>',
'/api/query_results/<query_result_id>',
'/api/queries/<query_id>/results',
'/api/queries/<query_id>/results.<filetype>',
'/api/queries/<query_id>/results/<query_result_id>.<filetype>',
endpoint='query_result')
api.add_org_resource(JobResource,
'/api/jobs/<job_id>',
'/api/queries/<query_id>/jobs/<job_id>',
endpoint='job')
api.add_org_resource(
QueryResultListResource, "/api/query_results", endpoint="query_results"
)
api.add_org_resource(
QueryResultDropdownResource,
"/api/queries/<query_id>/dropdown",
endpoint="query_result_dropdown",
)
api.add_org_resource(
QueryDropdownsResource,
"/api/queries/<query_id>/dropdowns/<dropdown_query_id>",
endpoint="query_result_dropdowns",
)
api.add_org_resource(
QueryResultResource,
"/api/query_results/<query_result_id>.<filetype>",
"/api/query_results/<query_result_id>",
"/api/queries/<query_id>/results",
"/api/queries/<query_id>/results.<filetype>",
"/api/queries/<query_id>/results/<query_result_id>.<filetype>",
endpoint="query_result",
)
api.add_org_resource(
JobResource,
"/api/jobs/<job_id>",
"/api/queries/<query_id>/jobs/<job_id>",
endpoint="job",
)

api.add_org_resource(UserListResource, '/api/users', endpoint='users')
api.add_org_resource(UserResource, '/api/users/<user_id>', endpoint='user')
api.add_org_resource(UserInviteResource, '/api/users/<user_id>/invite', endpoint='user_invite')
api.add_org_resource(UserResetPasswordResource, '/api/users/<user_id>/reset_password', endpoint='user_reset_password')
api.add_org_resource(UserRegenerateApiKeyResource,
'/api/users/<user_id>/regenerate_api_key',
endpoint='user_regenerate_api_key')
api.add_org_resource(UserDisableResource, '/api/users/<user_id>/disable', endpoint='user_disable')
api.add_org_resource(UserListResource, "/api/users", endpoint="users")
api.add_org_resource(UserResource, "/api/users/<user_id>", endpoint="user")
api.add_org_resource(
UserInviteResource, "/api/users/<user_id>/invite", endpoint="user_invite"
)
api.add_org_resource(
UserResetPasswordResource,
"/api/users/<user_id>/reset_password",
endpoint="user_reset_password",
)
api.add_org_resource(
UserRegenerateApiKeyResource,
"/api/users/<user_id>/regenerate_api_key",
endpoint="user_regenerate_api_key",
)
api.add_org_resource(
UserDisableResource, "/api/users/<user_id>/disable", endpoint="user_disable"
)

api.add_org_resource(VisualizationListResource, '/api/visualizations', endpoint='visualizations')
api.add_org_resource(VisualizationResource, '/api/visualizations/<visualization_id>', endpoint='visualization')
api.add_org_resource(
VisualizationListResource, "/api/visualizations", endpoint="visualizations"
)
api.add_org_resource(
VisualizationResource,
"/api/visualizations/<visualization_id>",
endpoint="visualization",
)

api.add_org_resource(WidgetListResource, '/api/widgets', endpoint='widgets')
api.add_org_resource(WidgetResource, '/api/widgets/<int:widget_id>', endpoint='widget')
api.add_org_resource(WidgetListResource, "/api/widgets", endpoint="widgets")
api.add_org_resource(WidgetResource, "/api/widgets/<int:widget_id>", endpoint="widget")

api.add_org_resource(DestinationTypeListResource, '/api/destinations/types', endpoint='destination_types')
api.add_org_resource(DestinationResource, '/api/destinations/<destination_id>', endpoint='destination')
api.add_org_resource(DestinationListResource, '/api/destinations', endpoint='destinations')
api.add_org_resource(
DestinationTypeListResource, "/api/destinations/types", endpoint="destination_types"
)
api.add_org_resource(
DestinationResource, "/api/destinations/<destination_id>", endpoint="destination"
)
api.add_org_resource(
DestinationListResource, "/api/destinations", endpoint="destinations"
)

api.add_org_resource(QuerySnippetResource, '/api/query_snippets/<snippet_id>', endpoint='query_snippet')
api.add_org_resource(QuerySnippetListResource, '/api/query_snippets', endpoint='query_snippets')
api.add_org_resource(
QuerySnippetResource, "/api/query_snippets/<snippet_id>", endpoint="query_snippet"
)
api.add_org_resource(
QuerySnippetListResource, "/api/query_snippets", endpoint="query_snippets"
)

api.add_org_resource(OrganizationSettings, '/api/settings/organization', endpoint='organization_settings')
api.add_org_resource(
OrganizationSettings, "/api/settings/organization", endpoint="organization_settings"
)

@@ -5,11 +5,14 @@ from flask import abort, flash, redirect, render_template, request, url_for
from flask_login import current_user, login_required, login_user, logout_user
from redash import __version__, limiter, models, settings
from redash.authentication import current_org, get_login_url, get_next_path
from redash.authentication.account import (BadSignature, SignatureExpired,
send_password_reset_email,
send_user_disabled_email,
send_verify_email,
validate_token)
from redash.authentication.account import (
BadSignature,
SignatureExpired,
send_password_reset_email,
send_user_disabled_email,
send_verify_email,
validate_token,
)
from redash.handlers import routes
from redash.handlers.base import json_response, org_scoped_rule
from redash.version_check import get_latest_version
@@ -20,9 +23,11 @@ logger = logging.getLogger(__name__)

def get_google_auth_url(next_path):
if settings.MULTI_ORG:
google_auth_url = url_for('google_oauth.authorize_org', next=next_path, org_slug=current_org.slug)
google_auth_url = url_for(
"google_oauth.authorize_org", next=next_path, org_slug=current_org.slug
)
else:
google_auth_url = url_for('google_oauth.authorize', next=next_path)
google_auth_url = url_for("google_oauth.authorize", next=next_path)
return google_auth_url


@@ -32,90 +37,125 @@ def render_token_login_page(template, org_slug, token, invite):
org = current_org._get_current_object()
user = models.User.get_by_id_and_org(user_id, org)
except NoResultFound:
logger.exception("Bad user id in token. Token= , User id= %s, Org=%s", user_id, token, org_slug)
return render_template("error.html", error_message="Invalid invite link. Please ask for a new one."), 400
logger.exception(
"Bad user id in token. Token= , User id= %s, Org=%s",
user_id,
token,
org_slug,
)
return (
render_template(
"error.html",
error_message="Invalid invite link. Please ask for a new one.",
),
400,
)
except (SignatureExpired, BadSignature):
logger.exception("Failed to verify invite token: %s, org=%s", token, org_slug)
return render_template("error.html",
error_message="Your invite link has expired. Please ask for a new one."), 400
return (
render_template(
"error.html",
error_message="Your invite link has expired. Please ask for a new one.",
),
400,
)

if invite and user.details.get('is_invitation_pending') is False:
return render_template("error.html",
error_message=("This invitation has already been accepted. "
"Please try resetting your password instead.")), 400
if invite and user.details.get("is_invitation_pending") is False:
return (
render_template(
"error.html",
error_message=(
"This invitation has already been accepted. "
"Please try resetting your password instead."
),
),
400,
)

status_code = 200
if request.method == 'POST':
if 'password' not in request.form:
flash('Bad Request')
if request.method == "POST":
if "password" not in request.form:
flash("Bad Request")
status_code = 400
elif not request.form['password']:
flash('Cannot use empty password.')
elif not request.form["password"]:
flash("Cannot use empty password.")
status_code = 400
elif len(request.form['password']) < 6:
flash('Password length is too short (<6).')
elif len(request.form["password"]) < 6:
flash("Password length is too short (<6).")
status_code = 400
else:
if invite:
user.is_invitation_pending = False
user.hash_password(request.form['password'])
user.hash_password(request.form["password"])
models.db.session.add(user)
login_user(user)
models.db.session.commit()
return redirect(url_for('redash.index', org_slug=org_slug))
return redirect(url_for("redash.index", org_slug=org_slug))

google_auth_url = get_google_auth_url(url_for('redash.index', org_slug=org_slug))
google_auth_url = get_google_auth_url(url_for("redash.index", org_slug=org_slug))

return render_template(template,
show_google_openid=settings.GOOGLE_OAUTH_ENABLED,
google_auth_url=google_auth_url,
show_saml_login=current_org.get_setting('auth_saml_enabled'),
show_remote_user_login=settings.REMOTE_USER_LOGIN_ENABLED,
show_ldap_login=settings.LDAP_LOGIN_ENABLED,
org_slug=org_slug,
user=user), status_code
return (
render_template(
template,
show_google_openid=settings.GOOGLE_OAUTH_ENABLED,
google_auth_url=google_auth_url,
show_saml_login=current_org.get_setting("auth_saml_enabled"),
show_remote_user_login=settings.REMOTE_USER_LOGIN_ENABLED,
show_ldap_login=settings.LDAP_LOGIN_ENABLED,
org_slug=org_slug,
user=user,
),
status_code,
)


@routes.route(org_scoped_rule('/invite/<token>'), methods=['GET', 'POST'])
@routes.route(org_scoped_rule("/invite/<token>"), methods=["GET", "POST"])
def invite(token, org_slug=None):
return render_token_login_page("invite.html", org_slug, token, True)


@routes.route(org_scoped_rule('/reset/<token>'), methods=['GET', 'POST'])
@routes.route(org_scoped_rule("/reset/<token>"), methods=["GET", "POST"])
def reset(token, org_slug=None):
return render_token_login_page("reset.html", org_slug, token, False)


@routes.route(org_scoped_rule('/verify/<token>'), methods=['GET'])
@routes.route(org_scoped_rule("/verify/<token>"), methods=["GET"])
def verify(token, org_slug=None):
try:
user_id = validate_token(token)
org = current_org._get_current_object()
user = models.User.get_by_id_and_org(user_id, org)
except (BadSignature, NoResultFound):
logger.exception("Failed to verify email verification token: %s, org=%s", token, org_slug)
return render_template("error.html",
error_message="Your verification link is invalid. Please ask for a new one."), 400
logger.exception(
"Failed to verify email verification token: %s, org=%s", token, org_slug
)
return (
render_template(
"error.html",
error_message="Your verification link is invalid. Please ask for a new one.",
),
400,
)

user.is_email_verified = True
models.db.session.add(user)
models.db.session.commit()

template_context = {"org_slug": org_slug} if settings.MULTI_ORG else {}
next_url = url_for('redash.index', **template_context)
next_url = url_for("redash.index", **template_context)

return render_template("verify.html", next_url=next_url)


@routes.route(org_scoped_rule('/forgot'), methods=['GET', 'POST'])
@routes.route(org_scoped_rule("/forgot"), methods=["GET", "POST"])
def forgot_password(org_slug=None):
if not current_org.get_setting('auth_password_login_enabled'):
if not current_org.get_setting("auth_password_login_enabled"):
abort(404)

submitted = False
if request.method == 'POST' and request.form['email']:
if request.method == "POST" and request.form["email"]:
submitted = True
email = request.form['email']
email = request.form["email"]
try:
org = current_org._get_current_object()
user = models.User.get_by_email_and_org(email, org)
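The authentication hunks above repeat a final pattern: a `return render_template(...), 400` whose call no longer fits on one line is wrapped in explicit parentheses, with the tuple elements split one per line. Reproducing one case from the hunk above (illustrative snippet, assuming black is installed):

import black

src = '''def invite_expired():
    return render_template("error.html", error_message="Your invite link has expired. Please ask for a new one."), 400
'''
# Yields the parenthesized multi-line return seen in the diff.
print(black.format_str(src, mode=black.FileMode()))
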
@@ -129,38 +169,44 @@ def forgot_password(org_slug=None):
|
||||
return render_template("forgot.html", submitted=submitted)
|
||||
|
||||
|
||||
@routes.route(org_scoped_rule('/verification_email/'), methods=['POST'])
|
||||
@routes.route(org_scoped_rule("/verification_email/"), methods=["POST"])
|
||||
def verification_email(org_slug=None):
|
||||
if not current_user.is_email_verified:
|
||||
send_verify_email(current_user, current_org)
|
||||
|
||||
return json_response({
|
||||
"message": "Please check your email inbox in order to verify your email address."
|
||||
})
|
||||
return json_response(
|
||||
{
|
||||
"message": "Please check your email inbox in order to verify your email address."
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@routes.route(org_scoped_rule('/login'), methods=['GET', 'POST'])
|
||||
@routes.route(org_scoped_rule("/login"), methods=["GET", "POST"])
|
||||
@limiter.limit(settings.THROTTLE_LOGIN_PATTERN)
|
||||
def login(org_slug=None):
|
||||
# We intentionally use == as otherwise it won't actually use the proxy. So weird :O
|
||||
# noinspection PyComparisonWithNone
|
||||
if current_org == None and not settings.MULTI_ORG:
|
||||
return redirect('/setup')
|
||||
return redirect("/setup")
|
||||
elif current_org == None:
|
||||
return redirect('/')
|
||||
return redirect("/")

    index_url = url_for('redash.index', org_slug=org_slug)
    unsafe_next_path = request.args.get('next', index_url)
    index_url = url_for("redash.index", org_slug=org_slug)
    unsafe_next_path = request.args.get("next", index_url)
    next_path = get_next_path(unsafe_next_path)
    if current_user.is_authenticated:
        return redirect(next_path)

    if request.method == 'POST':
    if request.method == "POST":
        try:
            org = current_org._get_current_object()
            user = models.User.get_by_email_and_org(request.form['email'], org)
            if user and not user.is_disabled and user.verify_password(request.form['password']):
                remember = ('remember' in request.form)
            user = models.User.get_by_email_and_org(request.form["email"], org)
            if (
                user
                and not user.is_disabled
                and user.verify_password(request.form["password"])
            ):
                remember = "remember" in request.form
                login_user(user, remember=remember)
                return redirect(next_path)
            else:
@@ -170,19 +216,21 @@ def login(org_slug=None):

    google_auth_url = get_google_auth_url(next_path)

    return render_template("login.html",
                           org_slug=org_slug,
                           next=next_path,
                           email=request.form.get('email', ''),
                           show_google_openid=settings.GOOGLE_OAUTH_ENABLED,
                           google_auth_url=google_auth_url,
                           show_password_login=current_org.get_setting('auth_password_login_enabled'),
                           show_saml_login=current_org.get_setting('auth_saml_enabled'),
                           show_remote_user_login=settings.REMOTE_USER_LOGIN_ENABLED,
                           show_ldap_login=settings.LDAP_LOGIN_ENABLED)
    return render_template(
        "login.html",
        org_slug=org_slug,
        next=next_path,
        email=request.form.get("email", ""),
        show_google_openid=settings.GOOGLE_OAUTH_ENABLED,
        google_auth_url=google_auth_url,
        show_password_login=current_org.get_setting("auth_password_login_enabled"),
        show_saml_login=current_org.get_setting("auth_saml_enabled"),
        show_remote_user_login=settings.REMOTE_USER_LOGIN_ENABLED,
        show_ldap_login=settings.LDAP_LOGIN_ENABLED,
    )


@routes.route(org_scoped_rule('/logout'))
@routes.route(org_scoped_rule("/logout"))
def logout(org_slug=None):
    logout_user()
    return redirect(get_login_url(next=None))
@@ -190,64 +238,67 @@ def logout(org_slug=None):

def base_href():
    if settings.MULTI_ORG:
        base_href = url_for('redash.index', _external=True, org_slug=current_org.slug)
        base_href = url_for("redash.index", _external=True, org_slug=current_org.slug)
    else:
        base_href = url_for('redash.index', _external=True)
        base_href = url_for("redash.index", _external=True)

    return base_href


def date_time_format_config():
    date_format = current_org.get_setting('date_format')
    date_format = current_org.get_setting("date_format")
    date_format_list = set(["DD/MM/YY", "MM/DD/YY", "YYYY-MM-DD", settings.DATE_FORMAT])
    time_format = current_org.get_setting('time_format')
    time_format = current_org.get_setting("time_format")
    time_format_list = set(["HH:mm", "HH:mm:ss", "HH:mm:ss.SSS", settings.TIME_FORMAT])
    return {
        'dateFormat': date_format,
        'dateFormatList': list(date_format_list),
        'timeFormatList': list(time_format_list),
        'dateTimeFormat': "{0} {1}".format(date_format, time_format),
        "dateFormat": date_format,
        "dateFormatList": list(date_format_list),
        "timeFormatList": list(time_format_list),
        "dateTimeFormat": "{0} {1}".format(date_format, time_format),
    }


def number_format_config():
    return {
        'integerFormat': current_org.get_setting('integer_format'),
        'floatFormat': current_org.get_setting('float_format'),
        "integerFormat": current_org.get_setting("integer_format"),
        "floatFormat": current_org.get_setting("float_format"),
    }


def client_config():
    if not current_user.is_api_user() and current_user.is_authenticated:
        client_config = {
            'newVersionAvailable': bool(get_latest_version()),
            'version': __version__
            "newVersionAvailable": bool(get_latest_version()),
            "version": __version__,
        }
    else:
        client_config = {}

    if current_user.has_permission('admin') and current_org.get_setting('beacon_consent') is None:
        client_config['showBeaconConsentMessage'] = True

    if (
        current_user.has_permission("admin")
        and current_org.get_setting("beacon_consent") is None
    ):
        client_config["showBeaconConsentMessage"] = True

    defaults = {
        'allowScriptsInUserInput': settings.ALLOW_SCRIPTS_IN_USER_INPUT,
        'showPermissionsControl': current_org.get_setting("feature_show_permissions_control"),
        'allowCustomJSVisualizations': settings.FEATURE_ALLOW_CUSTOM_JS_VISUALIZATIONS,
        'autoPublishNamedQueries': settings.FEATURE_AUTO_PUBLISH_NAMED_QUERIES,
        'extendedAlertOptions': settings.FEATURE_EXTENDED_ALERT_OPTIONS,
        'mailSettingsMissing': not settings.email_server_is_configured(),
        'dashboardRefreshIntervals': settings.DASHBOARD_REFRESH_INTERVALS,
        'queryRefreshIntervals': settings.QUERY_REFRESH_INTERVALS,
        'googleLoginEnabled': settings.GOOGLE_OAUTH_ENABLED,
        'pageSize': settings.PAGE_SIZE,
        'pageSizeOptions': settings.PAGE_SIZE_OPTIONS,
        'tableCellMaxJSONSize': settings.TABLE_CELL_MAX_JSON_SIZE,
        "allowScriptsInUserInput": settings.ALLOW_SCRIPTS_IN_USER_INPUT,
        "showPermissionsControl": current_org.get_setting(
            "feature_show_permissions_control"
        ),
        "allowCustomJSVisualizations": settings.FEATURE_ALLOW_CUSTOM_JS_VISUALIZATIONS,
        "autoPublishNamedQueries": settings.FEATURE_AUTO_PUBLISH_NAMED_QUERIES,
        "extendedAlertOptions": settings.FEATURE_EXTENDED_ALERT_OPTIONS,
        "mailSettingsMissing": not settings.email_server_is_configured(),
        "dashboardRefreshIntervals": settings.DASHBOARD_REFRESH_INTERVALS,
        "queryRefreshIntervals": settings.QUERY_REFRESH_INTERVALS,
        "googleLoginEnabled": settings.GOOGLE_OAUTH_ENABLED,
        "pageSize": settings.PAGE_SIZE,
        "pageSizeOptions": settings.PAGE_SIZE_OPTIONS,
        "tableCellMaxJSONSize": settings.TABLE_CELL_MAX_JSON_SIZE,
    }

    client_config.update(defaults)
    client_config.update({
        'basePath': base_href()
    })
    client_config.update({"basePath": base_href()})
    client_config.update(date_time_format_config())
    client_config.update(number_format_config())

@@ -258,43 +309,41 @@ def messages():
    messages = []

    if not current_user.is_email_verified:
        messages.append('email-not-verified')
        messages.append("email-not-verified")

    if settings.ALLOW_PARAMETERS_IN_EMBEDS:
        messages.append('using-deprecated-embed-feature')
        messages.append("using-deprecated-embed-feature")

    return messages


@routes.route('/api/config', methods=['GET'])
@routes.route("/api/config", methods=["GET"])
def config(org_slug=None):
    return json_response({
        'org_slug': current_org.slug,
        'client_config': client_config()
    })
    return json_response(
        {"org_slug": current_org.slug, "client_config": client_config()}
    )


@routes.route(org_scoped_rule('/api/session'), methods=['GET'])
@routes.route(org_scoped_rule("/api/session"), methods=["GET"])
@login_required
def session(org_slug=None):
    if current_user.is_api_user():
        user = {
            'permissions': [],
            'apiKey': current_user.id
        }
        user = {"permissions": [], "apiKey": current_user.id}
    else:
        user = {
            'profile_image_url': current_user.profile_image_url,
            'id': current_user.id,
            'name': current_user.name,
            'email': current_user.email,
            'groups': current_user.group_ids,
            'permissions': current_user.permissions
            "profile_image_url": current_user.profile_image_url,
            "id": current_user.id,
            "name": current_user.name,
            "email": current_user.email,
            "groups": current_user.group_ids,
            "permissions": current_user.permissions,
        }

    return json_response({
        'user': user,
        'messages': messages(),
        'org_slug': current_org.slug,
        'client_config': client_config()
    })
    return json_response(
        {
            "user": user,
            "messages": messages(),
            "org_slug": current_org.slug,
            "client_config": client_config(),
        }
    )

@@ -15,7 +15,9 @@ from sqlalchemy import cast
from sqlalchemy.dialects import postgresql
from sqlalchemy_utils import sort_query

routes = Blueprint('redash', __name__, template_folder=settings.fix_assets_path('templates'))
routes = Blueprint(
    "redash", __name__, template_folder=settings.fix_assets_path("templates")
)


class BaseResource(Resource):
@@ -26,7 +28,7 @@ class BaseResource(Resource):
        self._user = None

    def dispatch_request(self, *args, **kwargs):
        kwargs.pop('org_slug', None)
        kwargs.pop("org_slug", None)

        return super(BaseResource, self).dispatch_request(*args, **kwargs)

@@ -49,24 +51,14 @@ class BaseResource(Resource):

def record_event(org, user, options):
    if user.is_api_user():
        options.update({
            'api_key': user.name,
            'org_id': org.id
        })
        options.update({"api_key": user.name, "org_id": org.id})
    else:
        options.update({
            'user_id': user.id,
            'user_name': user.name,
            'org_id': org.id
        })
        options.update({"user_id": user.id, "user_name": user.name, "org_id": org.id})

    options.update({
        'user_agent': request.user_agent.string,
        'ip': request.remote_addr
    })
    options.update({"user_agent": request.user_agent.string, "ip": request.remote_addr})

    if 'timestamp' not in options:
        options['timestamp'] = int(time.time())
    if "timestamp" not in options:
        options["timestamp"] = int(time.time())

    record_event_task.delay(options)

@@ -91,13 +83,13 @@ def paginate(query_set, page, page_size, serializer, **kwargs):
    count = query_set.count()

    if page < 1:
        abort(400, message='Page must be positive integer.')
        abort(400, message="Page must be positive integer.")

    if (page - 1) * page_size + 1 > count > 0:
        abort(400, message='Page is out of range.')
        abort(400, message="Page is out of range.")

    if page_size > 250 or page_size < 1:
        abort(400, message='Page size is out of range (1-250).')
        abort(400, message="Page size is out of range (1-250).")

    results = query_set.paginate(page, page_size)

@@ -107,12 +99,7 @@ def paginate(query_set, page, page_size, serializer, **kwargs):
    else:
        items = [serializer(result) for result in results.items]

    return {
        'count': count,
        'page': page,
        'page_size': page_size,
        'results': items,
    }
    return {"count": count, "page": page, "page_size": page_size, "results": items}


def org_scoped_rule(rule):
@@ -123,13 +110,15 @@ def org_scoped_rule(rule):


def json_response(response):
    return current_app.response_class(json_dumps(response), mimetype='application/json')
    return current_app.response_class(json_dumps(response), mimetype="application/json")


def filter_by_tags(result_set, column):
    if request.args.getlist('tags'):
        tags = request.args.getlist('tags')
        result_set = result_set.filter(cast(column, postgresql.ARRAY(db.Text)).contains(tags))
    if request.args.getlist("tags"):
        tags = request.args.getlist("tags")
        result_set = result_set.filter(
            cast(column, postgresql.ARRAY(db.Text)).contains(tags)
        )
    return result_set
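On PostgreSQL, `contains` on an ARRAY column emits the `@>` ("array contains") operator, i.e. "the row's tags are a superset of the requested tags". A sketch of what the filter compiles to, using an illustrative table rather than the actual Redash model (1.x-style `select([...])`):

```python
# Illustrative sketch, not Redash code: inspect the SQL that ARRAY.contains produces.
from sqlalchemy import Column, Integer, MetaData, Table, Text, cast, select
from sqlalchemy.dialects import postgresql

metadata = MetaData()
dashboards = Table(
    "dashboards", metadata,
    Column("id", Integer, primary_key=True),
    Column("tags", postgresql.ARRAY(Text)),
)

stmt = select([dashboards]).where(
    cast(dashboards.c.tags, postgresql.ARRAY(Text)).contains(["finance", "weekly"])
)
print(stmt.compile(dialect=postgresql.dialect()))
# ... WHERE CAST(dashboards.tags AS TEXT[]) @> %(param_1)s  (roughly)
```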


@@ -139,7 +128,7 @@ def order_results(results, default_order, allowed_orders, fallback=True):
    "order" request query parameter or the given default order.
    """
    # See if a particular order has been requested
    requested_order = request.args.get('order', '').strip()
    requested_order = request.args.get("order", "").strip()

    # and if not (and no fallback is wanted) return results as is
    if not requested_order and not fallback:

@@ -3,12 +3,19 @@ from funcy import project, partial

from flask_restful import abort
from redash import models, serializers
from redash.handlers.base import (BaseResource, get_object_or_404, paginate,
                                  filter_by_tags,
                                  order_results as _order_results)
from redash.permissions import (can_modify, require_admin_or_owner,
                                require_object_modify_permission,
                                require_permission)
from redash.handlers.base import (
    BaseResource,
    get_object_or_404,
    paginate,
    filter_by_tags,
    order_results as _order_results,
)
from redash.permissions import (
    can_modify,
    require_admin_or_owner,
    require_object_modify_permission,
    require_permission,
)
from redash.security import csp_allows_embeding
from redash.serializers import serialize_dashboard
from sqlalchemy.orm.exc import StaleDataError
@@ -16,21 +23,19 @@ from sqlalchemy.orm.exc import StaleDataError

# Ordering map for relationships
order_map = {
    'name': 'lowercase_name',
    '-name': '-lowercase_name',
    'created_at': 'created_at',
    '-created_at': '-created_at',
    "name": "lowercase_name",
    "-name": "-lowercase_name",
    "created_at": "created_at",
    "-created_at": "-created_at",
}

order_results = partial(
    _order_results,
    default_order='-created_at',
    allowed_orders=order_map,
    _order_results, default_order="-created_at", allowed_orders=order_map
)
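The `partial` here (effectively `functools.partial`) pre-binds the default order and the allowed-order map, so call sites only pass the results and an optional `fallback` flag. A self-contained sketch of the same pattern (the function body is a stand-in, not the real implementation):

```python
from functools import partial


def order_results(results, default_order, allowed_orders, fallback=True):
    # Stand-in body: just report what was bound, to show the call shape.
    return (results, default_order, sorted(allowed_orders))


order_dashboards = partial(
    order_results, default_order="-created_at", allowed_orders={"name", "-name"}
)
print(order_dashboards([1, 2, 3]))
# ([1, 2, 3], '-created_at', ['-name', 'name'])
```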


class DashboardListResource(BaseResource):
    @require_permission('list_dashboards')
    @require_permission("list_dashboards")
    def get(self):
        """
        Lists all accessible dashboards.
@@ -43,7 +48,7 @@ class DashboardListResource(BaseResource):
        Responds with an array of :ref:`dashboard <dashboard-response-label>`
        objects.
        """
        search_term = request.args.get('q')
        search_term = request.args.get("q")

        if search_term:
            results = models.Dashboard.search(
@@ -54,9 +59,7 @@ class DashboardListResource(BaseResource):
            )
        else:
            results = models.Dashboard.all(
                self.current_org,
                self.current_user.group_ids,
                self.current_user.id,
                self.current_org, self.current_user.group_ids, self.current_user.id
            )

        results = filter_by_tags(results, models.Dashboard.tags)
@@ -66,8 +69,8 @@ class DashboardListResource(BaseResource):
        # provides an order by search rank
        ordered_results = order_results(results, fallback=not bool(search_term))

        page = request.args.get('page', 1, type=int)
        page_size = request.args.get('page_size', 25, type=int)
        page = request.args.get("page", 1, type=int)
        page_size = request.args.get("page_size", 25, type=int)

        response = paginate(
            ordered_results,
@@ -77,20 +80,15 @@ class DashboardListResource(BaseResource):
        )

        if search_term:
            self.record_event({
                'action': 'search',
                'object_type': 'dashboard',
                'term': search_term,
            })
            self.record_event(
                {"action": "search", "object_type": "dashboard", "term": search_term}
            )
        else:
            self.record_event({
                'action': 'list',
                'object_type': 'dashboard',
            })
            self.record_event({"action": "list", "object_type": "dashboard"})

        return response

    @require_permission('create_dashboard')
    @require_permission("create_dashboard")
    def post(self):
        """
        Creates a new dashboard.
@@ -100,18 +98,20 @@ class DashboardListResource(BaseResource):
        Responds with a :ref:`dashboard <dashboard-response-label>`.
        """
        dashboard_properties = request.get_json(force=True)
        dashboard = models.Dashboard(name=dashboard_properties['name'],
                                     org=self.current_org,
                                     user=self.current_user,
                                     is_draft=True,
                                     layout='[]')
        dashboard = models.Dashboard(
            name=dashboard_properties["name"],
            org=self.current_org,
            user=self.current_user,
            is_draft=True,
            layout="[]",
        )
        models.db.session.add(dashboard)
        models.db.session.commit()
        return serialize_dashboard(dashboard)


class DashboardResource(BaseResource):
    @require_permission('list_dashboards')
    @require_permission("list_dashboards")
    def get(self, dashboard_slug=None):
        """
        Retrieves a dashboard.
@@ -146,25 +146,32 @@ class DashboardResource(BaseResource):
        :>json string widget.created_at: ISO format timestamp for widget creation
        :>json string widget.updated_at: ISO format timestamp for last widget modification
        """
        dashboard = get_object_or_404(models.Dashboard.get_by_slug_and_org, dashboard_slug, self.current_org)
        response = serialize_dashboard(dashboard, with_widgets=True, user=self.current_user)
        dashboard = get_object_or_404(
            models.Dashboard.get_by_slug_and_org, dashboard_slug, self.current_org
        )
        response = serialize_dashboard(
            dashboard, with_widgets=True, user=self.current_user
        )

        api_key = models.ApiKey.get_by_object(dashboard)
        if api_key:
            response['public_url'] = url_for('redash.public_dashboard', token=api_key.api_key, org_slug=self.current_org.slug, _external=True)
            response['api_key'] = api_key.api_key
            response["public_url"] = url_for(
                "redash.public_dashboard",
                token=api_key.api_key,
                org_slug=self.current_org.slug,
                _external=True,
            )
            response["api_key"] = api_key.api_key

        response['can_edit'] = can_modify(dashboard, self.current_user)
        response["can_edit"] = can_modify(dashboard, self.current_user)

        self.record_event({
            'action': 'view',
            'object_id': dashboard.id,
            'object_type': 'dashboard',
        })
        self.record_event(
            {"action": "view", "object_id": dashboard.id, "object_type": "dashboard"}
        )

        return response

    @require_permission('edit_dashboard')
    @require_permission("edit_dashboard")
    def post(self, dashboard_slug):
        """
        Modifies a dashboard.
@@ -182,16 +189,25 @@ class DashboardResource(BaseResource):

        require_object_modify_permission(dashboard, self.current_user)

        updates = project(dashboard_properties, ('name', 'layout', 'version', 'tags',
                                                 'is_draft', 'dashboard_filters_enabled'))
        updates = project(
            dashboard_properties,
            (
                "name",
                "layout",
                "version",
                "tags",
                "is_draft",
                "dashboard_filters_enabled",
            ),
        )

        # SQLAlchemy handles the case where a concurrent transaction beats us
        # to the update. But we still have to make sure that we're not starting
        # out behind.
        if 'version' in updates and updates['version'] != dashboard.version:
        if "version" in updates and updates["version"] != dashboard.version:
            abort(409)
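The version check above is optimistic locking: the client echoes back the version it originally loaded, and a mismatch means another save landed first, so the handler answers 409 Conflict instead of silently overwriting. A minimal sketch of the idea (illustrative only, not Redash code):

```python
# Minimal optimistic-locking sketch (illustrative, not Redash code).
class Conflict(Exception):
    pass


def apply_update(record, updates):
    # The client sends back the version it read; if the stored record has
    # moved on since then, reject with a conflict (maps to HTTP 409).
    if "version" in updates and updates["version"] != record["version"]:
        raise Conflict("record changed since it was loaded")
    record.update(updates, version=record["version"] + 1)


record = {"version": 3, "name": "old"}
apply_update(record, {"version": 3, "name": "new"})  # succeeds, bumps to version 4
try:
    apply_update(record, {"version": 3, "name": "stale write"})
except Conflict as e:
    print(e)  # record changed since it was loaded
```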

        updates['changed_by'] = self.current_user
        updates["changed_by"] = self.current_user

        self.update_model(dashboard, updates)
        models.db.session.add(dashboard)
@@ -200,17 +216,17 @@ class DashboardResource(BaseResource):
        except StaleDataError:
            abort(409)

        result = serialize_dashboard(dashboard, with_widgets=True, user=self.current_user)
        result = serialize_dashboard(
            dashboard, with_widgets=True, user=self.current_user
        )

        self.record_event({
            'action': 'edit',
            'object_id': dashboard.id,
            'object_type': 'dashboard',
        })
        self.record_event(
            {"action": "edit", "object_id": dashboard.id, "object_type": "dashboard"}
        )

        return result

    @require_permission('edit_dashboard')
    @require_permission("edit_dashboard")
    def delete(self, dashboard_slug):
        """
        Archives a dashboard.
@@ -219,18 +235,18 @@ class DashboardResource(BaseResource):

        Responds with the archived :ref:`dashboard <dashboard-response-label>`.
        """
        dashboard = models.Dashboard.get_by_slug_and_org(dashboard_slug, self.current_org)
        dashboard = models.Dashboard.get_by_slug_and_org(
            dashboard_slug, self.current_org
        )
        dashboard.is_archived = True
        dashboard.record_changes(changed_by=self.current_user)
        models.db.session.add(dashboard)
        d = serialize_dashboard(dashboard, with_widgets=True, user=self.current_user)
        models.db.session.commit()

        self.record_event({
            'action': 'archive',
            'object_id': dashboard.id,
            'object_type': 'dashboard',
        })
        self.record_event(
            {"action": "archive", "object_id": dashboard.id, "object_type": "dashboard"}
        )

        return d

@@ -269,15 +285,22 @@ class DashboardShareResource(BaseResource):
        models.db.session.flush()
        models.db.session.commit()

        public_url = url_for('redash.public_dashboard', token=api_key.api_key, org_slug=self.current_org.slug, _external=True)
        public_url = url_for(
            "redash.public_dashboard",
            token=api_key.api_key,
            org_slug=self.current_org.slug,
            _external=True,
        )

        self.record_event({
            'action': 'activate_api_key',
            'object_id': dashboard.id,
            'object_type': 'dashboard',
        })
        self.record_event(
            {
                "action": "activate_api_key",
                "object_id": dashboard.id,
                "object_type": "dashboard",
            }
        )

        return {'public_url': public_url, 'api_key': api_key.api_key}
        return {"public_url": public_url, "api_key": api_key.api_key}

    def delete(self, dashboard_id):
        """
@@ -294,38 +317,39 @@ class DashboardShareResource(BaseResource):
        models.db.session.add(api_key)
        models.db.session.commit()

        self.record_event({
            'action': 'deactivate_api_key',
            'object_id': dashboard.id,
            'object_type': 'dashboard',
        })
        self.record_event(
            {
                "action": "deactivate_api_key",
                "object_id": dashboard.id,
                "object_type": "dashboard",
            }
        )


class DashboardTagsResource(BaseResource):
    @require_permission('list_dashboards')
    @require_permission("list_dashboards")
    def get(self):
        """
        Lists all accessible dashboards.
        """
        tags = models.Dashboard.all_tags(self.current_org, self.current_user)
        return {
            'tags': [
                {
                    'name': name,
                    'count': count,
                }
                for name, count in tags
            ]
        }
        return {"tags": [{"name": name, "count": count} for name, count in tags]}


class DashboardFavoriteListResource(BaseResource):
    def get(self):
        search_term = request.args.get('q')
        search_term = request.args.get("q")

        if search_term:
            base_query = models.Dashboard.search(self.current_org, self.current_user.group_ids, self.current_user.id, search_term)
            favorites = models.Dashboard.favorites(self.current_user, base_query=base_query)
            base_query = models.Dashboard.search(
                self.current_org,
                self.current_user.group_ids,
                self.current_user.id,
                search_term,
            )
            favorites = models.Dashboard.favorites(
                self.current_user, base_query=base_query
            )
        else:
            favorites = models.Dashboard.favorites(self.current_user)

@@ -336,18 +360,20 @@ class DashboardFavoriteListResource(BaseResource):
        # provides an order by search rank
        favorites = order_results(favorites, fallback=not bool(search_term))

        page = request.args.get('page', 1, type=int)
        page_size = request.args.get('page_size', 25, type=int)
        page = request.args.get("page", 1, type=int)
        page_size = request.args.get("page_size", 25, type=int)
        response = paginate(favorites, page, page_size, serialize_dashboard)

        self.record_event({
            'action': 'load_favorites',
            'object_type': 'dashboard',
            'params': {
                'q': search_term,
                'tags': request.args.getlist('tags'),
                'page': page
        self.record_event(
            {
                "action": "load_favorites",
                "object_type": "dashboard",
                "params": {
                    "q": search_term,
                    "tags": request.args.getlist("tags"),
                    "page": page,
                },
            }
            })
        )

        return response

@@ -8,10 +8,17 @@ from sqlalchemy.exc import IntegrityError

from redash import models
from redash.handlers.base import BaseResource, get_object_or_404, require_fields
from redash.permissions import (require_access, require_admin,
                                require_permission, view_only)
from redash.query_runner import (get_configuration_schema_for_query_runner_type,
                                 query_runners, NotSupported)
from redash.permissions import (
    require_access,
    require_admin,
    require_permission,
    view_only,
)
from redash.query_runner import (
    get_configuration_schema_for_query_runner_type,
    query_runners,
    NotSupported,
)
from redash.utils import filter_none
from redash.utils.configuration import ConfigurationContainer, ValidationError

@@ -19,77 +26,92 @@ from redash.utils.configuration import ConfigurationContainer, ValidationError
class DataSourceTypeListResource(BaseResource):
    @require_admin
    def get(self):
        available_query_runners = [q for q in query_runners.values() if not q.deprecated]
        return [q.to_dict() for q in sorted(available_query_runners, key=lambda q: q.name())]
        available_query_runners = [
            q for q in query_runners.values() if not q.deprecated
        ]
        return [
            q.to_dict() for q in sorted(available_query_runners, key=lambda q: q.name())
        ]


class DataSourceResource(BaseResource):
    @require_admin
    def get(self, data_source_id):
        data_source = models.DataSource.get_by_id_and_org(data_source_id, self.current_org)
        data_source = models.DataSource.get_by_id_and_org(
            data_source_id, self.current_org
        )
        ds = data_source.to_dict(all=True)
        self.record_event({
            'action': 'view',
            'object_id': data_source_id,
            'object_type': 'datasource',
        })
        self.record_event(
            {"action": "view", "object_id": data_source_id, "object_type": "datasource"}
        )
        return ds

    @require_admin
    def post(self, data_source_id):
        data_source = models.DataSource.get_by_id_and_org(data_source_id, self.current_org)
        data_source = models.DataSource.get_by_id_and_org(
            data_source_id, self.current_org
        )
        req = request.get_json(True)

        schema = get_configuration_schema_for_query_runner_type(req['type'])
        schema = get_configuration_schema_for_query_runner_type(req["type"])
        if schema is None:
            abort(400)
        try:
            data_source.options.set_schema(schema)
            data_source.options.update(filter_none(req['options']))
            data_source.options.update(filter_none(req["options"]))
        except ValidationError:
            abort(400)

        data_source.type = req['type']
        data_source.name = req['name']
        data_source.type = req["type"]
        data_source.name = req["name"]
        models.db.session.add(data_source)

        try:
            models.db.session.commit()
        except IntegrityError as e:
            if req['name'] in str(e):
                abort(400, message="Data source with the name {} already exists.".format(req['name']))
            if req["name"] in str(e):
                abort(
                    400,
                    message="Data source with the name {} already exists.".format(
                        req["name"]
                    ),
                )

            abort(400)

        self.record_event({
            'action': 'edit',
            'object_id': data_source.id,
            'object_type': 'datasource',
        })
        self.record_event(
            {"action": "edit", "object_id": data_source.id, "object_type": "datasource"}
        )

        return data_source.to_dict(all=True)

    @require_admin
    def delete(self, data_source_id):
        data_source = models.DataSource.get_by_id_and_org(data_source_id, self.current_org)
        data_source = models.DataSource.get_by_id_and_org(
            data_source_id, self.current_org
        )
        data_source.delete()

        self.record_event({
            'action': 'delete',
            'object_id': data_source_id,
            'object_type': 'datasource',
        })
        self.record_event(
            {
                "action": "delete",
                "object_id": data_source_id,
                "object_type": "datasource",
            }
        )

        return make_response('', 204)
        return make_response("", 204)


class DataSourceListResource(BaseResource):
    @require_permission('list_data_sources')
    @require_permission("list_data_sources")
    def get(self):
        if self.current_user.has_permission('admin'):
        if self.current_user.has_permission("admin"):
            data_sources = models.DataSource.all(self.current_org)
        else:
            data_sources = models.DataSource.all(self.current_org, group_ids=self.current_user.group_ids)
            data_sources = models.DataSource.all(
                self.current_org, group_ids=self.current_user.group_ids
            )

        response = {}
        for ds in data_sources:
@@ -98,74 +120,85 @@ class DataSourceListResource(BaseResource):

            try:
                d = ds.to_dict()
                d['view_only'] = all(project(ds.groups, self.current_user.group_ids).values())
                d["view_only"] = all(
                    project(ds.groups, self.current_user.group_ids).values()
                )
                response[ds.id] = d
            except AttributeError:
                logging.exception("Error with DataSource#to_dict (data source id: %d)", ds.id)
                logging.exception(
                    "Error with DataSource#to_dict (data source id: %d)", ds.id
                )

        self.record_event({
            'action': 'list',
            'object_id': 'admin/data_sources',
            'object_type': 'datasource',
        })
        self.record_event(
            {
                "action": "list",
                "object_id": "admin/data_sources",
                "object_type": "datasource",
            }
        )

        return sorted(list(response.values()), key=lambda d: d['name'].lower())
        return sorted(list(response.values()), key=lambda d: d["name"].lower())

    @require_admin
    def post(self):
        req = request.get_json(True)
        require_fields(req, ('options', 'name', 'type'))
        require_fields(req, ("options", "name", "type"))

        schema = get_configuration_schema_for_query_runner_type(req['type'])
        schema = get_configuration_schema_for_query_runner_type(req["type"])
        if schema is None:
            abort(400)

        config = ConfigurationContainer(filter_none(req['options']), schema)
        config = ConfigurationContainer(filter_none(req["options"]), schema)
        if not config.is_valid():
            abort(400)

        try:
            datasource = models.DataSource.create_with_group(org=self.current_org,
                                                             name=req['name'],
                                                             type=req['type'],
                                                             options=config)
            datasource = models.DataSource.create_with_group(
                org=self.current_org, name=req["name"], type=req["type"], options=config
            )

            models.db.session.commit()
        except IntegrityError as e:
            if req['name'] in str(e):
                abort(400, message="Data source with the name {} already exists.".format(req['name']))
            if req["name"] in str(e):
                abort(
                    400,
                    message="Data source with the name {} already exists.".format(
                        req["name"]
                    ),
                )

            abort(400)

        self.record_event({
            'action': 'create',
            'object_id': datasource.id,
            'object_type': 'datasource'
        })
        self.record_event(
            {
                "action": "create",
                "object_id": datasource.id,
                "object_type": "datasource",
            }
        )

        return datasource.to_dict(all=True)


class DataSourceSchemaResource(BaseResource):
    def get(self, data_source_id):
        data_source = get_object_or_404(models.DataSource.get_by_id_and_org, data_source_id, self.current_org)
        data_source = get_object_or_404(
            models.DataSource.get_by_id_and_org, data_source_id, self.current_org
        )
        require_access(data_source, self.current_user, view_only)
        refresh = request.args.get('refresh') is not None
        refresh = request.args.get("refresh") is not None

        response = {}

        try:
            response['schema'] = data_source.get_schema(refresh)
            response["schema"] = data_source.get_schema(refresh)
        except NotSupported:
            response['error'] = {
                'code': 1,
                'message': 'Data source type does not support retrieving schema'
            response["error"] = {
                "code": 1,
                "message": "Data source type does not support retrieving schema",
            }
        except Exception:
            response['error'] = {
                'code': 2,
                'message': 'Error retrieving schema.'
            }
            response["error"] = {"code": 2, "message": "Error retrieving schema."}

        return response

@@ -173,39 +206,49 @@ class DataSourceSchemaResource(BaseResource):
class DataSourcePauseResource(BaseResource):
    @require_admin
    def post(self, data_source_id):
        data_source = get_object_or_404(models.DataSource.get_by_id_and_org, data_source_id, self.current_org)
        data_source = get_object_or_404(
            models.DataSource.get_by_id_and_org, data_source_id, self.current_org
        )
        data = request.get_json(force=True, silent=True)
        if data:
            reason = data.get('reason')
            reason = data.get("reason")
        else:
            reason = request.args.get('reason')
            reason = request.args.get("reason")

        data_source.pause(reason)

        self.record_event({
            'action': 'pause',
            'object_id': data_source.id,
            'object_type': 'datasource'
        })
        self.record_event(
            {
                "action": "pause",
                "object_id": data_source.id,
                "object_type": "datasource",
            }
        )
        return data_source.to_dict()

    @require_admin
    def delete(self, data_source_id):
        data_source = get_object_or_404(models.DataSource.get_by_id_and_org, data_source_id, self.current_org)
        data_source = get_object_or_404(
            models.DataSource.get_by_id_and_org, data_source_id, self.current_org
        )
        data_source.resume()

        self.record_event({
            'action': 'resume',
            'object_id': data_source.id,
            'object_type': 'datasource'
        })
        self.record_event(
            {
                "action": "resume",
                "object_id": data_source.id,
                "object_type": "datasource",
            }
        )
        return data_source.to_dict()


class DataSourceTestResource(BaseResource):
    @require_admin
    def post(self, data_source_id):
        data_source = get_object_or_404(models.DataSource.get_by_id_and_org, data_source_id, self.current_org)
        data_source = get_object_or_404(
            models.DataSource.get_by_id_and_org, data_source_id, self.current_org
        )

        response = {}
        try:
@@ -215,10 +258,12 @@ class DataSourceTestResource(BaseResource):
        else:
            response = {"message": "success", "ok": True}

        self.record_event({
            'action': 'test',
            'object_id': data_source_id,
            'object_type': 'datasource',
            'result': response,
        })
        self.record_event(
            {
                "action": "test",
                "object_id": data_source_id,
                "object_type": "datasource",
                "result": response,
            }
        )
        return response

@@ -3,8 +3,10 @@ from flask_restful import abort
from sqlalchemy.exc import IntegrityError

from redash import models
from redash.destinations import (destinations,
                                 get_configuration_schema_for_destination_type)
from redash.destinations import (
    destinations,
    get_configuration_schema_for_destination_type,
)
from redash.handlers.base import BaseResource, require_fields
from redash.permissions import require_admin
from redash.utils.configuration import ConfigurationContainer, ValidationError
@@ -20,53 +22,68 @@ class DestinationTypeListResource(BaseResource):
class DestinationResource(BaseResource):
    @require_admin
    def get(self, destination_id):
        destination = models.NotificationDestination.get_by_id_and_org(destination_id, self.current_org)
        destination = models.NotificationDestination.get_by_id_and_org(
            destination_id, self.current_org
        )
        d = destination.to_dict(all=True)
        self.record_event({
            'action': 'view',
            'object_id': destination_id,
            'object_type': 'destination',
        })
        self.record_event(
            {
                "action": "view",
                "object_id": destination_id,
                "object_type": "destination",
            }
        )
        return d

    @require_admin
    def post(self, destination_id):
        destination = models.NotificationDestination.get_by_id_and_org(destination_id, self.current_org)
        destination = models.NotificationDestination.get_by_id_and_org(
            destination_id, self.current_org
        )
        req = request.get_json(True)

        schema = get_configuration_schema_for_destination_type(req['type'])
        schema = get_configuration_schema_for_destination_type(req["type"])
        if schema is None:
            abort(400)

        try:
            destination.type = req['type']
            destination.name = req['name']
            destination.type = req["type"]
            destination.name = req["name"]
            destination.options.set_schema(schema)
            destination.options.update(req['options'])
            destination.options.update(req["options"])
            models.db.session.add(destination)
            models.db.session.commit()
        except ValidationError:
            abort(400)
        except IntegrityError as e:
            if 'name' in str(e):
                abort(400, message="Alert Destination with the name {} already exists.".format(req['name']))
            if "name" in str(e):
                abort(
                    400,
                    message="Alert Destination with the name {} already exists.".format(
                        req["name"]
                    ),
                )
            abort(500)

        return destination.to_dict(all=True)

    @require_admin
    def delete(self, destination_id):
        destination = models.NotificationDestination.get_by_id_and_org(destination_id, self.current_org)
        destination = models.NotificationDestination.get_by_id_and_org(
            destination_id, self.current_org
        )
        models.db.session.delete(destination)
        models.db.session.commit()

        self.record_event({
            'action': 'delete',
            'object_id': destination_id,
            'object_type': 'destination'
        })
        self.record_event(
            {
                "action": "delete",
                "object_id": destination_id,
                "object_type": "destination",
            }
        )

        return make_response('', 204)
        return make_response("", 204)


class DestinationListResource(BaseResource):
@@ -81,39 +98,48 @@ class DestinationListResource(BaseResource):
            d = ds.to_dict()
            response[ds.id] = d

        self.record_event({
            'action': 'list',
            'object_id': 'admin/destinations',
            'object_type': 'destination',
        })
        self.record_event(
            {
                "action": "list",
                "object_id": "admin/destinations",
                "object_type": "destination",
            }
        )

        return list(response.values())

    @require_admin
    def post(self):
        req = request.get_json(True)
        require_fields(req, ('options', 'name', 'type'))
        require_fields(req, ("options", "name", "type"))

        schema = get_configuration_schema_for_destination_type(req['type'])
        schema = get_configuration_schema_for_destination_type(req["type"])
        if schema is None:
            abort(400)

        config = ConfigurationContainer(req['options'], schema)
        config = ConfigurationContainer(req["options"], schema)
        if not config.is_valid():
            abort(400)

        destination = models.NotificationDestination(org=self.current_org,
                                                     name=req['name'],
                                                     type=req['type'],
                                                     options=config,
                                                     user=self.current_user)
        destination = models.NotificationDestination(
            org=self.current_org,
            name=req["name"],
            type=req["type"],
            options=config,
            user=self.current_user,
        )

        try:
            models.db.session.add(destination)
            models.db.session.commit()
        except IntegrityError as e:
            if 'name' in str(e):
                abort(400, message="Alert Destination with the name {} already exists.".format(req['name']))
            if "name" in str(e):
                abort(
                    400,
                    message="Alert Destination with the name {} already exists.".format(
                        req["name"]
                    ),
                )
            abort(500)

        return destination.to_dict(all=True)

@@ -1,33 +1,37 @@


from flask import request

from .authentication import current_org
from flask_login import current_user, login_required
from redash import models
from redash.handlers import routes
from redash.handlers.base import (get_object_or_404, org_scoped_rule,
                                  record_event)
from redash.handlers.base import get_object_or_404, org_scoped_rule, record_event
from redash.handlers.static import render_index
from redash.security import csp_allows_embeding


@routes.route(org_scoped_rule('/embed/query/<query_id>/visualization/<visualization_id>'), methods=['GET'])
@routes.route(
    org_scoped_rule("/embed/query/<query_id>/visualization/<visualization_id>"),
    methods=["GET"],
)
@login_required
@csp_allows_embeding
def embed(query_id, visualization_id, org_slug=None):
    record_event(current_org, current_user._get_current_object(), {
        'action': 'view',
        'object_id': visualization_id,
        'object_type': 'visualization',
        'query_id': query_id,
        'embed': True,
        'referer': request.headers.get('Referer')
    })
    record_event(
        current_org,
        current_user._get_current_object(),
        {
            "action": "view",
            "object_id": visualization_id,
            "object_type": "visualization",
            "query_id": query_id,
            "embed": True,
            "referer": request.headers.get("Referer"),
        },
    )
    return render_index()


@routes.route(org_scoped_rule('/public/dashboards/<token>'), methods=['GET'])
@routes.route(org_scoped_rule("/public/dashboards/<token>"), methods=["GET"])
@login_required
@csp_allows_embeding
def public_dashboard(token, org_slug=None):
@@ -37,12 +41,16 @@ def public_dashboard(token, org_slug=None):
    api_key = get_object_or_404(models.ApiKey.get_by_api_key, token)
    dashboard = api_key.object

    record_event(current_org, current_user, {
        'action': 'view',
        'object_id': dashboard.id,
        'object_type': 'dashboard',
        'public': True,
        'headless': 'embed' in request.args,
        'referer': request.headers.get('Referer')
    })
    record_event(
        current_org,
        current_user,
        {
            "action": "view",
            "object_id": dashboard.id,
            "object_type": "dashboard",
            "public": True,
            "headless": "embed" in request.args,
            "referer": request.headers.get("Referer"),
        },
    )
    return render_index()

@@ -14,44 +14,46 @@ def get_location(ip):
    with maxminddb.open_database(geolite2.geolite2_database()) as reader:
        try:
            match = reader.get(ip)
            return match['country']['names']['en']
            return match["country"]["names"]["en"]
        except Exception:
            return "Unknown"


def event_details(event):
    details = {}
    if event.object_type == 'data_source' and event.action == 'execute_query':
        details['query'] = event.additional_properties['query']
        details['data_source'] = event.object_id
    elif event.object_type == 'page' and event.action == 'view':
        details['page'] = event.object_id
    if event.object_type == "data_source" and event.action == "execute_query":
        details["query"] = event.additional_properties["query"]
        details["data_source"] = event.object_id
    elif event.object_type == "page" and event.action == "view":
        details["page"] = event.object_id
    else:
        details['object_id'] = event.object_id
        details['object_type'] = event.object_type
        details["object_id"] = event.object_id
        details["object_type"] = event.object_type

    return details


def serialize_event(event):
    d = {
        'org_id': event.org_id,
        'user_id': event.user_id,
        'action': event.action,
        'object_type': event.object_type,
        'object_id': event.object_id,
        'created_at': event.created_at
        "org_id": event.org_id,
        "user_id": event.user_id,
        "action": event.action,
        "object_type": event.object_type,
        "object_id": event.object_id,
        "created_at": event.created_at,
    }

    if event.user_id:
        d['user_name'] = event.additional_properties.get('user_name', 'User {}'.format(event.user_id))
        d["user_name"] = event.additional_properties.get(
            "user_name", "User {}".format(event.user_id)
        )

    if not event.user_id:
        d['user_name'] = event.additional_properties.get('api_key', 'Unknown')
        d["user_name"] = event.additional_properties.get("api_key", "Unknown")

    d['browser'] = str(parse_ua(event.additional_properties.get('user_agent', '')))
    d['location'] = get_location(event.additional_properties.get('ip'))
    d['details'] = event_details(event)
    d["browser"] = str(parse_ua(event.additional_properties.get("user_agent", "")))
    d["location"] = get_location(event.additional_properties.get("ip"))
    d["details"] = event_details(event)

    return d

@@ -64,6 +66,6 @@ class EventsResource(BaseResource):

    @require_admin
    def get(self):
        page = request.args.get('page', 1, type=int)
        page_size = request.args.get('page_size', 25, type=int)
        page = request.args.get("page", 1, type=int)
        page_size = request.args.get("page_size", 25, type=int)
        return paginate(self.current_org.events, page, page_size, serialize_event)

@@ -2,77 +2,91 @@ from flask import request
from sqlalchemy.exc import IntegrityError

from redash import models
from redash.handlers.base import (BaseResource,
                                  get_object_or_404, paginate)
from redash.handlers.base import BaseResource, get_object_or_404, paginate
from redash.permissions import require_access, view_only


class QueryFavoriteResource(BaseResource):
    def post(self, query_id):
        query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
        query = get_object_or_404(
            models.Query.get_by_id_and_org, query_id, self.current_org
        )
        require_access(query, self.current_user, view_only)

        fav = models.Favorite(org_id=self.current_org.id, object=query, user=self.current_user)
        fav = models.Favorite(
            org_id=self.current_org.id, object=query, user=self.current_user
        )
        models.db.session.add(fav)

        try:
            models.db.session.commit()
        except IntegrityError as e:
            if 'unique_favorite' in str(e):
            if "unique_favorite" in str(e):
                models.db.session.rollback()
            else:
                raise e

        self.record_event({
            'action': 'favorite',
            'object_id': query.id,
            'object_type': 'query'
        })
        self.record_event(
            {"action": "favorite", "object_id": query.id, "object_type": "query"}
        )
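The handler above inserts unconditionally and lets the database's unique constraint catch duplicate favorites, rolling back instead of doing a racy check-then-insert. A self-contained sketch of the same pattern using sqlite3 so it runs standalone (table and names are illustrative):

```python
# "Insert and let the unique constraint catch duplicates" sketch (illustrative).
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE favorites (user_id INT, object_id INT, UNIQUE(user_id, object_id))"
)


def favorite(user_id, object_id):
    try:
        conn.execute("INSERT INTO favorites VALUES (?, ?)", (user_id, object_id))
    except sqlite3.IntegrityError:
        pass  # already favorited: treat as success, like the rollback above


favorite(1, 42)
favorite(1, 42)  # second call is a no-op instead of an error
print(conn.execute("SELECT COUNT(*) FROM favorites").fetchone()[0])  # 1
```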
|
||||
|
||||
def delete(self, query_id):
|
||||
query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
|
||||
query = get_object_or_404(
|
||||
models.Query.get_by_id_and_org, query_id, self.current_org
|
||||
)
|
||||
require_access(query, self.current_user, view_only)
|
||||
|
||||
models.Favorite.query.filter(
|
||||
models.Favorite.object_id == query_id,
|
||||
models.Favorite.object_type == 'Query',
|
||||
models.Favorite.object_type == "Query",
|
||||
models.Favorite.user == self.current_user,
|
||||
).delete()
|
||||
models.db.session.commit()
|
||||
|
||||
self.record_event({
|
||||
'action': 'favorite',
|
||||
'object_id': query.id,
|
||||
'object_type': 'query'
|
||||
})
|
||||
self.record_event(
|
||||
{"action": "favorite", "object_id": query.id, "object_type": "query"}
|
||||
)
|
||||
|
||||
|
||||
class DashboardFavoriteResource(BaseResource):
|
||||
def post(self, object_id):
|
||||
dashboard = get_object_or_404(models.Dashboard.get_by_slug_and_org, object_id, self.current_org)
|
||||
fav = models.Favorite(org_id=self.current_org.id, object=dashboard, user=self.current_user)
|
||||
dashboard = get_object_or_404(
|
||||
models.Dashboard.get_by_slug_and_org, object_id, self.current_org
|
||||
)
|
||||
fav = models.Favorite(
|
||||
org_id=self.current_org.id, object=dashboard, user=self.current_user
|
||||
)
|
||||
models.db.session.add(fav)
|
||||
|
||||
try:
|
||||
models.db.session.commit()
|
||||
except IntegrityError as e:
|
||||
if 'unique_favorite' in str(e):
|
||||
if "unique_favorite" in str(e):
|
||||
models.db.session.rollback()
|
||||
else:
|
||||
raise e
|
||||
|
||||
self.record_event({
|
||||
'action': 'favorite',
|
||||
'object_id': dashboard.id,
|
||||
'object_type': 'dashboard'
|
||||
})
|
||||
self.record_event(
|
||||
{
|
||||
"action": "favorite",
|
||||
"object_id": dashboard.id,
|
||||
"object_type": "dashboard",
|
||||
}
|
||||
)
|
||||
|
||||
def delete(self, object_id):
|
||||
dashboard = get_object_or_404(models.Dashboard.get_by_slug_and_org, object_id, self.current_org)
|
||||
models.Favorite.query.filter(models.Favorite.object == dashboard, models.Favorite.user == self.current_user).delete()
|
||||
dashboard = get_object_or_404(
|
||||
models.Dashboard.get_by_slug_and_org, object_id, self.current_org
|
||||
)
|
||||
models.Favorite.query.filter(
|
||||
models.Favorite.object == dashboard,
|
||||
models.Favorite.user == self.current_user,
|
||||
).delete()
|
||||
models.db.session.commit()
|
||||
self.record_event({
|
||||
'action': 'unfavorite',
|
||||
'object_id': dashboard.id,
|
||||
'object_type': 'dashboard'
|
||||
})
|
||||
self.record_event(
|
||||
{
|
||||
"action": "unfavorite",
|
||||
"object_id": dashboard.id,
|
||||
"object_type": "dashboard",
|
||||
}
|
||||
)
|
||||
|
||||
@@ -9,31 +9,28 @@ from redash.handlers.base import BaseResource, get_object_or_404
|
||||
class GroupListResource(BaseResource):
|
||||
@require_admin
|
||||
def post(self):
|
||||
name = request.json['name']
|
||||
name = request.json["name"]
|
||||
group = models.Group(name=name, org=self.current_org)
|
||||
models.db.session.add(group)
|
||||
models.db.session.commit()
|
||||
|
||||
self.record_event({
|
||||
'action': 'create',
|
||||
'object_id': group.id,
|
||||
'object_type': 'group'
|
||||
})
|
||||
self.record_event(
|
||||
{"action": "create", "object_id": group.id, "object_type": "group"}
|
||||
)
|
||||
|
||||
return group.to_dict()
|
||||
|
||||
def get(self):
|
||||
if self.current_user.has_permission('admin'):
|
||||
if self.current_user.has_permission("admin"):
|
||||
groups = models.Group.all(self.current_org)
|
||||
else:
|
||||
groups = models.Group.query.filter(
|
||||
models.Group.id.in_(self.current_user.group_ids))
|
||||
models.Group.id.in_(self.current_user.group_ids)
|
||||
)
|
||||
|
||||
self.record_event({
|
||||
'action': 'list',
|
||||
'object_id': 'groups',
|
||||
'object_type': 'group',
|
||||
})
|
||||
self.record_event(
|
||||
{"action": "list", "object_id": "groups", "object_type": "group"}
|
||||
)
|
||||
|
||||
return [g.to_dict() for g in groups]
|
||||
|
||||
@@ -46,28 +43,27 @@ class GroupResource(BaseResource):
|
||||
if group.type == models.Group.BUILTIN_GROUP:
|
||||
abort(400, message="Can't modify built-in groups.")
|
||||
|
||||
group.name = request.json['name']
|
||||
group.name = request.json["name"]
|
||||
models.db.session.commit()
|
||||
|
||||
self.record_event({
|
||||
'action': 'edit',
|
||||
'object_id': group.id,
|
||||
'object_type': 'group'
|
||||
})
|
||||
self.record_event(
|
||||
{"action": "edit", "object_id": group.id, "object_type": "group"}
|
||||
)
|
||||
|
||||
return group.to_dict()
|
||||
|
||||
def get(self, group_id):
|
||||
if not (self.current_user.has_permission('admin') or int(group_id) in self.current_user.group_ids):
|
||||
if not (
|
||||
self.current_user.has_permission("admin")
|
||||
or int(group_id) in self.current_user.group_ids
|
||||
):
|
||||
abort(403)
|
||||
|
||||
group = models.Group.get_by_id_and_org(group_id, self.current_org)
|
||||
|
||||
self.record_event({
|
||||
'action': 'view',
|
||||
'object_id': group_id,
|
||||
'object_type': 'group',
|
||||
})
|
||||
self.record_event(
|
||||
{"action": "view", "object_id": group_id, "object_type": "group"}
|
||||
)
|
||||
|
||||
return group.to_dict()
|
||||
|
||||
@@ -89,23 +85,28 @@ class GroupResource(BaseResource):
|
||||
class GroupMemberListResource(BaseResource):
|
||||
@require_admin
|
||||
def post(self, group_id):
|
||||
user_id = request.json['user_id']
|
||||
user_id = request.json["user_id"]
|
||||
user = models.User.get_by_id_and_org(user_id, self.current_org)
|
||||
group = models.Group.get_by_id_and_org(group_id, self.current_org)
|
||||
user.group_ids.append(group.id)
|
||||
models.db.session.commit()
|
||||
|
||||
self.record_event({
|
||||
'action': 'add_member',
|
||||
'object_id': group.id,
|
||||
'object_type': 'group',
|
||||
'member_id': user.id
|
||||
})
|
||||
self.record_event(
|
||||
{
|
||||
"action": "add_member",
|
||||
"object_id": group.id,
|
||||
"object_type": "group",
|
||||
"member_id": user.id,
|
||||
}
|
||||
)
|
||||
return user.to_dict()
|
||||
|
||||
@require_permission('list_users')
|
||||
@require_permission("list_users")
|
||||
def get(self, group_id):
|
||||
if not (self.current_user.has_permission('admin') or int(group_id) in self.current_user.group_ids):
|
||||
if not (
|
||||
self.current_user.has_permission("admin")
|
||||
or int(group_id) in self.current_user.group_ids
|
||||
):
|
||||
abort(403)
|
||||
|
||||
members = models.Group.members(group_id)
|
||||
@@ -119,54 +120,59 @@ class GroupMemberResource(BaseResource):
        user.group_ids.remove(int(group_id))
        models.db.session.commit()

        self.record_event({
            'action': 'remove_member',
            'object_id': group_id,
            'object_type': 'group',
            'member_id': user.id
        })
        self.record_event(
            {
                "action": "remove_member",
                "object_id": group_id,
                "object_type": "group",
                "member_id": user.id,
            }
        )


def serialize_data_source_with_group(data_source, data_source_group):
    d = data_source.to_dict()
    d['view_only'] = data_source_group.view_only
    d["view_only"] = data_source_group.view_only
    return d


class GroupDataSourceListResource(BaseResource):
    @require_admin
    def post(self, group_id):
        data_source_id = request.json['data_source_id']
        data_source = models.DataSource.get_by_id_and_org(data_source_id, self.current_org)
        data_source_id = request.json["data_source_id"]
        data_source = models.DataSource.get_by_id_and_org(
            data_source_id, self.current_org
        )
        group = models.Group.get_by_id_and_org(group_id, self.current_org)

        data_source_group = data_source.add_group(group)
        models.db.session.commit()

        self.record_event({
            'action': 'add_data_source',
            'object_id': group_id,
            'object_type': 'group',
            'member_id': data_source.id
        })
        self.record_event(
            {
                "action": "add_data_source",
                "object_id": group_id,
                "object_type": "group",
                "member_id": data_source.id,
            }
        )

        return serialize_data_source_with_group(data_source, data_source_group)

    @require_admin
    def get(self, group_id):
        group = get_object_or_404(models.Group.get_by_id_and_org, group_id,
                                  self.current_org)
        group = get_object_or_404(
            models.Group.get_by_id_and_org, group_id, self.current_org
        )

        # TOOD: move to models
        data_sources = (models.DataSource.query
                        .join(models.DataSourceGroup)
                        .filter(models.DataSourceGroup.group == group))
        data_sources = models.DataSource.query.join(models.DataSourceGroup).filter(
            models.DataSourceGroup.group == group
        )

        self.record_event({
            'action': 'list',
            'object_id': group_id,
            'object_type': 'group',
        })
        self.record_event(
            {"action": "list", "object_id": group_id, "object_type": "group"}
        )

        return [ds.to_dict(with_permissions_for=group) for ds in data_sources]

@@ -174,34 +180,42 @@ class GroupDataSourceListResource(BaseResource):
class GroupDataSourceResource(BaseResource):
    @require_admin
    def post(self, group_id, data_source_id):
        data_source = models.DataSource.get_by_id_and_org(data_source_id, self.current_org)
        data_source = models.DataSource.get_by_id_and_org(
            data_source_id, self.current_org
        )
        group = models.Group.get_by_id_and_org(group_id, self.current_org)
        view_only = request.json['view_only']
        view_only = request.json["view_only"]

        data_source_group = data_source.update_group_permission(group, view_only)
        models.db.session.commit()

        self.record_event({
            'action': 'change_data_source_permission',
            'object_id': group_id,
            'object_type': 'group',
            'member_id': data_source.id,
            'view_only': view_only
        })
        self.record_event(
            {
                "action": "change_data_source_permission",
                "object_id": group_id,
                "object_type": "group",
                "member_id": data_source.id,
                "view_only": view_only,
            }
        )

        return serialize_data_source_with_group(data_source, data_source_group)

    @require_admin
    def delete(self, group_id, data_source_id):
        data_source = models.DataSource.get_by_id_and_org(data_source_id, self.current_org)
        data_source = models.DataSource.get_by_id_and_org(
            data_source_id, self.current_org
        )
        group = models.Group.get_by_id_and_org(group_id, self.current_org)

        data_source.remove_group(group)
        models.db.session.commit()

        self.record_event({
            'action': 'remove_data_source',
            'object_id': group_id,
            'object_type': 'group',
            'member_id': data_source.id
        })
        self.record_event(
            {
                "action": "remove_data_source",
                "object_id": group_id,
                "object_type": "group",
                "member_id": data_source.id,
            }
        )

@@ -6,15 +6,21 @@ from redash.handlers.base import json_response, org_scoped_rule
from redash.authentication import current_org


@routes.route(org_scoped_rule('/api/organization/status'), methods=['GET'])
@routes.route(org_scoped_rule("/api/organization/status"), methods=["GET"])
@login_required
def organization_status(org_slug=None):
    counters = {
        'users': models.User.all(current_org).count(),
        'alerts': models.Alert.all(group_ids=current_user.group_ids).count(),
        'data_sources': models.DataSource.all(current_org, group_ids=current_user.group_ids).count(),
        'queries': models.Query.all_queries(current_user.group_ids, current_user.id, include_drafts=True).count(),
        'dashboards': models.Dashboard.query.filter(models.Dashboard.org == current_org, models.Dashboard.is_archived == False).count(),
        "users": models.User.all(current_org).count(),
        "alerts": models.Alert.all(group_ids=current_user.group_ids).count(),
        "data_sources": models.DataSource.all(
            current_org, group_ids=current_user.group_ids
        ).count(),
        "queries": models.Query.all_queries(
            current_user.group_ids, current_user.id, include_drafts=True
        ).count(),
        "dashboards": models.Dashboard.query.filter(
            models.Dashboard.org == current_org, models.Dashboard.is_archived == False
        ).count(),
    }

    return json_response(dict(object_counters=counters))

@@ -8,10 +8,7 @@ from flask_restful import abort
from sqlalchemy.orm.exc import NoResultFound


model_to_types = {
    'queries': Query,
    'dashboards': Dashboard
}
model_to_types = {"queries": Query, "dashboards": Dashboard}


def get_model_from_type(type):
@@ -44,63 +41,66 @@ class ObjectPermissionsListResource(BaseResource):

        req = request.get_json(True)

        access_type = req['access_type']
        access_type = req["access_type"]

        if access_type not in ACCESS_TYPES:
            abort(400, message='Unknown access type.')
            abort(400, message="Unknown access type.")

        try:
            grantee = User.get_by_id_and_org(req['user_id'], self.current_org)
            grantee = User.get_by_id_and_org(req["user_id"], self.current_org)
        except NoResultFound:
            abort(400, message='User not found.')
            abort(400, message="User not found.")

        permission = AccessPermission.grant(obj, access_type, grantee, self.current_user)
        permission = AccessPermission.grant(
            obj, access_type, grantee, self.current_user
        )
        db.session.commit()

        self.record_event({
            'action': 'grant_permission',
            'object_id': object_id,
            'object_type': object_type,
            'grantee': grantee.id,
            'access_type': access_type,
        })
        self.record_event(
            {
                "action": "grant_permission",
                "object_id": object_id,
                "object_type": object_type,
                "grantee": grantee.id,
                "access_type": access_type,
            }
        )

        return permission.to_dict()

    def delete(self, object_type, object_id):
        model = get_model_from_type(object_type)
        obj = get_object_or_404(model.get_by_id_and_org, object_id,
                                self.current_org)
        obj = get_object_or_404(model.get_by_id_and_org, object_id, self.current_org)

        require_admin_or_owner(obj.user_id)

        req = request.get_json(True)
        grantee_id = req['user_id']
        access_type = req['access_type']
        grantee_id = req["user_id"]
        access_type = req["access_type"]

        grantee = User.query.get(req['user_id'])
        grantee = User.query.get(req["user_id"])
        if grantee is None:
            abort(400, message='User not found.')
            abort(400, message="User not found.")

        AccessPermission.revoke(obj, grantee, access_type)
        db.session.commit()

        self.record_event({
            'action': 'revoke_permission',
            'object_id': object_id,
            'object_type': object_type,
            'access_type': access_type,
            'grantee_id': grantee_id
        })
        self.record_event(
            {
                "action": "revoke_permission",
                "object_id": object_id,
                "object_type": object_type,
                "access_type": access_type,
                "grantee_id": grantee_id,
            }
        )


class CheckPermissionResource(BaseResource):
    def get(self, object_type, object_id, access_type):
        model = get_model_from_type(object_type)
        obj = get_object_or_404(model.get_by_id_and_org, object_id,
                                self.current_org)
        obj = get_object_or_404(model.get_by_id_and_org, object_id, self.current_org)

        has_access = AccessPermission.exists(obj, access_type,
                                             self.current_user)
        has_access = AccessPermission.exists(obj, access_type, self.current_user)

        return {'response': has_access}
        return {"response": has_access}

@@ -7,13 +7,25 @@ from funcy import partial

from redash import models, settings
from redash.authentication.org_resolving import current_org
from redash.handlers.base import (BaseResource, filter_by_tags, get_object_or_404,
                                  org_scoped_rule, paginate, routes, order_results as _order_results)
from redash.handlers.base import (
    BaseResource,
    filter_by_tags,
    get_object_or_404,
    org_scoped_rule,
    paginate,
    routes,
    order_results as _order_results,
)
from redash.handlers.query_results import run_query
from redash.permissions import (can_modify, not_view_only, require_access,
                                require_admin_or_owner,
                                require_object_modify_permission,
                                require_permission, view_only)
from redash.permissions import (
    can_modify,
    not_view_only,
    require_access,
    require_admin_or_owner,
    require_object_modify_permission,
    require_permission,
    view_only,
)
from redash.utils import collect_parameters_from_request
from redash.serializers import QuerySerializer
from redash.models.parameterized_query import ParameterizedQuery
@@ -21,28 +33,26 @@ from redash.models.parameterized_query import ParameterizedQuery

# Ordering map for relationships
order_map = {
    'name': 'lowercase_name',
    '-name': '-lowercase_name',
    'created_at': 'created_at',
    '-created_at': '-created_at',
    'schedule': 'schedule',
    '-schedule': '-schedule',
    'runtime': 'query_results-runtime',
    '-runtime': '-query_results-runtime',
    'executed_at': 'query_results-retrieved_at',
    '-executed_at': '-query_results-retrieved_at',
    'created_by': 'users-name',
    '-created_by': '-users-name',
    "name": "lowercase_name",
    "-name": "-lowercase_name",
    "created_at": "created_at",
    "-created_at": "-created_at",
    "schedule": "schedule",
    "-schedule": "-schedule",
    "runtime": "query_results-runtime",
    "-runtime": "-query_results-runtime",
    "executed_at": "query_results-retrieved_at",
    "-executed_at": "-query_results-retrieved_at",
    "created_by": "users-name",
    "-created_by": "-users-name",
}

order_results = partial(
    _order_results,
    default_order='-created_at',
    allowed_orders=order_map,
    _order_results, default_order="-created_at", allowed_orders=order_map
)


@routes.route(org_scoped_rule('/api/queries/format'), methods=['POST'])
@routes.route(org_scoped_rule("/api/queries/format"), methods=["POST"])
@login_required
def format_sql_query(org_slug=None):
    """
@@ -54,11 +64,13 @@ def format_sql_query(org_slug=None):
    arguments = request.get_json(force=True)
    query = arguments.get("query", "")

    return jsonify({'query': sqlparse.format(query, **settings.SQLPARSE_FORMAT_OPTIONS)})
    return jsonify(
        {"query": sqlparse.format(query, **settings.SQLPARSE_FORMAT_OPTIONS)}
    )


class QuerySearchResource(BaseResource):
    @require_permission('view_query')
    @require_permission("view_query")
    def get(self):
        """
        Search query text, names, and descriptions.
@@ -68,30 +80,26 @@ class QuerySearchResource(BaseResource):

        Responds with a list of :ref:`query <query-response-label>` objects.
        """
        term = request.args.get('q', '')
        term = request.args.get("q", "")
        if not term:
            return []

        include_drafts = request.args.get('include_drafts') is not None
        include_drafts = request.args.get("include_drafts") is not None

        self.record_event({
            'action': 'search',
            'object_type': 'query',
            'term': term,
        })
        self.record_event({"action": "search", "object_type": "query", "term": term})

        # this redirects to the new query list API that is aware of search
        new_location = url_for(
            'queries',
            "queries",
            q=term,
            org_slug=current_org.slug,
            drafts='true' if include_drafts else 'false',
            drafts="true" if include_drafts else "false",
        )
        return {}, 301, {'Location': new_location}
        return {}, 301, {"Location": new_location}


class QueryRecentResource(BaseResource):
    @require_permission('view_query')
    @require_permission("view_query")
    def get(self):
        """
        Retrieve up to 10 queries recently modified by the user.
@@ -99,12 +107,17 @@ class QueryRecentResource(BaseResource):
        Responds with a list of :ref:`query <query-response-label>` objects.
        """

        results = models.Query.by_user(self.current_user).order_by(models.Query.updated_at.desc()).limit(10)
        return QuerySerializer(results, with_last_modified_by=False, with_user=False).serialize()
        results = (
            models.Query.by_user(self.current_user)
            .order_by(models.Query.updated_at.desc())
            .limit(10)
        )
        return QuerySerializer(
            results, with_last_modified_by=False, with_user=False
        ).serialize()


class BaseQueryListResource(BaseResource):

    def get_queries(self, search_term):
        if search_term:
            results = models.Query.search(
@@ -112,17 +125,15 @@ class BaseQueryListResource(BaseResource):
                self.current_user.group_ids,
                self.current_user.id,
                include_drafts=True,
                multi_byte_search=current_org.get_setting('multi_byte_search_enabled'),
                multi_byte_search=current_org.get_setting("multi_byte_search_enabled"),
            )
        else:
            results = models.Query.all_queries(
                self.current_user.group_ids,
                self.current_user.id,
                include_drafts=True,
                self.current_user.group_ids, self.current_user.id, include_drafts=True
            )
        return filter_by_tags(results, models.Query.tags)

    @require_permission('view_query')
    @require_permission("view_query")
    def get(self):
        """
        Retrieve a list of queries.
@@ -135,7 +146,7 @@ class BaseQueryListResource(BaseResource):
        Responds with an array of :ref:`query <query-response-label>` objects.
        """
        # See if we want to do full-text search or just regular queries
        search_term = request.args.get('q', '')
        search_term = request.args.get("q", "")

        queries = self.get_queries(search_term)

@@ -146,8 +157,8 @@ class BaseQueryListResource(BaseResource):
        # provides an order by search rank
        ordered_results = order_results(results, fallback=not bool(search_term))

        page = request.args.get('page', 1, type=int)
        page_size = request.args.get('page_size', 25, type=int)
        page = request.args.get("page", 1, type=int)
        page_size = request.args.get("page_size", 25, type=int)

        response = paginate(
            ordered_results,
@@ -155,40 +166,40 @@ class BaseQueryListResource(BaseResource):
            page_size=page_size,
            serializer=QuerySerializer,
            with_stats=True,
            with_last_modified_by=False
            with_last_modified_by=False,
        )

        if search_term:
            self.record_event({
                'action': 'search',
                'object_type': 'query',
                'term': search_term,
            })
            self.record_event(
                {"action": "search", "object_type": "query", "term": search_term}
            )
        else:
            self.record_event({
                'action': 'list',
                'object_type': 'query',
            })
            self.record_event({"action": "list", "object_type": "query"})

        return response


def require_access_to_dropdown_queries(user, query_def):
    parameters = query_def.get('options', {}).get('parameters', [])
    dropdown_query_ids = set([str(p['queryId']) for p in parameters if p['type'] == 'query'])
    parameters = query_def.get("options", {}).get("parameters", [])
    dropdown_query_ids = set(
        [str(p["queryId"]) for p in parameters if p["type"] == "query"]
    )

    if dropdown_query_ids:
        groups = models.Query.all_groups_for_query_ids(dropdown_query_ids)

        if len(groups) < len(dropdown_query_ids):
            abort(400, message="You are trying to associate a dropdown query that does not have a matching group. "
                               "Please verify the dropdown query id you are trying to associate with this query.")
            abort(
                400,
                message="You are trying to associate a dropdown query that does not have a matching group. "
                "Please verify the dropdown query id you are trying to associate with this query.",
            )

    require_access(dict(groups), user, view_only)


class QueryListResource(BaseQueryListResource):
    @require_permission('create_query')
    @require_permission("create_query")
    def post(self):
        """
        Create a new query.
@@ -223,33 +234,39 @@ class QueryListResource(BaseQueryListResource):
        :>json number runtime: Runtime of last query execution, in seconds (may be null)
        """
        query_def = request.get_json(force=True)
        data_source = models.DataSource.get_by_id_and_org(query_def.pop('data_source_id'), self.current_org)
        data_source = models.DataSource.get_by_id_and_org(
            query_def.pop("data_source_id"), self.current_org
        )
        require_access(data_source, self.current_user, not_view_only)
        require_access_to_dropdown_queries(self.current_user, query_def)

        for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'last_modified_by']:
        for field in [
            "id",
            "created_at",
            "api_key",
            "visualizations",
            "latest_query_data",
            "last_modified_by",
        ]:
            query_def.pop(field, None)

        query_def['query_text'] = query_def.pop('query')
        query_def['user'] = self.current_user
        query_def['data_source'] = data_source
        query_def['org'] = self.current_org
        query_def['is_draft'] = True
        query_def["query_text"] = query_def.pop("query")
        query_def["user"] = self.current_user
        query_def["data_source"] = data_source
        query_def["org"] = self.current_org
        query_def["is_draft"] = True
        query = models.Query.create(**query_def)
        models.db.session.add(query)
        models.db.session.commit()

        self.record_event({
            'action': 'create',
            'object_id': query.id,
            'object_type': 'query'
        })
        self.record_event(
            {"action": "create", "object_id": query.id, "object_type": "query"}
        )

        return QuerySerializer(query, with_visualizations=True).serialize()


class QueryArchiveResource(BaseQueryListResource):

    def get_queries(self, search_term):
        if search_term:
            return models.Query.search(
@@ -258,7 +275,7 @@ class QueryArchiveResource(BaseQueryListResource):
                self.current_user.id,
                include_drafts=False,
                include_archived=True,
                multi_byte_search=current_org.get_setting('multi_byte_search_enabled'),
                multi_byte_search=current_org.get_setting("multi_byte_search_enabled"),
            )
        else:
            return models.Query.all_queries(
@@ -270,7 +287,7 @@ class QueryArchiveResource(BaseQueryListResource):


class MyQueriesResource(BaseResource):
    @require_permission('view_query')
    @require_permission("view_query")
    def get(self):
        """
        Retrieve a list of queries created by the current user.
@@ -282,7 +299,7 @@ class MyQueriesResource(BaseResource):

        Responds with an array of :ref:`query <query-response-label>` objects.
        """
        search_term = request.args.get('q', '')
        search_term = request.args.get("q", "")
        if search_term:
            results = models.Query.search_by_user(search_term, self.current_user)
        else:
@@ -295,8 +312,8 @@ class MyQueriesResource(BaseResource):
        # provides an order by search rank
        ordered_results = order_results(results, fallback=not bool(search_term))

        page = request.args.get('page', 1, type=int)
        page_size = request.args.get('page_size', 25, type=int)
        page = request.args.get("page", 1, type=int)
        page_size = request.args.get("page_size", 25, type=int)
        return paginate(
            ordered_results,
            page,
@@ -308,7 +325,7 @@ class MyQueriesResource(BaseResource):


class QueryResource(BaseResource):
    @require_permission('edit_query')
    @require_permission("edit_query")
    def post(self, query_id):
        """
        Modify a query.
@@ -323,27 +340,38 @@ class QueryResource(BaseResource):

        Responds with the updated :ref:`query <query-response-label>` object.
        """
        query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
        query = get_object_or_404(
            models.Query.get_by_id_and_org, query_id, self.current_org
        )
        query_def = request.get_json(force=True)

        require_object_modify_permission(query, self.current_user)
        require_access_to_dropdown_queries(self.current_user, query_def)

        for field in ['id', 'created_at', 'api_key', 'visualizations', 'latest_query_data', 'user', 'last_modified_by', 'org']:
        for field in [
            "id",
            "created_at",
            "api_key",
            "visualizations",
            "latest_query_data",
            "user",
            "last_modified_by",
            "org",
        ]:
            query_def.pop(field, None)

        if 'query' in query_def:
            query_def['query_text'] = query_def.pop('query')
        if "query" in query_def:
            query_def["query_text"] = query_def.pop("query")

        if 'tags' in query_def:
            query_def['tags'] = [tag for tag in query_def['tags'] if tag]
        if "tags" in query_def:
            query_def["tags"] = [tag for tag in query_def["tags"] if tag]

        query_def['last_modified_by'] = self.current_user
        query_def['changed_by'] = self.current_user
        query_def["last_modified_by"] = self.current_user
        query_def["changed_by"] = self.current_user
        # SQLAlchemy handles the case where a concurrent transaction beats us
        # to the update. But we still have to make sure that we're not starting
        # out behind.
        if 'version' in query_def and query_def['version'] != query.version:
        if "version" in query_def and query_def["version"] != query.version:
            abort(409)

        try:
@@ -354,7 +382,7 @@ class QueryResource(BaseResource):

        return QuerySerializer(query, with_visualizations=True).serialize()

    @require_permission('view_query')
    @require_permission("view_query")
    def get(self, query_id):
        """
        Retrieve a query.
@@ -363,17 +391,17 @@ class QueryResource(BaseResource):

        Responds with the :ref:`query <query-response-label>` contents.
        """
        q = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
        q = get_object_or_404(
            models.Query.get_by_id_and_org, query_id, self.current_org
        )
        require_access(q, self.current_user, view_only)

        result = QuerySerializer(q, with_visualizations=True).serialize()
        result['can_edit'] = can_modify(q, self.current_user)
        result["can_edit"] = can_modify(q, self.current_user)

        self.record_event({
            'action': 'view',
            'object_id': query_id,
            'object_type': 'query',
        })
        self.record_event(
            {"action": "view", "object_id": query_id, "object_type": "query"}
        )

        return result

@@ -384,32 +412,38 @@ class QueryResource(BaseResource):

        :param query_id: ID of query to archive
        """
        query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
        query = get_object_or_404(
            models.Query.get_by_id_and_org, query_id, self.current_org
        )
        require_admin_or_owner(query.user_id)
        query.archive(self.current_user)
        models.db.session.commit()


class QueryRegenerateApiKeyResource(BaseResource):
    @require_permission('edit_query')
    @require_permission("edit_query")
    def post(self, query_id):
        query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
        query = get_object_or_404(
            models.Query.get_by_id_and_org, query_id, self.current_org
        )
        require_admin_or_owner(query.user_id)
        query.regenerate_api_key()
        models.db.session.commit()

        self.record_event({
            'action': 'regnerate_api_key',
            'object_id': query_id,
            'object_type': 'query',
        })
        self.record_event(
            {
                "action": "regnerate_api_key",
                "object_id": query_id,
                "object_type": "query",
            }
        )

        result = QuerySerializer(query).serialize()
        return result


class QueryForkResource(BaseResource):
    @require_permission('edit_query')
    @require_permission("edit_query")
    def post(self, query_id):
        """
        Creates a new query, copying the query text from an existing one.
@@ -418,16 +452,16 @@ class QueryForkResource(BaseResource):

        Responds with created :ref:`query <query-response-label>` object.
        """
        query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
        query = get_object_or_404(
            models.Query.get_by_id_and_org, query_id, self.current_org
        )
        require_access(query.data_source, self.current_user, not_view_only)
        forked_query = query.fork(self.current_user)
        models.db.session.commit()

        self.record_event({
            'action': 'fork',
            'object_id': query_id,
            'object_type': 'query',
        })
        self.record_event(
            {"action": "fork", "object_id": query_id, "object_type": "query"}
        )

        return QuerySerializer(forked_query, with_visualizations=True).serialize()

@@ -447,13 +481,17 @@ class QueryRefreshResource(BaseResource):
        if self.current_user.is_api_user():
            abort(403, message="Please use a user API key.")

        query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
        query = get_object_or_404(
            models.Query.get_by_id_and_org, query_id, self.current_org
        )
        require_access(query, self.current_user, not_view_only)

        parameter_values = collect_parameters_from_request(request.args)
        parameterized_query = ParameterizedQuery(query.query_text, org=self.current_org)

        return run_query(parameterized_query, parameter_values, query.data_source, query.id)
        return run_query(
            parameterized_query, parameter_values, query.data_source, query.id
        )


class QueryTagsResource(BaseResource):
@@ -462,23 +500,20 @@ class QueryTagsResource(BaseResource):
        Returns all query tags including those for drafts.
        """
        tags = models.Query.all_tags(self.current_user, include_drafts=True)
        return {
            'tags': [
                {
                    'name': name,
                    'count': count,
                }
                for name, count in tags
            ]
        }
        return {"tags": [{"name": name, "count": count} for name, count in tags]}


class QueryFavoriteListResource(BaseResource):
    def get(self):
        search_term = request.args.get('q')
        search_term = request.args.get("q")

        if search_term:
            base_query = models.Query.search(search_term, self.current_user.group_ids, include_drafts=True, limit=None)
            base_query = models.Query.search(
                search_term,
                self.current_user.group_ids,
                include_drafts=True,
                limit=None,
            )
            favorites = models.Query.favorites(self.current_user, base_query=base_query)
        else:
            favorites = models.Query.favorites(self.current_user)
@@ -490,8 +525,8 @@ class QueryFavoriteListResource(BaseResource):
        # provides an order by search rank
        ordered_favorites = order_results(favorites, fallback=not bool(search_term))

        page = request.args.get('page', 1, type=int)
        page_size = request.args.get('page_size', 25, type=int)
        page = request.args.get("page", 1, type=int)
        page_size = request.args.get("page_size", 25, type=int)
        response = paginate(
            ordered_favorites,
            page,
@@ -501,14 +536,16 @@ class QueryFavoriteListResource(BaseResource):
            with_last_modified_by=False,
        )

        self.record_event({
            'action': 'load_favorites',
            'object_type': 'query',
            'params': {
                'q': search_term,
                'tags': request.args.getlist('tags'),
                'page': page
        self.record_event(
            {
                "action": "load_favorites",
                "object_type": "query",
                "params": {
                    "q": search_term,
                    "tags": request.args.getlist("tags"),
                    "page": page,
                },
            }
        })
        )

        return response

@@ -6,34 +6,65 @@ from flask_login import current_user
from flask_restful import abort
from redash import models, settings
from redash.handlers.base import BaseResource, get_object_or_404, record_event
from redash.permissions import (has_access, not_view_only, require_access,
                                require_permission, view_only)
from redash.permissions import (
    has_access,
    not_view_only,
    require_access,
    require_permission,
    view_only,
)
from redash.tasks import QueryTask
from redash.tasks.queries import enqueue_query
from redash.utils import (collect_parameters_from_request, gen_query_hash, json_dumps, utcnow, to_filename)
from redash.models.parameterized_query import (ParameterizedQuery, InvalidParameterError,
                                               QueryDetachedFromDataSourceError, dropdown_values)
from redash.serializers import serialize_query_result, serialize_query_result_to_csv, serialize_query_result_to_xlsx
from redash.utils import (
    collect_parameters_from_request,
    gen_query_hash,
    json_dumps,
    utcnow,
    to_filename,
)
from redash.models.parameterized_query import (
    ParameterizedQuery,
    InvalidParameterError,
    QueryDetachedFromDataSourceError,
    dropdown_values,
)
from redash.serializers import (
    serialize_query_result,
    serialize_query_result_to_csv,
    serialize_query_result_to_xlsx,
)


def error_response(message, http_status=400):
    return {'job': {'status': 4, 'error': message}}, http_status
    return {"job": {"status": 4, "error": message}}, http_status


error_messages = {
    'unsafe_when_shared': error_response('This query contains potentially unsafe parameters and cannot be executed on a shared dashboard or an embedded visualization.', 403),
    'unsafe_on_view_only': error_response('This query contains potentially unsafe parameters and cannot be executed with read-only access to this data source.', 403),
    'no_permission': error_response('You do not have permission to run queries with this data source.', 403),
    'select_data_source': error_response('Please select data source to run this query.', 401)
    "unsafe_when_shared": error_response(
        "This query contains potentially unsafe parameters and cannot be executed on a shared dashboard or an embedded visualization.",
        403,
    ),
    "unsafe_on_view_only": error_response(
        "This query contains potentially unsafe parameters and cannot be executed with read-only access to this data source.",
        403,
    ),
    "no_permission": error_response(
        "You do not have permission to run queries with this data source.", 403
    ),
    "select_data_source": error_response(
        "Please select data source to run this query.", 401
    ),
}


def run_query(query, parameters, data_source, query_id, max_age=0):
    if data_source.paused:
        if data_source.pause_reason:
            message = '{} is paused ({}). Please try later.'.format(data_source.name, data_source.pause_reason)
            message = "{} is paused ({}). Please try later.".format(
                data_source.name, data_source.pause_reason
            )
        else:
            message = '{} is paused. Please try later.'.format(data_source.name)
            message = "{} is paused. Please try later.".format(data_source.name)

        return error_response(message)

@@ -43,44 +74,62 @@ def run_query(query, parameters, data_source, query_id, max_age=0):
        abort(400, message=e.message)

    if query.missing_params:
        return error_response('Missing parameter value for: {}'.format(", ".join(query.missing_params)))
        return error_response(
            "Missing parameter value for: {}".format(", ".join(query.missing_params))
        )

    if max_age == 0:
        query_result = None
    else:
        query_result = models.QueryResult.get_latest(data_source, query.text, max_age)

    record_event(current_user.org, current_user, {
        'action': 'execute_query',
        'cache': 'hit' if query_result else 'miss',
        'object_id': data_source.id,
        'object_type': 'data_source',
        'query': query.text,
        'query_id': query_id,
        'parameters': parameters
    })
    record_event(
        current_user.org,
        current_user,
        {
            "action": "execute_query",
            "cache": "hit" if query_result else "miss",
            "object_id": data_source.id,
            "object_type": "data_source",
            "query": query.text,
            "query_id": query_id,
            "parameters": parameters,
        },
    )

    if query_result:
        return {'query_result': serialize_query_result(query_result, current_user.is_api_user())}
        return {
            "query_result": serialize_query_result(
                query_result, current_user.is_api_user()
            )
        }
    else:
        job = enqueue_query(query.text, data_source, current_user.id, current_user.is_api_user(), metadata={
            "Username": repr(current_user) if current_user.is_api_user() else current_user.email,
            "Query ID": query_id
        })
        return {'job': job.to_dict()}
        job = enqueue_query(
            query.text,
            data_source,
            current_user.id,
            current_user.is_api_user(),
            metadata={
                "Username": repr(current_user)
                if current_user.is_api_user()
                else current_user.email,
                "Query ID": query_id,
            },
        )
        return {"job": job.to_dict()}


def get_download_filename(query_result, query, filetype):
    retrieved_at = query_result.retrieved_at.strftime("%Y_%m_%d")
    if query:
        filename = to_filename(query.name) if query.name != '' else str(query.id)
        filename = to_filename(query.name) if query.name != "" else str(query.id)
    else:
        filename = str(query_result.id)
    return "{}_{}.{}".format(filename, retrieved_at, filetype)


class QueryResultListResource(BaseResource):
    @require_permission('execute_query')
    @require_permission("execute_query")
    def post(self):
        """
        Execute a query (or retrieve recent results).
@@ -96,27 +145,33 @@ class QueryResultListResource(BaseResource):
        """
        params = request.get_json(force=True)

        query = params['query']
        max_age = params.get('max_age', -1)
        query = params["query"]
        max_age = params.get("max_age", -1)
        # max_age might have the value of None, in which case calling int(None) will fail
        if max_age is None:
            max_age = -1
        max_age = int(max_age)
        query_id = params.get('query_id', 'adhoc')
        parameters = params.get('parameters', collect_parameters_from_request(request.args))
        query_id = params.get("query_id", "adhoc")
        parameters = params.get(
            "parameters", collect_parameters_from_request(request.args)
        )

        parameterized_query = ParameterizedQuery(query, org=self.current_org)

        data_source_id = params.get('data_source_id')
        data_source_id = params.get("data_source_id")
        if data_source_id:
            data_source = models.DataSource.get_by_id_and_org(params.get('data_source_id'), self.current_org)
            data_source = models.DataSource.get_by_id_and_org(
                params.get("data_source_id"), self.current_org
            )
        else:
            return error_messages['select_data_source']
            return error_messages["select_data_source"]

        if not has_access(data_source, self.current_user, not_view_only):
            return error_messages['no_permission']
            return error_messages["no_permission"]

        return run_query(parameterized_query, parameters, data_source, query_id, max_age)
        return run_query(
            parameterized_query, parameters, data_source, query_id, max_age
        )


ONE_YEAR = 60 * 60 * 24 * 365.25
@@ -124,7 +179,9 @@ ONE_YEAR = 60 * 60 * 24 * 365.25

class QueryResultDropdownResource(BaseResource):
    def get(self, query_id):
        query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
        query = get_object_or_404(
            models.Query.get_by_id_and_org, query_id, self.current_org
        )
        require_access(query.data_source, current_user, view_only)
        try:
            return dropdown_values(query_id, self.current_org)
@@ -134,12 +191,18 @@ class QueryResultDropdownResource(BaseResource):

class QueryDropdownsResource(BaseResource):
    def get(self, query_id, dropdown_query_id):
        query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
        query = get_object_or_404(
            models.Query.get_by_id_and_org, query_id, self.current_org
        )
        require_access(query, current_user, view_only)

        related_queries_ids = [p['queryId'] for p in query.parameters if p['type'] == 'query']
        related_queries_ids = [
            p["queryId"] for p in query.parameters if p["type"] == "query"
        ]
        if int(dropdown_query_id) not in related_queries_ids:
            dropdown_query = get_object_or_404(models.Query.get_by_id_and_org, dropdown_query_id, self.current_org)
            dropdown_query = get_object_or_404(
                models.Query.get_by_id_and_org, dropdown_query_id, self.current_org
            )
            require_access(dropdown_query.data_source, current_user, view_only)

        return dropdown_values(dropdown_query_id, self.current_org)
@@ -148,27 +211,33 @@ class QueryDropdownsResource(BaseResource):
class QueryResultResource(BaseResource):
    @staticmethod
    def add_cors_headers(headers):
        if 'Origin' in request.headers:
            origin = request.headers['Origin']
        if "Origin" in request.headers:
            origin = request.headers["Origin"]

            if set(['*', origin]) & settings.ACCESS_CONTROL_ALLOW_ORIGIN:
                headers['Access-Control-Allow-Origin'] = origin
                headers['Access-Control-Allow-Credentials'] = str(settings.ACCESS_CONTROL_ALLOW_CREDENTIALS).lower()
            if set(["*", origin]) & settings.ACCESS_CONTROL_ALLOW_ORIGIN:
                headers["Access-Control-Allow-Origin"] = origin
                headers["Access-Control-Allow-Credentials"] = str(
                    settings.ACCESS_CONTROL_ALLOW_CREDENTIALS
                ).lower()

    @require_permission('view_query')
    def options(self, query_id=None, query_result_id=None, filetype='json'):
    @require_permission("view_query")
    def options(self, query_id=None, query_result_id=None, filetype="json"):
        headers = {}
        self.add_cors_headers(headers)

        if settings.ACCESS_CONTROL_REQUEST_METHOD:
            headers['Access-Control-Request-Method'] = settings.ACCESS_CONTROL_REQUEST_METHOD
            headers[
                "Access-Control-Request-Method"
            ] = settings.ACCESS_CONTROL_REQUEST_METHOD

        if settings.ACCESS_CONTROL_ALLOW_HEADERS:
            headers['Access-Control-Allow-Headers'] = settings.ACCESS_CONTROL_ALLOW_HEADERS
            headers[
                "Access-Control-Allow-Headers"
            ] = settings.ACCESS_CONTROL_ALLOW_HEADERS

        return make_response("", 200, headers)

    @require_permission('view_query')
    @require_permission("view_query")
    def post(self, query_id):
        """
        Execute a saved query.
@@ -181,31 +250,41 @@ class QueryResultResource(BaseResource):
        always execute.
        """
        params = request.get_json(force=True, silent=True) or {}
        parameter_values = params.get('parameters', {})
        parameter_values = params.get("parameters", {})

        max_age = params.get('max_age', -1)
        max_age = params.get("max_age", -1)
        # max_age might have the value of None, in which case calling int(None) will fail
        if max_age is None:
            max_age = -1
        max_age = int(max_age)

        query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
        query = get_object_or_404(
            models.Query.get_by_id_and_org, query_id, self.current_org
        )

        allow_executing_with_view_only_permissions = query.parameterized.is_safe

        if has_access(query, self.current_user, allow_executing_with_view_only_permissions):
            return run_query(query.parameterized, parameter_values, query.data_source, query_id, max_age)
        if has_access(
            query, self.current_user, allow_executing_with_view_only_permissions
        ):
            return run_query(
                query.parameterized,
                parameter_values,
                query.data_source,
                query_id,
                max_age,
            )
        else:
            if not query.parameterized.is_safe:
                if current_user.is_api_user():
                    return error_messages['unsafe_when_shared']
                    return error_messages["unsafe_when_shared"]
                else:
                    return error_messages['unsafe_on_view_only']
                    return error_messages["unsafe_on_view_only"]
            else:
                return error_messages['no_permission']
                return error_messages["no_permission"]

    @require_permission('view_query')
    def get(self, query_id=None, query_result_id=None, filetype='json'):
    @require_permission("view_query")
    def get(self, query_id=None, query_result_id=None, filetype="json"):
        """
        Retrieve query results.

@@ -228,52 +307,66 @@ class QueryResultResource(BaseResource):
        should_cache = query_result_id is not None

        parameter_values = collect_parameters_from_request(request.args)
        max_age = int(request.args.get('maxAge', 0))
        max_age = int(request.args.get("maxAge", 0))

        query_result = None
        query = None

        if query_result_id:
            query_result = get_object_or_404(models.QueryResult.get_by_id_and_org, query_result_id, self.current_org)
            query_result = get_object_or_404(
                models.QueryResult.get_by_id_and_org, query_result_id, self.current_org
            )

        if query_id is not None:
            query = get_object_or_404(models.Query.get_by_id_and_org, query_id, self.current_org)
            query = get_object_or_404(
                models.Query.get_by_id_and_org, query_id, self.current_org
            )

        if query_result is None and query is not None and query.latest_query_data_id is not None:
            query_result = get_object_or_404(models.QueryResult.get_by_id_and_org,
                                             query.latest_query_data_id,
                                             self.current_org)
        if (
            query_result is None
            and query is not None
            and query.latest_query_data_id is not None
        ):
            query_result = get_object_or_404(
                models.QueryResult.get_by_id_and_org,
                query.latest_query_data_id,
                self.current_org,
            )

        if query is not None and query_result is not None and self.current_user.is_api_user():
        if (
            query is not None
            and query_result is not None
            and self.current_user.is_api_user()
        ):
            if query.query_hash != query_result.query_hash:
                abort(404, message='No cached result found for this query.')
                abort(404, message="No cached result found for this query.")

        if query_result:
            require_access(query_result.data_source, self.current_user, view_only)

            if isinstance(self.current_user, models.ApiUser):
                event = {
                    'user_id': None,
                    'org_id': self.current_org.id,
                    'action': 'api_get',
                    'api_key': self.current_user.name,
                    'file_type': filetype,
                    'user_agent': request.user_agent.string,
                    'ip': request.remote_addr
                    "user_id": None,
                    "org_id": self.current_org.id,
                    "action": "api_get",
                    "api_key": self.current_user.name,
                    "file_type": filetype,
                    "user_agent": request.user_agent.string,
                    "ip": request.remote_addr,
                }

                if query_id:
                    event['object_type'] = 'query'
                    event['object_id'] = query_id
                    event["object_type"] = "query"
                    event["object_id"] = query_id
                else:
                    event['object_type'] = 'query_result'
                    event['object_id'] = query_result_id
                    event["object_type"] = "query_result"
                    event["object_id"] = query_result_id

                self.record_event(event)

            if filetype == 'json':
            if filetype == "json":
                response = self.make_json_response(query_result)
            elif filetype == 'xlsx':
            elif filetype == "xlsx":
                response = self.make_excel_response(query_result)
            else:
                response = self.make_csv_response(query_result)
@@ -282,33 +375,36 @@ class QueryResultResource(BaseResource):
            self.add_cors_headers(response.headers)

            if should_cache:
                response.headers.add_header('Cache-Control', 'private,max-age=%d' % ONE_YEAR)
                response.headers.add_header(
                    "Cache-Control", "private,max-age=%d" % ONE_YEAR
                )

            filename = get_download_filename(query_result, query, filetype)

            response.headers.add_header(
                "Content-Disposition",
                'attachment; filename="{}"'.format(filename)
                "Content-Disposition", 'attachment; filename="{}"'.format(filename)
            )

            return response

        else:
            abort(404, message='No cached result found for this query.')
            abort(404, message="No cached result found for this query.")

    def make_json_response(self, query_result):
        data = json_dumps({'query_result': query_result.to_dict()})
        headers = {'Content-Type': "application/json"}
        data = json_dumps({"query_result": query_result.to_dict()})
        headers = {"Content-Type": "application/json"}
        return make_response(data, 200, headers)

    @staticmethod
    def make_csv_response(query_result):
        headers = {'Content-Type': "text/csv; charset=UTF-8"}
        headers = {"Content-Type": "text/csv; charset=UTF-8"}
        return make_response(serialize_query_result_to_csv(query_result), 200, headers)

    @staticmethod
    def make_excel_response(query_result):
        headers = {'Content-Type': "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"}
        headers = {
            "Content-Type": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet"
        }
        return make_response(serialize_query_result_to_xlsx(query_result), 200, headers)


@@ -318,7 +414,7 @@ class JobResource(BaseResource):
        Retrieve info about a running query job.
        """
        job = QueryTask(job_id=job_id)
        return {'job': job.to_dict()}
        return {"job": job.to_dict()}

    def delete(self, job_id):
        """

@@ -3,82 +3,83 @@ from funcy import project

from redash import models
from redash.permissions import require_admin_or_owner
from redash.handlers.base import (BaseResource, require_fields,
                                  get_object_or_404)
from redash.handlers.base import BaseResource, require_fields, get_object_or_404


class QuerySnippetResource(BaseResource):
    def get(self, snippet_id):
        snippet = get_object_or_404(models.QuerySnippet.get_by_id_and_org,
                                    snippet_id, self.current_org)
        snippet = get_object_or_404(
            models.QuerySnippet.get_by_id_and_org, snippet_id, self.current_org
        )

        self.record_event({
            'action': 'view',
            'object_id': snippet_id,
            'object_type': 'query_snippet',
        })
        self.record_event(
            {"action": "view", "object_id": snippet_id, "object_type": "query_snippet"}
        )

        return snippet.to_dict()

    def post(self, snippet_id):
        req = request.get_json(True)
        params = project(req, ('trigger', 'description', 'snippet'))
        snippet = get_object_or_404(models.QuerySnippet.get_by_id_and_org,
                                    snippet_id, self.current_org)
        params = project(req, ("trigger", "description", "snippet"))
        snippet = get_object_or_404(
            models.QuerySnippet.get_by_id_and_org, snippet_id, self.current_org
        )
        require_admin_or_owner(snippet.user.id)

        self.update_model(snippet, params)
        models.db.session.commit()

        self.record_event({
            'action': 'edit',
            'object_id': snippet.id,
            'object_type': 'query_snippet'
        })
        self.record_event(
            {"action": "edit", "object_id": snippet.id, "object_type": "query_snippet"}
        )
        return snippet.to_dict()

    def delete(self, snippet_id):
        snippet = get_object_or_404(models.QuerySnippet.get_by_id_and_org,
                                    snippet_id, self.current_org)
        snippet = get_object_or_404(
            models.QuerySnippet.get_by_id_and_org, snippet_id, self.current_org
        )
        require_admin_or_owner(snippet.user.id)
        models.db.session.delete(snippet)
        models.db.session.commit()

        self.record_event({
            'action': 'delete',
            'object_id': snippet.id,
            'object_type': 'query_snippet'
        })
        self.record_event(
            {
                "action": "delete",
                "object_id": snippet.id,
                "object_type": "query_snippet",
            }
        )


class QuerySnippetListResource(BaseResource):
    def post(self):
        req = request.get_json(True)
        require_fields(req, ('trigger', 'description', 'snippet'))
        require_fields(req, ("trigger", "description", "snippet"))

        snippet = models.QuerySnippet(
            trigger=req['trigger'],
            description=req['description'],
            snippet=req['snippet'],
            trigger=req["trigger"],
            description=req["description"],
            snippet=req["snippet"],
            user=self.current_user,
            org=self.current_org
            org=self.current_org,
        )

        models.db.session.add(snippet)
        models.db.session.commit()

        self.record_event({
            'action': 'create',
            'object_id': snippet.id,
            'object_type': 'query_snippet'
        })
        self.record_event(
            {
                "action": "create",
                "object_id": snippet.id,
                "object_type": "query_snippet",
            }
        )

        return snippet.to_dict()

    def get(self):
        self.record_event({
            'action': 'list',
            'object_type': 'query_snippet',
        })
        return [snippet.to_dict() for snippet in
                models.QuerySnippet.all(org=self.current_org)]
        self.record_event({"action": "list", "object_type": "query_snippet"})
        return [
            snippet.to_dict()
            for snippet in models.QuerySnippet.all(org=self.current_org)
        ]

@@ -7,7 +7,7 @@ from redash.settings.organization import settings as org_settings


def get_settings_with_defaults(defaults, org):
    values = org.settings.get('settings', {})
    values = org.settings.get("settings", {})
    settings = {}

    for setting, default_value in defaults.items():
@@ -20,7 +20,7 @@ def get_settings_with_defaults(defaults, org):
        else:
            settings[setting] = current_value

    settings['auth_google_apps_domains'] = org.google_apps_domains
    settings["auth_google_apps_domains"] = org.google_apps_domains

    return settings

@@ -30,39 +30,39 @@ class OrganizationSettings(BaseResource):
    def get(self):
        settings = get_settings_with_defaults(org_settings, self.current_org)

        return {
            "settings": settings
        }
        return {"settings": settings}

    @require_admin
    def post(self):
        new_values = request.json

        if self.current_org.settings.get('settings') is None:
            self.current_org.settings['settings'] = {}
        if self.current_org.settings.get("settings") is None:
            self.current_org.settings["settings"] = {}

        previous_values = {}
        for k, v in new_values.items():
            if k == 'auth_google_apps_domains':
            if k == "auth_google_apps_domains":
                previous_values[k] = self.current_org.google_apps_domains
                self.current_org.settings[Organization.SETTING_GOOGLE_APPS_DOMAINS] = v
            else:
                previous_values[k] = self.current_org.get_setting(k, raise_on_missing=False)
                previous_values[k] = self.current_org.get_setting(
                    k, raise_on_missing=False
                )
                self.current_org.set_setting(k, v)

        db.session.add(self.current_org)
        db.session.commit()

        self.record_event({
            'action': 'edit',
            'object_id': self.current_org.id,
            'object_type': 'settings',
            'new_values': new_values,
            'previous_values': previous_values
        })
        self.record_event(
            {
                "action": "edit",
                "object_id": self.current_org.id,
                "object_type": "settings",
                "new_values": new_values,
                "previous_values": previous_values,
            }
        )

        settings = get_settings_with_defaults(org_settings, self.current_org)

        return {
            "settings": settings
        }
        return {"settings": settings}

@@ -11,26 +11,38 @@ from wtforms.fields.html5 import EmailField


class SetupForm(Form):
    name = StringField('Name', validators=[validators.InputRequired()])
    email = EmailField('Email Address', validators=[validators.Email()])
    password = PasswordField('Password', validators=[validators.Length(6)])
    name = StringField("Name", validators=[validators.InputRequired()])
    email = EmailField("Email Address", validators=[validators.Email()])
    password = PasswordField("Password", validators=[validators.Length(6)])
    org_name = StringField("Organization Name", validators=[validators.InputRequired()])
    security_notifications = BooleanField()
    newsletter = BooleanField()


def create_org(org_name, user_name, email, password):
    default_org = Organization(name=org_name, slug='default', settings={})
    admin_group = Group(name='admin', permissions=['admin', 'super_admin'], org=default_org, type=Group.BUILTIN_GROUP)
    default_group = Group(name='default', permissions=Group.DEFAULT_PERMISSIONS, org=default_org, type=Group.BUILTIN_GROUP)
    default_org = Organization(name=org_name, slug="default", settings={})
    admin_group = Group(
        name="admin",
        permissions=["admin", "super_admin"],
        org=default_org,
        type=Group.BUILTIN_GROUP,
    )
    default_group = Group(
        name="default",
        permissions=Group.DEFAULT_PERMISSIONS,
        org=default_org,
        type=Group.BUILTIN_GROUP,
    )

    db.session.add_all([default_org, admin_group, default_group])
    db.session.commit()

    user = User(org=default_org,
                name=user_name,
                email=email,
                group_ids=[admin_group.id, default_group.id])
    user = User(
        org=default_org,
        name=user_name,
        email=email,
        group_ids=[admin_group.id, default_group.id],
    )
    user.hash_password(password)

    db.session.add(user)
@@ -39,17 +51,19 @@ def create_org(org_name, user_name, email, password):
    return default_org, user


@routes.route('/setup', methods=['GET', 'POST'])
@routes.route("/setup", methods=["GET", "POST"])
def setup():
    if current_org != None or settings.MULTI_ORG:
        return redirect('/')
        return redirect("/")

    form = SetupForm(request.form)
    form.newsletter.data = True
    form.security_notifications.data = True

    if request.method == 'POST' and form.validate():
        default_org, user = create_org(form.org_name.data, form.name.data, form.email.data, form.password.data)
    if request.method == "POST" and form.validate():
        default_org, user = create_org(
            form.org_name.data, form.name.data, form.email.data, form.password.data
        )

        g.org = default_org
        login_user(user)
@@ -58,6 +72,6 @@ def setup():
        if form.newsletter.data or form.security_notifications:
            subscribe.delay(form.data)

        return redirect(url_for('redash.index', org_slug=None))
        return redirect(url_for("redash.index", org_slug=None))

    return render_template('setup.html', form=form)
    return render_template("setup.html", form=form)

@@ -12,21 +12,21 @@ def render_index():
     if settings.MULTI_ORG:
         response = render_template("multi_org.html", base_href=base_href())
     else:
-        full_path = safe_join(settings.STATIC_ASSETS_PATH, 'index.html')
+        full_path = safe_join(settings.STATIC_ASSETS_PATH, "index.html")
         response = send_file(full_path, **dict(cache_timeout=0, conditional=True))
 
     return response
 
 
-@routes.route(org_scoped_rule('/dashboard/<slug>'), methods=['GET'])
+@routes.route(org_scoped_rule("/dashboard/<slug>"), methods=["GET"])
 @login_required
 @csp_allows_embeding
 def dashboard(slug, org_slug=None):
     return render_index()
 
 
-@routes.route(org_scoped_rule('/<path:path>'))
-@routes.route(org_scoped_rule('/'))
+@routes.route(org_scoped_rule("/<path:path>"))
+@routes.route(org_scoped_rule("/"))
 @login_required
 def index(**kwargs):
     return render_index()
 
@@ -10,31 +10,45 @@ from disposable_email_domains import blacklist
 from funcy import partial
 
 from redash import models, limiter
-from redash.permissions import require_permission, require_admin_or_owner, is_admin_or_owner, \
-    require_permission_or_owner, require_admin
-from redash.handlers.base import BaseResource, require_fields, get_object_or_404, paginate, order_results as _order_results
+from redash.permissions import (
+    require_permission,
+    require_admin_or_owner,
+    is_admin_or_owner,
+    require_permission_or_owner,
+    require_admin,
+)
+from redash.handlers.base import (
+    BaseResource,
+    require_fields,
+    get_object_or_404,
+    paginate,
+    order_results as _order_results,
+)
 
-from redash.authentication.account import invite_link_for_user, send_invite_email, send_password_reset_email, send_verify_email
+from redash.authentication.account import (
+    invite_link_for_user,
+    send_invite_email,
+    send_password_reset_email,
+    send_verify_email,
+)
 from redash.settings import parse_boolean
 from redash import settings
 
 
 # Ordering map for relationships
 order_map = {
-    'name': 'name',
-    '-name': '-name',
-    'active_at': 'active_at',
-    '-active_at': '-active_at',
-    'created_at': 'created_at',
-    '-created_at': '-created_at',
-    'groups': 'group_ids',
-    '-groups': '-group_ids',
+    "name": "name",
+    "-name": "-name",
+    "active_at": "active_at",
+    "-active_at": "-active_at",
+    "created_at": "created_at",
+    "-created_at": "-created_at",
+    "groups": "group_ids",
+    "-groups": "-group_ids",
 }
 
 order_results = partial(
-    _order_results,
-    default_order='-created_at',
-    allowed_orders=order_map,
+    _order_results, default_order="-created_at", allowed_orders=order_map
 )
 
@@ -45,14 +59,15 @@ def invite_user(org, inviter, user, send_email=True):
     if settings.email_server_is_configured() and send_email:
         send_invite_email(inviter, user, invite_url, org)
     else:
-        d['invite_link'] = invite_url
+        d["invite_link"] = invite_url
 
     return d
 
 
 class UserListResource(BaseResource):
-    decorators = BaseResource.decorators + \
-        [limiter.limit('200/day;50/hour', methods=['POST'])]
+    decorators = BaseResource.decorators + [
+        limiter.limit("200/day;50/hour", methods=["POST"])
+    ]
 
     def get_users(self, disabled, pending, search_term):
         if disabled:
@@ -65,50 +80,52 @@ class UserListResource(BaseResource):
 
         if search_term:
             users = models.User.search(users, search_term)
-            self.record_event({
-                'action': 'search',
-                'object_type': 'user',
-                'term': search_term,
-                'pending': pending,
-            })
+            self.record_event(
+                {
+                    "action": "search",
+                    "object_type": "user",
+                    "term": search_term,
+                    "pending": pending,
+                }
+            )
         else:
-            self.record_event({
-                'action': 'list',
-                'object_type': 'user',
-                'pending': pending,
-            })
+            self.record_event(
+                {"action": "list", "object_type": "user", "pending": pending}
+            )
 
         # order results according to passed order parameter,
         # special-casing search queries where the database
         # provides an order by search rank
         return order_results(users, fallback=not bool(search_term))
 
-    @require_permission('list_users')
+    @require_permission("list_users")
     def get(self):
-        page = request.args.get('page', 1, type=int)
-        page_size = request.args.get('page_size', 25, type=int)
+        page = request.args.get("page", 1, type=int)
+        page_size = request.args.get("page_size", 25, type=int)
 
         groups = {group.id: group for group in models.Group.all(self.current_org)}
 
         def serialize_user(user):
             d = user.to_dict()
             user_groups = []
-            for group_id in set(d['groups']):
+            for group_id in set(d["groups"]):
                 group = groups.get(group_id)
 
                 if group:
-                    user_groups.append({'id': group.id, 'name': group.name})
+                    user_groups.append({"id": group.id, "name": group.name})
 
-            d['groups'] = user_groups
+            d["groups"] = user_groups
 
             return d
 
-        search_term = request.args.get('q', '')
+        search_term = request.args.get("q", "")
 
-        disabled = request.args.get('disabled', 'false')  # get enabled users by default
+        disabled = request.args.get("disabled", "false")  # get enabled users by default
         disabled = parse_boolean(disabled)
 
-        pending = request.args.get('pending', None)  # get both active and pending by default
+        pending = request.args.get(
+            "pending", None
+        )  # get both active and pending by default
         if pending is not None:
             pending = parse_boolean(pending)
 
@@ -119,37 +136,39 @@ class UserListResource(BaseResource):
     @require_admin
     def post(self):
         req = request.get_json(force=True)
-        require_fields(req, ('name', 'email'))
+        require_fields(req, ("name", "email"))
 
-        if '@' not in req['email']:
-            abort(400, message='Bad email address.')
-        name, domain = req['email'].split('@', 1)
+        if "@" not in req["email"]:
+            abort(400, message="Bad email address.")
+        name, domain = req["email"].split("@", 1)
 
-        if domain.lower() in blacklist or domain.lower() == 'qq.com':
-            abort(400, message='Bad email address.')
+        if domain.lower() in blacklist or domain.lower() == "qq.com":
+            abort(400, message="Bad email address.")
 
-        user = models.User(org=self.current_org,
-                           name=req['name'],
-                           email=req['email'],
-                           is_invitation_pending=True,
-                           group_ids=[self.current_org.default_group.id])
+        user = models.User(
+            org=self.current_org,
+            name=req["name"],
+            email=req["email"],
+            is_invitation_pending=True,
+            group_ids=[self.current_org.default_group.id],
+        )
 
         try:
             models.db.session.add(user)
             models.db.session.commit()
         except IntegrityError as e:
             if "email" in str(e):
-                abort(400, message='Email already taken.')
+                abort(400, message="Email already taken.")
             abort(500)
 
-        self.record_event({
-            'action': 'create',
-            'object_id': user.id,
-            'object_type': 'user'
-        })
+        self.record_event(
+            {"action": "create", "object_id": user.id, "object_type": "user"}
+        )
 
-        should_send_invitation = 'no_invite' not in request.args
-        return invite_user(self.current_org, self.current_user, user, send_email=should_send_invitation)
+        should_send_invitation = "no_invite" not in request.args
+        return invite_user(
+            self.current_org, self.current_user, user, send_email=should_send_invitation
+        )
 
 
 class UserInviteResource(BaseResource):
@@ -164,47 +183,42 @@ class UserResetPasswordResource(BaseResource):
     def post(self, user_id):
         user = models.User.get_by_id_and_org(user_id, self.current_org)
         if user.is_disabled:
-            abort(404, message='Not found')
+            abort(404, message="Not found")
         reset_link = send_password_reset_email(user)
 
-        return {
-            'reset_link': reset_link,
-        }
+        return {"reset_link": reset_link}
 
 
 class UserRegenerateApiKeyResource(BaseResource):
     def post(self, user_id):
         user = models.User.get_by_id_and_org(user_id, self.current_org)
         if user.is_disabled:
-            abort(404, message='Not found')
+            abort(404, message="Not found")
         if not is_admin_or_owner(user_id):
             abort(403)
 
         user.regenerate_api_key()
         models.db.session.commit()
 
-        self.record_event({
-            'action': 'regnerate_api_key',
-            'object_id': user.id,
-            'object_type': 'user'
-        })
+        self.record_event(
+            {"action": "regnerate_api_key", "object_id": user.id, "object_type": "user"}
+        )
 
         return user.to_dict(with_api_key=True)
 
 
 class UserResource(BaseResource):
-    decorators = BaseResource.decorators + \
-        [limiter.limit('50/hour', methods=['POST'])]
+    decorators = BaseResource.decorators + [limiter.limit("50/hour", methods=["POST"])]
 
     def get(self, user_id):
-        require_permission_or_owner('list_users', user_id)
-        user = get_object_or_404(models.User.get_by_id_and_org, user_id, self.current_org)
+        require_permission_or_owner("list_users", user_id)
+        user = get_object_or_404(
+            models.User.get_by_id_and_org, user_id, self.current_org
+        )
 
-        self.record_event({
-            'action': 'view',
-            'object_id': user_id,
-            'object_type': 'user',
-        })
+        self.record_event(
+            {"action": "view", "object_id": user_id, "object_type": "user"}
+        )
 
         return user.to_dict(with_api_key=is_admin_or_owner(user_id))
 
@@ -214,39 +228,45 @@ class UserResource(BaseResource):
 
         req = request.get_json(True)
 
-        params = project(req, ('email', 'name', 'password', 'old_password', 'group_ids'))
+        params = project(
+            req, ("email", "name", "password", "old_password", "group_ids")
+        )
 
-        if 'password' in params and 'old_password' not in params:
+        if "password" in params and "old_password" not in params:
             abort(403, message="Must provide current password to update password.")
 
-        if 'old_password' in params and not user.verify_password(params['old_password']):
+        if "old_password" in params and not user.verify_password(
+            params["old_password"]
+        ):
             abort(403, message="Incorrect current password.")
 
-        if 'password' in params:
-            user.hash_password(params.pop('password'))
-            params.pop('old_password')
+        if "password" in params:
+            user.hash_password(params.pop("password"))
+            params.pop("old_password")
 
-        if 'group_ids' in params:
-            if not self.current_user.has_permission('admin'):
+        if "group_ids" in params:
+            if not self.current_user.has_permission("admin"):
                 abort(403, message="Must be admin to change groups membership.")
 
-            for group_id in params['group_ids']:
+            for group_id in params["group_ids"]:
                 try:
                     models.Group.get_by_id_and_org(group_id, self.current_org)
                 except NoResultFound:
                     abort(400, message="Group id {} is invalid.".format(group_id))
 
-            if len(params['group_ids']) == 0:
-                params.pop('group_ids')
+            if len(params["group_ids"]) == 0:
+                params.pop("group_ids")
 
-        if 'email' in params:
-            _, domain = params['email'].split('@', 1)
+        if "email" in params:
+            _, domain = params["email"].split("@", 1)
 
-            if domain.lower() in blacklist or domain.lower() == 'qq.com':
-                abort(400, message='Bad email address.')
+            if domain.lower() in blacklist or domain.lower() == "qq.com":
+                abort(400, message="Bad email address.")
 
-        email_address_changed = 'email' in params and params['email'] != user.email
-        needs_to_verify_email = email_address_changed and settings.email_server_is_configured()
+        email_address_changed = "email" in params and params["email"] != user.email
+        needs_to_verify_email = (
+            email_address_changed and settings.email_server_is_configured()
+        )
         if needs_to_verify_email:
             user.is_email_verified = False
 
@@ -270,12 +290,14 @@ class UserResource(BaseResource):
 
             abort(400, message=message)
 
-        self.record_event({
-            'action': 'edit',
-            'object_id': user.id,
-            'object_type': 'user',
-            'updated_fields': list(params.keys())
-        })
+        self.record_event(
+            {
+                "action": "edit",
+                "object_id": user.id,
+                "object_type": "user",
+                "updated_fields": list(params.keys()),
+            }
+        )
 
         return user.to_dict(with_api_key=is_admin_or_owner(user_id))
 
@@ -285,11 +307,17 @@ class UserResource(BaseResource):
         # admin cannot delete self; current user is an admin (`@require_admin`)
         # so just check user id
         if user.id == current_user.id:
-            abort(403, message="You cannot delete your own account. "
-                               "Please ask another admin to do this for you.")
+            abort(
+                403,
+                message="You cannot delete your own account. "
+                "Please ask another admin to do this for you.",
+            )
         elif not user.is_invitation_pending:
-            abort(403, message="You cannot delete activated users. "
-                               "Please disable the user instead.")
+            abort(
+                403,
+                message="You cannot delete activated users. "
+                "Please disable the user instead.",
+            )
         models.db.session.delete(user)
         models.db.session.commit()
 
@@ -303,8 +331,11 @@ class UserDisableResource(BaseResource):
         # admin cannot disable self; current user is an admin (`@require_admin`)
         # so just check user id
         if user.id == current_user.id:
-            abort(403, message="You cannot disable your own account. "
-                               "Please ask another admin to do this for you.")
+            abort(
+                403,
+                message="You cannot disable your own account. "
+                "Please ask another admin to do this for you.",
+            )
         user.disable()
         models.db.session.commit()
 
@@ -3,21 +3,22 @@ from flask import request
 from redash import models
 from redash.handlers.base import BaseResource, get_object_or_404
 from redash.serializers import serialize_visualization
-from redash.permissions import (require_object_modify_permission,
-                                require_permission)
+from redash.permissions import require_object_modify_permission, require_permission
 from redash.utils import json_dumps
 
 
 class VisualizationListResource(BaseResource):
-    @require_permission('edit_query')
+    @require_permission("edit_query")
     def post(self):
         kwargs = request.get_json(force=True)
 
-        query = get_object_or_404(models.Query.get_by_id_and_org, kwargs.pop('query_id'), self.current_org)
+        query = get_object_or_404(
+            models.Query.get_by_id_and_org, kwargs.pop("query_id"), self.current_org
+        )
         require_object_modify_permission(query, self.current_user)
 
-        kwargs['options'] = json_dumps(kwargs['options'])
-        kwargs['query_rel'] = query
+        kwargs["options"] = json_dumps(kwargs["options"])
+        kwargs["query_rel"] = query
 
         vis = models.Visualization(**kwargs)
         models.db.session.add(vis)
@@ -26,31 +27,37 @@ class VisualizationListResource(BaseResource):
 
 
 class VisualizationResource(BaseResource):
-    @require_permission('edit_query')
+    @require_permission("edit_query")
     def post(self, visualization_id):
-        vis = get_object_or_404(models.Visualization.get_by_id_and_org, visualization_id, self.current_org)
+        vis = get_object_or_404(
+            models.Visualization.get_by_id_and_org, visualization_id, self.current_org
+        )
         require_object_modify_permission(vis.query_rel, self.current_user)
 
         kwargs = request.get_json(force=True)
-        if 'options' in kwargs:
-            kwargs['options'] = json_dumps(kwargs['options'])
+        if "options" in kwargs:
+            kwargs["options"] = json_dumps(kwargs["options"])
 
-        kwargs.pop('id', None)
-        kwargs.pop('query_id', None)
+        kwargs.pop("id", None)
+        kwargs.pop("query_id", None)
 
         self.update_model(vis, kwargs)
         d = serialize_visualization(vis, with_query=False)
         models.db.session.commit()
         return d
 
-    @require_permission('edit_query')
+    @require_permission("edit_query")
     def delete(self, visualization_id):
-        vis = get_object_or_404(models.Visualization.get_by_id_and_org, visualization_id, self.current_org)
+        vis = get_object_or_404(
+            models.Visualization.get_by_id_and_org, visualization_id, self.current_org
+        )
         require_object_modify_permission(vis.query_rel, self.current_user)
-        self.record_event({
-            'action': 'delete',
-            'object_id': visualization_id,
-            'object_type': 'Visualization'
-        })
+        self.record_event(
+            {
+                "action": "delete",
+                "object_id": visualization_id,
+                "object_type": "Visualization",
+            }
+        )
         models.db.session.delete(vis)
         models.db.session.commit()
 
@@ -2,27 +2,27 @@ import os
 import simplejson
 from flask import url_for
 
-WEBPACK_MANIFEST_PATH = os.path.join(os.path.dirname(__file__), '../../client/dist/', 'asset-manifest.json')
+WEBPACK_MANIFEST_PATH = os.path.join(
+    os.path.dirname(__file__), "../../client/dist/", "asset-manifest.json"
+)
 
 
 def configure_webpack(app):
-    app.extensions['webpack'] = {'assets': None}
+    app.extensions["webpack"] = {"assets": None}
 
     def get_asset(path):
-        assets = app.extensions['webpack']['assets']
+        assets = app.extensions["webpack"]["assets"]
         # in debug we read in this file each request
         if assets is None or app.debug:
             try:
                 with open(WEBPACK_MANIFEST_PATH) as fp:
                     assets = simplejson.load(fp)
             except IOError:
-                app.logger.exception('Unable to load webpack manifest')
+                app.logger.exception("Unable to load webpack manifest")
                 assets = {}
-            app.extensions['webpack']['assets'] = assets
-        return url_for('static', filename=assets.get(path, path))
+            app.extensions["webpack"]["assets"] = assets
+        return url_for("static", filename=assets.get(path, path))
 
     @app.context_processor
     def webpack_assets():
-        return {
-            'asset_url': get_asset,
-        }
+        return {"asset_url": get_asset}
 
@@ -3,14 +3,17 @@ from flask import request
 from redash import models
 from redash.handlers.base import BaseResource
 from redash.serializers import serialize_widget
-from redash.permissions import (require_access,
-                                require_object_modify_permission,
-                                require_permission, view_only)
+from redash.permissions import (
+    require_access,
+    require_object_modify_permission,
+    require_permission,
+    view_only,
+)
 from redash.utils import json_dumps
 
 
 class WidgetListResource(BaseResource):
-    @require_permission('edit_dashboard')
+    @require_permission("edit_dashboard")
     def post(self):
         """
         Add a widget to a dashboard.
@@ -24,20 +27,24 @@ class WidgetListResource(BaseResource):
         :>json object widget: The created widget
         """
         widget_properties = request.get_json(force=True)
-        dashboard = models.Dashboard.get_by_id_and_org(widget_properties.get('dashboard_id'), self.current_org)
+        dashboard = models.Dashboard.get_by_id_and_org(
+            widget_properties.get("dashboard_id"), self.current_org
+        )
         require_object_modify_permission(dashboard, self.current_user)
 
-        widget_properties['options'] = json_dumps(widget_properties['options'])
-        widget_properties.pop('id', None)
+        widget_properties["options"] = json_dumps(widget_properties["options"])
+        widget_properties.pop("id", None)
 
-        visualization_id = widget_properties.pop('visualization_id')
+        visualization_id = widget_properties.pop("visualization_id")
         if visualization_id:
-            visualization = models.Visualization.get_by_id_and_org(visualization_id, self.current_org)
+            visualization = models.Visualization.get_by_id_and_org(
+                visualization_id, self.current_org
+            )
             require_access(visualization.query_rel, self.current_user, view_only)
         else:
             visualization = None
 
-        widget_properties['visualization'] = visualization
+        widget_properties["visualization"] = visualization
 
         widget = models.Widget(**widget_properties)
         models.db.session.add(widget)
@@ -48,7 +55,7 @@ class WidgetListResource(BaseResource):
 
 
 class WidgetResource(BaseResource):
-    @require_permission('edit_dashboard')
+    @require_permission("edit_dashboard")
     def post(self, widget_id):
         """
         Updates a widget in a dashboard.
@@ -61,12 +68,12 @@ class WidgetResource(BaseResource):
         widget = models.Widget.get_by_id_and_org(widget_id, self.current_org)
         require_object_modify_permission(widget.dashboard, self.current_user)
         widget_properties = request.get_json(force=True)
-        widget.text = widget_properties['text']
-        widget.options = json_dumps(widget_properties['options'])
+        widget.text = widget_properties["text"]
+        widget.options = json_dumps(widget_properties["options"])
         models.db.session.commit()
         return serialize_widget(widget)
 
-    @require_permission('edit_dashboard')
+    @require_permission("edit_dashboard")
     def delete(self, widget_id):
         """
         Remove a widget from a dashboard.
@@ -75,10 +82,8 @@ class WidgetResource(BaseResource):
         """
         widget = models.Widget.get_by_id_and_org(widget_id, self.current_org)
         require_object_modify_permission(widget.dashboard, self.current_user)
-        self.record_event({
-            'action': 'delete',
-            'object_id': widget_id,
-            'object_type': 'widget',
-        })
+        self.record_event(
+            {"action": "delete", "object_id": widget_id, "object_type": "widget"}
+        )
         models.db.session.delete(widget)
         models.db.session.commit()
 
@@ -1,11 +1,10 @@
 import logging
 import socket
 import time
 from redash import settings
 
 from celery.concurrency import asynpool
 
 asynpool.PROC_ALIVE_TIMEOUT = settings.CELERY_INIT_TIMEOUT
 
 from celery.signals import task_postrun, task_prerun
@@ -34,23 +33,29 @@ def metric_name(name, tags):
 
 
 @task_postrun.connect
-def task_postrun_handler(signal, sender, task_id, task, args, kwargs, retval, state, **kw):
+def task_postrun_handler(
+    signal, sender, task_id, task, args, kwargs, retval, state, **kw
+):
     try:
         run_time = 1000 * (time.time() - tasks_start_time.pop(task_id))
 
-        state = (state or 'unknown').lower()
-        tags = {'state': state, 'hostname': socket.gethostname()}
-        if task.name == 'redash.tasks.execute_query':
+        state = (state or "unknown").lower()
+        tags = {"state": state, "hostname": socket.gethostname()}
+        if task.name == "redash.tasks.execute_query":
             if isinstance(retval, Exception):
-                tags['state'] = 'exception'
-                state = 'exception'
+                tags["state"] = "exception"
+                state = "exception"
 
-            tags['data_source_id'] = args[1]
+            tags["data_source_id"] = args[1]
 
-        normalized_task_name = task.name.replace('redash.tasks.', '').replace('.', '_')
+        normalized_task_name = task.name.replace("redash.tasks.", "").replace(".", "_")
         metric = "celery.task_runtime.{}".format(normalized_task_name)
-        logging.debug("metric=%s", json_dumps({'metric': metric, 'tags': tags, 'value': run_time}))
+        logging.debug(
+            "metric=%s", json_dumps({"metric": metric, "tags": tags, "value": run_time})
+        )
         statsd_client.timing(metric_name(metric, tags), run_time)
-        statsd_client.incr(metric_name('celery.task.{}.{}'.format(normalized_task_name, state), tags))
+        statsd_client.incr(
+            metric_name("celery.task.{}.{}".format(normalized_task_name, state), tags)
+        )
     except Exception:
         logging.exception("Exception during task_postrun handler.")
 
@@ -26,21 +26,21 @@ def _table_name_from_select_element(elt):
 
 @listens_for(Engine, "before_execute")
 def before_execute(conn, elt, multiparams, params):
-    conn.info.setdefault('query_start_time', []).append(time.time())
+    conn.info.setdefault("query_start_time", []).append(time.time())
 
 
 @listens_for(Engine, "after_execute")
 def after_execute(conn, elt, multiparams, params, result):
-    duration = 1000 * (time.time() - conn.info['query_start_time'].pop(-1))
+    duration = 1000 * (time.time() - conn.info["query_start_time"].pop(-1))
     action = elt.__class__.__name__
 
-    if action == 'Select':
-        name = 'unknown'
+    if action == "Select":
+        name = "unknown"
         try:
             name = _table_name_from_select_element(elt)
         except Exception:
-            logging.exception('Failed finding table name.')
-    elif action in ['Update', 'Insert', 'Delete']:
+            logging.exception("Failed finding table name.")
+    elif action in ["Update", "Insert", "Delete"]:
         name = elt.table.name
     else:
         # create/drop tables, sqlalchemy internal schema queries, etc
@@ -48,13 +48,12 @@ def after_execute(conn, elt, multiparams, params, result):
 
     action = action.lower()
 
-    statsd_client.timing('db.{}.{}'.format(name, action), duration)
-    metrics_logger.debug("table=%s query=%s duration=%.2f", name, action,
-                         duration)
+    statsd_client.timing("db.{}.{}".format(name, action), duration)
+    metrics_logger.debug("table=%s query=%s duration=%.2f", name, action, duration)
 
     if has_request_context():
-        g.setdefault('queries_count', 0)
-        g.setdefault('queries_duration', 0)
+        g.setdefault("queries_count", 0)
+        g.setdefault("queries_duration", 0)
         g.queries_count += 1
         g.queries_duration += duration
 
@@ -14,36 +14,42 @@ def record_requets_start_time():
 
 
 def calculate_metrics(response):
-    if 'start_time' not in g:
+    if "start_time" not in g:
         return response
 
     request_duration = (time.time() - g.start_time) * 1000
-    queries_duration = g.get('queries_duration', 0.0)
-    queries_count = g.get('queries_count', 0.0)
-    endpoint = (request.endpoint or 'unknown').replace('.', '_')
+    queries_duration = g.get("queries_duration", 0.0)
+    queries_count = g.get("queries_count", 0.0)
+    endpoint = (request.endpoint or "unknown").replace(".", "_")
 
-    metrics_logger.info("method=%s path=%s endpoint=%s status=%d content_type=%s content_length=%d duration=%.2f query_count=%d query_duration=%.2f",
-                        request.method,
-                        request.path,
-                        endpoint,
-                        response.status_code,
-                        response.content_type,
-                        response.content_length or -1,
-                        request_duration,
-                        queries_count,
-                        queries_duration)
+    metrics_logger.info(
+        "method=%s path=%s endpoint=%s status=%d content_type=%s content_length=%d duration=%.2f query_count=%d query_duration=%.2f",
+        request.method,
+        request.path,
+        endpoint,
+        response.status_code,
+        response.content_type,
+        response.content_length or -1,
+        request_duration,
+        queries_count,
+        queries_duration,
+    )
 
-    statsd_client.timing('requests.{}.{}'.format(endpoint, request.method.lower()), request_duration)
+    statsd_client.timing(
+        "requests.{}.{}".format(endpoint, request.method.lower()), request_duration
+    )
 
     return response
 
 
-MockResponse = namedtuple('MockResponse', ['status_code', 'content_type', 'content_length'])
+MockResponse = namedtuple(
+    "MockResponse", ["status_code", "content_type", "content_length"]
+)
 
 
 def calculate_metrics_on_exception(error):
     if error is not None:
-        calculate_metrics(MockResponse(500, '?', -1))
+        calculate_metrics(MockResponse(500, "?", -1))
 
 
 def init_app(app):
 
File diff suppressed because it is too large
@@ -17,14 +17,12 @@ class RedashSQLAlchemy(SQLAlchemy):
     def apply_pool_defaults(self, app, options):
         super(RedashSQLAlchemy, self).apply_pool_defaults(app, options)
         if settings.SQLALCHEMY_DISABLE_POOL:
-            options['poolclass'] = NullPool
+            options["poolclass"] = NullPool
             # Remove options NullPool does not support:
-            options.pop('max_overflow', None)
+            options.pop("max_overflow", None)
 
 
-db = RedashSQLAlchemy(session_options={
-    'expire_on_commit': False
-})
+db = RedashSQLAlchemy(session_options={"expire_on_commit": False})
 # Make sure the SQLAlchemy mappers are all properly configured first.
 # This is required by SQLAlchemy-Searchable as it adds DDL listeners
 # on the configuration phase of models.
@@ -32,7 +30,7 @@ db.configure_mappers()
 
 # listen to a few database events to set up functions, trigger updates
 # and indexes for the full text search
-make_searchable(options={'regconfig': 'pg_catalog.simple'})
+make_searchable(options={"regconfig": "pg_catalog.simple"})
 
 
 class SearchBaseQuery(BaseQuery, SearchQueryMixin):
@@ -63,6 +61,7 @@ class GFKBase(object):
     """
    Compatibility with 'generic foreign key' approach Peewee used.
     """
+
     object_type = Column(db.String(255))
     object_id = Column(db.Integer)
 
@@ -75,8 +74,11 @@ class GFKBase(object):
             return self._object
         else:
             object_class = _gfk_types[self.object_type]
-            self._object = session.query(object_class).filter(
-                object_class.id == self.object_id).first()
+            self._object = (
+                session.query(object_class)
+                .filter(object_class.id == self.object_id)
+                .first()
+            )
             return self._object
 
     @object.setter
 
@@ -5,48 +5,49 @@ from .base import GFKBase, db, Column
 from .types import PseudoJSON
 
 
-@generic_repr('id', 'object_type', 'object_id', 'created_at')
+@generic_repr("id", "object_type", "object_id", "created_at")
 class Change(GFKBase, db.Model):
     id = Column(db.Integer, primary_key=True)
     # 'object' defined in GFKBase
     object_version = Column(db.Integer, default=0)
     user_id = Column(db.Integer, db.ForeignKey("users.id"))
-    user = db.relationship("User", backref='changes')
+    user = db.relationship("User", backref="changes")
     change = Column(PseudoJSON)
     created_at = Column(db.DateTime(True), default=db.func.now())
 
-    __tablename__ = 'changes'
+    __tablename__ = "changes"
 
     def to_dict(self, full=True):
         d = {
-            'id': self.id,
-            'object_id': self.object_id,
-            'object_type': self.object_type,
-            'change_type': self.change_type,
-            'object_version': self.object_version,
-            'change': self.change,
-            'created_at': self.created_at
+            "id": self.id,
+            "object_id": self.object_id,
+            "object_type": self.object_type,
+            "change_type": self.change_type,
+            "object_version": self.object_version,
+            "change": self.change,
+            "created_at": self.created_at,
         }
 
         if full:
-            d['user'] = self.user.to_dict()
+            d["user"] = self.user.to_dict()
         else:
-            d['user_id'] = self.user_id
+            d["user_id"] = self.user_id
 
         return d
 
     @classmethod
     def last_change(cls, obj):
-        return cls.query.filter(
-            cls.object_id == obj.id,
-            cls.object_type == obj.__class__.__tablename__
-        ).order_by(
-            cls.object_version.desc()
-        ).first()
+        return (
+            cls.query.filter(
+                cls.object_id == obj.id, cls.object_type == obj.__class__.__tablename__
+            )
+            .order_by(cls.object_version.desc())
+            .first()
+        )
 
 
 class ChangeTrackingMixin(object):
-    skipped_fields = ('id', 'created_at', 'updated_at', 'version')
+    skipped_fields = ("id", "created_at", "updated_at", "version")
     _clean_values = None
 
     def __init__(self, *a, **kw):
@@ -54,7 +55,7 @@ class ChangeTrackingMixin(object):
         self.record_changes(self.user)
 
     def prep_cleanvalues(self):
-        self.__dict__['_clean_values'] = {}
+        self.__dict__["_clean_values"] = {}
         for attr in inspect(self.__class__).column_attrs:
             col, = attr.columns
             # 'query' is col name but not attr name
@@ -77,10 +78,16 @@ class ChangeTrackingMixin(object):
         for attr in inspect(self.__class__).column_attrs:
             col, = attr.columns
             if attr.key not in self.skipped_fields:
-                changes[col.name] = {'previous': self._clean_values[col.name],
-                                     'current': getattr(self, attr.key)}
+                changes[col.name] = {
+                    "previous": self._clean_values[col.name],
+                    "current": getattr(self, attr.key),
+                }
 
-        db.session.add(Change(object=self,
-                              object_version=self.version,
-                              user=changed_by,
-                              change=changes))
+        db.session.add(
+            Change(
+                object=self,
+                object_version=self.version,
+                user=changed_by,
+                change=changes,
+            )
+        )
 
@@ -8,10 +8,10 @@ class TimestampMixin(object):
     created_at = Column(db.DateTime(True), default=db.func.now(), nullable=False)
 
 
-@listens_for(TimestampMixin, 'before_update', propagate=True)
+@listens_for(TimestampMixin, "before_update", propagate=True)
 def timestamp_before_update(mapper, connection, target):
     # Check if we really want to update the updated_at value
-    if hasattr(target, 'skip_updated_at'):
+    if hasattr(target, "skip_updated_at"):
         return
 
     target.updated_at = db.func.now()
 
@@ -9,9 +9,9 @@ from .types import MutableDict, PseudoJSON
 from .users import User, Group
 
 
-@generic_repr('id', 'name', 'slug')
+@generic_repr("id", "name", "slug")
 class Organization(TimestampMixin, db.Model):
-    SETTING_GOOGLE_APPS_DOMAINS = 'google_apps_domains'
+    SETTING_GOOGLE_APPS_DOMAINS = "google_apps_domains"
     SETTING_IS_PUBLIC = "is_public"
 
     id = Column(db.Integer, primary_key=True)
@@ -19,12 +19,12 @@ class Organization(TimestampMixin, db.Model):
     slug = Column(db.String(255), unique=True)
     settings = Column(MutableDict.as_mutable(PseudoJSON))
     groups = db.relationship("Group", lazy="dynamic")
-    events = db.relationship("Event", lazy="dynamic", order_by="desc(Event.created_at)",)
+    events = db.relationship("Event", lazy="dynamic", order_by="desc(Event.created_at)")
 
-    __tablename__ = 'organizations'
+    __tablename__ = "organizations"
 
     def __str__(self):
-        return '%s (%s)' % (self.name, self.id)
+        return "%s (%s)" % (self.name, self.id)
 
     @classmethod
     def get_by_slug(cls, slug):
@@ -36,7 +36,9 @@ class Organization(TimestampMixin, db.Model):
 
     @property
     def default_group(self):
-        return self.groups.filter(Group.name == 'default', Group.type == Group.BUILTIN_GROUP).first()
+        return self.groups.filter(
+            Group.name == "default", Group.type == Group.BUILTIN_GROUP
+        ).first()
 
     @property
     def google_apps_domains(self):
@@ -48,25 +50,25 @@ class Organization(TimestampMixin, db.Model):
 
     @property
     def is_disabled(self):
-        return self.settings.get('is_disabled', False)
+        return self.settings.get("is_disabled", False)
 
     def disable(self):
-        self.settings['is_disabled'] = True
+        self.settings["is_disabled"] = True
 
     def enable(self):
-        self.settings['is_disabled'] = False
+        self.settings["is_disabled"] = False
 
     def set_setting(self, key, value):
         if key not in org_settings:
            raise KeyError(key)
 
-        self.settings.setdefault('settings', {})
-        self.settings['settings'][key] = value
-        flag_modified(self, 'settings')
+        self.settings.setdefault("settings", {})
+        self.settings["settings"][key] = value
+        flag_modified(self, "settings")
 
     def get_setting(self, key, raise_on_missing=True):
-        if key in self.settings.get('settings', {}):
-            return self.settings['settings'][key]
+        if key in self.settings.get("settings", {}):
+            return self.settings["settings"][key]
 
         if key in org_settings:
            return org_settings[key]
@@ -78,7 +80,9 @@ class Organization(TimestampMixin, db.Model):
 
     @property
     def admin_group(self):
-        return self.groups.filter(Group.name == 'admin', Group.type == Group.BUILTIN_GROUP).first()
+        return self.groups.filter(
+            Group.name == "admin", Group.type == Group.BUILTIN_GROUP
+        ).first()
 
     def has_user(self, email):
         return self.users.filter(User.email == email).count() == 1
@@ -23,7 +23,9 @@ def _load_result(query_id, org):
     query = models.Query.get_by_id_and_org(query_id, org)
 
     if query.data_source:
-        query_result = models.QueryResult.get_by_id_and_org(query.latest_query_data_id, org)
+        query_result = models.QueryResult.get_by_id_and_org(
+            query.latest_query_data_id, org
+        )
         return query_result.data
     else:
         raise QueryDetachedFromDataSourceError(query_id)
@@ -40,12 +42,16 @@ def join_parameter_list_values(parameters, schema):
     updated_parameters = {}
     for (key, value) in parameters.items():
         if isinstance(value, list):
-            definition = next((definition for definition in schema if definition["name"] == key), {})
-            multi_values_options = definition.get('multiValuesOptions', {})
-            separator = str(multi_values_options.get('separator', ','))
-            prefix = str(multi_values_options.get('prefix', ''))
-            suffix = str(multi_values_options.get('suffix', ''))
-            updated_parameters[key] = separator.join([prefix + v + suffix for v in value])
+            definition = next(
+                (definition for definition in schema if definition["name"] == key), {}
+            )
+            multi_values_options = definition.get("multiValuesOptions", {})
+            separator = str(multi_values_options.get("separator", ","))
+            prefix = str(multi_values_options.get("prefix", ""))
+            suffix = str(multi_values_options.get("suffix", ""))
+            updated_parameters[key] = separator.join(
+                [prefix + v + suffix for v in value]
+            )
         else:
             updated_parameters[key] = value
     return updated_parameters
@@ -74,7 +80,7 @@ def _parameter_names(parameter_values):
     for key, value in parameter_values.items():
         if isinstance(value, dict):
             for inner_key in value.keys():
-                names.append('{}.{}'.format(key, inner_key))
+                names.append("{}.{}".format(key, inner_key))
         else:
            names.append(key)
 
@@ -122,12 +128,16 @@ class ParameterizedQuery(object):
         self.parameters = {}
 
     def apply(self, parameters):
-        invalid_parameter_names = [key for (key, value) in parameters.items() if not self._valid(key, value)]
+        invalid_parameter_names = [
+            key for (key, value) in parameters.items() if not self._valid(key, value)
+        ]
         if invalid_parameter_names:
             raise InvalidParameterError(invalid_parameter_names)
         else:
             self.parameters.update(parameters)
-            self.query = mustache_render(self.template, join_parameter_list_values(parameters, self.schema))
+            self.query = mustache_render(
+                self.template, join_parameter_list_values(parameters, self.schema)
+            )
 
         return self
 
@@ -135,27 +145,32 @@ class ParameterizedQuery(object):
         if not self.schema:
            return True
 
-        definition = next((definition for definition in self.schema if definition["name"] == name), None)
+        definition = next(
+            (definition for definition in self.schema if definition["name"] == name),
+            None,
+        )
 
         if not definition:
            return False
 
-        enum_options = definition.get('enumOptions')
-        query_id = definition.get('queryId')
-        allow_multiple_values = isinstance(definition.get('multiValuesOptions'), dict)
+        enum_options = definition.get("enumOptions")
+        query_id = definition.get("queryId")
+        allow_multiple_values = isinstance(definition.get("multiValuesOptions"), dict)
 
         if isinstance(enum_options, string_types):
-            enum_options = enum_options.split('\n')
+            enum_options = enum_options.split("\n")
 
         validators = {
             "text": lambda value: isinstance(value, string_types),
             "number": _is_number,
-            "enum": lambda value: _is_value_within_options(value,
-                                                           enum_options,
-                                                           allow_multiple_values),
-            "query": lambda value: _is_value_within_options(value,
-                                                            [v["value"] for v in dropdown_values(query_id, self.org)],
-                                                            allow_multiple_values),
+            "enum": lambda value: _is_value_within_options(
+                value, enum_options, allow_multiple_values
+            ),
+            "query": lambda value: _is_value_within_options(
+                value,
+                [v["value"] for v in dropdown_values(query_id, self.org)],
+                allow_multiple_values,
+            ),
             "date": _is_date,
             "datetime-local": _is_date,
             "datetime-with-seconds": _is_date,
@@ -186,7 +201,9 @@ class ParameterizedQuery(object):
 class InvalidParameterError(Exception):
     def __init__(self, parameters):
         parameter_names = ", ".join(parameters)
-        message = "The following parameter values are incompatible with their definitions: {}".format(parameter_names)
+        message = "The following parameter values are incompatible with their definitions: {}".format(
+            parameter_names
+        )
         super(InvalidParameterError, self).__init__(message)
 
 
@@ -194,4 +211,5 @@ class QueryDetachedFromDataSourceError(Exception):
     def __init__(self, query_id):
         self.query_id = query_id
         super(QueryDetachedFromDataSourceError, self).__init__(
-            "This query is detached from any data source. Please select a different query.")
+            "This query is detached from any data source. Please select a different query."
+        )
 
@@ -22,10 +22,14 @@ class Configuration(TypeDecorator):
 
 class EncryptedConfiguration(EncryptedType):
     def process_bind_param(self, value, dialect):
-        return super(EncryptedConfiguration, self).process_bind_param(value.to_json(), dialect)
+        return super(EncryptedConfiguration, self).process_bind_param(
+            value.to_json(), dialect
+        )
 
     def process_result_value(self, value, dialect):
-        return ConfigurationContainer.from_json(super(EncryptedConfiguration, self).process_result_value(value, dialect))
+        return ConfigurationContainer.from_json(
+            super(EncryptedConfiguration, self).process_result_value(value, dialect)
+        )
 
 
 # XXX replace PseudoJSON and MutableDict with real JSON field
 
@@ -25,7 +25,7 @@ from .types import json_cast_property, MutableDict, MutableList
 logger = logging.getLogger(__name__)
 
 
-LAST_ACTIVE_KEY = 'users:last_active_at'
+LAST_ACTIVE_KEY = "users:last_active_at"
 
 
 def sync_last_active_at():
@@ -68,46 +68,57 @@ class PermissionsCheckMixin(object):
         return self.has_permissions((permission,))
 
     def has_permissions(self, permissions):
-        has_permissions = reduce(lambda a, b: a and b,
-                                 [permission in self.permissions for permission in permissions],
-                                 True)
+        has_permissions = reduce(
+            lambda a, b: a and b,
+            [permission in self.permissions for permission in permissions],
+            True,
+        )
 
         return has_permissions
 
 
-@generic_repr('id', 'name', 'email')
-class User(TimestampMixin, db.Model, BelongsToOrgMixin, UserMixin, PermissionsCheckMixin):
+@generic_repr("id", "name", "email")
+class User(
+    TimestampMixin, db.Model, BelongsToOrgMixin, UserMixin, PermissionsCheckMixin
+):
     id = Column(db.Integer, primary_key=True)
-    org_id = Column(db.Integer, db.ForeignKey('organizations.id'))
+    org_id = Column(db.Integer, db.ForeignKey("organizations.id"))
     org = db.relationship("Organization", backref=db.backref("users", lazy="dynamic"))
     name = Column(db.String(320))
     email = Column(EmailType)
-    _profile_image_url = Column('profile_image_url', db.String(320), nullable=True)
+    _profile_image_url = Column("profile_image_url", db.String(320), nullable=True)
     password_hash = Column(db.String(128), nullable=True)
-    group_ids = Column('groups', MutableList.as_mutable(postgresql.ARRAY(db.Integer)), nullable=True)
-    api_key = Column(db.String(40),
-                     default=lambda: generate_token(40),
-                     unique=True)
+    group_ids = Column(
+        "groups", MutableList.as_mutable(postgresql.ARRAY(db.Integer)), nullable=True
+    )
+    api_key = Column(db.String(40), default=lambda: generate_token(40), unique=True)
 
     disabled_at = Column(db.DateTime(True), default=None, nullable=True)
-    details = Column(MutableDict.as_mutable(postgresql.JSON), nullable=True,
-                     server_default='{}', default={})
-    active_at = json_cast_property(db.DateTime(True), 'details', 'active_at',
-                                   default=None)
-    is_invitation_pending = json_cast_property(db.Boolean(True), 'details', 'is_invitation_pending', default=False)
-    is_email_verified = json_cast_property(db.Boolean(True), 'details', 'is_email_verified', default=True)
-
-    __tablename__ = 'users'
-    __table_args__ = (
-        db.Index('users_org_id_email', 'org_id', 'email', unique=True),
+    details = Column(
+        MutableDict.as_mutable(postgresql.JSON),
+        nullable=True,
+        server_default="{}",
+        default={},
     )
+    active_at = json_cast_property(
+        db.DateTime(True), "details", "active_at", default=None
+    )
+    is_invitation_pending = json_cast_property(
+        db.Boolean(True), "details", "is_invitation_pending", default=False
+    )
+    is_email_verified = json_cast_property(
+        db.Boolean(True), "details", "is_email_verified", default=True
+    )
+
+    __tablename__ = "users"
+    __table_args__ = (db.Index("users_org_id_email", "org_id", "email", unique=True),)
 
     def __str__(self):
-        return '%s (%s)' % (self.name, self.email)
+        return "%s (%s)" % (self.name, self.email)
 
     def __init__(self, *args, **kwargs):
-        if kwargs.get('email') is not None:
-            kwargs['email'] = kwargs['email'].lower()
+        if kwargs.get("email") is not None:
+            kwargs["email"] = kwargs["email"].lower()
         super(User, self).__init__(*args, **kwargs)
 
     @property
@@ -126,32 +137,32 @@ class User(TimestampMixin, db.Model, BelongsToOrgMixin, UserMixin, PermissionsCh
     def to_dict(self, with_api_key=False):
         profile_image_url = self.profile_image_url
         if self.is_disabled:
-            assets = app.extensions['webpack']['assets'] or {}
-            path = 'images/avatar.svg'
-            profile_image_url = url_for('static', filename=assets.get(path, path))
+            assets = app.extensions["webpack"]["assets"] or {}
+            path = "images/avatar.svg"
+            profile_image_url = url_for("static", filename=assets.get(path, path))
 
         d = {
-            'id': self.id,
-            'name': self.name,
-            'email': self.email,
-            'profile_image_url': profile_image_url,
-            'groups': self.group_ids,
-            'updated_at': self.updated_at,
-            'created_at': self.created_at,
-            'disabled_at': self.disabled_at,
-            'is_disabled': self.is_disabled,
-            'active_at': self.active_at,
-            'is_invitation_pending': self.is_invitation_pending,
-            'is_email_verified': self.is_email_verified,
+            "id": self.id,
+            "name": self.name,
+            "email": self.email,
+            "profile_image_url": profile_image_url,
+            "groups": self.group_ids,
+            "updated_at": self.updated_at,
+            "created_at": self.created_at,
+            "disabled_at": self.disabled_at,
+            "is_disabled": self.is_disabled,
+            "active_at": self.active_at,
+            "is_invitation_pending": self.is_invitation_pending,
+            "is_email_verified": self.is_email_verified,
         }
 
         if self.password_hash is None:
-            d['auth_type'] = 'external'
+            d["auth_type"] = "external"
         else:
-            d['auth_type'] = 'password'
+            d["auth_type"] = "password"
 
         if with_api_key:
-            d['api_key'] = self.api_key
+            d["api_key"] = self.api_key
 
         return d
 
@@ -169,8 +180,14 @@ class User(TimestampMixin, db.Model, BelongsToOrgMixin, UserMixin, PermissionsCh
     @property
     def permissions(self):
         # TODO: this should be cached.
-        return list(itertools.chain(*[g.permissions for g in
-                                      Group.query.filter(Group.id.in_(self.group_ids))]))
+        return list(
+            itertools.chain(
+                *[
+                    g.permissions
+                    for g in Group.query.filter(Group.id.in_(self.group_ids))
+                ]
+            )
+        )
 
     @classmethod
     def get_by_org(cls, org):
@@ -198,7 +215,7 @@ class User(TimestampMixin, db.Model, BelongsToOrgMixin, UserMixin, PermissionsCh
 
     @classmethod
     def search(cls, base_query, term):
-        term = '%{}%'.format(term)
+        term = "%{}%".format(term)
         search_filter = or_(cls.name.ilike(term), cls.email.like(term))
 
         return base_query.filter(search_filter)
@@ -208,7 +225,9 @@ class User(TimestampMixin, db.Model, BelongsToOrgMixin, UserMixin, PermissionsCh
         if pending:
             return base_query.filter(cls.is_invitation_pending.is_(True))
         else:
-            return base_query.filter(cls.is_invitation_pending.isnot(True))  # check for both `false`/`null`
+            return base_query.filter(
+                cls.is_invitation_pending.isnot(True)
+            )  # check for both `false`/`null`
 
     @classmethod
     def find_by_email(cls, email):
@@ -237,38 +256,49 @@ class User(TimestampMixin, db.Model, BelongsToOrgMixin, UserMixin, PermissionsCh
         return "{0}-{1}".format(self.id, identity)
 
 
-@generic_repr('id', 'name', 'type', 'org_id')
+@generic_repr("id", "name", "type", "org_id")
 class Group(db.Model, BelongsToOrgMixin):
-    DEFAULT_PERMISSIONS = ['create_dashboard', 'create_query', 'edit_dashboard', 'edit_query',
-                           'view_query', 'view_source', 'execute_query', 'list_users', 'schedule_query',
-                           'list_dashboards', 'list_alerts', 'list_data_sources']
+    DEFAULT_PERMISSIONS = [
+        "create_dashboard",
+        "create_query",
+        "edit_dashboard",
+        "edit_query",
+        "view_query",
+        "view_source",
+        "execute_query",
+        "list_users",
+        "schedule_query",
+        "list_dashboards",
+        "list_alerts",
+        "list_data_sources",
+    ]
 
-    BUILTIN_GROUP = 'builtin'
-    REGULAR_GROUP = 'regular'
+    BUILTIN_GROUP = "builtin"
+    REGULAR_GROUP = "regular"
 
     id = Column(db.Integer, primary_key=True)
-    data_sources = db.relationship("DataSourceGroup", back_populates="group",
-                                   cascade="all")
-    org_id = Column(db.Integer, db.ForeignKey('organizations.id'))
+    data_sources = db.relationship(
+        "DataSourceGroup", back_populates="group", cascade="all"
+    )
+    org_id = Column(db.Integer, db.ForeignKey("organizations.id"))
     org = db.relationship("Organization", back_populates="groups")
     type = Column(db.String(255), default=REGULAR_GROUP)
     name = Column(db.String(100))
-    permissions = Column(postgresql.ARRAY(db.String(255)),
-                         default=DEFAULT_PERMISSIONS)
+    permissions = Column(postgresql.ARRAY(db.String(255)), default=DEFAULT_PERMISSIONS)
     created_at = Column(db.DateTime(True), default=db.func.now())
 
-    __tablename__ = 'groups'
+    __tablename__ = "groups"
 
     def __str__(self):
         return text_type(self.id)
 
     def to_dict(self):
         return {
-            'id': self.id,
-            'name': self.name,
-            'permissions': self.permissions,
-            'type': self.type,
-            'created_at': self.created_at
+            "id": self.id,
+            "name": self.name,
+            "permissions": self.permissions,
+            "type": self.type,
+            "created_at": self.created_at,
         }
 
     @classmethod
@@ -285,32 +315,38 @@ class Group(db.Model, BelongsToOrgMixin):
         return list(result)
 
 
-@generic_repr('id', 'object_type', 'object_id', 'access_type', 'grantor_id', 'grantee_id')
+@generic_repr(
+    "id", "object_type", "object_id", "access_type", "grantor_id", "grantee_id"
+)
 class AccessPermission(GFKBase, db.Model):
     id = Column(db.Integer, primary_key=True)
     # 'object' defined in GFKBase
     access_type = Column(db.String(255))
     grantor_id = Column(db.Integer, db.ForeignKey("users.id"))
-    grantor = db.relationship(User, backref='grantor', foreign_keys=[grantor_id])
+    grantor = db.relationship(User, backref="grantor", foreign_keys=[grantor_id])
     grantee_id = Column(db.Integer, db.ForeignKey("users.id"))
-    grantee = db.relationship(User, backref='grantee', foreign_keys=[grantee_id])
+    grantee = db.relationship(User, backref="grantee", foreign_keys=[grantee_id])
 
-    __tablename__ = 'access_permissions'
+    __tablename__ = "access_permissions"
 
     @classmethod
     def grant(cls, obj, access_type, grantee, grantor):
-        grant = cls.query.filter(cls.object_type == obj.__tablename__,
-                                 cls.object_id == obj.id,
-                                 cls.access_type == access_type,
-                                 cls.grantee == grantee,
-                                 cls.grantor == grantor).one_or_none()
+        grant = cls.query.filter(
+            cls.object_type == obj.__tablename__,
+            cls.object_id == obj.id,
+            cls.access_type == access_type,
+            cls.grantee == grantee,
+            cls.grantor == grantor,
+        ).one_or_none()
 
         if not grant:
-            grant = cls(object_type=obj.__tablename__,
-                        object_id=obj.id,
-                        access_type=access_type,
-                        grantee=grantee,
-                        grantor=grantor)
+            grant = cls(
+                object_type=obj.__tablename__,
+                object_id=obj.id,
+                access_type=access_type,
+                grantee=grantee,
+                grantor=grantor,
+            )
             db.session.add(grant)
 
         return grant
@@ -330,8 +366,9 @@ class AccessPermission(GFKBase, db.Model):
|
||||
|
||||
@classmethod
|
||||
def _query(cls, obj, access_type=None, grantee=None, grantor=None):
|
||||
q = cls.query.filter(cls.object_id == obj.id,
|
||||
cls.object_type == obj.__tablename__)
|
||||
q = cls.query.filter(
|
||||
cls.object_id == obj.id, cls.object_type == obj.__tablename__
|
||||
)
|
||||
|
||||
if access_type:
|
||||
q = q.filter(AccessPermission.access_type == access_type)
|
||||
@@ -346,12 +383,12 @@ class AccessPermission(GFKBase, db.Model):
|
||||
|
||||
def to_dict(self):
|
||||
d = {
|
||||
'id': self.id,
|
||||
'object_id': self.object_id,
|
||||
'object_type': self.object_type,
|
||||
'access_type': self.access_type,
|
||||
'grantor': self.grantor_id,
|
||||
'grantee': self.grantee_id
|
||||
"id": self.id,
|
||||
"object_id": self.object_id,
|
||||
"object_type": self.object_type,
|
||||
"access_type": self.access_type,
|
||||
"grantor": self.grantor_id,
|
||||
"grantee": self.grantee_id,
|
||||
}
|
||||
return d
|
||||
|
||||
@@ -392,7 +429,7 @@ class ApiUser(UserMixin, PermissionsCheckMixin):
|
||||
|
||||
@property
|
||||
def permissions(self):
|
||||
return ['view_query']
|
||||
return ["view_query"]
|
||||
|
||||
def has_access(self, obj, access_type):
|
||||
return False
|
||||
|
||||
@@ -12,17 +12,20 @@ from rq.registry import StartedJobRegistry

def get_redis_status():
info = redis_connection.info()
return {'redis_used_memory': info['used_memory'], 'redis_used_memory_human': info['used_memory_human']}
return {
"redis_used_memory": info["used_memory"],
"redis_used_memory_human": info["used_memory_human"],
}


def get_object_counts():
status = {}
status['queries_count'] = Query.query.count()
status["queries_count"] = Query.query.count()
if settings.FEATURE_SHOW_QUERY_RESULTS_COUNT:
status['query_results_count'] = QueryResult.query.count()
status['unused_query_results_count'] = QueryResult.unused().count()
status['dashboards_count'] = Dashboard.query.count()
status['widgets_count'] = Widget.query.count()
status["query_results_count"] = QueryResult.query.count()
status["unused_query_results_count"] = QueryResult.unused().count()
status["dashboards_count"] = Dashboard.query.count()
status["widgets_count"] = Widget.query.count()
return status


@@ -31,19 +34,30 @@ def get_celery_queues():
scheduled_queue_names = db.session.query(DataSource.scheduled_queue_name).distinct()
query = db.session.execute(union_all(queue_names, scheduled_queue_names))

return ['celery'] + [row[0] for row in query]
return ["celery"] + [row[0] for row in query]


def get_queues_status():
return {**{queue: {'size': redis_connection.llen(queue)} for queue in get_celery_queues()},
**{queue.name: {'size': len(queue)} for queue in Queue.all(connection=rq_redis_connection)}}
return {
**{
queue: {"size": redis_connection.llen(queue)}
for queue in get_celery_queues()
},
**{
queue.name: {"size": len(queue)}
for queue in Queue.all(connection=rq_redis_connection)
},
}


def get_db_sizes():
database_metrics = []
queries = [
['Query Results Size', "select pg_total_relation_size('query_results') as size from (select 1) as a"],
['Redash DB Size', "select pg_database_size('postgres') as size"]
[
"Query Results Size",
"select pg_total_relation_size('query_results') as size from (select 1) as a",
],
["Redash DB Size", "select pg_database_size('postgres') as size"],
]
for query_name, query in queries:
result = db.session.execute(query).first()
@@ -53,16 +67,13 @@ def get_db_sizes():


def get_status():
status = {
'version': __version__,
'workers': []
}
status = {"version": __version__, "workers": []}
status.update(get_redis_status())
status.update(get_object_counts())
status['manager'] = redis_connection.hgetall('redash:status')
status['manager']['queues'] = get_queues_status()
status['database_metrics'] = {}
status['database_metrics']['metrics'] = get_db_sizes()
status["manager"] = redis_connection.hgetall("redash:status")
status["manager"]["queues"] = get_queues_status()
status["database_metrics"] = {}
status["database_metrics"]["metrics"] = get_db_sizes()

return status

@@ -72,20 +83,20 @@ def get_waiting_in_queue(queue_name):
for raw in redis_connection.lrange(queue_name, 0, -1):
job = json_loads(raw)
try:
args = json_loads(job['headers']['argsrepr'])
if args.get('query_id') == 'adhoc':
args['query_id'] = None
args = json_loads(job["headers"]["argsrepr"])
if args.get("query_id") == "adhoc":
args["query_id"] = None
except ValueError:
args = {}

job_row = {
'state': 'waiting_in_queue',
'task_name': job['headers']['task'],
'worker': None,
'worker_pid': None,
'start_time': None,
'task_id': job['headers']['id'],
'queue': job['properties']['delivery_info']['routing_key']
"state": "waiting_in_queue",
"task_name": job["headers"]["task"],
"worker": None,
"worker_pid": None,
"start_time": None,
"task_id": job["headers"]["id"],
"queue": job["properties"]["delivery_info"]["routing_key"],
}

job_row.update(args)
@@ -99,23 +110,23 @@ def parse_tasks(task_lists, state):

for task in itertools.chain(*task_lists.values()):
task_row = {
'state': state,
'task_name': task['name'],
'worker': task['hostname'],
'queue': task['delivery_info']['routing_key'],
'task_id': task['id'],
'worker_pid': task['worker_pid'],
'start_time': task['time_start'],
"state": state,
"task_name": task["name"],
"worker": task["hostname"],
"queue": task["delivery_info"]["routing_key"],
"task_id": task["id"],
"worker_pid": task["worker_pid"],
"start_time": task["time_start"],
}

if task['name'] == 'redash.tasks.execute_query':
if task["name"] == "redash.tasks.execute_query":
try:
args = json_loads(task['args'])
args = json_loads(task["args"])
except ValueError:
args = {}

if args.get('query_id') == 'adhoc':
args['query_id'] = None
if args.get("query_id") == "adhoc":
args["query_id"] = None

task_row.update(args)

@@ -125,8 +136,8 @@ def parse_tasks(task_lists, state):


def celery_tasks():
tasks = parse_tasks(celery.control.inspect().active(), 'active')
tasks += parse_tasks(celery.control.inspect().reserved(), 'reserved')
tasks = parse_tasks(celery.control.inspect().active(), "active")
tasks += parse_tasks(celery.control.inspect().reserved(), "reserved")

for queue_name in get_celery_queues():
tasks += get_waiting_in_queue(queue_name)
@@ -135,46 +146,52 @@ def celery_tasks():


def fetch_jobs(queue, job_ids):
return [{
'id': job.id,
'name': job.func_name,
'queue': queue.name,
'enqueued_at': job.enqueued_at,
'started_at': job.started_at
} for job in Job.fetch_many(job_ids, connection=rq_redis_connection) if job is not None]
return [
{
"id": job.id,
"name": job.func_name,
"queue": queue.name,
"enqueued_at": job.enqueued_at,
"started_at": job.started_at,
}
for job in Job.fetch_many(job_ids, connection=rq_redis_connection)
if job is not None
]


def rq_queues():
return {
q.name: {
'name': q.name,
'started': fetch_jobs(q, StartedJobRegistry(queue=q).get_job_ids()),
'queued': len(q.job_ids)
} for q in Queue.all(connection=rq_redis_connection)}
"name": q.name,
"started": fetch_jobs(q, StartedJobRegistry(queue=q).get_job_ids()),
"queued": len(q.job_ids),
}
for q in Queue.all(connection=rq_redis_connection)
}


def describe_job(job):
return '{} ({})'.format(job.id, job.func_name.split(".").pop()) if job else None
return "{} ({})".format(job.id, job.func_name.split(".").pop()) if job else None


def rq_workers():
return [{
'name': w.name,
'hostname': w.hostname,
'pid': w.pid,
'queues': ", ".join([q.name for q in w.queues]),
'state': w.state,
'last_heartbeat': w.last_heartbeat,
'birth_date': w.birth_date,
'current_job': describe_job(w.get_current_job()),
'successful_jobs': w.successful_job_count,
'failed_jobs': w.failed_job_count,
'total_working_time': w.total_working_time
} for w in Worker.all(connection=rq_redis_connection)]
return [
{
"name": w.name,
"hostname": w.hostname,
"pid": w.pid,
"queues": ", ".join([q.name for q in w.queues]),
"state": w.state,
"last_heartbeat": w.last_heartbeat,
"birth_date": w.birth_date,
"current_job": describe_job(w.get_current_job()),
"successful_jobs": w.successful_job_count,
"failed_jobs": w.failed_job_count,
"total_working_time": w.total_working_time,
}
for w in Worker.all(connection=rq_redis_connection)
]


def rq_status():
return {
'queues': rq_queues(),
'workers': rq_workers()
}
return {"queues": rq_queues(), "workers": rq_workers()}

@@ -7,15 +7,15 @@ from funcy import flatten
view_only = True
not_view_only = False

ACCESS_TYPE_VIEW = 'view'
ACCESS_TYPE_MODIFY = 'modify'
ACCESS_TYPE_DELETE = 'delete'
ACCESS_TYPE_VIEW = "view"
ACCESS_TYPE_MODIFY = "modify"
ACCESS_TYPE_DELETE = "delete"

ACCESS_TYPES = (ACCESS_TYPE_VIEW, ACCESS_TYPE_MODIFY, ACCESS_TYPE_DELETE)


def has_access(obj, user, need_view_only):
if hasattr(obj, 'api_key') and user.is_api_user():
if hasattr(obj, "api_key") and user.is_api_user():
return has_access_to_object(obj, user.id, need_view_only)
else:
return has_access_to_groups(obj, user, need_view_only)
@@ -24,7 +24,7 @@ def has_access(obj, user, need_view_only):
def has_access_to_object(obj, api_key, need_view_only):
if obj.api_key == api_key:
return need_view_only
elif hasattr(obj, 'dashboard_api_keys'):
elif hasattr(obj, "dashboard_api_keys"):
# check if api_key belongs to a dashboard containing this query
return api_key in obj.dashboard_api_keys and need_view_only
else:
@@ -32,9 +32,9 @@ def has_access_to_object(obj, api_key, need_view_only):


def has_access_to_groups(obj, user, need_view_only):
groups = obj.groups if hasattr(obj, 'groups') else obj
groups = obj.groups if hasattr(obj, "groups") else obj

if 'admin' in user.permissions:
if "admin" in user.permissions:
return True

matching_groups = set(groups.keys()).intersection(user.group_ids)
@@ -76,19 +76,21 @@ def require_permission(permission):


def require_admin(fn):
return require_permission('admin')(fn)
return require_permission("admin")(fn)


def require_super_admin(fn):
return require_permission('super_admin')(fn)
return require_permission("super_admin")(fn)


def has_permission_or_owner(permission, object_owner_id):
return int(object_owner_id) == current_user.id or current_user.has_permission(permission)
return int(object_owner_id) == current_user.id or current_user.has_permission(
permission
)


def is_admin_or_owner(object_owner_id):
return has_permission_or_owner('admin', object_owner_id)
return has_permission_or_owner("admin", object_owner_id)


def require_permission_or_owner(permission, object_owner_id):

@@ -11,39 +11,34 @@ from redash.utils import json_loads
logger = logging.getLogger(__name__)

__all__ = [
'BaseQueryRunner',
'BaseHTTPQueryRunner',
'InterruptException',
'BaseSQLQueryRunner',
'TYPE_DATETIME',
'TYPE_BOOLEAN',
'TYPE_INTEGER',
'TYPE_STRING',
'TYPE_DATE',
'TYPE_FLOAT',
'SUPPORTED_COLUMN_TYPES',
'register',
'get_query_runner',
'import_query_runners',
'guess_type'
"BaseQueryRunner",
"BaseHTTPQueryRunner",
"InterruptException",
"BaseSQLQueryRunner",
"TYPE_DATETIME",
"TYPE_BOOLEAN",
"TYPE_INTEGER",
"TYPE_STRING",
"TYPE_DATE",
"TYPE_FLOAT",
"SUPPORTED_COLUMN_TYPES",
"register",
"get_query_runner",
"import_query_runners",
"guess_type",
]

# Valid types of columns returned in results:
TYPE_INTEGER = 'integer'
TYPE_FLOAT = 'float'
TYPE_BOOLEAN = 'boolean'
TYPE_STRING = 'string'
TYPE_DATETIME = 'datetime'
TYPE_DATE = 'date'
TYPE_INTEGER = "integer"
TYPE_FLOAT = "float"
TYPE_BOOLEAN = "boolean"
TYPE_STRING = "string"
TYPE_DATETIME = "datetime"
TYPE_DATE = "date"

SUPPORTED_COLUMN_TYPES = set([
TYPE_INTEGER,
TYPE_FLOAT,
TYPE_BOOLEAN,
TYPE_STRING,
TYPE_DATETIME,
TYPE_DATE
])
SUPPORTED_COLUMN_TYPES = set(
[TYPE_INTEGER, TYPE_FLOAT, TYPE_BOOLEAN, TYPE_STRING, TYPE_DATETIME, TYPE_DATE]
)


class InterruptException(Exception):
@@ -60,7 +55,7 @@ class BaseQueryRunner(object):
noop_query = None

def __init__(self, configuration):
self.syntax = 'sql'
self.syntax = "sql"
self.configuration = configuration

@classmethod
@@ -110,9 +105,9 @@ class BaseQueryRunner(object):
duplicates_counter += 1

column_names.append(column_name)
new_columns.append({'name': column_name,
'friendly_name': column_name,
'type': col[1]})
new_columns.append(
{"name": column_name, "friendly_name": column_name, "type": col[1]}
)

return new_columns

@@ -124,19 +119,18 @@ class BaseQueryRunner(object):

if error is not None:
raise Exception("Failed running query [%s]." % query)
return json_loads(results)['rows']
return json_loads(results)["rows"]

@classmethod
def to_dict(cls):
return {
'name': cls.name(),
'type': cls.type(),
'configuration_schema': cls.configuration_schema()
"name": cls.name(),
"type": cls.type(),
"configuration_schema": cls.configuration_schema(),
}


class BaseSQLQueryRunner(BaseQueryRunner):

def get_schema(self, get_stats=False):
schema_dict = {}
self._get_tables(schema_dict)
@@ -150,8 +144,8 @@ class BaseSQLQueryRunner(BaseQueryRunner):
def _get_tables_stats(self, tables_dict):
for t in tables_dict.keys():
if type(tables_dict[t]) == dict:
res = self._run_query_internal('select count(*) as cnt from %s' % t)
tables_dict[t]['size'] = res[0]['cnt']
res = self._run_query_internal("select count(*) as cnt from %s" % t)
tables_dict[t]["size"] = res[0]["cnt"]


class BaseHTTPQueryRunner(BaseQueryRunner):
@@ -159,45 +153,36 @@ class BaseHTTPQueryRunner(BaseQueryRunner):
response_error = "Endpoint returned unexpected status code"
requires_authentication = False
requires_url = True
url_title = 'URL base path'
username_title = 'HTTP Basic Auth Username'
password_title = 'HTTP Basic Auth Password'
url_title = "URL base path"
username_title = "HTTP Basic Auth Username"
password_title = "HTTP Basic Auth Password"

@classmethod
def configuration_schema(cls):
schema = {
'type': 'object',
'properties': {
'url': {
'type': 'string',
'title': cls.url_title,
},
'username': {
'type': 'string',
'title': cls.username_title,
},
'password': {
'type': 'string',
'title': cls.password_title,
},
"type": "object",
"properties": {
"url": {"type": "string", "title": cls.url_title},
"username": {"type": "string", "title": cls.username_title},
"password": {"type": "string", "title": cls.password_title},
},
'secret': ['password'],
'order': ['url', 'username', 'password']
"secret": ["password"],
"order": ["url", "username", "password"],
}

if cls.requires_url or cls.requires_authentication:
schema['required'] = []
schema["required"] = []

if cls.requires_url:
schema['required'] += ['url']
schema["required"] += ["url"]

if cls.requires_authentication:
schema['required'] += ['username', 'password']
schema["required"] += ["username", "password"]
return schema

def get_auth(self):
username = self.configuration.get('username')
password = self.configuration.get('password')
username = self.configuration.get("username")
password = self.configuration.get("password")
if username and password:
return (username, password)
if self.requires_authentication:
@@ -205,7 +190,7 @@ class BaseHTTPQueryRunner(BaseQueryRunner):
else:
return None

def get_response(self, url, auth=None, http_method='get', **kwargs):
def get_response(self, url, auth=None, http_method="get", **kwargs):
# Get authentication values if not given
if auth is None:
auth = self.get_auth()
@@ -223,19 +208,12 @@ class BaseHTTPQueryRunner(BaseQueryRunner):

# Any other responses (e.g. 2xx and 3xx):
if response.status_code != 200:
error = '{} ({}).'.format(
self.response_error,
response.status_code,
)
error = "{} ({}).".format(self.response_error, response.status_code)

except requests.HTTPError as exc:
logger.exception(exc)
error = (
"Failed to execute query. "
"Return Code: {} Reason: {}".format(
response.status_code,
response.text
)
error = "Failed to execute query. " "Return Code: {} Reason: {}".format(
response.status_code, response.text
)
except requests.RequestException as exc:
# Catch all other requests exceptions and return the error.
@@ -252,11 +230,18 @@ query_runners = {}
def register(query_runner_class):
global query_runners
if query_runner_class.enabled():
logger.debug("Registering %s (%s) query runner.", query_runner_class.name(), query_runner_class.type())
logger.debug(
"Registering %s (%s) query runner.",
query_runner_class.name(),
query_runner_class.type(),
)
query_runners[query_runner_class.type()] = query_runner_class
else:
logger.debug("%s query runner enabled but not supported, not registering. Either disable or install missing "
"dependencies.", query_runner_class.name())
logger.debug(
"%s query runner enabled but not supported, not registering. Either disable or install missing "
"dependencies.",
query_runner_class.name(),
)


def get_query_runner(query_runner_type, configuration):
@@ -292,7 +277,7 @@ def guess_type(value):


def guess_type_from_string(string_value):
if string_value == '' or string_value is None:
if string_value == "" or string_value is None:
return TYPE_STRING

try:
@@ -307,7 +292,7 @@ def guess_type_from_string(string_value):
except (ValueError, OverflowError):
pass

if text_type(string_value).lower() in ('true', 'false'):
if text_type(string_value).lower() in ("true", "false"):
return TYPE_BOOLEAN

try:

@@ -4,6 +4,7 @@ from . import register
try:
from requests_aws_sign import AWSV4Sign
from botocore import session, credentials

enabled = True
except ImportError:
enabled = False
@@ -25,45 +26,42 @@ class AmazonElasticsearchService(ElasticSearch):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'server': {
'type': 'string',
'title': 'Endpoint'
"type": "object",
"properties": {
"server": {"type": "string", "title": "Endpoint"},
"region": {"type": "string"},
"access_key": {"type": "string", "title": "Access Key"},
"secret_key": {"type": "string", "title": "Secret Key"},
"use_aws_iam_profile": {
"type": "boolean",
"title": "Use AWS IAM Profile",
},
'region': {
'type': 'string',
},
'access_key': {
'type': 'string',
'title': 'Access Key'
},
'secret_key': {
'type': 'string',
'title': 'Secret Key'
},
'use_aws_iam_profile': {
'type': 'boolean',
'title': 'Use AWS IAM Profile'
}
},
"secret": ["secret_key"],
"order": ["server", "region", "access_key", "secret_key", "use_aws_iam_profile"],
"required": ['server', 'region']
"order": [
"server",
"region",
"access_key",
"secret_key",
"use_aws_iam_profile",
],
"required": ["server", "region"],
}

def __init__(self, configuration):
super(AmazonElasticsearchService, self).__init__(configuration)

region = configuration['region']
region = configuration["region"]
cred = None
if configuration.get('use_aws_iam_profile', False):
if configuration.get("use_aws_iam_profile", False):
cred = credentials.get_credentials(session.Session())
else:
cred = credentials.Credentials(access_key=configuration.get('access_key', ''),
secret_key=configuration.get('secret_key', ''))
cred = credentials.Credentials(
access_key=configuration.get("access_key", ""),
secret_key=configuration.get("secret_key", ""),
)

self.auth = AWSV4Sign(cred, region, 'es')
self.auth = AWSV4Sign(cred, region, "es")


register(AmazonElasticsearchService)

@@ -6,34 +6,39 @@ from redash.settings import parse_boolean
from redash.utils import json_dumps, json_loads

logger = logging.getLogger(__name__)
ANNOTATE_QUERY = parse_boolean(os.environ.get('ATHENA_ANNOTATE_QUERY', 'true'))
SHOW_EXTRA_SETTINGS = parse_boolean(os.environ.get('ATHENA_SHOW_EXTRA_SETTINGS', 'true'))
ASSUME_ROLE = parse_boolean(os.environ.get('ATHENA_ASSUME_ROLE', 'false'))
OPTIONAL_CREDENTIALS = parse_boolean(os.environ.get('ATHENA_OPTIONAL_CREDENTIALS', 'true'))
ANNOTATE_QUERY = parse_boolean(os.environ.get("ATHENA_ANNOTATE_QUERY", "true"))
SHOW_EXTRA_SETTINGS = parse_boolean(
os.environ.get("ATHENA_SHOW_EXTRA_SETTINGS", "true")
)
ASSUME_ROLE = parse_boolean(os.environ.get("ATHENA_ASSUME_ROLE", "false"))
OPTIONAL_CREDENTIALS = parse_boolean(
os.environ.get("ATHENA_OPTIONAL_CREDENTIALS", "true")
)

try:
import pyathena
import boto3

enabled = True
except ImportError:
enabled = False


_TYPE_MAPPINGS = {
'boolean': TYPE_BOOLEAN,
'tinyint': TYPE_INTEGER,
'smallint': TYPE_INTEGER,
'integer': TYPE_INTEGER,
'bigint': TYPE_INTEGER,
'double': TYPE_FLOAT,
'varchar': TYPE_STRING,
'timestamp': TYPE_DATETIME,
'date': TYPE_DATE,
'varbinary': TYPE_STRING,
'array': TYPE_STRING,
'map': TYPE_STRING,
'row': TYPE_STRING,
'decimal': TYPE_FLOAT,
"boolean": TYPE_BOOLEAN,
"tinyint": TYPE_INTEGER,
"smallint": TYPE_INTEGER,
"integer": TYPE_INTEGER,
"bigint": TYPE_INTEGER,
"double": TYPE_FLOAT,
"varchar": TYPE_STRING,
"timestamp": TYPE_DATETIME,
"date": TYPE_DATE,
"varbinary": TYPE_STRING,
"array": TYPE_STRING,
"map": TYPE_STRING,
"row": TYPE_STRING,
"decimal": TYPE_FLOAT,
}


@@ -43,7 +48,7 @@ class SimpleFormatter(object):


class Athena(BaseQueryRunner):
noop_query = 'SELECT 1'
noop_query = "SELECT 1"

@classmethod
def name(cls):
@@ -52,82 +57,68 @@ class Athena(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
schema = {
'type': 'object',
'properties': {
'region': {
'type': 'string',
'title': 'AWS Region'
"type": "object",
"properties": {
"region": {"type": "string", "title": "AWS Region"},
"aws_access_key": {"type": "string", "title": "AWS Access Key"},
"aws_secret_key": {"type": "string", "title": "AWS Secret Key"},
"s3_staging_dir": {
"type": "string",
"title": "S3 Staging (Query Results) Bucket Path",
},
'aws_access_key': {
'type': 'string',
'title': 'AWS Access Key'
"schema": {
"type": "string",
"title": "Schema Name",
"default": "default",
},
'aws_secret_key': {
'type': 'string',
'title': 'AWS Secret Key'
},
's3_staging_dir': {
'type': 'string',
'title': 'S3 Staging (Query Results) Bucket Path'
},
'schema': {
'type': 'string',
'title': 'Schema Name',
'default': 'default'
},
'glue': {
'type': 'boolean',
'title': 'Use Glue Data Catalog',
},
'work_group': {
'type': 'string',
'title': 'Athena Work Group',
'default': 'primary'
"glue": {"type": "boolean", "title": "Use Glue Data Catalog"},
"work_group": {
"type": "string",
"title": "Athena Work Group",
"default": "primary",
},
},
'required': ['region', 's3_staging_dir'],
'extra_options': ['glue'],
'order': ['region', 's3_staging_dir', 'schema', 'work_group'],
'secret': ['aws_secret_key']
"required": ["region", "s3_staging_dir"],
"extra_options": ["glue"],
"order": ["region", "s3_staging_dir", "schema", "work_group"],
"secret": ["aws_secret_key"],
}

if SHOW_EXTRA_SETTINGS:
schema['properties'].update({
'encryption_option': {
'type': 'string',
'title': 'Encryption Option',
},
'kms_key': {
'type': 'string',
'title': 'KMS Key',
},
})
schema['extra_options'].append('encryption_option')
schema['extra_options'].append('kms_key')
schema["properties"].update(
{
"encryption_option": {
"type": "string",
"title": "Encryption Option",
},
"kms_key": {"type": "string", "title": "KMS Key"},
}
)
schema["extra_options"].append("encryption_option")
schema["extra_options"].append("kms_key")

if ASSUME_ROLE:
del schema['properties']['aws_access_key']
del schema['properties']['aws_secret_key']
schema['secret'] = []
del schema["properties"]["aws_access_key"]
del schema["properties"]["aws_secret_key"]
schema["secret"] = []

schema['order'].insert(1, 'iam_role')
schema['order'].insert(2, 'external_id')
schema['properties'].update({
'iam_role': {
'type': 'string',
'title': 'IAM role to assume',
},
'external_id': {
'type': 'string',
'title': 'External ID to be used while STS assume role',
},
})
schema["order"].insert(1, "iam_role")
schema["order"].insert(2, "external_id")
schema["properties"].update(
{
"iam_role": {"type": "string", "title": "IAM role to assume"},
"external_id": {
"type": "string",
"title": "External ID to be used while STS assume role",
},
}
)
else:
schema['order'].insert(1, 'aws_access_key')
schema['order'].insert(2, 'aws_secret_key')
schema["order"].insert(1, "aws_access_key")
schema["order"].insert(2, "aws_secret_key")

if not OPTIONAL_CREDENTIALS and not ASSUME_ROLE:
schema['required'] += ['aws_access_key', 'aws_secret_key']
schema["required"] += ["aws_access_key", "aws_secret_key"]

return schema

@@ -146,47 +137,50 @@ class Athena(BaseQueryRunner):

def _get_iam_credentials(self, user=None):
if ASSUME_ROLE:
role_session_name = 'redash' if user is None else user.email
sts = boto3.client('sts')
role_session_name = "redash" if user is None else user.email
sts = boto3.client("sts")
creds = sts.assume_role(
RoleArn=self.configuration.get('iam_role'),
RoleArn=self.configuration.get("iam_role"),
RoleSessionName=role_session_name,
ExternalId=self.configuration.get('external_id')
)
ExternalId=self.configuration.get("external_id"),
)
return {
'aws_access_key_id': creds['Credentials']['AccessKeyId'],
'aws_secret_access_key': creds['Credentials']['SecretAccessKey'],
'aws_session_token': creds['Credentials']['SessionToken'],
'region_name': self.configuration['region']
"aws_access_key_id": creds["Credentials"]["AccessKeyId"],
"aws_secret_access_key": creds["Credentials"]["SecretAccessKey"],
"aws_session_token": creds["Credentials"]["SessionToken"],
"region_name": self.configuration["region"],
}
else:
return {
'aws_access_key_id': self.configuration.get('aws_access_key', None),
'aws_secret_access_key': self.configuration.get('aws_secret_key', None),
'region_name': self.configuration['region']
"aws_access_key_id": self.configuration.get("aws_access_key", None),
"aws_secret_access_key": self.configuration.get("aws_secret_key", None),
"region_name": self.configuration["region"],
}

def __get_schema_from_glue(self):
client = boto3.client('glue', **self._get_iam_credentials())
client = boto3.client("glue", **self._get_iam_credentials())
schema = {}

database_paginator = client.get_paginator('get_databases')
table_paginator = client.get_paginator('get_tables')
database_paginator = client.get_paginator("get_databases")
table_paginator = client.get_paginator("get_tables")

for databases in database_paginator.paginate():
for database in databases['DatabaseList']:
iterator = table_paginator.paginate(DatabaseName=database['Name'])
for table in iterator.search('TableList[]'):
table_name = '%s.%s' % (database['Name'], table['Name'])
for database in databases["DatabaseList"]:
iterator = table_paginator.paginate(DatabaseName=database["Name"])
for table in iterator.search("TableList[]"):
table_name = "%s.%s" % (database["Name"], table["Name"])
if table_name not in schema:
column = [columns['Name'] for columns in table['StorageDescriptor']['Columns']]
schema[table_name] = {'name': table_name, 'columns': column}
for partition in table.get('PartitionKeys', []):
schema[table_name]['columns'].append(partition['Name'])
column = [
columns["Name"]
for columns in table["StorageDescriptor"]["Columns"]
]
schema[table_name] = {"name": table_name, "columns": column}
for partition in table.get("PartitionKeys", []):
schema[table_name]["columns"].append(partition["Name"])
return list(schema.values())

def get_schema(self, get_stats=False):
if self.configuration.get('glue', False):
if self.configuration.get("glue", False):
return self.__get_schema_from_glue()

schema = {}
@@ -201,29 +195,35 @@ class Athena(BaseQueryRunner):
raise Exception("Failed getting schema.")

results = json_loads(results)
for row in results['rows']:
table_name = '{0}.{1}'.format(row['table_schema'], row['table_name'])
for row in results["rows"]:
table_name = "{0}.{1}".format(row["table_schema"], row["table_name"])
if table_name not in schema:
schema[table_name] = {'name': table_name, 'columns': []}
schema[table_name]['columns'].append(row['column_name'])
schema[table_name] = {"name": table_name, "columns": []}
schema[table_name]["columns"].append(row["column_name"])

return list(schema.values())

def run_query(self, query, user):
cursor = pyathena.connect(
s3_staging_dir=self.configuration['s3_staging_dir'],
schema_name=self.configuration.get('schema', 'default'),
encryption_option=self.configuration.get('encryption_option', None),
kms_key=self.configuration.get('kms_key', None),
work_group=self.configuration.get('work_group', 'primary'),
s3_staging_dir=self.configuration["s3_staging_dir"],
schema_name=self.configuration.get("schema", "default"),
encryption_option=self.configuration.get("encryption_option", None),
kms_key=self.configuration.get("kms_key", None),
work_group=self.configuration.get("work_group", "primary"),
formatter=SimpleFormatter(),
**self._get_iam_credentials(user=user)).cursor()
**self._get_iam_credentials(user=user)
).cursor()

try:
cursor.execute(query)
column_tuples = [(i[0], _TYPE_MAPPINGS.get(i[1], None)) for i in cursor.description]
column_tuples = [
(i[0], _TYPE_MAPPINGS.get(i[1], None)) for i in cursor.description
]
columns = self.fetch_columns(column_tuples)
rows = [dict(zip(([c['name'] for c in columns]), r)) for i, r in enumerate(cursor.fetchall())]
rows = [
dict(zip(([c["name"] for c in columns]), r))
for i, r in enumerate(cursor.fetchall())
]
qbytes = None
athena_query_id = None
try:
@@ -235,12 +235,12 @@ class Athena(BaseQueryRunner):
except AttributeError as e:
logger.debug("Athena Upstream can't get query_id: %s", e)
data = {
'columns': columns,
'rows': rows,
'metadata': {
'data_scanned': qbytes,
'athena_query_id': athena_query_id
}
"columns": columns,
"rows": rows,
"metadata": {
"data_scanned": qbytes,
"athena_query_id": athena_query_id,
},
}
json_data = json_dumps(data, ignore_nan=True)
error = None

@@ -13,24 +13,22 @@ try:
import atsd_client
from atsd_client.exceptions import SQLException
from atsd_client.services import SQLService, MetricsService

enabled = True
except ImportError:
enabled = False

types_map = {
'long': TYPE_INTEGER,

'bigint': TYPE_INTEGER,
'integer': TYPE_INTEGER,
'smallint': TYPE_INTEGER,

'float': TYPE_FLOAT,
'double': TYPE_FLOAT,
'decimal': TYPE_FLOAT,

'string': TYPE_STRING,
'date': TYPE_DATE,
'xsd:dateTimeStamp': TYPE_DATETIME
"long": TYPE_INTEGER,
"bigint": TYPE_INTEGER,
"integer": TYPE_INTEGER,
"smallint": TYPE_INTEGER,
"float": TYPE_FLOAT,
"double": TYPE_FLOAT,
"decimal": TYPE_FLOAT,
"string": TYPE_STRING,
"date": TYPE_DATE,
"xsd:dateTimeStamp": TYPE_DATETIME,
}


@@ -41,7 +39,7 @@ def resolve_redash_type(type_in_atsd):
:return: redash type constant
"""
if isinstance(type_in_atsd, dict):
type_in_redash = types_map.get(type_in_atsd['base'])
type_in_redash = types_map.get(type_in_atsd["base"])
else:
type_in_redash = types_map.get(type_in_atsd)
return type_in_redash
@@ -53,22 +51,26 @@ def generate_rows_and_columns(csv_response):
:param csv_response: `str`
:return: prepared rows and columns
"""
meta, data = csv_response.split('\n', 1)
meta, data = csv_response.split("\n", 1)
meta = meta[1:]

meta_with_padding = meta + '=' * (4 - len(meta) % 4)
meta_decoded = meta_with_padding.decode('base64')
meta_with_padding = meta + "=" * (4 - len(meta) % 4)
meta_decoded = meta_with_padding.decode("base64")
meta_json = json_loads(meta_decoded)
meta_columns = meta_json['tableSchema']['columns']
meta_columns = meta_json["tableSchema"]["columns"]

reader = csv.reader(data.splitlines())
next(reader)

columns = [{'friendly_name': i['titles'],
'type': resolve_redash_type(i['datatype']),
'name': i['name']}
for i in meta_columns]
column_names = [c['name'] for c in columns]
columns = [
{
"friendly_name": i["titles"],
"type": resolve_redash_type(i["datatype"]),
"name": i["name"],
}
for i in meta_columns
]
column_names = [c["name"] for c in columns]
rows = [dict(zip(column_names, row)) for row in reader]
return columns, rows

@@ -87,80 +89,66 @@ class AxibaseTSD(BaseQueryRunner):
@classmethod
def configuration_schema(cls):
return {
'type': 'object',
'properties': {
'protocol': {
'type': 'string',
'title': 'Protocol',
'default': 'http'
"type": "object",
"properties": {
"protocol": {"type": "string", "title": "Protocol", "default": "http"},
"hostname": {
"type": "string",
"title": "Host",
"default": "axibase_tsd_hostname",
},
'hostname': {
'type': 'string',
'title': 'Host',
'default': 'axibase_tsd_hostname'
"port": {"type": "number", "title": "Port", "default": 8088},
"username": {"type": "string"},
"password": {"type": "string", "title": "Password"},
"timeout": {
"type": "number",
"default": 600,
"title": "Connection Timeout",
},
'port': {
'type': 'number',
'title': 'Port',
'default': 8088
"min_insert_date": {
"type": "string",
"title": "Metric Minimum Insert Date",
},
'username': {
'type': 'string'
"expression": {"type": "string", "title": "Metric Filter"},
"limit": {"type": "number", "default": 5000, "title": "Metric Limit"},
"trust_certificate": {
"type": "boolean",
"title": "Trust SSL Certificate",
},
'password': {
'type': 'string',
'title': 'Password'
},
'timeout': {
'type': 'number',
'default': 600,
'title': 'Connection Timeout'
},
'min_insert_date': {
'type': 'string',
'title': 'Metric Minimum Insert Date'
},
'expression': {
'type': 'string',
'title': 'Metric Filter'
},
'limit': {
'type': 'number',
'default': 5000,
'title': 'Metric Limit'
},
'trust_certificate': {
'type': 'boolean',
'title': 'Trust SSL Certificate'
}
},
'required': ['username', 'password', 'hostname', 'protocol', 'port'],
'secret': ['password']
"required": ["username", "password", "hostname", "protocol", "port"],
"secret": ["password"],
}

def __init__(self, configuration):
super(AxibaseTSD, self).__init__(configuration)
self.url = '{0}://{1}:{2}'.format(self.configuration.get('protocol', 'http'),
self.configuration.get('hostname', 'localhost'),
self.configuration.get('port', 8088))
self.url = "{0}://{1}:{2}".format(
self.configuration.get("protocol", "http"),
self.configuration.get("hostname", "localhost"),
self.configuration.get("port", 8088),
)

def run_query(self, query, user):
connection = atsd_client.connect_url(self.url,
self.configuration.get('username'),
self.configuration.get('password'),
verify=self.configuration.get('trust_certificate', False),
timeout=self.configuration.get('timeout', 600))
connection = atsd_client.connect_url(
self.url,
self.configuration.get("username"),
self.configuration.get("password"),
verify=self.configuration.get("trust_certificate", False),
timeout=self.configuration.get("timeout", 600),
)
sql = SQLService(connection)
query_id = str(uuid.uuid4())

try:
logger.debug("SQL running query: %s", query)
data = sql.query_with_params(query, {'outputFormat': 'csv', 'metadataFormat': 'EMBED',
'queryId': query_id})
data = sql.query_with_params(
query,
{"outputFormat": "csv", "metadataFormat": "EMBED", "queryId": query_id},
)

columns, rows = generate_rows_and_columns(data)

data = {'columns': columns, 'rows': rows}
data = {"columns": columns, "rows": rows}
json_data = json_dumps(data)
error = None

@@ -175,23 +163,38 @@ class AxibaseTSD(BaseQueryRunner):
return json_data, error

def get_schema(self, get_stats=False):
connection = atsd_client.connect_url(self.url,
self.configuration.get('username'),
self.configuration.get('password'),
verify=self.configuration.get('trust_certificate', False),
timeout=self.configuration.get('timeout', 600))
connection = atsd_client.connect_url(
self.url,
self.configuration.get("username"),
self.configuration.get("password"),
verify=self.configuration.get("trust_certificate", False),
timeout=self.configuration.get("timeout", 600),
)
metrics = MetricsService(connection)
ml = metrics.list(expression=self.configuration.get('expression', None),
minInsertDate=self.configuration.get('min_insert_date', None),
limit=self.configuration.get('limit', 5000))
metrics_list = [i.name.encode('utf-8') for i in ml]
metrics_list.append('atsd_series')
ml = metrics.list(
expression=self.configuration.get("expression", None),
minInsertDate=self.configuration.get("min_insert_date", None),
limit=self.configuration.get("limit", 5000),
)
metrics_list = [i.name.encode("utf-8") for i in ml]
metrics_list.append("atsd_series")
schema = {}
default_columns = ['entity', 'datetime', 'time', 'metric', 'value', 'text',
'tags', 'entity.tags', 'metric.tags']
default_columns = [
"entity",
"datetime",
"time",
"metric",
"value",
"text",
"tags",
"entity.tags",
"metric.tags",
]
for table_name in metrics_list:
schema[table_name] = {'name': "'{}'".format(table_name),
'columns': default_columns}
schema[table_name] = {
"name": "'{}'".format(table_name),
"columns": default_columns,
}
values = list(schema.values())
return values


@@ -1,27 +1,35 @@
from redash.query_runner import BaseQueryRunner, register
from redash.query_runner import TYPE_STRING, TYPE_DATE, TYPE_DATETIME, TYPE_INTEGER, TYPE_FLOAT, TYPE_BOOLEAN
from redash.query_runner import (
TYPE_STRING,
TYPE_DATE,
TYPE_DATETIME,
TYPE_INTEGER,
TYPE_FLOAT,
TYPE_BOOLEAN,
)
from redash.utils import json_dumps, json_loads


try:
from azure.kusto.data.request import KustoClient, KustoConnectionStringBuilder
from azure.kusto.data.exceptions import KustoServiceError

enabled = True
except ImportError:
enabled = False

TYPES_MAP = {
'boolean': TYPE_BOOLEAN,
'datetime': TYPE_DATETIME,
'date': TYPE_DATE,
'dynamic': TYPE_STRING,
'guid': TYPE_STRING,
'int': TYPE_INTEGER,
'long': TYPE_INTEGER,
'real': TYPE_FLOAT,
'string': TYPE_STRING,
'timespan': TYPE_STRING,
'decimal': TYPE_FLOAT
"boolean": TYPE_BOOLEAN,
"datetime": TYPE_DATETIME,
"date": TYPE_DATE,
"dynamic": TYPE_STRING,
"guid": TYPE_STRING,
"int": TYPE_INTEGER,
"long": TYPE_INTEGER,
"real": TYPE_FLOAT,
"string": TYPE_STRING,
"timespan": TYPE_STRING,
"decimal": TYPE_FLOAT,
}


@@ -31,41 +39,37 @@ class AzureKusto(BaseQueryRunner):

def __init__(self, configuration):
super(AzureKusto, self).__init__(configuration)
self.syntax = 'custom'
self.syntax = "custom"

@classmethod
def configuration_schema(cls):
return {
"type": "object",
"properties": {
"cluster": {
"type": "string"
},
"azure_ad_client_id": {
"type": "string",
"title": "Azure AD Client ID"
},
"cluster": {"type": "string"},
"azure_ad_client_id": {"type": "string", "title": "Azure AD Client ID"},
"azure_ad_client_secret": {
"type": "string",
"title": "Azure AD Client Secret"
"title": "Azure AD Client Secret",
},
"azure_ad_tenant_id": {
"type": "string",
"title": "Azure AD Tenant Id"
},
"database": {
"type": "string"
}
"azure_ad_tenant_id": {"type": "string", "title": "Azure AD Tenant Id"},
"database": {"type": "string"},
},
"required": [
"cluster", "azure_ad_client_id", "azure_ad_client_secret",
"azure_ad_tenant_id", "database"
"cluster",
"azure_ad_client_id",
"azure_ad_client_secret",
"azure_ad_tenant_id",
"database",
],
"order": [
"cluster", "azure_ad_client_id", "azure_ad_client_secret",
"azure_ad_tenant_id", "database"
"cluster",
"azure_ad_client_id",
"azure_ad_client_secret",
"azure_ad_tenant_id",
"database",
],
"secret": ["azure_ad_client_secret"]
"secret": ["azure_ad_client_secret"],
}

@classmethod
@@ -83,14 +87,15 @@ class AzureKusto(BaseQueryRunner):
def run_query(self, query, user):

kcsb = KustoConnectionStringBuilder.with_aad_application_key_authentication(
connection_string=self.configuration['cluster'],
aad_app_id=self.configuration['azure_ad_client_id'],
app_key=self.configuration['azure_ad_client_secret'],
authority_id=self.configuration['azure_ad_tenant_id'])
connection_string=self.configuration["cluster"],
aad_app_id=self.configuration["azure_ad_client_id"],
app_key=self.configuration["azure_ad_client_secret"],
authority_id=self.configuration["azure_ad_tenant_id"],
)

client = KustoClient(kcsb)

db = self.configuration['database']
db = self.configuration["database"]
try:
response = client.execute(db, query)

@@ -100,24 +105,26 @@ class AzureKusto(BaseQueryRunner):
columns = []
rows = []
for c in result_cols:
columns.append({
'name': c.column_name,
'friendly_name': c.column_name,
'type': TYPES_MAP.get(c.column_type, None)
})
columns.append(
{
"name": c.column_name,
"friendly_name": c.column_name,
"type": TYPES_MAP.get(c.column_type, None),
}
)

# rows must be [{'column1': value, 'column2': value}]
for row in result_rows:
rows.append(row.to_dict())

error = None
data = {'columns': columns, 'rows': rows}
data = {"columns": columns, "rows": rows}
json_data = json_dumps(data)

except KustoServiceError as err:
json_data = None
try:
error = err.args[1][0]['error']['@message']
error = err.args[1][0]["error"]["@message"]
except (IndexError, KeyError):
error = err.args[1]
except KeyboardInterrupt:
@@ -136,19 +143,21 @@ class AzureKusto(BaseQueryRunner):

results = json_loads(results)

schema_as_json = json_loads(results['rows'][0]['DatabaseSchema'])
tables_list = schema_as_json['Databases'][self.configuration['database']]['Tables'].values()
schema_as_json = json_loads(results["rows"][0]["DatabaseSchema"])
tables_list = schema_as_json["Databases"][self.configuration["database"]][
"Tables"
].values()

schema = {}

for table in tables_list:
table_name = table['Name']
table_name = table["Name"]

if table_name not in schema:
schema[table_name] = {'name': table_name, 'columns': []}
schema[table_name] = {"name": table_name, "columns": []}

for column in table['OrderedColumns']:
schema[table_name]['columns'].append(column['Name'])
for column in table["OrderedColumns"]:
schema[table_name]["columns"].append(column["Name"])

return list(schema.values())


@@ -24,24 +24,24 @@ except ImportError:
|
||||
enabled = False
|
||||
|
||||
types_map = {
|
||||
'INTEGER': TYPE_INTEGER,
|
||||
'FLOAT': TYPE_FLOAT,
|
||||
'BOOLEAN': TYPE_BOOLEAN,
|
||||
'STRING': TYPE_STRING,
|
||||
'TIMESTAMP': TYPE_DATETIME,
|
||||
"INTEGER": TYPE_INTEGER,
|
||||
"FLOAT": TYPE_FLOAT,
|
||||
"BOOLEAN": TYPE_BOOLEAN,
|
||||
"STRING": TYPE_STRING,
|
||||
"TIMESTAMP": TYPE_DATETIME,
|
||||
}
|
||||
|
||||
|
||||
def transform_cell(field_type, cell_value):
|
||||
if cell_value is None:
|
||||
return None
|
||||
if field_type == 'INTEGER':
|
||||
if field_type == "INTEGER":
|
||||
return int(cell_value)
|
||||
elif field_type == 'FLOAT':
|
||||
elif field_type == "FLOAT":
|
||||
return float(cell_value)
|
||||
elif field_type == 'BOOLEAN':
|
||||
elif field_type == "BOOLEAN":
|
||||
return cell_value.lower() == "true"
|
||||
elif field_type == 'TIMESTAMP':
|
||||
elif field_type == "TIMESTAMP":
|
||||
return datetime.datetime.fromtimestamp(float(cell_value))
|
||||
return cell_value
|
||||
|
||||
@@ -51,10 +51,12 @@ def transform_row(row, fields):
|
||||
|
||||
for column_index, cell in enumerate(row["f"]):
|
||||
field = fields[column_index]
|
||||
if field.get('mode') == 'REPEATED':
|
||||
cell_value = [transform_cell(field['type'], item['v']) for item in cell['v']]
|
||||
if field.get("mode") == "REPEATED":
|
||||
cell_value = [
|
||||
transform_cell(field["type"], item["v"]) for item in cell["v"]
|
||||
]
|
||||
else:
|
||||
cell_value = transform_cell(field['type'], cell['v'])
|
||||
cell_value = transform_cell(field["type"], cell["v"])
|
||||
|
||||
row_data[field["name"]] = cell_value
|
||||
|
||||
@@ -70,12 +72,11 @@ def _load_key(filename):
|
||||
|
||||
|
||||
def _get_query_results(jobs, project_id, location, job_id, start_index):
|
||||
query_reply = jobs.getQueryResults(projectId=project_id,
|
||||
location=location,
|
||||
jobId=job_id,
|
||||
startIndex=start_index).execute()
|
||||
logging.debug('query_reply %s', query_reply)
|
||||
if not query_reply['jobComplete']:
|
||||
query_reply = jobs.getQueryResults(
|
||||
projectId=project_id, location=location, jobId=job_id, startIndex=start_index
|
||||
).execute()
|
||||
logging.debug("query_reply %s", query_reply)
|
||||
if not query_reply["jobComplete"]:
|
||||
time.sleep(10)
|
||||
return _get_query_results(jobs, project_id, location, job_id, start_index)
|
||||
|
@@ -93,54 +94,51 @@ class BigQuery(BaseQueryRunner):

     @classmethod
     def configuration_schema(cls):
         return {
-            'type': 'object',
-            'properties': {
-                'projectId': {
-                    'type': 'string',
-                    'title': 'Project ID'
-                },
-                'jsonKeyFile': {
-                    "type": "string",
-                    'title': 'JSON Key File'
-                },
-                'totalMBytesProcessedLimit': {
+            "type": "object",
+            "properties": {
+                "projectId": {"type": "string", "title": "Project ID"},
+                "jsonKeyFile": {"type": "string", "title": "JSON Key File"},
+                "totalMBytesProcessedLimit": {
                     "type": "number",
-                    'title': 'Scanned Data Limit (MB)'
+                    "title": "Scanned Data Limit (MB)",
                 },
-                'userDefinedFunctionResourceUri': {
+                "userDefinedFunctionResourceUri": {
                     "type": "string",
-                    'title': 'UDF Source URIs (i.e. gs://bucket/date_utils.js, gs://bucket/string_utils.js )'
+                    "title": "UDF Source URIs (i.e. gs://bucket/date_utils.js, gs://bucket/string_utils.js )",
                 },
-                'useStandardSql': {
+                "useStandardSql": {
                     "type": "boolean",
-                    'title': "Use Standard SQL",
+                    "title": "Use Standard SQL",
                     "default": True,
                 },
-                'location': {
-                    "type": "string",
-                    "title": "Processing Location",
-                },
-                'loadSchema': {
-                    "type": "boolean",
-                    "title": "Load Schema"
-                },
-                'maximumBillingTier': {
+                "location": {"type": "string", "title": "Processing Location"},
+                "loadSchema": {"type": "boolean", "title": "Load Schema"},
+                "maximumBillingTier": {
                     "type": "number",
-                    "title": "Maximum Billing Tier"
-                }
+                    "title": "Maximum Billing Tier",
+                },
             },
-            'required': ['jsonKeyFile', 'projectId'],
-            "order": ['projectId', 'jsonKeyFile', 'loadSchema', 'useStandardSql', 'location', 'totalMBytesProcessedLimit', 'maximumBillingTier', 'userDefinedFunctionResourceUri'],
-            'secret': ['jsonKeyFile']
+            "required": ["jsonKeyFile", "projectId"],
+            "order": [
+                "projectId",
+                "jsonKeyFile",
+                "loadSchema",
+                "useStandardSql",
+                "location",
+                "totalMBytesProcessedLimit",
+                "maximumBillingTier",
+                "userDefinedFunctionResourceUri",
+            ],
+            "secret": ["jsonKeyFile"],
         }

     def _get_bigquery_service(self):
         scope = [
             "https://www.googleapis.com/auth/bigquery",
-            "https://www.googleapis.com/auth/drive"
+            "https://www.googleapis.com/auth/drive",
         ]

-        key = json_loads(b64decode(self.configuration['jsonKeyFile']))
+        key = json_loads(b64decode(self.configuration["jsonKeyFile"]))

         creds = ServiceAccountCredentials.from_json_keyfile_dict(key, scope)
         http = httplib2.Http(timeout=settings.BIGQUERY_HTTP_TIMEOUT)
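Hunks like the one above are pure mechanical requoting and rewrapping, and the result can be reproduced programmatically. A minimal sketch, assuming Black is installed and that its `format_str`/`FileMode` API applies; the sample string is invented for illustration:

    import black

    # Black normalizes quotes to double and collapses short nested dicts onto
    # one line, mirroring the configuration_schema changes in this diff.
    src = "schema = {'projectId': {'type': 'string', 'title': 'Project ID'}}"
    print(black.format_str(src, mode=black.FileMode()), end="")
    # schema = {"projectId": {"type": "string", "title": "Project ID"}}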
@@ -155,43 +153,38 @@ class BigQuery(BaseQueryRunner):
         return self.configuration.get("location")

     def _get_total_bytes_processed(self, jobs, query):
-        job_data = {
-            "query": query,
-            "dryRun": True,
-        }
+        job_data = {"query": query, "dryRun": True}

         if self._get_location():
-            job_data['location'] = self._get_location()
+            job_data["location"] = self._get_location()

-        if self.configuration.get('useStandardSql', False):
-            job_data['useLegacySql'] = False
+        if self.configuration.get("useStandardSql", False):
+            job_data["useLegacySql"] = False

         response = jobs.query(projectId=self._get_project_id(), body=job_data).execute()
         return int(response["totalBytesProcessed"])

     def _get_job_data(self, query):
-        job_data = {
-            "configuration": {
-                "query": {
-                    "query": query,
-                }
-            }
-        }
+        job_data = {"configuration": {"query": {"query": query}}}

         if self._get_location():
-            job_data['jobReference'] = {
-                'location': self._get_location()
-            }
+            job_data["jobReference"] = {"location": self._get_location()}

-        if self.configuration.get('useStandardSql', False):
-            job_data['configuration']['query']['useLegacySql'] = False
+        if self.configuration.get("useStandardSql", False):
+            job_data["configuration"]["query"]["useLegacySql"] = False

-        if self.configuration.get('userDefinedFunctionResourceUri'):
-            resource_uris = self.configuration["userDefinedFunctionResourceUri"].split(',')
-            job_data["configuration"]["query"]["userDefinedFunctionResources"] = [{"resourceUri": resource_uri} for resource_uri in resource_uris]
+        if self.configuration.get("userDefinedFunctionResourceUri"):
+            resource_uris = self.configuration["userDefinedFunctionResourceUri"].split(
+                ","
+            )
+            job_data["configuration"]["query"]["userDefinedFunctionResources"] = [
+                {"resourceUri": resource_uri} for resource_uri in resource_uris
+            ]

         if "maximumBillingTier" in self.configuration:
-            job_data["configuration"]["query"]["maximumBillingTier"] = self.configuration["maximumBillingTier"]
+            job_data["configuration"]["query"][
+                "maximumBillingTier"
+            ] = self.configuration["maximumBillingTier"]

         return job_data

@@ -200,90 +193,113 @@ class BigQuery(BaseQueryRunner):
         job_data = self._get_job_data(query)
         insert_response = jobs.insert(projectId=project_id, body=job_data).execute()
         current_row = 0
-        query_reply = _get_query_results(jobs, project_id=project_id, location=self._get_location(),
-                                         job_id=insert_response['jobReference']['jobId'], start_index=current_row)
+        query_reply = _get_query_results(
+            jobs,
+            project_id=project_id,
+            location=self._get_location(),
+            job_id=insert_response["jobReference"]["jobId"],
+            start_index=current_row,
+        )

         logger.debug("bigquery replied: %s", query_reply)

         rows = []

-        while ("rows" in query_reply) and current_row < query_reply['totalRows']:
+        while ("rows" in query_reply) and current_row < query_reply["totalRows"]:
             for row in query_reply["rows"]:
                 rows.append(transform_row(row, query_reply["schema"]["fields"]))

-            current_row += len(query_reply['rows'])
+            current_row += len(query_reply["rows"])

             query_result_request = {
-                'projectId': project_id,
-                'jobId': query_reply['jobReference']['jobId'],
-                'startIndex': current_row
+                "projectId": project_id,
+                "jobId": query_reply["jobReference"]["jobId"],
+                "startIndex": current_row,
             }

             if self._get_location():
-                query_result_request['location'] = self._get_location()
+                query_result_request["location"] = self._get_location()

             query_reply = jobs.getQueryResults(**query_result_request).execute()

-        columns = [{
-            'name': f["name"],
-            'friendly_name': f["name"],
-            'type': "string" if f.get('mode') == "REPEATED"
-            else types_map.get(f['type'], "string")
-        } for f in query_reply["schema"]["fields"]]
+        columns = [
+            {
+                "name": f["name"],
+                "friendly_name": f["name"],
+                "type": "string"
+                if f.get("mode") == "REPEATED"
+                else types_map.get(f["type"], "string"),
+            }
+            for f in query_reply["schema"]["fields"]
+        ]

         data = {
             "columns": columns,
             "rows": rows,
-            'metadata': {'data_scanned': int(query_reply['totalBytesProcessed'])}
+            "metadata": {"data_scanned": int(query_reply["totalBytesProcessed"])},
         }

         return data

     def _get_columns_schema(self, table_data):
         columns = []
-        for column in table_data.get('schema', {}).get('fields', []):
+        for column in table_data.get("schema", {}).get("fields", []):
             columns.extend(self._get_columns_schema_column(column))

         project_id = self._get_project_id()
-        table_name = table_data['id'].replace("%s:" % project_id, "")
-        return {'name': table_name, 'columns': columns}
+        table_name = table_data["id"].replace("%s:" % project_id, "")
+        return {"name": table_name, "columns": columns}

     def _get_columns_schema_column(self, column):
         columns = []
-        if column['type'] == 'RECORD':
-            for field in column['fields']:
-                columns.append("{}.{}".format(column['name'], field['name']))
+        if column["type"] == "RECORD":
+            for field in column["fields"]:
+                columns.append("{}.{}".format(column["name"], field["name"]))
         else:
-            columns.append(column['name'])
+            columns.append(column["name"])

         return columns

     def get_schema(self, get_stats=False):
-        if not self.configuration.get('loadSchema', False):
+        if not self.configuration.get("loadSchema", False):
             return []

         service = self._get_bigquery_service()
         project_id = self._get_project_id()
         datasets = service.datasets().list(projectId=project_id).execute()
         schema = []
-        for dataset in datasets.get('datasets', []):
-            dataset_id = dataset['datasetReference']['datasetId']
-            tables = service.tables().list(projectId=project_id, datasetId=dataset_id).execute()
+        for dataset in datasets.get("datasets", []):
+            dataset_id = dataset["datasetReference"]["datasetId"]
+            tables = (
+                service.tables()
+                .list(projectId=project_id, datasetId=dataset_id)
+                .execute()
+            )
             while True:
-                for table in tables.get('tables', []):
-                    table_data = service.tables().get(projectId=project_id,
-                                                      datasetId=dataset_id,
-                                                      tableId=table['tableReference']['tableId']).execute()
+                for table in tables.get("tables", []):
+                    table_data = (
+                        service.tables()
+                        .get(
+                            projectId=project_id,
+                            datasetId=dataset_id,
+                            tableId=table["tableReference"]["tableId"],
+                        )
+                        .execute()
+                    )
                     table_schema = self._get_columns_schema(table_data)
                     schema.append(table_schema)

-                next_token = tables.get('nextPageToken', None)
+                next_token = tables.get("nextPageToken", None)
                 if next_token is None:
                     break

-                tables = service.tables().list(projectId=project_id,
-                                               datasetId=dataset_id,
-                                               pageToken=next_token).execute()
+                tables = (
+                    service.tables()
+                    .list(
+                        projectId=project_id, datasetId=dataset_id, pageToken=next_token
+                    )
+                    .execute()
+                )

         return schema

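The `get_schema` hunk above pages through `tables().list` by following `nextPageToken` until it is absent. That page-token loop generalizes to any list-style API; a minimal sketch, where `list_page` is a hypothetical callable standing in for calls like `service.tables().list(...).execute()`:

    def iter_pages(list_page):
        """Yield every item from a paginated API.

        `list_page` takes an optional page_token and returns a dict with an
        "items" list and, while more pages remain, a "nextPageToken" key.
        """
        token = None
        while True:
            reply = list_page(page_token=token)
            for item in reply.get("items", []):
                yield item
            token = reply.get("nextPageToken")
            if token is None:
                break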
@@ -296,9 +312,15 @@ class BigQuery(BaseQueryRunner):
         try:
             if "totalMBytesProcessedLimit" in self.configuration:
                 limitMB = self.configuration["totalMBytesProcessedLimit"]
-                processedMB = self._get_total_bytes_processed(jobs, query) / 1000.0 / 1000.0
+                processedMB = (
+                    self._get_total_bytes_processed(jobs, query) / 1000.0 / 1000.0
+                )
                 if limitMB < processedMB:
-                    return None, "Larger than %d MBytes will be processed (%f MBytes)" % (limitMB, processedMB)
+                    return (
+                        None,
+                        "Larger than %d MBytes will be processed (%f MBytes)"
+                        % (limitMB, processedMB),
+                    )

             data = self._get_query_result(jobs, query)
             error = None
@@ -307,7 +329,7 @@ class BigQuery(BaseQueryRunner):
         except apiclient.errors.HttpError as e:
             json_data = None
             if e.resp.status == 400:
-                error = json_loads(e.content)['error']['message']
+                error = json_loads(e.content)["error"]["message"]
             else:
                 error = e.content
         except KeyboardInterrupt:

@@ -25,7 +25,7 @@ class BigQueryGCE(BigQuery):
|
||||
|
||||
try:
|
||||
# check if we're on a GCE instance
|
||||
requests.get('http://metadata.google.internal')
|
||||
requests.get("http://metadata.google.internal")
|
||||
except requests.exceptions.ConnectionError:
|
||||
return False
|
||||
|
||||
@@ -34,38 +34,40 @@ class BigQueryGCE(BigQuery):
|
||||
@classmethod
|
||||
def configuration_schema(cls):
|
||||
return {
|
||||
'type': 'object',
|
||||
'properties': {
|
||||
'totalMBytesProcessedLimit': {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"totalMBytesProcessedLimit": {
|
||||
"type": "number",
|
||||
'title': 'Total MByte Processed Limit'
|
||||
"title": "Total MByte Processed Limit",
|
||||
},
|
||||
'userDefinedFunctionResourceUri': {
|
||||
"userDefinedFunctionResourceUri": {
|
||||
"type": "string",
|
||||
'title': 'UDF Source URIs (i.e. gs://bucket/date_utils.js, gs://bucket/string_utils.js )'
|
||||
"title": "UDF Source URIs (i.e. gs://bucket/date_utils.js, gs://bucket/string_utils.js )",
|
||||
},
|
||||
'useStandardSql': {
|
||||
"useStandardSql": {
|
||||
"type": "boolean",
|
||||
'title': "Use Standard SQL",
|
||||
"title": "Use Standard SQL",
|
||||
"default": True,
|
||||
},
|
||||
'location': {
|
||||
"location": {
|
||||
"type": "string",
|
||||
"title": "Processing Location",
|
||||
"default": "US",
|
||||
},
|
||||
'loadSchema': {
|
||||
"type": "boolean",
|
||||
"title": "Load Schema"
|
||||
}
|
||||
}
|
||||
"loadSchema": {"type": "boolean", "title": "Load Schema"},
|
||||
},
|
||||
}
|
||||
|
||||
def _get_project_id(self):
|
||||
return requests.get('http://metadata/computeMetadata/v1/project/project-id', headers={'Metadata-Flavor': 'Google'}).content
|
||||
return requests.get(
|
||||
"http://metadata/computeMetadata/v1/project/project-id",
|
||||
headers={"Metadata-Flavor": "Google"},
|
||||
).content
|
||||
|
||||
def _get_bigquery_service(self):
|
||||
credentials = gce.AppAssertionCredentials(scope='https://www.googleapis.com/auth/bigquery')
|
||||
credentials = gce.AppAssertionCredentials(
|
||||
scope="https://www.googleapis.com/auth/bigquery"
|
||||
)
|
||||
http = httplib2.Http()
|
||||
http = credentials.authorize(http)
|
||||
|
||||
|
@@ -9,6 +9,7 @@ try:
     from cassandra.cluster import Cluster
     from cassandra.auth import PlainTextAuthProvider
     from cassandra.util import sortedset
+
     enabled = True
 except ImportError:
     enabled = False
@@ -31,39 +32,21 @@ class Cassandra(BaseQueryRunner):
     @classmethod
     def configuration_schema(cls):
         return {
-            'type': 'object',
-            'properties': {
-                'host': {
-                    'type': 'string',
+            "type": "object",
+            "properties": {
+                "host": {"type": "string"},
+                "port": {"type": "number", "default": 9042},
+                "keyspace": {"type": "string", "title": "Keyspace name"},
+                "username": {"type": "string", "title": "Username"},
+                "password": {"type": "string", "title": "Password"},
+                "protocol": {
+                    "type": "number",
+                    "title": "Protocol Version",
+                    "default": 3,
                 },
-                'port': {
-                    'type': 'number',
-                    'default': 9042,
-                },
-                'keyspace': {
-                    'type': 'string',
-                    'title': 'Keyspace name'
-                },
-                'username': {
-                    'type': 'string',
-                    'title': 'Username'
-                },
-                'password': {
-                    'type': 'string',
-                    'title': 'Password'
-                },
-                'protocol': {
-                    'type': 'number',
-                    'title': 'Protocol Version',
-                    'default': 3
-                },
-                'timeout': {
-                    'type': 'number',
-                    'title': 'Timeout',
-                    'default': 10
-                }
+                "timeout": {"type": "number", "title": "Timeout", "default": 10},
             },
-            'required': ['keyspace', 'host']
+            "required": ["keyspace", "host"],
         }

     @classmethod
@@ -76,61 +59,73 @@ class Cassandra(BaseQueryRunner):
         """
         results, error = self.run_query(query, None)
         results = json_loads(results)
-        release_version = results['rows'][0]['release_version']
+        release_version = results["rows"][0]["release_version"]

         query = """
        SELECT table_name, column_name
        FROM system_schema.columns
        WHERE keyspace_name ='{}';
-        """.format(self.configuration['keyspace'])
+        """.format(
+            self.configuration["keyspace"]
+        )

-        if release_version.startswith('2'):
-            query = """
+        if release_version.startswith("2"):
+            query = """
            SELECT columnfamily_name AS table_name, column_name
            FROM system.schema_columns
            WHERE keyspace_name ='{}';
-            """.format(self.configuration['keyspace'])
+            """.format(
+                self.configuration["keyspace"]
+            )

         results, error = self.run_query(query, None)
         results = json_loads(results)

         schema = {}
-        for row in results['rows']:
-            table_name = row['table_name']
-            column_name = row['column_name']
+        for row in results["rows"]:
+            table_name = row["table_name"]
+            column_name = row["column_name"]
             if table_name not in schema:
-                schema[table_name] = {'name': table_name, 'columns': []}
-            schema[table_name]['columns'].append(column_name)
+                schema[table_name] = {"name": table_name, "columns": []}
+            schema[table_name]["columns"].append(column_name)

         return list(schema.values())

     def run_query(self, query, user):
         connection = None
         try:
-            if self.configuration.get('username', '') and self.configuration.get('password', ''):
-                auth_provider = PlainTextAuthProvider(username='{}'.format(self.configuration.get('username', '')),
-                                                      password='{}'.format(self.configuration.get('password', '')))
-                connection = Cluster([self.configuration.get('host', '')],
-                                     auth_provider=auth_provider,
-                                     port=self.configuration.get('port', ''),
-                                     protocol_version=self.configuration.get('protocol', 3))
+            if self.configuration.get("username", "") and self.configuration.get(
+                "password", ""
+            ):
+                auth_provider = PlainTextAuthProvider(
+                    username="{}".format(self.configuration.get("username", "")),
+                    password="{}".format(self.configuration.get("password", "")),
+                )
+                connection = Cluster(
+                    [self.configuration.get("host", "")],
+                    auth_provider=auth_provider,
+                    port=self.configuration.get("port", ""),
+                    protocol_version=self.configuration.get("protocol", 3),
+                )
             else:
-                connection = Cluster([self.configuration.get('host', '')],
-                                     port=self.configuration.get('port', ''),
-                                     protocol_version=self.configuration.get('protocol', 3))
+                connection = Cluster(
+                    [self.configuration.get("host", "")],
+                    port=self.configuration.get("port", ""),
+                    protocol_version=self.configuration.get("protocol", 3),
+                )
             session = connection.connect()
-            session.set_keyspace(self.configuration['keyspace'])
-            session.default_timeout = self.configuration.get('timeout', 10)
+            session.set_keyspace(self.configuration["keyspace"])
+            session.default_timeout = self.configuration.get("timeout", 10)
             logger.debug("Cassandra running query: %s", query)
             result = session.execute(query)

             column_names = result.column_names

-            columns = self.fetch_columns([(c, 'string') for c in column_names])
+            columns = self.fetch_columns([(c, "string") for c in column_names])

             rows = [dict(zip(column_names, row)) for row in result]

-            data = {'columns': columns, 'rows': rows}
+            data = {"columns": columns, "rows": rows}
             json_data = json_dumps(data, cls=CassandraJSONEncoder)

             error = None
@@ -142,7 +137,6 @@ class Cassandra(BaseQueryRunner):


 class ScyllaDB(Cassandra):
-
     @classmethod
     def type(cls):
         return "scylla"

@@ -17,30 +17,19 @@ class ClickHouse(BaseSQLQueryRunner):
         return {
             "type": "object",
             "properties": {
-                "url": {
-                    "type": "string",
-                    "default": "http://127.0.0.1:8123"
-                },
-                "user": {
-                    "type": "string",
-                    "default": "default"
-                },
-                "password": {
-                    "type": "string"
-                },
-                "dbname": {
-                    "type": "string",
-                    "title": "Database Name"
-                },
+                "url": {"type": "string", "default": "http://127.0.0.1:8123"},
+                "user": {"type": "string", "default": "default"},
+                "password": {"type": "string"},
+                "dbname": {"type": "string", "title": "Database Name"},
                 "timeout": {
                     "type": "number",
                     "title": "Request Timeout",
-                    "default": 30
-                }
+                    "default": 30,
+                },
             },
             "required": ["dbname"],
             "extra_options": ["timeout"],
-            "secret": ["password"]
+            "secret": ["password"],
         }

     @classmethod
@@ -57,29 +46,29 @@ class ClickHouse(BaseSQLQueryRunner):

         results = json_loads(results)

-        for row in results['rows']:
-            table_name = '{}.{}'.format(row['database'], row['table'])
+        for row in results["rows"]:
+            table_name = "{}.{}".format(row["database"], row["table"])

             if table_name not in schema:
-                schema[table_name] = {'name': table_name, 'columns': []}
+                schema[table_name] = {"name": table_name, "columns": []}

-            schema[table_name]['columns'].append(row['name'])
+            schema[table_name]["columns"].append(row["name"])

         return list(schema.values())

     def _send_query(self, data, stream=False):
-        url = self.configuration.get('url', "http://127.0.0.1:8123")
+        url = self.configuration.get("url", "http://127.0.0.1:8123")
         try:
             r = requests.post(
                 url,
                 data=data.encode("utf-8"),
                 stream=stream,
-                timeout=self.configuration.get('timeout', 30),
+                timeout=self.configuration.get("timeout", 30),
                 params={
-                    'user': self.configuration.get('user', "default"),
-                    'password': self.configuration.get('password', ""),
-                    'database': self.configuration['dbname']
-                }
+                    "user": self.configuration.get("user", "default"),
+                    "password": self.configuration.get("password", ""),
+                    "database": self.configuration["dbname"],
+                },
             )
             if r.status_code != 200:
                 raise Exception(r.text)
@@ -87,7 +76,9 @@ class ClickHouse(BaseSQLQueryRunner):
             return r.json()
         except requests.RequestException as e:
             if e.response:
-                details = "({}, Status Code: {})".format(e.__class__.__name__, e.response.status_code)
+                details = "({}, Status Code: {})".format(
+                    e.__class__.__name__, e.response.status_code
+                )
             else:
                 details = "({})".format(e.__class__.__name__)
             raise Exception("Connection error to: {} {}.".format(url, details))
@@ -95,39 +86,43 @@ class ClickHouse(BaseSQLQueryRunner):
     @staticmethod
     def _define_column_type(column):
         c = column.lower()
-        f = re.search(r'^nullable\((.*)\)$', c)
+        f = re.search(r"^nullable\((.*)\)$", c)
         if f is not None:
             c = f.group(1)
-        if c.startswith('int') or c.startswith('uint'):
+        if c.startswith("int") or c.startswith("uint"):
             return TYPE_INTEGER
-        elif c.startswith('float'):
+        elif c.startswith("float"):
             return TYPE_FLOAT
-        elif c == 'datetime':
+        elif c == "datetime":
             return TYPE_DATETIME
-        elif c == 'date':
+        elif c == "date":
             return TYPE_DATE
         else:
             return TYPE_STRING

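`_define_column_type` above strips a `Nullable(...)` wrapper before prefix-matching the base ClickHouse type. A self-contained rendering of the same idea, with plain strings standing in for Redash's `TYPE_*` constants:

    import re

    def define_column_type(column):
        # Unwrap Nullable(Foo) -> foo before matching, as the runner does
        c = column.lower()
        wrapped = re.search(r"^nullable\((.*)\)$", c)
        if wrapped is not None:
            c = wrapped.group(1)
        if c.startswith("int") or c.startswith("uint"):
            return "integer"
        elif c.startswith("float"):
            return "float"
        elif c == "datetime":
            return "datetime"
        elif c == "date":
            return "date"
        return "string"

    assert define_column_type("Nullable(UInt64)") == "integer"
    assert define_column_type("Float32") == "float"
    assert define_column_type("FixedString(16)") == "string"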
     def _clickhouse_query(self, query):
-        query += '\nFORMAT JSON'
+        query += "\nFORMAT JSON"
         result = self._send_query(query)
         columns = []
         columns_int64 = []  # db converts value to string if its type equals UInt64
         columns_totals = {}

-        for r in result['meta']:
-            column_name = r['name']
-            column_type = self._define_column_type(r['type'])
+        for r in result["meta"]:
+            column_name = r["name"]
+            column_type = self._define_column_type(r["type"])

-            if r['type'] in ('Int64', 'UInt64', 'Nullable(Int64)', 'Nullable(UInt64)'):
+            if r["type"] in ("Int64", "UInt64", "Nullable(Int64)", "Nullable(UInt64)"):
                 columns_int64.append(column_name)
             else:
-                columns_totals[column_name] = 'Total' if column_type == TYPE_STRING else None
+                columns_totals[column_name] = (
+                    "Total" if column_type == TYPE_STRING else None
+                )

-            columns.append({'name': column_name, 'friendly_name': column_name, 'type': column_type})
+            columns.append(
+                {"name": column_name, "friendly_name": column_name, "type": column_type}
+            )

-        rows = result['data']
+        rows = result["data"]
         for row in rows:
             for column in columns_int64:
                 try:
@@ -135,13 +130,13 @@ class ClickHouse(BaseSQLQueryRunner):
                 except TypeError:
                     row[column] = None

-        if 'totals' in result:
-            totals = result['totals']
+        if "totals" in result:
+            totals = result["totals"]
             for column, value in columns_totals.items():
                 totals[column] = value
             rows.append(totals)

-        return {'columns': columns, 'rows': rows}
+        return {"columns": columns, "rows": rows}

     def run_query(self, query, user):
         logger.debug("Clickhouse is about to execute query: %s", query)

@@ -13,7 +13,7 @@ try:
     import requests
     import httplib2
 except ImportError as e:
-    logger.error('Failed to import: ' + str(e))
+    logger.error("Failed to import: " + str(e))


 TYPES_MAP = {
@@ -23,7 +23,7 @@ TYPES_MAP = {
     float: TYPE_FLOAT,
     bool: TYPE_BOOLEAN,
     datetime.datetime: TYPE_DATETIME,
-    datetime.datetime: TYPE_STRING
+    datetime.datetime: TYPE_STRING,
 }


@@ -43,23 +43,29 @@ def parse_results(results):
         for key in row:
             if isinstance(row[key], dict):
                 for inner_key in row[key]:
-                    column_name = '{}.{}'.format(key, inner_key)
+                    column_name = "{}.{}".format(key, inner_key)
                     if _get_column_by_name(columns, column_name) is None:
-                        columns.append({
-                            "name": column_name,
-                            "friendly_name": column_name,
-                            "type": TYPES_MAP.get(type(row[key][inner_key]), TYPE_STRING)
-                        })
+                        columns.append(
+                            {
+                                "name": column_name,
+                                "friendly_name": column_name,
+                                "type": TYPES_MAP.get(
+                                    type(row[key][inner_key]), TYPE_STRING
+                                ),
+                            }
+                        )

                     parsed_row[column_name] = row[key][inner_key]

             else:
                 if _get_column_by_name(columns, key) is None:
-                    columns.append({
-                        "name": key,
-                        "friendly_name": key,
-                        "type": TYPES_MAP.get(type(row[key]), TYPE_STRING)
-                    })
+                    columns.append(
+                        {
+                            "name": key,
+                            "friendly_name": key,
+                            "type": TYPES_MAP.get(type(row[key]), TYPE_STRING),
+                        }
+                    )

                 parsed_row[key] = row[key]

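`parse_results` above flattens one level of nested objects into dotted column names such as `key.inner`. The flattening rule on its own, separated from the runner's column bookkeeping, looks roughly like this (the sample row is invented):

    def flatten_row(row):
        """Flatten one level of nesting into dotted keys, as parse_results does."""
        flat = {}
        for key, value in row.items():
            if isinstance(value, dict):
                for inner_key, inner_value in value.items():
                    flat["{}.{}".format(key, inner_key)] = inner_value
            else:
                flat[key] = value
        return flat

    print(flatten_row({"id": 1, "geo": {"lat": 32.0, "lon": 34.8}}))
    # {'id': 1, 'geo.lat': 32.0, 'geo.lon': 34.8}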
@@ -69,35 +75,26 @@ def parse_results(results):

 class Couchbase(BaseQueryRunner):
     should_annotate_query = False
-    noop_query = 'Select 1'
+    noop_query = "Select 1"

     @classmethod
     def configuration_schema(cls):
         return {
-            'type': 'object',
-            'properties': {
-                'protocol': {
-                    'type': 'string',
-                    'default': 'http'
-                },
-                'host': {
-                    'type': 'string',
-                },
-                'port': {
-                    'type': 'string',
-                    'title': 'Port (Defaults: 8095 - Analytics, 8093 - N1QL)',
-                    'default': '8095'
-                },
-                'user': {
-                    'type': 'string',
-                },
-                'password': {
-                    'type': 'string',
+            "type": "object",
+            "properties": {
+                "protocol": {"type": "string", "default": "http"},
+                "host": {"type": "string"},
+                "port": {
+                    "type": "string",
+                    "title": "Port (Defaults: 8095 - Analytics, 8093 - N1QL)",
+                    "default": "8095",
+                },
+                "user": {"type": "string"},
+                "password": {"type": "string"},
             },
-            'required': ['host', 'user', 'password'],
-            'order': ['protocol', 'host', 'port', 'user', 'password'],
-            'secret': ['password']
+            "required": ["host", "user", "password"],
+            "order": ["protocol", "host", "port", "user", "password"],
+            "secret": ["password"],
         }

     def __init__(self, configuration):
@@ -108,17 +105,15 @@ class Couchbase(BaseQueryRunner):
         return True

     def test_connection(self):
-        result = self.call_service(self.noop_query, '')
+        result = self.call_service(self.noop_query, "")

     def get_buckets(self, query, name_param):
-        defaultColumns = [
-            'meta().id'
-        ]
-        result = self.call_service(query, "").json()['results']
+        defaultColumns = ["meta().id"]
+        result = self.call_service(query, "").json()["results"]
         schema = {}
         for row in result:
             table_name = row.get(name_param)
-            schema[table_name] = {'name': table_name, 'columns': defaultColumns}
+            schema[table_name] = {"name": table_name, "columns": defaultColumns}

         return list(schema.values())

@@ -127,7 +122,9 @@ class Couchbase(BaseQueryRunner):
         try:
             # Try fetch from Analytics
             return self.get_buckets(
-                "SELECT ds.GroupName as name FROM Metadata.`Dataset` ds where ds.DataverseName <> 'Metadata'", "name")
+                "SELECT ds.GroupName as name FROM Metadata.`Dataset` ds where ds.DataverseName <> 'Metadata'",
+                "name",
+            )
         except Exception:
             # Try fetch from N1QL
             return self.get_buckets("select name from system:keyspaces", "name")
@@ -139,7 +136,7 @@ class Couchbase(BaseQueryRunner):
         protocol = self.configuration.get("protocol", "http")
         host = self.configuration.get("host")
         port = self.configuration.get("port", 8095)
-        params = {'statement': query}
+        params = {"statement": query}

         url = "%s://%s:%s/query/service" % (protocol, host, port)

@@ -147,7 +144,7 @@ class Couchbase(BaseQueryRunner):
             r.raise_for_status()
             return r
         except requests.exceptions.HTTPError as err:
-            if (err.response.status_code == 401):
+            if err.response.status_code == 401:
                 raise Exception("Wrong username/password")
             raise Exception("Couchbase connection error")

@@ -155,11 +152,8 @@ class Couchbase(BaseQueryRunner):
         try:
             result = self.call_service(query, user)

-            rows, columns = parse_results(result.json()['results'])
-            data = {
-                "columns": columns,
-                "rows": rows
-            }
+            rows, columns = parse_results(result.json()["results"])
+            data = {"columns": columns, "rows": rows}

             return json_dumps(data), None
         except KeyboardInterrupt:

@@ -5,6 +5,7 @@ from redash.query_runner import register
 try:
     from pyhive import hive
     from thrift.transport import THttpClient
+
     enabled = True
 except ImportError:
     enabled = False
@@ -24,41 +25,31 @@ class Databricks(Hive):
         return {
             "type": "object",
             "properties": {
-                "host": {
-                    "type": "string"
-                },
-                "database": {
-                    "type": "string"
-                },
-                "http_path": {
-                    "type": "string",
-                    "title": "HTTP Path"
-                },
-                "http_password": {
-                    "type": "string",
-                    "title": "Access Token"
-                },
+                "host": {"type": "string"},
+                "database": {"type": "string"},
+                "http_path": {"type": "string", "title": "HTTP Path"},
+                "http_password": {"type": "string", "title": "Access Token"},
             },
             "order": ["host", "http_path", "http_password", "database"],
             "secret": ["http_password"],
-            "required": ["host", "database", "http_path", "http_password"]
+            "required": ["host", "database", "http_path", "http_password"],
         }

     def _get_connection(self):
-        host = self.configuration['host']
+        host = self.configuration["host"]

         # if path is set but is missing initial slash, append it
-        path = self.configuration.get('http_path', '')
-        if path and path[0] != '/':
-            path = '/' + path
+        path = self.configuration.get("http_path", "")
+        if path and path[0] != "/":
+            path = "/" + path

         http_uri = "https://{}{}".format(host, path)

         transport = THttpClient.THttpClient(http_uri)

-        password = self.configuration.get('http_password', '')
-        auth = base64.b64encode('token:' + password)
-        transport.setCustomHeaders({'Authorization': 'Basic ' + auth})
+        password = self.configuration.get("http_password", "")
+        auth = base64.b64encode("token:" + password)
+        transport.setCustomHeaders({"Authorization": "Basic " + auth})

         connection = hive.connect(thrift_transport=transport)
         return connection
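One thing Black deliberately leaves alone is semantics: `base64.b64encode("token:" + password)` above still assumes a `str` input, which `b64encode` rejects under Python 3. A hedged sketch of a bytes-safe construction of the same Basic-auth header, not code from this commit:

    import base64

    def basic_auth_header(token):
        # b64encode works on bytes; encode the credentials first, then decode
        # the result back to str for use as an HTTP header value.
        auth = base64.b64encode(("token:" + token).encode("utf-8")).decode("ascii")
        return {"Authorization": "Basic " + auth}

    print(basic_auth_header("secret"))  # {'Authorization': 'Basic dG9rZW46c2VjcmV0'}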
@@ -70,14 +61,32 @@ class Databricks(Hive):
|
||||
|
||||
schemas = self._run_query_internal(schemas_query)
|
||||
|
||||
for schema_name in [a for a in [str(a['databaseName']) for a in schemas] if len(a) > 0]:
|
||||
for table_name in [a for a in [str(a['tableName']) for a in self._run_query_internal(tables_query % schema_name)] if len(a) > 0]:
|
||||
columns = [a for a in [str(a['col_name']) for a in self._run_query_internal(columns_query % (schema_name, table_name))] if len(a) > 0]
|
||||
for schema_name in [
|
||||
a for a in [str(a["databaseName"]) for a in schemas] if len(a) > 0
|
||||
]:
|
||||
for table_name in [
|
||||
a
|
||||
for a in [
|
||||
str(a["tableName"])
|
||||
for a in self._run_query_internal(tables_query % schema_name)
|
||||
]
|
||||
if len(a) > 0
|
||||
]:
|
||||
columns = [
|
||||
a
|
||||
for a in [
|
||||
str(a["col_name"])
|
||||
for a in self._run_query_internal(
|
||||
columns_query % (schema_name, table_name)
|
||||
)
|
||||
]
|
||||
if len(a) > 0
|
||||
]
|
||||
|
||||
if schema_name != 'default':
|
||||
table_name = '{}.{}'.format(schema_name, table_name)
|
||||
if schema_name != "default":
|
||||
table_name = "{}.{}".format(schema_name, table_name)
|
||||
|
||||
schema[table_name] = {'name': table_name, 'columns': columns}
|
||||
schema[table_name] = {"name": table_name, "columns": columns}
|
||||
return list(schema.values())
|
||||
|
||||
|
||||
|
@@ -21,7 +21,7 @@ try:
         ibm_db_dbi.BINARY: TYPE_STRING,
         ibm_db_dbi.XML: TYPE_STRING,
         ibm_db_dbi.TEXT: TYPE_STRING,
-        ibm_db_dbi.STRING: TYPE_STRING
+        ibm_db_dbi.STRING: TYPE_STRING,
     }

     enabled = True
@@ -37,28 +37,15 @@ class DB2(BaseSQLQueryRunner):
         return {
             "type": "object",
             "properties": {
-                "user": {
-                    "type": "string"
-                },
-                "password": {
-                    "type": "string"
-                },
-                "host": {
-                    "type": "string",
-                    "default": "127.0.0.1"
-                },
-                "port": {
-                    "type": "number",
-                    "default": 50000
-                },
-                "dbname": {
-                    "type": "string",
-                    "title": "Database Name"
-                }
+                "user": {"type": "string"},
+                "password": {"type": "string"},
+                "host": {"type": "string", "default": "127.0.0.1"},
+                "port": {"type": "number", "default": 50000},
+                "dbname": {"type": "string", "title": "Database Name"},
             },
-            "order": ['host', 'port', 'user', 'password', 'dbname'],
+            "order": ["host", "port", "user", "password", "dbname"],
             "required": ["dbname"],
-            "secret": ["password"]
+            "secret": ["password"],
         }

     @classmethod
@@ -82,16 +69,16 @@ class DB2(BaseSQLQueryRunner):

         results = json_loads(results)

-        for row in results['rows']:
-            if row['TABLE_SCHEMA'] != 'public':
-                table_name = '{}.{}'.format(row['TABLE_SCHEMA'], row['TABLE_NAME'])
+        for row in results["rows"]:
+            if row["TABLE_SCHEMA"] != "public":
+                table_name = "{}.{}".format(row["TABLE_SCHEMA"], row["TABLE_NAME"])
             else:
-                table_name = row['TABLE_NAME']
+                table_name = row["TABLE_NAME"]

             if table_name not in schema:
-                schema[table_name] = {'name': table_name, 'columns': []}
+                schema[table_name] = {"name": table_name, "columns": []}

-            schema[table_name]['columns'].append(row['COLUMN_NAME'])
+            schema[table_name]["columns"].append(row["COLUMN_NAME"])

     def _get_tables(self, schema):
         query = """
@@ -109,7 +96,12 @@ class DB2(BaseSQLQueryRunner):

     def _get_connection(self):
         self.connection_string = "DATABASE={};HOSTNAME={};PORT={};PROTOCOL=TCPIP;UID={};PWD={};".format(
-            self.configuration["dbname"], self.configuration["host"], self.configuration["port"], self.configuration["user"], self.configuration["password"])
+            self.configuration["dbname"],
+            self.configuration["host"],
+            self.configuration["port"],
+            self.configuration["user"],
+            self.configuration["password"],
+        )
         connection = ibm_db_dbi.connect(self.connection_string, "", "")

         return connection
@@ -122,14 +114,19 @@ class DB2(BaseSQLQueryRunner):
             cursor.execute(query)

             if cursor.description is not None:
-                columns = self.fetch_columns([(i[0], types_map.get(i[1], None)) for i in cursor.description])
-                rows = [dict(zip((column['name'] for column in columns), row)) for row in cursor]
+                columns = self.fetch_columns(
+                    [(i[0], types_map.get(i[1], None)) for i in cursor.description]
+                )
+                rows = [
+                    dict(zip((column["name"] for column in columns), row))
+                    for row in cursor
+                ]

-                data = {'columns': columns, 'rows': rows}
+                data = {"columns": columns, "rows": rows}
                 error = None
                 json_data = json_dumps(data)
             else:
-                error = 'Query completed but it returned no data.'
+                error = "Query completed but it returned no data."
                 json_data = None
         except (select.error, OSError) as e:
             error = "Query interrupted. Please retry."

@@ -2,6 +2,7 @@ import json

 try:
     import pydgraph
+
     enabled = True
 except ImportError:
     enabled = False
@@ -15,13 +16,13 @@ def reduce_item(reduced_item, key, value):
     # Reduction Condition 1
     if type(value) is list:
         for i, sub_item in enumerate(value):
-            reduce_item(reduced_item, '{}.{}'.format(key, i), sub_item)
+            reduce_item(reduced_item, "{}.{}".format(key, i), sub_item)

     # Reduction Condition 2
     elif type(value) is dict:
         sub_keys = value.keys()
         for sub_key in sub_keys:
-            reduce_item(reduced_item, '{}.{}'.format(key, sub_key), value[sub_key])
+            reduce_item(reduced_item, "{}.{}".format(key, sub_key), value[sub_key])

     # Base Condition
     else:
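Unlike the Couchbase parser, `reduce_item` above recurses through arbitrarily deep lists and dicts, numbering list elements by index; its base `else:` branch falls outside the hunk. A self-contained re-implementation of the same flattening idea, where the base-case assignment is an assumption inferred from how the function is used, not a line from this diff:

    def reduce_item(reduced_item, key, value):
        # Lists recurse with the element index appended to the key path
        if type(value) is list:
            for i, sub_item in enumerate(value):
                reduce_item(reduced_item, "{}.{}".format(key, i), sub_item)
        # Dicts recurse with the sub-key appended to the key path
        elif type(value) is dict:
            for sub_key in value.keys():
                reduce_item(reduced_item, "{}.{}".format(key, sub_key), value[sub_key])
        # Base case (assumed): a scalar lands at its fully dotted path
        else:
            reduced_item[key] = value

    flat = {}
    reduce_item(flat, "person", {"name": "Ada", "pets": [{"kind": "cat"}]})
    print(flat)  # {'person.name': 'Ada', 'person.pets.0.kind': 'cat'}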
@@ -42,19 +43,13 @@ class Dgraph(BaseQueryRunner):
         return {
             "type": "object",
             "properties": {
-                "user": {
-                    "type": "string"
-                },
-                "password": {
-                    "type": "string"
-                },
-                "servers": {
-                    "type": "string"
-                }
+                "user": {"type": "string"},
+                "password": {"type": "string"},
+                "servers": {"type": "string"},
             },
             "order": ["servers", "user", "password"],
             "required": ["servers"],
-            "secret": ["password"]
+            "secret": ["password"],
         }

     @classmethod
@@ -66,7 +61,7 @@ class Dgraph(BaseQueryRunner):
         return enabled

     def run_dgraph_query_raw(self, query):
-        servers = self.configuration.get('servers')
+        servers = self.configuration.get("servers")

         client_stub = pydgraph.DgraphClientStub(servers)
         client = pydgraph.DgraphClient(client_stub)
@@ -111,10 +106,12 @@ class Dgraph(BaseQueryRunner):

             header = list(set(header))

-            columns = [{'name': c, 'friendly_name': c, 'type': 'string'} for c in header]
+            columns = [
+                {"name": c, "friendly_name": c, "type": "string"} for c in header
+            ]

             # finally, assemble both the columns and data
-            data = {'columns': columns, 'rows': processed_data}
+            data = {"columns": columns, "rows": processed_data}

             json_data = json_dumps(data)
         except Exception as e:
@@ -132,11 +129,11 @@ class Dgraph(BaseQueryRunner):

         schema = {}

-        for row in results['schema']:
-            table_name = row['predicate']
+        for row in results["schema"]:
+            table_name = row["predicate"]

             if table_name not in schema:
-                schema[table_name] = {'name': table_name, 'columns': []}
+                schema[table_name] = {"name": table_name, "columns": []}

         return list(schema.values())

@@ -7,9 +7,13 @@ from dateutil import parser
 from six import text_type

 from redash.query_runner import (
-    BaseHTTPQueryRunner, register,
-    TYPE_DATETIME, TYPE_INTEGER, TYPE_FLOAT, TYPE_BOOLEAN,
-    guess_type
+    BaseHTTPQueryRunner,
+    register,
+    TYPE_DATETIME,
+    TYPE_INTEGER,
+    TYPE_FLOAT,
+    TYPE_BOOLEAN,
+    guess_type,
 )
 from redash.utils import json_dumps, json_loads

@@ -18,8 +22,8 @@ logger = logging.getLogger(__name__)

 # Convert Drill string value to actual type
 def convert_type(string_value, actual_type):
-    if string_value is None or string_value == '':
-        return ''
+    if string_value is None or string_value == "":
+        return ""

     if actual_type == TYPE_INTEGER:
         return int(string_value)
@@ -28,7 +32,7 @@ def convert_type(string_value, actual_type):
         return float(string_value)

     if actual_type == TYPE_BOOLEAN:
-        return text_type(string_value).lower() == 'true'
+        return text_type(string_value).lower() == "true"

     if actual_type == TYPE_DATETIME:
         return parser.parse(string_value)
@@ -38,41 +42,43 @@ def convert_type(string_value, actual_type):

 # Parse Drill API response and translate it to accepted format
 def parse_response(data):
-    cols = data['columns']
-    rows = data['rows']
+    cols = data["columns"]
+    rows = data["rows"]

     if len(cols) == 0:
-        return {'columns': [], 'rows': []}
+        return {"columns": [], "rows": []}

     first_row = rows[0]
     columns = []
     types = {}

     for c in cols:
-        columns.append({'name': c, 'type': guess_type(first_row[c]), 'friendly_name': c})
+        columns.append(
+            {"name": c, "type": guess_type(first_row[c]), "friendly_name": c}
+        )

     for col in columns:
-        types[col['name']] = col['type']
+        types[col["name"]] = col["type"]

     for row in rows:
         for key, value in row.items():
             row[key] = convert_type(value, types[key])

-    return {'columns': columns, 'rows': rows}
+    return {"columns": columns, "rows": rows}

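`parse_response` above guesses each column's type from the first row only and then coerces every value through `convert_type`. A compact sketch of that first-row inference, with a crude stand-in for the `guess_type` helper (which this hunk imports but does not show):

    def guess_type(value):
        # Crude stand-in for the runner's guess_type helper
        for caster, name in ((int, "integer"), (float, "float")):
            try:
                caster(value)
                return name
            except (TypeError, ValueError):
                pass
        return "string"

    rows = [{"a": "1", "b": "x"}, {"a": "2", "b": "y"}]
    types = {key: guess_type(value) for key, value in rows[0].items()}
    print(types)  # {'a': 'integer', 'b': 'string'}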
 class Drill(BaseHTTPQueryRunner):
-    noop_query = 'select version from sys.version'
+    noop_query = "select version from sys.version"
     response_error = "Drill API returned unexpected status code"
     requires_authentication = False
     requires_url = True
-    url_title = 'Drill URL'
-    username_title = 'Username'
-    password_title = 'Password'
+    url_title = "Drill URL"
+    username_title = "Username"
+    password_title = "Password"

     @classmethod
     def name(cls):
-        return 'Apache Drill'
+        return "Apache Drill"

     @classmethod
     def configuration_schema(cls):
@@ -80,20 +86,22 @@ class Drill(BaseHTTPQueryRunner):
         # Since Drill itself can act as aggregator of various datasources,
         # it can contain quite a lot of schemas in `INFORMATION_SCHEMA`
         # We added this to improve user experience and let users focus only on desired schemas.
-        schema['properties']['allowed_schemas'] = {
-            'type': 'string',
-            'title': 'List of schemas to use in schema browser (comma separated)'
+        schema["properties"]["allowed_schemas"] = {
+            "type": "string",
+            "title": "List of schemas to use in schema browser (comma separated)",
         }
-        schema['order'] += ['allowed_schemas']
+        schema["order"] += ["allowed_schemas"]
         return schema

     def run_query(self, query, user):
-        drill_url = os.path.join(self.configuration['url'], 'query.json')
+        drill_url = os.path.join(self.configuration["url"], "query.json")

         try:
-            payload = {'queryType': 'SQL', 'query': query}
+            payload = {"queryType": "SQL", "query": query}

-            response, error = self.get_response(drill_url, http_method='post', json=payload)
+            response, error = self.get_response(
+                drill_url, http_method="post", json=payload
+            )
             if error is not None:
                 return None, error

@@ -101,7 +109,7 @@ class Drill(BaseHTTPQueryRunner):

             return json_dumps(results), None
         except KeyboardInterrupt:
-            return None, 'Query cancelled by user.'
+            return None, "Query cancelled by user."

     def get_schema(self, get_stats=False):

@@ -118,9 +126,16 @@ class Drill(BaseHTTPQueryRunner):
         and TABLE_SCHEMA not like '%.INFORMATION_SCHEMA'

         """
-        allowed_schemas = self.configuration.get('allowed_schemas')
+        allowed_schemas = self.configuration.get("allowed_schemas")
         if allowed_schemas:
-            query += "and TABLE_SCHEMA in ({})".format(', '.join(["'{}'".format(re.sub('[^a-zA-Z0-9_.`]', '', allowed_schema)) for allowed_schema in allowed_schemas.split(',')]))
+            query += "and TABLE_SCHEMA in ({})".format(
+                ", ".join(
+                    [
+                        "'{}'".format(re.sub("[^a-zA-Z0-9_.`]", "", allowed_schema))
+                        for allowed_schema in allowed_schemas.split(",")
+                    ]
+                )
+            )

         results, error = self.run_query(query, None)

@@ -131,13 +146,13 @@ class Drill(BaseHTTPQueryRunner):

         schema = {}

-        for row in results['rows']:
-            table_name = '{}.{}'.format(row['TABLE_SCHEMA'], row['TABLE_NAME'])
+        for row in results["rows"]:
+            table_name = "{}.{}".format(row["TABLE_SCHEMA"], row["TABLE_NAME"])

             if table_name not in schema:
-                schema[table_name] = {'name': table_name, 'columns': []}
+                schema[table_name] = {"name": table_name, "columns": []}

-            schema[table_name]['columns'].append(row['COLUMN_NAME'])
+            schema[table_name]["columns"].append(row["COLUMN_NAME"])

         return list(schema.values())

@@ -1,5 +1,6 @@
 try:
     from pydruid.db import connect
+
     enabled = True
 except ImportError:
     enabled = False
@@ -8,11 +9,7 @@ from redash.query_runner import BaseQueryRunner, register
 from redash.query_runner import TYPE_STRING, TYPE_INTEGER, TYPE_BOOLEAN
 from redash.utils import json_dumps, json_loads

-TYPES_MAP = {
-    1: TYPE_STRING,
-    2: TYPE_INTEGER,
-    3: TYPE_BOOLEAN,
-}
+TYPES_MAP = {1: TYPE_STRING, 2: TYPE_INTEGER, 3: TYPE_BOOLEAN}


 class Druid(BaseQueryRunner):
@@ -23,28 +20,15 @@ class Druid(BaseQueryRunner):
         return {
             "type": "object",
             "properties": {
-                "host": {
-                    "type": "string",
-                    "default": "localhost"
-                },
-                "port": {
-                    "type": "number",
-                    "default": 8082
-                },
-                "scheme": {
-                    "type": "string",
-                    "default": "http"
-                },
-                "user": {
-                    "type": "string"
-                },
-                "password": {
-                    "type": "string"
-                }
+                "host": {"type": "string", "default": "localhost"},
+                "port": {"type": "number", "default": 8082},
+                "scheme": {"type": "string", "default": "http"},
+                "user": {"type": "string"},
+                "password": {"type": "string"},
             },
-            "order": ['scheme', 'host', 'port', 'user', 'password'],
-            "required": ['host'],
-            "secret": ['password']
+            "order": ["scheme", "host", "port", "user", "password"],
+            "required": ["host"],
+            "secret": ["password"],
         }

     @classmethod
@@ -52,21 +36,27 @@ class Druid(BaseQueryRunner):
         return enabled

     def run_query(self, query, user):
-        connection = connect(host=self.configuration['host'],
-                             port=self.configuration['port'],
-                             path='/druid/v2/sql/',
-                             scheme=(self.configuration.get('scheme') or 'http'),
-                             user=(self.configuration.get('user') or None),
-                             password=(self.configuration.get('password') or None))
+        connection = connect(
+            host=self.configuration["host"],
+            port=self.configuration["port"],
+            path="/druid/v2/sql/",
+            scheme=(self.configuration.get("scheme") or "http"),
+            user=(self.configuration.get("user") or None),
+            password=(self.configuration.get("password") or None),
+        )

         cursor = connection.cursor()

         try:
             cursor.execute(query)
-            columns = self.fetch_columns([(i[0], TYPES_MAP.get(i[1], None)) for i in cursor.description])
-            rows = [dict(zip((column['name'] for column in columns), row)) for row in cursor]
+            columns = self.fetch_columns(
+                [(i[0], TYPES_MAP.get(i[1], None)) for i in cursor.description]
+            )
+            rows = [
+                dict(zip((column["name"] for column in columns), row)) for row in cursor
+            ]

-            data = {'columns': columns, 'rows': rows}
+            data = {"columns": columns, "rows": rows}
             error = None
             json_data = json_dumps(data)
             print(json_data)
@@ -92,13 +82,13 @@ class Druid(BaseQueryRunner):
         schema = {}
         results = json_loads(results)

-        for row in results['rows']:
-            table_name = '{}.{}'.format(row['TABLE_SCHEMA'], row['TABLE_NAME'])
+        for row in results["rows"]:
+            table_name = "{}.{}".format(row["TABLE_SCHEMA"], row["TABLE_NAME"])

             if table_name not in schema:
-                schema[table_name] = {'name': table_name, 'columns': []}
+                schema[table_name] = {"name": table_name, "columns": []}

-            schema[table_name]['columns'].append(row['COLUMN_NAME'])
+            schema[table_name]["columns"].append(row["COLUMN_NAME"])

         return list(schema.values())


@@ -10,25 +10,26 @@ try:
     from dql import Engine, FragmentEngine
     from dynamo3 import DynamoDBError
     from pyparsing import ParseException
+
     enabled = True
 except ImportError as e:
     enabled = False

 types_map = {
-    'UNICODE': TYPE_INTEGER,
-    'TINYINT': TYPE_INTEGER,
-    'SMALLINT': TYPE_INTEGER,
-    'INT': TYPE_INTEGER,
-    'DOUBLE': TYPE_FLOAT,
-    'DECIMAL': TYPE_FLOAT,
-    'FLOAT': TYPE_FLOAT,
-    'REAL': TYPE_FLOAT,
-    'BOOLEAN': TYPE_BOOLEAN,
-    'TIMESTAMP': TYPE_DATETIME,
-    'DATE': TYPE_DATETIME,
-    'CHAR': TYPE_STRING,
-    'STRING': TYPE_STRING,
-    'VARCHAR': TYPE_STRING
+    "UNICODE": TYPE_INTEGER,
+    "TINYINT": TYPE_INTEGER,
+    "SMALLINT": TYPE_INTEGER,
+    "INT": TYPE_INTEGER,
+    "DOUBLE": TYPE_FLOAT,
+    "DECIMAL": TYPE_FLOAT,
+    "FLOAT": TYPE_FLOAT,
+    "REAL": TYPE_FLOAT,
+    "BOOLEAN": TYPE_BOOLEAN,
+    "TIMESTAMP": TYPE_DATETIME,
+    "DATE": TYPE_DATETIME,
+    "CHAR": TYPE_STRING,
+    "STRING": TYPE_STRING,
+    "VARCHAR": TYPE_STRING,
 }


@@ -40,19 +41,12 @@ class DynamoDBSQL(BaseSQLQueryRunner):
         return {
             "type": "object",
             "properties": {
-                "region": {
-                    "type": "string",
-                    "default": "us-east-1"
-                },
-                "access_key": {
-                    "type": "string",
-                },
-                "secret_key": {
-                    "type": "string",
-                }
+                "region": {"type": "string", "default": "us-east-1"},
+                "access_key": {"type": "string"},
+                "secret_key": {"type": "string"},
             },
             "required": ["access_key", "secret_key"],
-            "secret": ["secret_key"]
+            "secret": ["secret_key"],
         }

     def test_connection(self):
@@ -71,11 +65,11 @@ class DynamoDBSQL(BaseSQLQueryRunner):
         engine = FragmentEngine()
         config = self.configuration.to_dict()

-        if not config.get('region'):
-            config['region'] = 'us-east-1'
+        if not config.get("region"):
+            config["region"] = "us-east-1"

-        if config.get('host') == '':
-            config['host'] = None
+        if config.get("host") == "":
+            config["host"] = None

         engine.connect(**config)

@@ -90,8 +84,10 @@ class DynamoDBSQL(BaseSQLQueryRunner):
         for table_name in tables:
             try:
                 table = engine.describe(table_name, True)
-                schema[table.name] = {'name': table.name,
-                                      'columns': list(table.attrs.keys())}
+                schema[table.name] = {
+                    "name": table.name,
+                    "columns": list(table.attrs.keys()),
+                }
             except DynamoDBError:
                 pass

@@ -100,8 +96,8 @@ class DynamoDBSQL(BaseSQLQueryRunner):
         try:
             engine = self._connect()

-            if not query.endswith(';'):
-                query = query + ';'
+            if not query.endswith(";"):
+                query = query + ";"

             result = engine.execute(query)

@@ -120,19 +116,22 @@ class DynamoDBSQL(BaseSQLQueryRunner):
         for item in result:
             if not columns:
                 for k, v in item.items():
-                    columns.append({
-                        'name': k,
-                        'friendly_name': k,
-                        'type': types_map.get(str(type(v)).upper(), None)
-                    })
+                    columns.append(
+                        {
+                            "name": k,
+                            "friendly_name": k,
+                            "type": types_map.get(str(type(v)).upper(), None),
+                        }
+                    )
             rows.append(item)

-        data = {'columns': columns, 'rows': rows}
+        data = {"columns": columns, "rows": rows}
         json_data = json_dumps(data)
         error = None
     except ParseException as e:
         error = "Error parsing query at line {} (column {}):\n{}".format(
-            e.lineno, e.column, e.line)
+            e.lineno, e.column, e.line
+        )
         json_data = None
     except (SyntaxError, RuntimeError) as e:
         error = e.message
