Removed PseudoJSON class, converted all options and other JSON columns to JSONB ones (#6687)
Co-authored-by: Andrew Chubatiuk <andrew.chubatiuk@motional.com>
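For orientation before the diff: the pattern removed throughout this commit is a TypeDecorator that stored dicts in a Text column and ran JSON (de)serialization in Python on every bind and fetch, while the replacement is SQLAlchemy's native postgresql JSONB type. The sketch below is illustrative only — the class name PseudoJSONSketch is hypothetical, and the stock sqlalchemy.ext.mutable.MutableDict stands in for Redash's own MutableDict from redash.models.types:

import json

import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.ext.mutable import MutableDict  # stand-in for redash.models.types.MutableDict


# Before: what the removed PseudoJSON type did -- serialize in Python, store text.
class PseudoJSONSketch(sa.types.TypeDecorator):
    impl = sa.Text

    def process_bind_param(self, value, dialect):
        return value if value is None else json.dumps(value)

    def process_result_value(self, value, dialect):
        return json.loads(value) if value else value


# After: a native JSONB column; Postgres stores real jsonb, the driver handles
# (de)serialization, and MutableDict lets SQLAlchemy track in-place mutations.
options = sa.Column(MutableDict.as_mutable(JSONB), default={})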
@@ -7,7 +7,7 @@ Create Date: 2020-12-23 21:35:32.766354
 """
 from alembic import op
 import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
+from sqlalchemy.dialects.postgresql import JSON
 
 # revision identifiers, used by Alembic.
 revision = '0ec979123ba4'
@@ -18,7 +18,7 @@ depends_on = None
 
 def upgrade():
     # ### commands auto generated by Alembic - please adjust! ###
-    op.add_column('dashboards', sa.Column('options', postgresql.JSON(astext_type=sa.Text()), server_default='{}', nullable=False))
+    op.add_column('dashboards', sa.Column('options', JSON(astext_type=sa.Text()), server_default='{}', nullable=False))
     # ### end Alembic commands ###
 
 
@@ -10,8 +10,7 @@ import json
 from alembic import op
 import sqlalchemy as sa
 from sqlalchemy.sql import table
-
-from redash.models import MutableDict, PseudoJSON
+from redash.models import MutableDict
 
 
 # revision identifiers, used by Alembic.
@@ -41,7 +40,7 @@ def upgrade():
         "queries",
         sa.Column(
             "schedule",
-            MutableDict.as_mutable(PseudoJSON),
+            sa.Text(),
             nullable=False,
             server_default=json.dumps({}),
         ),
@@ -51,7 +50,7 @@ def upgrade():
     queries = table(
         "queries",
         sa.Column("id", sa.Integer, primary_key=True),
-        sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)),
+        sa.Column("schedule", sa.Text()),
         sa.Column("old_schedule", sa.String(length=10)),
     )
 
@@ -85,7 +84,7 @@ def downgrade():
         "queries",
         sa.Column(
             "old_schedule",
-            MutableDict.as_mutable(PseudoJSON),
+            sa.Text(),
             nullable=False,
             server_default=json.dumps({}),
         ),
@@ -93,8 +92,8 @@ def downgrade():
 
     queries = table(
         "queries",
-        sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)),
-        sa.Column("old_schedule", MutableDict.as_mutable(PseudoJSON)),
+        sa.Column("schedule", sa.Text()),
+        sa.Column("old_schedule", sa.Text()),
     )
 
     op.execute(queries.update().values({"old_schedule": queries.c.schedule}))
@@ -106,7 +105,7 @@ def downgrade():
         "queries",
         sa.Column("id", sa.Integer, primary_key=True),
         sa.Column("schedule", sa.String(length=10)),
-        sa.Column("old_schedule", MutableDict.as_mutable(PseudoJSON)),
+        sa.Column("old_schedule", sa.Text()),
     )
 
     conn = op.get_bind()
@@ -0,0 +1,146 @@
+"""change type of json fields from varchar to json
+
+Revision ID: 7205816877ec
+Revises: 7ce5925f832b
+Create Date: 2024-01-03 13:55:18.885021
+
+"""
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.dialects.postgresql import JSONB, JSON
+
+
+# revision identifiers, used by Alembic.
+revision = '7205816877ec'
+down_revision = '7ce5925f832b'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    connection = op.get_bind()
+    op.alter_column('queries', 'options',
+        existing_type=sa.Text(),
+        type_=JSONB(astext_type=sa.Text()),
+        nullable=True,
+        postgresql_using='options::jsonb',
+        server_default=sa.text("'{}'::jsonb"))
+    op.alter_column('queries', 'schedule',
+        existing_type=sa.Text(),
+        type_=JSONB(astext_type=sa.Text()),
+        nullable=True,
+        postgresql_using='schedule::jsonb',
+        server_default=sa.text("'{}'::jsonb"))
+    op.alter_column('events', 'additional_properties',
+        existing_type=sa.Text(),
+        type_=JSONB(astext_type=sa.Text()),
+        nullable=True,
+        postgresql_using='additional_properties::jsonb',
+        server_default=sa.text("'{}'::jsonb"))
+    op.alter_column('organizations', 'settings',
+        existing_type=sa.Text(),
+        type_=JSONB(astext_type=sa.Text()),
+        nullable=True,
+        postgresql_using='settings::jsonb',
+        server_default=sa.text("'{}'::jsonb"))
+    op.alter_column('alerts', 'options',
+        existing_type=JSON(astext_type=sa.Text()),
+        type_=JSONB(astext_type=sa.Text()),
+        nullable=True,
+        postgresql_using='options::jsonb',
+        server_default=sa.text("'{}'::jsonb"))
+    op.alter_column('dashboards', 'options',
+        existing_type=JSON(astext_type=sa.Text()),
+        type_=JSONB(astext_type=sa.Text()),
+        postgresql_using='options::jsonb',
+        server_default=sa.text("'{}'::jsonb"))
+    op.alter_column('dashboards', 'layout',
+        existing_type=sa.Text(),
+        type_=JSONB(astext_type=sa.Text()),
+        postgresql_using='layout::jsonb',
+        server_default=sa.text("'{}'::jsonb"))
+    op.alter_column('query_results', 'data',
+        existing_type=sa.Text(),
+        type_=JSONB(astext_type=sa.Text()),
+        nullable=True,
+        postgresql_using='data::text',
+        server_default=sa.text("'{}'::jsonb"))
+    op.alter_column('changes', 'change',
+        existing_type=JSON(astext_type=sa.Text()),
+        type_=JSONB(astext_type=sa.Text()),
+        postgresql_using='change::jsonb',
+        server_default=sa.text("'{}'::jsonb"))
+    op.alter_column('visualizations', 'options',
+        existing_type=sa.Text(),
+        type_=JSONB(astext_type=sa.Text()),
+        postgresql_using='options::jsonb',
+        server_default=sa.text("'{}'::jsonb"))
+    op.alter_column('widgets', 'options',
+        existing_type=sa.Text(),
+        type_=JSONB(astext_type=sa.Text()),
+        postgresql_using='options::jsonb',
+        server_default=sa.text("'{}'::jsonb"))
+
+
+def downgrade():
+    connection = op.get_bind()
+    op.alter_column('queries', 'options',
+        existing_type=JSONB(astext_type=sa.Text()),
+        type_=sa.Text(),
+        postgresql_using='options::text',
+        existing_nullable=True,
+        server_default=sa.text("'{}'::text"))
+    op.alter_column('queries', 'schedule',
+        existing_type=JSONB(astext_type=sa.Text()),
+        type_=sa.Text(),
+        postgresql_using='schedule::text',
+        existing_nullable=True,
+        server_default=sa.text("'{}'::text"))
+    op.alter_column('events', 'additional_properties',
+        existing_type=JSONB(astext_type=sa.Text()),
+        type_=sa.Text(),
+        postgresql_using='additional_properties::text',
+        existing_nullable=True,
+        server_default=sa.text("'{}'::text"))
+    op.alter_column('organizations', 'settings',
+        existing_type=JSONB(astext_type=sa.Text()),
+        type_=sa.Text(),
+        postgresql_using='settings::text',
+        existing_nullable=True,
+        server_default=sa.text("'{}'::text"))
+    op.alter_column('alerts', 'options',
+        existing_type=JSONB(astext_type=sa.Text()),
+        type_=JSON(astext_type=sa.Text()),
+        postgresql_using='options::json',
+        existing_nullable=True,
+        server_default=sa.text("'{}'::json"))
+    op.alter_column('dashboards', 'options',
+        existing_type=JSONB(astext_type=sa.Text()),
+        type_=JSON(astext_type=sa.Text()),
+        postgresql_using='options::json',
+        server_default=sa.text("'{}'::json"))
+    op.alter_column('dashboards', 'layout',
+        existing_type=JSONB(astext_type=sa.Text()),
+        type_=sa.Text(),
+        postgresql_using='layout::text',
+        server_default=sa.text("'{}'::text"))
+    op.alter_column('query_results', 'data',
+        existing_type=JSONB(astext_type=sa.Text()),
+        type_=sa.Text(),
+        postgresql_using='data::text',
+        server_default=sa.text("'{}'::text"))
+    op.alter_column('changes', 'change',
+        existing_type=JSONB(astext_type=sa.Text()),
+        type_=JSON(astext_type=sa.Text()),
+        postgresql_using='change::json',
+        server_default=sa.text("'{}'::json"))
+    op.alter_column('visualizations', 'options',
+        type_=sa.Text(),
+        existing_type=JSONB(astext_type=sa.Text()),
+        postgresql_using='options::text',
+        server_default=sa.text("'{}'::text"))
+    op.alter_column('widgets', 'options',
+        type_=sa.Text(),
+        existing_type=JSONB(astext_type=sa.Text()),
+        postgresql_using='options::text',
+        server_default=sa.text("'{}'::text"))
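Each op.alter_column call in the new migration above leans on the postgresql_using argument to tell Postgres how to rewrite existing rows during the type change. A rough hand-written equivalent for one column is sketched below (an assumption for illustration, not taken from the migration: the default is dropped and re-added around the cast; the table and column names come from the queries.options call above):

from alembic import op


def upgrade():
    # Drop the old text default, cast the stored text to jsonb in place,
    # then declare the new default as a jsonb literal.
    op.execute("ALTER TABLE queries ALTER COLUMN options DROP DEFAULT")
    op.execute("ALTER TABLE queries ALTER COLUMN options TYPE jsonb USING options::jsonb")
    op.execute("ALTER TABLE queries ALTER COLUMN options SET DEFAULT '{}'::jsonb")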
@@ -7,10 +7,9 @@ Create Date: 2019-01-17 13:22:21.729334
 """
 from alembic import op
 import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
 from sqlalchemy.sql import table
 
-from redash.models import MutableDict, PseudoJSON
+from redash.models import MutableDict
 
 # revision identifiers, used by Alembic.
 revision = "73beceabb948"
@@ -43,7 +42,7 @@ def upgrade():
     queries = table(
         "queries",
         sa.Column("id", sa.Integer, primary_key=True),
-        sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)),
+        sa.Column("schedule", sa.Text()),
     )
 
     conn = op.get_bind()
@@ -7,7 +7,7 @@ Create Date: 2019-01-31 09:21:31.517265
 """
 from alembic import op
 import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
+from sqlalchemy.dialects.postgresql import BYTEA
 from sqlalchemy.sql import table
 from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine
 
@@ -18,7 +18,6 @@ from redash.models.types import (
     Configuration,
     MutableDict,
     MutableList,
-    PseudoJSON,
 )
 
 # revision identifiers, used by Alembic.
@@ -31,7 +30,7 @@ depends_on = None
 def upgrade():
     op.add_column(
         "data_sources",
-        sa.Column("encrypted_options", postgresql.BYTEA(), nullable=True),
+        sa.Column("encrypted_options", BYTEA(), nullable=True),
     )
 
     # copy values
@@ -9,7 +9,7 @@ import re
 from funcy import flatten, compact
 from alembic import op
 import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
+from sqlalchemy.dialects.postgresql import ARRAY
 from redash import models
 
 # revision identifiers, used by Alembic.
@@ -21,10 +21,10 @@ depends_on = None
 
 def upgrade():
     op.add_column(
-        "dashboards", sa.Column("tags", postgresql.ARRAY(sa.Unicode()), nullable=True)
+        "dashboards", sa.Column("tags", ARRAY(sa.Unicode()), nullable=True)
     )
     op.add_column(
-        "queries", sa.Column("tags", postgresql.ARRAY(sa.Unicode()), nullable=True)
+        "queries", sa.Column("tags", ARRAY(sa.Unicode()), nullable=True)
     )
 
 
@@ -7,7 +7,7 @@ Create Date: 2020-12-14 21:42:48.661684
 """
 from alembic import op
 import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
+from sqlalchemy.dialects.postgresql import BYTEA
 from sqlalchemy.sql import table
 from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine
 
@@ -30,7 +30,7 @@ depends_on = None
 def upgrade():
     op.add_column(
         "notification_destinations",
-        sa.Column("encrypted_options", postgresql.BYTEA(), nullable=True)
+        sa.Column("encrypted_options", BYTEA(), nullable=True)
     )
 
     # copy values
@@ -7,7 +7,7 @@ Create Date: 2018-11-08 16:12:17.023569
 """
 from alembic import op
 import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
+from sqlalchemy.dialects.postgresql import JSON
 
 # revision identifiers, used by Alembic.
 revision = "e7f8a917aa8e"
@@ -21,7 +21,7 @@ def upgrade():
         "users",
         sa.Column(
             "details",
-            postgresql.JSON(astext_type=sa.Text()),
+            JSON(astext_type=sa.Text()),
             server_default="{}",
             nullable=True,
         ),
@@ -7,7 +7,7 @@ Create Date: 2022-01-31 15:24:16.507888
 """
 from alembic import op
 import sqlalchemy as sa
-from sqlalchemy.dialects import postgresql
+from sqlalchemy.dialects.postgresql import JSON, JSONB
 
 from redash.models import db
 
@@ -23,8 +23,8 @@ def upgrade():
 
     ### commands auto generated by Alembic - please adjust! ###
     op.alter_column('users', 'details',
-        existing_type=postgresql.JSON(astext_type=sa.Text()),
-        type_=postgresql.JSONB(astext_type=sa.Text()),
+        existing_type=JSON(astext_type=sa.Text()),
+        type_=JSONB(astext_type=sa.Text()),
         existing_nullable=True,
         existing_server_default=sa.text("'{}'::jsonb"))
     ### end Alembic commands ###
@@ -52,8 +52,8 @@ def downgrade():
     connection.execute(update_query)
     db.session.commit()
     op.alter_column('users', 'details',
-        existing_type=postgresql.JSONB(astext_type=sa.Text()),
-        type_=postgresql.JSON(astext_type=sa.Text()),
+        existing_type=JSONB(astext_type=sa.Text()),
+        type_=JSON(astext_type=sa.Text()),
         existing_nullable=True,
         existing_server_default=sa.text("'{}'::json"))
 
@@ -5,7 +5,7 @@ from flask import Blueprint, current_app, request
 from flask_login import current_user, login_required
 from flask_restful import Resource, abort
 from sqlalchemy import cast
-from sqlalchemy.dialects import postgresql
+from sqlalchemy.dialects.postgresql import ARRAY
 from sqlalchemy.orm.exc import NoResultFound
 from sqlalchemy_utils.functions import sort_query
 
@@ -114,7 +114,7 @@ def json_response(response):
 def filter_by_tags(result_set, column):
     if request.args.getlist("tags"):
         tags = request.args.getlist("tags")
-        result_set = result_set.filter(cast(column, postgresql.ARRAY(db.Text)).contains(tags))
+        result_set = result_set.filter(cast(column, ARRAY(db.Text)).contains(tags))
     return result_set
 
 
@@ -96,7 +96,7 @@ class DashboardListResource(BaseResource):
             org=self.current_org,
             user=self.current_user,
             is_draft=True,
-            layout="[]",
+            layout=[],
         )
         models.db.session.add(dashboard)
         models.db.session.commit()
@@ -7,7 +7,6 @@ from redash.permissions import (
     require_permission,
 )
 from redash.serializers import serialize_visualization
-from redash.utils import json_dumps
 
 
 class VisualizationListResource(BaseResource):
@@ -18,7 +17,6 @@ class VisualizationListResource(BaseResource):
         query = get_object_or_404(models.Query.get_by_id_and_org, kwargs.pop("query_id"), self.current_org)
         require_object_modify_permission(query, self.current_user)
 
-        kwargs["options"] = json_dumps(kwargs["options"])
         kwargs["query_rel"] = query
 
         vis = models.Visualization(**kwargs)
@@ -34,8 +32,6 @@ class VisualizationResource(BaseResource):
         require_object_modify_permission(vis.query_rel, self.current_user)
 
         kwargs = request.get_json(force=True)
-        if "options" in kwargs:
-            kwargs["options"] = json_dumps(kwargs["options"])
 
         kwargs.pop("id", None)
         kwargs.pop("query_id", None)
@@ -9,7 +9,6 @@ from redash.permissions import (
     view_only,
 )
 from redash.serializers import serialize_widget
-from redash.utils import json_dumps
 
 
 class WidgetListResource(BaseResource):
@@ -30,7 +29,6 @@ class WidgetListResource(BaseResource):
         dashboard = models.Dashboard.get_by_id_and_org(widget_properties.get("dashboard_id"), self.current_org)
         require_object_modify_permission(dashboard, self.current_user)
 
-        widget_properties["options"] = json_dumps(widget_properties["options"])
        widget_properties.pop("id", None)
 
         visualization_id = widget_properties.pop("visualization_id")
@@ -65,7 +63,7 @@ class WidgetResource(BaseResource):
         require_object_modify_permission(widget.dashboard, self.current_user)
         widget_properties = request.get_json(force=True)
         widget.text = widget_properties["text"]
-        widget.options = json_dumps(widget_properties["options"])
+        widget.options = widget_properties["options"]
         models.db.session.commit()
         return serialize_widget(widget)
 
@@ -6,7 +6,7 @@ import time
 
 import pytz
 from sqlalchemy import UniqueConstraint, and_, cast, distinct, func, or_
-from sqlalchemy.dialects import postgresql
+from sqlalchemy.dialects.postgresql import ARRAY, DOUBLE_PRECISION, JSONB
 from sqlalchemy.event import listens_for
 from sqlalchemy.ext.hybrid import hybrid_property
 from sqlalchemy.orm import (
@@ -50,8 +50,7 @@ from redash.models.types import (
     EncryptedConfiguration,
     MutableDict,
     MutableList,
-    PseudoJSON,
-    pseudo_json_cast_property,
+    json_cast_property,
 )
 from redash.models.users import ( # noqa
     AccessPermission,
@@ -127,7 +126,10 @@ class DataSource(BelongsToOrgMixin, db.Model):
 
     data_source_groups = db.relationship("DataSourceGroup", back_populates="data_source", cascade="all")
     __tablename__ = "data_sources"
-    __table_args__ = (db.Index("data_sources_org_id_name", "org_id", "name"),)
+    __table_args__ = (
+        db.Index("data_sources_org_id_name", "org_id", "name"),
+        {"extend_existing": True},
+    )
 
     def __eq__(self, other):
         return self.id == other.id
@@ -301,34 +303,11 @@ class DataSourceGroup(db.Model):
     view_only = Column(db.Boolean, default=False)
 
     __tablename__ = "data_source_groups"
+    __table_args__ = ({"extend_existing": True},)
-
-
-DESERIALIZED_DATA_ATTR = "_deserialized_data"
-
-
-class DBPersistence:
-    @property
-    def data(self):
-        if self._data is None:
-            return None
-
-        if not hasattr(self, DESERIALIZED_DATA_ATTR):
-            setattr(self, DESERIALIZED_DATA_ATTR, json_loads(self._data))
-
-        return self._deserialized_data
-
-    @data.setter
-    def data(self, data):
-        if hasattr(self, DESERIALIZED_DATA_ATTR):
-            delattr(self, DESERIALIZED_DATA_ATTR)
-        self._data = data
-
-
-QueryResultPersistence = settings.dynamic_settings.QueryResultPersistence or DBPersistence
 
 
 @generic_repr("id", "org_id", "data_source_id", "query_hash", "runtime", "retrieved_at")
-class QueryResult(db.Model, QueryResultPersistence, BelongsToOrgMixin):
+class QueryResult(db.Model, BelongsToOrgMixin):
     id = primary_key("QueryResult")
     org_id = Column(key_type("Organization"), db.ForeignKey("organizations.id"))
     org = db.relationship(Organization)
@@ -336,8 +315,8 @@ class QueryResult(db.Model, QueryResultPersistence, BelongsToOrgMixin):
     data_source = db.relationship(DataSource, backref=backref("query_results"))
     query_hash = Column(db.String(32), index=True)
     query_text = Column("query", db.Text)
-    _data = Column("data", db.Text)
-    runtime = Column(postgresql.DOUBLE_PRECISION)
+    data = Column(MutableDict.as_mutable(JSONB), nullable=True)
+    runtime = Column(DOUBLE_PRECISION)
     retrieved_at = Column(db.DateTime(True))
 
     __tablename__ = "query_results"
@@ -478,11 +457,11 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
     last_modified_by = db.relationship(User, backref="modified_queries", foreign_keys=[last_modified_by_id])
     is_archived = Column(db.Boolean, default=False, index=True)
     is_draft = Column(db.Boolean, default=True, index=True)
-    schedule = Column(MutableDict.as_mutable(PseudoJSON), nullable=True)
-    interval = pseudo_json_cast_property(db.Integer, "schedule", "interval", default=0)
+    schedule = Column(MutableDict.as_mutable(JSONB), nullable=True)
+    interval = json_cast_property(db.Integer, "schedule", "interval", default=0)
     schedule_failures = Column(db.Integer, default=0)
     visualizations = db.relationship("Visualization", cascade="all, delete-orphan")
-    options = Column(MutableDict.as_mutable(PseudoJSON), default={})
+    options = Column(MutableDict.as_mutable(JSONB), default={})
     search_vector = Column(
         TSVectorType(
             "id",
@@ -493,7 +472,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
         ),
         nullable=True,
     )
-    tags = Column("tags", MutableList.as_mutable(postgresql.ARRAY(db.Unicode)), nullable=True)
+    tags = Column("tags", MutableList.as_mutable(ARRAY(db.Unicode)), nullable=True)
 
     query_class = SearchBaseQuery
     __tablename__ = "queries"
@@ -529,7 +508,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
                 name="Table",
                 description="",
                 type="TABLE",
-                options="{}",
+                options={},
             )
         )
         return query
@@ -595,7 +574,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
     @classmethod
     def past_scheduled_queries(cls):
         now = utils.utcnow()
-        queries = Query.query.filter(Query.schedule.isnot(None)).order_by(Query.id)
+        queries = Query.query.filter(func.jsonb_typeof(Query.schedule) != "null").order_by(Query.id)
         return [
             query
             for query in queries
@@ -607,7 +586,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
     def outdated_queries(cls):
         queries = (
             Query.query.options(joinedload(Query.latest_query_data).load_only("retrieved_at"))
-            .filter(Query.schedule.isnot(None))
+            .filter(func.jsonb_typeof(Query.schedule) != "null")
             .order_by(Query.id)
             .all()
         )
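Two details of the Query model changes above are worth spelling out: once schedule is a jsonb column, a stored JSON null is no longer a SQL NULL, which is why the .isnot(None) filters become jsonb_typeof(...) != "null"; and json_cast_property exposes one key of the document as a typed, filterable attribute. A small usage sketch, assuming a configured Flask-SQLAlchemy session as in the Redash app (the 3600-second interval is just an example value):

from sqlalchemy import func

from redash.models import Query

# A JSON null survives .isnot(None), so filter on the jsonb type instead.
scheduled = Query.query.filter(func.jsonb_typeof(Query.schedule) != "null").all()

# interval is a json_cast_property over schedule["interval"], so it can be
# compared as an integer directly in SQL.
hourly = Query.query.filter(Query.interval == 3600).all()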
@@ -953,7 +932,7 @@ class Alert(TimestampMixin, BelongsToOrgMixin, db.Model):
     query_rel = db.relationship(Query, backref=backref("alerts", cascade="all"))
     user_id = Column(key_type("User"), db.ForeignKey("users.id"))
     user = db.relationship(User, backref="alerts")
-    options = Column(MutableDict.as_mutable(PseudoJSON))
+    options = Column(MutableDict.as_mutable(JSONB), nullable=True)
     state = Column(db.String(255), default=UNKNOWN_STATE)
     subscriptions = db.relationship("AlertSubscription", cascade="all, delete-orphan")
     last_triggered_at = Column(db.DateTime(True), nullable=True)
@@ -1064,13 +1043,13 @@ class Dashboard(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model
     user_id = Column(key_type("User"), db.ForeignKey("users.id"))
     user = db.relationship(User)
     # layout is no longer used, but kept so we know how to render old dashboards.
-    layout = Column(db.Text)
+    layout = Column(MutableList.as_mutable(JSONB), default=[])
     dashboard_filters_enabled = Column(db.Boolean, default=False)
     is_archived = Column(db.Boolean, default=False, index=True)
     is_draft = Column(db.Boolean, default=True, index=True)
     widgets = db.relationship("Widget", backref="dashboard", lazy="dynamic")
-    tags = Column("tags", MutableList.as_mutable(postgresql.ARRAY(db.Unicode)), nullable=True)
-    options = Column(MutableDict.as_mutable(postgresql.JSON), server_default="{}", default={})
+    tags = Column("tags", MutableList.as_mutable(ARRAY(db.Unicode)), nullable=True)
+    options = Column(MutableDict.as_mutable(JSONB), default={})
 
     __tablename__ = "dashboards"
     __mapper_args__ = {"version_id_col": version}
@@ -1183,7 +1162,7 @@ class Visualization(TimestampMixin, BelongsToOrgMixin, db.Model):
     query_rel = db.relationship(Query, back_populates="visualizations")
     name = Column(db.String(255))
     description = Column(db.String(4096), nullable=True)
-    options = Column(db.Text)
+    options = Column(MutableDict.as_mutable(JSONB), nullable=True)
 
     __tablename__ = "visualizations"
 
@@ -1210,7 +1189,7 @@ class Widget(TimestampMixin, BelongsToOrgMixin, db.Model):
     visualization = db.relationship(Visualization, backref=backref("widgets", cascade="delete"))
     text = Column(db.Text, nullable=True)
     width = Column(db.Integer)
-    options = Column(db.Text)
+    options = Column(MutableDict.as_mutable(JSONB), default={})
     dashboard_id = Column(key_type("Dashboard"), db.ForeignKey("dashboards.id"), index=True)
 
     __tablename__ = "widgets"
@@ -1242,7 +1221,7 @@ class Event(db.Model):
     action = Column(db.String(255))
     object_type = Column(db.String(255))
     object_id = Column(db.String(255), nullable=True)
-    additional_properties = Column(MutableDict.as_mutable(PseudoJSON), nullable=True, default={})
+    additional_properties = Column(MutableDict.as_mutable(JSONB), nullable=True, default={})
     created_at = Column(db.DateTime(True), default=db.func.now())
 
     __tablename__ = "events"
@@ -1,13 +1,13 @@
 import functools
 
 from flask_sqlalchemy import BaseQuery, SQLAlchemy
-from sqlalchemy.dialects import postgresql
+from sqlalchemy.dialects.postgresql import UUID
 from sqlalchemy.orm import object_session
 from sqlalchemy.pool import NullPool
 from sqlalchemy_searchable import SearchQueryMixin, make_searchable, vectorizer
 
 from redash import settings
-from redash.utils import json_dumps
+from redash.utils import json_dumps, json_loads
 
 
 class RedashSQLAlchemy(SQLAlchemy):
@@ -28,7 +28,10 @@ class RedashSQLAlchemy(SQLAlchemy):
         return options
 
 
-db = RedashSQLAlchemy(session_options={"expire_on_commit": False})
+db = RedashSQLAlchemy(
+    session_options={"expire_on_commit": False},
+    engine_options={"json_serializer": json_dumps, "json_deserializer": json_loads},
+)
 # Make sure the SQLAlchemy mappers are all properly configured first.
 # This is required by SQLAlchemy-Searchable as it adds DDL listeners
 # on the configuration phase of models.
@@ -50,7 +53,7 @@ def integer_vectorizer(column):
     return db.func.cast(column, db.Text)
 
 
-@vectorizer(postgresql.UUID)
+@vectorizer(UUID)
 def uuid_vectorizer(column):
     return db.func.cast(column, db.Text)
 
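The engine_options addition above registers Redash's own json_dumps/json_loads as the engine-level (de)serializers, so every JSON/JSONB column round-trips through the same encoder the old PseudoJSON type used. The equivalent knob on a bare SQLAlchemy engine looks roughly like this (a sketch; the DSN is a placeholder):

from sqlalchemy import create_engine

from redash.utils import json_dumps, json_loads

# psycopg2 will call these whenever it binds or fetches json/jsonb values.
engine = create_engine(
    "postgresql:///redash",  # placeholder DSN
    json_serializer=json_dumps,
    json_deserializer=json_loads,
)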
@@ -1,8 +1,8 @@
+from sqlalchemy.dialects.postgresql import JSONB
 from sqlalchemy.inspection import inspect
 from sqlalchemy_utils.models import generic_repr
 
 from .base import Column, GFKBase, db, key_type, primary_key
-from .types import PseudoJSON
 
 
 @generic_repr("id", "object_type", "object_id", "created_at")
@@ -13,7 +13,7 @@ class Change(GFKBase, db.Model):
     object_version = Column(db.Integer, default=0)
     user_id = Column(key_type("User"), db.ForeignKey("users.id"))
     user = db.relationship("User", backref="changes")
-    change = Column(PseudoJSON)
+    change = Column(JSONB)
     created_at = Column(db.DateTime(True), default=db.func.now())
 
     __tablename__ = "changes"
@@ -1,3 +1,4 @@
+from sqlalchemy.dialects.postgresql import JSONB
 from sqlalchemy.orm.attributes import flag_modified
 from sqlalchemy_utils.models import generic_repr
 
@@ -5,7 +6,7 @@ from redash.settings.organization import settings as org_settings
 
 from .base import Column, db, primary_key
 from .mixins import TimestampMixin
-from .types import MutableDict, PseudoJSON
+from .types import MutableDict
 from .users import Group, User
 
 
@@ -17,7 +18,7 @@ class Organization(TimestampMixin, db.Model):
     id = primary_key("Organization")
     name = Column(db.String(255))
     slug = Column(db.String(255), unique=True)
-    settings = Column(MutableDict.as_mutable(PseudoJSON))
+    settings = Column(MutableDict.as_mutable(JSONB), default={})
     groups = db.relationship("Group", lazy="dynamic")
     events = db.relationship("Event", lazy="dynamic", order_by="desc(Event.created_at)")
 
@@ -1,11 +1,8 @@
-from sqlalchemy import cast
-from sqlalchemy.dialects.postgresql import JSON
 from sqlalchemy.ext.indexable import index_property
 from sqlalchemy.ext.mutable import Mutable
 from sqlalchemy.types import TypeDecorator
 from sqlalchemy_utils import EncryptedType
 
-from redash.utils import json_dumps, json_loads
 from redash.utils.configuration import ConfigurationContainer
 
 from .base import db
@@ -31,22 +28,6 @@ class EncryptedConfiguration(EncryptedType):
         )
 
 
-# XXX replace PseudoJSON and MutableDict with real JSON field
-class PseudoJSON(TypeDecorator):
-    impl = db.Text
-
-    def process_bind_param(self, value, dialect):
-        if value is None:
-            return value
-
-        return json_dumps(value)
-
-    def process_result_value(self, value, dialect):
-        if not value:
-            return value
-        return json_loads(value)
-
-
 class MutableDict(Mutable, dict):
     @classmethod
     def coerce(cls, key, value):
@@ -107,19 +88,3 @@ class json_cast_property(index_property):
     def expr(self, model):
         expr = super(json_cast_property, self).expr(model)
         return expr.astext.cast(self.cast_type)
-
-
-class pseudo_json_cast_property(index_property):
-    """
-    A SQLAlchemy index property that is able to cast the
-    entity attribute as the specified cast type. Useful
-    for PseudoJSON colums for easier querying/filtering.
-    """
-
-    def __init__(self, cast_type, *args, **kwargs):
-        super().__init__(*args, **kwargs)
-        self.cast_type = cast_type
-
-    def expr(self, model):
-        expr = cast(getattr(model, self.attr_name), JSON)[self.index]
-        return expr.astext.cast(self.cast_type)
@@ -8,7 +8,7 @@ from operator import or_
 from flask import current_app, request_started, url_for
 from flask_login import AnonymousUserMixin, UserMixin, current_user
 from passlib.apps import custom_app_context as pwd_context
-from sqlalchemy.dialects import postgresql
+from sqlalchemy.dialects.postgresql import ARRAY, JSONB
 from sqlalchemy_utils import EmailType
 from sqlalchemy_utils.models import generic_repr
 
@@ -84,14 +84,14 @@ class User(TimestampMixin, db.Model, BelongsToOrgMixin, UserMixin, PermissionsCh
     password_hash = Column(db.String(128), nullable=True)
     group_ids = Column(
         "groups",
-        MutableList.as_mutable(postgresql.ARRAY(key_type("Group"))),
+        MutableList.as_mutable(ARRAY(key_type("Group"))),
         nullable=True,
     )
     api_key = Column(db.String(40), default=lambda: generate_token(40), unique=True)
 
     disabled_at = Column(db.DateTime(True), default=None, nullable=True)
     details = Column(
-        MutableDict.as_mutable(postgresql.JSONB),
+        MutableDict.as_mutable(JSONB),
         nullable=True,
         server_default="{}",
         default={},
@@ -267,7 +267,7 @@ class Group(db.Model, BelongsToOrgMixin):
     org = db.relationship("Organization", back_populates="groups")
     type = Column(db.String(255), default=REGULAR_GROUP)
     name = Column(db.String(100))
-    permissions = Column(postgresql.ARRAY(db.String(255)), default=DEFAULT_PERMISSIONS)
+    permissions = Column(ARRAY(db.String(255)), default=DEFAULT_PERMISSIONS)
     created_at = Column(db.DateTime(True), default=db.func.now())
 
     __tablename__ = "groups"
@@ -9,7 +9,6 @@ from rq.timeouts import JobTimeoutException
 from sshtunnel import open_tunnel
 
 from redash import settings, utils
-from redash.utils import json_loads
 from redash.utils.requests_session import (
     UnacceptableAddressException,
     requests_or_advocate,
@@ -243,7 +242,7 @@ class BaseQueryRunner:
 
         if error is not None:
             raise Exception("Failed running query [%s]." % query)
-        return json_loads(results)["rows"]
+        return results["rows"]
 
     @classmethod
     def to_dict(cls):
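The BaseQueryRunner change above reflects the new contract used by the query runner diffs that follow: run_query implementations now return the result payload as a plain dict (plus an error) instead of a json_dumps()'d string, and callers index into it directly. A minimal sketch of the returned shape (the column metadata keys name/friendly_name/type are stated from Redash convention rather than from this diff):

def run_query_sketch():
    data = {
        "columns": [{"name": "id", "friendly_name": "id", "type": "integer"}],
        "rows": [{"id": 1}],
    }
    error = None
    return data, error  # callers no longer need a json_loads() step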
@@ -7,7 +7,6 @@ from redash.query_runner import (
     BaseQueryRunner,
     register,
 )
-from redash.utils import json_dumps
 
 logger = logging.getLogger(__name__)
 
@@ -81,12 +80,11 @@ class Arango(BaseQueryRunner):
                 "rows": result,
             }
 
-            json_data = json_dumps(data, allow_nan=False)
             error = None
         except Exception:
             raise
 
-        return json_data, error
+        return data, error
 
 
 register(Arango)
@@ -12,7 +12,6 @@ from redash.query_runner import (
     register,
 )
 from redash.settings import parse_boolean
-from redash.utils import json_dumps, json_loads
 
 logger = logging.getLogger(__name__)
 ANNOTATE_QUERY = parse_boolean(os.environ.get("ATHENA_ANNOTATE_QUERY", "true"))
@@ -210,7 +209,6 @@ class Athena(BaseQueryRunner):
         if error is not None:
             self._handle_run_query_error(error)
 
-        results = json_loads(results)
         for row in results["rows"]:
             table_name = "{0}.{1}".format(row["table_schema"], row["table_name"])
             if table_name not in schema:
@@ -257,14 +255,13 @@ class Athena(BaseQueryRunner):
                 },
             }
 
-            json_data = json_dumps(data, allow_nan=False)
             error = None
         except Exception:
             if cursor.query_id:
                 cursor.cancel()
             raise
 
-        return json_data, error
+        return data, error
 
 
 register(Athena)
@@ -13,7 +13,7 @@ from redash.query_runner import (
     JobTimeoutException,
     register,
 )
-from redash.utils import json_dumps, json_loads
+from redash.utils import json_loads
 
 logger = logging.getLogger(__name__)
 
@@ -157,17 +157,16 @@ class AxibaseTSD(BaseQueryRunner):
             columns, rows = generate_rows_and_columns(data)
 
             data = {"columns": columns, "rows": rows}
-            json_data = json_dumps(data)
             error = None
 
         except SQLException as e:
-            json_data = None
+            data = None
             error = e.content
         except (KeyboardInterrupt, InterruptException, JobTimeoutException):
             sql.cancel_query(query_id)
             raise
 
-        return json_data, error
+        return data, error
 
     def get_schema(self, get_stats=False):
         connection = atsd_client.connect_url(
@@ -8,7 +8,7 @@ from redash.query_runner import (
     BaseQueryRunner,
     register,
 )
-from redash.utils import json_dumps, json_loads
+from redash.utils import json_loads
 
 try:
     from azure.kusto.data.exceptions import KustoServiceError
@@ -124,16 +124,15 @@ class AzureKusto(BaseQueryRunner):
 
             error = None
             data = {"columns": columns, "rows": rows}
-            json_data = json_dumps(data)
 
         except KustoServiceError as err:
-            json_data = None
+            data = None
             try:
                 error = err.args[1][0]["error"]["@message"]
             except (IndexError, KeyError):
                 error = err.args[1]
 
-        return json_data, error
+        return data, error
 
     def get_schema(self, get_stats=False):
         query = ".show database schema as json"
@@ -143,8 +142,6 @@ class AzureKusto(BaseQueryRunner):
         if error is not None:
             self._handle_run_query_error(error)
 
-        results = json_loads(results)
-
         schema_as_json = json_loads(results["rows"][0]["DatabaseSchema"])
         tables_list = schema_as_json["Databases"][self.configuration["database"]]["Tables"].values()
 
@@ -16,7 +16,7 @@ from redash.query_runner import (
     JobTimeoutException,
     register,
 )
-from redash.utils import json_dumps, json_loads
+from redash.utils import json_loads
 
 logger = logging.getLogger(__name__)
 
@@ -318,7 +318,6 @@ class BigQuery(BaseQueryRunner):
         if error is not None:
             self._handle_run_query_error(error)
 
-        results = json_loads(results)
         for row in results["rows"]:
             table_name = "{0}.{1}".format(row["table_schema"], row["table_name"])
             if table_name not in schema:
@@ -346,9 +345,8 @@ class BigQuery(BaseQueryRunner):
             data = self._get_query_result(jobs, query)
             error = None
 
-            json_data = json_dumps(data, allow_nan=False)
         except apiclient.errors.HttpError as e:
-            json_data = None
+            data = None
             if e.resp.status in [400, 404]:
                 error = json_loads(e.content)["error"]["message"]
             else:
@@ -363,7 +361,7 @@ class BigQuery(BaseQueryRunner):
 
             raise
 
-        return json_data, error
+        return data, error
 
 
 register(BigQuery)
@@ -5,7 +5,6 @@ from base64 import b64decode
 from tempfile import NamedTemporaryFile
 
 from redash.query_runner import BaseQueryRunner, register
-from redash.utils import JSONEncoder, json_dumps, json_loads
 
 logger = logging.getLogger(__name__)
 
@@ -27,11 +26,10 @@ def generate_ssl_options_dict(protocol, cert_path=None):
     return ssl_options
 
 
-class CassandraJSONEncoder(JSONEncoder):
-    def default(self, o):
-        if isinstance(o, sortedset):
-            return list(o)
-        return super(CassandraJSONEncoder, self).default(o)
+def json_encoder(dec, o):
+    if isinstance(o, sortedset):
+        return list(o)
+    return None
 
 
 class Cassandra(BaseQueryRunner):
@@ -86,7 +84,6 @@ class Cassandra(BaseQueryRunner):
         select release_version from system.local;
         """
         results, error = self.run_query(query, None)
-        results = json_loads(results)
         release_version = results["rows"][0]["release_version"]
 
         query = """
@@ -107,7 +104,6 @@ class Cassandra(BaseQueryRunner):
         )
 
         results, error = self.run_query(query, None)
-        results = json_loads(results)
 
         schema = {}
         for row in results["rows"]:
@@ -155,9 +151,8 @@ class Cassandra(BaseQueryRunner):
         rows = [dict(zip(column_names, row)) for row in result]
 
         data = {"columns": columns, "rows": rows}
-        json_data = json_dumps(data, cls=CassandraJSONEncoder)
 
-        return json_data, None
+        return data, None
 
     def _generate_cert_file(self):
         cert_encoded_bytes = self.configuration.get("sslCertificateFile", None)
@@ -15,7 +15,6 @@ from redash.query_runner import (
     register,
     split_sql_statements,
 )
-from redash.utils import json_dumps, json_loads
 
 logger = logging.getLogger(__name__)
 
@@ -85,8 +84,6 @@ class ClickHouse(BaseSQLQueryRunner):
         if error is not None:
             self._handle_run_query_error(error)
 
-        results = json_loads(results)
-
         for row in results["rows"]:
             table_name = "{}.{}".format(row["database"], row["table"])
 
@@ -200,25 +197,24 @@ class ClickHouse(BaseSQLQueryRunner):
         queries = split_multi_query(query)
 
         if not queries:
-            json_data = None
+            data = None
             error = "Query is empty"
-            return json_data, error
+            return data, error
 
         try:
             # If just one query was given no session is needed
             if len(queries) == 1:
-                results = self._clickhouse_query(queries[0])
+                data = self._clickhouse_query(queries[0])
             else:
                 # If more than one query was given, a session is needed. Parameter session_check must be false
                 # for the first query
                 session_id = "redash_{}".format(uuid4().hex)
 
-                results = self._clickhouse_query(queries[0], session_id, session_check=False)
+                data = self._clickhouse_query(queries[0], session_id, session_check=False)
 
                 for query in queries[1:]:
-                    results = self._clickhouse_query(query, session_id, session_check=True)
+                    data = self._clickhouse_query(query, session_id, session_check=True)
 
-            data = json_dumps(results)
             error = None
         except Exception as e:
             data = None
@@ -3,7 +3,7 @@ import datetime
 import yaml

 from redash.query_runner import BaseQueryRunner, register
-from redash.utils import json_dumps, parse_human_time
+from redash.utils import parse_human_time

 try:
 import boto3
@@ -121,7 +121,7 @@ class CloudWatch(BaseQueryRunner):

 rows, columns = parse_response(results)

-return json_dumps({"rows": rows, "columns": columns}), None
+return {"rows": rows, "columns": columns}, None


 register(CloudWatch)
@@ -4,7 +4,7 @@ import time
 import yaml

 from redash.query_runner import BaseQueryRunner, register
-from redash.utils import json_dumps, parse_human_time
+from redash.utils import parse_human_time

 try:
 import boto3
@@ -146,7 +146,7 @@ class CloudWatchInsights(BaseQueryRunner):
 time.sleep(POLL_INTERVAL)
 elapsed += POLL_INTERVAL

-return json_dumps(data), None
+return data, None


 register(CloudWatchInsights)
@@ -9,7 +9,6 @@ import logging
 from os import environ

 from redash.query_runner import BaseQueryRunner
-from redash.utils import json_dumps, json_loads

 from . import register

@@ -115,7 +114,7 @@ class CorporateMemoryQueryRunner(BaseQueryRunner):
 logger.info("results are: {}".format(results))
 # Not sure why we do not use the json package here but all other
 # query runner do it the same way :-)
-sparql_results = json_loads(results)
+sparql_results = results
 # transform all bindings to redash rows
 rows = []
 for sparql_row in sparql_results["results"]["bindings"]:
@@ -133,7 +132,7 @@ class CorporateMemoryQueryRunner(BaseQueryRunner):
 columns.append({"name": var, "friendly_name": var, "type": "string"})
 # Not sure why we do not use the json package here but all other
 # query runner do it the same way :-)
-return json_dumps({"columns": columns, "rows": rows})
+return {"columns": columns, "rows": rows}

 @classmethod
 def name(cls):
@@ -10,7 +10,6 @@ from redash.query_runner import (
 BaseQueryRunner,
 register,
 )
-from redash.utils import json_dumps

 logger = logging.getLogger(__name__)
 try:
@@ -155,7 +154,7 @@ class Couchbase(BaseQueryRunner):
 rows, columns = parse_results(result.json()["results"])
 data = {"columns": columns, "rows": rows}

-return json_dumps(data), None
+return data, None

 @classmethod
 def name(cls):
@@ -4,7 +4,6 @@ import logging
 import yaml

 from redash.query_runner import BaseQueryRunner, NotSupported, register
-from redash.utils import json_dumps
 from redash.utils.requests_session import (
 UnacceptableAddressException,
 requests_or_advocate,
@@ -96,19 +95,18 @@ class CSV(BaseQueryRunner):
 break
 data["rows"] = df[labels].replace({np.nan: None}).to_dict(orient="records")

-json_data = json_dumps(data)
 error = None
 except KeyboardInterrupt:
 error = "Query cancelled by user."
-json_data = None
+data = None
 except UnacceptableAddressException:
 error = "Can't query private addresses."
-json_data = None
+data = None
 except Exception as e:
 error = "Error reading {0}. {1}".format(path, str(e))
-json_data = None
+data = None

-return json_data, error
+return data, error

 def get_schema(self):
 raise NotSupported()
@@ -16,7 +16,6 @@ from redash.query_runner import (
 BaseQueryRunner,
 register,
 )
-from redash.utils import json_dumps, json_loads


 class Databend(BaseQueryRunner):
@@ -85,11 +84,10 @@ class Databend(BaseQueryRunner):

 data = {"columns": columns, "rows": rows}
 error = None
-json_data = json_dumps(data)
 finally:
 connection.close()

-return json_data, error
+return data, error

 def get_schema(self, get_stats=False):
 query = """
@@ -106,7 +104,6 @@ class Databend(BaseQueryRunner):
 self._handle_run_query_error(error)

 schema = {}
-results = json_loads(results)

 for row in results["rows"]:
 table_name = "{}.{}".format(row["table_schema"], row["table_name"])
@@ -133,7 +130,6 @@ class Databend(BaseQueryRunner):
 self._handle_run_query_error(error)

 schema = {}
-results = json_loads(results)

 for row in results["rows"]:
 table_name = "{}.{}".format(row["table_schema"], row["table_name"])
@@ -16,7 +16,6 @@ from redash.query_runner import (
 split_sql_statements,
 )
 from redash.settings import cast_int_or_default
-from redash.utils import json_dumps, json_loads

 try:
 import pyodbc
@@ -115,16 +114,13 @@ class Databricks(BaseSQLQueryRunner):
 logger.warning("Truncated result set.")
 statsd_client.incr("redash.query_runner.databricks.truncated")
 data["truncated"] = True
-json_data = json_dumps(data)
 error = None
 else:
 error = None
-json_data = json_dumps(
-{
-"columns": [{"name": "result", "type": TYPE_STRING}],
-"rows": [{"result": "No data was returned."}],
-}
-)
+data = {
+"columns": [{"name": "result", "type": TYPE_STRING}],
+"rows": [{"result": "No data was returned."}],
+}

 cursor.close()
 except pyodbc.Error as e:
@@ -132,9 +128,9 @@ class Databricks(BaseSQLQueryRunner):
 error = str(e.args[1])
 else:
 error = str(e)
-json_data = None
+data = None

-return json_data, error
+return data, error

 def get_schema(self):
 raise NotSupported()
@@ -146,8 +142,6 @@ class Databricks(BaseSQLQueryRunner):
 if error is not None:
 self._handle_run_query_error(error)

-results = json_loads(results)
-
 first_column_name = results["columns"][0]["name"]
 return [row[first_column_name] for row in results["rows"]]

@@ -11,7 +11,6 @@ from redash.query_runner import (
 JobTimeoutException,
 register,
 )
-from redash.utils import json_dumps, json_loads

 logger = logging.getLogger(__name__)

@@ -78,8 +77,6 @@ class DB2(BaseSQLQueryRunner):
 if error is not None:
 self._handle_run_query_error(error)

-results = json_loads(results)
-
 for row in results["rows"]:
 if row["TABLE_SCHEMA"] != "public":
 table_name = "{}.{}".format(row["TABLE_SCHEMA"], row["TABLE_NAME"])
@@ -130,23 +127,22 @@ class DB2(BaseSQLQueryRunner):

 data = {"columns": columns, "rows": rows}
 error = None
-json_data = json_dumps(data)
 else:
 error = "Query completed but it returned no data."
-json_data = None
+data = None
 except (select.error, OSError):
 error = "Query interrupted. Please retry."
-json_data = None
+data = None
 except ibm_db_dbi.DatabaseError as e:
 error = str(e)
-json_data = None
+data = None
 except (KeyboardInterrupt, InterruptException, JobTimeoutException):
 connection.cancel()
 raise
 finally:
 connection.close()

-return json_data, error
+return data, error


 register(DB2)
@@ -8,7 +8,6 @@ except ImportError:
 enabled = False

 from redash.query_runner import BaseQueryRunner, register
-from redash.utils import json_dumps


 def reduce_item(reduced_item, key, value):
@@ -81,7 +80,7 @@ class Dgraph(BaseQueryRunner):
 client_stub.close()

 def run_query(self, query, user):
-json_data = None
+data = None
 error = None

 try:
@@ -109,12 +108,10 @@ class Dgraph(BaseQueryRunner):

 # finally, assemble both the columns and data
 data = {"columns": columns, "rows": processed_data}
-
-json_data = json_dumps(data)
 except Exception as e:
 error = e

-return json_data, error
+return data, error

 def get_schema(self, get_stats=False):
 """Queries Dgraph for all the predicates, their types, their tokenizers, etc.
@@ -13,7 +13,6 @@ from redash.query_runner import (
 guess_type,
 register,
 )
-from redash.utils import json_dumps, json_loads

 logger = logging.getLogger(__name__)

@@ -98,9 +97,7 @@ class Drill(BaseHTTPQueryRunner):
 if error is not None:
 return None, error

-results = parse_response(response.json())
-
-return json_dumps(results), None
+return parse_response(response.json()), None

 def get_schema(self, get_stats=False):
 query = """
@@ -132,8 +129,6 @@ class Drill(BaseHTTPQueryRunner):
 if error is not None:
 self._handle_run_query_error(error)

-results = json_loads(results)
-
 schema = {}

 for row in results["rows"]:
@@ -12,7 +12,6 @@ from redash.query_runner import (
 BaseQueryRunner,
 register,
 )
-from redash.utils import json_dumps, json_loads

 TYPES_MAP = {1: TYPE_STRING, 2: TYPE_INTEGER, 3: TYPE_BOOLEAN}

@@ -59,12 +58,10 @@ class Druid(BaseQueryRunner):

 data = {"columns": columns, "rows": rows}
 error = None
-json_data = json_dumps(data)
-print(json_data)
 finally:
 connection.close()

-return json_data, error
+return data, error

 def get_schema(self, get_stats=False):
 query = """
@@ -81,7 +78,6 @@ class Druid(BaseQueryRunner):
 self._handle_run_query_error(error)

 schema = {}
-results = json_loads(results)

 for row in results["rows"]:
 table_name = "{}.{}".format(row["TABLE_SCHEMA"], row["TABLE_NAME"])
@@ -19,7 +19,6 @@ try:
 except ImportError:
 enabled = False

-from redash.utils import json_dumps

 logger = logging.getLogger(__name__)

@@ -106,18 +105,17 @@ class e6data(BaseQueryRunner):
 columns.append({"name": column_name, "type": column_type})
 rows = [dict(zip([c["name"] for c in columns], r)) for r in results]
 data = {"columns": columns, "rows": rows}
-json_data = json_dumps(data)
 error = None

 except Exception as error:
 logger.debug(error)
-json_data = None
+data = None
 finally:
 if cursor is not None:
 cursor.clear()
 cursor.close()

-return json_data, error
+return data, error

 def test_connection(self):
 self.noop_query = "SELECT 1"
@@ -16,7 +16,7 @@ from redash.query_runner import (
 JobTimeoutException,
 register,
 )
-from redash.utils import json_dumps, json_loads
+from redash.utils import json_loads

 try:
 import http.client as http_client
@@ -406,18 +406,18 @@ class Kibana(BaseElasticSearch):
 # TODO: Handle complete ElasticSearch queries (JSON based sent over HTTP POST)
 raise Exception("Advanced queries are not supported")

-json_data = json_dumps({"columns": result_columns, "rows": result_rows})
+data = {"columns": result_columns, "rows": result_rows}
 except requests.HTTPError as e:
 logger.exception(e)
 r = e.response
 error = "Failed to execute query. Return Code: {0} Reason: {1}".format(r.status_code, r.text)
-json_data = None
+data = None
 except requests.exceptions.RequestException as e:
 logger.exception(e)
 error = "Connection refused"
-json_data = None
+data = None

-return json_data, error
+return data, error


 class ElasticSearch(BaseElasticSearch):
@@ -460,20 +460,20 @@ class ElasticSearch(BaseElasticSearch):
 result_rows = []
 self._parse_results(mappings, result_fields, r.json(), result_columns, result_rows)

-json_data = json_dumps({"columns": result_columns, "rows": result_rows})
+data = {"columns": result_columns, "rows": result_rows}
 except (KeyboardInterrupt, JobTimeoutException) as e:
 logger.exception(e)
 raise
 except requests.HTTPError as e:
 logger.exception(e)
 error = "Failed to execute query. Return Code: {0} Reason: {1}".format(r.status_code, r.text)
-json_data = None
+data = None
 except requests.exceptions.RequestException as e:
 logger.exception(e)
 error = "Connection refused"
-json_data = None
+data = None

-return json_data, error
+return data, error


 register(Kibana)
@@ -10,7 +10,6 @@ from redash.query_runner import (
 BaseHTTPQueryRunner,
 register,
 )
-from redash.utils import json_dumps, json_loads

 logger = logging.getLogger(__name__)

@@ -62,11 +61,9 @@ class ElasticSearch2(BaseHTTPQueryRunner):
 query_results = response.json()
 data = self._parse_results(result_fields, query_results)
 error = None
-json_data = json_dumps(data)
-return json_data, error
+return data, error

 def _build_query(self, query: str) -> Tuple[dict, str, Optional[list]]:
-query = json_loads(query)
 index_name = query.pop("index", "")
 result_fields = query.pop("result_fields", None)
 url = "/{}/_search".format(index_name)
@@ -9,7 +9,6 @@ from redash.query_runner import (
 BaseQueryRunner,
 register,
 )
-from redash.utils import json_dumps


 def _exasol_type_mapper(val, data_type):
@@ -109,14 +108,13 @@ class Exasol(BaseQueryRunner):

 rows = [dict(zip(cnames, row)) for row in statement]
 data = {"columns": columns, "rows": rows}
-json_data = json_dumps(data)
 finally:
 if statement is not None:
 statement.close()

 connection.close()

-return json_data, error
+return data, error

 def get_schema(self, get_stats=False):
 query = """
@@ -3,7 +3,6 @@ import logging
 import yaml

 from redash.query_runner import BaseQueryRunner, NotSupported, register
-from redash.utils import json_dumps
 from redash.utils.requests_session import (
 UnacceptableAddressException,
 requests_or_advocate,
@@ -94,19 +93,18 @@ class Excel(BaseQueryRunner):
 break
 data["rows"] = df[labels].replace({np.nan: None}).to_dict(orient="records")

-json_data = json_dumps(data)
 error = None
 except KeyboardInterrupt:
 error = "Query cancelled by user."
-json_data = None
+data = None
 except UnacceptableAddressException:
 error = "Can't query private addresses."
-json_data = None
+data = None
 except Exception as e:
 error = "Error reading {0}. {1}".format(path, str(e))
-json_data = None
+data = None

-return json_data, error
+return data, error

 def get_schema(self):
 raise NotSupported()
@@ -12,7 +12,7 @@ from redash.query_runner import (
 BaseSQLQueryRunner,
 register,
 )
-from redash.utils import json_dumps, json_loads
+from redash.utils import json_loads

 logger = logging.getLogger(__name__)

@@ -180,15 +180,14 @@ class GoogleAnalytics(BaseSQLQueryRunner):
 response = api.get(**params).execute()
 data = parse_ga_response(response)
 error = None
-json_data = json_dumps(data)
 except HttpError as e:
 # Make sure we return a more readable error to the end user
 error = e._get_reason()
-json_data = None
+data = None
 else:
 error = "Wrong query format."
-json_data = None
+data = None
-return json_data, error
+return data, error


 register(GoogleAnalytics)
@@ -13,7 +13,7 @@ from redash.query_runner import (
 BaseQueryRunner,
 register,
 )
-from redash.utils import json_dumps, json_loads
+from redash.utils import json_loads

 logger = logging.getLogger(__name__)

@@ -160,9 +160,8 @@ class GoogleAnalytics4(BaseQueryRunner):
 data = parse_ga_response(raw_result)

 error = None
-json_data = json_dumps(data)

-return json_data, error
+return data, error

 def test_connection(self):
 try:
@@ -11,7 +11,7 @@ from redash.query_runner import (
 BaseSQLQueryRunner,
 register,
 )
-from redash.utils import json_dumps, json_loads
+from redash.utils import json_loads

 logger = logging.getLogger(__name__)

@@ -151,15 +151,14 @@ class GoogleSearchConsole(BaseSQLQueryRunner):
 response = api.searchanalytics().query(siteUrl=site_url, body=params).execute()
 data = parse_ga_response(response, params["dimensions"])
 error = None
-json_data = json_dumps(data)
 except HttpError as e:
 # Make sure we return a more readable error to the end user
 error = e._get_reason()
-json_data = None
+data = None
 else:
 error = "Wrong query format."
-json_data = None
+data = None
-return json_data, error
+return data, error


 register(GoogleSearchConsole)
@@ -16,7 +16,7 @@ from redash.query_runner import (
 guess_type,
 register,
 )
-from redash.utils import json_dumps, json_loads
+from redash.utils import json_loads

 logger = logging.getLogger(__name__)

@@ -257,7 +257,7 @@ class GoogleSpreadsheet(BaseQueryRunner):

 data = parse_spreadsheet(SpreadsheetWrapper(spreadsheet), worksheet_num_or_title)

-return json_dumps(data), None
+return data, None
 except gspread.SpreadsheetNotFound:
 return (
 None,
@@ -10,7 +10,6 @@ from redash.query_runner import (
 BaseQueryRunner,
 register,
 )
-from redash.utils import json_dumps

 logger = logging.getLogger(__name__)

@@ -35,8 +34,7 @@ def _transform_result(response):
 }
 )

-data = {"columns": columns, "rows": rows}
-return json_dumps(data)
+return {"columns": columns, "rows": rows}


 class Graphite(BaseQueryRunner):
@@ -12,7 +12,6 @@ from redash.query_runner import (
 JobTimeoutException,
 register,
 )
-from redash.utils import json_dumps

 logger = logging.getLogger(__name__)

@@ -139,7 +138,6 @@ class Hive(BaseSQLQueryRunner):
 rows = [dict(zip(column_names, row)) for row in cursor]

 data = {"columns": columns, "rows": rows}
-json_data = json_dumps(data)
 error = None
 except (KeyboardInterrupt, JobTimeoutException):
 if connection:
@@ -150,12 +148,12 @@ class Hive(BaseSQLQueryRunner):
 error = e.args[0].status.errorMessage
 except AttributeError:
 error = str(e)
-json_data = None
+data = None
 finally:
 if connection:
 connection.close()

-return json_data, error
+return data, error


 class HiveHttp(Hive):
@@ -12,7 +12,6 @@ from redash.query_runner import (
 JobTimeoutException,
 register,
 )
-from redash.utils import json_dumps, json_loads

 ignite_available = importlib.util.find_spec("pyignite") is not None
 gridgain_available = importlib.util.find_spec("pygridgain") is not None
@@ -81,8 +80,6 @@ class Ignite(BaseSQLQueryRunner):
 if error is not None:
 raise Exception("Failed getting schema.")

-results = json_loads(results)
-
 for row in results["rows"]:
 if row["SCHEMA_NAME"] != self.configuration.get("schema", "PUBLIC"):
 table_name = "{}.{}".format(row["SCHEMA_NAME"], row["TABLE_NAME"])
@@ -160,8 +157,8 @@ class Ignite(BaseSQLQueryRunner):
 )
 logger.debug("Ignite running query: %s", query)

-data = self._parse_results(cursor)
-json_data = json_dumps({"columns": data[0], "rows": data[1]})
+result = self._parse_results(cursor)
+data = {"columns": result[0], "rows": result[1]}
 error = None

 except (KeyboardInterrupt, JobTimeoutException):
@@ -171,7 +168,7 @@ class Ignite(BaseSQLQueryRunner):
 if connection:
 connection.close()

-return json_data, error
+return data, error


 register(Ignite)
@@ -10,7 +10,6 @@ from redash.query_runner import (
 JobTimeoutException,
 register,
 )
-from redash.utils import json_dumps

 logger = logging.getLogger(__name__)

@@ -120,14 +119,13 @@ class Impala(BaseSQLQueryRunner):
 rows = [dict(zip(column_names, row)) for row in cursor]

 data = {"columns": columns, "rows": rows}
-json_data = json_dumps(data)
 error = None
 cursor.close()
 except DatabaseError as e:
-json_data = None
+data = None
 error = str(e)
 except RPCError as e:
-json_data = None
+data = None
 error = "Metastore Error [%s]" % str(e)
 except (KeyboardInterrupt, JobTimeoutException):
 connection.cancel()
@@ -136,7 +134,7 @@ class Impala(BaseSQLQueryRunner):
 if connection:
 connection.close()

-return json_data, error
+return data, error


 register(Impala)
@@ -7,7 +7,6 @@ from redash.query_runner import (
 BaseQueryRunner,
 register,
 )
-from redash.utils import json_dumps

 logger = logging.getLogger(__name__)

@@ -64,7 +63,7 @@ def _transform_result(results):
 else:
 result_columns = [{"name": c, "type": TYPE_STRING} for c in column_names]

-return json_dumps({"columns": result_columns, "rows": result_rows})
+return {"columns": result_columns, "rows": result_rows}


 class InfluxDB(BaseQueryRunner):
@@ -13,7 +13,6 @@ from redash.query_runner import (
 BaseQueryRunner,
 register,
 )
-from redash.utils import json_dumps

 try:
 from influxdb_client import InfluxDBClient
@@ -188,7 +187,7 @@ class InfluxDBv2(BaseQueryRunner):
 2. element: An error message, if an error occured. None, if no
 error occurred.
 """
-json_data = None
+data = None
 error = None

 try:
@@ -204,14 +203,12 @@ class InfluxDBv2(BaseQueryRunner):
 tables = client.query_api().query(query)

 data = self._get_data_from_tables(tables)
-
-json_data = json_dumps(data)
 except Exception as ex:
 error = str(ex)
 finally:
 self._cleanup_cert_files(influx_kwargs)

-return json_data, error
+return data, error


 register(InfluxDBv2)
@@ -2,7 +2,7 @@ import re
 from collections import OrderedDict

 from redash.query_runner import TYPE_STRING, BaseHTTPQueryRunner, register
-from redash.utils import json_dumps, json_loads
+from redash.utils import json_loads


 # TODO: make this more general and move into __init__.py
@@ -26,7 +26,7 @@ class ResultSet:
 }

 def to_json(self):
-return json_dumps({"rows": self.rows, "columns": list(self.columns.values())})
+return {"rows": self.rows, "columns": list(self.columns.values())}

 def merge(self, set):
 self.rows = self.rows + set.rows
@@ -14,7 +14,6 @@ from redash.query_runner import (
 BaseHTTPQueryRunner,
 register,
 )
-from redash.utils import json_dumps


 class QueryParseError(Exception):
@@ -158,11 +157,10 @@ class JSON(BaseHTTPQueryRunner):
 def run_query(self, query, user):
 query = parse_query(query)

-results, error = self._run_json_query(query)
+data, error = self._run_json_query(query)
 if error is not None:
 return None, error

-data = json_dumps(results)
 if data:
 return data, None
 return None, "Got empty response from '{}'.".format(query["url"])
@@ -15,7 +15,6 @@ from redash.query_runner import (
 BaseQueryRunner,
 register,
 )
-from redash.utils import json_dumps

 logger = logging.getLogger(__name__)

@@ -102,7 +101,7 @@ class Kylin(BaseQueryRunner):
 columns = self.get_columns(data["columnMetas"])
 rows = self.get_rows(columns, data["results"])

-return json_dumps({"columns": columns, "rows": rows}), None
+return {"columns": columns, "rows": rows}, None

 def get_schema(self, get_stats=False):
 url = self.configuration["url"]
@@ -10,7 +10,6 @@ from redash.query_runner import (
 JobTimeoutException,
 register,
 )
-from redash.utils import json_dumps

 logger = logging.getLogger(__name__)

@@ -130,7 +129,6 @@ class MemSQL(BaseSQLQueryRunner):
 columns.append({"name": column, "friendly_name": column, "type": TYPE_STRING})

 data = {"columns": columns, "rows": rows}
-json_data = json_dumps(data)
 error = None
 except (KeyboardInterrupt, JobTimeoutException):
 cursor.close()
@@ -139,7 +137,7 @@ class MemSQL(BaseSQLQueryRunner):
 if cursor:
 cursor.close()

-return json_data, error
+return data, error


 register(MemSQL)
@@ -13,7 +13,7 @@ from redash.query_runner import (
 BaseQueryRunner,
 register,
 )
-from redash.utils import JSONEncoder, json_dumps, json_loads, parse_human_time
+from redash.utils import json_loads, parse_human_time

 logger = logging.getLogger(__name__)

@@ -42,15 +42,14 @@ TYPES_MAP = {
 }


-class MongoDBJSONEncoder(JSONEncoder):
-def default(self, o):
-if isinstance(o, ObjectId):
-return str(o)
-elif isinstance(o, Timestamp):
-return super(MongoDBJSONEncoder, self).default(o.as_datetime())
-elif isinstance(o, Decimal128):
-return o.to_decimal()
-return super(MongoDBJSONEncoder, self).default(o)
+def json_encoder(dec, o):
+if isinstance(o, ObjectId):
+return str(o)
+elif isinstance(o, Timestamp):
+return dec.default(o.as_datetime())
+elif isinstance(o, Decimal128):
+return o.to_decimal()
+return None


 date_regex = re.compile(r'ISODate\("(.*)"\)', re.IGNORECASE)
@@ -348,9 +347,8 @@ class MongoDB(BaseQueryRunner):

 data = {"columns": columns, "rows": rows}
 error = None
-json_data = json_dumps(data, cls=MongoDBJSONEncoder)

-return json_data, error
+return data, error


 register(MongoDB)
@@ -8,7 +8,6 @@ from redash.query_runner import (
 JobTimeoutException,
 register,
 )
-from redash.utils import json_dumps, json_loads

 logger = logging.getLogger(__name__)

@@ -87,8 +86,6 @@ class SqlServer(BaseSQLQueryRunner):
 if error is not None:
 self._handle_run_query_error(error)

-results = json_loads(results)
-
 for row in results["rows"]:
 if row["table_schema"] != self.configuration["db"]:
 table_name = "{}.{}".format(row["table_schema"], row["table_name"])
@@ -140,11 +137,10 @@ class SqlServer(BaseSQLQueryRunner):
 rows = [dict(zip((column["name"] for column in columns), row)) for row in data]

 data = {"columns": columns, "rows": rows}
-json_data = json_dumps(data)
 error = None
 else:
 error = "No data was returned."
-json_data = None
+data = None

 cursor.close()
 connection.commit()
@@ -155,7 +151,7 @@ class SqlServer(BaseSQLQueryRunner):
 except IndexError:
 # Connection errors are `args[0][1]`
 error = e.args[0][1]
-json_data = None
+data = None
 except (KeyboardInterrupt, JobTimeoutException):
 connection.cancel()
 raise
@@ -163,7 +159,7 @@ class SqlServer(BaseSQLQueryRunner):
 if connection:
 connection.close()

-return json_data, error
+return data, error


 register(SqlServer)
@@ -6,7 +6,6 @@ from redash.query_runner import (
 register,
 )
 from redash.query_runner.mssql import types_map
-from redash.utils import json_dumps, json_loads

 logger = logging.getLogger(__name__)

@@ -94,8 +93,6 @@ class SQLServerODBC(BaseSQLQueryRunner):
 if error is not None:
 self._handle_run_query_error(error)

-results = json_loads(results)
-
 for row in results["rows"]:
 if row["table_schema"] != self.configuration["db"]:
 table_name = "{}.{}".format(row["table_schema"], row["table_name"])
@@ -139,11 +136,10 @@ class SQLServerODBC(BaseSQLQueryRunner):
 rows = [dict(zip((column["name"] for column in columns), row)) for row in data]

 data = {"columns": columns, "rows": rows}
-json_data = json_dumps(data)
 error = None
 else:
 error = "No data was returned."
-json_data = None
+data = None

 cursor.close()
 except pyodbc.Error as e:
@@ -153,7 +149,7 @@ class SQLServerODBC(BaseSQLQueryRunner):
 except IndexError:
 # Connection errors are `args[0][1]`
 error = e.args[0][1]
-json_data = None
+data = None
 except (KeyboardInterrupt, JobTimeoutException):
 connection.cancel()
 raise
@@ -161,7 +157,7 @@ class SQLServerODBC(BaseSQLQueryRunner):
 if connection:
 connection.close()

-return json_data, error
+return data, error


 register(SQLServerODBC)
@@ -14,7 +14,6 @@ from redash.query_runner import (
 register,
 )
 from redash.settings import parse_boolean
-from redash.utils import json_dumps, json_loads

 try:
 import MySQLdb
@@ -161,8 +160,6 @@ class Mysql(BaseSQLQueryRunner):
 if error is not None:
 self._handle_run_query_error(error)

-results = json_loads(results)
-
 for row in results["rows"]:
 if row["table_schema"] != self.configuration["db"]:
 table_name = "{}.{}".format(row["table_schema"], row["table_name"])
@@ -194,7 +191,7 @@ class Mysql(BaseSQLQueryRunner):
 t.join()
 raise

-return r.json_data, r.error
+return r.data, r.error

 def _run_query(self, query, user, connection, r, ev):
 try:
@@ -216,17 +213,17 @@ class Mysql(BaseSQLQueryRunner):
 rows = [dict(zip((column["name"] for column in columns), row)) for row in data]

 data = {"columns": columns, "rows": rows}
-r.json_data = json_dumps(data)
+r.data = data
 r.error = None
 else:
-r.json_data = None
+r.data = None
 r.error = "No data was returned."

 cursor.close()
 except MySQLdb.Error as e:
 if cursor:
 cursor.close()
-r.json_data = None
+r.data = None
 r.error = e.args[1]
 finally:
 ev.set()
@@ -1,4 +1,3 @@
-import json
 import logging
 import traceback

@@ -150,7 +149,7 @@ class Netezza(BaseSQLQueryRunner):
 return typ

 def run_query(self, query, user):
-json_data, error = None, None
+data, error = None, None
 try:
 with self.connection.cursor() as cursor:
 cursor.execute(query)
@@ -165,10 +164,10 @@ class Netezza(BaseSQLQueryRunner):
 )
 rows = [dict(zip((column["name"] for column in columns), row)) for row in cursor]

-json_data = json.dumps({"columns": columns, "rows": rows})
+data = {"columns": columns, "rows": rows}
 except Exception:
 error = traceback.format_exc()
-return json_data, error
+return data, error


 register(Netezza)
@@ -10,7 +10,6 @@ from redash.query_runner import (
 JobTimeoutException,
 register,
 )
-from redash.utils import json_dumps, json_loads

 try:
 import oracledb
@@ -98,8 +97,6 @@ class Oracle(BaseSQLQueryRunner):
 if error is not None:
 self._handle_run_query_error(error)

-results = json_loads(results)
-
 for row in results["rows"]:
 if row["OWNER"] is not None:
 table_name = "{}.{}".format(row["OWNER"], row["TABLE_NAME"])
@@ -168,19 +165,17 @@ class Oracle(BaseSQLQueryRunner):
 rows = [dict(zip((c["name"] for c in columns), row)) for row in cursor]
 data = {"columns": columns, "rows": rows}
 error = None
-json_data = json_dumps(data)
 else:
 columns = [{"name": "Row(s) Affected", "type": "TYPE_INTEGER"}]
 rows = [{"Row(s) Affected": rows_count}]
 data = {"columns": columns, "rows": rows}
-json_data = json_dumps(data)
 connection.commit()
 except oracledb.DatabaseError as err:
 (err_args,) = err.args
 line_number = query.count("\n", 0, err_args.offset) + 1
 column_number = err_args.offset - query.rfind("\n", 0, err_args.offset) - 1
 error = "Query failed at line {}, column {}: {}".format(str(line_number), str(column_number), str(err))
-json_data = None
+data = None
 except (KeyboardInterrupt, JobTimeoutException):
 connection.cancel()
 raise
@@ -188,7 +183,7 @@ class Oracle(BaseSQLQueryRunner):
 os.environ.pop("NLS_LANG", None)
 connection.close()

-return json_data, error
+return data, error


 register(Oracle)
@@ -20,7 +20,6 @@ from redash.query_runner import (
|
|||||||
JobTimeoutException,
|
JobTimeoutException,
|
||||||
register,
|
register,
|
||||||
)
|
)
|
||||||
from redash.utils import JSONEncoder, json_dumps, json_loads
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -56,18 +55,16 @@ types_map = {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
class PostgreSQLJSONEncoder(JSONEncoder):
|
def json_encoder(dec, o):
|
||||||
def default(self, o):
|
if isinstance(o, Range):
|
||||||
if isinstance(o, Range):
|
# From: https://github.com/psycopg/psycopg2/pull/779
|
||||||
# From: https://github.com/psycopg/psycopg2/pull/779
|
if o._bounds is None:
|
||||||
if o._bounds is None:
|
return ""
|
||||||
return ""
|
|
||||||
|
|
||||||
items = [o._bounds[0], str(o._lower), ", ", str(o._upper), o._bounds[1]]
|
items = [o._bounds[0], str(o._lower), ", ", str(o._upper), o._bounds[1]]
|
||||||
|
|
||||||
return "".join(items)
|
return "".join(items)
|
||||||
|
return None
|
||||||
return super(PostgreSQLJSONEncoder, self).default(o)
|
|
||||||
|
|
||||||
|
|
||||||
def _wait(conn, timeout=None):
|
def _wait(conn, timeout=None):
|
||||||
@@ -204,8 +201,6 @@ class PostgreSQL(BaseSQLQueryRunner):
|
|||||||
if error is not None:
|
if error is not None:
|
||||||
self._handle_run_query_error(error)
|
self._handle_run_query_error(error)
|
||||||
|
|
||||||
results = json_loads(results)
|
|
||||||
|
|
||||||
build_schema(results, schema)
|
build_schema(results, schema)
|
||||||
|
|
||||||
def _get_tables(self, schema):
|
def _get_tables(self, schema):
|
||||||
@@ -282,16 +277,15 @@ class PostgreSQL(BaseSQLQueryRunner):
|
|||||||
|
|
||||||
data = {"columns": columns, "rows": rows}
|
data = {"columns": columns, "rows": rows}
|
||||||
error = None
|
error = None
|
||||||
json_data = json_dumps(data, allow_nan=False, cls=PostgreSQLJSONEncoder)
|
|
||||||
else:
|
else:
|
||||||
error = "Query completed but it returned no data."
|
error = "Query completed but it returned no data."
|
||||||
json_data = None
|
data = None
|
||||||
except (select.error, OSError):
|
except (select.error, OSError):
|
||||||
error = "Query interrupted. Please retry."
|
error = "Query interrupted. Please retry."
|
||||||
json_data = None
|
data = None
|
||||||
except psycopg2.DatabaseError as e:
|
except psycopg2.DatabaseError as e:
|
||||||
error = str(e)
|
error = str(e)
|
||||||
json_data = None
|
data = None
|
||||||
except (KeyboardInterrupt, InterruptException, JobTimeoutException):
|
except (KeyboardInterrupt, InterruptException, JobTimeoutException):
|
||||||
connection.cancel()
|
connection.cancel()
|
||||||
raise
|
raise
|
||||||
@@ -299,7 +293,7 @@ class PostgreSQL(BaseSQLQueryRunner):
|
|||||||
connection.close()
|
connection.close()
|
||||||
_cleanup_ssl_certs(self.ssl_config)
|
_cleanup_ssl_certs(self.ssl_config)
|
||||||
|
|
||||||
return json_data, error
|
return data, error
|
||||||
|
|
||||||
|
|
||||||
class Redshift(PostgreSQL):
|
class Redshift(PostgreSQL):
|
||||||
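The PostgreSQL hunk above replaces the PostgreSQLJSONEncoder subclass with a module-level json_encoder hook that flattens psycopg2 Range values and declines (returns None) for everything else. A rough standalone sketch of that shape, using an invented FakeRange in place of psycopg2.extras.Range:

class FakeRange:
    # Invented stand-in for psycopg2.extras.Range, only for this sketch.
    def __init__(self, lower, upper, bounds="[)"):
        self._lower, self._upper, self._bounds = lower, upper, bounds

def encode_range(encoder, o):
    # Same contract as the module-level json_encoder above: return a string
    # for values it understands, None so other encoders can try.
    if isinstance(o, FakeRange):
        if o._bounds is None:
            return ""
        return "".join([o._bounds[0], str(o._lower), ", ", str(o._upper), o._bounds[1]])
    return None

print(encode_range(None, FakeRange(1, 10)))   # [1, 10)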
|
|||||||
@@ -9,7 +9,6 @@ from redash.query_runner import (
|
|||||||
BaseQueryRunner,
|
BaseQueryRunner,
|
||||||
register,
|
register,
|
||||||
)
|
)
|
||||||
from redash.utils import json_dumps, json_loads
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -82,8 +81,6 @@ class Phoenix(BaseQueryRunner):
|
|||||||
if error is not None:
|
if error is not None:
|
||||||
self._handle_run_query_error(error)
|
self._handle_run_query_error(error)
|
||||||
|
|
||||||
results = json_loads(results)
|
|
||||||
|
|
||||||
for row in results["rows"]:
|
for row in results["rows"]:
|
||||||
table_name = "{}.{}".format(row["TABLE_SCHEM"], row["TABLE_NAME"])
|
table_name = "{}.{}".format(row["TABLE_SCHEM"], row["TABLE_NAME"])
|
||||||
|
|
||||||
@@ -105,17 +102,16 @@ class Phoenix(BaseQueryRunner):
|
|||||||
columns = self.fetch_columns(column_tuples)
|
columns = self.fetch_columns(column_tuples)
|
||||||
rows = [dict(zip(([column["name"] for column in columns]), r)) for i, r in enumerate(cursor.fetchall())]
|
rows = [dict(zip(([column["name"] for column in columns]), r)) for i, r in enumerate(cursor.fetchall())]
|
||||||
data = {"columns": columns, "rows": rows}
|
data = {"columns": columns, "rows": rows}
|
||||||
json_data = json_dumps(data)
|
|
||||||
error = None
|
error = None
|
||||||
cursor.close()
|
cursor.close()
|
||||||
except Error as e:
|
except Error as e:
|
||||||
json_data = None
|
data = None
|
||||||
error = "code: {}, sql state:{}, message: {}".format(e.code, e.sqlstate, str(e))
|
error = "code: {}, sql state:{}, message: {}".format(e.code, e.sqlstate, str(e))
|
||||||
finally:
|
finally:
|
||||||
if connection:
|
if connection:
|
||||||
connection.close()
|
connection.close()
|
||||||
|
|
||||||
return json_data, error
|
return data, error
|
||||||
|
|
||||||
|
|
||||||
register(Phoenix)
|
register(Phoenix)
|
||||||
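The get_schema hunks in this and the surrounding runners drop the intermediate json_loads(results) step because run_query already hands back a dict. A small sketch of the consumer side under that assumption (the key names here are illustrative, not the exact Phoenix or Presto column names):

def build_schema_from_results(results):
    # results is the dict returned by run_query; no json_loads step needed.
    schema = {}
    for row in results["rows"]:
        table_name = "{}.{}".format(row["table_schema"], row["table_name"])
        schema.setdefault(table_name, {"name": table_name, "columns": []})
        schema[table_name]["columns"].append(row["column_name"])
    return list(schema.values())

sample = {"rows": [{"table_schema": "public", "table_name": "users", "column_name": "id"}]}
print(build_schema_from_results(sample))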
|
|||||||
@@ -19,7 +19,6 @@ from redash.query_runner import (
|
|||||||
BaseQueryRunner,
|
BaseQueryRunner,
|
||||||
register,
|
register,
|
||||||
)
|
)
|
||||||
from redash.utils import json_dumps
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -92,12 +91,11 @@ class Pinot(BaseQueryRunner):
|
|||||||
|
|
||||||
data = {"columns": columns, "rows": rows}
|
data = {"columns": columns, "rows": rows}
|
||||||
error = None
|
error = None
|
||||||
json_data = json_dumps(data)
|
|
||||||
logger.debug("Pinot execute query [%s]", query)
|
logger.debug("Pinot execute query [%s]", query)
|
||||||
finally:
|
finally:
|
||||||
connection.close()
|
connection.close()
|
||||||
|
|
||||||
return json_data, error
|
return data, error
|
||||||
|
|
||||||
def get_schema(self, get_stats=False):
|
def get_schema(self, get_stats=False):
|
||||||
schema = {}
|
schema = {}
|
||||||
|
|||||||
@@ -11,7 +11,6 @@ from redash.query_runner import (
|
|||||||
JobTimeoutException,
|
JobTimeoutException,
|
||||||
register,
|
register,
|
||||||
)
|
)
|
||||||
from redash.utils import json_dumps, json_loads
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -89,8 +88,6 @@ class Presto(BaseQueryRunner):
|
|||||||
if error is not None:
|
if error is not None:
|
||||||
self._handle_run_query_error(error)
|
self._handle_run_query_error(error)
|
||||||
|
|
||||||
results = json_loads(results)
|
|
||||||
|
|
||||||
for row in results["rows"]:
|
for row in results["rows"]:
|
||||||
table_name = "{}.{}".format(row["table_schema"], row["table_name"])
|
table_name = "{}.{}".format(row["table_schema"], row["table_name"])
|
||||||
|
|
||||||
@@ -120,10 +117,9 @@ class Presto(BaseQueryRunner):
|
|||||||
columns = self.fetch_columns(column_tuples)
|
columns = self.fetch_columns(column_tuples)
|
||||||
rows = [dict(zip(([column["name"] for column in columns]), r)) for i, r in enumerate(cursor.fetchall())]
|
rows = [dict(zip(([column["name"] for column in columns]), r)) for i, r in enumerate(cursor.fetchall())]
|
||||||
data = {"columns": columns, "rows": rows}
|
data = {"columns": columns, "rows": rows}
|
||||||
json_data = json_dumps(data)
|
|
||||||
error = None
|
error = None
|
||||||
except DatabaseError as db:
|
except DatabaseError as db:
|
||||||
json_data = None
|
data = None
|
||||||
default_message = "Unspecified DatabaseError: {0}".format(str(db))
|
default_message = "Unspecified DatabaseError: {0}".format(str(db))
|
||||||
if isinstance(db.args[0], dict):
|
if isinstance(db.args[0], dict):
|
||||||
message = db.args[0].get("failureInfo", {"message", None}).get("message")
|
message = db.args[0].get("failureInfo", {"message", None}).get("message")
|
||||||
@@ -134,7 +130,7 @@ class Presto(BaseQueryRunner):
|
|||||||
cursor.cancel()
|
cursor.cancel()
|
||||||
raise
|
raise
|
||||||
|
|
||||||
return json_data, error
|
return data, error
|
||||||
|
|
||||||
|
|
||||||
register(Presto)
|
register(Presto)
|
||||||
|
|||||||
@@ -14,7 +14,6 @@ from redash.query_runner import (
|
|||||||
BaseQueryRunner,
|
BaseQueryRunner,
|
||||||
register,
|
register,
|
||||||
)
|
)
|
||||||
from redash.utils import json_dumps
|
|
||||||
|
|
||||||
|
|
||||||
def get_instant_rows(metrics_data):
|
def get_instant_rows(metrics_data):
|
||||||
@@ -247,7 +246,7 @@ class Prometheus(BaseQueryRunner):
|
|||||||
else:
|
else:
|
||||||
rows = get_instant_rows(metrics)
|
rows = get_instant_rows(metrics)
|
||||||
|
|
||||||
json_data = json_dumps({"rows": rows, "columns": columns})
|
data = {"rows": rows, "columns": columns}
|
||||||
|
|
||||||
except requests.RequestException as e:
|
except requests.RequestException as e:
|
||||||
return None, str(e)
|
return None, str(e)
|
||||||
@@ -256,7 +255,7 @@ class Prometheus(BaseQueryRunner):
|
|||||||
finally:
|
finally:
|
||||||
self._cleanup_cert_files(promehteus_kwargs)
|
self._cleanup_cert_files(promehteus_kwargs)
|
||||||
|
|
||||||
return json_data, error
|
return data, error
|
||||||
|
|
||||||
|
|
||||||
register(Prometheus)
|
register(Prometheus)
|
||||||
|
|||||||
@@ -23,7 +23,6 @@ from redash.query_runner import (
|
|||||||
BaseQueryRunner,
|
BaseQueryRunner,
|
||||||
register,
|
register,
|
||||||
)
|
)
|
||||||
from redash.utils import json_dumps, json_loads
|
|
||||||
from redash.utils.pandas import pandas_installed
|
from redash.utils.pandas import pandas_installed
|
||||||
|
|
||||||
if pandas_installed:
|
if pandas_installed:
|
||||||
@@ -228,7 +227,7 @@ class Python(BaseQueryRunner):
|
|||||||
raise Exception(error)
|
raise Exception(error)
|
||||||
|
|
||||||
# TODO: allow avoiding the JSON dumps/loads in same process
|
# TODO: allow avoiding the JSON dumps/loads in same process
|
||||||
query_result = json_loads(data)
|
query_result = data
|
||||||
|
|
||||||
if result_type == "dataframe" and pandas_installed:
|
if result_type == "dataframe" and pandas_installed:
|
||||||
return pd.DataFrame(query_result["rows"])
|
return pd.DataFrame(query_result["rows"])
|
||||||
@@ -357,15 +356,14 @@ class Python(BaseQueryRunner):
|
|||||||
|
|
||||||
exec(code, restricted_globals, self._script_locals)
|
exec(code, restricted_globals, self._script_locals)
|
||||||
|
|
||||||
result = self._script_locals["result"]
|
data = self._script_locals["result"]
|
||||||
self.validate_result(result)
|
self.validate_result(data)
|
||||||
result["log"] = self._custom_print.lines
|
data["log"] = self._custom_print.lines
|
||||||
json_data = json_dumps(result)
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
error = str(type(e)) + " " + str(e)
|
error = str(type(e)) + " " + str(e)
|
||||||
json_data = None
|
data = None
|
||||||
|
|
||||||
return json_data, error
|
return data, error
|
||||||
|
|
||||||
|
|
||||||
register(Python)
|
register(Python)
|
||||||
|
|||||||
@@ -10,7 +10,6 @@ from redash.query_runner import (
|
|||||||
JobTimeoutException,
|
JobTimeoutException,
|
||||||
register,
|
register,
|
||||||
)
|
)
|
||||||
from redash.utils import json_dumps
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import qds_sdk # noqa: F401
|
import qds_sdk # noqa: F401
|
||||||
@@ -125,13 +124,13 @@ class Qubole(BaseQueryRunner):
|
|||||||
columns = self.fetch_columns([(i, TYPE_STRING) for i in data.pop(0).split("\t")])
|
columns = self.fetch_columns([(i, TYPE_STRING) for i in data.pop(0).split("\t")])
|
||||||
rows = [dict(zip((column["name"] for column in columns), row.split("\t"))) for row in data]
|
rows = [dict(zip((column["name"] for column in columns), row.split("\t"))) for row in data]
|
||||||
|
|
||||||
json_data = json_dumps({"columns": columns, "rows": rows})
|
data = {"columns": columns, "rows": rows}
|
||||||
except (KeyboardInterrupt, JobTimeoutException):
|
except (KeyboardInterrupt, JobTimeoutException):
|
||||||
logging.info("Sending KILL signal to Qubole Command Id: %s", cmd.id)
|
logging.info("Sending KILL signal to Qubole Command Id: %s", cmd.id)
|
||||||
cmd.cancel()
|
cmd.cancel()
|
||||||
raise
|
raise
|
||||||
|
|
||||||
return json_data, error
|
return data, error
|
||||||
|
|
||||||
def get_schema(self, get_stats=False):
|
def get_schema(self, get_stats=False):
|
||||||
schemas = {}
|
schemas = {}
|
||||||
|
|||||||
@@ -13,7 +13,7 @@ from redash.query_runner import (
|
|||||||
guess_type,
|
guess_type,
|
||||||
register,
|
register,
|
||||||
)
|
)
|
||||||
from redash.utils import json_dumps, json_loads
|
from redash.utils import json_dumps
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -76,8 +76,6 @@ def get_query_results(user, query_id, bring_from_cache, params=None):
|
|||||||
results, error = query.data_source.query_runner.run_query(query_text, user)
|
results, error = query.data_source.query_runner.run_query(query_text, user)
|
||||||
if error:
|
if error:
|
||||||
raise Exception("Failed loading results for query id {}.".format(query.id))
|
raise Exception("Failed loading results for query id {}.".format(query.id))
|
||||||
else:
|
|
||||||
results = json_loads(results)
|
|
||||||
|
|
||||||
return results
|
return results
|
||||||
|
|
||||||
@@ -194,16 +192,15 @@ class Results(BaseQueryRunner):
|
|||||||
|
|
||||||
data = {"columns": columns, "rows": rows}
|
data = {"columns": columns, "rows": rows}
|
||||||
error = None
|
error = None
|
||||||
json_data = json_dumps(data)
|
|
||||||
else:
|
else:
|
||||||
error = "Query completed but it returned no data."
|
error = "Query completed but it returned no data."
|
||||||
json_data = None
|
data = None
|
||||||
except (KeyboardInterrupt, JobTimeoutException):
|
except (KeyboardInterrupt, JobTimeoutException):
|
||||||
connection.cancel()
|
connection.cancel()
|
||||||
raise
|
raise
|
||||||
finally:
|
finally:
|
||||||
connection.close()
|
connection.close()
|
||||||
return json_data, error
|
return data, error
|
||||||
|
|
||||||
|
|
||||||
register(Results)
|
register(Results)
|
||||||
|
|||||||
@@ -8,7 +8,6 @@ from redash.query_runner import (
|
|||||||
BaseSQLQueryRunner,
|
BaseSQLQueryRunner,
|
||||||
register,
|
register,
|
||||||
)
|
)
|
||||||
from redash.utils import json_dumps
|
|
||||||
|
|
||||||
|
|
||||||
def _get_type(value):
|
def _get_type(value):
|
||||||
@@ -121,7 +120,7 @@ class Rockset(BaseSQLQueryRunner):
|
|||||||
columns = []
|
columns = []
|
||||||
for k in rows[0]:
|
for k in rows[0]:
|
||||||
columns.append({"name": k, "friendly_name": k, "type": _get_type(rows[0][k])})
|
columns.append({"name": k, "friendly_name": k, "type": _get_type(rows[0][k])})
|
||||||
data = json_dumps({"columns": columns, "rows": rows})
|
data = {"columns": columns, "rows": rows}
|
||||||
return data, None
|
return data, None
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -12,7 +12,6 @@ from redash.query_runner import (
|
|||||||
BaseQueryRunner,
|
BaseQueryRunner,
|
||||||
register,
|
register,
|
||||||
)
|
)
|
||||||
from redash.utils import json_dumps
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -166,11 +165,10 @@ class Salesforce(BaseQueryRunner):
|
|||||||
columns = self.fetch_columns(cols)
|
columns = self.fetch_columns(cols)
|
||||||
error = None
|
error = None
|
||||||
data = {"columns": columns, "rows": rows}
|
data = {"columns": columns, "rows": rows}
|
||||||
json_data = json_dumps(data)
|
|
||||||
except SalesforceError as err:
|
except SalesforceError as err:
|
||||||
error = err.content
|
error = err.content
|
||||||
json_data = None
|
data = None
|
||||||
return json_data, error
|
return data, error
|
||||||
|
|
||||||
def get_schema(self, get_stats=False):
|
def get_schema(self, get_stats=False):
|
||||||
sf = self._get_sf()
|
sf = self._get_sf()
|
||||||
|
|||||||
@@ -17,7 +17,6 @@ from redash.query_runner import (
|
|||||||
BaseSQLQueryRunner,
|
BaseSQLQueryRunner,
|
||||||
register,
|
register,
|
||||||
)
|
)
|
||||||
from redash.utils import json_dumps
|
|
||||||
|
|
||||||
TYPES_MAP = {
|
TYPES_MAP = {
|
||||||
0: TYPE_INTEGER,
|
0: TYPE_INTEGER,
|
||||||
@@ -135,12 +134,11 @@ class Snowflake(BaseSQLQueryRunner):
|
|||||||
|
|
||||||
data = self._parse_results(cursor)
|
data = self._parse_results(cursor)
|
||||||
error = None
|
error = None
|
||||||
json_data = json_dumps(data)
|
|
||||||
finally:
|
finally:
|
||||||
cursor.close()
|
cursor.close()
|
||||||
connection.close()
|
connection.close()
|
||||||
|
|
||||||
return json_data, error
|
return data, error
|
||||||
|
|
||||||
def _run_query_without_warehouse(self, query):
|
def _run_query_without_warehouse(self, query):
|
||||||
connection = self._get_connection()
|
connection = self._get_connection()
|
||||||
|
|||||||
@@ -8,7 +8,6 @@ import logging
|
|||||||
from os import environ
|
from os import environ
|
||||||
|
|
||||||
from redash.query_runner import BaseQueryRunner
|
from redash.query_runner import BaseQueryRunner
|
||||||
from redash.utils import json_dumps, json_loads
|
|
||||||
|
|
||||||
from . import register
|
from . import register
|
||||||
|
|
||||||
@@ -83,7 +82,7 @@ class SPARQLEndpointQueryRunner(BaseQueryRunner):
|
|||||||
logger.info("results are: {}".format(results))
|
logger.info("results are: {}".format(results))
|
||||||
# Not sure why we do not use the json package here but all other
|
# Not sure why we do not use the json package here but all other
|
||||||
# query runners do it the same way :-)
|
# query runners do it the same way :-)
|
||||||
sparql_results = json_loads(results)
|
sparql_results = results
|
||||||
# transform all bindings to redash rows
|
# transform all bindings to redash rows
|
||||||
rows = []
|
rows = []
|
||||||
for sparql_row in sparql_results["results"]["bindings"]:
|
for sparql_row in sparql_results["results"]["bindings"]:
|
||||||
@@ -101,7 +100,7 @@ class SPARQLEndpointQueryRunner(BaseQueryRunner):
|
|||||||
columns.append({"name": var, "friendly_name": var, "type": "string"})
|
columns.append({"name": var, "friendly_name": var, "type": "string"})
|
||||||
# Not sure why we do not use the json package here but all other
|
# Not sure why we do not use the json package here but all other
|
||||||
# query runners do it the same way :-)
|
# query runners do it the same way :-)
|
||||||
return json_dumps({"columns": columns, "rows": rows})
|
return {"columns": columns, "rows": rows}
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def name(cls):
|
def name(cls):
|
||||||
|
|||||||
@@ -6,7 +6,6 @@ from redash.query_runner import (
|
|||||||
JobTimeoutException,
|
JobTimeoutException,
|
||||||
register,
|
register,
|
||||||
)
|
)
|
||||||
from redash.utils import json_dumps, json_loads
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -40,8 +39,6 @@ class Sqlite(BaseSQLQueryRunner):
|
|||||||
if error is not None:
|
if error is not None:
|
||||||
raise Exception("Failed getting schema.")
|
raise Exception("Failed getting schema.")
|
||||||
|
|
||||||
results = json_loads(results)
|
|
||||||
|
|
||||||
for row in results["rows"]:
|
for row in results["rows"]:
|
||||||
table_name = row["tbl_name"]
|
table_name = row["tbl_name"]
|
||||||
schema[table_name] = {"name": table_name, "columns": []}
|
schema[table_name] = {"name": table_name, "columns": []}
|
||||||
@@ -49,7 +46,6 @@ class Sqlite(BaseSQLQueryRunner):
|
|||||||
if error is not None:
|
if error is not None:
|
||||||
self._handle_run_query_error(error)
|
self._handle_run_query_error(error)
|
||||||
|
|
||||||
results_table = json_loads(results_table)
|
|
||||||
for row_column in results_table["rows"]:
|
for row_column in results_table["rows"]:
|
||||||
schema[table_name]["columns"].append(row_column["name"])
|
schema[table_name]["columns"].append(row_column["name"])
|
||||||
|
|
||||||
@@ -69,16 +65,15 @@ class Sqlite(BaseSQLQueryRunner):
|
|||||||
|
|
||||||
data = {"columns": columns, "rows": rows}
|
data = {"columns": columns, "rows": rows}
|
||||||
error = None
|
error = None
|
||||||
json_data = json_dumps(data)
|
|
||||||
else:
|
else:
|
||||||
error = "Query completed but it returned no data."
|
error = "Query completed but it returned no data."
|
||||||
json_data = None
|
data = None
|
||||||
except (KeyboardInterrupt, JobTimeoutException):
|
except (KeyboardInterrupt, JobTimeoutException):
|
||||||
connection.cancel()
|
connection.cancel()
|
||||||
raise
|
raise
|
||||||
finally:
|
finally:
|
||||||
connection.close()
|
connection.close()
|
||||||
return json_data, error
|
return data, error
|
||||||
|
|
||||||
|
|
||||||
register(Sqlite)
|
register(Sqlite)
|
||||||
|
|||||||
@@ -9,7 +9,6 @@ from redash.query_runner import (
|
|||||||
BaseQueryRunner,
|
BaseQueryRunner,
|
||||||
register,
|
register,
|
||||||
)
|
)
|
||||||
from redash.utils import json_dumps
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -112,15 +111,14 @@ class TreasureData(BaseQueryRunner):
|
|||||||
else:
|
else:
|
||||||
rows = [dict(zip(([column["name"] for column in columns]), r)) for r in cursor.fetchall()]
|
rows = [dict(zip(([column["name"] for column in columns]), r)) for r in cursor.fetchall()]
|
||||||
data = {"columns": columns, "rows": rows}
|
data = {"columns": columns, "rows": rows}
|
||||||
json_data = json_dumps(data)
|
|
||||||
error = None
|
error = None
|
||||||
except errors.InternalError as e:
|
except errors.InternalError as e:
|
||||||
json_data = None
|
data = None
|
||||||
error = "%s: %s" % (
|
error = "%s: %s" % (
|
||||||
str(e),
|
str(e),
|
||||||
cursor.show_job().get("debug", {}).get("stderr", "No stderr message in the response"),
|
cursor.show_job().get("debug", {}).get("stderr", "No stderr message in the response"),
|
||||||
)
|
)
|
||||||
return json_data, error
|
return data, error
|
||||||
|
|
||||||
|
|
||||||
register(TreasureData)
|
register(TreasureData)
|
||||||
|
|||||||
@@ -12,7 +12,6 @@ from redash.query_runner import (
|
|||||||
JobTimeoutException,
|
JobTimeoutException,
|
||||||
register,
|
register,
|
||||||
)
|
)
|
||||||
from redash.utils import json_dumps, json_loads
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -100,8 +99,6 @@ class Trino(BaseQueryRunner):
|
|||||||
if error is not None:
|
if error is not None:
|
||||||
self._handle_run_query_error(error)
|
self._handle_run_query_error(error)
|
||||||
|
|
||||||
results = json_loads(results)
|
|
||||||
|
|
||||||
for row in results["rows"]:
|
for row in results["rows"]:
|
||||||
table_name = f'{catalog}.{row["table_schema"]}.{row["table_name"]}'
|
table_name = f'{catalog}.{row["table_schema"]}.{row["table_name"]}'
|
||||||
|
|
||||||
@@ -122,8 +119,6 @@ class Trino(BaseQueryRunner):
|
|||||||
if error is not None:
|
if error is not None:
|
||||||
self._handle_run_query_error(error)
|
self._handle_run_query_error(error)
|
||||||
|
|
||||||
results = json_loads(results)
|
|
||||||
|
|
||||||
catalogs = []
|
catalogs = []
|
||||||
for row in results["rows"]:
|
for row in results["rows"]:
|
||||||
catalog = row["Catalog"]
|
catalog = row["Catalog"]
|
||||||
@@ -158,10 +153,9 @@ class Trino(BaseQueryRunner):
|
|||||||
columns = self.fetch_columns([(c[0], TRINO_TYPES_MAPPING.get(c[1], None)) for c in description])
|
columns = self.fetch_columns([(c[0], TRINO_TYPES_MAPPING.get(c[1], None)) for c in description])
|
||||||
rows = [dict(zip([c["name"] for c in columns], r)) for r in results]
|
rows = [dict(zip([c["name"] for c in columns], r)) for r in results]
|
||||||
data = {"columns": columns, "rows": rows}
|
data = {"columns": columns, "rows": rows}
|
||||||
json_data = json_dumps(data)
|
|
||||||
error = None
|
error = None
|
||||||
except DatabaseError as db:
|
except DatabaseError as db:
|
||||||
json_data = None
|
data = None
|
||||||
default_message = "Unspecified DatabaseError: {0}".format(str(db))
|
default_message = "Unspecified DatabaseError: {0}".format(str(db))
|
||||||
if isinstance(db.args[0], dict):
|
if isinstance(db.args[0], dict):
|
||||||
message = db.args[0].get("failureInfo", {"message", None}).get("message")
|
message = db.args[0].get("failureInfo", {"message", None}).get("message")
|
||||||
@@ -172,7 +166,7 @@ class Trino(BaseQueryRunner):
|
|||||||
cursor.cancel()
|
cursor.cancel()
|
||||||
raise
|
raise
|
||||||
|
|
||||||
return json_data, error
|
return data, error
|
||||||
|
|
||||||
|
|
||||||
register(Trino)
|
register(Trino)
|
||||||
|
|||||||
@@ -5,7 +5,7 @@ import jwt
|
|||||||
import requests
|
import requests
|
||||||
|
|
||||||
from redash.query_runner import BaseSQLQueryRunner, register
|
from redash.query_runner import BaseSQLQueryRunner, register
|
||||||
from redash.utils import json_dumps, json_loads
|
from redash.utils import json_loads
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -58,8 +58,7 @@ class Uptycs(BaseSQLQueryRunner):
|
|||||||
if "items" in data:
|
if "items" in data:
|
||||||
rows = data["items"]
|
rows = data["items"]
|
||||||
|
|
||||||
redash_json_data = {"columns": transformed_columns, "rows": rows}
|
return {"columns": transformed_columns, "rows": rows}
|
||||||
return redash_json_data
|
|
||||||
|
|
||||||
def api_call(self, sql):
|
def api_call(self, sql):
|
||||||
# JWT encoded header
|
# JWT encoded header
|
||||||
@@ -86,22 +85,21 @@ class Uptycs(BaseSQLQueryRunner):
|
|||||||
else:
|
else:
|
||||||
error = "status_code " + str(response.status_code) + "\n"
|
error = "status_code " + str(response.status_code) + "\n"
|
||||||
error = error + "failed to connect"
|
error = error + "failed to connect"
|
||||||
json_data = {}
|
data = {}
|
||||||
return json_data, error
|
return data, error
|
||||||
# if we get the right status code, call transformed_to_redash_json
|
# if we get the right status code, call transformed_to_redash_json
|
||||||
json_data = self.transformed_to_redash_json(response_output)
|
data = self.transformed_to_redash_json(response_output)
|
||||||
error = None
|
error = None
|
||||||
# if we got error from Uptycs include error information
|
# if we got error from Uptycs include error information
|
||||||
if "error" in response_output:
|
if "error" in response_output:
|
||||||
error = response_output["error"]["message"]["brief"]
|
error = response_output["error"]["message"]["brief"]
|
||||||
error = error + "\n" + response_output["error"]["message"]["detail"]
|
error = error + "\n" + response_output["error"]["message"]["detail"]
|
||||||
return json_data, error
|
return data, error
|
||||||
|
|
||||||
def run_query(self, query, user):
|
def run_query(self, query, user):
|
||||||
data, error = self.api_call(query)
|
data, error = self.api_call(query)
|
||||||
json_data = json_dumps(data)
|
logger.debug("%s", data)
|
||||||
logger.debug("%s", json_data)
|
return data, error
|
||||||
return json_data, error
|
|
||||||
|
|
||||||
def get_schema(self, get_stats=False):
|
def get_schema(self, get_stats=False):
|
||||||
header = self.generate_header(self.configuration.get("key"), self.configuration.get("secret"))
|
header = self.generate_header(self.configuration.get("key"), self.configuration.get("secret"))
|
||||||
|
|||||||
@@ -10,7 +10,6 @@ from redash.query_runner import (
|
|||||||
BaseSQLQueryRunner,
|
BaseSQLQueryRunner,
|
||||||
register,
|
register,
|
||||||
)
|
)
|
||||||
from redash.utils import json_dumps, json_loads
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -85,8 +84,6 @@ class Vertica(BaseSQLQueryRunner):
|
|||||||
if error is not None:
|
if error is not None:
|
||||||
self._handle_run_query_error(error)
|
self._handle_run_query_error(error)
|
||||||
|
|
||||||
results = json_loads(results)
|
|
||||||
|
|
||||||
for row in results["rows"]:
|
for row in results["rows"]:
|
||||||
table_name = "{}.{}".format(row["table_schema"], row["table_name"])
|
table_name = "{}.{}".format(row["table_schema"], row["table_name"])
|
||||||
|
|
||||||
@@ -101,9 +98,9 @@ class Vertica(BaseSQLQueryRunner):
|
|||||||
import vertica_python
|
import vertica_python
|
||||||
|
|
||||||
if query == "":
|
if query == "":
|
||||||
json_data = None
|
data = None
|
||||||
error = "Query is empty"
|
error = "Query is empty"
|
||||||
return json_data, error
|
return data, error
|
||||||
|
|
||||||
connection = None
|
connection = None
|
||||||
try:
|
try:
|
||||||
@@ -131,10 +128,9 @@ class Vertica(BaseSQLQueryRunner):
|
|||||||
rows = [dict(zip(([c["name"] for c in columns]), r)) for r in cursor.fetchall()]
|
rows = [dict(zip(([c["name"] for c in columns]), r)) for r in cursor.fetchall()]
|
||||||
|
|
||||||
data = {"columns": columns, "rows": rows}
|
data = {"columns": columns, "rows": rows}
|
||||||
json_data = json_dumps(data)
|
|
||||||
error = None
|
error = None
|
||||||
else:
|
else:
|
||||||
json_data = None
|
data = None
|
||||||
error = "No data was returned."
|
error = "No data was returned."
|
||||||
|
|
||||||
cursor.close()
|
cursor.close()
|
||||||
@@ -142,7 +138,7 @@ class Vertica(BaseSQLQueryRunner):
|
|||||||
if connection:
|
if connection:
|
||||||
connection.close()
|
connection.close()
|
||||||
|
|
||||||
return json_data, error
|
return data, error
|
||||||
|
|
||||||
|
|
||||||
register(Vertica)
|
register(Vertica)
|
||||||
|
|||||||
@@ -5,7 +5,6 @@ import requests
|
|||||||
import yaml
|
import yaml
|
||||||
|
|
||||||
from redash.query_runner import BaseSQLQueryRunner, register
|
from redash.query_runner import BaseSQLQueryRunner, register
|
||||||
from redash.utils import json_dumps
|
|
||||||
from redash.utils.pandas import pandas_installed
|
from redash.utils.pandas import pandas_installed
|
||||||
|
|
||||||
openpyxl_installed = find_spec("openpyxl")
|
openpyxl_installed = find_spec("openpyxl")
|
||||||
@@ -157,7 +156,7 @@ class YandexDisk(BaseSQLQueryRunner):
|
|||||||
new_df = pd.concat(new_df, ignore_index=True)
|
new_df = pd.concat(new_df, ignore_index=True)
|
||||||
df = new_df.copy()
|
df = new_df.copy()
|
||||||
|
|
||||||
data = json_dumps(pandas_to_result(df))
|
data = pandas_to_result(df)
|
||||||
error = None
|
error = None
|
||||||
|
|
||||||
return data, error
|
return data, error
|
||||||
|
|||||||
@@ -13,7 +13,6 @@ from redash.query_runner import (
|
|||||||
BaseSQLQueryRunner,
|
BaseSQLQueryRunner,
|
||||||
register,
|
register,
|
||||||
)
|
)
|
||||||
from redash.utils import json_dumps
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -168,7 +167,7 @@ class YandexMetrica(BaseSQLQueryRunner):
|
|||||||
return data, error
|
return data, error
|
||||||
|
|
||||||
try:
|
try:
|
||||||
data = json_dumps(parse_ym_response(self._send_query(**params)))
|
data = parse_ym_response(self._send_query(**params))
|
||||||
error = None
|
error = None
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logging.exception(e)
|
logging.exception(e)
|
||||||
|
|||||||
@@ -16,14 +16,13 @@ from redash.serializers.query_result import (
|
|||||||
serialize_query_result_to_dsv,
|
serialize_query_result_to_dsv,
|
||||||
serialize_query_result_to_xlsx,
|
serialize_query_result_to_xlsx,
|
||||||
)
|
)
|
||||||
from redash.utils import json_loads
|
|
||||||
|
|
||||||
|
|
||||||
def public_widget(widget):
|
def public_widget(widget):
|
||||||
res = {
|
res = {
|
||||||
"id": widget.id,
|
"id": widget.id,
|
||||||
"width": widget.width,
|
"width": widget.width,
|
||||||
"options": json_loads(widget.options),
|
"options": widget.options,
|
||||||
"text": widget.text,
|
"text": widget.text,
|
||||||
"updated_at": widget.updated_at,
|
"updated_at": widget.updated_at,
|
||||||
"created_at": widget.created_at,
|
"created_at": widget.created_at,
|
||||||
@@ -35,7 +34,7 @@ def public_widget(widget):
|
|||||||
"type": v.type,
|
"type": v.type,
|
||||||
"name": v.name,
|
"name": v.name,
|
||||||
"description": v.description,
|
"description": v.description,
|
||||||
"options": json_loads(v.options),
|
"options": v.options,
|
||||||
"updated_at": v.updated_at,
|
"updated_at": v.updated_at,
|
||||||
"created_at": v.created_at,
|
"created_at": v.created_at,
|
||||||
"query": {
|
"query": {
|
||||||
@@ -146,7 +145,7 @@ def serialize_visualization(object, with_query=True):
|
|||||||
"type": object.type,
|
"type": object.type,
|
||||||
"name": object.name,
|
"name": object.name,
|
||||||
"description": object.description,
|
"description": object.description,
|
||||||
"options": json_loads(object.options),
|
"options": object.options,
|
||||||
"updated_at": object.updated_at,
|
"updated_at": object.updated_at,
|
||||||
"created_at": object.created_at,
|
"created_at": object.created_at,
|
||||||
}
|
}
|
||||||
@@ -161,7 +160,7 @@ def serialize_widget(object):
|
|||||||
d = {
|
d = {
|
||||||
"id": object.id,
|
"id": object.id,
|
||||||
"width": object.width,
|
"width": object.width,
|
||||||
"options": json_loads(object.options),
|
"options": object.options,
|
||||||
"dashboard_id": object.dashboard_id,
|
"dashboard_id": object.dashboard_id,
|
||||||
"text": object.text,
|
"text": object.text,
|
||||||
"updated_at": object.updated_at,
|
"updated_at": object.updated_at,
|
||||||
@@ -197,7 +196,7 @@ def serialize_alert(alert, full=True):
|
|||||||
|
|
||||||
|
|
||||||
def serialize_dashboard(obj, with_widgets=False, user=None, with_favorite_state=True):
|
def serialize_dashboard(obj, with_widgets=False, user=None, with_favorite_state=True):
|
||||||
layout = json_loads(obj.layout)
|
layout = obj.layout
|
||||||
|
|
||||||
widgets = []
|
widgets = []
|
||||||
|
|
||||||
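With options and layout stored as jsonb, the serializer hunks above simply pass the ORM attributes through instead of json_loads-ing text columns. A toy sketch of the new shape — FakeWidget is an invented stand-in, not a Redash model:

class FakeWidget:
    # Invented stand-in: jsonb columns already arrive as Python objects.
    id = 7
    width = 1
    options = {"isHidden": False}

def serialize_widget_sketch(widget):
    return {"id": widget.id, "width": widget.width, "options": widget.options}

print(serialize_widget_sketch(FakeWidget()))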
|
|||||||
@@ -9,6 +9,7 @@ import json
|
|||||||
import os
|
import os
|
||||||
import random
|
import random
|
||||||
import re
|
import re
|
||||||
|
import sys
|
||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
import pystache
|
import pystache
|
||||||
@@ -69,11 +70,21 @@ def generate_token(length):
|
|||||||
return "".join(rand.choice(chars) for x in range(length))
|
return "".join(rand.choice(chars) for x in range(length))
|
||||||
|
|
||||||
|
|
||||||
|
json_encoders = [m.custom_json_encoder for m in sys.modules if hasattr(m, "custom_json_encoder")]
|
||||||
|
|
||||||
|
|
||||||
class JSONEncoder(json.JSONEncoder):
|
class JSONEncoder(json.JSONEncoder):
|
||||||
"""Adapter for `json.dumps`."""
|
"""Adapter for `json.dumps`."""
|
||||||
|
|
||||||
|
def __init__(self, **kwargs):
|
||||||
|
self.encoders = json_encoders
|
||||||
|
super().__init__(**kwargs)
|
||||||
|
|
||||||
def default(self, o):
|
def default(self, o):
|
||||||
# Some SQLAlchemy collections are lazy.
|
for encoder in self.encoders:
|
||||||
|
result = encoder(self, o)
|
||||||
|
if result:
|
||||||
|
return result
|
||||||
if isinstance(o, Query):
|
if isinstance(o, Query):
|
||||||
result = list(o)
|
result = list(o)
|
||||||
elif isinstance(o, decimal.Decimal):
|
elif isinstance(o, decimal.Decimal):
|
||||||
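The utils hunk above turns JSONEncoder into a pluggable encoder: modules can expose a custom_json_encoder hook, and default() tries each registered hook before falling back. A self-contained sketch of that composition pattern with a hard-coded encoder list (the real class discovers hooks via sys.modules):

import json

def set_encoder(encoder, o):
    # Returns None for anything it does not recognise, like the hooks above.
    if isinstance(o, set):
        return sorted(o)
    return None

class PluggableEncoder(json.JSONEncoder):
    # Sketch of the pattern only; not the actual redash.utils.JSONEncoder.
    encoders = [set_encoder]

    def default(self, o):
        for encode in self.encoders:
            result = encode(self, o)
            if result is not None:
                return result
        return super().default(o)

print(json.dumps({"tags": {"b", "a"}}, cls=PluggableEncoder))   # {"tags": ["a", "b"]}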
|
|||||||
@@ -70,7 +70,7 @@ dashboard_factory = ModelFactory(
|
|||||||
redash.models.Dashboard,
|
redash.models.Dashboard,
|
||||||
name="test",
|
name="test",
|
||||||
user=user_factory.create,
|
user=user_factory.create,
|
||||||
layout="[]",
|
layout=[],
|
||||||
is_draft=False,
|
is_draft=False,
|
||||||
org=1,
|
org=1,
|
||||||
)
|
)
|
||||||
@@ -122,7 +122,7 @@ alert_factory = ModelFactory(
|
|||||||
|
|
||||||
query_result_factory = ModelFactory(
|
query_result_factory = ModelFactory(
|
||||||
redash.models.QueryResult,
|
redash.models.QueryResult,
|
||||||
data='{"columns":{}, "rows":[]}',
|
data={"columns": {}, "rows": []},
|
||||||
runtime=1,
|
runtime=1,
|
||||||
retrieved_at=utcnow,
|
retrieved_at=utcnow,
|
||||||
query_text="SELECT 1",
|
query_text="SELECT 1",
|
||||||
@@ -137,13 +137,13 @@ visualization_factory = ModelFactory(
|
|||||||
query_rel=query_factory.create,
|
query_rel=query_factory.create,
|
||||||
name="Chart",
|
name="Chart",
|
||||||
description="",
|
description="",
|
||||||
options="{}",
|
options={},
|
||||||
)
|
)
|
||||||
|
|
||||||
widget_factory = ModelFactory(
|
widget_factory = ModelFactory(
|
||||||
redash.models.Widget,
|
redash.models.Widget,
|
||||||
width=1,
|
width=1,
|
||||||
options="{}",
|
options={},
|
||||||
dashboard=dashboard_factory.create,
|
dashboard=dashboard_factory.create,
|
||||||
visualization=visualization_factory.create,
|
visualization=visualization_factory.create,
|
||||||
)
|
)
|
||||||
|
|||||||
@@ -74,7 +74,7 @@ class TestDashboardResourceGet(BaseTestCase):
|
|||||||
vis = self.factory.create_visualization(query_rel=query)
|
vis = self.factory.create_visualization(query_rel=query)
|
||||||
restricted_widget = self.factory.create_widget(visualization=vis, dashboard=dashboard)
|
restricted_widget = self.factory.create_widget(visualization=vis, dashboard=dashboard)
|
||||||
widget = self.factory.create_widget(dashboard=dashboard)
|
widget = self.factory.create_widget(dashboard=dashboard)
|
||||||
dashboard.layout = "[[{}, {}]]".format(widget.id, restricted_widget.id)
|
dashboard.layout = [[widget.id, restricted_widget.id]]
|
||||||
db.session.commit()
|
db.session.commit()
|
||||||
|
|
||||||
rv = self.make_request("get", "/api/dashboards/{0}".format(dashboard.id))
|
rv = self.make_request("get", "/api/dashboards/{0}".format(dashboard.id))
|
||||||
@@ -94,7 +94,7 @@ class TestDashboardResourcePost(BaseTestCase):
|
|||||||
rv = self.make_request(
|
rv = self.make_request(
|
||||||
"post",
|
"post",
|
||||||
"/api/dashboards/{0}".format(d.id),
|
"/api/dashboards/{0}".format(d.id),
|
||||||
data={"name": new_name, "layout": "[]"},
|
data={"name": new_name, "layout": []},
|
||||||
)
|
)
|
||||||
self.assertEqual(rv.status_code, 200)
|
self.assertEqual(rv.status_code, 200)
|
||||||
self.assertEqual(rv.json["name"], new_name)
|
self.assertEqual(rv.json["name"], new_name)
|
||||||
@@ -107,7 +107,7 @@ class TestDashboardResourcePost(BaseTestCase):
|
|||||||
rv = self.make_request(
|
rv = self.make_request(
|
||||||
"post",
|
"post",
|
||||||
"/api/dashboards/{0}".format(d.id),
|
"/api/dashboards/{0}".format(d.id),
|
||||||
data={"name": new_name, "layout": "[]", "version": d.version - 1},
|
data={"name": new_name, "layout": [], "version": d.version - 1},
|
||||||
)
|
)
|
||||||
|
|
||||||
self.assertEqual(rv.status_code, 409)
|
self.assertEqual(rv.status_code, 409)
|
||||||
@@ -120,7 +120,7 @@ class TestDashboardResourcePost(BaseTestCase):
|
|||||||
rv = self.make_request(
|
rv = self.make_request(
|
||||||
"post",
|
"post",
|
||||||
"/api/dashboards/{0}".format(d.id),
|
"/api/dashboards/{0}".format(d.id),
|
||||||
data={"name": new_name, "layout": "[]"},
|
data={"name": new_name, "layout": []},
|
||||||
)
|
)
|
||||||
|
|
||||||
self.assertEqual(rv.status_code, 200)
|
self.assertEqual(rv.status_code, 200)
|
||||||
@@ -133,7 +133,7 @@ class TestDashboardResourcePost(BaseTestCase):
|
|||||||
rv = self.make_request(
|
rv = self.make_request(
|
||||||
"post",
|
"post",
|
||||||
"/api/dashboards/{0}".format(d.id),
|
"/api/dashboards/{0}".format(d.id),
|
||||||
data={"name": new_name, "layout": "[]", "version": d.version},
|
data={"name": new_name, "layout": [], "version": d.version},
|
||||||
user=user,
|
user=user,
|
||||||
)
|
)
|
||||||
self.assertEqual(rv.status_code, 403)
|
self.assertEqual(rv.status_code, 403)
|
||||||
@@ -143,7 +143,7 @@ class TestDashboardResourcePost(BaseTestCase):
|
|||||||
rv = self.make_request(
|
rv = self.make_request(
|
||||||
"post",
|
"post",
|
||||||
"/api/dashboards/{0}".format(d.id),
|
"/api/dashboards/{0}".format(d.id),
|
||||||
data={"name": new_name, "layout": "[]", "version": d.version},
|
data={"name": new_name, "layout": [], "version": d.version},
|
||||||
user=user,
|
user=user,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,5 @@
|
|||||||
from redash.handlers.query_results import error_messages, run_query
|
from redash.handlers.query_results import error_messages, run_query
|
||||||
from redash.models import db
|
from redash.models import db
|
||||||
from redash.utils import json_dumps
|
|
||||||
from tests import BaseTestCase
|
from tests import BaseTestCase
|
||||||
|
|
||||||
|
|
||||||
@@ -362,7 +361,7 @@ class TestQueryDropdownsResource(BaseTestCase):
|
|||||||
|
|
||||||
query_result = self.factory.create_query_result()
|
query_result = self.factory.create_query_result()
|
||||||
data = {"rows": [], "columns": [{"name": "whatever"}]}
|
data = {"rows": [], "columns": [{"name": "whatever"}]}
|
||||||
query_result = self.factory.create_query_result(data=json_dumps(data))
|
query_result = self.factory.create_query_result(data=data)
|
||||||
unrelated_dropdown_query = self.factory.create_query(latest_query_data=query_result)
|
unrelated_dropdown_query = self.factory.create_query(latest_query_data=query_result)
|
||||||
|
|
||||||
# unrelated_dropdown_query has not been associated with query
|
# unrelated_dropdown_query has not been associated with query
|
||||||
@@ -378,7 +377,7 @@ class TestQueryDropdownsResource(BaseTestCase):
|
|||||||
def test_allows_access_if_associated_and_has_access_to_parent(self):
|
def test_allows_access_if_associated_and_has_access_to_parent(self):
|
||||||
query_result = self.factory.create_query_result()
|
query_result = self.factory.create_query_result()
|
||||||
data = {"rows": [], "columns": [{"name": "whatever"}]}
|
data = {"rows": [], "columns": [{"name": "whatever"}]}
|
||||||
query_result = self.factory.create_query_result(data=json_dumps(data))
|
query_result = self.factory.create_query_result(data=data)
|
||||||
dropdown_query = self.factory.create_query(latest_query_data=query_result)
|
dropdown_query = self.factory.create_query(latest_query_data=query_result)
|
||||||
|
|
||||||
options = {"parameters": [{"name": "param", "type": "query", "queryId": dropdown_query.id}]}
|
options = {"parameters": [{"name": "param", "type": "query", "queryId": dropdown_query.id}]}
|
||||||
@@ -423,7 +422,7 @@ class TestQueryResultExcelResponse(BaseTestCase):
|
|||||||
"rows": [{"test": 1}, {"test": 2, "test2": 3}],
|
"rows": [{"test": 1}, {"test": 2, "test2": 3}],
|
||||||
"columns": [{"name": "test"}, {"name": "test2"}],
|
"columns": [{"name": "test"}, {"name": "test2"}],
|
||||||
}
|
}
|
||||||
query_result = self.factory.create_query_result(data=json_dumps(data))
|
query_result = self.factory.create_query_result(data=data)
|
||||||
|
|
||||||
rv = self.make_request(
|
rv = self.make_request(
|
||||||
"get",
|
"get",
|
||||||
|
|||||||
@@ -2,7 +2,6 @@ import textwrap
|
|||||||
from unittest import TestCase
|
from unittest import TestCase
|
||||||
|
|
||||||
from redash.models import OPERATORS, Alert, db, next_state
|
from redash.models import OPERATORS, Alert, db, next_state
|
||||||
from redash.utils import json_dumps
|
|
||||||
from tests import BaseTestCase
|
from tests import BaseTestCase
|
||||||
|
|
||||||
|
|
||||||
@@ -43,7 +42,7 @@ class TestAlertAll(BaseTestCase):
|
|||||||
|
|
||||||
|
|
||||||
def get_results(value):
|
def get_results(value):
|
||||||
return json_dumps({"rows": [{"foo": value}], "columns": [{"name": "foo", "type": "STRING"}]})
|
return {"rows": [{"foo": value}], "columns": [{"name": "foo", "type": "STRING"}]}
|
||||||
|
|
||||||
|
|
||||||
class TestAlertEvaluate(BaseTestCase):
|
class TestAlertEvaluate(BaseTestCase):
|
||||||
@@ -66,7 +65,7 @@ class TestAlertEvaluate(BaseTestCase):
|
|||||||
self.assertEqual(alert.evaluate(), Alert.UNKNOWN_STATE)
|
self.assertEqual(alert.evaluate(), Alert.UNKNOWN_STATE)
|
||||||
|
|
||||||
def test_evaluate_return_unknown_when_empty_results(self):
|
def test_evaluate_return_unknown_when_empty_results(self):
|
||||||
results = json_dumps({"rows": [], "columns": [{"name": "foo", "type": "STRING"}]})
|
results = {"rows": [], "columns": [{"name": "foo", "type": "STRING"}]}
|
||||||
alert = self.create_alert(results)
|
alert = self.create_alert(results)
|
||||||
self.assertEqual(alert.evaluate(), Alert.UNKNOWN_STATE)
|
self.assertEqual(alert.evaluate(), Alert.UNKNOWN_STATE)
|
||||||
|
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ class DashboardTest(BaseTestCase):
|
|||||||
widget1 = self.factory.create_widget(visualization=vis1, dashboard=dashboard)
|
widget1 = self.factory.create_widget(visualization=vis1, dashboard=dashboard)
|
||||||
widget2 = self.factory.create_widget(visualization=vis2, dashboard=dashboard)
|
widget2 = self.factory.create_widget(visualization=vis2, dashboard=dashboard)
|
||||||
widget3 = self.factory.create_widget(visualization=vis3, dashboard=dashboard)
|
widget3 = self.factory.create_widget(visualization=vis3, dashboard=dashboard)
|
||||||
dashboard.layout = "[[{}, {}, {}]]".format(widget1.id, widget2.id, widget3.id)
|
dashboard.layout = [[widget1.id, widget2.id, widget3.id]]
|
||||||
db.session.commit()
|
db.session.commit()
|
||||||
return dashboard
|
return dashboard
|
||||||
|
|
||||||
|
|||||||
@@ -159,7 +159,7 @@ class QueryTest(BaseTestCase):
|
|||||||
q2 = self.factory.create_query(name="Testing searching")
|
q2 = self.factory.create_query(name="Testing searching")
|
||||||
q3 = self.factory.create_query(name="Testing finding")
|
q3 = self.factory.create_query(name="Testing finding")
|
||||||
|
|
||||||
queries = list(Query.search("(testing search) or finding", [self.factory.default_group.id]))
|
queries = list(Query.search("testing (search or finding)", [self.factory.default_group.id]))
|
||||||
self.assertIn(q1, queries)
|
self.assertIn(q1, queries)
|
||||||
self.assertIn(q2, queries)
|
self.assertIn(q2, queries)
|
||||||
self.assertIn(q3, queries)
|
self.assertIn(q3, queries)
|
||||||
@@ -373,16 +373,26 @@ class TestQueryFork(BaseTestCase):
|
|||||||
query = self.factory.create_query(data_source=data_source, description="this is description")
|
query = self.factory.create_query(data_source=data_source, description="this is description")
|
||||||
|
|
||||||
# create default TABLE - query factory does not create it
|
# create default TABLE - query factory does not create it
|
||||||
self.factory.create_visualization(query_rel=query, name="Table", description="", type="TABLE", options="{}")
|
self.factory.create_visualization(query_rel=query, name="Table", description="", type="TABLE", options={})
|
||||||
|
|
||||||
visualization_chart = self.factory.create_visualization(
|
visualization_chart = self.factory.create_visualization(
|
||||||
query_rel=query,
|
query_rel=query,
|
||||||
description="chart vis",
|
description="chart vis",
|
||||||
type="CHART",
|
type="CHART",
|
||||||
options="""{"yAxis": [{"type": "linear"}, {"type": "linear", "opposite": true}], "series": {"stacking": null}, "globalSeriesType": "line", "sortX": true, "seriesOptions": {"count": {"zIndex": 0, "index": 0, "type": "line", "yAxis": 0}}, "xAxis": {"labels": {"enabled": true}, "type": "datetime"}, "columnMapping": {"count": "y", "created_at": "x"}, "bottomMargin": 50, "legend": {"enabled": true}}""",
|
options={
|
||||||
|
"yAxis": [{"type": "linear"}, {"type": "linear", "opposite": True}],
|
||||||
|
"series": {"stacking": None},
|
||||||
|
"globalSeriesType": "line",
|
||||||
|
"sortX": True,
|
||||||
|
"seriesOptions": {"count": {"zIndex": 0, "index": 0, "type": "line", "yAxis": 0}},
|
||||||
|
"xAxis": {"labels": {"enabled": True}, "type": "datetime"},
|
||||||
|
"columnMapping": {"count": "y", "created_at": "x"},
|
||||||
|
"bottomMargin": 50,
|
||||||
|
"legend": {"enabled": True},
|
||||||
|
},
|
||||||
)
|
)
|
||||||
visualization_box = self.factory.create_visualization(
|
visualization_box = self.factory.create_visualization(
|
||||||
query_rel=query, description="box vis", type="BOXPLOT", options="{}"
|
query_rel=query, description="box vis", type="BOXPLOT", options={}
|
||||||
)
|
)
|
||||||
fork_user = self.factory.create_user()
|
fork_user = self.factory.create_user()
|
||||||
forked_query = query.fork(fork_user)
|
forked_query = query.fork(fork_user)
|
||||||
@@ -417,7 +427,7 @@ class TestQueryFork(BaseTestCase):
|
|||||||
self.assertEqual(count_table, 1)
|
self.assertEqual(count_table, 1)
|
||||||
self.assertEqual(forked_table.name, "Table")
|
self.assertEqual(forked_table.name, "Table")
|
||||||
self.assertEqual(forked_table.description, "")
|
self.assertEqual(forked_table.description, "")
|
||||||
self.assertEqual(forked_table.options, "{}")
|
self.assertEqual(forked_table.options, {})
|
||||||
|
|
||||||
def test_fork_from_query_that_has_no_visualization(self):
|
def test_fork_from_query_that_has_no_visualization(self):
|
||||||
# prepare original query and visualizations
|
# prepare original query and visualizations
|
||||||
@@ -425,7 +435,7 @@ class TestQueryFork(BaseTestCase):
|
|||||||
query = self.factory.create_query(data_source=data_source, description="this is description")
|
query = self.factory.create_query(data_source=data_source, description="this is description")
|
||||||
|
|
||||||
# create default TABLE - query factory does not create it
|
# create default TABLE - query factory does not create it
|
||||||
self.factory.create_visualization(query_rel=query, name="Table", description="", type="TABLE", options="{}")
|
self.factory.create_visualization(query_rel=query, name="Table", description="", type="TABLE", options={})
|
||||||
|
|
||||||
fork_user = self.factory.create_user()
|
fork_user = self.factory.create_user()
|
||||||
|
|
||||||
@@ -457,7 +467,7 @@ class TestQueryUpdateLatestResult(BaseTestCase):
|
|||||||
self.query_hash = gen_query_hash(self.query)
|
self.query_hash = gen_query_hash(self.query)
|
||||||
self.runtime = 123
|
self.runtime = 123
|
||||||
self.utcnow = utcnow()
|
self.utcnow = utcnow()
|
||||||
self.data = "data"
|
self.data = {"columns": {}, "rows": []}
|
||||||
|
|
||||||
def test_updates_existing_queries(self):
|
def test_updates_existing_queries(self):
|
||||||
query1 = self.factory.create_query(query_text=self.query)
|
query1 = self.factory.create_query(query_text=self.query)
|
||||||
|
|||||||
@@ -1,10 +1,6 @@
|
|||||||
import datetime
|
import datetime
|
||||||
from unittest import TestCase
|
|
||||||
|
|
||||||
from mock import patch
|
|
||||||
|
|
||||||
from redash import models
|
from redash import models
|
||||||
from redash.models import DBPersistence
|
|
||||||
from redash.utils import utcnow
|
from redash.utils import utcnow
|
||||||
from tests import BaseTestCase
|
from tests import BaseTestCase
|
||||||
|
|
||||||
@@ -71,28 +67,9 @@ class QueryResultTest(BaseTestCase):
|
|||||||
query.data_source,
|
query.data_source,
|
||||||
query.query_hash,
|
query.query_hash,
|
||||||
query.query_text,
|
query.query_text,
|
||||||
"",
|
{},
|
||||||
0,
|
0,
|
||||||
utcnow(),
|
utcnow(),
|
||||||
)
|
)
|
||||||
|
|
||||||
self.assertEqual(original_updated_at, query.updated_at)
|
self.assertEqual(original_updated_at, query.updated_at)
|
||||||
|
|
||||||
|
|
||||||
class TestDBPersistence(TestCase):
|
|
||||||
def test_updating_data_removes_cached_result(self):
|
|
||||||
p = DBPersistence()
|
|
||||||
p.data = '{"test": 1}'
|
|
||||||
self.assertDictEqual(p.data, {"test": 1})
|
|
||||||
p.data = '{"test": 2}'
|
|
||||||
self.assertDictEqual(p.data, {"test": 2})
|
|
||||||
|
|
||||||
@patch("redash.models.json_loads")
|
|
||||||
def test_calls_json_loads_only_once(self, json_loads_patch):
|
|
||||||
json_loads_patch.return_value = "1"
|
|
||||||
p = DBPersistence()
|
|
||||||
json_data = '{"test": 1}'
|
|
||||||
p.data = json_data
|
|
||||||
a = p.data # noqa
|
|
||||||
b = p.data # noqa
|
|
||||||
json_loads_patch.assert_called_once_with(json_data)
|
|
||||||
|
|||||||
@@ -87,7 +87,7 @@ class TestClickHouse(TestCase):
|
|||||||
|
|
||||||
self.assertIsNone(error)
|
self.assertIsNone(error)
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
json.loads(data),
|
data,
|
||||||
{
|
{
|
||||||
"columns": [
|
"columns": [
|
||||||
{"name": "1", "friendly_name": "1", "type": TYPE_INTEGER},
|
{"name": "1", "friendly_name": "1", "type": TYPE_INTEGER},
|
||||||
@@ -139,7 +139,7 @@ SELECT * FROM test;
|
|||||||
|
|
||||||
self.assertIsNone(error)
|
self.assertIsNone(error)
|
||||||
self.assertEqual(
|
self.assertEqual(
|
||||||
json.loads(data),
|
data,
|
||||||
{
|
{
|
||||||
"columns": [
|
"columns": [
|
||||||
{"name": "1", "friendly_name": "1", "type": TYPE_INTEGER},
|
{"name": "1", "friendly_name": "1", "type": TYPE_INTEGER},
|
||||||
|
|||||||
@@ -2,7 +2,6 @@ from unittest.mock import patch
|
|||||||
|
|
||||||
from redash.query_runner import TYPE_INTEGER, TYPE_STRING
|
from redash.query_runner import TYPE_INTEGER, TYPE_STRING
|
||||||
from redash.query_runner.e6data import e6data
|
from redash.query_runner.e6data import e6data
|
||||||
from redash.utils import json_dumps
|
|
||||||
|
|
||||||
runner = e6data(
|
runner = e6data(
|
||||||
{
|
{
|
||||||
@@ -28,15 +27,13 @@ def test_run_query(mock_cursor):
|
|||||||
|
|
||||||
json_data, error = runner.run_query(query, user)
|
json_data, error = runner.run_query(query, user)
|
||||||
|
|
||||||
expected_json_data = json_dumps(
|
expected_json_data = {
|
||||||
{
|
"columns": [
|
||||||
"columns": [
|
{"name": "id", "type": TYPE_INTEGER},
|
||||||
{"name": "id", "type": TYPE_INTEGER},
|
{"name": "name", "type": TYPE_STRING},
|
||||||
{"name": "name", "type": TYPE_STRING},
|
],
|
||||||
],
|
"rows": [{"id": 1, "name": "John"}],
|
||||||
"rows": [{"id": 1, "name": "John"}],
|
}
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
assert json_data == expected_json_data
|
assert json_data == expected_json_data
|
||||||
|
|
||||||
@@ -50,7 +47,7 @@ def test_test_connection(mock_cursor):
|
|||||||
|
|
||||||
json_data, error = runner.run_query(query, user)
|
json_data, error = runner.run_query(query, user)
|
||||||
|
|
||||||
expected_json_data = json_dumps({"columns": [{"name": "EXPR$0", "type": TYPE_INTEGER}], "rows": [{"EXPR$0": 1}]})
|
expected_json_data = {"columns": [{"name": "EXPR$0", "type": TYPE_INTEGER}], "rows": [{"EXPR$0": 1}]}
|
||||||
|
|
||||||
assert json_data == expected_json_data
|
assert json_data == expected_json_data
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,3 @@
|
|||||||
import json
|
|
||||||
|
|
||||||
from influxdb.resultset import ResultSet
|
from influxdb.resultset import ResultSet
|
||||||
|
|
||||||
from redash.query_runner import (
|
from redash.query_runner import (
|
||||||
@@ -40,7 +38,7 @@ def test_influxdb_result_types_with_rows():
|
|||||||
{"k1": "bar", "time": "2023-10-06T13:31:08.882953339Z", "v1": 0.6, "v2": 4},
|
{"k1": "bar", "time": "2023-10-06T13:31:08.882953339Z", "v1": 0.6, "v2": 4},
|
||||||
],
|
],
|
||||||
}
|
}
|
||||||
assert json.loads(transformed) == expected
|
assert transformed == expected
|
||||||
|
|
||||||
|
|
||||||
def test_influxdb_result_types_with_no_rows_are_string():
|
def test_influxdb_result_types_with_no_rows_are_string():
|
||||||
@@ -55,4 +53,4 @@ def test_influxdb_result_types_with_no_rows_are_string():
|
|||||||
],
|
],
|
||||||
"rows": [],
|
"rows": [],
|
||||||
}
|
}
|
||||||
assert json.loads(transformed) == expected
|
assert transformed == expected
|
||||||
|
|||||||
@@ -1,5 +1,3 @@
|
|||||||
import json
|
|
||||||
|
|
||||||
import mock
|
import mock
|
||||||
import pytest
|
import pytest
|
||||||
from influxdb_client.client.flux_table import (
|
from influxdb_client.client.flux_table import (
|
||||||
@@ -277,10 +275,8 @@ class TestInfluxDBv2:
|
|||||||
@mock.patch("redash.query_runner.influx_db_v2.InfluxDBClient")
|
@mock.patch("redash.query_runner.influx_db_v2.InfluxDBClient")
|
||||||
@mock.patch("redash.query_runner.influx_db_v2.InfluxDBv2." "_cleanup_cert_files")
|
@mock.patch("redash.query_runner.influx_db_v2.InfluxDBv2." "_cleanup_cert_files")
|
||||||
@mock.patch("redash.query_runner.influx_db_v2.logger")
|
@mock.patch("redash.query_runner.influx_db_v2.logger")
|
||||||
@mock.patch("redash.query_runner.influx_db_v2.json_dumps")
|
|
||||||
def test_run_query(
|
def test_run_query(
|
||||||
self,
|
self,
|
||||||
json_dumps_mock: mock.MagicMock,
|
|
||||||
logger_mock: mock.MagicMock,
|
logger_mock: mock.MagicMock,
|
||||||
cleanup_cert_files_mock: mock.MagicMock,
|
cleanup_cert_files_mock: mock.MagicMock,
|
||||||
influx_db_client_mock: mock.MagicMock,
|
influx_db_client_mock: mock.MagicMock,
|
||||||
@@ -310,28 +306,24 @@ class TestInfluxDBv2:
|
|||||||
],
|
],
|
||||||
"rows": [{"col_1": "col_value_1", "col_2": 1}, {"col_1": "col_value_2", "col_2": 2}, {"col_3": 3.0}],
|
"rows": [{"col_1": "col_value_1", "col_2": 1}, {"col_1": "col_value_2", "col_2": 2}, {"col_3": 3.0}],
|
||||||
}
|
}
|
||||||
json_dumps_data = json.dumps(result_data)
|
|
||||||
|
|
||||||
query_mock = influx_db_client_mock.return_value.__enter__().query_api().query
|
query_mock = influx_db_client_mock.return_value.__enter__().query_api().query
|
||||||
query_mock.return_value = influx_table_list
|
query_mock.return_value = influx_table_list
|
||||||
json_dumps_mock.return_value = json_dumps_data
|
|
||||||
|
|
||||||
# 1. case: successful query data
|
# 1. case: successful query data
|
||||||
data, error = influx_db_v2.run_query(query, "user")
|
data, error = influx_db_v2.run_query(query, "user")
|
||||||
|
|
||||||
assert data == json_dumps_data
|
assert data == result_data
|
||||||
assert error is None
|
assert error is None
|
||||||
|
|
||||||
influx_db_client_mock.assert_called_once_with(url="url", token="token", org="org", **influx_kwargs)
|
influx_db_client_mock.assert_called_once_with(url="url", token="token", org="org", **influx_kwargs)
|
||||||
logger_mock.debug.assert_called_once_with(f"InfluxDB got query: {query!r}")
|
logger_mock.debug.assert_called_once_with(f"InfluxDB got query: {query!r}")
|
||||||
query_mock.assert_called_once_with(query)
|
query_mock.assert_called_once_with(query)
|
||||||
json_dumps_mock.assert_called_once_with(result_data)
|
|
||||||
cleanup_cert_files_mock.assert_called_once_with(influx_kwargs)
|
cleanup_cert_files_mock.assert_called_once_with(influx_kwargs)
|
||||||
|
|
||||||
influx_db_client_mock.reset_mock()
|
influx_db_client_mock.reset_mock()
|
||||||
logger_mock.reset_mock()
|
logger_mock.reset_mock()
|
||||||
query_mock.reset_mock()
|
query_mock.reset_mock()
|
||||||
json_dumps_mock.reset_mock()
|
|
||||||
cleanup_cert_files_mock.reset_mock()
|
cleanup_cert_files_mock.reset_mock()
|
||||||
|
|
||||||
# 2. case: unsuccessful query data
|
# 2. case: unsuccessful query data
|
||||||
@@ -344,5 +336,4 @@ class TestInfluxDBv2:
|
|||||||
influx_db_client_mock.assert_called_once_with(url="url", token="token", org="org", **influx_kwargs)
|
influx_db_client_mock.assert_called_once_with(url="url", token="token", org="org", **influx_kwargs)
|
||||||
logger_mock.debug.assert_called_once_with(f"InfluxDB got query: {query!r}")
|
logger_mock.debug.assert_called_once_with(f"InfluxDB got query: {query!r}")
|
||||||
query_mock.assert_called_once_with(query)
|
query_mock.assert_called_once_with(query)
|
||||||
json_dumps_mock.assert_not_called()
|
|
||||||
cleanup_cert_files_mock.assert_called_once_with(influx_kwargs)
|
cleanup_cert_files_mock.assert_called_once_with(influx_kwargs)
|
||||||
|
|||||||
@@ -5,7 +5,6 @@ from unittest import TestCase
 import mock
 
 from redash.query_runner.prometheus import Prometheus, get_instant_rows, get_range_rows
-from redash.utils import json_dumps
 
 
 class TestPrometheus(TestCase):
@@ -350,7 +349,7 @@ class TestPrometheus(TestCase):
             {"friendly_name": "foo_bar", "type": "string", "name": "foo_bar"},
         ]
 
-        data_expected = json_dumps({"rows": rows, "columns": columns})
+        data_expected = {"rows": rows, "columns": columns}
 
         requests_get_mock.return_value = mock.Mock(
             json=mock.Mock(return_value={"data": {"result": self.instant_query_result}})
@@ -424,7 +423,7 @@ class TestPrometheus(TestCase):
             {"friendly_name": "foo_bar", "type": "string", "name": "foo_bar"},
         ]
 
-        data_expected = json_dumps({"rows": rows, "columns": columns})
+        data_expected = {"rows": rows, "columns": columns}
 
         requests_get_mock.return_value = mock.Mock(
             json=mock.Mock(return_value={"data": {"result": self.range_query_result}})
@@ -490,7 +489,7 @@ class TestPrometheus(TestCase):
             {"friendly_name": "foo_bar", "type": "string", "name": "foo_bar"},
         ]
 
-        data_expected = json_dumps({"rows": rows, "columns": columns})
+        data_expected = {"rows": rows, "columns": columns}
 
         now_datetime = datetime(2023, 12, 12, 11, 00, 00)
         end_timestamp_expected = int(time.mktime(now_datetime.timetuple()))
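All three Prometheus hunks make the same move: data_expected stays a dict and is compared with assertEqual instead of being serialized with json_dumps first. A small sketch of why dict comparison is the more robust assertion; the rows and columns values below are invented for illustration:

    import json

    rows = [{"timestamp": "2023-12-12T11:00:00", "value": "1"}]
    columns = [{"friendly_name": "timestamp", "type": "datetime", "name": "timestamp"}]

    expected = {"rows": rows, "columns": columns}
    returned = {"columns": columns, "rows": rows}  # same content, different key order

    assert expected == returned                          # dict equality passes
    assert json.dumps(expected) != json.dumps(returned)  # string comparison would not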
@@ -15,7 +15,7 @@ class TestPythonQueryRunner(TestCase):
         query_string = "print('test')"
         mock_dt.utcnow = mock.Mock(return_value=datetime(1901, 12, 21))
         result = self.python.run_query(query_string, "user")
-        self.assertEqual(result[0], '{"rows": [], "columns": [], "log": ["[1901-12-21T00:00:00] test"]}')
+        self.assertEqual(result[0], {"rows": [], "columns": [], "log": ["[1901-12-21T00:00:00] test"]})
 
     def test_empty_result(self):
         query_string = "result={}"
@@ -68,11 +68,11 @@ class TestPythonQueryRunner(TestCase):
         result = self.python.run_query(query_string, "user")
         self.assertEqual(
             result[0],
-            '{"columns": [{"name": "col1", "type": "string"},'
-            ' {"name": "col2", "type": "integer"}],'
-            ' "rows": [{"col1": "foo", "col2": 100},'
-            ' {"col1": "bar", "col2": 200}],'
-            ' "log": []}',
+            {
+                "columns": [{"name": "col1", "type": "string"}, {"name": "col2", "type": "integer"}],
+                "rows": [{"col1": "foo", "col2": 100}, {"col1": "bar", "col2": 200}],
+                "log": [],
+            },
         )
 
     @mock.patch("datetime.datetime")
@@ -89,11 +89,11 @@ class TestPythonQueryRunner(TestCase):
         result = self.python.run_query(query_string, "user")
         self.assertEqual(
             result[0],
-            '{"columns": [{"name": "col1", "type": "string"},'
-            ' {"name": "col2", "type": "integer"}],'
-            ' "rows": [{"col1": "foo", "col2": 100},'
-            ' {"col1": "bar", "col2": 200}],'
-            ' "log": ["[1901-12-21T00:00:00] test"]}',
+            {
+                "columns": [{"name": "col1", "type": "string"}, {"name": "col2", "type": "integer"}],
+                "rows": [{"col1": "foo", "col2": 100}, {"col1": "bar", "col2": 200}],
+                "log": ["[1901-12-21T00:00:00] test"],
+            },
         )
 
 
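For the Python runner the expected result[0] changes from a hand-built JSON string to the dict itself, which also removes the brittle dependence on exact key order and spacing in the serialized form. A tiny sketch of the shape the assertions now check, assuming run_query keeps returning a (data, error) tuple; the values simply mirror the test above:

    data = {
        "columns": [{"name": "col1", "type": "string"}, {"name": "col2", "type": "integer"}],
        "rows": [{"col1": "foo", "col2": 100}, {"col1": "bar", "col2": 200}],
        "log": [],
    }
    error = None

    assert isinstance(data, dict)
    assert [row["col1"] for row in data["rows"]] == ["foo", "bar"]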
@@ -17,7 +17,6 @@ from redash.query_runner.query_results import (
     prepare_parameterized_query,
     replace_query_parameters,
 )
-from redash.utils import json_dumps
 from tests import BaseTestCase
 
 
@@ -235,5 +234,5 @@ class TestGetQueryResult(BaseTestCase):
 
         with mock.patch.object(PostgreSQL, "run_query") as qr:
             query_result_data = {"columns": [], "rows": []}
-            qr.return_value = (json_dumps(query_result_data), None)
+            qr.return_value = (query_result_data, None)
             self.assertEqual(query_result_data, get_query_results(self.factory.user, query.id, False))
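In the query_results test the patched PostgreSQL.run_query now hands back the dict directly, so get_query_results can return it without a JSON parse in between. A minimal sketch of that mocking pattern under the same (data, error) assumption; runner_mock is a hypothetical stand-in for the patched runner, not the real object from the test:

    from unittest import mock

    query_result_data = {"columns": [], "rows": []}

    runner_mock = mock.Mock()
    runner_mock.run_query.return_value = (query_result_data, None)

    data, error = runner_mock.run_query("SELECT 1", "user")
    assert data == query_result_data and error is None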
@@ -83,7 +83,7 @@ class TestTinybird(TestCase):
 
         self.assertIsNone(error)
         self.assertEqual(
-            json.loads(data),
+            data,
             {
                 "columns": [
                     {"name": "string_attribute", "friendly_name": "string_attribute", "type": TYPE_STRING},
@@ -28,7 +28,16 @@ class TestTrino(TestCase):
 
     def _assert_schema_catalog(self, mock_run_query, mock__get_catalogs, runner):
         mock_run_query.return_value = (
-            f'{{"rows": [{{"table_schema": "{TestTrino.schema_name}", "table_name": "{TestTrino.table_name}", "column_name": "{TestTrino.column_name}", "data_type": "{TestTrino.column_type}"}}]}}',
+            {
+                "rows": [
+                    {
+                        "table_schema": TestTrino.schema_name,
+                        "table_name": TestTrino.table_name,
+                        "column_name": TestTrino.column_name,
+                        "data_type": TestTrino.column_type,
+                    }
+                ]
+            },
             None,
         )
         mock__get_catalogs.return_value = [TestTrino.catalog_name]
@@ -36,14 +45,14 @@ class TestTrino(TestCase):
         expected_schema = [
             {
                 "name": f"{TestTrino.catalog_name}.{TestTrino.schema_name}.{TestTrino.table_name}",
-                "columns": [{"name": f"{TestTrino.column_name}", "type": f"{TestTrino.column_type}"}],
+                "columns": [{"name": TestTrino.column_name, "type": TestTrino.column_type}],
             }
         ]
         self.assertEqual(schema, expected_schema)
 
     @patch.object(Trino, "run_query")
     def test__get_catalogs(self, mock_run_query):
-        mock_run_query.return_value = (f'{{"rows": [{{"Catalog": "{TestTrino.catalog_name}"}}]}}', None)
+        mock_run_query.return_value = ({"rows": [{"Catalog": TestTrino.catalog_name}]}, None)
         runner = Trino({})
         catalogs = runner._get_catalogs()
         expected_catalogs = [TestTrino.catalog_name]
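The Trino hunks follow suit: the mocked run_query payloads become dicts, which suggests helpers such as _get_catalogs can index into the result directly instead of parsing a JSON string first. A hedged sketch of that kind of consumer; the actual body of _get_catalogs is not shown in this diff, so extract_catalogs below is only an assumption about its behaviour:

    def extract_catalogs(results: dict) -> list:
        # Hypothetical helper mirroring what a dict-consuming _get_catalogs might do.
        return [row["Catalog"] for row in results.get("rows", [])]

    assert extract_catalogs({"rows": [{"Catalog": "memory"}]}) == ["memory"]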
Some files were not shown because too many files have changed in this diff.