Removed the PseudoJSON class, converted all options and other JSON columns to JSONB (#6687)

Co-authored-by: Andrew Chubatiuk <andrew.chubatiuk@motional.com>
Author: Andrii Chubatiuk
Date: 2024-01-12 01:02:00 +02:00
Committed by: GitHub
Parent: 4d5103978b
Commit: ec1c4d07de
105 changed files with 549 additions and 606 deletions
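For orientation, here is a minimal before/after sketch of the column pattern this commit applies throughout the models (the PseudoJSON class and the new JSONB columns both appear in the diffs below; the commented-out Column lines are illustrative):

# Before: values were serialized into a text column by a custom TypeDecorator.
from sqlalchemy.types import TypeDecorator
from redash.models.base import Column, db
from redash.models.types import MutableDict
from redash.utils import json_dumps, json_loads

class PseudoJSON(TypeDecorator):
    impl = db.Text

    def process_bind_param(self, value, dialect):
        return value if value is None else json_dumps(value)

    def process_result_value(self, value, dialect):
        return json_loads(value) if value else value

# options = Column(MutableDict.as_mutable(PseudoJSON), default={})

# After: the column is a native Postgres jsonb type; serialization is handled by
# the engine's json_serializer/json_deserializer instead of the type decorator.
from sqlalchemy.dialects.postgresql import JSONB

# options = Column(MutableDict.as_mutable(JSONB), default={})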

View File

@@ -7,7 +7,7 @@ Create Date: 2020-12-23 21:35:32.766354
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import JSON
# revision identifiers, used by Alembic.
revision = '0ec979123ba4'
@@ -18,7 +18,7 @@ depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('dashboards', sa.Column('options', postgresql.JSON(astext_type=sa.Text()), server_default='{}', nullable=False))
op.add_column('dashboards', sa.Column('options', JSON(astext_type=sa.Text()), server_default='{}', nullable=False))
# ### end Alembic commands ###

View File

@@ -10,8 +10,7 @@ import json
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table
from redash.models import MutableDict, PseudoJSON
from redash.models import MutableDict
# revision identifiers, used by Alembic.
@@ -41,7 +40,7 @@ def upgrade():
"queries",
sa.Column(
"schedule",
MutableDict.as_mutable(PseudoJSON),
sa.Text(),
nullable=False,
server_default=json.dumps({}),
),
@@ -51,7 +50,7 @@ def upgrade():
queries = table(
"queries",
sa.Column("id", sa.Integer, primary_key=True),
sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)),
sa.Column("schedule", sa.Text()),
sa.Column("old_schedule", sa.String(length=10)),
)
@@ -85,7 +84,7 @@ def downgrade():
"queries",
sa.Column(
"old_schedule",
MutableDict.as_mutable(PseudoJSON),
sa.Text(),
nullable=False,
server_default=json.dumps({}),
),
@@ -93,8 +92,8 @@ def downgrade():
queries = table(
"queries",
sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)),
sa.Column("old_schedule", MutableDict.as_mutable(PseudoJSON)),
sa.Column("schedule", sa.Text()),
sa.Column("old_schedule", sa.Text()),
)
op.execute(queries.update().values({"old_schedule": queries.c.schedule}))
@@ -106,7 +105,7 @@ def downgrade():
"queries",
sa.Column("id", sa.Integer, primary_key=True),
sa.Column("schedule", sa.String(length=10)),
sa.Column("old_schedule", MutableDict.as_mutable(PseudoJSON)),
sa.Column("old_schedule", sa.Text()),
)
conn = op.get_bind()

View File

@@ -0,0 +1,146 @@
"""change type of json fields from varchar to json
Revision ID: 7205816877ec
Revises: 7ce5925f832b
Create Date: 2024-01-03 13:55:18.885021
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import JSONB, JSON
# revision identifiers, used by Alembic.
revision = '7205816877ec'
down_revision = '7ce5925f832b'
branch_labels = None
depends_on = None
def upgrade():
connection = op.get_bind()
op.alter_column('queries', 'options',
existing_type=sa.Text(),
type_=JSONB(astext_type=sa.Text()),
nullable=True,
postgresql_using='options::jsonb',
server_default=sa.text("'{}'::jsonb"))
op.alter_column('queries', 'schedule',
existing_type=sa.Text(),
type_=JSONB(astext_type=sa.Text()),
nullable=True,
postgresql_using='schedule::jsonb',
server_default=sa.text("'{}'::jsonb"))
op.alter_column('events', 'additional_properties',
existing_type=sa.Text(),
type_=JSONB(astext_type=sa.Text()),
nullable=True,
postgresql_using='additional_properties::jsonb',
server_default=sa.text("'{}'::jsonb"))
op.alter_column('organizations', 'settings',
existing_type=sa.Text(),
type_=JSONB(astext_type=sa.Text()),
nullable=True,
postgresql_using='settings::jsonb',
server_default=sa.text("'{}'::jsonb"))
op.alter_column('alerts', 'options',
existing_type=JSON(astext_type=sa.Text()),
type_=JSONB(astext_type=sa.Text()),
nullable=True,
postgresql_using='options::jsonb',
server_default=sa.text("'{}'::jsonb"))
op.alter_column('dashboards', 'options',
existing_type=JSON(astext_type=sa.Text()),
type_=JSONB(astext_type=sa.Text()),
postgresql_using='options::jsonb',
server_default=sa.text("'{}'::jsonb"))
op.alter_column('dashboards', 'layout',
existing_type=sa.Text(),
type_=JSONB(astext_type=sa.Text()),
postgresql_using='layout::jsonb',
server_default=sa.text("'{}'::jsonb"))
op.alter_column('query_results', 'data',
existing_type=sa.Text(),
type_=JSONB(astext_type=sa.Text()),
nullable=True,
postgresql_using='data::jsonb',
server_default=sa.text("'{}'::jsonb"))
op.alter_column('changes', 'change',
existing_type=JSON(astext_type=sa.Text()),
type_=JSONB(astext_type=sa.Text()),
postgresql_using='change::jsonb',
server_default=sa.text("'{}'::jsonb"))
op.alter_column('visualizations', 'options',
existing_type=sa.Text(),
type_=JSONB(astext_type=sa.Text()),
postgresql_using='options::jsonb',
server_default=sa.text("'{}'::jsonb"))
op.alter_column('widgets', 'options',
existing_type=sa.Text(),
type_=JSONB(astext_type=sa.Text()),
postgresql_using='options::jsonb',
server_default=sa.text("'{}'::jsonb"))
def downgrade():
connection = op.get_bind()
op.alter_column('queries', 'options',
existing_type=JSONB(astext_type=sa.Text()),
type_=sa.Text(),
postgresql_using='options::text',
existing_nullable=True,
server_default=sa.text("'{}'::text"))
op.alter_column('queries', 'schedule',
existing_type=JSONB(astext_type=sa.Text()),
type_=sa.Text(),
postgresql_using='schedule::text',
existing_nullable=True,
server_default=sa.text("'{}'::text"))
op.alter_column('events', 'additional_properties',
existing_type=JSONB(astext_type=sa.Text()),
type_=sa.Text(),
postgresql_using='additional_properties::text',
existing_nullable=True,
server_default=sa.text("'{}'::text"))
op.alter_column('organizations', 'settings',
existing_type=JSONB(astext_type=sa.Text()),
type_=sa.Text(),
postgresql_using='settings::text',
existing_nullable=True,
server_default=sa.text("'{}'::text"))
op.alter_column('alerts', 'options',
existing_type=JSONB(astext_type=sa.Text()),
type_=JSON(astext_type=sa.Text()),
postgresql_using='options::json',
existing_nullable=True,
server_default=sa.text("'{}'::json"))
op.alter_column('dashboards', 'options',
existing_type=JSONB(astext_type=sa.Text()),
type_=JSON(astext_type=sa.Text()),
postgresql_using='options::json',
server_default=sa.text("'{}'::json"))
op.alter_column('dashboards', 'layout',
existing_type=JSONB(astext_type=sa.Text()),
type_=sa.Text(),
postgresql_using='layout::text',
server_default=sa.text("'{}'::text"))
op.alter_column('query_results', 'data',
existing_type=JSONB(astext_type=sa.Text()),
type_=sa.Text(),
postgresql_using='data::text',
server_default=sa.text("'{}'::text"))
op.alter_column('changes', 'change',
existing_type=JSONB(astext_type=sa.Text()),
type_=JSON(astext_type=sa.Text()),
postgresql_using='change::json',
server_default=sa.text("'{}'::json"))
op.alter_column('visualizations', 'options',
type_=sa.Text(),
existing_type=JSONB(astext_type=sa.Text()),
postgresql_using='options::text',
server_default=sa.text("'{}'::text"))
op.alter_column('widgets', 'options',
type_=sa.Text(),
existing_type=JSONB(astext_type=sa.Text()),
postgresql_using='options::text',
server_default=sa.text("'{}'::text"))

View File

@@ -7,10 +7,9 @@ Create Date: 2019-01-17 13:22:21.729334
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.sql import table
from redash.models import MutableDict, PseudoJSON
from redash.models import MutableDict
# revision identifiers, used by Alembic.
revision = "73beceabb948"
@@ -43,7 +42,7 @@ def upgrade():
queries = table(
"queries",
sa.Column("id", sa.Integer, primary_key=True),
sa.Column("schedule", MutableDict.as_mutable(PseudoJSON)),
sa.Column("schedule", sa.Text()),
)
conn = op.get_bind()

View File

@@ -7,7 +7,7 @@ Create Date: 2019-01-31 09:21:31.517265
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import BYTEA
from sqlalchemy.sql import table
from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine
@@ -18,7 +18,6 @@ from redash.models.types import (
Configuration,
MutableDict,
MutableList,
PseudoJSON,
)
# revision identifiers, used by Alembic.
@@ -31,7 +30,7 @@ depends_on = None
def upgrade():
op.add_column(
"data_sources",
sa.Column("encrypted_options", postgresql.BYTEA(), nullable=True),
sa.Column("encrypted_options", BYTEA(), nullable=True),
)
# copy values

View File

@@ -9,7 +9,7 @@ import re
from funcy import flatten, compact
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import ARRAY
from redash import models
# revision identifiers, used by Alembic.
@@ -21,10 +21,10 @@ depends_on = None
def upgrade():
op.add_column(
"dashboards", sa.Column("tags", postgresql.ARRAY(sa.Unicode()), nullable=True)
"dashboards", sa.Column("tags", ARRAY(sa.Unicode()), nullable=True)
)
op.add_column(
"queries", sa.Column("tags", postgresql.ARRAY(sa.Unicode()), nullable=True)
"queries", sa.Column("tags", ARRAY(sa.Unicode()), nullable=True)
)

View File

@@ -7,7 +7,7 @@ Create Date: 2020-12-14 21:42:48.661684
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import BYTEA
from sqlalchemy.sql import table
from sqlalchemy_utils.types.encrypted.encrypted_type import FernetEngine
@@ -30,7 +30,7 @@ depends_on = None
def upgrade():
op.add_column(
"notification_destinations",
sa.Column("encrypted_options", postgresql.BYTEA(), nullable=True)
sa.Column("encrypted_options", BYTEA(), nullable=True)
)
# copy values

View File

@@ -7,7 +7,7 @@ Create Date: 2018-11-08 16:12:17.023569
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import JSON
# revision identifiers, used by Alembic.
revision = "e7f8a917aa8e"
@@ -21,7 +21,7 @@ def upgrade():
"users",
sa.Column(
"details",
postgresql.JSON(astext_type=sa.Text()),
JSON(astext_type=sa.Text()),
server_default="{}",
nullable=True,
),

View File

@@ -7,7 +7,7 @@ Create Date: 2022-01-31 15:24:16.507888
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import JSON, JSONB
from redash.models import db
@@ -23,8 +23,8 @@ def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.alter_column('users', 'details',
existing_type=postgresql.JSON(astext_type=sa.Text()),
type_=postgresql.JSONB(astext_type=sa.Text()),
existing_type=JSON(astext_type=sa.Text()),
type_=JSONB(astext_type=sa.Text()),
existing_nullable=True,
existing_server_default=sa.text("'{}'::jsonb"))
### end Alembic commands ###
@@ -52,8 +52,8 @@ def downgrade():
connection.execute(update_query)
db.session.commit()
op.alter_column('users', 'details',
existing_type=postgresql.JSONB(astext_type=sa.Text()),
type_=postgresql.JSON(astext_type=sa.Text()),
existing_type=JSONB(astext_type=sa.Text()),
type_=JSON(astext_type=sa.Text()),
existing_nullable=True,
existing_server_default=sa.text("'{}'::json"))

View File

@@ -5,7 +5,7 @@ from flask import Blueprint, current_app, request
from flask_login import current_user, login_required
from flask_restful import Resource, abort
from sqlalchemy import cast
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import ARRAY
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy_utils.functions import sort_query
@@ -114,7 +114,7 @@ def json_response(response):
def filter_by_tags(result_set, column):
if request.args.getlist("tags"):
tags = request.args.getlist("tags")
result_set = result_set.filter(cast(column, postgresql.ARRAY(db.Text)).contains(tags))
result_set = result_set.filter(cast(column, ARRAY(db.Text)).contains(tags))
return result_set

View File

@@ -96,7 +96,7 @@ class DashboardListResource(BaseResource):
org=self.current_org,
user=self.current_user,
is_draft=True,
layout="[]",
layout=[],
)
models.db.session.add(dashboard)
models.db.session.commit()

View File

@@ -7,7 +7,6 @@ from redash.permissions import (
require_permission,
)
from redash.serializers import serialize_visualization
from redash.utils import json_dumps
class VisualizationListResource(BaseResource):
@@ -18,7 +17,6 @@ class VisualizationListResource(BaseResource):
query = get_object_or_404(models.Query.get_by_id_and_org, kwargs.pop("query_id"), self.current_org)
require_object_modify_permission(query, self.current_user)
kwargs["options"] = json_dumps(kwargs["options"])
kwargs["query_rel"] = query
vis = models.Visualization(**kwargs)
@@ -34,8 +32,6 @@ class VisualizationResource(BaseResource):
require_object_modify_permission(vis.query_rel, self.current_user)
kwargs = request.get_json(force=True)
if "options" in kwargs:
kwargs["options"] = json_dumps(kwargs["options"])
kwargs.pop("id", None)
kwargs.pop("query_id", None)

View File

@@ -9,7 +9,6 @@ from redash.permissions import (
view_only,
)
from redash.serializers import serialize_widget
from redash.utils import json_dumps
class WidgetListResource(BaseResource):
@@ -30,7 +29,6 @@ class WidgetListResource(BaseResource):
dashboard = models.Dashboard.get_by_id_and_org(widget_properties.get("dashboard_id"), self.current_org)
require_object_modify_permission(dashboard, self.current_user)
widget_properties["options"] = json_dumps(widget_properties["options"])
widget_properties.pop("id", None)
visualization_id = widget_properties.pop("visualization_id")
@@ -65,7 +63,7 @@ class WidgetResource(BaseResource):
require_object_modify_permission(widget.dashboard, self.current_user)
widget_properties = request.get_json(force=True)
widget.text = widget_properties["text"]
widget.options = json_dumps(widget_properties["options"])
widget.options = widget_properties["options"]
models.db.session.commit()
return serialize_widget(widget)

View File

@@ -6,7 +6,7 @@ import time
import pytz
from sqlalchemy import UniqueConstraint, and_, cast, distinct, func, or_
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import ARRAY, DOUBLE_PRECISION, JSONB
from sqlalchemy.event import listens_for
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import (
@@ -50,8 +50,7 @@ from redash.models.types import (
EncryptedConfiguration,
MutableDict,
MutableList,
PseudoJSON,
pseudo_json_cast_property,
json_cast_property,
)
from redash.models.users import ( # noqa
AccessPermission,
@@ -127,7 +126,10 @@ class DataSource(BelongsToOrgMixin, db.Model):
data_source_groups = db.relationship("DataSourceGroup", back_populates="data_source", cascade="all")
__tablename__ = "data_sources"
__table_args__ = (db.Index("data_sources_org_id_name", "org_id", "name"),)
__table_args__ = (
db.Index("data_sources_org_id_name", "org_id", "name"),
{"extend_existing": True},
)
def __eq__(self, other):
return self.id == other.id
@@ -301,34 +303,11 @@ class DataSourceGroup(db.Model):
view_only = Column(db.Boolean, default=False)
__tablename__ = "data_source_groups"
DESERIALIZED_DATA_ATTR = "_deserialized_data"
class DBPersistence:
@property
def data(self):
if self._data is None:
return None
if not hasattr(self, DESERIALIZED_DATA_ATTR):
setattr(self, DESERIALIZED_DATA_ATTR, json_loads(self._data))
return self._deserialized_data
@data.setter
def data(self, data):
if hasattr(self, DESERIALIZED_DATA_ATTR):
delattr(self, DESERIALIZED_DATA_ATTR)
self._data = data
QueryResultPersistence = settings.dynamic_settings.QueryResultPersistence or DBPersistence
__table_args__ = ({"extend_existing": True},)
@generic_repr("id", "org_id", "data_source_id", "query_hash", "runtime", "retrieved_at")
class QueryResult(db.Model, QueryResultPersistence, BelongsToOrgMixin):
class QueryResult(db.Model, BelongsToOrgMixin):
id = primary_key("QueryResult")
org_id = Column(key_type("Organization"), db.ForeignKey("organizations.id"))
org = db.relationship(Organization)
@@ -336,8 +315,8 @@ class QueryResult(db.Model, QueryResultPersistence, BelongsToOrgMixin):
data_source = db.relationship(DataSource, backref=backref("query_results"))
query_hash = Column(db.String(32), index=True)
query_text = Column("query", db.Text)
_data = Column("data", db.Text)
runtime = Column(postgresql.DOUBLE_PRECISION)
data = Column(MutableDict.as_mutable(JSONB), nullable=True)
runtime = Column(DOUBLE_PRECISION)
retrieved_at = Column(db.DateTime(True))
__tablename__ = "query_results"
@@ -478,11 +457,11 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
last_modified_by = db.relationship(User, backref="modified_queries", foreign_keys=[last_modified_by_id])
is_archived = Column(db.Boolean, default=False, index=True)
is_draft = Column(db.Boolean, default=True, index=True)
schedule = Column(MutableDict.as_mutable(PseudoJSON), nullable=True)
interval = pseudo_json_cast_property(db.Integer, "schedule", "interval", default=0)
schedule = Column(MutableDict.as_mutable(JSONB), nullable=True)
interval = json_cast_property(db.Integer, "schedule", "interval", default=0)
schedule_failures = Column(db.Integer, default=0)
visualizations = db.relationship("Visualization", cascade="all, delete-orphan")
options = Column(MutableDict.as_mutable(PseudoJSON), default={})
options = Column(MutableDict.as_mutable(JSONB), default={})
search_vector = Column(
TSVectorType(
"id",
@@ -493,7 +472,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
),
nullable=True,
)
tags = Column("tags", MutableList.as_mutable(postgresql.ARRAY(db.Unicode)), nullable=True)
tags = Column("tags", MutableList.as_mutable(ARRAY(db.Unicode)), nullable=True)
query_class = SearchBaseQuery
__tablename__ = "queries"
@@ -529,7 +508,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
name="Table",
description="",
type="TABLE",
options="{}",
options={},
)
)
return query
@@ -595,7 +574,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
@classmethod
def past_scheduled_queries(cls):
now = utils.utcnow()
queries = Query.query.filter(Query.schedule.isnot(None)).order_by(Query.id)
queries = Query.query.filter(func.jsonb_typeof(Query.schedule) != "null").order_by(Query.id)
return [
query
for query in queries
@@ -607,7 +586,7 @@ class Query(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model):
def outdated_queries(cls):
queries = (
Query.query.options(joinedload(Query.latest_query_data).load_only("retrieved_at"))
.filter(Query.schedule.isnot(None))
.filter(func.jsonb_typeof(Query.schedule) != "null")
.order_by(Query.id)
.all()
)
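
A note on the schedule filter change above: with a JSONB column, an empty schedule can be stored either as SQL NULL or as JSON null, and jsonb_typeof handles both (a sketch):

from sqlalchemy import func

# jsonb_typeof(NULL) is NULL, and NULL != 'null' evaluates to NULL, so this
# predicate filters out rows whose schedule is SQL NULL as well as JSON null.
scheduled = Query.query.filter(func.jsonb_typeof(Query.schedule) != "null")
# By contrast, Query.schedule.isnot(None) would only exclude SQL NULL rows.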
@@ -953,7 +932,7 @@ class Alert(TimestampMixin, BelongsToOrgMixin, db.Model):
query_rel = db.relationship(Query, backref=backref("alerts", cascade="all"))
user_id = Column(key_type("User"), db.ForeignKey("users.id"))
user = db.relationship(User, backref="alerts")
options = Column(MutableDict.as_mutable(PseudoJSON))
options = Column(MutableDict.as_mutable(JSONB), nullable=True)
state = Column(db.String(255), default=UNKNOWN_STATE)
subscriptions = db.relationship("AlertSubscription", cascade="all, delete-orphan")
last_triggered_at = Column(db.DateTime(True), nullable=True)
@@ -1064,13 +1043,13 @@ class Dashboard(ChangeTrackingMixin, TimestampMixin, BelongsToOrgMixin, db.Model
user_id = Column(key_type("User"), db.ForeignKey("users.id"))
user = db.relationship(User)
# layout is no longer used, but kept so we know how to render old dashboards.
layout = Column(db.Text)
layout = Column(MutableList.as_mutable(JSONB), default=[])
dashboard_filters_enabled = Column(db.Boolean, default=False)
is_archived = Column(db.Boolean, default=False, index=True)
is_draft = Column(db.Boolean, default=True, index=True)
widgets = db.relationship("Widget", backref="dashboard", lazy="dynamic")
tags = Column("tags", MutableList.as_mutable(postgresql.ARRAY(db.Unicode)), nullable=True)
options = Column(MutableDict.as_mutable(postgresql.JSON), server_default="{}", default={})
tags = Column("tags", MutableList.as_mutable(ARRAY(db.Unicode)), nullable=True)
options = Column(MutableDict.as_mutable(JSONB), default={})
__tablename__ = "dashboards"
__mapper_args__ = {"version_id_col": version}
@@ -1183,7 +1162,7 @@ class Visualization(TimestampMixin, BelongsToOrgMixin, db.Model):
query_rel = db.relationship(Query, back_populates="visualizations")
name = Column(db.String(255))
description = Column(db.String(4096), nullable=True)
options = Column(db.Text)
options = Column(MutableDict.as_mutable(JSONB), nullable=True)
__tablename__ = "visualizations"
@@ -1210,7 +1189,7 @@ class Widget(TimestampMixin, BelongsToOrgMixin, db.Model):
visualization = db.relationship(Visualization, backref=backref("widgets", cascade="delete"))
text = Column(db.Text, nullable=True)
width = Column(db.Integer)
options = Column(db.Text)
options = Column(MutableDict.as_mutable(JSONB), default={})
dashboard_id = Column(key_type("Dashboard"), db.ForeignKey("dashboards.id"), index=True)
__tablename__ = "widgets"
@@ -1242,7 +1221,7 @@ class Event(db.Model):
action = Column(db.String(255))
object_type = Column(db.String(255))
object_id = Column(db.String(255), nullable=True)
additional_properties = Column(MutableDict.as_mutable(PseudoJSON), nullable=True, default={})
additional_properties = Column(MutableDict.as_mutable(JSONB), nullable=True, default={})
created_at = Column(db.DateTime(True), default=db.func.now())
__tablename__ = "events"

View File

@@ -1,13 +1,13 @@
import functools
from flask_sqlalchemy import BaseQuery, SQLAlchemy
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import object_session
from sqlalchemy.pool import NullPool
from sqlalchemy_searchable import SearchQueryMixin, make_searchable, vectorizer
from redash import settings
from redash.utils import json_dumps
from redash.utils import json_dumps, json_loads
class RedashSQLAlchemy(SQLAlchemy):
@@ -28,7 +28,10 @@ class RedashSQLAlchemy(SQLAlchemy):
return options
db = RedashSQLAlchemy(session_options={"expire_on_commit": False})
db = RedashSQLAlchemy(
session_options={"expire_on_commit": False},
engine_options={"json_serializer": json_dumps, "json_deserializer": json_loads},
)
# Make sure the SQLAlchemy mappers are all properly configured first.
# This is required by SQLAlchemy-Searchable as it adds DDL listeners
# on the configuration phase of models.
@@ -50,7 +53,7 @@ def integer_vectorizer(column):
return db.func.cast(column, db.Text)
@vectorizer(postgresql.UUID)
@vectorizer(UUID)
def uuid_vectorizer(column):
return db.func.cast(column, db.Text)
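
The engine_options added above wire Redash's own JSON helpers into the SQLAlchemy engine; a sketch of what that amounts to at create_engine time (the URL is a placeholder):

from sqlalchemy import create_engine
from redash.utils import json_dumps, json_loads

# json_serializer/json_deserializer are standard SQLAlchemy engine arguments for
# the PostgreSQL dialects; they are applied whenever JSON/JSONB columns are
# written to or read from the database.
engine = create_engine(
    "postgresql:///redash",  # placeholder URL
    json_serializer=json_dumps,
    json_deserializer=json_loads,
)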

View File

@@ -1,8 +1,8 @@
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.inspection import inspect
from sqlalchemy_utils.models import generic_repr
from .base import Column, GFKBase, db, key_type, primary_key
from .types import PseudoJSON
@generic_repr("id", "object_type", "object_id", "created_at")
@@ -13,7 +13,7 @@ class Change(GFKBase, db.Model):
object_version = Column(db.Integer, default=0)
user_id = Column(key_type("User"), db.ForeignKey("users.id"))
user = db.relationship("User", backref="changes")
change = Column(PseudoJSON)
change = Column(JSONB)
created_at = Column(db.DateTime(True), default=db.func.now())
__tablename__ = "changes"

View File

@@ -1,3 +1,4 @@
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.orm.attributes import flag_modified
from sqlalchemy_utils.models import generic_repr
@@ -5,7 +6,7 @@ from redash.settings.organization import settings as org_settings
from .base import Column, db, primary_key
from .mixins import TimestampMixin
from .types import MutableDict, PseudoJSON
from .types import MutableDict
from .users import Group, User
@@ -17,7 +18,7 @@ class Organization(TimestampMixin, db.Model):
id = primary_key("Organization")
name = Column(db.String(255))
slug = Column(db.String(255), unique=True)
settings = Column(MutableDict.as_mutable(PseudoJSON))
settings = Column(MutableDict.as_mutable(JSONB), default={})
groups = db.relationship("Group", lazy="dynamic")
events = db.relationship("Event", lazy="dynamic", order_by="desc(Event.created_at)")

View File

@@ -1,11 +1,8 @@
from sqlalchemy import cast
from sqlalchemy.dialects.postgresql import JSON
from sqlalchemy.ext.indexable import index_property
from sqlalchemy.ext.mutable import Mutable
from sqlalchemy.types import TypeDecorator
from sqlalchemy_utils import EncryptedType
from redash.utils import json_dumps, json_loads
from redash.utils.configuration import ConfigurationContainer
from .base import db
@@ -31,22 +28,6 @@ class EncryptedConfiguration(EncryptedType):
)
# XXX replace PseudoJSON and MutableDict with real JSON field
class PseudoJSON(TypeDecorator):
impl = db.Text
def process_bind_param(self, value, dialect):
if value is None:
return value
return json_dumps(value)
def process_result_value(self, value, dialect):
if not value:
return value
return json_loads(value)
class MutableDict(Mutable, dict):
@classmethod
def coerce(cls, key, value):
@@ -107,19 +88,3 @@ class json_cast_property(index_property):
def expr(self, model):
expr = super(json_cast_property, self).expr(model)
return expr.astext.cast(self.cast_type)
class pseudo_json_cast_property(index_property):
"""
A SQLAlchemy index property that is able to cast the
entity attribute as the specified cast type. Useful
for PseudoJSON columns for easier querying/filtering.
"""
def __init__(self, cast_type, *args, **kwargs):
super().__init__(*args, **kwargs)
self.cast_type = cast_type
def expr(self, model):
expr = cast(getattr(model, self.attr_name), JSON)[self.index]
return expr.astext.cast(self.cast_type)
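
With pseudo_json_cast_property removed, json_cast_property covers the JSONB case directly; a usage sketch for the Query.interval property defined on the model above:

# interval = json_cast_property(db.Integer, "schedule", "interval", default=0)
# exposes the "interval" key of the JSONB schedule column as a queryable integer:
hourly = Query.query.filter(Query.interval == 3600)
# which compiles to roughly: (queries.schedule ->> 'interval')::integer = 3600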

View File

@@ -8,7 +8,7 @@ from operator import or_
from flask import current_app, request_started, url_for
from flask_login import AnonymousUserMixin, UserMixin, current_user
from passlib.apps import custom_app_context as pwd_context
from sqlalchemy.dialects import postgresql
from sqlalchemy.dialects.postgresql import ARRAY, JSONB
from sqlalchemy_utils import EmailType
from sqlalchemy_utils.models import generic_repr
@@ -84,14 +84,14 @@ class User(TimestampMixin, db.Model, BelongsToOrgMixin, UserMixin, PermissionsCh
password_hash = Column(db.String(128), nullable=True)
group_ids = Column(
"groups",
MutableList.as_mutable(postgresql.ARRAY(key_type("Group"))),
MutableList.as_mutable(ARRAY(key_type("Group"))),
nullable=True,
)
api_key = Column(db.String(40), default=lambda: generate_token(40), unique=True)
disabled_at = Column(db.DateTime(True), default=None, nullable=True)
details = Column(
MutableDict.as_mutable(postgresql.JSONB),
MutableDict.as_mutable(JSONB),
nullable=True,
server_default="{}",
default={},
@@ -267,7 +267,7 @@ class Group(db.Model, BelongsToOrgMixin):
org = db.relationship("Organization", back_populates="groups")
type = Column(db.String(255), default=REGULAR_GROUP)
name = Column(db.String(100))
permissions = Column(postgresql.ARRAY(db.String(255)), default=DEFAULT_PERMISSIONS)
permissions = Column(ARRAY(db.String(255)), default=DEFAULT_PERMISSIONS)
created_at = Column(db.DateTime(True), default=db.func.now())
__tablename__ = "groups"

View File

@@ -9,7 +9,6 @@ from rq.timeouts import JobTimeoutException
from sshtunnel import open_tunnel
from redash import settings, utils
from redash.utils import json_loads
from redash.utils.requests_session import (
UnacceptableAddressException,
requests_or_advocate,
@@ -243,7 +242,7 @@ class BaseQueryRunner:
if error is not None:
raise Exception("Failed running query [%s]." % query)
return json_loads(results)["rows"]
return results["rows"]
@classmethod
def to_dict(cls):
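
The same pattern repeats across the query runners below: run_query now returns a plain dict (or None) plus an error string instead of a pre-serialized JSON string, since serialization happens when results are written to the JSONB query_results.data column. A minimal sketch of the new contract (the _execute helper is hypothetical):

def run_query(self, query, user):
    try:
        rows, columns = self._execute(query)  # hypothetical helper returning rows and columns
        return {"columns": columns, "rows": rows}, None
    except Exception as e:
        return None, str(e)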

View File

@@ -7,7 +7,6 @@ from redash.query_runner import (
BaseQueryRunner,
register,
)
from redash.utils import json_dumps
logger = logging.getLogger(__name__)
@@ -81,12 +80,11 @@ class Arango(BaseQueryRunner):
"rows": result,
}
json_data = json_dumps(data, allow_nan=False)
error = None
except Exception:
raise
return json_data, error
return data, error
register(Arango)

View File

@@ -12,7 +12,6 @@ from redash.query_runner import (
register,
)
from redash.settings import parse_boolean
from redash.utils import json_dumps, json_loads
logger = logging.getLogger(__name__)
ANNOTATE_QUERY = parse_boolean(os.environ.get("ATHENA_ANNOTATE_QUERY", "true"))
@@ -210,7 +209,6 @@ class Athena(BaseQueryRunner):
if error is not None:
self._handle_run_query_error(error)
results = json_loads(results)
for row in results["rows"]:
table_name = "{0}.{1}".format(row["table_schema"], row["table_name"])
if table_name not in schema:
@@ -257,14 +255,13 @@ class Athena(BaseQueryRunner):
},
}
json_data = json_dumps(data, allow_nan=False)
error = None
except Exception:
if cursor.query_id:
cursor.cancel()
raise
return json_data, error
return data, error
register(Athena)

View File

@@ -13,7 +13,7 @@ from redash.query_runner import (
JobTimeoutException,
register,
)
from redash.utils import json_dumps, json_loads
from redash.utils import json_loads
logger = logging.getLogger(__name__)
@@ -157,17 +157,16 @@ class AxibaseTSD(BaseQueryRunner):
columns, rows = generate_rows_and_columns(data)
data = {"columns": columns, "rows": rows}
json_data = json_dumps(data)
error = None
except SQLException as e:
json_data = None
data = None
error = e.content
except (KeyboardInterrupt, InterruptException, JobTimeoutException):
sql.cancel_query(query_id)
raise
return json_data, error
return data, error
def get_schema(self, get_stats=False):
connection = atsd_client.connect_url(

View File

@@ -8,7 +8,7 @@ from redash.query_runner import (
BaseQueryRunner,
register,
)
from redash.utils import json_dumps, json_loads
from redash.utils import json_loads
try:
from azure.kusto.data.exceptions import KustoServiceError
@@ -124,16 +124,15 @@ class AzureKusto(BaseQueryRunner):
error = None
data = {"columns": columns, "rows": rows}
json_data = json_dumps(data)
except KustoServiceError as err:
json_data = None
data = None
try:
error = err.args[1][0]["error"]["@message"]
except (IndexError, KeyError):
error = err.args[1]
return json_data, error
return data, error
def get_schema(self, get_stats=False):
query = ".show database schema as json"
@@ -143,8 +142,6 @@ class AzureKusto(BaseQueryRunner):
if error is not None:
self._handle_run_query_error(error)
results = json_loads(results)
schema_as_json = json_loads(results["rows"][0]["DatabaseSchema"])
tables_list = schema_as_json["Databases"][self.configuration["database"]]["Tables"].values()

View File

@@ -16,7 +16,7 @@ from redash.query_runner import (
JobTimeoutException,
register,
)
from redash.utils import json_dumps, json_loads
from redash.utils import json_loads
logger = logging.getLogger(__name__)
@@ -318,7 +318,6 @@ class BigQuery(BaseQueryRunner):
if error is not None:
self._handle_run_query_error(error)
results = json_loads(results)
for row in results["rows"]:
table_name = "{0}.{1}".format(row["table_schema"], row["table_name"])
if table_name not in schema:
@@ -346,9 +345,8 @@ class BigQuery(BaseQueryRunner):
data = self._get_query_result(jobs, query)
error = None
json_data = json_dumps(data, allow_nan=False)
except apiclient.errors.HttpError as e:
json_data = None
data = None
if e.resp.status in [400, 404]:
error = json_loads(e.content)["error"]["message"]
else:
@@ -363,7 +361,7 @@ class BigQuery(BaseQueryRunner):
raise
return json_data, error
return data, error
register(BigQuery)

View File

@@ -5,7 +5,6 @@ from base64 import b64decode
from tempfile import NamedTemporaryFile
from redash.query_runner import BaseQueryRunner, register
from redash.utils import JSONEncoder, json_dumps, json_loads
logger = logging.getLogger(__name__)
@@ -27,11 +26,10 @@ def generate_ssl_options_dict(protocol, cert_path=None):
return ssl_options
class CassandraJSONEncoder(JSONEncoder):
def default(self, o):
def json_encoder(dec, o):
if isinstance(o, sortedset):
return list(o)
return super(CassandraJSONEncoder, self).default(o)
return None
class Cassandra(BaseQueryRunner):
@@ -86,7 +84,6 @@ class Cassandra(BaseQueryRunner):
select release_version from system.local;
"""
results, error = self.run_query(query, None)
results = json_loads(results)
release_version = results["rows"][0]["release_version"]
query = """
@@ -107,7 +104,6 @@ class Cassandra(BaseQueryRunner):
)
results, error = self.run_query(query, None)
results = json_loads(results)
schema = {}
for row in results["rows"]:
@@ -155,9 +151,8 @@ class Cassandra(BaseQueryRunner):
rows = [dict(zip(column_names, row)) for row in result]
data = {"columns": columns, "rows": rows}
json_data = json_dumps(data, cls=CassandraJSONEncoder)
return json_data, None
return data, None
def _generate_cert_file(self):
cert_encoded_bytes = self.configuration.get("sslCertificateFile", None)

View File

@@ -15,7 +15,6 @@ from redash.query_runner import (
register,
split_sql_statements,
)
from redash.utils import json_dumps, json_loads
logger = logging.getLogger(__name__)
@@ -85,8 +84,6 @@ class ClickHouse(BaseSQLQueryRunner):
if error is not None:
self._handle_run_query_error(error)
results = json_loads(results)
for row in results["rows"]:
table_name = "{}.{}".format(row["database"], row["table"])
@@ -200,25 +197,24 @@ class ClickHouse(BaseSQLQueryRunner):
queries = split_multi_query(query)
if not queries:
json_data = None
data = None
error = "Query is empty"
return json_data, error
return data, error
try:
# If just one query was given no session is needed
if len(queries) == 1:
results = self._clickhouse_query(queries[0])
data = self._clickhouse_query(queries[0])
else:
# If more than one query was given, a session is needed. Parameter session_check must be false
# for the first query
session_id = "redash_{}".format(uuid4().hex)
results = self._clickhouse_query(queries[0], session_id, session_check=False)
data = self._clickhouse_query(queries[0], session_id, session_check=False)
for query in queries[1:]:
results = self._clickhouse_query(query, session_id, session_check=True)
data = self._clickhouse_query(query, session_id, session_check=True)
data = json_dumps(results)
error = None
except Exception as e:
data = None

View File

@@ -3,7 +3,7 @@ import datetime
import yaml
from redash.query_runner import BaseQueryRunner, register
from redash.utils import json_dumps, parse_human_time
from redash.utils import parse_human_time
try:
import boto3
@@ -121,7 +121,7 @@ class CloudWatch(BaseQueryRunner):
rows, columns = parse_response(results)
return json_dumps({"rows": rows, "columns": columns}), None
return {"rows": rows, "columns": columns}, None
register(CloudWatch)

View File

@@ -4,7 +4,7 @@ import time
import yaml
from redash.query_runner import BaseQueryRunner, register
from redash.utils import json_dumps, parse_human_time
from redash.utils import parse_human_time
try:
import boto3
@@ -146,7 +146,7 @@ class CloudWatchInsights(BaseQueryRunner):
time.sleep(POLL_INTERVAL)
elapsed += POLL_INTERVAL
return json_dumps(data), None
return data, None
register(CloudWatchInsights)

View File

@@ -9,7 +9,6 @@ import logging
from os import environ
from redash.query_runner import BaseQueryRunner
from redash.utils import json_dumps, json_loads
from . import register
@@ -115,7 +114,7 @@ class CorporateMemoryQueryRunner(BaseQueryRunner):
logger.info("results are: {}".format(results))
# Not sure why we do not use the json package here but all other
# query runners do it the same way :-)
sparql_results = json_loads(results)
sparql_results = results
# transform all bindings to redash rows
rows = []
for sparql_row in sparql_results["results"]["bindings"]:
@@ -133,7 +132,7 @@ class CorporateMemoryQueryRunner(BaseQueryRunner):
columns.append({"name": var, "friendly_name": var, "type": "string"})
# Not sure why we do not use the json package here but all other
# query runners do it the same way :-)
return json_dumps({"columns": columns, "rows": rows})
return {"columns": columns, "rows": rows}
@classmethod
def name(cls):

View File

@@ -10,7 +10,6 @@ from redash.query_runner import (
BaseQueryRunner,
register,
)
from redash.utils import json_dumps
logger = logging.getLogger(__name__)
try:
@@ -155,7 +154,7 @@ class Couchbase(BaseQueryRunner):
rows, columns = parse_results(result.json()["results"])
data = {"columns": columns, "rows": rows}
return json_dumps(data), None
return data, None
@classmethod
def name(cls):

View File

@@ -4,7 +4,6 @@ import logging
import yaml
from redash.query_runner import BaseQueryRunner, NotSupported, register
from redash.utils import json_dumps
from redash.utils.requests_session import (
UnacceptableAddressException,
requests_or_advocate,
@@ -96,19 +95,18 @@ class CSV(BaseQueryRunner):
break
data["rows"] = df[labels].replace({np.nan: None}).to_dict(orient="records")
json_data = json_dumps(data)
error = None
except KeyboardInterrupt:
error = "Query cancelled by user."
json_data = None
data = None
except UnacceptableAddressException:
error = "Can't query private addresses."
json_data = None
data = None
except Exception as e:
error = "Error reading {0}. {1}".format(path, str(e))
json_data = None
data = None
return json_data, error
return data, error
def get_schema(self):
raise NotSupported()

View File

@@ -16,7 +16,6 @@ from redash.query_runner import (
BaseQueryRunner,
register,
)
from redash.utils import json_dumps, json_loads
class Databend(BaseQueryRunner):
@@ -85,11 +84,10 @@ class Databend(BaseQueryRunner):
data = {"columns": columns, "rows": rows}
error = None
json_data = json_dumps(data)
finally:
connection.close()
return json_data, error
return data, error
def get_schema(self, get_stats=False):
query = """
@@ -106,7 +104,6 @@ class Databend(BaseQueryRunner):
self._handle_run_query_error(error)
schema = {}
results = json_loads(results)
for row in results["rows"]:
table_name = "{}.{}".format(row["table_schema"], row["table_name"])
@@ -133,7 +130,6 @@ class Databend(BaseQueryRunner):
self._handle_run_query_error(error)
schema = {}
results = json_loads(results)
for row in results["rows"]:
table_name = "{}.{}".format(row["table_schema"], row["table_name"])

View File

@@ -16,7 +16,6 @@ from redash.query_runner import (
split_sql_statements,
)
from redash.settings import cast_int_or_default
from redash.utils import json_dumps, json_loads
try:
import pyodbc
@@ -115,16 +114,13 @@ class Databricks(BaseSQLQueryRunner):
logger.warning("Truncated result set.")
statsd_client.incr("redash.query_runner.databricks.truncated")
data["truncated"] = True
json_data = json_dumps(data)
error = None
else:
error = None
json_data = json_dumps(
{
data = {
"columns": [{"name": "result", "type": TYPE_STRING}],
"rows": [{"result": "No data was returned."}],
}
)
cursor.close()
except pyodbc.Error as e:
@@ -132,9 +128,9 @@ class Databricks(BaseSQLQueryRunner):
error = str(e.args[1])
else:
error = str(e)
json_data = None
data = None
return json_data, error
return data, error
def get_schema(self):
raise NotSupported()
@@ -146,8 +142,6 @@ class Databricks(BaseSQLQueryRunner):
if error is not None:
self._handle_run_query_error(error)
results = json_loads(results)
first_column_name = results["columns"][0]["name"]
return [row[first_column_name] for row in results["rows"]]

View File

@@ -11,7 +11,6 @@ from redash.query_runner import (
JobTimeoutException,
register,
)
from redash.utils import json_dumps, json_loads
logger = logging.getLogger(__name__)
@@ -78,8 +77,6 @@ class DB2(BaseSQLQueryRunner):
if error is not None:
self._handle_run_query_error(error)
results = json_loads(results)
for row in results["rows"]:
if row["TABLE_SCHEMA"] != "public":
table_name = "{}.{}".format(row["TABLE_SCHEMA"], row["TABLE_NAME"])
@@ -130,23 +127,22 @@ class DB2(BaseSQLQueryRunner):
data = {"columns": columns, "rows": rows}
error = None
json_data = json_dumps(data)
else:
error = "Query completed but it returned no data."
json_data = None
data = None
except (select.error, OSError):
error = "Query interrupted. Please retry."
json_data = None
data = None
except ibm_db_dbi.DatabaseError as e:
error = str(e)
json_data = None
data = None
except (KeyboardInterrupt, InterruptException, JobTimeoutException):
connection.cancel()
raise
finally:
connection.close()
return json_data, error
return data, error
register(DB2)

View File

@@ -8,7 +8,6 @@ except ImportError:
enabled = False
from redash.query_runner import BaseQueryRunner, register
from redash.utils import json_dumps
def reduce_item(reduced_item, key, value):
@@ -81,7 +80,7 @@ class Dgraph(BaseQueryRunner):
client_stub.close()
def run_query(self, query, user):
json_data = None
data = None
error = None
try:
@@ -109,12 +108,10 @@ class Dgraph(BaseQueryRunner):
# finally, assemble both the columns and data
data = {"columns": columns, "rows": processed_data}
json_data = json_dumps(data)
except Exception as e:
error = e
return json_data, error
return data, error
def get_schema(self, get_stats=False):
"""Queries Dgraph for all the predicates, their types, their tokenizers, etc.

View File

@@ -13,7 +13,6 @@ from redash.query_runner import (
guess_type,
register,
)
from redash.utils import json_dumps, json_loads
logger = logging.getLogger(__name__)
@@ -98,9 +97,7 @@ class Drill(BaseHTTPQueryRunner):
if error is not None:
return None, error
results = parse_response(response.json())
return json_dumps(results), None
return parse_response(response.json()), None
def get_schema(self, get_stats=False):
query = """
@@ -132,8 +129,6 @@ class Drill(BaseHTTPQueryRunner):
if error is not None:
self._handle_run_query_error(error)
results = json_loads(results)
schema = {}
for row in results["rows"]:

View File

@@ -12,7 +12,6 @@ from redash.query_runner import (
BaseQueryRunner,
register,
)
from redash.utils import json_dumps, json_loads
TYPES_MAP = {1: TYPE_STRING, 2: TYPE_INTEGER, 3: TYPE_BOOLEAN}
@@ -59,12 +58,10 @@ class Druid(BaseQueryRunner):
data = {"columns": columns, "rows": rows}
error = None
json_data = json_dumps(data)
print(json_data)
finally:
connection.close()
return json_data, error
return data, error
def get_schema(self, get_stats=False):
query = """
@@ -81,7 +78,6 @@ class Druid(BaseQueryRunner):
self._handle_run_query_error(error)
schema = {}
results = json_loads(results)
for row in results["rows"]:
table_name = "{}.{}".format(row["TABLE_SCHEMA"], row["TABLE_NAME"])

View File

@@ -19,7 +19,6 @@ try:
except ImportError:
enabled = False
from redash.utils import json_dumps
logger = logging.getLogger(__name__)
@@ -106,18 +105,17 @@ class e6data(BaseQueryRunner):
columns.append({"name": column_name, "type": column_type})
rows = [dict(zip([c["name"] for c in columns], r)) for r in results]
data = {"columns": columns, "rows": rows}
json_data = json_dumps(data)
error = None
except Exception as error:
logger.debug(error)
json_data = None
data = None
finally:
if cursor is not None:
cursor.clear()
cursor.close()
return json_data, error
return data, error
def test_connection(self):
self.noop_query = "SELECT 1"

View File

@@ -16,7 +16,7 @@ from redash.query_runner import (
JobTimeoutException,
register,
)
from redash.utils import json_dumps, json_loads
from redash.utils import json_loads
try:
import http.client as http_client
@@ -406,18 +406,18 @@ class Kibana(BaseElasticSearch):
# TODO: Handle complete ElasticSearch queries (JSON based sent over HTTP POST)
raise Exception("Advanced queries are not supported")
json_data = json_dumps({"columns": result_columns, "rows": result_rows})
data = {"columns": result_columns, "rows": result_rows}
except requests.HTTPError as e:
logger.exception(e)
r = e.response
error = "Failed to execute query. Return Code: {0} Reason: {1}".format(r.status_code, r.text)
json_data = None
data = None
except requests.exceptions.RequestException as e:
logger.exception(e)
error = "Connection refused"
json_data = None
data = None
return json_data, error
return data, error
class ElasticSearch(BaseElasticSearch):
@@ -460,20 +460,20 @@ class ElasticSearch(BaseElasticSearch):
result_rows = []
self._parse_results(mappings, result_fields, r.json(), result_columns, result_rows)
json_data = json_dumps({"columns": result_columns, "rows": result_rows})
data = {"columns": result_columns, "rows": result_rows}
except (KeyboardInterrupt, JobTimeoutException) as e:
logger.exception(e)
raise
except requests.HTTPError as e:
logger.exception(e)
error = "Failed to execute query. Return Code: {0} Reason: {1}".format(r.status_code, r.text)
json_data = None
data = None
except requests.exceptions.RequestException as e:
logger.exception(e)
error = "Connection refused"
json_data = None
data = None
return json_data, error
return data, error
register(Kibana)

View File

@@ -10,7 +10,6 @@ from redash.query_runner import (
BaseHTTPQueryRunner,
register,
)
from redash.utils import json_dumps, json_loads
logger = logging.getLogger(__name__)
@@ -62,11 +61,9 @@ class ElasticSearch2(BaseHTTPQueryRunner):
query_results = response.json()
data = self._parse_results(result_fields, query_results)
error = None
json_data = json_dumps(data)
return json_data, error
return data, error
def _build_query(self, query: str) -> Tuple[dict, str, Optional[list]]:
query = json_loads(query)
index_name = query.pop("index", "")
result_fields = query.pop("result_fields", None)
url = "/{}/_search".format(index_name)

View File

@@ -9,7 +9,6 @@ from redash.query_runner import (
BaseQueryRunner,
register,
)
from redash.utils import json_dumps
def _exasol_type_mapper(val, data_type):
@@ -109,14 +108,13 @@ class Exasol(BaseQueryRunner):
rows = [dict(zip(cnames, row)) for row in statement]
data = {"columns": columns, "rows": rows}
json_data = json_dumps(data)
finally:
if statement is not None:
statement.close()
connection.close()
return json_data, error
return data, error
def get_schema(self, get_stats=False):
query = """

View File

@@ -3,7 +3,6 @@ import logging
import yaml
from redash.query_runner import BaseQueryRunner, NotSupported, register
from redash.utils import json_dumps
from redash.utils.requests_session import (
UnacceptableAddressException,
requests_or_advocate,
@@ -94,19 +93,18 @@ class Excel(BaseQueryRunner):
break
data["rows"] = df[labels].replace({np.nan: None}).to_dict(orient="records")
json_data = json_dumps(data)
error = None
except KeyboardInterrupt:
error = "Query cancelled by user."
json_data = None
data = None
except UnacceptableAddressException:
error = "Can't query private addresses."
json_data = None
data = None
except Exception as e:
error = "Error reading {0}. {1}".format(path, str(e))
json_data = None
data = None
return json_data, error
return data, error
def get_schema(self):
raise NotSupported()

View File

@@ -12,7 +12,7 @@ from redash.query_runner import (
BaseSQLQueryRunner,
register,
)
from redash.utils import json_dumps, json_loads
from redash.utils import json_loads
logger = logging.getLogger(__name__)
@@ -180,15 +180,14 @@ class GoogleAnalytics(BaseSQLQueryRunner):
response = api.get(**params).execute()
data = parse_ga_response(response)
error = None
json_data = json_dumps(data)
except HttpError as e:
# Make sure we return a more readable error to the end user
error = e._get_reason()
json_data = None
data = None
else:
error = "Wrong query format."
json_data = None
return json_data, error
data = None
return data, error
register(GoogleAnalytics)

View File

@@ -13,7 +13,7 @@ from redash.query_runner import (
BaseQueryRunner,
register,
)
from redash.utils import json_dumps, json_loads
from redash.utils import json_loads
logger = logging.getLogger(__name__)
@@ -160,9 +160,8 @@ class GoogleAnalytics4(BaseQueryRunner):
data = parse_ga_response(raw_result)
error = None
json_data = json_dumps(data)
return json_data, error
return data, error
def test_connection(self):
try:

View File

@@ -11,7 +11,7 @@ from redash.query_runner import (
BaseSQLQueryRunner,
register,
)
from redash.utils import json_dumps, json_loads
from redash.utils import json_loads
logger = logging.getLogger(__name__)
@@ -151,15 +151,14 @@ class GoogleSearchConsole(BaseSQLQueryRunner):
response = api.searchanalytics().query(siteUrl=site_url, body=params).execute()
data = parse_ga_response(response, params["dimensions"])
error = None
json_data = json_dumps(data)
except HttpError as e:
# Make sure we return a more readable error to the end user
error = e._get_reason()
json_data = None
data = None
else:
error = "Wrong query format."
json_data = None
return json_data, error
data = None
return data, error
register(GoogleSearchConsole)

View File

@@ -16,7 +16,7 @@ from redash.query_runner import (
guess_type,
register,
)
from redash.utils import json_dumps, json_loads
from redash.utils import json_loads
logger = logging.getLogger(__name__)
@@ -257,7 +257,7 @@ class GoogleSpreadsheet(BaseQueryRunner):
data = parse_spreadsheet(SpreadsheetWrapper(spreadsheet), worksheet_num_or_title)
return json_dumps(data), None
return data, None
except gspread.SpreadsheetNotFound:
return (
None,

View File

@@ -10,7 +10,6 @@ from redash.query_runner import (
BaseQueryRunner,
register,
)
from redash.utils import json_dumps
logger = logging.getLogger(__name__)
@@ -35,8 +34,7 @@ def _transform_result(response):
}
)
data = {"columns": columns, "rows": rows}
return json_dumps(data)
return {"columns": columns, "rows": rows}
class Graphite(BaseQueryRunner):

View File

@@ -12,7 +12,6 @@ from redash.query_runner import (
JobTimeoutException,
register,
)
from redash.utils import json_dumps
logger = logging.getLogger(__name__)
@@ -139,7 +138,6 @@ class Hive(BaseSQLQueryRunner):
rows = [dict(zip(column_names, row)) for row in cursor]
data = {"columns": columns, "rows": rows}
json_data = json_dumps(data)
error = None
except (KeyboardInterrupt, JobTimeoutException):
if connection:
@@ -150,12 +148,12 @@ class Hive(BaseSQLQueryRunner):
error = e.args[0].status.errorMessage
except AttributeError:
error = str(e)
json_data = None
data = None
finally:
if connection:
connection.close()
return json_data, error
return data, error
class HiveHttp(Hive):

View File

@@ -12,7 +12,6 @@ from redash.query_runner import (
JobTimeoutException,
register,
)
from redash.utils import json_dumps, json_loads
ignite_available = importlib.util.find_spec("pyignite") is not None
gridgain_available = importlib.util.find_spec("pygridgain") is not None
@@ -81,8 +80,6 @@ class Ignite(BaseSQLQueryRunner):
if error is not None:
raise Exception("Failed getting schema.")
results = json_loads(results)
for row in results["rows"]:
if row["SCHEMA_NAME"] != self.configuration.get("schema", "PUBLIC"):
table_name = "{}.{}".format(row["SCHEMA_NAME"], row["TABLE_NAME"])
@@ -160,8 +157,8 @@ class Ignite(BaseSQLQueryRunner):
)
logger.debug("Ignite running query: %s", query)
data = self._parse_results(cursor)
json_data = json_dumps({"columns": data[0], "rows": data[1]})
result = self._parse_results(cursor)
data = {"columns": result[0], "rows": result[1]}
error = None
except (KeyboardInterrupt, JobTimeoutException):
@@ -171,7 +168,7 @@ class Ignite(BaseSQLQueryRunner):
if connection:
connection.close()
return json_data, error
return data, error
register(Ignite)

View File

@@ -10,7 +10,6 @@ from redash.query_runner import (
JobTimeoutException,
register,
)
from redash.utils import json_dumps
logger = logging.getLogger(__name__)
@@ -120,14 +119,13 @@ class Impala(BaseSQLQueryRunner):
rows = [dict(zip(column_names, row)) for row in cursor]
data = {"columns": columns, "rows": rows}
json_data = json_dumps(data)
error = None
cursor.close()
except DatabaseError as e:
json_data = None
data = None
error = str(e)
except RPCError as e:
json_data = None
data = None
error = "Metastore Error [%s]" % str(e)
except (KeyboardInterrupt, JobTimeoutException):
connection.cancel()
@@ -136,7 +134,7 @@ class Impala(BaseSQLQueryRunner):
if connection:
connection.close()
return json_data, error
return data, error
register(Impala)

View File

@@ -7,7 +7,6 @@ from redash.query_runner import (
BaseQueryRunner,
register,
)
from redash.utils import json_dumps
logger = logging.getLogger(__name__)
@@ -64,7 +63,7 @@ def _transform_result(results):
else:
result_columns = [{"name": c, "type": TYPE_STRING} for c in column_names]
return json_dumps({"columns": result_columns, "rows": result_rows})
return {"columns": result_columns, "rows": result_rows}
class InfluxDB(BaseQueryRunner):

View File

@@ -13,7 +13,6 @@ from redash.query_runner import (
BaseQueryRunner,
register,
)
from redash.utils import json_dumps
try:
from influxdb_client import InfluxDBClient
@@ -188,7 +187,7 @@ class InfluxDBv2(BaseQueryRunner):
2. element: An error message, if an error occurred. None, if no
error occurred.
"""
json_data = None
data = None
error = None
try:
@@ -204,14 +203,12 @@ class InfluxDBv2(BaseQueryRunner):
tables = client.query_api().query(query)
data = self._get_data_from_tables(tables)
json_data = json_dumps(data)
except Exception as ex:
error = str(ex)
finally:
self._cleanup_cert_files(influx_kwargs)
return json_data, error
return data, error
register(InfluxDBv2)

View File

@@ -2,7 +2,7 @@ import re
from collections import OrderedDict
from redash.query_runner import TYPE_STRING, BaseHTTPQueryRunner, register
from redash.utils import json_dumps, json_loads
from redash.utils import json_loads
# TODO: make this more general and move into __init__.py
@@ -26,7 +26,7 @@ class ResultSet:
}
def to_json(self):
return json_dumps({"rows": self.rows, "columns": list(self.columns.values())})
return {"rows": self.rows, "columns": list(self.columns.values())}
def merge(self, set):
self.rows = self.rows + set.rows

View File

@@ -14,7 +14,6 @@ from redash.query_runner import (
BaseHTTPQueryRunner,
register,
)
from redash.utils import json_dumps
class QueryParseError(Exception):
@@ -158,11 +157,10 @@ class JSON(BaseHTTPQueryRunner):
def run_query(self, query, user):
query = parse_query(query)
results, error = self._run_json_query(query)
data, error = self._run_json_query(query)
if error is not None:
return None, error
data = json_dumps(results)
if data:
return data, None
return None, "Got empty response from '{}'.".format(query["url"])

View File

@@ -15,7 +15,6 @@ from redash.query_runner import (
BaseQueryRunner,
register,
)
from redash.utils import json_dumps
logger = logging.getLogger(__name__)
@@ -102,7 +101,7 @@ class Kylin(BaseQueryRunner):
columns = self.get_columns(data["columnMetas"])
rows = self.get_rows(columns, data["results"])
return json_dumps({"columns": columns, "rows": rows}), None
return {"columns": columns, "rows": rows}, None
def get_schema(self, get_stats=False):
url = self.configuration["url"]

View File

@@ -10,7 +10,6 @@ from redash.query_runner import (
JobTimeoutException,
register,
)
from redash.utils import json_dumps
logger = logging.getLogger(__name__)
@@ -130,7 +129,6 @@ class MemSQL(BaseSQLQueryRunner):
columns.append({"name": column, "friendly_name": column, "type": TYPE_STRING})
data = {"columns": columns, "rows": rows}
json_data = json_dumps(data)
error = None
except (KeyboardInterrupt, JobTimeoutException):
cursor.close()
@@ -139,7 +137,7 @@ class MemSQL(BaseSQLQueryRunner):
if cursor:
cursor.close()
return json_data, error
return data, error
register(MemSQL)

View File

@@ -13,7 +13,7 @@ from redash.query_runner import (
BaseQueryRunner,
register,
)
from redash.utils import JSONEncoder, json_dumps, json_loads, parse_human_time
from redash.utils import json_loads, parse_human_time
logger = logging.getLogger(__name__)
@@ -42,15 +42,14 @@ TYPES_MAP = {
}
class MongoDBJSONEncoder(JSONEncoder):
def default(self, o):
def json_encoder(dec, o):
if isinstance(o, ObjectId):
return str(o)
elif isinstance(o, Timestamp):
return super(MongoDBJSONEncoder, self).default(o.as_datetime())
return dec.default(o.as_datetime())
elif isinstance(o, Decimal128):
return o.to_decimal()
return super(MongoDBJSONEncoder, self).default(o)
return None
date_regex = re.compile(r'ISODate\("(.*)"\)', re.IGNORECASE)
@@ -348,9 +347,8 @@ class MongoDB(BaseQueryRunner):
data = {"columns": columns, "rows": rows}
error = None
json_data = json_dumps(data, cls=MongoDBJSONEncoder)
return json_data, error
return data, error
register(MongoDB)

View File

@@ -8,7 +8,6 @@ from redash.query_runner import (
JobTimeoutException,
register,
)
from redash.utils import json_dumps, json_loads
logger = logging.getLogger(__name__)
@@ -87,8 +86,6 @@ class SqlServer(BaseSQLQueryRunner):
if error is not None:
self._handle_run_query_error(error)
results = json_loads(results)
for row in results["rows"]:
if row["table_schema"] != self.configuration["db"]:
table_name = "{}.{}".format(row["table_schema"], row["table_name"])
@@ -140,11 +137,10 @@ class SqlServer(BaseSQLQueryRunner):
rows = [dict(zip((column["name"] for column in columns), row)) for row in data]
data = {"columns": columns, "rows": rows}
json_data = json_dumps(data)
error = None
else:
error = "No data was returned."
json_data = None
data = None
cursor.close()
connection.commit()
@@ -155,7 +151,7 @@ class SqlServer(BaseSQLQueryRunner):
except IndexError:
# Connection errors are `args[0][1]`
error = e.args[0][1]
json_data = None
data = None
except (KeyboardInterrupt, JobTimeoutException):
connection.cancel()
raise
@@ -163,7 +159,7 @@ class SqlServer(BaseSQLQueryRunner):
if connection:
connection.close()
return json_data, error
return data, error
register(SqlServer)
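With run_query returning dicts, the schema helpers drop their json_loads round-trip and iterate results["rows"] directly. A small sketch of that shape, using made-up table and column data; the function name is not the real helper.

# Sketch: building a schema map straight from a results dict, the way the
# updated _get_tables helpers do once json_loads is gone. Data is invented.
def schema_from_results(results, default_db="mydb"):
    schema = {}
    for row in results["rows"]:
        if row["table_schema"] != default_db:
            table_name = "{}.{}".format(row["table_schema"], row["table_name"])
        else:
            table_name = row["table_name"]
        schema.setdefault(table_name, {"name": table_name, "columns": []})
        schema[table_name]["columns"].append(row["column_name"])
    return schema


if __name__ == "__main__":
    results = {"rows": [
        {"table_schema": "mydb", "table_name": "users", "column_name": "id"},
        {"table_schema": "audit", "table_name": "events", "column_name": "ts"},
    ]}
    print(schema_from_results(results))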

View File

@@ -6,7 +6,6 @@ from redash.query_runner import (
register,
)
from redash.query_runner.mssql import types_map
from redash.utils import json_dumps, json_loads
logger = logging.getLogger(__name__)
@@ -94,8 +93,6 @@ class SQLServerODBC(BaseSQLQueryRunner):
if error is not None:
self._handle_run_query_error(error)
results = json_loads(results)
for row in results["rows"]:
if row["table_schema"] != self.configuration["db"]:
table_name = "{}.{}".format(row["table_schema"], row["table_name"])
@@ -139,11 +136,10 @@ class SQLServerODBC(BaseSQLQueryRunner):
rows = [dict(zip((column["name"] for column in columns), row)) for row in data]
data = {"columns": columns, "rows": rows}
json_data = json_dumps(data)
error = None
else:
error = "No data was returned."
json_data = None
data = None
cursor.close()
except pyodbc.Error as e:
@@ -153,7 +149,7 @@ class SQLServerODBC(BaseSQLQueryRunner):
except IndexError:
# Connection errors are `args[0][1]`
error = e.args[0][1]
json_data = None
data = None
except (KeyboardInterrupt, JobTimeoutException):
connection.cancel()
raise
@@ -161,7 +157,7 @@ class SQLServerODBC(BaseSQLQueryRunner):
if connection:
connection.close()
return json_data, error
return data, error
register(SQLServerODBC)

View File

@@ -14,7 +14,6 @@ from redash.query_runner import (
register,
)
from redash.settings import parse_boolean
from redash.utils import json_dumps, json_loads
try:
import MySQLdb
@@ -161,8 +160,6 @@ class Mysql(BaseSQLQueryRunner):
if error is not None:
self._handle_run_query_error(error)
results = json_loads(results)
for row in results["rows"]:
if row["table_schema"] != self.configuration["db"]:
table_name = "{}.{}".format(row["table_schema"], row["table_name"])
@@ -194,7 +191,7 @@ class Mysql(BaseSQLQueryRunner):
t.join()
raise
return r.json_data, r.error
return r.data, r.error
def _run_query(self, query, user, connection, r, ev):
try:
@@ -216,17 +213,17 @@ class Mysql(BaseSQLQueryRunner):
rows = [dict(zip((column["name"] for column in columns), row)) for row in data]
data = {"columns": columns, "rows": rows}
r.json_data = json_dumps(data)
r.data = data
r.error = None
else:
r.json_data = None
r.data = None
r.error = "No data was returned."
cursor.close()
except MySQLdb.Error as e:
if cursor:
cursor.close()
r.json_data = None
r.data = None
r.error = e.args[1]
finally:
ev.set()
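The MySQL runner keeps its worker-thread structure; only the attribute changes from r.json_data to r.data. For readers unfamiliar with that structure, here is a stripped-down sketch of handing a result dict back from a worker thread. The ResultHolder class and worker function are stand-ins, not the runner's actual code.

# Stand-alone sketch of the worker-thread pattern: the worker fills a shared
# holder and signals an Event; the caller waits on the event and reads
# holder.data / holder.error. Names are illustrative.
import threading


class ResultHolder:
    def __init__(self):
        self.data = None
        self.error = None


def worker(query, holder, done):
    try:
        # Pretend this ran the query against a server.
        holder.data = {"columns": [{"name": "n"}], "rows": [{"n": 42}]}
        holder.error = None
    except Exception as exc:
        holder.data, holder.error = None, str(exc)
    finally:
        done.set()


def run_query(query):
    holder, done = ResultHolder(), threading.Event()
    t = threading.Thread(target=worker, args=(query, holder, done))
    t.start()
    done.wait()
    t.join()
    return holder.data, holder.error


if __name__ == "__main__":
    print(run_query("SELECT 42"))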

View File

@@ -1,4 +1,3 @@
import json
import logging
import traceback
@@ -150,7 +149,7 @@ class Netezza(BaseSQLQueryRunner):
return typ
def run_query(self, query, user):
json_data, error = None, None
data, error = None, None
try:
with self.connection.cursor() as cursor:
cursor.execute(query)
@@ -165,10 +164,10 @@ class Netezza(BaseSQLQueryRunner):
)
rows = [dict(zip((column["name"] for column in columns), row)) for row in cursor]
json_data = json.dumps({"columns": columns, "rows": rows})
data = {"columns": columns, "rows": rows}
except Exception:
error = traceback.format_exc()
return json_data, error
return data, error
register(Netezza)

View File

@@ -10,7 +10,6 @@ from redash.query_runner import (
JobTimeoutException,
register,
)
from redash.utils import json_dumps, json_loads
try:
import oracledb
@@ -98,8 +97,6 @@ class Oracle(BaseSQLQueryRunner):
if error is not None:
self._handle_run_query_error(error)
results = json_loads(results)
for row in results["rows"]:
if row["OWNER"] is not None:
table_name = "{}.{}".format(row["OWNER"], row["TABLE_NAME"])
@@ -168,19 +165,17 @@ class Oracle(BaseSQLQueryRunner):
rows = [dict(zip((c["name"] for c in columns), row)) for row in cursor]
data = {"columns": columns, "rows": rows}
error = None
json_data = json_dumps(data)
else:
columns = [{"name": "Row(s) Affected", "type": "TYPE_INTEGER"}]
rows = [{"Row(s) Affected": rows_count}]
data = {"columns": columns, "rows": rows}
json_data = json_dumps(data)
connection.commit()
except oracledb.DatabaseError as err:
(err_args,) = err.args
line_number = query.count("\n", 0, err_args.offset) + 1
column_number = err_args.offset - query.rfind("\n", 0, err_args.offset) - 1
error = "Query failed at line {}, column {}: {}".format(str(line_number), str(column_number), str(err))
json_data = None
data = None
except (KeyboardInterrupt, JobTimeoutException):
connection.cancel()
raise
@@ -188,7 +183,7 @@ class Oracle(BaseSQLQueryRunner):
os.environ.pop("NLS_LANG", None)
connection.close()
return json_data, error
return data, error
register(Oracle)

View File

@@ -20,7 +20,6 @@ from redash.query_runner import (
JobTimeoutException,
register,
)
from redash.utils import JSONEncoder, json_dumps, json_loads
logger = logging.getLogger(__name__)
@@ -56,8 +55,7 @@ types_map = {
}
class PostgreSQLJSONEncoder(JSONEncoder):
def default(self, o):
def json_encoder(dec, o):
if isinstance(o, Range):
# From: https://github.com/psycopg/psycopg2/pull/779
if o._bounds is None:
@@ -66,8 +64,7 @@ class PostgreSQLJSONEncoder(JSONEncoder):
items = [o._bounds[0], str(o._lower), ", ", str(o._upper), o._bounds[1]]
return "".join(items)
return super(PostgreSQLJSONEncoder, self).default(o)
return None
def _wait(conn, timeout=None):
@@ -204,8 +201,6 @@ class PostgreSQL(BaseSQLQueryRunner):
if error is not None:
self._handle_run_query_error(error)
results = json_loads(results)
build_schema(results, schema)
def _get_tables(self, schema):
@@ -282,16 +277,15 @@ class PostgreSQL(BaseSQLQueryRunner):
data = {"columns": columns, "rows": rows}
error = None
json_data = json_dumps(data, allow_nan=False, cls=PostgreSQLJSONEncoder)
else:
error = "Query completed but it returned no data."
json_data = None
data = None
except (select.error, OSError):
error = "Query interrupted. Please retry."
json_data = None
data = None
except psycopg2.DatabaseError as e:
error = str(e)
json_data = None
data = None
except (KeyboardInterrupt, InterruptException, JobTimeoutException):
connection.cancel()
raise
@@ -299,7 +293,7 @@ class PostgreSQL(BaseSQLQueryRunner):
connection.close()
_cleanup_ssl_certs(self.ssl_config)
return json_data, error
return data, error
class Redshift(PostgreSQL):

View File

@@ -9,7 +9,6 @@ from redash.query_runner import (
BaseQueryRunner,
register,
)
from redash.utils import json_dumps, json_loads
logger = logging.getLogger(__name__)
@@ -82,8 +81,6 @@ class Phoenix(BaseQueryRunner):
if error is not None:
self._handle_run_query_error(error)
results = json_loads(results)
for row in results["rows"]:
table_name = "{}.{}".format(row["TABLE_SCHEM"], row["TABLE_NAME"])
@@ -105,17 +102,16 @@ class Phoenix(BaseQueryRunner):
columns = self.fetch_columns(column_tuples)
rows = [dict(zip(([column["name"] for column in columns]), r)) for i, r in enumerate(cursor.fetchall())]
data = {"columns": columns, "rows": rows}
json_data = json_dumps(data)
error = None
cursor.close()
except Error as e:
json_data = None
data = None
error = "code: {}, sql state:{}, message: {}".format(e.code, e.sqlstate, str(e))
finally:
if connection:
connection.close()
return json_data, error
return data, error
register(Phoenix)

View File

@@ -19,7 +19,6 @@ from redash.query_runner import (
BaseQueryRunner,
register,
)
from redash.utils import json_dumps
logger = logging.getLogger(__name__)
@@ -92,12 +91,11 @@ class Pinot(BaseQueryRunner):
data = {"columns": columns, "rows": rows}
error = None
json_data = json_dumps(data)
logger.debug("Pinot execute query [%s]", query)
finally:
connection.close()
return json_data, error
return data, error
def get_schema(self, get_stats=False):
schema = {}

View File

@@ -11,7 +11,6 @@ from redash.query_runner import (
JobTimeoutException,
register,
)
from redash.utils import json_dumps, json_loads
logger = logging.getLogger(__name__)
@@ -89,8 +88,6 @@ class Presto(BaseQueryRunner):
if error is not None:
self._handle_run_query_error(error)
results = json_loads(results)
for row in results["rows"]:
table_name = "{}.{}".format(row["table_schema"], row["table_name"])
@@ -120,10 +117,9 @@ class Presto(BaseQueryRunner):
columns = self.fetch_columns(column_tuples)
rows = [dict(zip(([column["name"] for column in columns]), r)) for i, r in enumerate(cursor.fetchall())]
data = {"columns": columns, "rows": rows}
json_data = json_dumps(data)
error = None
except DatabaseError as db:
json_data = None
data = None
default_message = "Unspecified DatabaseError: {0}".format(str(db))
if isinstance(db.args[0], dict):
message = db.args[0].get("failureInfo", {"message", None}).get("message")
@@ -134,7 +130,7 @@ class Presto(BaseQueryRunner):
cursor.cancel()
raise
return json_data, error
return data, error
register(Presto)

View File

@@ -14,7 +14,6 @@ from redash.query_runner import (
BaseQueryRunner,
register,
)
from redash.utils import json_dumps
def get_instant_rows(metrics_data):
@@ -247,7 +246,7 @@ class Prometheus(BaseQueryRunner):
else:
rows = get_instant_rows(metrics)
json_data = json_dumps({"rows": rows, "columns": columns})
data = {"rows": rows, "columns": columns}
except requests.RequestException as e:
return None, str(e)
@@ -256,7 +255,7 @@ class Prometheus(BaseQueryRunner):
finally:
self._cleanup_cert_files(promehteus_kwargs)
return json_data, error
return data, error
register(Prometheus)

View File

@@ -23,7 +23,6 @@ from redash.query_runner import (
BaseQueryRunner,
register,
)
from redash.utils import json_dumps, json_loads
from redash.utils.pandas import pandas_installed
if pandas_installed:
@@ -228,7 +227,7 @@ class Python(BaseQueryRunner):
raise Exception(error)
# TODO: allow avoiding the JSON dumps/loads in same process
query_result = json_loads(data)
query_result = data
if result_type == "dataframe" and pandas_installed:
return pd.DataFrame(query_result["rows"])
@@ -357,15 +356,14 @@ class Python(BaseQueryRunner):
exec(code, restricted_globals, self._script_locals)
result = self._script_locals["result"]
self.validate_result(result)
result["log"] = self._custom_print.lines
json_data = json_dumps(result)
data = self._script_locals["result"]
self.validate_result(data)
data["log"] = self._custom_print.lines
except Exception as e:
error = str(type(e)) + " " + str(e)
json_data = None
data = None
return json_data, error
return data, error
register(Python)
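In the Python runner the sandboxed script still assigns into a result variable; the runner now validates it, appends the captured print log, and returns the dict untouched. A compact sketch of that flow with a trivial stand-in for the sandbox; unlike the real runner, this uses plain exec rather than RestrictedPython.

# Sketch of the Python runner's post-change flow: execute user code that
# assigns `result`, attach the captured log, return the dict as-is.
def run_script(code):
    log = []
    scope = {"result": None, "print": lambda *a: log.append(" ".join(map(str, a)))}
    try:
        exec(code, {}, scope)  # illustration only; no sandboxing here
        data = scope["result"]
        if not isinstance(data, dict):
            raise ValueError("result must be a dict")
        data["log"] = log
        return data, None
    except Exception as exc:
        return None, "{} {}".format(type(exc), exc)


if __name__ == "__main__":
    code = "print('hello')\nresult = {'columns': [], 'rows': []}"
    print(run_script(code))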

View File

@@ -10,7 +10,6 @@ from redash.query_runner import (
JobTimeoutException,
register,
)
from redash.utils import json_dumps
try:
import qds_sdk # noqa: F401
@@ -125,13 +124,13 @@ class Qubole(BaseQueryRunner):
columns = self.fetch_columns([(i, TYPE_STRING) for i in data.pop(0).split("\t")])
rows = [dict(zip((column["name"] for column in columns), row.split("\t"))) for row in data]
json_data = json_dumps({"columns": columns, "rows": rows})
data = {"columns": columns, "rows": rows}
except (KeyboardInterrupt, JobTimeoutException):
logging.info("Sending KILL signal to Qubole Command Id: %s", cmd.id)
cmd.cancel()
raise
return json_data, error
return data, error
def get_schema(self, get_stats=False):
schemas = {}

View File

@@ -13,7 +13,7 @@ from redash.query_runner import (
guess_type,
register,
)
from redash.utils import json_dumps, json_loads
from redash.utils import json_dumps
logger = logging.getLogger(__name__)
@@ -76,8 +76,6 @@ def get_query_results(user, query_id, bring_from_cache, params=None):
results, error = query.data_source.query_runner.run_query(query_text, user)
if error:
raise Exception("Failed loading results for query id {}.".format(query.id))
else:
results = json_loads(results)
return results
@@ -194,16 +192,15 @@ class Results(BaseQueryRunner):
data = {"columns": columns, "rows": rows}
error = None
json_data = json_dumps(data)
else:
error = "Query completed but it returned no data."
json_data = None
data = None
except (KeyboardInterrupt, JobTimeoutException):
connection.cancel()
raise
finally:
connection.close()
return json_data, error
return data, error
register(Results)
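The Results runner (query over query results) now receives child results as dicts and can load them into its in-memory SQLite database without a json_loads step. A rough, self-contained sketch of that load step; identifier quoting and type mapping are simplified compared to the real runner.

# Sketch: load a {"columns": [...], "rows": [...]} dict into an in-memory
# SQLite table so it can be re-queried.
import sqlite3


def load_result(connection, table, result):
    names = [c["name"] for c in result["columns"]]
    connection.execute("CREATE TABLE {} ({})".format(table, ", ".join(names)))
    placeholders = ", ".join("?" for _ in names)
    for row in result["rows"]:
        connection.execute(
            "INSERT INTO {} VALUES ({})".format(table, placeholders),
            [row.get(n) for n in names],
        )


if __name__ == "__main__":
    conn = sqlite3.connect(":memory:")
    load_result(conn, "query_1", {
        "columns": [{"name": "id"}, {"name": "name"}],
        "rows": [{"id": 1, "name": "alice"}, {"id": 2, "name": "bob"}],
    })
    print(conn.execute("SELECT name FROM query_1 WHERE id = 2").fetchall())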

View File

@@ -8,7 +8,6 @@ from redash.query_runner import (
BaseSQLQueryRunner,
register,
)
from redash.utils import json_dumps
def _get_type(value):
@@ -121,7 +120,7 @@ class Rockset(BaseSQLQueryRunner):
columns = []
for k in rows[0]:
columns.append({"name": k, "friendly_name": k, "type": _get_type(rows[0][k])})
data = json_dumps({"columns": columns, "rows": rows})
data = {"columns": columns, "rows": rows}
return data, None

View File

@@ -12,7 +12,6 @@ from redash.query_runner import (
BaseQueryRunner,
register,
)
from redash.utils import json_dumps
logger = logging.getLogger(__name__)
@@ -166,11 +165,10 @@ class Salesforce(BaseQueryRunner):
columns = self.fetch_columns(cols)
error = None
data = {"columns": columns, "rows": rows}
json_data = json_dumps(data)
except SalesforceError as err:
error = err.content
json_data = None
return json_data, error
data = None
return data, error
def get_schema(self, get_stats=False):
sf = self._get_sf()

View File

@@ -17,7 +17,6 @@ from redash.query_runner import (
BaseSQLQueryRunner,
register,
)
from redash.utils import json_dumps
TYPES_MAP = {
0: TYPE_INTEGER,
@@ -135,12 +134,11 @@ class Snowflake(BaseSQLQueryRunner):
data = self._parse_results(cursor)
error = None
json_data = json_dumps(data)
finally:
cursor.close()
connection.close()
return json_data, error
return data, error
def _run_query_without_warehouse(self, query):
connection = self._get_connection()

View File

@@ -8,7 +8,6 @@ import logging
from os import environ
from redash.query_runner import BaseQueryRunner
from redash.utils import json_dumps, json_loads
from . import register
@@ -83,7 +82,7 @@ class SPARQLEndpointQueryRunner(BaseQueryRunner):
logger.info("results are: {}".format(results))
# Not sure why we do not use the json package here but all other
# query runner do it the same way :-)
sparql_results = json_loads(results)
sparql_results = results
# transform all bindings to redash rows
rows = []
for sparql_row in sparql_results["results"]["bindings"]:
@@ -101,7 +100,7 @@ class SPARQLEndpointQueryRunner(BaseQueryRunner):
columns.append({"name": var, "friendly_name": var, "type": "string"})
# Not sure why we do not use the json package here but all other
# query runner do it the same way :-)
return json_dumps({"columns": columns, "rows": rows})
return {"columns": columns, "rows": rows}
@classmethod
def name(cls):

View File

@@ -6,7 +6,6 @@ from redash.query_runner import (
JobTimeoutException,
register,
)
from redash.utils import json_dumps, json_loads
logger = logging.getLogger(__name__)
@@ -40,8 +39,6 @@ class Sqlite(BaseSQLQueryRunner):
if error is not None:
raise Exception("Failed getting schema.")
results = json_loads(results)
for row in results["rows"]:
table_name = row["tbl_name"]
schema[table_name] = {"name": table_name, "columns": []}
@@ -49,7 +46,6 @@ class Sqlite(BaseSQLQueryRunner):
if error is not None:
self._handle_run_query_error(error)
results_table = json_loads(results_table)
for row_column in results_table["rows"]:
schema[table_name]["columns"].append(row_column["name"])
@@ -69,16 +65,15 @@ class Sqlite(BaseSQLQueryRunner):
data = {"columns": columns, "rows": rows}
error = None
json_data = json_dumps(data)
else:
error = "Query completed but it returned no data."
json_data = None
data = None
except (KeyboardInterrupt, JobTimeoutException):
connection.cancel()
raise
finally:
connection.close()
return json_data, error
return data, error
register(Sqlite)

View File

@@ -9,7 +9,6 @@ from redash.query_runner import (
BaseQueryRunner,
register,
)
from redash.utils import json_dumps
logger = logging.getLogger(__name__)
@@ -112,15 +111,14 @@ class TreasureData(BaseQueryRunner):
else:
rows = [dict(zip(([column["name"] for column in columns]), r)) for r in cursor.fetchall()]
data = {"columns": columns, "rows": rows}
json_data = json_dumps(data)
error = None
except errors.InternalError as e:
json_data = None
data = None
error = "%s: %s" % (
str(e),
cursor.show_job().get("debug", {}).get("stderr", "No stderr message in the response"),
)
return json_data, error
return data, error
register(TreasureData)

View File

@@ -12,7 +12,6 @@ from redash.query_runner import (
JobTimeoutException,
register,
)
from redash.utils import json_dumps, json_loads
logger = logging.getLogger(__name__)
@@ -100,8 +99,6 @@ class Trino(BaseQueryRunner):
if error is not None:
self._handle_run_query_error(error)
results = json_loads(results)
for row in results["rows"]:
table_name = f'{catalog}.{row["table_schema"]}.{row["table_name"]}'
@@ -122,8 +119,6 @@ class Trino(BaseQueryRunner):
if error is not None:
self._handle_run_query_error(error)
results = json_loads(results)
catalogs = []
for row in results["rows"]:
catalog = row["Catalog"]
@@ -158,10 +153,9 @@ class Trino(BaseQueryRunner):
columns = self.fetch_columns([(c[0], TRINO_TYPES_MAPPING.get(c[1], None)) for c in description])
rows = [dict(zip([c["name"] for c in columns], r)) for r in results]
data = {"columns": columns, "rows": rows}
json_data = json_dumps(data)
error = None
except DatabaseError as db:
json_data = None
data = None
default_message = "Unspecified DatabaseError: {0}".format(str(db))
if isinstance(db.args[0], dict):
message = db.args[0].get("failureInfo", {"message", None}).get("message")
@@ -172,7 +166,7 @@ class Trino(BaseQueryRunner):
cursor.cancel()
raise
return json_data, error
return data, error
register(Trino)

View File

@@ -5,7 +5,7 @@ import jwt
import requests
from redash.query_runner import BaseSQLQueryRunner, register
from redash.utils import json_dumps, json_loads
from redash.utils import json_loads
logger = logging.getLogger(__name__)
@@ -58,8 +58,7 @@ class Uptycs(BaseSQLQueryRunner):
if "items" in data:
rows = data["items"]
redash_json_data = {"columns": transformed_columns, "rows": rows}
return redash_json_data
return {"columns": transformed_columns, "rows": rows}
def api_call(self, sql):
# JWT encoded header
@@ -86,22 +85,21 @@ class Uptycs(BaseSQLQueryRunner):
else:
error = "status_code " + str(response.status_code) + "\n"
error = error + "failed to connect"
json_data = {}
return json_data, error
data = {}
return data, error
# if we get right status code then call transfored_to_redash
json_data = self.transformed_to_redash_json(response_output)
data = self.transformed_to_redash_json(response_output)
error = None
# if we got error from Uptycs include error information
if "error" in response_output:
error = response_output["error"]["message"]["brief"]
error = error + "\n" + response_output["error"]["message"]["detail"]
return json_data, error
return data, error
def run_query(self, query, user):
data, error = self.api_call(query)
json_data = json_dumps(data)
logger.debug("%s", json_data)
return json_data, error
logger.debug("%s", data)
return data, error
def get_schema(self, get_stats=False):
header = self.generate_header(self.configuration.get("key"), self.configuration.get("secret"))

View File

@@ -10,7 +10,6 @@ from redash.query_runner import (
BaseSQLQueryRunner,
register,
)
from redash.utils import json_dumps, json_loads
logger = logging.getLogger(__name__)
@@ -85,8 +84,6 @@ class Vertica(BaseSQLQueryRunner):
if error is not None:
self._handle_run_query_error(error)
results = json_loads(results)
for row in results["rows"]:
table_name = "{}.{}".format(row["table_schema"], row["table_name"])
@@ -101,9 +98,9 @@ class Vertica(BaseSQLQueryRunner):
import vertica_python
if query == "":
json_data = None
data = None
error = "Query is empty"
return json_data, error
return data, error
connection = None
try:
@@ -131,10 +128,9 @@ class Vertica(BaseSQLQueryRunner):
rows = [dict(zip(([c["name"] for c in columns]), r)) for r in cursor.fetchall()]
data = {"columns": columns, "rows": rows}
json_data = json_dumps(data)
error = None
else:
json_data = None
data = None
error = "No data was returned."
cursor.close()
@@ -142,7 +138,7 @@ class Vertica(BaseSQLQueryRunner):
if connection:
connection.close()
return json_data, error
return data, error
register(Vertica)

View File

@@ -5,7 +5,6 @@ import requests
import yaml
from redash.query_runner import BaseSQLQueryRunner, register
from redash.utils import json_dumps
from redash.utils.pandas import pandas_installed
openpyxl_installed = find_spec("openpyxl")
@@ -157,7 +156,7 @@ class YandexDisk(BaseSQLQueryRunner):
new_df = pd.concat(new_df, ignore_index=True)
df = new_df.copy()
data = json_dumps(pandas_to_result(df))
data = pandas_to_result(df)
error = None
return data, error
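Here the DataFrame is handed to a columns/rows converter and its dict is returned directly. Below is a hedged reimplementation of such a converter written from scratch for illustration; it is not copied from redash.utils.pandas, and it assumes pandas is installed.

# Illustrative converter from a pandas DataFrame to the columns/rows dict
# shape query runners now return. Type mapping is deliberately minimal.
import pandas as pd


def dataframe_to_result(df: pd.DataFrame) -> dict:
    type_map = {"int64": "integer", "float64": "float", "bool": "boolean"}
    columns = [
        {"name": str(col), "friendly_name": str(col),
         "type": type_map.get(str(dtype), "string")}
        for col, dtype in df.dtypes.items()
    ]
    rows = df.to_dict(orient="records")
    return {"columns": columns, "rows": rows}


if __name__ == "__main__":
    frame = pd.DataFrame({"city": ["Kyiv", "Boston"], "count": [3, 5]})
    print(dataframe_to_result(frame))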

View File

@@ -13,7 +13,6 @@ from redash.query_runner import (
BaseSQLQueryRunner,
register,
)
from redash.utils import json_dumps
logger = logging.getLogger(__name__)
@@ -168,7 +167,7 @@ class YandexMetrica(BaseSQLQueryRunner):
return data, error
try:
data = json_dumps(parse_ym_response(self._send_query(**params)))
data = parse_ym_response(self._send_query(**params))
error = None
except Exception as e:
logging.exception(e)

View File

@@ -16,14 +16,13 @@ from redash.serializers.query_result import (
serialize_query_result_to_dsv,
serialize_query_result_to_xlsx,
)
from redash.utils import json_loads
def public_widget(widget):
res = {
"id": widget.id,
"width": widget.width,
"options": json_loads(widget.options),
"options": widget.options,
"text": widget.text,
"updated_at": widget.updated_at,
"created_at": widget.created_at,
@@ -35,7 +34,7 @@ def public_widget(widget):
"type": v.type,
"name": v.name,
"description": v.description,
"options": json_loads(v.options),
"options": v.options,
"updated_at": v.updated_at,
"created_at": v.created_at,
"query": {
@@ -146,7 +145,7 @@ def serialize_visualization(object, with_query=True):
"type": object.type,
"name": object.name,
"description": object.description,
"options": json_loads(object.options),
"options": object.options,
"updated_at": object.updated_at,
"created_at": object.created_at,
}
@@ -161,7 +160,7 @@ def serialize_widget(object):
d = {
"id": object.id,
"width": object.width,
"options": json_loads(object.options),
"options": object.options,
"dashboard_id": object.dashboard_id,
"text": object.text,
"updated_at": object.updated_at,
@@ -197,7 +196,7 @@ def serialize_alert(alert, full=True):
def serialize_dashboard(obj, with_widgets=False, user=None, with_favorite_state=True):
layout = json_loads(obj.layout)
layout = obj.layout
widgets = []
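These serializer changes follow from the column-type migration in this commit: once options and layout are jsonb columns, SQLAlchemy hands back Python dicts and lists, so json_loads becomes a no-op to delete. A minimal model sketch under that assumption; the table, model, and function names are invented for illustration and are not Redash's models.

# Sketch: with a JSONB column (wrapped in MutableDict so in-place edits are
# tracked), reading .options yields a dict directly, no json_loads needed.
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.ext.mutable import MutableDict
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class ExampleWidget(Base):
    __tablename__ = "example_widgets"

    id = sa.Column(sa.Integer, primary_key=True)
    options = sa.Column(MutableDict.as_mutable(JSONB), nullable=False, default=dict)


def serialize(widget: "ExampleWidget") -> dict:
    # The ORM attribute is already a dict; serialize it as-is.
    return {"id": widget.id, "options": widget.options}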

View File

@@ -9,6 +9,7 @@ import json
import os
import random
import re
import sys
import uuid
import pystache
@@ -69,11 +70,21 @@ def generate_token(length):
return "".join(rand.choice(chars) for x in range(length))
json_encoders = [m.custom_json_encoder for m in sys.modules if hasattr(m, "custom_json_encoder")]
class JSONEncoder(json.JSONEncoder):
"""Adapter for `json.dumps`."""
def __init__(self, **kwargs):
self.encoders = json_encoders
super().__init__(**kwargs)
def default(self, o):
# Some SQLAlchemy collections are lazy.
for encoder in self.encoders:
result = encoder(self, o)
if result:
return result
if isinstance(o, Query):
result = list(o)
elif isinstance(o, decimal.Decimal):

View File

@@ -70,7 +70,7 @@ dashboard_factory = ModelFactory(
redash.models.Dashboard,
name="test",
user=user_factory.create,
layout="[]",
layout=[],
is_draft=False,
org=1,
)
@@ -122,7 +122,7 @@ alert_factory = ModelFactory(
query_result_factory = ModelFactory(
redash.models.QueryResult,
data='{"columns":{}, "rows":[]}',
data={"columns": {}, "rows": []},
runtime=1,
retrieved_at=utcnow,
query_text="SELECT 1",
@@ -137,13 +137,13 @@ visualization_factory = ModelFactory(
query_rel=query_factory.create,
name="Chart",
description="",
options="{}",
options={},
)
widget_factory = ModelFactory(
redash.models.Widget,
width=1,
options="{}",
options={},
dashboard=dashboard_factory.create,
visualization=visualization_factory.create,
)
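Now that factory defaults are dicts and lists rather than JSON strings, it is worth producing a fresh copy per created instance so tests cannot leak state through a shared mutable default. This is a general Python point rather than something the diff enforces; the SimpleFactory below is an illustration, not the ModelFactory used in these tests.

# Sketch of a factory whose dict/list defaults are copied per instance.
import copy


class SimpleFactory:
    def __init__(self, model, **defaults):
        self.model = model
        self.defaults = defaults

    def create(self, **overrides):
        kwargs = {}
        for key, value in {**self.defaults, **overrides}.items():
            kwargs[key] = value() if callable(value) else copy.deepcopy(value)
        return self.model(**kwargs)


class Widget:
    def __init__(self, width, options):
        self.width, self.options = width, options


if __name__ == "__main__":
    factory = SimpleFactory(Widget, width=1, options={})
    a, b = factory.create(), factory.create()
    a.options["color"] = "red"
    assert b.options == {}  # defaults are not shared between instances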

View File

@@ -74,7 +74,7 @@ class TestDashboardResourceGet(BaseTestCase):
vis = self.factory.create_visualization(query_rel=query)
restricted_widget = self.factory.create_widget(visualization=vis, dashboard=dashboard)
widget = self.factory.create_widget(dashboard=dashboard)
dashboard.layout = "[[{}, {}]]".format(widget.id, restricted_widget.id)
dashboard.layout = [[widget.id, restricted_widget.id]]
db.session.commit()
rv = self.make_request("get", "/api/dashboards/{0}".format(dashboard.id))
@@ -94,7 +94,7 @@ class TestDashboardResourcePost(BaseTestCase):
rv = self.make_request(
"post",
"/api/dashboards/{0}".format(d.id),
data={"name": new_name, "layout": "[]"},
data={"name": new_name, "layout": []},
)
self.assertEqual(rv.status_code, 200)
self.assertEqual(rv.json["name"], new_name)
@@ -107,7 +107,7 @@ class TestDashboardResourcePost(BaseTestCase):
rv = self.make_request(
"post",
"/api/dashboards/{0}".format(d.id),
data={"name": new_name, "layout": "[]", "version": d.version - 1},
data={"name": new_name, "layout": [], "version": d.version - 1},
)
self.assertEqual(rv.status_code, 409)
@@ -120,7 +120,7 @@ class TestDashboardResourcePost(BaseTestCase):
rv = self.make_request(
"post",
"/api/dashboards/{0}".format(d.id),
data={"name": new_name, "layout": "[]"},
data={"name": new_name, "layout": []},
)
self.assertEqual(rv.status_code, 200)
@@ -133,7 +133,7 @@ class TestDashboardResourcePost(BaseTestCase):
rv = self.make_request(
"post",
"/api/dashboards/{0}".format(d.id),
data={"name": new_name, "layout": "[]", "version": d.version},
data={"name": new_name, "layout": [], "version": d.version},
user=user,
)
self.assertEqual(rv.status_code, 403)
@@ -143,7 +143,7 @@ class TestDashboardResourcePost(BaseTestCase):
rv = self.make_request(
"post",
"/api/dashboards/{0}".format(d.id),
data={"name": new_name, "layout": "[]", "version": d.version},
data={"name": new_name, "layout": [], "version": d.version},
user=user,
)

View File

@@ -1,6 +1,5 @@
from redash.handlers.query_results import error_messages, run_query
from redash.models import db
from redash.utils import json_dumps
from tests import BaseTestCase
@@ -362,7 +361,7 @@ class TestQueryDropdownsResource(BaseTestCase):
query_result = self.factory.create_query_result()
data = {"rows": [], "columns": [{"name": "whatever"}]}
query_result = self.factory.create_query_result(data=json_dumps(data))
query_result = self.factory.create_query_result(data=data)
unrelated_dropdown_query = self.factory.create_query(latest_query_data=query_result)
# unrelated_dropdown_query has not been associated with query
@@ -378,7 +377,7 @@ class TestQueryDropdownsResource(BaseTestCase):
def test_allows_access_if_associated_and_has_access_to_parent(self):
query_result = self.factory.create_query_result()
data = {"rows": [], "columns": [{"name": "whatever"}]}
query_result = self.factory.create_query_result(data=json_dumps(data))
query_result = self.factory.create_query_result(data=data)
dropdown_query = self.factory.create_query(latest_query_data=query_result)
options = {"parameters": [{"name": "param", "type": "query", "queryId": dropdown_query.id}]}
@@ -423,7 +422,7 @@ class TestQueryResultExcelResponse(BaseTestCase):
"rows": [{"test": 1}, {"test": 2, "test2": 3}],
"columns": [{"name": "test"}, {"name": "test2"}],
}
query_result = self.factory.create_query_result(data=json_dumps(data))
query_result = self.factory.create_query_result(data=data)
rv = self.make_request(
"get",

View File

@@ -2,7 +2,6 @@ import textwrap
from unittest import TestCase
from redash.models import OPERATORS, Alert, db, next_state
from redash.utils import json_dumps
from tests import BaseTestCase
@@ -43,7 +42,7 @@ class TestAlertAll(BaseTestCase):
def get_results(value):
return json_dumps({"rows": [{"foo": value}], "columns": [{"name": "foo", "type": "STRING"}]})
return {"rows": [{"foo": value}], "columns": [{"name": "foo", "type": "STRING"}]}
class TestAlertEvaluate(BaseTestCase):
@@ -66,7 +65,7 @@ class TestAlertEvaluate(BaseTestCase):
self.assertEqual(alert.evaluate(), Alert.UNKNOWN_STATE)
def test_evaluate_return_unknown_when_empty_results(self):
results = json_dumps({"rows": [], "columns": [{"name": "foo", "type": "STRING"}]})
results = {"rows": [], "columns": [{"name": "foo", "type": "STRING"}]}
alert = self.create_alert(results)
self.assertEqual(alert.evaluate(), Alert.UNKNOWN_STATE)

View File

@@ -15,7 +15,7 @@ class DashboardTest(BaseTestCase):
widget1 = self.factory.create_widget(visualization=vis1, dashboard=dashboard)
widget2 = self.factory.create_widget(visualization=vis2, dashboard=dashboard)
widget3 = self.factory.create_widget(visualization=vis3, dashboard=dashboard)
dashboard.layout = "[[{}, {}, {}]]".format(widget1.id, widget2.id, widget3.id)
dashboard.layout = [[widget1.id, widget2.id, widget3.id]]
db.session.commit()
return dashboard

View File

@@ -159,7 +159,7 @@ class QueryTest(BaseTestCase):
q2 = self.factory.create_query(name="Testing searching")
q3 = self.factory.create_query(name="Testing finding")
queries = list(Query.search("(testing search) or finding", [self.factory.default_group.id]))
queries = list(Query.search("testing (search or finding)", [self.factory.default_group.id]))
self.assertIn(q1, queries)
self.assertIn(q2, queries)
self.assertIn(q3, queries)
@@ -373,16 +373,26 @@ class TestQueryFork(BaseTestCase):
query = self.factory.create_query(data_source=data_source, description="this is description")
# create default TABLE - query factory does not create it
self.factory.create_visualization(query_rel=query, name="Table", description="", type="TABLE", options="{}")
self.factory.create_visualization(query_rel=query, name="Table", description="", type="TABLE", options={})
visualization_chart = self.factory.create_visualization(
query_rel=query,
description="chart vis",
type="CHART",
options="""{"yAxis": [{"type": "linear"}, {"type": "linear", "opposite": true}], "series": {"stacking": null}, "globalSeriesType": "line", "sortX": true, "seriesOptions": {"count": {"zIndex": 0, "index": 0, "type": "line", "yAxis": 0}}, "xAxis": {"labels": {"enabled": true}, "type": "datetime"}, "columnMapping": {"count": "y", "created_at": "x"}, "bottomMargin": 50, "legend": {"enabled": true}}""",
options={
"yAxis": [{"type": "linear"}, {"type": "linear", "opposite": True}],
"series": {"stacking": None},
"globalSeriesType": "line",
"sortX": True,
"seriesOptions": {"count": {"zIndex": 0, "index": 0, "type": "line", "yAxis": 0}},
"xAxis": {"labels": {"enabled": True}, "type": "datetime"},
"columnMapping": {"count": "y", "created_at": "x"},
"bottomMargin": 50,
"legend": {"enabled": True},
},
)
visualization_box = self.factory.create_visualization(
query_rel=query, description="box vis", type="BOXPLOT", options="{}"
query_rel=query, description="box vis", type="BOXPLOT", options={}
)
fork_user = self.factory.create_user()
forked_query = query.fork(fork_user)
@@ -417,7 +427,7 @@ class TestQueryFork(BaseTestCase):
self.assertEqual(count_table, 1)
self.assertEqual(forked_table.name, "Table")
self.assertEqual(forked_table.description, "")
self.assertEqual(forked_table.options, "{}")
self.assertEqual(forked_table.options, {})
def test_fork_from_query_that_has_no_visualization(self):
# prepare original query and visualizations
@@ -425,7 +435,7 @@ class TestQueryFork(BaseTestCase):
query = self.factory.create_query(data_source=data_source, description="this is description")
# create default TABLE - query factory does not create it
self.factory.create_visualization(query_rel=query, name="Table", description="", type="TABLE", options="{}")
self.factory.create_visualization(query_rel=query, name="Table", description="", type="TABLE", options={})
fork_user = self.factory.create_user()
@@ -457,7 +467,7 @@ class TestQueryUpdateLatestResult(BaseTestCase):
self.query_hash = gen_query_hash(self.query)
self.runtime = 123
self.utcnow = utcnow()
self.data = "data"
self.data = {"columns": {}, "rows": []}
def test_updates_existing_queries(self):
query1 = self.factory.create_query(query_text=self.query)

View File

@@ -1,10 +1,6 @@
import datetime
from unittest import TestCase
from mock import patch
from redash import models
from redash.models import DBPersistence
from redash.utils import utcnow
from tests import BaseTestCase
@@ -71,28 +67,9 @@ class QueryResultTest(BaseTestCase):
query.data_source,
query.query_hash,
query.query_text,
"",
{},
0,
utcnow(),
)
self.assertEqual(original_updated_at, query.updated_at)
class TestDBPersistence(TestCase):
def test_updating_data_removes_cached_result(self):
p = DBPersistence()
p.data = '{"test": 1}'
self.assertDictEqual(p.data, {"test": 1})
p.data = '{"test": 2}'
self.assertDictEqual(p.data, {"test": 2})
@patch("redash.models.json_loads")
def test_calls_json_loads_only_once(self, json_loads_patch):
json_loads_patch.return_value = "1"
p = DBPersistence()
json_data = '{"test": 1}'
p.data = json_data
a = p.data # noqa
b = p.data # noqa
json_loads_patch.assert_called_once_with(json_data)

View File

@@ -87,7 +87,7 @@ class TestClickHouse(TestCase):
self.assertIsNone(error)
self.assertEqual(
json.loads(data),
data,
{
"columns": [
{"name": "1", "friendly_name": "1", "type": TYPE_INTEGER},
@@ -139,7 +139,7 @@ SELECT * FROM test;
self.assertIsNone(error)
self.assertEqual(
json.loads(data),
data,
{
"columns": [
{"name": "1", "friendly_name": "1", "type": TYPE_INTEGER},

View File

@@ -2,7 +2,6 @@ from unittest.mock import patch
from redash.query_runner import TYPE_INTEGER, TYPE_STRING
from redash.query_runner.e6data import e6data
from redash.utils import json_dumps
runner = e6data(
{
@@ -28,15 +27,13 @@ def test_run_query(mock_cursor):
json_data, error = runner.run_query(query, user)
expected_json_data = json_dumps(
{
expected_json_data = {
"columns": [
{"name": "id", "type": TYPE_INTEGER},
{"name": "name", "type": TYPE_STRING},
],
"rows": [{"id": 1, "name": "John"}],
}
)
assert json_data == expected_json_data
@@ -50,7 +47,7 @@ def test_test_connection(mock_cursor):
json_data, error = runner.run_query(query, user)
expected_json_data = json_dumps({"columns": [{"name": "EXPR$0", "type": TYPE_INTEGER}], "rows": [{"EXPR$0": 1}]})
expected_json_data = {"columns": [{"name": "EXPR$0", "type": TYPE_INTEGER}], "rows": [{"EXPR$0": 1}]}
assert json_data == expected_json_data
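On the test side the pattern is uniform: build the expected structure as a dict and compare it to run_query's first return value, with no json.dumps/json.loads round-trip. A tiny pytest-style sketch of that style; FakeRunner is a stand-in, not one of the real query runners.

# Sketch of the updated test style: assert directly on the returned dict.
class FakeRunner:
    def run_query(self, query, user=None):
        return {"columns": [{"name": "EXPR$0", "type": "integer"}],
                "rows": [{"EXPR$0": 1}]}, None


def test_run_query_returns_dict():
    data, error = FakeRunner().run_query("SELECT 1")
    assert error is None
    assert data == {"columns": [{"name": "EXPR$0", "type": "integer"}],
                    "rows": [{"EXPR$0": 1}]}


if __name__ == "__main__":
    test_run_query_returns_dict()
    print("ok")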

View File

@@ -1,5 +1,3 @@
import json
from influxdb.resultset import ResultSet
from redash.query_runner import (
@@ -40,7 +38,7 @@ def test_influxdb_result_types_with_rows():
{"k1": "bar", "time": "2023-10-06T13:31:08.882953339Z", "v1": 0.6, "v2": 4},
],
}
assert json.loads(transformed) == expected
assert transformed == expected
def test_influxdb_result_types_with_no_rows_are_string():
@@ -55,4 +53,4 @@ def test_influxdb_result_types_with_no_rows_are_string():
],
"rows": [],
}
assert json.loads(transformed) == expected
assert transformed == expected

View File

@@ -1,5 +1,3 @@
import json
import mock
import pytest
from influxdb_client.client.flux_table import (
@@ -277,10 +275,8 @@ class TestInfluxDBv2:
@mock.patch("redash.query_runner.influx_db_v2.InfluxDBClient")
@mock.patch("redash.query_runner.influx_db_v2.InfluxDBv2." "_cleanup_cert_files")
@mock.patch("redash.query_runner.influx_db_v2.logger")
@mock.patch("redash.query_runner.influx_db_v2.json_dumps")
def test_run_query(
self,
json_dumps_mock: mock.MagicMock,
logger_mock: mock.MagicMock,
cleanup_cert_files_mock: mock.MagicMock,
influx_db_client_mock: mock.MagicMock,
@@ -310,28 +306,24 @@ class TestInfluxDBv2:
],
"rows": [{"col_1": "col_value_1", "col_2": 1}, {"col_1": "col_value_2", "col_2": 2}, {"col_3": 3.0}],
}
json_dumps_data = json.dumps(result_data)
query_mock = influx_db_client_mock.return_value.__enter__().query_api().query
query_mock.return_value = influx_table_list
json_dumps_mock.return_value = json_dumps_data
# 1. case: successful query data
data, error = influx_db_v2.run_query(query, "user")
assert data == json_dumps_data
assert data == result_data
assert error is None
influx_db_client_mock.assert_called_once_with(url="url", token="token", org="org", **influx_kwargs)
logger_mock.debug.assert_called_once_with(f"InfluxDB got query: {query!r}")
query_mock.assert_called_once_with(query)
json_dumps_mock.assert_called_once_with(result_data)
cleanup_cert_files_mock.assert_called_once_with(influx_kwargs)
influx_db_client_mock.reset_mock()
logger_mock.reset_mock()
query_mock.reset_mock()
json_dumps_mock.reset_mock()
cleanup_cert_files_mock.reset_mock()
# 2. case: unsuccessful query data
@@ -344,5 +336,4 @@ class TestInfluxDBv2:
influx_db_client_mock.assert_called_once_with(url="url", token="token", org="org", **influx_kwargs)
logger_mock.debug.assert_called_once_with(f"InfluxDB got query: {query!r}")
query_mock.assert_called_once_with(query)
json_dumps_mock.assert_not_called()
cleanup_cert_files_mock.assert_called_once_with(influx_kwargs)

View File

@@ -5,7 +5,6 @@ from unittest import TestCase
import mock
from redash.query_runner.prometheus import Prometheus, get_instant_rows, get_range_rows
from redash.utils import json_dumps
class TestPrometheus(TestCase):
@@ -350,7 +349,7 @@ class TestPrometheus(TestCase):
{"friendly_name": "foo_bar", "type": "string", "name": "foo_bar"},
]
data_expected = json_dumps({"rows": rows, "columns": columns})
data_expected = {"rows": rows, "columns": columns}
requests_get_mock.return_value = mock.Mock(
json=mock.Mock(return_value={"data": {"result": self.instant_query_result}})
@@ -424,7 +423,7 @@ class TestPrometheus(TestCase):
{"friendly_name": "foo_bar", "type": "string", "name": "foo_bar"},
]
data_expected = json_dumps({"rows": rows, "columns": columns})
data_expected = {"rows": rows, "columns": columns}
requests_get_mock.return_value = mock.Mock(
json=mock.Mock(return_value={"data": {"result": self.range_query_result}})
@@ -490,7 +489,7 @@ class TestPrometheus(TestCase):
{"friendly_name": "foo_bar", "type": "string", "name": "foo_bar"},
]
data_expected = json_dumps({"rows": rows, "columns": columns})
data_expected = {"rows": rows, "columns": columns}
now_datetime = datetime(2023, 12, 12, 11, 00, 00)
end_timestamp_expected = int(time.mktime(now_datetime.timetuple()))

View File

@@ -15,7 +15,7 @@ class TestPythonQueryRunner(TestCase):
query_string = "print('test')"
mock_dt.utcnow = mock.Mock(return_value=datetime(1901, 12, 21))
result = self.python.run_query(query_string, "user")
self.assertEqual(result[0], '{"rows": [], "columns": [], "log": ["[1901-12-21T00:00:00] test"]}')
self.assertEqual(result[0], {"rows": [], "columns": [], "log": ["[1901-12-21T00:00:00] test"]})
def test_empty_result(self):
query_string = "result={}"
@@ -68,11 +68,11 @@ class TestPythonQueryRunner(TestCase):
result = self.python.run_query(query_string, "user")
self.assertEqual(
result[0],
'{"columns": [{"name": "col1", "type": "string"},'
' {"name": "col2", "type": "integer"}],'
' "rows": [{"col1": "foo", "col2": 100},'
' {"col1": "bar", "col2": 200}],'
' "log": []}',
{
"columns": [{"name": "col1", "type": "string"}, {"name": "col2", "type": "integer"}],
"rows": [{"col1": "foo", "col2": 100}, {"col1": "bar", "col2": 200}],
"log": [],
},
)
@mock.patch("datetime.datetime")
@@ -89,11 +89,11 @@ class TestPythonQueryRunner(TestCase):
result = self.python.run_query(query_string, "user")
self.assertEqual(
result[0],
'{"columns": [{"name": "col1", "type": "string"},'
' {"name": "col2", "type": "integer"}],'
' "rows": [{"col1": "foo", "col2": 100},'
' {"col1": "bar", "col2": 200}],'
' "log": ["[1901-12-21T00:00:00] test"]}',
{
"columns": [{"name": "col1", "type": "string"}, {"name": "col2", "type": "integer"}],
"rows": [{"col1": "foo", "col2": 100}, {"col1": "bar", "col2": 200}],
"log": ["[1901-12-21T00:00:00] test"],
},
)

View File

@@ -17,7 +17,6 @@ from redash.query_runner.query_results import (
prepare_parameterized_query,
replace_query_parameters,
)
from redash.utils import json_dumps
from tests import BaseTestCase
@@ -235,5 +234,5 @@ class TestGetQueryResult(BaseTestCase):
with mock.patch.object(PostgreSQL, "run_query") as qr:
query_result_data = {"columns": [], "rows": []}
qr.return_value = (json_dumps(query_result_data), None)
qr.return_value = (query_result_data, None)
self.assertEqual(query_result_data, get_query_results(self.factory.user, query.id, False))

View File

@@ -83,7 +83,7 @@ class TestTinybird(TestCase):
self.assertIsNone(error)
self.assertEqual(
json.loads(data),
data,
{
"columns": [
{"name": "string_attribute", "friendly_name": "string_attribute", "type": TYPE_STRING},

View File

@@ -28,7 +28,16 @@ class TestTrino(TestCase):
def _assert_schema_catalog(self, mock_run_query, mock__get_catalogs, runner):
mock_run_query.return_value = (
f'{{"rows": [{{"table_schema": "{TestTrino.schema_name}", "table_name": "{TestTrino.table_name}", "column_name": "{TestTrino.column_name}", "data_type": "{TestTrino.column_type}"}}]}}',
{
"rows": [
{
"table_schema": TestTrino.schema_name,
"table_name": TestTrino.table_name,
"column_name": TestTrino.column_name,
"data_type": TestTrino.column_type,
}
]
},
None,
)
mock__get_catalogs.return_value = [TestTrino.catalog_name]
@@ -36,14 +45,14 @@ class TestTrino(TestCase):
expected_schema = [
{
"name": f"{TestTrino.catalog_name}.{TestTrino.schema_name}.{TestTrino.table_name}",
"columns": [{"name": f"{TestTrino.column_name}", "type": f"{TestTrino.column_type}"}],
"columns": [{"name": TestTrino.column_name, "type": TestTrino.column_type}],
}
]
self.assertEqual(schema, expected_schema)
@patch.object(Trino, "run_query")
def test__get_catalogs(self, mock_run_query):
mock_run_query.return_value = (f'{{"rows": [{{"Catalog": "{TestTrino.catalog_name}"}}]}}', None)
mock_run_query.return_value = ({"rows": [{"Catalog": TestTrino.catalog_name}]}, None)
runner = Trino({})
catalogs = runner._get_catalogs()
expected_catalogs = [TestTrino.catalog_name]

Some files were not shown because too many files have changed in this diff.