Mirror of https://github.com/getredash/redash.git, synced 2025-12-19 17:37:19 -05:00
Consistently use simplejson for loading and dumping JSON. (#2817)
* Consistently use simplejson for loading and dumping JSON. This introduces the new functions redash.utils.json_dumps and redash.utils.json_loads and simplifies the custom encoder setup. UUIDs are now handled by the default encoder, too. Fixes #2807.
* Use string comparison in parse_boolean instead of the (simple)json module.
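For reference, here is a minimal sketch of what the new redash.utils helpers plausibly look like, based only on the commit message and the call sites in the diff below; the datetime/decimal conversions and the exact defaults are assumptions, not taken from this change:

# Hypothetical sketch of the helpers added to redash/utils/__init__.py.
# Only the names json_dumps/json_loads, the use of simplejson, and UUID
# handling are stated in the commit message; everything else is assumed.
import datetime
import decimal
import uuid

import simplejson


class JSONEncoder(simplejson.JSONEncoder):
    """Adapts the extra types Redash serializes to plain JSON values."""

    def default(self, o):
        if isinstance(o, uuid.UUID):
            return str(o)  # UUIDs handled by the default encoder (per commit message)
        if isinstance(o, decimal.Decimal):
            return float(o)  # assumed
        if isinstance(o, (datetime.date, datetime.datetime)):
            return o.isoformat()  # assumed
        return super(JSONEncoder, self).default(o)


def json_loads(data, *args, **kwargs):
    """Parse JSON with simplejson."""
    return simplejson.loads(data, *args, **kwargs)


def json_dumps(data, *args, **kwargs):
    """Serialize to JSON with simplejson, defaulting to the shared encoder."""
    kwargs.setdefault('cls', JSONEncoder)
    return simplejson.dumps(data, *args, **kwargs)

Call sites in the diff then change from json.dumps(data, cls=JSONEncoder) to json_dumps(data), and runner-specific encoders or flags are passed through, e.g. json_dumps(data, cls=CassandraJSONEncoder) or json_dumps(data, ignore_nan=True).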
@@ -1,10 +1,10 @@
from __future__ import print_function
import os
import sys
- import json
import re
import subprocess
import requests
+ import simplejson

github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
@@ -17,7 +17,7 @@ def _github_request(method, path, params=None, headers={}):
url = path

if params is not None:
- params = json.dumps(params)
+ params = simplejson.dumps(params)

response = requests.request(method, url, data=params, auth=auth)
return response
@@ -6,7 +6,7 @@ Create Date: 2018-01-31 15:20:30.396533

"""
from __future__ import print_function
- import json
+ import simplejson
from alembic import op
import sqlalchemy as sa

@@ -26,7 +26,7 @@ def upgrade():
print("Updating dashboards position data:")
for dashboard in Dashboard.query:
print(" Updating dashboard: {}".format(dashboard.id))
- layout = json.loads(dashboard.layout)
+ layout = simplejson.loads(dashboard.layout)

print(" Building widgets map:")
widgets = {}
@@ -47,14 +47,14 @@ def upgrade():
if widget is None:
continue

- options = json.loads(widget.options) or {}
+ options = simplejson.loads(widget.options) or {}
options['position'] = {
"row": row_index,
"col": column_index * column_size,
"sizeX": column_size * widget.width
}

- widget.options = json.dumps(options)
+ widget.options = simplejson.dumps(options)

db.session.add(widget)
@@ -1,5 +1,5 @@
from __future__ import print_function
- import json
+ import simplejson
import jsonschema
from jsonschema import ValidationError

@@ -15,7 +15,7 @@ def validate_configuration(query_runner_type, configuration_json):

try:
if isinstance(configuration_json, string_types):
- configuration = json.loads(configuration_json)
+ configuration = simplejson.loads(configuration_json)
else:
configuration = configuration_json
jsonschema.validate(configuration, query_runner_class.configuration_schema())
@@ -24,6 +24,7 @@ def validate_configuration(query_runner_type, configuration_json):

return True


def update(data_source):
print("[%s] Old options: %s" % (data_source.name, data_source.options))

@@ -40,7 +41,7 @@ def update(data_source):
if k == 'port':
configuration[k] = int(v)

- data_source.options = json.dumps(configuration)
+ data_source.options = simplejson.dumps(configuration)

elif data_source.type == 'mysql':
mapping = {
@@ -55,10 +56,10 @@ def update(data_source):
for value in values:
k, v = value.split("=", 1)
configuration[mapping[k]] = v
- data_source.options = json.dumps(configuration)
+ data_source.options = simplejson.dumps(configuration)

elif data_source.type == 'graphite':
- old_config = json.loads(data_source.options)
+ old_config = simplejson.loads(data_source.options)

configuration = {
"url": old_config["url"]
@@ -70,13 +71,13 @@ def update(data_source):
if "auth" in old_config:
configuration['username'], configuration['password'] = old_config["auth"]

- data_source.options = json.dumps(configuration)
+ data_source.options = simplejson.dumps(configuration)

elif data_source.type == 'url':
- data_source.options = json.dumps({"url": data_source.options})
+ data_source.options = simplejson.dumps({"url": data_source.options})

elif data_source.type == 'script':
- data_source.options = json.dumps({"path": data_source.options})
+ data_source.options = simplejson.dumps({"path": data_source.options})

elif data_source.type == 'mongo':
data_source.type = 'mongodb'
@@ -1,5 +1,5 @@
from base64 import b64encode
- import json
+ import simplejson
from redash.models import DataSource


@@ -15,23 +15,23 @@ if __name__ == '__main__':
for ds in DataSource.select(DataSource.id, DataSource.type, DataSource.options):

if ds.type == 'bigquery':
- options = json.loads(ds.options)
+ options = simplejson.loads(ds.options)

if 'jsonKeyFile' in options:
continue

new_options = {
'projectId': options['projectId'],
- 'jsonKeyFile': b64encode(json.dumps({
+ 'jsonKeyFile': b64encode(simplejson.dumps({
'client_email': options['serviceAccount'],
'private_key': convert_p12_to_pem(options['privateKey'])
}))
}

- ds.options = json.dumps(new_options)
+ ds.options = simplejson.dumps(new_options)
ds.save(only=ds.dirty_fields)
elif ds.type == 'google_spreadsheets':
- options = json.loads(ds.options)
+ options = simplejson.loads(ds.options)
if 'jsonKeyFile' in options:
continue

@@ -40,5 +40,5 @@ if __name__ == '__main__':
'jsonKeyFile': b64encode(f.read())
}

- ds.options = json.dumps(new_options)
+ ds.options = simplejson.dumps(new_options)
ds.save(only=ds.dirty_fields)
@@ -1,11 +1,11 @@
from __future__ import print_function
- import json
+ import simplejson
from redash import models

if __name__ == '__main__':
for vis in models.Visualization.select():
if vis.type == 'COUNTER':
- options = json.loads(vis.options)
+ options = simplejson.loads(vis.options)
print("Before: ", options)
if 'rowNumber' in options and options['rowNumber'] is not None:
options['rowNumber'] += 1
@@ -20,5 +20,5 @@ if __name__ == '__main__':
options['targetRowNumber'] = options['rowNumber']

print("After: ", options)
- vis.options = json.dumps(options)
+ vis.options = simplejson.dumps(options)
vis.save()
@@ -1,4 +1,3 @@
- import json
from flask_admin import Admin
from flask_admin.base import MenuLink
from flask_admin.contrib.sqla import ModelView
@@ -8,6 +7,7 @@ from wtforms.widgets import TextInput

from redash import models
from redash.permissions import require_super_admin
+ from redash.utils import json_loads


class ArrayListField(fields.Field):
@@ -30,7 +30,7 @@ class JSONTextAreaField(fields.TextAreaField):
def process_formdata(self, valuelist):
if valuelist:
try:
- json.loads(valuelist[0])
+ json_loads(valuelist[0])
except ValueError:
raise ValueError(self.gettext(u'Invalid JSON'))
self.data = valuelist[0]
@@ -1,8 +1,6 @@
from __future__ import print_function
- import json

import click
import simplejson
from flask.cli import FlaskGroup, run_command
from flask import current_app

@@ -44,7 +42,7 @@ def version():

@manager.command()
def status():
- print(json.dumps(get_status(), indent=2))
+ print(simplejson.dumps(get_status(), indent=2))


@manager.command()
@@ -1,6 +1,5 @@
from __future__ import print_function
from sys import exit
- import json

import click
from flask.cli import AppGroup
@@ -10,6 +9,7 @@ from sqlalchemy.orm.exc import NoResultFound
from redash import models
from redash.query_runner import (get_configuration_schema_for_query_runner_type,
query_runners)
+ from redash.utils import json_loads
from redash.utils.configuration import ConfigurationContainer

manager = AppGroup(help="Data sources management commands.")
@@ -129,7 +129,7 @@ def new(name=None, type=None, options=None, organization='default'):

options = ConfigurationContainer(options_obj, schema)
else:
- options = ConfigurationContainer(json.loads(options), schema)
+ options = ConfigurationContainer(json_loads(options), schema)

if not options.is_valid():
print("Error: invalid configuration.")
@@ -198,7 +198,7 @@ def edit(name, new_name=None, options=None, type=None, organization='default'):
if options is not None:
schema = get_configuration_schema_for_query_runner_type(
data_source.type)
- options = json.loads(options)
+ options = json_loads(options)
data_source.options.set_schema(schema)
data_source.options.update(options)
@@ -1,5 +1,4 @@
import logging
- import json

logger = logging.getLogger(__name__)

@@ -55,7 +54,7 @@ def register(destination_class):
global destinations
if destination_class.enabled():
logger.debug("Registering %s (%s) destinations.", destination_class.name(), destination_class.type())
- destinations[destination_class.type()] = destination_class
+ destinations[destination_class.type()] = destination_class
else:
logger.warning("%s destination enabled but not supported, not registering. Either disable or install missing dependencies.", destination_class.name())

@@ -1,4 +1,3 @@
- import json
import logging
import requests

@@ -1,9 +1,9 @@
- import json
import logging
import requests

from redash.destinations import *
from redash.models import Alert
+ from redash.utils import json_dumps


colors = {
@@ -46,7 +46,7 @@ class HipChat(BaseDestination):
'color': colors.get(new_state, 'green')
}
headers = {'Content-Type': 'application/json'}
- response = requests.post(options['url'], data=json.dumps(data), headers=headers)
+ response = requests.post(options['url'], data=json_dumps(data), headers=headers)

if response.status_code != 204:
logging.error('Bad status code received from HipChat: %d', response.status_code)
@@ -1,8 +1,8 @@
- import json
import logging
import requests

from redash.destinations import *
+ from redash.utils import json_dumps


class Mattermost(BaseDestination):
@@ -46,7 +46,7 @@ class Mattermost(BaseDestination):
if options.get('channel'): payload['channel'] = options.get('channel')

try:
- resp = requests.post(options.get('url'), data=json.dumps(payload))
+ resp = requests.post(options.get('url'), data=json_dumps(payload))
logging.warning(resp.text)

if resp.status_code != 200:
@@ -1,8 +1,8 @@
- import json
import logging
import requests

from redash.destinations import *
+ from redash.utils import json_dumps


class Slack(BaseDestination):
@@ -58,7 +58,7 @@ class Slack(BaseDestination):
else:
text = alert.name + " went back to normal"
color = "#27ae60"


payload = {'attachments': [{'text': text, 'color': color, 'fields': fields}]}

if options.get('username'): payload['username'] = options.get('username')
@@ -67,7 +67,7 @@ class Slack(BaseDestination):
if options.get('channel'): payload['channel'] = options.get('channel')

try:
- resp = requests.post(options.get('url'), data=json.dumps(payload))
+ resp = requests.post(options.get('url'), data=json_dumps(payload))
logging.warning(resp.text)
if resp.status_code != 200:
logging.error("Slack send ERROR. status_code => {status}".format(status=resp.status_code))
@@ -1,14 +1,14 @@
- import json

from flask import request
from flask_login import login_required

from redash import models, redis_connection
from redash.handlers import routes
from redash.handlers.base import json_response
from redash.permissions import require_super_admin
from redash.serializers import QuerySerializer
- from redash.tasks.queries import QueryTaskTracker
from redash.tasks import record_event
+ from redash.tasks.queries import QueryTaskTracker
+ from redash.utils import json_loads


@routes.route('/api/admin/queries/outdated', methods=['GET'])
@@ -16,7 +16,7 @@ from redash.tasks import record_event
@login_required
def outdated_queries():
manager_status = redis_connection.hgetall('redash:status')
- query_ids = json.loads(manager_status.get('query_ids', '[]'))
+ query_ids = json_loads(manager_status.get('query_ids', '[]'))
if query_ids:
outdated_queries = (
models.Query.query.outerjoin(models.QueryResult)
@@ -1,16 +1,19 @@
import logging
- import json
import time

import pystache
from flask import make_response, request
from flask_login import current_user
from flask_restful import abort
- from redash import models, settings, utils
+ from redash import models, settings
from redash.tasks import QueryTask, record_event
from redash.permissions import require_permission, not_view_only, has_access, require_access, view_only
from redash.handlers.base import BaseResource, get_object_or_404
- from redash.utils import collect_query_parameters, collect_parameters_from_request, gen_query_hash
+ from redash.utils import (collect_query_parameters,
+ collect_parameters_from_request,
+ gen_query_hash,
+ json_dumps,
+ utcnow)
from redash.tasks.queries import enqueue_query


@@ -56,7 +59,7 @@ def run_query_sync(data_source, parameter_values, query_text, max_age=0):
run_time = time.time() - started_at
query_result, updated_query_ids = models.QueryResult.store_result(data_source.org_id, data_source,
query_hash, query_text, data,
- run_time, utils.utcnow())
+ run_time, utcnow())

models.db.session.commit()
return query_result
@@ -243,7 +246,7 @@ class QueryResultResource(BaseResource):
abort(404, message='No cached result found for this query.')

def make_json_response(self, query_result):
- data = json.dumps({'query_result': query_result.to_dict()}, cls=utils.JSONEncoder)
+ data = json_dumps({'query_result': query_result.to_dict()})
headers = {'Content-Type': "application/json"}
return make_response(data, 200, headers)
@@ -1,5 +1,3 @@
- import json

from flask import request

from redash import models
@@ -7,6 +5,7 @@ from redash.handlers.base import BaseResource, get_object_or_404
from redash.serializers import serialize_visualization
from redash.permissions import (require_object_modify_permission,
require_permission)
+ from redash.utils import json_dumps


class VisualizationListResource(BaseResource):
@@ -17,7 +16,7 @@ class VisualizationListResource(BaseResource):
query = get_object_or_404(models.Query.get_by_id_and_org, kwargs.pop('query_id'), self.current_org)
require_object_modify_permission(query, self.current_user)

- kwargs['options'] = json.dumps(kwargs['options'])
+ kwargs['options'] = json_dumps(kwargs['options'])
kwargs['query_rel'] = query

vis = models.Visualization(**kwargs)
@@ -34,7 +33,7 @@ class VisualizationResource(BaseResource):

kwargs = request.get_json(force=True)
if 'options' in kwargs:
- kwargs['options'] = json.dumps(kwargs['options'])
+ kwargs['options'] = json_dumps(kwargs['options'])

kwargs.pop('id', None)
kwargs.pop('query_id', None)
@@ -1,5 +1,5 @@
import os
- import json
+ import simplejson
from flask import url_for

WEBPACK_MANIFEST_PATH = os.path.join(os.path.dirname(__file__), '../../client/dist/', 'asset-manifest.json')
@@ -14,7 +14,7 @@ def configure_webpack(app):
if assets is None or app.debug:
try:
with open(WEBPACK_MANIFEST_PATH) as fp:
- assets = json.load(fp)
+ assets = simplejson.load(fp)
except IOError:
app.logger.exception('Unable to load webpack manifest')
assets = {}
@@ -1,12 +1,12 @@
- import json

from flask import request

from redash import models
from redash.handlers.base import BaseResource
from redash.serializers import serialize_widget
from redash.permissions import (require_access,
require_object_modify_permission,
require_permission, view_only)
+ from redash.utils import json_dumps


class WidgetListResource(BaseResource):
@@ -27,7 +27,7 @@ class WidgetListResource(BaseResource):
dashboard = models.Dashboard.get_by_id_and_org(widget_properties.pop('dashboard_id'), self.current_org)
require_object_modify_permission(dashboard, self.current_user)

- widget_properties['options'] = json.dumps(widget_properties['options'])
+ widget_properties['options'] = json_dumps(widget_properties['options'])
widget_properties.pop('id', None)
widget_properties['dashboard'] = dashboard

@@ -63,7 +63,7 @@ class WidgetResource(BaseResource):
require_object_modify_permission(widget.dashboard, self.current_user)
widget_properties = request.get_json(force=True)
widget.text = widget_properties['text']
- widget.options = json.dumps(widget_properties['options'])
+ widget.options = json_dumps(widget_properties['options'])
models.db.session.commit()
return serialize_widget(widget)
@@ -1,12 +1,12 @@
from __future__ import absolute_import

- import json
import logging
import socket
import time

from celery.signals import task_postrun, task_prerun
from redash import settings, statsd_client
+ from redash.utils import json_dumps

tasks_start_time = {}

@@ -45,7 +45,7 @@ def task_postrun_handler(signal, sender, task_id, task, args, kwargs, retval, st

normalized_task_name = task.name.replace('redash.tasks.', '').replace('.', '_')
metric = "celery.task_runtime.{}".format(normalized_task_name)
- logging.debug("metric=%s", json.dumps({'metric': metric, 'tags': tags, 'value': run_time}))
+ logging.debug("metric=%s", json_dumps({'metric': metric, 'tags': tags, 'value': run_time}))
statsd_client.timing(metric_name(metric, tags), run_time)
statsd_client.incr(metric_name('celery.task.{}.{}'.format(normalized_task_name, state), tags))
except Exception:
@@ -4,7 +4,6 @@ import datetime
import functools
import hashlib
import itertools
- import json
import logging
import time
from functools import reduce
@@ -22,7 +21,7 @@ from redash.destinations import (get_configuration_schema_for_destination_type,
from redash.metrics import database # noqa: F401
from redash.query_runner import (get_configuration_schema_for_query_runner_type,
get_query_runner)
- from redash.utils import generate_token, json_dumps
+ from redash.utils import generate_token, json_dumps, json_loads
from redash.utils.configuration import ConfigurationContainer
from redash.settings.organization import settings as org_settings

@@ -141,7 +140,7 @@ class PseudoJSON(TypeDecorator):
def process_result_value(self, value, dialect):
if not value:
return value
- return json.loads(value)
+ return json_loads(value)


class MutableDict(Mutable, dict):
@@ -648,9 +647,9 @@ class DataSource(BelongsToOrgMixin, db.Model):
query_runner = self.query_runner
schema = sorted(query_runner.get_schema(get_stats=refresh), key=lambda t: t['name'])

- redis_connection.set(key, json.dumps(schema))
+ redis_connection.set(key, json_dumps(schema))
else:
- schema = json.loads(cache)
+ schema = json_loads(cache)

return schema

@@ -738,7 +737,7 @@ class QueryResult(db.Model, BelongsToOrgMixin):
'id': self.id,
'query_hash': self.query_hash,
'query': self.query_text,
- 'data': json.loads(self.data),
+ 'data': json_loads(self.data),
'data_source_id': self.data_source_id,
'runtime': self.runtime,
'retrieved_at': self.retrieved_at
@@ -807,7 +806,7 @@ class QueryResult(db.Model, BelongsToOrgMixin):
def make_csv_content(self):
s = cStringIO.StringIO()

- query_data = json.loads(self.data)
+ query_data = json_loads(self.data)
writer = csv.DictWriter(s, extrasaction="ignore", fieldnames=[col['name'] for col in query_data['columns']])
writer.writer = utils.UnicodeWriter(s)
writer.writeheader()
@@ -819,7 +818,7 @@
def make_excel_content(self):
s = cStringIO.StringIO()

- query_data = json.loads(self.data)
+ query_data = json_loads(self.data)
book = xlsxwriter.Workbook(s, {'constant_memory': True})
sheet = book.add_worksheet("result")

@@ -1297,7 +1296,7 @@ class Alert(TimestampMixin, db.Model):
return db.session.query(Alert).join(Query).filter(Alert.id == id, Query.org == org).one()

def evaluate(self):
- data = json.loads(self.query_rel.latest_query_data.data)
+ data = json_loads(self.query_rel.latest_query_data.data)
if data['rows']:
value = data['rows'][0][self.options['column']]
op = self.options['op']
@@ -1,10 +1,8 @@
|
||||
import logging
|
||||
import json
|
||||
import sys
|
||||
|
||||
import requests
|
||||
|
||||
from redash import settings
|
||||
from redash.utils import json_loads
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -115,7 +113,7 @@ class BaseQueryRunner(object):
|
||||
|
||||
if error is not None:
|
||||
raise Exception("Failed running query [%s]." % query)
|
||||
return json.loads(results)['rows']
|
||||
return json_loads(results)['rows']
|
||||
|
||||
@classmethod
|
||||
def to_dict(cls):
|
||||
|
||||
@@ -1,11 +1,9 @@
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import simplejson
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.settings import parse_boolean
|
||||
from redash.utils import SimpleJSONEncoder
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
ANNOTATE_QUERY = parse_boolean(os.environ.get('ATHENA_ANNOTATE_QUERY', 'true'))
|
||||
@@ -152,7 +150,7 @@ class Athena(BaseQueryRunner):
|
||||
if error is not None:
|
||||
raise Exception("Failed getting schema.")
|
||||
|
||||
results = json.loads(results)
|
||||
results = json_loads(results)
|
||||
for row in results['rows']:
|
||||
table_name = '{0}.{1}'.format(row['table_schema'], row['table_name'])
|
||||
if table_name not in schema:
|
||||
@@ -195,7 +193,7 @@ class Athena(BaseQueryRunner):
|
||||
'athena_query_id': athena_query_id
|
||||
}
|
||||
}
|
||||
json_data = simplejson.dumps(data, ignore_nan=True, cls=SimpleJSONEncoder)
|
||||
json_data = json_dumps(data, ignore_nan=True)
|
||||
error = None
|
||||
except KeyboardInterrupt:
|
||||
if cursor.query_id:
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
from io import StringIO
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
import uuid
|
||||
import csv
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -59,7 +58,7 @@ def generate_rows_and_columns(csv_response):
|
||||
|
||||
meta_with_padding = meta + '=' * (4 - len(meta) % 4)
|
||||
meta_decoded = meta_with_padding.decode('base64')
|
||||
meta_json = json.loads(meta_decoded)
|
||||
meta_json = json_loads(meta_decoded)
|
||||
meta_columns = meta_json['tableSchema']['columns']
|
||||
|
||||
reader = csv.reader(data.splitlines())
|
||||
@@ -162,7 +161,7 @@ class AxibaseTSD(BaseQueryRunner):
|
||||
columns, rows = generate_rows_and_columns(data)
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
json_data = json_dumps(data)
|
||||
error = None
|
||||
|
||||
except SQLException as e:
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import datetime
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
import time
|
||||
@@ -10,7 +9,7 @@ import requests
|
||||
|
||||
from redash import settings
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -141,7 +140,7 @@ class BigQuery(BaseQueryRunner):
|
||||
"https://www.googleapis.com/auth/drive"
|
||||
]
|
||||
|
||||
key = json.loads(b64decode(self.configuration['jsonKeyFile']))
|
||||
key = json_loads(b64decode(self.configuration['jsonKeyFile']))
|
||||
|
||||
creds = ServiceAccountCredentials.from_json_keyfile_dict(key, scope)
|
||||
http = httplib2.Http(timeout=settings.BIGQUERY_HTTP_TIMEOUT)
|
||||
@@ -296,11 +295,11 @@ class BigQuery(BaseQueryRunner):
|
||||
data = self._get_query_result(jobs, query)
|
||||
error = None
|
||||
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
json_data = json_dumps(data)
|
||||
except apiclient.errors.HttpError as e:
|
||||
json_data = None
|
||||
if e.resp.status == 400:
|
||||
error = json.loads(e.content)['error']['message']
|
||||
error = json_loads(e.content)['error']['message']
|
||||
else:
|
||||
error = e.content
|
||||
except KeyboardInterrupt:
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
import json
|
||||
import logging
|
||||
import uuid
|
||||
|
||||
from redash.query_runner import BaseQueryRunner, register
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.utils import JSONEncoder, json_dumps, json_loads
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -18,8 +16,6 @@ except ImportError:
|
||||
|
||||
class CassandraJSONEncoder(JSONEncoder):
|
||||
def default(self, o):
|
||||
if isinstance(o, uuid.UUID):
|
||||
return str(o)
|
||||
if isinstance(o, sortedset):
|
||||
return list(o)
|
||||
return super(CassandraJSONEncoder, self).default(o)
|
||||
@@ -79,7 +75,7 @@ class Cassandra(BaseQueryRunner):
|
||||
select release_version from system.local;
|
||||
"""
|
||||
results, error = self.run_query(query, None)
|
||||
results = json.loads(results)
|
||||
results = json_loads(results)
|
||||
release_version = results['rows'][0]['release_version']
|
||||
|
||||
query = """
|
||||
@@ -96,7 +92,7 @@ class Cassandra(BaseQueryRunner):
|
||||
""".format(self.configuration['keyspace'])
|
||||
|
||||
results, error = self.run_query(query, None)
|
||||
results = json.loads(results)
|
||||
results = json_loads(results)
|
||||
|
||||
schema = {}
|
||||
for row in results['rows']:
|
||||
@@ -135,7 +131,7 @@ class Cassandra(BaseQueryRunner):
|
||||
rows = [dict(zip(column_names, row)) for row in result]
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=CassandraJSONEncoder)
|
||||
json_data = json_dumps(data, cls=CassandraJSONEncoder)
|
||||
|
||||
error = None
|
||||
except KeyboardInterrupt:
|
||||
|
||||
@@ -1,9 +1,11 @@
|
||||
import json
|
||||
import logging
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
import requests
|
||||
import re
|
||||
|
||||
import requests
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@@ -47,7 +49,7 @@ class ClickHouse(BaseSQLQueryRunner):
|
||||
if error is not None:
|
||||
raise Exception("Failed getting schema.")
|
||||
|
||||
results = json.loads(results)
|
||||
results = json_loads(results)
|
||||
|
||||
for row in results['rows']:
|
||||
table_name = '{}.{}'.format(row['database'], row['table'])
|
||||
@@ -107,7 +109,7 @@ class ClickHouse(BaseSQLQueryRunner):
|
||||
return json_data, error
|
||||
try:
|
||||
q = self._clickhouse_query(query)
|
||||
data = json.dumps(q, cls=JSONEncoder)
|
||||
data = json_dumps(q)
|
||||
error = None
|
||||
except Exception as e:
|
||||
data = None
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.utils import json_dumps
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -119,7 +118,7 @@ class DynamoDBSQL(BaseSQLQueryRunner):
|
||||
rows.append(item)
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
json_data = json_dumps(data)
|
||||
error = None
|
||||
except ParseException as e:
|
||||
error = u"Error parsing query at line {} (column {}):\n{}".format(e.lineno, e.column, e.line)
|
||||
|
||||
@@ -3,10 +3,10 @@ import sys
|
||||
import urllib
|
||||
|
||||
import requests
|
||||
import simplejson as json
|
||||
from requests.auth import HTTPBasicAuth
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
try:
|
||||
import http.client as http_client
|
||||
@@ -315,7 +315,7 @@ class Kibana(BaseElasticSearch):
|
||||
error = None
|
||||
|
||||
logger.debug(query)
|
||||
query_params = json.loads(query)
|
||||
query_params = json_loads(query)
|
||||
|
||||
index_name = query_params["index"]
|
||||
query_data = query_params["query"]
|
||||
@@ -334,7 +334,6 @@ class Kibana(BaseElasticSearch):
|
||||
mappings, error = self._get_query_mappings(mapping_url)
|
||||
if error:
|
||||
return None, error
|
||||
#logger.debug(json.dumps(mappings, indent=4))
|
||||
|
||||
if sort:
|
||||
url += "&sort={0}".format(urllib.quote_plus(sort))
|
||||
@@ -358,7 +357,7 @@ class Kibana(BaseElasticSearch):
|
||||
# TODO: Handle complete ElasticSearch queries (JSON based sent over HTTP POST)
|
||||
raise Exception("Advanced queries are not supported")
|
||||
|
||||
json_data = json.dumps({
|
||||
json_data = json_dumps({
|
||||
"columns": result_columns,
|
||||
"rows": result_rows
|
||||
})
|
||||
@@ -396,7 +395,7 @@ class ElasticSearch(BaseElasticSearch):
|
||||
error = None
|
||||
|
||||
logger.debug(query)
|
||||
query_dict = json.loads(query)
|
||||
query_dict = json_loads(query)
|
||||
|
||||
index_name = query_dict.pop("index", "")
|
||||
result_fields = query_dict.pop("result_fields", None)
|
||||
@@ -422,7 +421,7 @@ class ElasticSearch(BaseElasticSearch):
|
||||
result_rows = []
|
||||
self._parse_results(mappings, result_fields, r.json(), result_columns, result_rows)
|
||||
|
||||
json_data = json.dumps({
|
||||
json_data = json_dumps({
|
||||
"columns": result_columns,
|
||||
"rows": result_rows
|
||||
})
|
||||
|
||||
@@ -1,13 +1,12 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import json
|
||||
import logging
|
||||
from base64 import b64decode
|
||||
from datetime import datetime
|
||||
from urlparse import parse_qs, urlparse
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -115,7 +114,7 @@ class GoogleAnalytics(BaseSQLQueryRunner):
|
||||
|
||||
def _get_analytics_service(self):
|
||||
scope = ['https://www.googleapis.com/auth/analytics.readonly']
|
||||
key = json.loads(b64decode(self.configuration['jsonKeyFile']))
|
||||
key = json_loads(b64decode(self.configuration['jsonKeyFile']))
|
||||
creds = ServiceAccountCredentials.from_json_keyfile_dict(key, scope)
|
||||
return build('analytics', 'v3', http=creds.authorize(httplib2.Http()))
|
||||
|
||||
@@ -147,7 +146,7 @@ class GoogleAnalytics(BaseSQLQueryRunner):
|
||||
def run_query(self, query, user):
|
||||
logger.debug("Analytics is about to execute query: %s", query)
|
||||
try:
|
||||
params = json.loads(query)
|
||||
params = json_loads(query)
|
||||
except:
|
||||
params = parse_qs(urlparse(query).query, keep_blank_values=True)
|
||||
for key in params.keys():
|
||||
@@ -171,7 +170,7 @@ class GoogleAnalytics(BaseSQLQueryRunner):
|
||||
response = api.get(**params).execute()
|
||||
data = parse_ga_response(response)
|
||||
error = None
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
json_data = json_dumps(data)
|
||||
except HttpError as e:
|
||||
# Make sure we return a more readable error to the end user
|
||||
error = e._get_reason()
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import json
|
||||
import logging
|
||||
from base64 import b64decode
|
||||
|
||||
@@ -7,7 +6,7 @@ from requests import Session
|
||||
from xlsxwriter.utility import xl_col_to_name
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import json_dumps
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -23,7 +22,7 @@ except ImportError:
|
||||
|
||||
def _load_key(filename):
|
||||
with open(filename, "rb") as f:
|
||||
return json.loads(f.read())
|
||||
return json_loads(f.read())
|
||||
|
||||
|
||||
def _get_columns_and_column_names(row):
|
||||
@@ -179,7 +178,7 @@ class GoogleSpreadsheet(BaseQueryRunner):
|
||||
'https://spreadsheets.google.com/feeds',
|
||||
]
|
||||
|
||||
key = json.loads(b64decode(self.configuration['jsonKeyFile']))
|
||||
key = json_loads(b64decode(self.configuration['jsonKeyFile']))
|
||||
creds = ServiceAccountCredentials.from_json_keyfile_dict(key, scope)
|
||||
|
||||
timeout_session = HTTPSession()
|
||||
|
||||
@@ -1,9 +1,10 @@
|
||||
import json
|
||||
import datetime
|
||||
import requests
|
||||
import logging
|
||||
|
||||
import requests
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.utils import json_dumps
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -21,7 +22,7 @@ def _transform_result(response):
|
||||
rows.append({'Time::x': timestamp, 'name::series': series['target'], 'value::y': values[0]})
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
return json.dumps(data, cls=JSONEncoder)
|
||||
return json_dumps(data)
|
||||
|
||||
|
||||
class Graphite(BaseQueryRunner):
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
import base64
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.utils import json_dumps
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -167,7 +166,7 @@ class Hive(BaseSQLQueryRunner):
|
||||
rows = [dict(zip(column_names, row)) for row in cursor]
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
json_data = json_dumps(data)
|
||||
error = None
|
||||
except KeyboardInterrupt:
|
||||
connection.cancel()
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.utils import json_dumps
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -118,7 +116,7 @@ class Impala(BaseSQLQueryRunner):
|
||||
rows = [dict(zip(column_names, row)) for row in cursor]
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
json_data = json_dumps(data)
|
||||
error = None
|
||||
cursor.close()
|
||||
except DatabaseError as e:
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
import json
|
||||
import logging
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.utils import json_dumps
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -42,10 +41,10 @@ def _transform_result(results):
|
||||
result_row[column] = value
|
||||
result_rows.append(result_row)
|
||||
|
||||
return json.dumps({
|
||||
return json_dumps({
|
||||
"columns": [{'name': c} for c in result_columns],
|
||||
"rows": result_rows
|
||||
}, cls=JSONEncoder)
|
||||
})
|
||||
|
||||
|
||||
class InfluxDB(BaseQueryRunner):
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
import json
|
||||
import re
|
||||
|
||||
from collections import OrderedDict
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
|
||||
# TODO: make this more general and move into __init__.py
|
||||
@@ -23,7 +22,7 @@ class ResultSet(object):
|
||||
self.columns[column] = {'name': column, 'type': column_type, 'friendly_name': column}
|
||||
|
||||
def to_json(self):
|
||||
return json.dumps({'rows': self.rows, 'columns': self.columns.values()})
|
||||
return json_dumps({'rows': self.rows, 'columns': self.columns.values()})
|
||||
|
||||
|
||||
def parse_issue(issue, field_mapping):
|
||||
@@ -39,7 +38,7 @@ def parse_issue(issue, field_mapping):
|
||||
# if field mapping with dict member mappings defined get value of each member
|
||||
for member_name in member_names:
|
||||
if member_name in v:
|
||||
result[field_mapping.get_dict_output_field_name(k,member_name)] = v[member_name]
|
||||
result[field_mapping.get_dict_output_field_name(k, member_name)] = v[member_name]
|
||||
|
||||
else:
|
||||
# these special mapping rules are kept for backwards compatibility
|
||||
@@ -64,7 +63,7 @@ def parse_issue(issue, field_mapping):
|
||||
if member_name in listItem:
|
||||
listValues.append(listItem[member_name])
|
||||
if len(listValues) > 0:
|
||||
result[field_mapping.get_dict_output_field_name(k,member_name)] = ','.join(listValues)
|
||||
result[field_mapping.get_dict_output_field_name(k, member_name)] = ','.join(listValues)
|
||||
|
||||
else:
|
||||
# otherwise support list values only for non-dict items
|
||||
@@ -160,7 +159,7 @@ class JiraJQL(BaseHTTPQueryRunner):
|
||||
jql_url = '{}/rest/api/2/search'.format(self.configuration["url"])
|
||||
|
||||
try:
|
||||
query = json.loads(query)
|
||||
query = json_loads(query)
|
||||
query_type = query.pop('queryType', 'select')
|
||||
field_mapping = FieldMapping(query.pop('fieldMapping', {}))
|
||||
|
||||
|
||||
@@ -1,6 +1,4 @@
|
||||
from __future__ import absolute_import
|
||||
import sys
|
||||
import json
|
||||
|
||||
try:
|
||||
import pymapd
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.utils import json_dumps
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -131,7 +130,7 @@ class MemSQL(BaseSQLQueryRunner):
|
||||
})
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
json_data = json_dumps(data)
|
||||
error = None
|
||||
except KeyboardInterrupt:
|
||||
cursor.close()
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
import datetime
|
||||
import json
|
||||
import logging
|
||||
import re
|
||||
|
||||
from dateutil.parser import parse
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder, parse_human_time
|
||||
from redash.utils import JSONEncoder, json_dumps, json_loads, parse_human_time
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -70,7 +69,7 @@ def datetime_parser(dct):
|
||||
|
||||
|
||||
def parse_query_json(query):
|
||||
query_data = json.loads(query, object_hook=datetime_parser)
|
||||
query_data = json_loads(query, object_hook=datetime_parser)
|
||||
return query_data
|
||||
|
||||
|
||||
@@ -312,7 +311,7 @@ class MongoDB(BaseQueryRunner):
|
||||
"rows": rows
|
||||
}
|
||||
error = None
|
||||
json_data = json.dumps(data, cls=MongoDBJSONEncoder)
|
||||
json_data = json_dumps(data, cls=MongoDBJSONEncoder)
|
||||
|
||||
return json_data, error
|
||||
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
import uuid
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -26,13 +25,6 @@ types_map = {
|
||||
}
|
||||
|
||||
|
||||
class MSSQLJSONEncoder(JSONEncoder):
|
||||
def default(self, o):
|
||||
if isinstance(o, uuid.UUID):
|
||||
return str(o)
|
||||
return super(MSSQLJSONEncoder, self).default(o)
|
||||
|
||||
|
||||
class SqlServer(BaseSQLQueryRunner):
|
||||
noop_query = "SELECT 1"
|
||||
|
||||
@@ -105,7 +97,7 @@ class SqlServer(BaseSQLQueryRunner):
|
||||
if error is not None:
|
||||
raise Exception("Failed getting schema.")
|
||||
|
||||
results = json.loads(results)
|
||||
results = json_loads(results)
|
||||
|
||||
for row in results['rows']:
|
||||
if row['table_schema'] != self.configuration['db']:
|
||||
@@ -151,7 +143,7 @@ class SqlServer(BaseSQLQueryRunner):
|
||||
rows = [dict(zip((c['name'] for c in columns), row)) for row in data]
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=MSSQLJSONEncoder)
|
||||
json_data = json_dumps(data)
|
||||
error = None
|
||||
else:
|
||||
error = "No data was returned."
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
import uuid
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.query_runner.mssql import MSSQLJSONEncoder, types_map
|
||||
from redash.query_runner.mssql import types_map
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -88,7 +87,7 @@ class SQLServerODBC(BaseSQLQueryRunner):
|
||||
if error is not None:
|
||||
raise Exception("Failed getting schema.")
|
||||
|
||||
results = json.loads(results)
|
||||
results = json_loads(results)
|
||||
|
||||
for row in results['rows']:
|
||||
if row['table_schema'] != self.configuration['db']:
|
||||
@@ -133,7 +132,7 @@ class SQLServerODBC(BaseSQLQueryRunner):
|
||||
rows = [dict(zip((c['name'] for c in columns), row)) for row in data]
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=MSSQLJSONEncoder)
|
||||
json_data = json_dumps(data)
|
||||
error = None
|
||||
else:
|
||||
error = "No data was returned."
|
||||
|
||||
@@ -1,10 +1,9 @@
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.settings import parse_boolean
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
types_map = {
|
||||
@@ -111,7 +110,7 @@ class Mysql(BaseSQLQueryRunner):
|
||||
if error is not None:
|
||||
raise Exception("Failed getting schema.")
|
||||
|
||||
results = json.loads(results)
|
||||
results = json_loads(results)
|
||||
|
||||
for row in results['rows']:
|
||||
if row['table_schema'] != self.configuration['db']:
|
||||
@@ -154,7 +153,7 @@ class Mysql(BaseSQLQueryRunner):
|
||||
rows = [dict(zip((c['name'] for c in columns), row)) for row in data]
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
json_data = json_dumps(data)
|
||||
error = None
|
||||
else:
|
||||
json_data = None
|
||||
|
||||
@@ -1,9 +1,7 @@
|
||||
import json
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from redash.utils import json_dumps, json_loads
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
|
||||
try:
|
||||
import cx_Oracle
|
||||
@@ -100,7 +98,7 @@ class Oracle(BaseSQLQueryRunner):
|
||||
if error is not None:
|
||||
raise Exception("Failed getting schema.")
|
||||
|
||||
results = json.loads(results)
|
||||
results = json_loads(results)
|
||||
|
||||
for row in results['rows']:
|
||||
if row['OWNER'] != None:
|
||||
@@ -148,13 +146,13 @@ class Oracle(BaseSQLQueryRunner):
|
||||
rows = [dict(zip((c['name'] for c in columns), row)) for row in cursor]
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
error = None
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
json_data = json_dumps(data)
|
||||
else:
|
||||
columns = [{'name': 'Row(s) Affected', 'type': 'TYPE_INTEGER'}]
|
||||
rows = [{'Row(s) Affected': rows_count}]
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
connection.commit()
|
||||
json_data = json_dumps(data)
|
||||
connection.commit()
|
||||
except cx_Oracle.DatabaseError as err:
|
||||
error = u"Query failed. {}.".format(err.message)
|
||||
json_data = None
|
||||
|
||||
@@ -1,12 +1,11 @@
|
||||
import os
|
||||
import json
|
||||
import logging
|
||||
import select
|
||||
|
||||
import psycopg2
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -92,7 +91,7 @@ class PostgreSQL(BaseSQLQueryRunner):
|
||||
if error is not None:
|
||||
raise Exception("Failed getting schema.")
|
||||
|
||||
results = json.loads(results)
|
||||
results = json_loads(results)
|
||||
|
||||
for row in results['rows']:
|
||||
if row['table_schema'] != 'public':
|
||||
@@ -166,7 +165,7 @@ class PostgreSQL(BaseSQLQueryRunner):
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
error = None
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
json_data = json_dumps(data)
|
||||
else:
|
||||
error = 'Query completed but it returned no data.'
|
||||
json_data = None
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
import json
|
||||
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.query_runner import *
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -79,7 +77,7 @@ class Presto(BaseQueryRunner):
|
||||
if error is not None:
|
||||
raise Exception("Failed getting schema.")
|
||||
|
||||
results = json.loads(results)
|
||||
results = json_loads(results)
|
||||
|
||||
for row in results['rows']:
|
||||
table_name = '{}.{}'.format(row['table_schema'], row['table_name'])
|
||||
@@ -108,7 +106,7 @@ class Presto(BaseQueryRunner):
|
||||
columns = self.fetch_columns(column_tuples)
|
||||
rows = [dict(zip(([c['name'] for c in columns]), r)) for i, r in enumerate(cursor.fetchall())]
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
json_data = json_dumps(data)
|
||||
error = None
|
||||
except DatabaseError as db:
|
||||
json_data = None
|
||||
|
||||
@@ -1,20 +1,18 @@
|
||||
import datetime
|
||||
import json
|
||||
import importlib
|
||||
import logging
|
||||
import sys
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import json_dumps
|
||||
from redash.utils import json_dumps, json_loads
|
||||
from redash import models
|
||||
|
||||
import importlib
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
from RestrictedPython import compile_restricted
|
||||
from RestrictedPython.Guards import safe_builtins
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class CustomPrint(object):
|
||||
"""CustomPrint redirect "print" calls to be sent as "log" on the result object."""
|
||||
def __init__(self):
|
||||
@@ -173,8 +171,8 @@ class Python(BaseQueryRunner):
|
||||
if error is not None:
|
||||
raise Exception(error)
|
||||
|
||||
# TODO: allow avoiding the json.dumps/loads in same process
|
||||
return json.loads(data)
|
||||
# TODO: allow avoiding the JSON dumps/loads in same process
|
||||
return json_loads(data)
|
||||
|
||||
@staticmethod
|
||||
def get_source_schema(data_source_name_or_id):
|
||||
@@ -211,7 +209,7 @@ class Python(BaseQueryRunner):
|
||||
if query.latest_query_data.data is None:
|
||||
raise Exception("Query does not have results yet.")
|
||||
|
||||
return json.loads(query.latest_query_data.data)
|
||||
return json_loads(query.latest_query_data.data)
|
||||
|
||||
def test_connection(self):
|
||||
pass
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import json
|
||||
import logging
|
||||
import numbers
|
||||
import re
|
||||
@@ -12,7 +11,7 @@ from redash.permissions import has_access, not_view_only
|
||||
from redash.query_runner import (TYPE_BOOLEAN, TYPE_DATETIME, TYPE_FLOAT,
|
||||
TYPE_INTEGER, TYPE_STRING, BaseQueryRunner,
|
||||
register)
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -73,13 +72,12 @@ def get_query_results(user, query_id, bring_from_cache):
|
||||
results = query.latest_query_data.data
|
||||
else:
|
||||
raise Exception("No cached result available for query {}.".format(query.id))
|
||||
|
||||
else:
|
||||
else:
|
||||
results, error = query.data_source.query_runner.run_query(query.query_text, user)
|
||||
if error:
|
||||
raise Exception("Failed loading results for query id {}.".format(query.id))
|
||||
|
||||
return json.loads(results)
|
||||
return json_loads(results)
|
||||
|
||||
|
||||
def create_tables_from_query_ids(user, connection, query_ids, cached_query_ids=[]):
|
||||
@@ -170,7 +168,7 @@ class Results(BaseQueryRunner):
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
error = None
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
json_data = json_dumps(data)
|
||||
else:
|
||||
error = 'Query completed but it returned no data.'
|
||||
json_data = None
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
from __future__ import absolute_import
|
||||
import json
|
||||
|
||||
try:
|
||||
import snowflake.connector
|
||||
@@ -10,7 +9,7 @@ except ImportError:
|
||||
|
||||
from redash.query_runner import BaseQueryRunner, register
|
||||
from redash.query_runner import TYPE_STRING, TYPE_DATE, TYPE_DATETIME, TYPE_INTEGER, TYPE_FLOAT, TYPE_BOOLEAN
|
||||
from redash.utils import json_dumps
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
TYPES_MAP = {
|
||||
0: TYPE_INTEGER,
|
||||
@@ -98,7 +97,7 @@ class Snowflake(BaseQueryRunner):
|
||||
raise Exception("Failed getting schema.")
|
||||
|
||||
schema = {}
|
||||
results = json.loads(results)
|
||||
results = json_loads(results)
|
||||
|
||||
for row in results['rows']:
|
||||
table_name = '{}.{}'.format(row['TABLE_SCHEMA'], row['TABLE_NAME'])
|
||||
|
||||
@@ -1,14 +1,11 @@
|
||||
import json
|
||||
import logging
|
||||
import sqlite3
|
||||
import sys
|
||||
|
||||
from six import reraise
|
||||
|
||||
from redash.query_runner import BaseSQLQueryRunner
|
||||
from redash.query_runner import register
|
||||
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.query_runner import BaseSQLQueryRunner, register
|
||||
from redash.utils import json_dumps, json_loads
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -47,7 +44,7 @@ class Sqlite(BaseSQLQueryRunner):
|
||||
if error is not None:
|
||||
raise Exception("Failed getting schema.")
|
||||
|
||||
results = json.loads(results)
|
||||
results = json_loads(results)
|
||||
|
||||
for row in results['rows']:
|
||||
table_name = row['tbl_name']
|
||||
@@ -56,7 +53,7 @@ class Sqlite(BaseSQLQueryRunner):
|
||||
if error is not None:
|
||||
raise Exception("Failed getting schema.")
|
||||
|
||||
results_table = json.loads(results_table)
|
||||
results_table = json_loads(results_table)
|
||||
for row_column in results_table['rows']:
|
||||
schema[table_name]['columns'].append(row_column['name'])
|
||||
|
||||
@@ -76,7 +73,7 @@ class Sqlite(BaseSQLQueryRunner):
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
error = None
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
json_data = json_dumps(data)
|
||||
else:
|
||||
error = 'Query completed but it returned no data.'
|
||||
json_data = None
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
import json
|
||||
import logging
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.utils import json_dumps
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -113,7 +112,7 @@ class TreasureData(BaseQueryRunner):
|
||||
else:
|
||||
rows = [dict(zip(([c[0] for c in columns_data]), r)) for i, r in enumerate(cursor.fetchall())]
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
json_data = json_dumps(data)
|
||||
error = None
|
||||
except errors.InternalError as e:
|
||||
json_data = None
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
import sys
|
||||
import json
|
||||
import logging
|
||||
|
||||
from redash.utils import JSONEncoder
|
||||
from redash.utils import json_loads, json_dumps
|
||||
from redash.query_runner import *
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@@ -83,7 +82,7 @@ class Vertica(BaseSQLQueryRunner):
|
||||
if error is not None:
|
||||
raise Exception("Failed getting schema.")
|
||||
|
||||
results = json.loads(results)
|
||||
results = json_loads(results)
|
||||
|
||||
for row in results['rows']:
|
||||
table_name = '{}.{}'.format(row['table_schema'], row['table_name'])
|
||||
@@ -128,7 +127,7 @@ class Vertica(BaseSQLQueryRunner):
|
||||
'type': types_map.get(col[1], None)} for col in columns_data]
|
||||
|
||||
data = {'columns': columns, 'rows': rows}
|
||||
json_data = json.dumps(data, cls=JSONEncoder)
|
||||
json_data = json_dumps(data)
|
||||
error = None
|
||||
else:
|
||||
json_data = None
|
||||
|
||||
@@ -1,10 +1,12 @@
|
||||
import json
|
||||
import yaml
|
||||
import logging
|
||||
from redash.query_runner import *
|
||||
from redash.utils import JSONEncoder
|
||||
import requests
|
||||
import yaml
|
||||
from urlparse import parse_qs, urlparse
|
||||
|
||||
import requests
|
||||
|
||||
from redash.query_runner import *
|
||||
from redash.utils import json_dumps
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
COLUMN_TYPES = {
|
||||
@@ -140,7 +142,7 @@ class YandexMetrica(BaseSQLQueryRunner):
|
||||
return data, error
|
||||
|
||||
try:
|
||||
data = json.dumps(parse_ym_response(self._send_query(**params)), cls=JSONEncoder)
|
||||
data = json_dumps(parse_ym_response(self._send_query(**params)))
|
||||
error = None
|
||||
except Exception as e:
|
||||
logging.exception(e)
|
||||
|
||||
@@ -3,19 +3,20 @@ This will eventually replace all the `to_dict` methods of the different model
classes we have. This will ensure cleaner code and better
separation of concerns.
"""

import json
from funcy import project

from flask_login import current_user

from redash import models
from redash.permissions import has_access, view_only
from redash.utils import json_loads


def public_widget(widget):
res = {
'id': widget.id,
'width': widget.width,
'options': json.loads(widget.options),
'options': json_loads(widget.options),
'text': widget.text,
'updated_at': widget.updated_at,
'created_at': widget.created_at
@@ -27,7 +28,7 @@ def public_widget(widget):
'type': widget.visualization.type,
'name': widget.visualization.name,
'description': widget.visualization.description,
'options': json.loads(widget.visualization.options),
'options': json_loads(widget.visualization.options),
'updated_at': widget.visualization.updated_at,
'created_at': widget.visualization.created_at,
'query': {
@@ -65,7 +66,7 @@ class QuerySerializer(Serializer):
def __init__(self, object_or_list, **kwargs):
self.object_or_list = object_or_list
self.options = kwargs


def serialize(self):
if isinstance(self.object_or_list, models.Query):
result = serialize_query(self.object_or_list, **self.options)
@@ -77,7 +78,7 @@ class QuerySerializer(Serializer):
favorite_ids = models.Favorite.are_favorites(current_user.id, self.object_or_list)
for query in result:
query['is_favorite'] = query['id'] in favorite_ids


return result


@@ -132,7 +133,7 @@ def serialize_visualization(object, with_query=True):
'type': object.type,
'name': object.name,
'description': object.description,
'options': json.loads(object.options),
'options': json_loads(object.options),
'updated_at': object.updated_at,
'created_at': object.created_at
}
@@ -147,7 +148,7 @@ def serialize_widget(object):
d = {
'id': object.id,
'width': object.width,
'options': json.loads(object.options),
'options': json_loads(object.options),
'dashboard_id': object.dashboard_id,
'text': object.text,
'updated_at': object.updated_at,
@@ -181,8 +182,9 @@ def serialize_alert(alert, full=True):

return d


def serialize_dashboard(obj, with_widgets=False, user=None, with_favorite_state=True):
layout = json.loads(obj.layout)
layout = json_loads(obj.layout)

widgets = []

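The serializers keep their behavior; only the decoder changes, with widget, visualization and dashboard fields stored as JSON text now parsed by json_loads. A small sketch of that call on a stored options string (the value here is made up for illustration):

from redash.utils import json_loads

stored_options = '{"position": {"row": 0, "col": 0, "sizeX": 3}}'  # illustrative stored value
options = json_loads(stored_options)
assert options['position']['sizeX'] == 3
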
@@ -1,4 +1,3 @@
import json
import os


@@ -19,8 +18,15 @@ def set_from_string(s):
return set(array_from_string(s))


def parse_boolean(str):
return json.loads(str.lower())
def parse_boolean(s):
"""Takes a string and returns the equivalent as a boolean value."""
s = s.strip().lower()
if s in ('yes', 'true', 'on', '1'):
return True
elif s in ('no', 'false', 'off', '0', 'none'):
return False
else:
raise ValueError('Invalid boolean value %r' % s)


def int_or_none(value):

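The rewritten parse_boolean no longer round-trips the input through a JSON parser; it normalizes the string and compares it against two fixed sets, raising on anything else. A quick sketch of the behavior implemented above:

parse_boolean('TRUE')   # True  ('yes', 'true', 'on', '1' are accepted)
parse_boolean(' off ')  # False (input is stripped and lower-cased first)
parse_boolean('maybe')  # raises ValueError: Invalid boolean value 'maybe'
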
@@ -1,20 +1,19 @@
import json
import logging
import signal
import time

import pystache
import redis

from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded
from celery.result import AsyncResult
from celery.utils.log import get_task_logger
from six import text_type
from redash import models, redis_connection, settings, statsd_client, utils

from redash import models, redis_connection, settings, statsd_client
from redash.query_runner import InterruptException
from redash.utils import gen_query_hash
from redash.worker import celery
from redash.tasks.alerts import check_alerts_for_query
from redash.utils import gen_query_hash, json_dumps, json_loads, utcnow
from redash.worker import celery

logger = get_task_logger(__name__)

@@ -60,7 +59,7 @@ class QueryTaskTracker(object):

self.data['updated_at'] = time.time()
key_name = self._key_name(self.data['task_id'])
connection.set(key_name, utils.json_dumps(self.data))
connection.set(key_name, json_dumps(self.data))
connection.zadd(self._get_list(), time.time(), key_name)

for l in self.ALL_LISTS:
@@ -97,7 +96,7 @@ class QueryTaskTracker(object):
@classmethod
def create_from_data(cls, data):
if data:
data = json.loads(data)
data = json_loads(data)
return cls(data)

return None
@@ -307,7 +306,7 @@ def refresh_queries():
redis_connection.hmset('redash:status', {
'outdated_queries_count': outdated_queries_count,
'last_refresh_at': now,
'query_ids': json.dumps(query_ids)
'query_ids': json_dumps(query_ids)
})

statsd_client.gauge('manager.seconds_since_refresh', now - float(status.get('last_refresh_at', now)))
@@ -483,7 +482,7 @@ class QueryExecutor(object):
query_result, updated_query_ids = models.QueryResult.store_result(
self.data_source.org_id, self.data_source,
self.query_hash, self.query, data,
run_time, utils.utcnow())
run_time, utcnow())
models.db.session.commit() # make sure that alert sees the latest query result
self._log_progress('checking_alerts')
for query_id in updated_query_ids:

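QueryTaskTracker now serializes its state with the module-level helpers rather than reaching through redash.utils. A rough sketch of the save/load round-trip it relies on, using a plain dict in place of the Redis connection and an invented key format:

from redash.utils import json_dumps, json_loads

store = {}  # stand-in for the Redis connection
tracker_data = {'task_id': 'abc123', 'state': 'waiting', 'updated_at': 1535000000.0}  # illustrative fields

# save(): the tracker stores its data as a JSON string under a per-task key
store['query_task_tracker:abc123'] = json_dumps(tracker_data)

# create_from_data(): the raw string read back from Redis is decoded again
restored = json_loads(store['query_task_tracker:abc123'])
assert restored == tracker_data
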
@@ -1,17 +1,17 @@
import cStringIO
import csv
import codecs
import decimal
import datetime
import json
import decimal
import hashlib
import os
import random
import re
import hashlib
import pytz
import pystache
import os
import simplejson
import uuid

import pystache
import pytz
import simplejson
from funcy import distinct, select_values
from six import string_types
from sqlalchemy.orm.query import Query
@@ -68,47 +68,34 @@ def generate_token(length):
return ''.join(rand.choice(chars) for x in range(length))


class JSONEncoderMixin:
"""Custom JSON encoding class, to handle Decimal and datetime.date instances."""

def process_default(self, o):
# Some SQLAlchemy collections are lazy.
if isinstance(o, Query):
return True, list(o)
if isinstance(o, decimal.Decimal):
return True, float(o)

if isinstance(o, (datetime.date, datetime.time)):
return True, o.isoformat()

if isinstance(o, datetime.timedelta):
return True, str(o)

return False, None # default processing


class JSONEncoder(JSONEncoderMixin, json.JSONEncoder):
"""Adapter for `json.dumps`."""

def default(self, o):
processed, result = self.process_default(o)
if not processed:
result = super(JSONEncoder, self).default(o)
return result


class SimpleJSONEncoder(JSONEncoderMixin, simplejson.JSONEncoder):
class JSONEncoder(simplejson.JSONEncoder):
"""Adapter for `simplejson.dumps`."""

def default(self, o):
processed, result = self.process_default(o)
if not processed:
result = super(SimpleJSONEncoder, self).default(o)
return result
# Some SQLAlchemy collections are lazy.
if isinstance(o, Query):
return list(o)
elif isinstance(o, decimal.Decimal):
return float(o)
elif isinstance(o, (datetime.timedelta, uuid.UUID)):
return str(o)
elif isinstance(o, (datetime.date, datetime.time)):
return o.isoformat()
else:
return super(JSONEncoder, self).default(o)


def json_dumps(data):
return json.dumps(data, cls=JSONEncoder)
def json_loads(data, *args, **kwargs):
"""A custom JSON loading function which passes all parameters to the
simplejson.loads function."""
return simplejson.loads(data, *args, **kwargs)


def json_dumps(data, *args, **kwargs):
"""A custom JSON dumping function which passes all parameters to the
simplejson.dumps function."""
kwargs.setdefault('cls', JSONEncoder)
return simplejson.dumps(data, *args, **kwargs)


def build_url(request, host, path):

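The new json_loads/json_dumps wrappers are thin pass-throughs to simplejson, with JSONEncoder installed as the default cls so Decimal, UUID, date/time and timedelta values (and lazy SQLAlchemy query results) serialize without callers opting in. A short sketch of what this gives call sites:

import datetime
import decimal
import uuid

from redash.utils import json_dumps, json_loads

payload = {
    'amount': decimal.Decimal('1.05'),                        # serialized as 1.05
    'id': uuid.UUID('12345678-1234-5678-1234-567812345678'),  # serialized as a string
    'created_at': datetime.date(2018, 8, 1),                  # serialized as '2018-08-01'
}
text = json_dumps(payload, sort_keys=True)  # extra kwargs flow through to simplejson.dumps
assert json_loads(text)['amount'] == 1.05
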
@@ -1,9 +1,9 @@
import json
import jsonschema
from jsonschema import ValidationError

from sqlalchemy.ext.mutable import Mutable

from redash.utils import json_dumps, json_loads

SECRET_PLACEHOLDER = '--------'


@@ -45,7 +45,7 @@ class ConfigurationContainer(Mutable):
jsonschema.validate(self._config, self._schema)

def to_json(self):
return json.dumps(self._config, sort_keys=True)
return json_dumps(self._config, sort_keys=True)

def iteritems(self):
return self._config.iteritems()
@@ -92,4 +92,4 @@ class ConfigurationContainer(Mutable):

@classmethod
def from_json(cls, config_in_json):
return cls(json.loads(config_in_json))
return cls(json_loads(config_in_json))

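to_json keeps its sort_keys=True argument, which json_dumps forwards to simplejson, so serialized data source configurations stay deterministic. A tiny sketch of the round-trip these two methods depend on (the options dict is made up):

from redash.utils import json_dumps, json_loads

config = {'host': 'db.example.com', 'port': 5439, 'dbname': 'analytics'}  # illustrative options
as_json = json_dumps(config, sort_keys=True)
# '{"dbname": "analytics", "host": "db.example.com", "port": 5439}'
assert json_loads(as_json) == config
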
@@ -8,5 +8,3 @@ cal = parsedatetime.Calendar()
def parse_human_time(s):
time_struct, _ = cal.parse(s)
return datetime.fromtimestamp(mktime(time_struct))



@@ -1,6 +1,5 @@
import os
import datetime
import json
import logging
from unittest import TestCase
from contextlib import contextmanager
@@ -17,7 +16,7 @@ os.environ['REDASH_MULTI_ORG'] = "true"
from redash import create_app
from redash import redis_connection
from redash.models import db
from redash.utils import json_dumps
from redash.utils import json_dumps, json_loads
from tests.factories import Factory, user_factory


@@ -94,7 +93,7 @@ class BaseTestCase(TestCase):
)

if response.data and is_json:
response.json = json.loads(response.data)
response.json = json_loads(response.data)

return response

@@ -112,7 +111,8 @@ class BaseTestCase(TestCase):

def assertResponseEqual(self, expected, actual):
for k, v in expected.iteritems():
if isinstance(v, datetime.datetime) or isinstance(actual[k], datetime.datetime):
if isinstance(v, datetime.datetime) or isinstance(actual[k],
datetime.datetime):
continue

if isinstance(v, list):

@@ -1,8 +1,9 @@
import json
from tests import BaseTestCase

from redash.models import ApiKey, Dashboard, AccessPermission, db
from redash.permissions import ACCESS_TYPE_MODIFY
from redash.serializers import serialize_dashboard
from redash.utils import json_loads


class TestDashboardListResource(BaseTestCase):
@@ -25,7 +26,7 @@ class TestDashboardListGetResource(BaseTestCase):

assert len(rv.json['results']) == 3
assert set(map(lambda d: d['id'], rv.json['results'])) == set([d1.id, d2.id, d3.id])


def test_filters_with_tags(self):
d1 = self.factory.create_dashboard(tags=[u'test'])
d2 = self.factory.create_dashboard()
@@ -34,7 +35,7 @@ class TestDashboardListGetResource(BaseTestCase):
rv = self.make_request('get', '/api/dashboards?tags=test')
assert len(rv.json['results']) == 1
assert set(map(lambda d: d['id'], rv.json['results'])) == set([d1.id])


def test_search_term(self):
d1 = self.factory.create_dashboard(name="Sales")
d2 = self.factory.create_dashboard(name="Q1 sales")
@@ -52,7 +53,7 @@ class TestDashboardResourceGet(BaseTestCase):
self.assertEquals(rv.status_code, 200)

expected = serialize_dashboard(d1, with_widgets=True, with_favorite_state=False)
actual = json.loads(rv.data)
actual = json_loads(rv.data)

self.assertResponseEqual(expected, actual)


@@ -1,6 +1,7 @@
import json
from tests import BaseTestCase

from redash.models import db
from redash.utils import json_dumps


class TestQueryResultsCacheHeaders(BaseTestCase):
@@ -162,7 +163,17 @@ class TestQueryResultExcelResponse(BaseTestCase):

def test_renders_excel_file_when_rows_have_missing_columns(self):
query = self.factory.create_query()
query_result = self.factory.create_query_result(data=json.dumps({'rows': [{'test': 1}, {'test': 2, 'test2': 3}], 'columns': [{'name': 'test'}, {'name': 'test2'}]}))
data = {
'rows': [
{'test': 1},
{'test': 2, 'test2': 3},
],
'columns': [
{'name': 'test'},
{'name': 'test2'},
],
}
query_result = self.factory.create_query_result(data=json_dumps(data))

rv = self.make_request('get', '/api/queries/{}/results/{}.xlsx'.format(query.id, query_result.id), is_json=False)
self.assertEquals(rv.status_code, 200)

@@ -1,10 +1,10 @@
import datetime
import json
from unittest import TestCase
from pytz import utc
from redash.query_runner.mongodb import parse_query_json, parse_results, _get_column_by_name

from redash.utils import parse_human_time
from pytz import utc

from redash.query_runner.mongodb import parse_query_json, parse_results, _get_column_by_name
from redash.utils import json_dumps, parse_human_time


class TestParseQueryJson(TestCase):
@@ -18,7 +18,7 @@ class TestParseQueryJson(TestCase):
}
}

query_data = parse_query_json(json.dumps(query))
query_data = parse_query_json(json_dumps(query))
self.assertDictEqual(query_data, query)

def test_parses_isodate_fields(self):
@@ -32,7 +32,7 @@ class TestParseQueryJson(TestCase):
'testIsoDate': "ISODate(\"2014-10-03T00:00\")"
}

query_data = parse_query_json(json.dumps(query))
query_data = parse_query_json(json_dumps(query))

self.assertEqual(query_data['testIsoDate'], datetime.datetime(2014, 10, 3, 0, 0))

@@ -49,7 +49,7 @@ class TestParseQueryJson(TestCase):
'testIsoDate': "ISODate(\"2014-10-03T00:00\")"
}

query_data = parse_query_json(json.dumps(query))
query_data = parse_query_json(json_dumps(query))

self.assertEqual(query_data['testIsoDate'], datetime.datetime(2014, 10, 3, 0, 0))
self.assertEqual(query_data['test_dict']['b']['date'], datetime.datetime(2014, 10, 4, 0, 0))
@@ -71,7 +71,7 @@ class TestParseQueryJson(TestCase):
]
}

query_data = parse_query_json(json.dumps(query))
query_data = parse_query_json(json_dumps(query))

self.assertDictEqual(query, query_data)

@@ -91,7 +91,7 @@ class TestParseQueryJson(TestCase):
'$undefined': None
}
}
query_data = parse_query_json(json.dumps(query))
query_data = parse_query_json(json_dumps(query))
self.assertEqual(query_data['test$undefined'], None)
self.assertEqual(query_data['test$date'], datetime.datetime(2014, 10, 3, 0, 0).replace(tzinfo=utc))

@@ -101,7 +101,7 @@ class TestParseQueryJson(TestCase):
}

one_hour_ago = parse_human_time("1 hour ago")
query_data = parse_query_json(json.dumps(query))
query_data = parse_query_json(json_dumps(query))
self.assertEqual(query_data['ts'], one_hour_ago)


@@ -119,7 +119,7 @@ class TestMongoResults(TestCase):
self.assertIsNotNone(_get_column_by_name(columns, 'column'))
self.assertIsNotNone(_get_column_by_name(columns, 'column2'))
self.assertIsNotNone(_get_column_by_name(columns, 'column3'))


def test_parses_nested_results(self):
raw_results = [
{'column': 1, 'column2': 'test', 'nested': {
@@ -143,4 +143,4 @@ class TestMongoResults(TestCase):
self.assertIsNotNone(_get_column_by_name(columns, 'column3'))
self.assertIsNotNone(_get_column_by_name(columns, 'nested.a'))
self.assertIsNotNone(_get_column_by_name(columns, 'nested.b'))
self.assertIsNotNone(_get_column_by_name(columns, 'nested.c'))
self.assertIsNotNone(_get_column_by_name(columns, 'nested.c'))

@@ -1,6 +1,6 @@
import datetime
import json
from unittest import TestCase

from redash.query_runner.prometheus import get_instant_rows, get_range_rows