Mirror of https://github.com/getredash/redash.git, synced 2025-12-19 17:37:19 -05:00
Consistently use simplejson for loading and dumping JSON. (#2817)
* Consistently use simplejson for loading and dumping JSON. This introduces the new functions redash.utils.json_dumps and redash.utils.json_loads and simplifies the custom encoder setup. UUIDs are now handled by the default encoder, too. Fixes #2807.
* Use string comparison in parse_boolean instead of the (simple)json module.
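The diff below shows only the call sites that switch from json/simplejson to the new helpers; the helpers themselves live in redash.utils and are not part of this excerpt. As a rough sketch of what such wrappers could look like (the encoder details here are an assumption for illustration, not the committed implementation):

import datetime
import decimal
import uuid

import simplejson


class JSONEncoder(simplejson.JSONEncoder):
    # Illustrative default encoder: handles types the plain encoder rejects,
    # including UUIDs, which the commit message says are now covered by default.
    def default(self, o):
        if isinstance(o, (datetime.datetime, datetime.date)):
            return o.isoformat()
        if isinstance(o, decimal.Decimal):
            return float(o)
        if isinstance(o, uuid.UUID):
            return str(o)
        return super(JSONEncoder, self).default(o)


def json_loads(data, *args, **kwargs):
    # Thin wrapper so all parsing goes through simplejson.
    return simplejson.loads(data, *args, **kwargs)


def json_dumps(data, *args, **kwargs):
    # Thin wrapper that defaults to the custom encoder but lets callers override it.
    kwargs.setdefault('cls', JSONEncoder)
    return simplejson.dumps(data, *args, **kwargs)

Call sites can then pass encoder options straight through, e.g. json_dumps(data, ignore_nan=True) in the Athena runner or json_dumps(data, cls=CassandraJSONEncoder) in the Cassandra runner, as the hunks below show.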
@@ -1,10 +1,10 @@
 from __future__ import print_function
 import os
 import sys
-import json
 import re
 import subprocess
 import requests
+import simplejson

 github_token = os.environ['GITHUB_TOKEN']
 auth = (github_token, 'x-oauth-basic')
@@ -17,7 +17,7 @@ def _github_request(method, path, params=None, headers={}):
 url = path

 if params is not None:
-params = json.dumps(params)
+params = simplejson.dumps(params)

 response = requests.request(method, url, data=params, auth=auth)
 return response

@@ -6,7 +6,7 @@ Create Date: 2018-01-31 15:20:30.396533

 """
 from __future__ import print_function
-import json
+import simplejson
 from alembic import op
 import sqlalchemy as sa

@@ -26,7 +26,7 @@ def upgrade():
 print("Updating dashboards position data:")
 for dashboard in Dashboard.query:
 print(" Updating dashboard: {}".format(dashboard.id))
-layout = json.loads(dashboard.layout)
+layout = simplejson.loads(dashboard.layout)

 print(" Building widgets map:")
 widgets = {}
@@ -47,14 +47,14 @@ def upgrade():
 if widget is None:
 continue

-options = json.loads(widget.options) or {}
+options = simplejson.loads(widget.options) or {}
 options['position'] = {
 "row": row_index,
 "col": column_index * column_size,
 "sizeX": column_size * widget.width
 }

-widget.options = json.dumps(options)
+widget.options = simplejson.dumps(options)

 db.session.add(widget)


@@ -1,5 +1,5 @@
 from __future__ import print_function
-import json
+import simplejson
 import jsonschema
 from jsonschema import ValidationError

@@ -15,7 +15,7 @@ def validate_configuration(query_runner_type, configuration_json):

 try:
 if isinstance(configuration_json, string_types):
-configuration = json.loads(configuration_json)
+configuration = simplejson.loads(configuration_json)
 else:
 configuration = configuration_json
 jsonschema.validate(configuration, query_runner_class.configuration_schema())
@@ -24,6 +24,7 @@ def validate_configuration(query_runner_type, configuration_json):

 return True

+
 def update(data_source):
 print("[%s] Old options: %s" % (data_source.name, data_source.options))

@@ -40,7 +41,7 @@ def update(data_source):
 if k == 'port':
 configuration[k] = int(v)

-data_source.options = json.dumps(configuration)
+data_source.options = simplejson.dumps(configuration)

 elif data_source.type == 'mysql':
 mapping = {
@@ -55,10 +56,10 @@ def update(data_source):
 for value in values:
 k, v = value.split("=", 1)
 configuration[mapping[k]] = v
-data_source.options = json.dumps(configuration)
+data_source.options = simplejson.dumps(configuration)

 elif data_source.type == 'graphite':
-old_config = json.loads(data_source.options)
+old_config = simplejson.loads(data_source.options)

 configuration = {
 "url": old_config["url"]
@@ -70,13 +71,13 @@ def update(data_source):
 if "auth" in old_config:
 configuration['username'], configuration['password'] = old_config["auth"]

-data_source.options = json.dumps(configuration)
+data_source.options = simplejson.dumps(configuration)

 elif data_source.type == 'url':
-data_source.options = json.dumps({"url": data_source.options})
+data_source.options = simplejson.dumps({"url": data_source.options})

 elif data_source.type == 'script':
-data_source.options = json.dumps({"path": data_source.options})
+data_source.options = simplejson.dumps({"path": data_source.options})

 elif data_source.type == 'mongo':
 data_source.type = 'mongodb'

@@ -1,5 +1,5 @@
 from base64 import b64encode
-import json
+import simplejson
 from redash.models import DataSource


@@ -15,23 +15,23 @@ if __name__ == '__main__':
 for ds in DataSource.select(DataSource.id, DataSource.type, DataSource.options):

 if ds.type == 'bigquery':
-options = json.loads(ds.options)
+options = simplejson.loads(ds.options)

 if 'jsonKeyFile' in options:
 continue

 new_options = {
 'projectId': options['projectId'],
-'jsonKeyFile': b64encode(json.dumps({
+'jsonKeyFile': b64encode(simplejson.dumps({
 'client_email': options['serviceAccount'],
 'private_key': convert_p12_to_pem(options['privateKey'])
 }))
 }

-ds.options = json.dumps(new_options)
+ds.options = simplejson.dumps(new_options)
 ds.save(only=ds.dirty_fields)
 elif ds.type == 'google_spreadsheets':
-options = json.loads(ds.options)
+options = simplejson.loads(ds.options)
 if 'jsonKeyFile' in options:
 continue

@@ -40,5 +40,5 @@ if __name__ == '__main__':
 'jsonKeyFile': b64encode(f.read())
 }

-ds.options = json.dumps(new_options)
+ds.options = simplejson.dumps(new_options)
 ds.save(only=ds.dirty_fields)

@@ -1,11 +1,11 @@
 from __future__ import print_function
-import json
+import simplejson
 from redash import models

 if __name__ == '__main__':
 for vis in models.Visualization.select():
 if vis.type == 'COUNTER':
-options = json.loads(vis.options)
+options = simplejson.loads(vis.options)
 print("Before: ", options)
 if 'rowNumber' in options and options['rowNumber'] is not None:
 options['rowNumber'] += 1
@@ -20,5 +20,5 @@ if __name__ == '__main__':
 options['targetRowNumber'] = options['rowNumber']

 print("After: ", options)
-vis.options = json.dumps(options)
+vis.options = simplejson.dumps(options)
 vis.save()

@@ -1,4 +1,3 @@
-import json
 from flask_admin import Admin
 from flask_admin.base import MenuLink
 from flask_admin.contrib.sqla import ModelView
@@ -8,6 +7,7 @@ from wtforms.widgets import TextInput

 from redash import models
 from redash.permissions import require_super_admin
+from redash.utils import json_loads


 class ArrayListField(fields.Field):
@@ -30,7 +30,7 @@ class JSONTextAreaField(fields.TextAreaField):
 def process_formdata(self, valuelist):
 if valuelist:
 try:
-json.loads(valuelist[0])
+json_loads(valuelist[0])
 except ValueError:
 raise ValueError(self.gettext(u'Invalid JSON'))
 self.data = valuelist[0]

@@ -1,8 +1,6 @@
 from __future__ import print_function
-import json
-
-
 import click
+import simplejson
 from flask.cli import FlaskGroup, run_command
 from flask import current_app

@@ -44,7 +42,7 @@ def version():

 @manager.command()
 def status():
-print(json.dumps(get_status(), indent=2))
+print(simplejson.dumps(get_status(), indent=2))


 @manager.command()

@@ -1,6 +1,5 @@
 from __future__ import print_function
 from sys import exit
-import json

 import click
 from flask.cli import AppGroup
@@ -10,6 +9,7 @@ from sqlalchemy.orm.exc import NoResultFound
 from redash import models
 from redash.query_runner import (get_configuration_schema_for_query_runner_type,
 query_runners)
+from redash.utils import json_loads
 from redash.utils.configuration import ConfigurationContainer

 manager = AppGroup(help="Data sources management commands.")
@@ -129,7 +129,7 @@ def new(name=None, type=None, options=None, organization='default'):

 options = ConfigurationContainer(options_obj, schema)
 else:
-options = ConfigurationContainer(json.loads(options), schema)
+options = ConfigurationContainer(json_loads(options), schema)

 if not options.is_valid():
 print("Error: invalid configuration.")
@@ -198,7 +198,7 @@ def edit(name, new_name=None, options=None, type=None, organization='default'):
 if options is not None:
 schema = get_configuration_schema_for_query_runner_type(
 data_source.type)
-options = json.loads(options)
+options = json_loads(options)
 data_source.options.set_schema(schema)
 data_source.options.update(options)


@@ -1,5 +1,4 @@
 import logging
-import json

 logger = logging.getLogger(__name__)

@@ -55,7 +54,7 @@ def register(destination_class):
 global destinations
 if destination_class.enabled():
 logger.debug("Registering %s (%s) destinations.", destination_class.name(), destination_class.type())
 destinations[destination_class.type()] = destination_class
 else:
 logger.warning("%s destination enabled but not supported, not registering. Either disable or install missing dependencies.", destination_class.name())


@@ -1,4 +1,3 @@
-import json
 import logging
 import requests


@@ -1,9 +1,9 @@
-import json
 import logging
 import requests

 from redash.destinations import *
 from redash.models import Alert
+from redash.utils import json_dumps


 colors = {
@@ -46,7 +46,7 @@ class HipChat(BaseDestination):
 'color': colors.get(new_state, 'green')
 }
 headers = {'Content-Type': 'application/json'}
-response = requests.post(options['url'], data=json.dumps(data), headers=headers)
+response = requests.post(options['url'], data=json_dumps(data), headers=headers)

 if response.status_code != 204:
 logging.error('Bad status code received from HipChat: %d', response.status_code)

@@ -1,8 +1,8 @@
-import json
 import logging
 import requests

 from redash.destinations import *
+from redash.utils import json_dumps


 class Mattermost(BaseDestination):
@@ -46,7 +46,7 @@ class Mattermost(BaseDestination):
 if options.get('channel'): payload['channel'] = options.get('channel')

 try:
-resp = requests.post(options.get('url'), data=json.dumps(payload))
+resp = requests.post(options.get('url'), data=json_dumps(payload))
 logging.warning(resp.text)

 if resp.status_code != 200:

@@ -1,8 +1,8 @@
-import json
 import logging
 import requests

 from redash.destinations import *
+from redash.utils import json_dumps


 class Slack(BaseDestination):
@@ -58,7 +58,7 @@ class Slack(BaseDestination):
 else:
 text = alert.name + " went back to normal"
 color = "#27ae60"

 payload = {'attachments': [{'text': text, 'color': color, 'fields': fields}]}

 if options.get('username'): payload['username'] = options.get('username')
@@ -67,7 +67,7 @@ class Slack(BaseDestination):
 if options.get('channel'): payload['channel'] = options.get('channel')

 try:
-resp = requests.post(options.get('url'), data=json.dumps(payload))
+resp = requests.post(options.get('url'), data=json_dumps(payload))
 logging.warning(resp.text)
 if resp.status_code != 200:
 logging.error("Slack send ERROR. status_code => {status}".format(status=resp.status_code))

@@ -1,14 +1,14 @@
-import json
-
 from flask import request
 from flask_login import login_required

 from redash import models, redis_connection
 from redash.handlers import routes
 from redash.handlers.base import json_response
 from redash.permissions import require_super_admin
 from redash.serializers import QuerySerializer
-from redash.tasks.queries import QueryTaskTracker
 from redash.tasks import record_event
+from redash.tasks.queries import QueryTaskTracker
+from redash.utils import json_loads

+
 @routes.route('/api/admin/queries/outdated', methods=['GET'])
@@ -16,7 +16,7 @@ from redash.tasks import record_event
 @login_required
 def outdated_queries():
 manager_status = redis_connection.hgetall('redash:status')
-query_ids = json.loads(manager_status.get('query_ids', '[]'))
+query_ids = json_loads(manager_status.get('query_ids', '[]'))
 if query_ids:
 outdated_queries = (
 models.Query.query.outerjoin(models.QueryResult)

@@ -1,16 +1,19 @@
 import logging
-import json
 import time

 import pystache
 from flask import make_response, request
 from flask_login import current_user
 from flask_restful import abort
-from redash import models, settings, utils
+from redash import models, settings
 from redash.tasks import QueryTask, record_event
 from redash.permissions import require_permission, not_view_only, has_access, require_access, view_only
 from redash.handlers.base import BaseResource, get_object_or_404
-from redash.utils import collect_query_parameters, collect_parameters_from_request, gen_query_hash
+from redash.utils import (collect_query_parameters,
+collect_parameters_from_request,
+gen_query_hash,
+json_dumps,
+utcnow)
 from redash.tasks.queries import enqueue_query


@@ -56,7 +59,7 @@ def run_query_sync(data_source, parameter_values, query_text, max_age=0):
 run_time = time.time() - started_at
 query_result, updated_query_ids = models.QueryResult.store_result(data_source.org_id, data_source,
 query_hash, query_text, data,
-run_time, utils.utcnow())
+run_time, utcnow())

 models.db.session.commit()
 return query_result
@@ -243,7 +246,7 @@ class QueryResultResource(BaseResource):
 abort(404, message='No cached result found for this query.')

 def make_json_response(self, query_result):
-data = json.dumps({'query_result': query_result.to_dict()}, cls=utils.JSONEncoder)
+data = json_dumps({'query_result': query_result.to_dict()})
 headers = {'Content-Type': "application/json"}
 return make_response(data, 200, headers)


@@ -1,5 +1,3 @@
-import json
-
 from flask import request

 from redash import models
@@ -7,6 +5,7 @@ from redash.handlers.base import BaseResource, get_object_or_404
 from redash.serializers import serialize_visualization
 from redash.permissions import (require_object_modify_permission,
 require_permission)
+from redash.utils import json_dumps


 class VisualizationListResource(BaseResource):
@@ -17,7 +16,7 @@ class VisualizationListResource(BaseResource):
 query = get_object_or_404(models.Query.get_by_id_and_org, kwargs.pop('query_id'), self.current_org)
 require_object_modify_permission(query, self.current_user)

-kwargs['options'] = json.dumps(kwargs['options'])
+kwargs['options'] = json_dumps(kwargs['options'])
 kwargs['query_rel'] = query

 vis = models.Visualization(**kwargs)
@@ -34,7 +33,7 @@ class VisualizationResource(BaseResource):

 kwargs = request.get_json(force=True)
 if 'options' in kwargs:
-kwargs['options'] = json.dumps(kwargs['options'])
+kwargs['options'] = json_dumps(kwargs['options'])

 kwargs.pop('id', None)
 kwargs.pop('query_id', None)

@@ -1,5 +1,5 @@
 import os
-import json
+import simplejson
 from flask import url_for

 WEBPACK_MANIFEST_PATH = os.path.join(os.path.dirname(__file__), '../../client/dist/', 'asset-manifest.json')
@@ -14,7 +14,7 @@ def configure_webpack(app):
 if assets is None or app.debug:
 try:
 with open(WEBPACK_MANIFEST_PATH) as fp:
-assets = json.load(fp)
+assets = simplejson.load(fp)
 except IOError:
 app.logger.exception('Unable to load webpack manifest')
 assets = {}

@@ -1,12 +1,12 @@
-import json
-
 from flask import request

 from redash import models
 from redash.handlers.base import BaseResource
 from redash.serializers import serialize_widget
 from redash.permissions import (require_access,
 require_object_modify_permission,
 require_permission, view_only)
+from redash.utils import json_dumps

+
 class WidgetListResource(BaseResource):
@@ -27,7 +27,7 @@ class WidgetListResource(BaseResource):
 dashboard = models.Dashboard.get_by_id_and_org(widget_properties.pop('dashboard_id'), self.current_org)
 require_object_modify_permission(dashboard, self.current_user)

-widget_properties['options'] = json.dumps(widget_properties['options'])
+widget_properties['options'] = json_dumps(widget_properties['options'])
 widget_properties.pop('id', None)
 widget_properties['dashboard'] = dashboard

@@ -63,7 +63,7 @@ class WidgetResource(BaseResource):
 require_object_modify_permission(widget.dashboard, self.current_user)
 widget_properties = request.get_json(force=True)
 widget.text = widget_properties['text']
-widget.options = json.dumps(widget_properties['options'])
+widget.options = json_dumps(widget_properties['options'])
 models.db.session.commit()
 return serialize_widget(widget)


@@ -1,12 +1,12 @@
 from __future__ import absolute_import

-import json
 import logging
 import socket
 import time

 from celery.signals import task_postrun, task_prerun
 from redash import settings, statsd_client
+from redash.utils import json_dumps

 tasks_start_time = {}

@@ -45,7 +45,7 @@ def task_postrun_handler(signal, sender, task_id, task, args, kwargs, retval, st

 normalized_task_name = task.name.replace('redash.tasks.', '').replace('.', '_')
 metric = "celery.task_runtime.{}".format(normalized_task_name)
-logging.debug("metric=%s", json.dumps({'metric': metric, 'tags': tags, 'value': run_time}))
+logging.debug("metric=%s", json_dumps({'metric': metric, 'tags': tags, 'value': run_time}))
 statsd_client.timing(metric_name(metric, tags), run_time)
 statsd_client.incr(metric_name('celery.task.{}.{}'.format(normalized_task_name, state), tags))
 except Exception:

@@ -4,7 +4,6 @@ import datetime
 import functools
 import hashlib
 import itertools
-import json
 import logging
 import time
 from functools import reduce
@@ -22,7 +21,7 @@ from redash.destinations import (get_configuration_schema_for_destination_type,
 from redash.metrics import database # noqa: F401
 from redash.query_runner import (get_configuration_schema_for_query_runner_type,
 get_query_runner)
-from redash.utils import generate_token, json_dumps
+from redash.utils import generate_token, json_dumps, json_loads
 from redash.utils.configuration import ConfigurationContainer
 from redash.settings.organization import settings as org_settings

@@ -141,7 +140,7 @@ class PseudoJSON(TypeDecorator):
 def process_result_value(self, value, dialect):
 if not value:
 return value
-return json.loads(value)
+return json_loads(value)


 class MutableDict(Mutable, dict):
@@ -648,9 +647,9 @@ class DataSource(BelongsToOrgMixin, db.Model):
 query_runner = self.query_runner
 schema = sorted(query_runner.get_schema(get_stats=refresh), key=lambda t: t['name'])

-redis_connection.set(key, json.dumps(schema))
+redis_connection.set(key, json_dumps(schema))
 else:
-schema = json.loads(cache)
+schema = json_loads(cache)

 return schema

@@ -738,7 +737,7 @@ class QueryResult(db.Model, BelongsToOrgMixin):
 'id': self.id,
 'query_hash': self.query_hash,
 'query': self.query_text,
-'data': json.loads(self.data),
+'data': json_loads(self.data),
 'data_source_id': self.data_source_id,
 'runtime': self.runtime,
 'retrieved_at': self.retrieved_at
@@ -807,7 +806,7 @@ class QueryResult(db.Model, BelongsToOrgMixin):
 def make_csv_content(self):
 s = cStringIO.StringIO()

-query_data = json.loads(self.data)
+query_data = json_loads(self.data)
 writer = csv.DictWriter(s, extrasaction="ignore", fieldnames=[col['name'] for col in query_data['columns']])
 writer.writer = utils.UnicodeWriter(s)
 writer.writeheader()
@@ -819,7 +818,7 @@ class QueryResult(db.Model, BelongsToOrgMixin):
 def make_excel_content(self):
 s = cStringIO.StringIO()

-query_data = json.loads(self.data)
+query_data = json_loads(self.data)
 book = xlsxwriter.Workbook(s, {'constant_memory': True})
 sheet = book.add_worksheet("result")

@@ -1297,7 +1296,7 @@ class Alert(TimestampMixin, db.Model):
 return db.session.query(Alert).join(Query).filter(Alert.id == id, Query.org == org).one()

 def evaluate(self):
-data = json.loads(self.query_rel.latest_query_data.data)
+data = json_loads(self.query_rel.latest_query_data.data)
 if data['rows']:
 value = data['rows'][0][self.options['column']]
 op = self.options['op']

@@ -1,10 +1,8 @@
 import logging
-import json
-import sys
-
 import requests

 from redash import settings
+from redash.utils import json_loads

 logger = logging.getLogger(__name__)

@@ -115,7 +113,7 @@ class BaseQueryRunner(object):

 if error is not None:
 raise Exception("Failed running query [%s]." % query)
-return json.loads(results)['rows']
+return json_loads(results)['rows']

 @classmethod
 def to_dict(cls):

@@ -1,11 +1,9 @@
-import json
 import logging
 import os
-import simplejson

 from redash.query_runner import *
 from redash.settings import parse_boolean
-from redash.utils import SimpleJSONEncoder
+from redash.utils import json_dumps, json_loads

 logger = logging.getLogger(__name__)
 ANNOTATE_QUERY = parse_boolean(os.environ.get('ATHENA_ANNOTATE_QUERY', 'true'))
@@ -152,7 +150,7 @@ class Athena(BaseQueryRunner):
 if error is not None:
 raise Exception("Failed getting schema.")

-results = json.loads(results)
+results = json_loads(results)
 for row in results['rows']:
 table_name = '{0}.{1}'.format(row['table_schema'], row['table_name'])
 if table_name not in schema:
@@ -195,7 +193,7 @@ class Athena(BaseQueryRunner):
 'athena_query_id': athena_query_id
 }
 }
-json_data = simplejson.dumps(data, ignore_nan=True, cls=SimpleJSONEncoder)
+json_data = json_dumps(data, ignore_nan=True)
 error = None
 except KeyboardInterrupt:
 if cursor.query_id:

@@ -1,12 +1,11 @@
 from io import StringIO
-import json
 import logging
 import sys
 import uuid
 import csv

 from redash.query_runner import *
-from redash.utils import JSONEncoder
+from redash.utils import json_dumps, json_loads

 logger = logging.getLogger(__name__)

@@ -59,7 +58,7 @@ def generate_rows_and_columns(csv_response):

 meta_with_padding = meta + '=' * (4 - len(meta) % 4)
 meta_decoded = meta_with_padding.decode('base64')
-meta_json = json.loads(meta_decoded)
+meta_json = json_loads(meta_decoded)
 meta_columns = meta_json['tableSchema']['columns']

 reader = csv.reader(data.splitlines())
@@ -162,7 +161,7 @@ class AxibaseTSD(BaseQueryRunner):
 columns, rows = generate_rows_and_columns(data)

 data = {'columns': columns, 'rows': rows}
-json_data = json.dumps(data, cls=JSONEncoder)
+json_data = json_dumps(data)
 error = None

 except SQLException as e:

@@ -1,5 +1,4 @@
 import datetime
-import json
 import logging
 import sys
 import time
@@ -10,7 +9,7 @@ import requests

 from redash import settings
 from redash.query_runner import *
-from redash.utils import JSONEncoder
+from redash.utils import json_dumps, json_loads

 logger = logging.getLogger(__name__)

@@ -141,7 +140,7 @@ class BigQuery(BaseQueryRunner):
 "https://www.googleapis.com/auth/drive"
 ]

-key = json.loads(b64decode(self.configuration['jsonKeyFile']))
+key = json_loads(b64decode(self.configuration['jsonKeyFile']))

 creds = ServiceAccountCredentials.from_json_keyfile_dict(key, scope)
 http = httplib2.Http(timeout=settings.BIGQUERY_HTTP_TIMEOUT)
@@ -296,11 +295,11 @@ class BigQuery(BaseQueryRunner):
 data = self._get_query_result(jobs, query)
 error = None

-json_data = json.dumps(data, cls=JSONEncoder)
+json_data = json_dumps(data)
 except apiclient.errors.HttpError as e:
 json_data = None
 if e.resp.status == 400:
-error = json.loads(e.content)['error']['message']
+error = json_loads(e.content)['error']['message']
 else:
 error = e.content
 except KeyboardInterrupt:

@@ -1,9 +1,7 @@
-import json
 import logging
-import uuid

 from redash.query_runner import BaseQueryRunner, register
-from redash.utils import JSONEncoder
+from redash.utils import JSONEncoder, json_dumps, json_loads

 logger = logging.getLogger(__name__)

@@ -18,8 +16,6 @@ except ImportError:

 class CassandraJSONEncoder(JSONEncoder):
 def default(self, o):
-if isinstance(o, uuid.UUID):
-return str(o)
 if isinstance(o, sortedset):
 return list(o)
 return super(CassandraJSONEncoder, self).default(o)
@@ -79,7 +75,7 @@ class Cassandra(BaseQueryRunner):
 select release_version from system.local;
 """
 results, error = self.run_query(query, None)
-results = json.loads(results)
+results = json_loads(results)
 release_version = results['rows'][0]['release_version']

 query = """
@@ -96,7 +92,7 @@ class Cassandra(BaseQueryRunner):
 """.format(self.configuration['keyspace'])

 results, error = self.run_query(query, None)
-results = json.loads(results)
+results = json_loads(results)

 schema = {}
 for row in results['rows']:
@@ -135,7 +131,7 @@ class Cassandra(BaseQueryRunner):
 rows = [dict(zip(column_names, row)) for row in result]

 data = {'columns': columns, 'rows': rows}
-json_data = json.dumps(data, cls=CassandraJSONEncoder)
+json_data = json_dumps(data, cls=CassandraJSONEncoder)

 error = None
 except KeyboardInterrupt:

@@ -1,9 +1,11 @@
-import json
 import logging
-from redash.query_runner import *
-from redash.utils import JSONEncoder
-import requests
 import re

+import requests
+
+from redash.query_runner import *
+from redash.utils import json_dumps, json_loads
+
 logger = logging.getLogger(__name__)

+
@@ -47,7 +49,7 @@ class ClickHouse(BaseSQLQueryRunner):
 if error is not None:
 raise Exception("Failed getting schema.")

-results = json.loads(results)
+results = json_loads(results)

 for row in results['rows']:
 table_name = '{}.{}'.format(row['database'], row['table'])
@@ -107,7 +109,7 @@ class ClickHouse(BaseSQLQueryRunner):
 return json_data, error
 try:
 q = self._clickhouse_query(query)
-data = json.dumps(q, cls=JSONEncoder)
+data = json_dumps(q)
 error = None
 except Exception as e:
 data = None

@@ -1,9 +1,8 @@
-import json
 import logging
 import sys

 from redash.query_runner import *
-from redash.utils import JSONEncoder
+from redash.utils import json_dumps

 logger = logging.getLogger(__name__)

@@ -119,7 +118,7 @@ class DynamoDBSQL(BaseSQLQueryRunner):
 rows.append(item)

 data = {'columns': columns, 'rows': rows}
-json_data = json.dumps(data, cls=JSONEncoder)
+json_data = json_dumps(data)
 error = None
 except ParseException as e:
 error = u"Error parsing query at line {} (column {}):\n{}".format(e.lineno, e.column, e.line)

@@ -3,10 +3,10 @@ import sys
 import urllib

 import requests
-import simplejson as json
 from requests.auth import HTTPBasicAuth

 from redash.query_runner import *
+from redash.utils import json_dumps, json_loads

 try:
 import http.client as http_client
@@ -315,7 +315,7 @@ class Kibana(BaseElasticSearch):
 error = None

 logger.debug(query)
-query_params = json.loads(query)
+query_params = json_loads(query)

 index_name = query_params["index"]
 query_data = query_params["query"]
@@ -334,7 +334,6 @@ class Kibana(BaseElasticSearch):
 mappings, error = self._get_query_mappings(mapping_url)
 if error:
 return None, error
-#logger.debug(json.dumps(mappings, indent=4))

 if sort:
 url += "&sort={0}".format(urllib.quote_plus(sort))
@@ -358,7 +357,7 @@ class Kibana(BaseElasticSearch):
 # TODO: Handle complete ElasticSearch queries (JSON based sent over HTTP POST)
 raise Exception("Advanced queries are not supported")

-json_data = json.dumps({
+json_data = json_dumps({
 "columns": result_columns,
 "rows": result_rows
 })
@@ -396,7 +395,7 @@ class ElasticSearch(BaseElasticSearch):
 error = None

 logger.debug(query)
-query_dict = json.loads(query)
+query_dict = json_loads(query)

 index_name = query_dict.pop("index", "")
 result_fields = query_dict.pop("result_fields", None)
@@ -422,7 +421,7 @@ class ElasticSearch(BaseElasticSearch):
 result_rows = []
 self._parse_results(mappings, result_fields, r.json(), result_columns, result_rows)

-json_data = json.dumps({
+json_data = json_dumps({
 "columns": result_columns,
 "rows": result_rows
 })

@@ -1,13 +1,12 @@
 # -*- coding: utf-8 -*-

-import json
 import logging
 from base64 import b64decode
 from datetime import datetime
 from urlparse import parse_qs, urlparse

 from redash.query_runner import *
-from redash.utils import JSONEncoder
+from redash.utils import json_dumps, json_loads

 logger = logging.getLogger(__name__)

@@ -115,7 +114,7 @@ class GoogleAnalytics(BaseSQLQueryRunner):

 def _get_analytics_service(self):
 scope = ['https://www.googleapis.com/auth/analytics.readonly']
-key = json.loads(b64decode(self.configuration['jsonKeyFile']))
+key = json_loads(b64decode(self.configuration['jsonKeyFile']))
 creds = ServiceAccountCredentials.from_json_keyfile_dict(key, scope)
 return build('analytics', 'v3', http=creds.authorize(httplib2.Http()))

@@ -147,7 +146,7 @@ class GoogleAnalytics(BaseSQLQueryRunner):
 def run_query(self, query, user):
 logger.debug("Analytics is about to execute query: %s", query)
 try:
-params = json.loads(query)
+params = json_loads(query)
 except:
 params = parse_qs(urlparse(query).query, keep_blank_values=True)
 for key in params.keys():
@@ -171,7 +170,7 @@ class GoogleAnalytics(BaseSQLQueryRunner):
 response = api.get(**params).execute()
 data = parse_ga_response(response)
 error = None
-json_data = json.dumps(data, cls=JSONEncoder)
+json_data = json_dumps(data)
 except HttpError as e:
 # Make sure we return a more readable error to the end user
 error = e._get_reason()

@@ -1,4 +1,3 @@
-import json
 import logging
 from base64 import b64decode

@@ -7,7 +6,7 @@ from requests import Session
 from xlsxwriter.utility import xl_col_to_name

 from redash.query_runner import *
-from redash.utils import json_dumps
+from redash.utils import json_dumps, json_loads

 logger = logging.getLogger(__name__)

@@ -23,7 +22,7 @@ except ImportError:

 def _load_key(filename):
 with open(filename, "rb") as f:
-return json.loads(f.read())
+return json_loads(f.read())


 def _get_columns_and_column_names(row):
@@ -179,7 +178,7 @@ class GoogleSpreadsheet(BaseQueryRunner):
 'https://spreadsheets.google.com/feeds',
 ]

-key = json.loads(b64decode(self.configuration['jsonKeyFile']))
+key = json_loads(b64decode(self.configuration['jsonKeyFile']))
 creds = ServiceAccountCredentials.from_json_keyfile_dict(key, scope)

 timeout_session = HTTPSession()

@@ -1,9 +1,10 @@
-import json
 import datetime
-import requests
 import logging

+import requests
+
 from redash.query_runner import *
-from redash.utils import JSONEncoder
+from redash.utils import json_dumps

 logger = logging.getLogger(__name__)
+
@@ -21,7 +22,7 @@ def _transform_result(response):
 rows.append({'Time::x': timestamp, 'name::series': series['target'], 'value::y': values[0]})

 data = {'columns': columns, 'rows': rows}
-return json.dumps(data, cls=JSONEncoder)
+return json_dumps(data)


 class Graphite(BaseQueryRunner):

@@ -1,10 +1,9 @@
-import json
 import logging
 import sys
 import base64

 from redash.query_runner import *
-from redash.utils import JSONEncoder
+from redash.utils import json_dumps

 logger = logging.getLogger(__name__)

@@ -167,7 +166,7 @@ class Hive(BaseSQLQueryRunner):
 rows = [dict(zip(column_names, row)) for row in cursor]

 data = {'columns': columns, 'rows': rows}
-json_data = json.dumps(data, cls=JSONEncoder)
+json_data = json_dumps(data)
 error = None
 except KeyboardInterrupt:
 connection.cancel()

@@ -1,9 +1,7 @@
-import json
 import logging
-import sys

 from redash.query_runner import *
-from redash.utils import JSONEncoder
+from redash.utils import json_dumps

 logger = logging.getLogger(__name__)

@@ -118,7 +116,7 @@ class Impala(BaseSQLQueryRunner):
 rows = [dict(zip(column_names, row)) for row in cursor]

 data = {'columns': columns, 'rows': rows}
-json_data = json.dumps(data, cls=JSONEncoder)
+json_data = json_dumps(data)
 error = None
 cursor.close()
 except DatabaseError as e:

@@ -1,8 +1,7 @@
-import json
 import logging

 from redash.query_runner import *
-from redash.utils import JSONEncoder
+from redash.utils import json_dumps

 logger = logging.getLogger(__name__)

@@ -42,10 +41,10 @@ def _transform_result(results):
 result_row[column] = value
 result_rows.append(result_row)

-return json.dumps({
+return json_dumps({
 "columns": [{'name': c} for c in result_columns],
 "rows": result_rows
-}, cls=JSONEncoder)
+})


 class InfluxDB(BaseQueryRunner):

@@ -1,9 +1,8 @@
|
|||||||
import json
|
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from collections import OrderedDict
|
from collections import OrderedDict
|
||||||
|
|
||||||
from redash.query_runner import *
|
from redash.query_runner import *
|
||||||
|
from redash.utils import json_dumps, json_loads
|
||||||
|
|
||||||
|
|
||||||
# TODO: make this more general and move into __init__.py
|
# TODO: make this more general and move into __init__.py
|
||||||
@@ -23,7 +22,7 @@ class ResultSet(object):
|
|||||||
self.columns[column] = {'name': column, 'type': column_type, 'friendly_name': column}
|
self.columns[column] = {'name': column, 'type': column_type, 'friendly_name': column}
|
||||||
|
|
||||||
def to_json(self):
|
def to_json(self):
|
||||||
return json.dumps({'rows': self.rows, 'columns': self.columns.values()})
|
return json_dumps({'rows': self.rows, 'columns': self.columns.values()})
|
||||||
|
|
||||||
|
|
||||||
def parse_issue(issue, field_mapping):
|
def parse_issue(issue, field_mapping):
|
||||||
@@ -39,7 +38,7 @@ def parse_issue(issue, field_mapping):
|
|||||||
# if field mapping with dict member mappings defined get value of each member
|
# if field mapping with dict member mappings defined get value of each member
|
||||||
for member_name in member_names:
|
for member_name in member_names:
|
||||||
if member_name in v:
|
if member_name in v:
|
||||||
result[field_mapping.get_dict_output_field_name(k,member_name)] = v[member_name]
|
result[field_mapping.get_dict_output_field_name(k, member_name)] = v[member_name]
|
||||||
|
|
||||||
else:
|
else:
|
||||||
# these special mapping rules are kept for backwards compatibility
|
# these special mapping rules are kept for backwards compatibility
|
||||||
@@ -64,7 +63,7 @@ def parse_issue(issue, field_mapping):
|
|||||||
if member_name in listItem:
|
if member_name in listItem:
|
||||||
listValues.append(listItem[member_name])
|
listValues.append(listItem[member_name])
|
||||||
if len(listValues) > 0:
|
if len(listValues) > 0:
|
||||||
result[field_mapping.get_dict_output_field_name(k,member_name)] = ','.join(listValues)
|
result[field_mapping.get_dict_output_field_name(k, member_name)] = ','.join(listValues)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
# otherwise support list values only for non-dict items
|
# otherwise support list values only for non-dict items
|
||||||
@@ -160,7 +159,7 @@ class JiraJQL(BaseHTTPQueryRunner):
|
|||||||
jql_url = '{}/rest/api/2/search'.format(self.configuration["url"])
|
jql_url = '{}/rest/api/2/search'.format(self.configuration["url"])
|
||||||
|
|
||||||
try:
|
try:
|
||||||
query = json.loads(query)
|
query = json_loads(query)
|
||||||
query_type = query.pop('queryType', 'select')
|
query_type = query.pop('queryType', 'select')
|
||||||
field_mapping = FieldMapping(query.pop('fieldMapping', {}))
|
field_mapping = FieldMapping(query.pop('fieldMapping', {}))
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,4 @@
|
|||||||
from __future__ import absolute_import
|
from __future__ import absolute_import
|
||||||
import sys
|
|
||||||
import json
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import pymapd
|
import pymapd
|
||||||
|
|||||||
@@ -1,9 +1,8 @@
|
|||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from redash.query_runner import *
|
from redash.query_runner import *
|
||||||
from redash.utils import JSONEncoder
|
from redash.utils import json_dumps
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -131,7 +130,7 @@ class MemSQL(BaseSQLQueryRunner):
|
|||||||
})
|
})
|
||||||
|
|
||||||
data = {'columns': columns, 'rows': rows}
|
data = {'columns': columns, 'rows': rows}
|
||||||
json_data = json.dumps(data, cls=JSONEncoder)
|
json_data = json_dumps(data)
|
||||||
error = None
|
error = None
|
||||||
except KeyboardInterrupt:
|
except KeyboardInterrupt:
|
||||||
cursor.close()
|
cursor.close()
|
||||||
|
|||||||
@@ -1,12 +1,11 @@
|
|||||||
import datetime
|
import datetime
|
||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
import re
|
import re
|
||||||
|
|
||||||
from dateutil.parser import parse
|
from dateutil.parser import parse
|
||||||
|
|
||||||
from redash.query_runner import *
|
from redash.query_runner import *
|
||||||
from redash.utils import JSONEncoder, parse_human_time
|
from redash.utils import JSONEncoder, json_dumps, json_loads, parse_human_time
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -70,7 +69,7 @@ def datetime_parser(dct):
|
|||||||
|
|
||||||
|
|
||||||
def parse_query_json(query):
|
def parse_query_json(query):
|
||||||
query_data = json.loads(query, object_hook=datetime_parser)
|
query_data = json_loads(query, object_hook=datetime_parser)
|
||||||
return query_data
|
return query_data
|
||||||
|
|
||||||
|
|
||||||
@@ -312,7 +311,7 @@ class MongoDB(BaseQueryRunner):
|
|||||||
"rows": rows
|
"rows": rows
|
||||||
}
|
}
|
||||||
error = None
|
error = None
|
||||||
json_data = json.dumps(data, cls=MongoDBJSONEncoder)
|
json_data = json_dumps(data, cls=MongoDBJSONEncoder)
|
||||||
|
|
||||||
return json_data, error
|
return json_data, error
|
||||||
|
|
||||||
|
|||||||
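Note on the MongoDB hunk above: it is the one call site where the new helper still takes an explicit encoder. Because json_dumps forwards its keyword arguments to simplejson.dumps (see the redash/utils hunk later in this diff), a runner can keep a specialised encoder by subclassing the shared JSONEncoder and handing it in via cls=. A minimal sketch of that pattern, assuming a made-up SetFriendlyEncoder and payload (neither is taken from the MongoDB runner):

import uuid

from redash.utils import JSONEncoder, json_dumps


class SetFriendlyEncoder(JSONEncoder):
    # One extra, purely illustrative rule layered on top of the shared encoder.
    def default(self, o):
        if isinstance(o, set):
            return sorted(o)
        return super(SetFriendlyEncoder, self).default(o)


payload = {'rows': [{'id': uuid.uuid4(), 'tags': {'b', 'a'}}]}
print(json_dumps(payload, cls=SetFriendlyEncoder))  # cls= overrides the default JSONEncoder
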
@@ -1,10 +1,9 @@
|
|||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
import sys
|
import sys
|
||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
from redash.query_runner import *
|
from redash.query_runner import *
|
||||||
from redash.utils import JSONEncoder
|
from redash.utils import json_dumps, json_loads
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -26,13 +25,6 @@ types_map = {
|
|||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
class MSSQLJSONEncoder(JSONEncoder):
|
|
||||||
def default(self, o):
|
|
||||||
if isinstance(o, uuid.UUID):
|
|
||||||
return str(o)
|
|
||||||
return super(MSSQLJSONEncoder, self).default(o)
|
|
||||||
|
|
||||||
|
|
||||||
class SqlServer(BaseSQLQueryRunner):
|
class SqlServer(BaseSQLQueryRunner):
|
||||||
noop_query = "SELECT 1"
|
noop_query = "SELECT 1"
|
||||||
|
|
||||||
@@ -105,7 +97,7 @@ class SqlServer(BaseSQLQueryRunner):
|
|||||||
if error is not None:
|
if error is not None:
|
||||||
raise Exception("Failed getting schema.")
|
raise Exception("Failed getting schema.")
|
||||||
|
|
||||||
results = json.loads(results)
|
results = json_loads(results)
|
||||||
|
|
||||||
for row in results['rows']:
|
for row in results['rows']:
|
||||||
if row['table_schema'] != self.configuration['db']:
|
if row['table_schema'] != self.configuration['db']:
|
||||||
@@ -151,7 +143,7 @@ class SqlServer(BaseSQLQueryRunner):
|
|||||||
rows = [dict(zip((c['name'] for c in columns), row)) for row in data]
|
rows = [dict(zip((c['name'] for c in columns), row)) for row in data]
|
||||||
|
|
||||||
data = {'columns': columns, 'rows': rows}
|
data = {'columns': columns, 'rows': rows}
|
||||||
json_data = json.dumps(data, cls=MSSQLJSONEncoder)
|
json_data = json_dumps(data)
|
||||||
error = None
|
error = None
|
||||||
else:
|
else:
|
||||||
error = "No data was returned."
|
error = "No data was returned."
|
||||||
|
|||||||
@@ -1,11 +1,10 @@
|
|||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
import sys
|
import sys
|
||||||
import uuid
|
import uuid
|
||||||
|
|
||||||
from redash.query_runner import *
|
from redash.query_runner import *
|
||||||
from redash.utils import JSONEncoder
|
from redash.query_runner.mssql import types_map
|
||||||
from redash.query_runner.mssql import MSSQLJSONEncoder, types_map
|
from redash.utils import json_dumps, json_loads
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -88,7 +87,7 @@ class SQLServerODBC(BaseSQLQueryRunner):
|
|||||||
if error is not None:
|
if error is not None:
|
||||||
raise Exception("Failed getting schema.")
|
raise Exception("Failed getting schema.")
|
||||||
|
|
||||||
results = json.loads(results)
|
results = json_loads(results)
|
||||||
|
|
||||||
for row in results['rows']:
|
for row in results['rows']:
|
||||||
if row['table_schema'] != self.configuration['db']:
|
if row['table_schema'] != self.configuration['db']:
|
||||||
@@ -133,7 +132,7 @@ class SQLServerODBC(BaseSQLQueryRunner):
|
|||||||
rows = [dict(zip((c['name'] for c in columns), row)) for row in data]
|
rows = [dict(zip((c['name'] for c in columns), row)) for row in data]
|
||||||
|
|
||||||
data = {'columns': columns, 'rows': rows}
|
data = {'columns': columns, 'rows': rows}
|
||||||
json_data = json.dumps(data, cls=MSSQLJSONEncoder)
|
json_data = json_dumps(data)
|
||||||
error = None
|
error = None
|
||||||
else:
|
else:
|
||||||
error = "No data was returned."
|
error = "No data was returned."
|
||||||
|
|||||||
@@ -1,10 +1,9 @@
|
|||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
|
|
||||||
from redash.query_runner import *
|
from redash.query_runner import *
|
||||||
from redash.settings import parse_boolean
|
from redash.settings import parse_boolean
|
||||||
from redash.utils import JSONEncoder
|
from redash.utils import json_dumps, json_loads
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
types_map = {
|
types_map = {
|
||||||
@@ -111,7 +110,7 @@ class Mysql(BaseSQLQueryRunner):
|
|||||||
if error is not None:
|
if error is not None:
|
||||||
raise Exception("Failed getting schema.")
|
raise Exception("Failed getting schema.")
|
||||||
|
|
||||||
results = json.loads(results)
|
results = json_loads(results)
|
||||||
|
|
||||||
for row in results['rows']:
|
for row in results['rows']:
|
||||||
if row['table_schema'] != self.configuration['db']:
|
if row['table_schema'] != self.configuration['db']:
|
||||||
@@ -154,7 +153,7 @@ class Mysql(BaseSQLQueryRunner):
|
|||||||
rows = [dict(zip((c['name'] for c in columns), row)) for row in data]
|
rows = [dict(zip((c['name'] for c in columns), row)) for row in data]
|
||||||
|
|
||||||
data = {'columns': columns, 'rows': rows}
|
data = {'columns': columns, 'rows': rows}
|
||||||
json_data = json.dumps(data, cls=JSONEncoder)
|
json_data = json_dumps(data)
|
||||||
error = None
|
error = None
|
||||||
else:
|
else:
|
||||||
json_data = None
|
json_data = None
|
||||||
|
|||||||
@@ -1,9 +1,7 @@
|
|||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
import sys
|
|
||||||
|
|
||||||
|
from redash.utils import json_dumps, json_loads
|
||||||
from redash.query_runner import *
|
from redash.query_runner import *
|
||||||
from redash.utils import JSONEncoder
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import cx_Oracle
|
import cx_Oracle
|
||||||
@@ -100,7 +98,7 @@ class Oracle(BaseSQLQueryRunner):
|
|||||||
if error is not None:
|
if error is not None:
|
||||||
raise Exception("Failed getting schema.")
|
raise Exception("Failed getting schema.")
|
||||||
|
|
||||||
results = json.loads(results)
|
results = json_loads(results)
|
||||||
|
|
||||||
for row in results['rows']:
|
for row in results['rows']:
|
||||||
if row['OWNER'] != None:
|
if row['OWNER'] != None:
|
||||||
@@ -148,13 +146,13 @@ class Oracle(BaseSQLQueryRunner):
|
|||||||
rows = [dict(zip((c['name'] for c in columns), row)) for row in cursor]
|
rows = [dict(zip((c['name'] for c in columns), row)) for row in cursor]
|
||||||
data = {'columns': columns, 'rows': rows}
|
data = {'columns': columns, 'rows': rows}
|
||||||
error = None
|
error = None
|
||||||
json_data = json.dumps(data, cls=JSONEncoder)
|
json_data = json_dumps(data)
|
||||||
else:
|
else:
|
||||||
columns = [{'name': 'Row(s) Affected', 'type': 'TYPE_INTEGER'}]
|
columns = [{'name': 'Row(s) Affected', 'type': 'TYPE_INTEGER'}]
|
||||||
rows = [{'Row(s) Affected': rows_count}]
|
rows = [{'Row(s) Affected': rows_count}]
|
||||||
data = {'columns': columns, 'rows': rows}
|
data = {'columns': columns, 'rows': rows}
|
||||||
json_data = json.dumps(data, cls=JSONEncoder)
|
json_data = json_dumps(data)
|
||||||
connection.commit()
|
connection.commit()
|
||||||
except cx_Oracle.DatabaseError as err:
|
except cx_Oracle.DatabaseError as err:
|
||||||
error = u"Query failed. {}.".format(err.message)
|
error = u"Query failed. {}.".format(err.message)
|
||||||
json_data = None
|
json_data = None
|
||||||
|
|||||||
@@ -1,12 +1,11 @@
|
|||||||
import os
|
import os
|
||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
import select
|
import select
|
||||||
|
|
||||||
import psycopg2
|
import psycopg2
|
||||||
|
|
||||||
from redash.query_runner import *
|
from redash.query_runner import *
|
||||||
from redash.utils import JSONEncoder
|
from redash.utils import json_dumps, json_loads
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -92,7 +91,7 @@ class PostgreSQL(BaseSQLQueryRunner):
|
|||||||
if error is not None:
|
if error is not None:
|
||||||
raise Exception("Failed getting schema.")
|
raise Exception("Failed getting schema.")
|
||||||
|
|
||||||
results = json.loads(results)
|
results = json_loads(results)
|
||||||
|
|
||||||
for row in results['rows']:
|
for row in results['rows']:
|
||||||
if row['table_schema'] != 'public':
|
if row['table_schema'] != 'public':
|
||||||
@@ -166,7 +165,7 @@ class PostgreSQL(BaseSQLQueryRunner):
|
|||||||
|
|
||||||
data = {'columns': columns, 'rows': rows}
|
data = {'columns': columns, 'rows': rows}
|
||||||
error = None
|
error = None
|
||||||
json_data = json.dumps(data, cls=JSONEncoder)
|
json_data = json_dumps(data)
|
||||||
else:
|
else:
|
||||||
error = 'Query completed but it returned no data.'
|
error = 'Query completed but it returned no data.'
|
||||||
json_data = None
|
json_data = None
|
||||||
|
|||||||
@@ -1,7 +1,5 @@
|
|||||||
import json
|
|
||||||
|
|
||||||
from redash.utils import JSONEncoder
|
|
||||||
from redash.query_runner import *
|
from redash.query_runner import *
|
||||||
|
from redash.utils import json_dumps, json_loads
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
@@ -79,7 +77,7 @@ class Presto(BaseQueryRunner):
|
|||||||
if error is not None:
|
if error is not None:
|
||||||
raise Exception("Failed getting schema.")
|
raise Exception("Failed getting schema.")
|
||||||
|
|
||||||
results = json.loads(results)
|
results = json_loads(results)
|
||||||
|
|
||||||
for row in results['rows']:
|
for row in results['rows']:
|
||||||
table_name = '{}.{}'.format(row['table_schema'], row['table_name'])
|
table_name = '{}.{}'.format(row['table_schema'], row['table_name'])
|
||||||
@@ -108,7 +106,7 @@ class Presto(BaseQueryRunner):
|
|||||||
columns = self.fetch_columns(column_tuples)
|
columns = self.fetch_columns(column_tuples)
|
||||||
rows = [dict(zip(([c['name'] for c in columns]), r)) for i, r in enumerate(cursor.fetchall())]
|
rows = [dict(zip(([c['name'] for c in columns]), r)) for i, r in enumerate(cursor.fetchall())]
|
||||||
data = {'columns': columns, 'rows': rows}
|
data = {'columns': columns, 'rows': rows}
|
||||||
json_data = json.dumps(data, cls=JSONEncoder)
|
json_data = json_dumps(data)
|
||||||
error = None
|
error = None
|
||||||
except DatabaseError as db:
|
except DatabaseError as db:
|
||||||
json_data = None
|
json_data = None
|
||||||
|
|||||||
@@ -1,20 +1,18 @@
|
|||||||
import datetime
|
import datetime
|
||||||
import json
|
import importlib
|
||||||
import logging
|
import logging
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from redash.query_runner import *
|
from redash.query_runner import *
|
||||||
from redash.utils import json_dumps
|
from redash.utils import json_dumps, json_loads
|
||||||
from redash import models
|
from redash import models
|
||||||
|
|
||||||
import importlib
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
from RestrictedPython import compile_restricted
|
from RestrictedPython import compile_restricted
|
||||||
from RestrictedPython.Guards import safe_builtins
|
from RestrictedPython.Guards import safe_builtins
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class CustomPrint(object):
|
class CustomPrint(object):
|
||||||
"""CustomPrint redirect "print" calls to be sent as "log" on the result object."""
|
"""CustomPrint redirect "print" calls to be sent as "log" on the result object."""
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
@@ -173,8 +171,8 @@ class Python(BaseQueryRunner):
|
|||||||
if error is not None:
|
if error is not None:
|
||||||
raise Exception(error)
|
raise Exception(error)
|
||||||
|
|
||||||
# TODO: allow avoiding the json.dumps/loads in same process
|
# TODO: allow avoiding the JSON dumps/loads in same process
|
||||||
return json.loads(data)
|
return json_loads(data)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def get_source_schema(data_source_name_or_id):
|
def get_source_schema(data_source_name_or_id):
|
||||||
@@ -211,7 +209,7 @@ class Python(BaseQueryRunner):
|
|||||||
if query.latest_query_data.data is None:
|
if query.latest_query_data.data is None:
|
||||||
raise Exception("Query does not have results yet.")
|
raise Exception("Query does not have results yet.")
|
||||||
|
|
||||||
return json.loads(query.latest_query_data.data)
|
return json_loads(query.latest_query_data.data)
|
||||||
|
|
||||||
def test_connection(self):
|
def test_connection(self):
|
||||||
pass
|
pass
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
import numbers
|
import numbers
|
||||||
import re
|
import re
|
||||||
@@ -12,7 +11,7 @@ from redash.permissions import has_access, not_view_only
|
|||||||
from redash.query_runner import (TYPE_BOOLEAN, TYPE_DATETIME, TYPE_FLOAT,
|
from redash.query_runner import (TYPE_BOOLEAN, TYPE_DATETIME, TYPE_FLOAT,
|
||||||
TYPE_INTEGER, TYPE_STRING, BaseQueryRunner,
|
TYPE_INTEGER, TYPE_STRING, BaseQueryRunner,
|
||||||
register)
|
register)
|
||||||
from redash.utils import JSONEncoder
|
from redash.utils import json_dumps, json_loads
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -73,13 +72,12 @@ def get_query_results(user, query_id, bring_from_cache):
|
|||||||
results = query.latest_query_data.data
|
results = query.latest_query_data.data
|
||||||
else:
|
else:
|
||||||
raise Exception("No cached result available for query {}.".format(query.id))
|
raise Exception("No cached result available for query {}.".format(query.id))
|
||||||
|
else:
|
||||||
else:
|
|
||||||
results, error = query.data_source.query_runner.run_query(query.query_text, user)
|
results, error = query.data_source.query_runner.run_query(query.query_text, user)
|
||||||
if error:
|
if error:
|
||||||
raise Exception("Failed loading results for query id {}.".format(query.id))
|
raise Exception("Failed loading results for query id {}.".format(query.id))
|
||||||
|
|
||||||
return json.loads(results)
|
return json_loads(results)
|
||||||
|
|
||||||
|
|
||||||
def create_tables_from_query_ids(user, connection, query_ids, cached_query_ids=[]):
|
def create_tables_from_query_ids(user, connection, query_ids, cached_query_ids=[]):
|
||||||
@@ -170,7 +168,7 @@ class Results(BaseQueryRunner):
|
|||||||
|
|
||||||
data = {'columns': columns, 'rows': rows}
|
data = {'columns': columns, 'rows': rows}
|
||||||
error = None
|
error = None
|
||||||
json_data = json.dumps(data, cls=JSONEncoder)
|
json_data = json_dumps(data)
|
||||||
else:
|
else:
|
||||||
error = 'Query completed but it returned no data.'
|
error = 'Query completed but it returned no data.'
|
||||||
json_data = None
|
json_data = None
|
||||||
|
|||||||
@@ -1,5 +1,4 @@
|
|||||||
from __future__ import absolute_import
|
from __future__ import absolute_import
|
||||||
import json
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
import snowflake.connector
|
import snowflake.connector
|
||||||
@@ -10,7 +9,7 @@ except ImportError:
|
|||||||
|
|
||||||
from redash.query_runner import BaseQueryRunner, register
|
from redash.query_runner import BaseQueryRunner, register
|
||||||
from redash.query_runner import TYPE_STRING, TYPE_DATE, TYPE_DATETIME, TYPE_INTEGER, TYPE_FLOAT, TYPE_BOOLEAN
|
from redash.query_runner import TYPE_STRING, TYPE_DATE, TYPE_DATETIME, TYPE_INTEGER, TYPE_FLOAT, TYPE_BOOLEAN
|
||||||
from redash.utils import json_dumps
|
from redash.utils import json_dumps, json_loads
|
||||||
|
|
||||||
TYPES_MAP = {
|
TYPES_MAP = {
|
||||||
0: TYPE_INTEGER,
|
0: TYPE_INTEGER,
|
||||||
@@ -98,7 +97,7 @@ class Snowflake(BaseQueryRunner):
|
|||||||
raise Exception("Failed getting schema.")
|
raise Exception("Failed getting schema.")
|
||||||
|
|
||||||
schema = {}
|
schema = {}
|
||||||
results = json.loads(results)
|
results = json_loads(results)
|
||||||
|
|
||||||
for row in results['rows']:
|
for row in results['rows']:
|
||||||
table_name = '{}.{}'.format(row['TABLE_SCHEMA'], row['TABLE_NAME'])
|
table_name = '{}.{}'.format(row['TABLE_SCHEMA'], row['TABLE_NAME'])
|
||||||
|
|||||||
@@ -1,14 +1,11 @@
|
|||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
import sqlite3
|
import sqlite3
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from six import reraise
|
from six import reraise
|
||||||
|
|
||||||
from redash.query_runner import BaseSQLQueryRunner
|
from redash.query_runner import BaseSQLQueryRunner, register
|
||||||
from redash.query_runner import register
|
from redash.utils import json_dumps, json_loads
|
||||||
|
|
||||||
from redash.utils import JSONEncoder
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -47,7 +44,7 @@ class Sqlite(BaseSQLQueryRunner):
|
|||||||
if error is not None:
|
if error is not None:
|
||||||
raise Exception("Failed getting schema.")
|
raise Exception("Failed getting schema.")
|
||||||
|
|
||||||
results = json.loads(results)
|
results = json_loads(results)
|
||||||
|
|
||||||
for row in results['rows']:
|
for row in results['rows']:
|
||||||
table_name = row['tbl_name']
|
table_name = row['tbl_name']
|
||||||
@@ -56,7 +53,7 @@ class Sqlite(BaseSQLQueryRunner):
|
|||||||
if error is not None:
|
if error is not None:
|
||||||
raise Exception("Failed getting schema.")
|
raise Exception("Failed getting schema.")
|
||||||
|
|
||||||
results_table = json.loads(results_table)
|
results_table = json_loads(results_table)
|
||||||
for row_column in results_table['rows']:
|
for row_column in results_table['rows']:
|
||||||
schema[table_name]['columns'].append(row_column['name'])
|
schema[table_name]['columns'].append(row_column['name'])
|
||||||
|
|
||||||
@@ -76,7 +73,7 @@ class Sqlite(BaseSQLQueryRunner):
|
|||||||
|
|
||||||
data = {'columns': columns, 'rows': rows}
|
data = {'columns': columns, 'rows': rows}
|
||||||
error = None
|
error = None
|
||||||
json_data = json.dumps(data, cls=JSONEncoder)
|
json_data = json_dumps(data)
|
||||||
else:
|
else:
|
||||||
error = 'Query completed but it returned no data.'
|
error = 'Query completed but it returned no data.'
|
||||||
json_data = None
|
json_data = None
|
||||||
|
|||||||
@@ -1,8 +1,7 @@
|
|||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
from redash.query_runner import *
|
from redash.query_runner import *
|
||||||
from redash.utils import JSONEncoder
|
from redash.utils import json_dumps
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
@@ -113,7 +112,7 @@ class TreasureData(BaseQueryRunner):
|
|||||||
else:
|
else:
|
||||||
rows = [dict(zip(([c[0] for c in columns_data]), r)) for i, r in enumerate(cursor.fetchall())]
|
rows = [dict(zip(([c[0] for c in columns_data]), r)) for i, r in enumerate(cursor.fetchall())]
|
||||||
data = {'columns': columns, 'rows': rows}
|
data = {'columns': columns, 'rows': rows}
|
||||||
json_data = json.dumps(data, cls=JSONEncoder)
|
json_data = json_dumps(data)
|
||||||
error = None
|
error = None
|
||||||
except errors.InternalError as e:
|
except errors.InternalError as e:
|
||||||
json_data = None
|
json_data = None
|
||||||
|
|||||||
@@ -1,8 +1,7 @@
|
|||||||
import sys
|
import sys
|
||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
from redash.utils import JSONEncoder
|
from redash.utils import json_loads, json_dumps
|
||||||
from redash.query_runner import *
|
from redash.query_runner import *
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
@@ -83,7 +82,7 @@ class Vertica(BaseSQLQueryRunner):
|
|||||||
if error is not None:
|
if error is not None:
|
||||||
raise Exception("Failed getting schema.")
|
raise Exception("Failed getting schema.")
|
||||||
|
|
||||||
results = json.loads(results)
|
results = json_loads(results)
|
||||||
|
|
||||||
for row in results['rows']:
|
for row in results['rows']:
|
||||||
table_name = '{}.{}'.format(row['table_schema'], row['table_name'])
|
table_name = '{}.{}'.format(row['table_schema'], row['table_name'])
|
||||||
@@ -128,7 +127,7 @@ class Vertica(BaseSQLQueryRunner):
|
|||||||
'type': types_map.get(col[1], None)} for col in columns_data]
|
'type': types_map.get(col[1], None)} for col in columns_data]
|
||||||
|
|
||||||
data = {'columns': columns, 'rows': rows}
|
data = {'columns': columns, 'rows': rows}
|
||||||
json_data = json.dumps(data, cls=JSONEncoder)
|
json_data = json_dumps(data)
|
||||||
error = None
|
error = None
|
||||||
else:
|
else:
|
||||||
json_data = None
|
json_data = None
|
||||||
|
|||||||
@@ -1,10 +1,12 @@
|
|||||||
import json
|
|
||||||
import yaml
|
|
||||||
import logging
|
import logging
|
||||||
from redash.query_runner import *
|
import yaml
|
||||||
from redash.utils import JSONEncoder
|
|
||||||
import requests
|
|
||||||
from urlparse import parse_qs, urlparse
|
from urlparse import parse_qs, urlparse
|
||||||
|
|
||||||
|
import requests
|
||||||
|
|
||||||
|
from redash.query_runner import *
|
||||||
|
from redash.utils import json_dumps
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
COLUMN_TYPES = {
|
COLUMN_TYPES = {
|
||||||
@@ -140,7 +142,7 @@ class YandexMetrica(BaseSQLQueryRunner):
|
|||||||
return data, error
|
return data, error
|
||||||
|
|
||||||
try:
|
try:
|
||||||
data = json.dumps(parse_ym_response(self._send_query(**params)), cls=JSONEncoder)
|
data = json_dumps(parse_ym_response(self._send_query(**params)))
|
||||||
error = None
|
error = None
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
logging.exception(e)
|
logging.exception(e)
|
||||||
|
|||||||
@@ -3,19 +3,20 @@ This will eventually replace all the `to_dict` methods of the different model
|
|||||||
classes we have. This will ensure cleaner code and better
|
classes we have. This will ensure cleaner code and better
|
||||||
separation of concerns.
|
separation of concerns.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
import json
|
|
||||||
from funcy import project
|
from funcy import project
|
||||||
|
|
||||||
from flask_login import current_user
|
from flask_login import current_user
|
||||||
|
|
||||||
from redash import models
|
from redash import models
|
||||||
from redash.permissions import has_access, view_only
|
from redash.permissions import has_access, view_only
|
||||||
|
from redash.utils import json_loads
|
||||||
|
|
||||||
|
|
||||||
def public_widget(widget):
|
def public_widget(widget):
|
||||||
res = {
|
res = {
|
||||||
'id': widget.id,
|
'id': widget.id,
|
||||||
'width': widget.width,
|
'width': widget.width,
|
||||||
'options': json.loads(widget.options),
|
'options': json_loads(widget.options),
|
||||||
'text': widget.text,
|
'text': widget.text,
|
||||||
'updated_at': widget.updated_at,
|
'updated_at': widget.updated_at,
|
||||||
'created_at': widget.created_at
|
'created_at': widget.created_at
|
||||||
@@ -27,7 +28,7 @@ def public_widget(widget):
|
|||||||
'type': widget.visualization.type,
|
'type': widget.visualization.type,
|
||||||
'name': widget.visualization.name,
|
'name': widget.visualization.name,
|
||||||
'description': widget.visualization.description,
|
'description': widget.visualization.description,
|
||||||
'options': json.loads(widget.visualization.options),
|
'options': json_loads(widget.visualization.options),
|
||||||
'updated_at': widget.visualization.updated_at,
|
'updated_at': widget.visualization.updated_at,
|
||||||
'created_at': widget.visualization.created_at,
|
'created_at': widget.visualization.created_at,
|
||||||
'query': {
|
'query': {
|
||||||
@@ -65,7 +66,7 @@ class QuerySerializer(Serializer):
|
|||||||
def __init__(self, object_or_list, **kwargs):
|
def __init__(self, object_or_list, **kwargs):
|
||||||
self.object_or_list = object_or_list
|
self.object_or_list = object_or_list
|
||||||
self.options = kwargs
|
self.options = kwargs
|
||||||
|
|
||||||
def serialize(self):
|
def serialize(self):
|
||||||
if isinstance(self.object_or_list, models.Query):
|
if isinstance(self.object_or_list, models.Query):
|
||||||
result = serialize_query(self.object_or_list, **self.options)
|
result = serialize_query(self.object_or_list, **self.options)
|
||||||
@@ -77,7 +78,7 @@ class QuerySerializer(Serializer):
|
|||||||
favorite_ids = models.Favorite.are_favorites(current_user.id, self.object_or_list)
|
favorite_ids = models.Favorite.are_favorites(current_user.id, self.object_or_list)
|
||||||
for query in result:
|
for query in result:
|
||||||
query['is_favorite'] = query['id'] in favorite_ids
|
query['is_favorite'] = query['id'] in favorite_ids
|
||||||
|
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
|
||||||
@@ -132,7 +133,7 @@ def serialize_visualization(object, with_query=True):
|
|||||||
'type': object.type,
|
'type': object.type,
|
||||||
'name': object.name,
|
'name': object.name,
|
||||||
'description': object.description,
|
'description': object.description,
|
||||||
'options': json.loads(object.options),
|
'options': json_loads(object.options),
|
||||||
'updated_at': object.updated_at,
|
'updated_at': object.updated_at,
|
||||||
'created_at': object.created_at
|
'created_at': object.created_at
|
||||||
}
|
}
|
||||||
@@ -147,7 +148,7 @@ def serialize_widget(object):
|
|||||||
d = {
|
d = {
|
||||||
'id': object.id,
|
'id': object.id,
|
||||||
'width': object.width,
|
'width': object.width,
|
||||||
'options': json.loads(object.options),
|
'options': json_loads(object.options),
|
||||||
'dashboard_id': object.dashboard_id,
|
'dashboard_id': object.dashboard_id,
|
||||||
'text': object.text,
|
'text': object.text,
|
||||||
'updated_at': object.updated_at,
|
'updated_at': object.updated_at,
|
||||||
@@ -181,8 +182,9 @@ def serialize_alert(alert, full=True):
|
|||||||
|
|
||||||
return d
|
return d
|
||||||
|
|
||||||
|
|
||||||
def serialize_dashboard(obj, with_widgets=False, user=None, with_favorite_state=True):
|
def serialize_dashboard(obj, with_widgets=False, user=None, with_favorite_state=True):
|
||||||
layout = json.loads(obj.layout)
|
layout = json_loads(obj.layout)
|
||||||
|
|
||||||
widgets = []
|
widgets = []
|
||||||
|
|
||||||
|
|||||||
@@ -1,4 +1,3 @@
|
|||||||
import json
|
|
||||||
import os
|
import os
|
||||||
|
|
||||||
|
|
||||||
@@ -19,8 +18,15 @@ def set_from_string(s):
|
|||||||
return set(array_from_string(s))
|
return set(array_from_string(s))
|
||||||
|
|
||||||
|
|
||||||
def parse_boolean(str):
|
def parse_boolean(s):
|
||||||
return json.loads(str.lower())
|
"""Takes a string and returns the equivalent as a boolean value."""
|
||||||
|
s = s.strip().lower()
|
||||||
|
if s in ('yes', 'true', 'on', '1'):
|
||||||
|
return True
|
||||||
|
elif s in ('no', 'false', 'off', '0', 'none'):
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
raise ValueError('Invalid boolean value %r' % s)
|
||||||
|
|
||||||
|
|
||||||
def int_or_none(value):
|
def int_or_none(value):
|
||||||
|
|||||||
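The parse_boolean rewrite above drops the JSON-based trick entirely: the input is stripped and lower-cased, compared against fixed truthy and falsy words, and anything else raises ValueError instead of being mis-parsed. Restated as a short usage sketch (the calls only exercise inputs named in the hunk; the import path matches the one used by the MySQL runner above):

from redash.settings import parse_boolean

assert parse_boolean('true') is True
assert parse_boolean(' YES ') is True    # whitespace and case are normalised away
assert parse_boolean('off') is False
assert parse_boolean('none') is False

try:
    parse_boolean('maybe')
except ValueError as exc:
    print(exc)  # Invalid boolean value 'maybe'
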
@@ -1,20 +1,19 @@
|
|||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
import signal
|
import signal
|
||||||
import time
|
import time
|
||||||
|
|
||||||
import pystache
|
import pystache
|
||||||
import redis
|
import redis
|
||||||
|
|
||||||
from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded
|
from celery.exceptions import SoftTimeLimitExceeded, TimeLimitExceeded
|
||||||
from celery.result import AsyncResult
|
from celery.result import AsyncResult
|
||||||
from celery.utils.log import get_task_logger
|
from celery.utils.log import get_task_logger
|
||||||
from six import text_type
|
from six import text_type
|
||||||
from redash import models, redis_connection, settings, statsd_client, utils
|
|
||||||
|
from redash import models, redis_connection, settings, statsd_client
|
||||||
from redash.query_runner import InterruptException
|
from redash.query_runner import InterruptException
|
||||||
from redash.utils import gen_query_hash
|
|
||||||
from redash.worker import celery
|
|
||||||
from redash.tasks.alerts import check_alerts_for_query
|
from redash.tasks.alerts import check_alerts_for_query
|
||||||
|
from redash.utils import gen_query_hash, json_dumps, json_loads, utcnow
|
||||||
|
from redash.worker import celery
|
||||||
|
|
||||||
logger = get_task_logger(__name__)
|
logger = get_task_logger(__name__)
|
||||||
|
|
||||||
@@ -60,7 +59,7 @@ class QueryTaskTracker(object):
|
|||||||
|
|
||||||
self.data['updated_at'] = time.time()
|
self.data['updated_at'] = time.time()
|
||||||
key_name = self._key_name(self.data['task_id'])
|
key_name = self._key_name(self.data['task_id'])
|
||||||
connection.set(key_name, utils.json_dumps(self.data))
|
connection.set(key_name, json_dumps(self.data))
|
||||||
connection.zadd(self._get_list(), time.time(), key_name)
|
connection.zadd(self._get_list(), time.time(), key_name)
|
||||||
|
|
||||||
for l in self.ALL_LISTS:
|
for l in self.ALL_LISTS:
|
||||||
@@ -97,7 +96,7 @@ class QueryTaskTracker(object):
|
|||||||
@classmethod
|
@classmethod
|
||||||
def create_from_data(cls, data):
|
def create_from_data(cls, data):
|
||||||
if data:
|
if data:
|
||||||
data = json.loads(data)
|
data = json_loads(data)
|
||||||
return cls(data)
|
return cls(data)
|
||||||
|
|
||||||
return None
|
return None
|
||||||
@@ -307,7 +306,7 @@ def refresh_queries():
|
|||||||
redis_connection.hmset('redash:status', {
|
redis_connection.hmset('redash:status', {
|
||||||
'outdated_queries_count': outdated_queries_count,
|
'outdated_queries_count': outdated_queries_count,
|
||||||
'last_refresh_at': now,
|
'last_refresh_at': now,
|
||||||
'query_ids': json.dumps(query_ids)
|
'query_ids': json_dumps(query_ids)
|
||||||
})
|
})
|
||||||
|
|
||||||
statsd_client.gauge('manager.seconds_since_refresh', now - float(status.get('last_refresh_at', now)))
|
statsd_client.gauge('manager.seconds_since_refresh', now - float(status.get('last_refresh_at', now)))
|
||||||
@@ -483,7 +482,7 @@ class QueryExecutor(object):
|
|||||||
query_result, updated_query_ids = models.QueryResult.store_result(
|
query_result, updated_query_ids = models.QueryResult.store_result(
|
||||||
self.data_source.org_id, self.data_source,
|
self.data_source.org_id, self.data_source,
|
||||||
self.query_hash, self.query, data,
|
self.query_hash, self.query, data,
|
||||||
run_time, utils.utcnow())
|
run_time, utcnow())
|
||||||
models.db.session.commit() # make sure that alert sees the latest query result
|
models.db.session.commit() # make sure that alert sees the latest query result
|
||||||
self._log_progress('checking_alerts')
|
self._log_progress('checking_alerts')
|
||||||
for query_id in updated_query_ids:
|
for query_id in updated_query_ids:
|
||||||
|
|||||||
@@ -1,17 +1,17 @@
|
|||||||
import cStringIO
|
import cStringIO
|
||||||
import csv
|
import csv
|
||||||
import codecs
|
import codecs
|
||||||
import decimal
|
|
||||||
import datetime
|
import datetime
|
||||||
import json
|
import decimal
|
||||||
|
import hashlib
|
||||||
|
import os
|
||||||
import random
|
import random
|
||||||
import re
|
import re
|
||||||
import hashlib
|
import uuid
|
||||||
import pytz
|
|
||||||
import pystache
|
|
||||||
import os
|
|
||||||
import simplejson
|
|
||||||
|
|
||||||
|
import pystache
|
||||||
|
import pytz
|
||||||
|
import simplejson
|
||||||
from funcy import distinct, select_values
|
from funcy import distinct, select_values
|
||||||
from six import string_types
|
from six import string_types
|
||||||
from sqlalchemy.orm.query import Query
|
from sqlalchemy.orm.query import Query
|
||||||
@@ -68,47 +68,34 @@ def generate_token(length):
|
|||||||
return ''.join(rand.choice(chars) for x in range(length))
|
return ''.join(rand.choice(chars) for x in range(length))
|
||||||
|
|
||||||
|
|
||||||
class JSONEncoderMixin:
|
class JSONEncoder(simplejson.JSONEncoder):
|
||||||
"""Custom JSON encoding class, to handle Decimal and datetime.date instances."""
|
|
||||||
|
|
||||||
def process_default(self, o):
|
|
||||||
# Some SQLAlchemy collections are lazy.
|
|
||||||
if isinstance(o, Query):
|
|
||||||
return True, list(o)
|
|
||||||
if isinstance(o, decimal.Decimal):
|
|
||||||
return True, float(o)
|
|
||||||
|
|
||||||
if isinstance(o, (datetime.date, datetime.time)):
|
|
||||||
return True, o.isoformat()
|
|
||||||
|
|
||||||
if isinstance(o, datetime.timedelta):
|
|
||||||
return True, str(o)
|
|
||||||
|
|
||||||
return False, None # default processing
|
|
||||||
|
|
||||||
|
|
||||||
class JSONEncoder(JSONEncoderMixin, json.JSONEncoder):
|
|
||||||
"""Adapter for `json.dumps`."""
|
|
||||||
|
|
||||||
def default(self, o):
|
|
||||||
processed, result = self.process_default(o)
|
|
||||||
if not processed:
|
|
||||||
result = super(JSONEncoder, self).default(o)
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
class SimpleJSONEncoder(JSONEncoderMixin, simplejson.JSONEncoder):
|
|
||||||
"""Adapter for `simplejson.dumps`."""
|
"""Adapter for `simplejson.dumps`."""
|
||||||
|
|
||||||
def default(self, o):
|
def default(self, o):
|
||||||
processed, result = self.process_default(o)
|
# Some SQLAlchemy collections are lazy.
|
||||||
if not processed:
|
if isinstance(o, Query):
|
||||||
result = super(SimpleJSONEncoder, self).default(o)
|
return list(o)
|
||||||
return result
|
elif isinstance(o, decimal.Decimal):
|
||||||
|
return float(o)
|
||||||
|
elif isinstance(o, (datetime.timedelta, uuid.UUID)):
|
||||||
|
return str(o)
|
||||||
|
elif isinstance(o, (datetime.date, datetime.time)):
|
||||||
|
return o.isoformat()
|
||||||
|
else:
|
||||||
|
return super(JSONEncoder, self).default(o)
|
||||||
|
|
||||||
|
|
||||||
def json_dumps(data):
|
def json_loads(data, *args, **kwargs):
|
||||||
return json.dumps(data, cls=JSONEncoder)
|
"""A custom JSON loading function which passes all parameters to the
|
||||||
|
simplejson.loads function."""
|
||||||
|
return simplejson.loads(data, *args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
def json_dumps(data, *args, **kwargs):
|
||||||
|
"""A custom JSON dumping function which passes all parameters to the
|
||||||
|
simplejson.dumps function."""
|
||||||
|
kwargs.setdefault('cls', JSONEncoder)
|
||||||
|
return simplejson.dumps(data, *args, **kwargs)
|
||||||
|
|
||||||
|
|
||||||
def build_url(request, host, path):
|
def build_url(request, host, path):
|
||||||
|
|||||||
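Taken together, the redash/utils changes above leave a single pair of entry points: json_dumps wraps simplejson.dumps with the consolidated JSONEncoder as the default cls, and json_loads is a thin pass-through to simplejson.loads, with both forwarding extra positional and keyword arguments. A small round-trip sketch of what the consolidated encoder now covers (the sample record is illustrative):

import datetime
import decimal
import uuid

from redash.utils import json_dumps, json_loads

record = {
    'price': decimal.Decimal('19.90'),           # Decimals serialise as plain numbers
    'shipped_on': datetime.date(2018, 7, 1),     # dates and times use isoformat()
    'duration': datetime.timedelta(minutes=5),   # timedeltas become their str() form
    'token': uuid.uuid4(),                       # UUIDs are handled by the default encoder now
}

serialized = json_dumps(record, sort_keys=True)  # extra kwargs reach simplejson.dumps
assert json_loads(serialized)['price'] == 19.9
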
@@ -1,9 +1,9 @@
|
|||||||
import json
|
|
||||||
import jsonschema
|
import jsonschema
|
||||||
from jsonschema import ValidationError
|
from jsonschema import ValidationError
|
||||||
|
|
||||||
from sqlalchemy.ext.mutable import Mutable
|
from sqlalchemy.ext.mutable import Mutable
|
||||||
|
|
||||||
|
from redash.utils import json_dumps, json_loads
|
||||||
|
|
||||||
SECRET_PLACEHOLDER = '--------'
|
SECRET_PLACEHOLDER = '--------'
|
||||||
|
|
||||||
|
|
||||||
@@ -45,7 +45,7 @@ class ConfigurationContainer(Mutable):
|
|||||||
jsonschema.validate(self._config, self._schema)
|
jsonschema.validate(self._config, self._schema)
|
||||||
|
|
||||||
def to_json(self):
|
def to_json(self):
|
||||||
return json.dumps(self._config, sort_keys=True)
|
return json_dumps(self._config, sort_keys=True)
|
||||||
|
|
||||||
def iteritems(self):
|
def iteritems(self):
|
||||||
return self._config.iteritems()
|
return self._config.iteritems()
|
||||||
@@ -92,4 +92,4 @@ class ConfigurationContainer(Mutable):
|
|||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def from_json(cls, config_in_json):
|
def from_json(cls, config_in_json):
|
||||||
return cls(json.loads(config_in_json))
|
return cls(json_loads(config_in_json))
|
||||||
|
|||||||
@@ -8,5 +8,3 @@ cal = parsedatetime.Calendar()
|
|||||||
def parse_human_time(s):
|
def parse_human_time(s):
|
||||||
time_struct, _ = cal.parse(s)
|
time_struct, _ = cal.parse(s)
|
||||||
return datetime.fromtimestamp(mktime(time_struct))
|
return datetime.fromtimestamp(mktime(time_struct))
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,5 @@
|
|||||||
import os
|
import os
|
||||||
import datetime
|
import datetime
|
||||||
import json
|
|
||||||
import logging
|
import logging
|
||||||
from unittest import TestCase
|
from unittest import TestCase
|
||||||
from contextlib import contextmanager
|
from contextlib import contextmanager
|
||||||
@@ -17,7 +16,7 @@ os.environ['REDASH_MULTI_ORG'] = "true"
|
|||||||
from redash import create_app
|
from redash import create_app
|
||||||
from redash import redis_connection
|
from redash import redis_connection
|
||||||
from redash.models import db
|
from redash.models import db
|
||||||
from redash.utils import json_dumps
|
from redash.utils import json_dumps, json_loads
|
||||||
from tests.factories import Factory, user_factory
|
from tests.factories import Factory, user_factory
|
||||||
|
|
||||||
|
|
||||||
@@ -94,7 +93,7 @@ class BaseTestCase(TestCase):
|
|||||||
)
|
)
|
||||||
|
|
||||||
if response.data and is_json:
|
if response.data and is_json:
|
||||||
response.json = json.loads(response.data)
|
response.json = json_loads(response.data)
|
||||||
|
|
||||||
return response
|
return response
|
||||||
|
|
||||||
@@ -112,7 +111,8 @@ class BaseTestCase(TestCase):
|
|||||||
|
|
||||||
def assertResponseEqual(self, expected, actual):
|
def assertResponseEqual(self, expected, actual):
|
||||||
for k, v in expected.iteritems():
|
for k, v in expected.iteritems():
|
||||||
if isinstance(v, datetime.datetime) or isinstance(actual[k], datetime.datetime):
|
if isinstance(v, datetime.datetime) or isinstance(actual[k],
|
||||||
|
datetime.datetime):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
if isinstance(v, list):
|
if isinstance(v, list):
|
||||||
|
|||||||
@@ -1,8 +1,9 @@
|
|||||||
import json
|
|
||||||
from tests import BaseTestCase
|
from tests import BaseTestCase
|
||||||
|
|
||||||
from redash.models import ApiKey, Dashboard, AccessPermission, db
|
from redash.models import ApiKey, Dashboard, AccessPermission, db
|
||||||
from redash.permissions import ACCESS_TYPE_MODIFY
|
from redash.permissions import ACCESS_TYPE_MODIFY
|
||||||
from redash.serializers import serialize_dashboard
|
from redash.serializers import serialize_dashboard
|
||||||
|
from redash.utils import json_loads
|
||||||
|
|
||||||
|
|
||||||
class TestDashboardListResource(BaseTestCase):
|
class TestDashboardListResource(BaseTestCase):
|
||||||
@@ -25,7 +26,7 @@ class TestDashboardListGetResource(BaseTestCase):
|
|||||||
|
|
||||||
assert len(rv.json['results']) == 3
|
assert len(rv.json['results']) == 3
|
||||||
assert set(map(lambda d: d['id'], rv.json['results'])) == set([d1.id, d2.id, d3.id])
|
assert set(map(lambda d: d['id'], rv.json['results'])) == set([d1.id, d2.id, d3.id])
|
||||||
|
|
||||||
def test_filters_with_tags(self):
|
def test_filters_with_tags(self):
|
||||||
d1 = self.factory.create_dashboard(tags=[u'test'])
|
d1 = self.factory.create_dashboard(tags=[u'test'])
|
||||||
d2 = self.factory.create_dashboard()
|
d2 = self.factory.create_dashboard()
|
||||||
@@ -34,7 +35,7 @@ class TestDashboardListGetResource(BaseTestCase):
|
|||||||
rv = self.make_request('get', '/api/dashboards?tags=test')
|
rv = self.make_request('get', '/api/dashboards?tags=test')
|
||||||
assert len(rv.json['results']) == 1
|
assert len(rv.json['results']) == 1
|
||||||
assert set(map(lambda d: d['id'], rv.json['results'])) == set([d1.id])
|
assert set(map(lambda d: d['id'], rv.json['results'])) == set([d1.id])
|
||||||
|
|
||||||
def test_search_term(self):
|
def test_search_term(self):
|
||||||
d1 = self.factory.create_dashboard(name="Sales")
|
d1 = self.factory.create_dashboard(name="Sales")
|
||||||
d2 = self.factory.create_dashboard(name="Q1 sales")
|
d2 = self.factory.create_dashboard(name="Q1 sales")
|
||||||
@@ -52,7 +53,7 @@ class TestDashboardResourceGet(BaseTestCase):
|
|||||||
self.assertEquals(rv.status_code, 200)
|
self.assertEquals(rv.status_code, 200)
|
||||||
|
|
||||||
expected = serialize_dashboard(d1, with_widgets=True, with_favorite_state=False)
|
expected = serialize_dashboard(d1, with_widgets=True, with_favorite_state=False)
|
||||||
actual = json.loads(rv.data)
|
actual = json_loads(rv.data)
|
||||||
|
|
||||||
self.assertResponseEqual(expected, actual)
|
self.assertResponseEqual(expected, actual)
|
||||||
|
|
||||||
|
|||||||
@@ -1,6 +1,7 @@
|
|||||||
import json
|
|
||||||
from tests import BaseTestCase
|
from tests import BaseTestCase
|
||||||
|
|
||||||
from redash.models import db
|
from redash.models import db
|
||||||
|
from redash.utils import json_dumps
|
||||||
|
|
||||||
|
|
||||||
class TestQueryResultsCacheHeaders(BaseTestCase):
|
class TestQueryResultsCacheHeaders(BaseTestCase):
|
||||||
@@ -162,7 +163,17 @@ class TestQueryResultExcelResponse(BaseTestCase):
|
|||||||
|
|
||||||
def test_renders_excel_file_when_rows_have_missing_columns(self):
|
def test_renders_excel_file_when_rows_have_missing_columns(self):
|
||||||
query = self.factory.create_query()
|
query = self.factory.create_query()
|
||||||
query_result = self.factory.create_query_result(data=json.dumps({'rows': [{'test': 1}, {'test': 2, 'test2': 3}], 'columns': [{'name': 'test'}, {'name': 'test2'}]}))
|
data = {
|
||||||
|
'rows': [
|
||||||
|
{'test': 1},
|
||||||
|
{'test': 2, 'test2': 3},
|
||||||
|
],
|
||||||
|
'columns': [
|
||||||
|
{'name': 'test'},
|
||||||
|
{'name': 'test2'},
|
||||||
|
],
|
||||||
|
}
|
||||||
|
query_result = self.factory.create_query_result(data=json_dumps(data))
|
||||||
|
|
||||||
rv = self.make_request('get', '/api/queries/{}/results/{}.xlsx'.format(query.id, query_result.id), is_json=False)
|
rv = self.make_request('get', '/api/queries/{}/results/{}.xlsx'.format(query.id, query_result.id), is_json=False)
|
||||||
self.assertEquals(rv.status_code, 200)
|
self.assertEquals(rv.status_code, 200)
|
||||||
|
|||||||
@@ -1,10 +1,10 @@
|
|||||||
import datetime
|
import datetime
|
||||||
import json
|
|
||||||
from unittest import TestCase
|
from unittest import TestCase
|
||||||
from pytz import utc
|
|
||||||
from redash.query_runner.mongodb import parse_query_json, parse_results, _get_column_by_name
|
|
||||||
|
|
||||||
from redash.utils import parse_human_time
|
from pytz import utc
|
||||||
|
|
||||||
|
from redash.query_runner.mongodb import parse_query_json, parse_results, _get_column_by_name
|
||||||
|
from redash.utils import json_dumps, parse_human_time
|
||||||
|
|
||||||
|
|
||||||
class TestParseQueryJson(TestCase):
|
class TestParseQueryJson(TestCase):
|
||||||
@@ -18,7 +18,7 @@ class TestParseQueryJson(TestCase):
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
query_data = parse_query_json(json.dumps(query))
|
query_data = parse_query_json(json_dumps(query))
|
||||||
self.assertDictEqual(query_data, query)
|
self.assertDictEqual(query_data, query)
|
||||||
|
|
||||||
def test_parses_isodate_fields(self):
|
def test_parses_isodate_fields(self):
|
||||||
@@ -32,7 +32,7 @@ class TestParseQueryJson(TestCase):
|
|||||||
'testIsoDate': "ISODate(\"2014-10-03T00:00\")"
|
'testIsoDate': "ISODate(\"2014-10-03T00:00\")"
|
||||||
}
|
}
|
||||||
|
|
||||||
query_data = parse_query_json(json.dumps(query))
|
query_data = parse_query_json(json_dumps(query))
|
||||||
|
|
||||||
self.assertEqual(query_data['testIsoDate'], datetime.datetime(2014, 10, 3, 0, 0))
|
self.assertEqual(query_data['testIsoDate'], datetime.datetime(2014, 10, 3, 0, 0))
|
||||||
|
|
||||||
@@ -49,7 +49,7 @@ class TestParseQueryJson(TestCase):
|
|||||||
'testIsoDate': "ISODate(\"2014-10-03T00:00\")"
|
'testIsoDate': "ISODate(\"2014-10-03T00:00\")"
|
||||||
}
|
}
|
||||||
|
|
||||||
query_data = parse_query_json(json.dumps(query))
|
query_data = parse_query_json(json_dumps(query))
|
||||||
|
|
||||||
self.assertEqual(query_data['testIsoDate'], datetime.datetime(2014, 10, 3, 0, 0))
|
self.assertEqual(query_data['testIsoDate'], datetime.datetime(2014, 10, 3, 0, 0))
|
||||||
self.assertEqual(query_data['test_dict']['b']['date'], datetime.datetime(2014, 10, 4, 0, 0))
|
self.assertEqual(query_data['test_dict']['b']['date'], datetime.datetime(2014, 10, 4, 0, 0))
|
||||||
@@ -71,7 +71,7 @@ class TestParseQueryJson(TestCase):
|
|||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
query_data = parse_query_json(json.dumps(query))
|
query_data = parse_query_json(json_dumps(query))
|
||||||
|
|
||||||
self.assertDictEqual(query, query_data)
|
self.assertDictEqual(query, query_data)
|
||||||
|
|
||||||
@@ -91,7 +91,7 @@ class TestParseQueryJson(TestCase):
|
|||||||
'$undefined': None
|
'$undefined': None
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
query_data = parse_query_json(json.dumps(query))
|
query_data = parse_query_json(json_dumps(query))
|
||||||
self.assertEqual(query_data['test$undefined'], None)
|
self.assertEqual(query_data['test$undefined'], None)
|
||||||
self.assertEqual(query_data['test$date'], datetime.datetime(2014, 10, 3, 0, 0).replace(tzinfo=utc))
|
self.assertEqual(query_data['test$date'], datetime.datetime(2014, 10, 3, 0, 0).replace(tzinfo=utc))
|
||||||
|
|
||||||
@@ -101,7 +101,7 @@ class TestParseQueryJson(TestCase):
|
|||||||
}
|
}
|
||||||
|
|
||||||
one_hour_ago = parse_human_time("1 hour ago")
|
one_hour_ago = parse_human_time("1 hour ago")
|
||||||
query_data = parse_query_json(json.dumps(query))
|
query_data = parse_query_json(json_dumps(query))
|
||||||
self.assertEqual(query_data['ts'], one_hour_ago)
|
self.assertEqual(query_data['ts'], one_hour_ago)
|
||||||
|
|
||||||
|
|
||||||
@@ -119,7 +119,7 @@ class TestMongoResults(TestCase):
|
|||||||
self.assertIsNotNone(_get_column_by_name(columns, 'column'))
|
self.assertIsNotNone(_get_column_by_name(columns, 'column'))
|
||||||
self.assertIsNotNone(_get_column_by_name(columns, 'column2'))
|
self.assertIsNotNone(_get_column_by_name(columns, 'column2'))
|
||||||
self.assertIsNotNone(_get_column_by_name(columns, 'column3'))
|
self.assertIsNotNone(_get_column_by_name(columns, 'column3'))
|
||||||
|
|
||||||
def test_parses_nested_results(self):
|
def test_parses_nested_results(self):
|
||||||
raw_results = [
|
raw_results = [
|
||||||
{'column': 1, 'column2': 'test', 'nested': {
|
{'column': 1, 'column2': 'test', 'nested': {
|
||||||
@@ -143,4 +143,4 @@ class TestMongoResults(TestCase):
|
|||||||
self.assertIsNotNone(_get_column_by_name(columns, 'column3'))
|
self.assertIsNotNone(_get_column_by_name(columns, 'column3'))
|
||||||
self.assertIsNotNone(_get_column_by_name(columns, 'nested.a'))
|
self.assertIsNotNone(_get_column_by_name(columns, 'nested.a'))
|
||||||
self.assertIsNotNone(_get_column_by_name(columns, 'nested.b'))
|
self.assertIsNotNone(_get_column_by_name(columns, 'nested.b'))
|
||||||
self.assertIsNotNone(_get_column_by_name(columns, 'nested.c'))
|
self.assertIsNotNone(_get_column_by_name(columns, 'nested.c'))
|
||||||
|
|||||||
@@ -1,6 +1,6 @@
|
|||||||
import datetime
|
import datetime
|
||||||
import json
|
|
||||||
from unittest import TestCase
|
from unittest import TestCase
|
||||||
|
|
||||||
from redash.query_runner.prometheus import get_instant_rows, get_range_rows
|
from redash.query_runner.prometheus import get_instant_rows, get_range_rows
|
||||||
|
|
||||||
|
|
||||||
|
|||||||