Compare commits

...

16 Commits

Author SHA1 Message Date
Arik Fraimovich
150fc6dbf0 Fix repo name 2015-04-06 12:39:30 +03:00
Arik Fraimovich
ccff9614d4 New release process 2015-04-06 12:17:47 +03:00
Arik Fraimovich
a7b881874f Remove unused script 2015-04-06 09:38:01 +03:00
Arik Fraimovich
9fb33cf746 Merge pull request #399 from EverythingMe/feature/schema
Feature: schema browser and simple autocomplete
2015-04-02 17:10:07 +03:00
Arik Fraimovich
e3c5da5bc5 Fix tests to use correct data 2015-04-02 17:05:16 +03:00
Arik Fraimovich
e675690cc6 Sort schema by name 2015-04-02 16:56:00 +03:00
Arik Fraimovich
edc1622cf5 Schema support for MySQL 2015-04-02 16:55:52 +03:00
Arik Fraimovich
5ab3d4a40d Basic autocomplete functionality 2015-04-02 16:12:33 +03:00
Arik Fraimovich
cb29d87b63 Improve formatting of schema browser 2015-04-02 15:40:43 +03:00
Arik Fraimovich
6ff6bdad9f Use the correct redis connection in tests 2015-04-02 11:25:42 +03:00
Arik Fraimovich
e3cc3ef9a4 Move schema fetching to DataSource + tests 2015-04-02 11:25:42 +03:00
Arik Fraimovich
1fe4f291f2 Flush test redis db after each test 2015-04-02 11:25:22 +03:00
Arik Fraimovich
a54119f4a2 Show schema along side the query 2015-04-02 11:25:22 +03:00
Arik Fraimovich
c5b7fe5321 Use codemirror directly without ui-codemirror 2015-04-02 11:24:47 +03:00
Arik Fraimovich
d487ec9153 Upgrade codemirror to latest version 2015-04-02 11:24:18 +03:00
Arik Fraimovich
fa19b1ddc8 Endpoint to return data source schema 2015-04-02 11:23:52 +03:00
24 changed files with 428 additions and 113 deletions

View File

@@ -1,6 +1,7 @@
NAME=redash
VERSION=`python ./manage.py version`
FULL_VERSION=$(VERSION)+b$(CIRCLE_BUILD_NUM)
BASE_VERSION=$(shell python ./manage.py version | cut -d + -f 1)
# VERSION gets evaluated every time it's referenced, therefore we need to use VERSION here instead of FULL_VERSION.
FILENAME=$(CIRCLE_ARTIFACTS)/$(NAME).$(VERSION).tar.gz
@@ -15,7 +16,7 @@ pack:
tar -zcv -f $(FILENAME) --exclude=".git*" --exclude="*.pyc" --exclude="*.pyo" --exclude="venv" --exclude="rd_ui/node_modules" --exclude="rd_ui/dist/bower_components" --exclude="rd_ui/app" *
upload:
python bin/upload_version.py $(VERSION) $(FILENAME)
python bin/release_manager.py $(CIRCLE_SHA1) $(BASE_VERSION) $(FILENAME)
test:
nosetests --with-coverage --cover-package=redash tests/*.py

View File

@@ -1,30 +0,0 @@
#!/usr/bin/env python
import sys
import requests
if __name__ == '__main__':
response = requests.get('https://api.github.com/repos/EverythingMe/redash/releases')
if response.status_code != 200:
exit("Failed getting releases (status code: %s)." % response.status_code)
sorted_releases = sorted(response.json(), key=lambda release: release['id'], reverse=True)
latest_release = sorted_releases[0]
asset_url = latest_release['assets'][0]['url']
filename = latest_release['assets'][0]['name']
wget_command = 'wget --header="Accept: application/octet-stream" %s -O %s' % (asset_url, filename)
if '--url-only' in sys.argv:
print asset_url
elif '--wget' in sys.argv:
print wget_command
else:
print "Latest release: %s" % latest_release['tag_name']
print latest_release['body']
print "\nTarball URL: %s" % asset_url
print 'wget: %s' % (wget_command)

130
bin/release_manager.py Normal file
View File

@@ -0,0 +1,130 @@
import os
import sys
import json
import re
import subprocess
import requests
# GitHub configuration read once at import time. GITHUB_TOKEN must be set in
# the environment; a missing token raises KeyError immediately, which is the
# desired fail-fast behavior for a CI release script.
github_token = os.environ['GITHUB_TOKEN']
# Token-based basic auth per GitHub's "x-oauth-basic" convention.
auth = (github_token, 'x-oauth-basic')
repo = 'EverythingMe/redash'
def _github_request(method, path, params=None, headers=None):
    """Issue an authenticated request against the GitHub API.

    :param method: HTTP verb ('get', 'post', 'patch', 'delete', ...).
    :param path: either a path relative to the API root, or a full URL
        (GitHub embeds absolute URLs, e.g. asset URLs, in its payloads).
    :param params: optional dict; serialized to JSON and sent as the body.
    :param headers: optional dict of extra request headers.
    :returns: the `requests` response object (status not checked here).
    """
    if not path.startswith('https://api.github.com'):
        url = "https://api.github.com/{}".format(path)
    else:
        url = path

    if params is not None:
        params = json.dumps(params)

    # Bug fix: `headers` was previously accepted but silently ignored.
    # Also replaced the mutable default argument `headers={}` with None.
    response = requests.request(method, url, data=params, auth=auth,
                                headers=headers or {})
    return response
def exception_from_error(message, response):
    """Build an Exception describing a failed GitHub API response."""
    detail = response.json().get('message', '?')
    text = "({}) {}: {}".format(response.status_code, message, detail)
    return Exception(text)
def rc_tag_name(version):
    """Return the git tag name used for the RC release of *version*."""
    return "v{0}-rc".format(version)
def get_rc_release(version):
    """Look up the existing RC release for *version*.

    Returns the release payload dict, or None when no RC release exists
    yet (GitHub answers 404). Any other status raises.
    """
    tag = rc_tag_name(version)
    response = _github_request('get', 'repos/{}/releases/tags/{}'.format(repo, tag))

    if response.status_code == 200:
        return response.json()
    if response.status_code == 404:
        return None

    raise exception_from_error("Unknown error while looking RC release: ", response)
def create_release(version, commit_sha):
    """Create a new prerelease on GitHub acting as the RC for *version*.

    Returns the created release payload; raises on any non-201 response.
    """
    payload = {
        'tag_name': rc_tag_name(version),
        'name': "{} - RC".format(version),
        'target_commitish': commit_sha,
        'prerelease': True
    }

    response = _github_request('post', 'repos/{}/releases'.format(repo), payload)

    if response.status_code != 201:
        raise exception_from_error("Failed creating new release", response)

    return response.json()
def upload_asset(release, filepath):
    """Upload the build tarball at *filepath* as an asset of *release*.

    :param release: release payload dict (must contain 'upload_url').
    :param filepath: path to the gzipped tarball to upload.
    :returns: the upload response; raises on any non-201 status.
    """
    # GitHub's upload_url is a URI template; strip the {?name} placeholder.
    upload_url = release['upload_url'].replace('{?name}', '')
    filename = filepath.split('/')[-1]

    # Bug fix: open in binary mode -- the asset is a gzip tarball, and
    # text mode corrupts binary content on newline-translating platforms.
    with open(filepath, 'rb') as file_content:
        headers = {'Content-Type': 'application/gzip'}
        # NOTE(review): verify=False disables TLS certificate verification
        # for the upload host -- confirm this is actually required.
        response = requests.post(upload_url, file_content, params={'name': filename},
                                 headers=headers, auth=auth, verify=False)

    if response.status_code != 201:  # not 200/201/...
        raise exception_from_error('Failed uploading asset', response)

    return response
def remove_previous_builds(release):
    """Delete every asset currently attached to *release*."""
    for asset in release['assets']:
        delete_response = _github_request('delete', asset['url'])
        if delete_response.status_code != 204:
            raise exception_from_error("Failed deleting asset", delete_response)
def get_changelog(commit_sha):
    """Build a changelog of pull requests merged since the latest release.

    Walks `git log --merges` between the latest GitHub release's target
    commit and *commit_sha*, resolving each merge's author and PR number.

    :returns: a newline-joined changelog string.
    :raises Exception: when the latest release cannot be fetched.
    """
    latest_release = _github_request('get', 'repos/{}/releases/latest'.format(repo))
    if latest_release.status_code != 200:
        raise exception_from_error('Failed getting latest release', latest_release)

    latest_release = latest_release.json()
    previous_sha = latest_release['target_commitish']

    args = ['git', '--no-pager', 'log', '--merges', '--grep', 'Merge pull request',
            '--pretty=format:"%h|%s|%b|%p"', '{}...{}'.format(previous_sha, commit_sha)]
    log = subprocess.check_output(args)

    changes = ["Changes since {}:".format(latest_release['name'])]

    for line in log.split('\n'):
        # Each log line is wrapped in literal quotes by the --pretty format;
        # line[1:-1] strips them before splitting into the four fields.
        try:
            sha, subject, body, parents = line[1:-1].split('|')
        except ValueError:
            # Line without exactly four '|'-separated fields (e.g. blank).
            continue

        # Raw regex + explicit match check, instead of the previous broad
        # try/except around an AttributeError used as control flow.
        match = re.match(r"Merge pull request #(\d+)", subject)
        pull_request = " #{}".format(match.groups()[0]) if match else ""

        # The last parent of a merge commit is the merged branch's head;
        # its author is credited for the change.
        author = subprocess.check_output(
            ['git', 'log', '-1', '--pretty=format:"%an"', parents.split(' ')[-1]])[1:-1]
        changes.append("{}{}: {} ({})".format(sha, pull_request, body.strip(), author))

    return "\n".join(changes)
def update_release(version, build_filepath, commit_sha):
    # Entry point for the release flow: attach a fresh build to the
    # version's RC release, reusing an existing RC release when present
    # and creating one otherwise.
    try:
        release = get_rc_release(version) or create_release(version, commit_sha)
        print "Using release id: {}".format(release['id'])
        # Drop stale assets first so the release always carries exactly
        # one (the latest) build artifact.
        remove_previous_builds(release)
        response = upload_asset(release, build_filepath)
        changelog = get_changelog(commit_sha)
        # Patch the release body with the regenerated changelog.
        response = _github_request('patch', release['url'], {'body': changelog})
        print response.status_code
        print response.text
    except Exception, ex:
        # NOTE(review): errors are printed but swallowed, so the script
        # still exits 0 on failure and CI will not flag a broken release.
        # Confirm whether this best-effort behavior is intentional.
        print ex
if __name__ == '__main__':
    # Usage: release_manager.py <commit_sha> <version> <build_filepath>
    # (invoked from the Makefile's `upload` target with CIRCLE_SHA1,
    # BASE_VERSION and the packed tarball path).
    commit_sha = sys.argv[1]
    version = sys.argv[2]
    filepath = sys.argv[3]

    # TODO: make sure running from git directory & remote = repo
    update_release(version, filepath, commit_sha)

View File

@@ -1,46 +0,0 @@
#!python
import os
import sys
import json
import requests
import subprocess
def capture_output(command):
proc = subprocess.Popen(command, stdout=subprocess.PIPE)
return proc.stdout.read()
if __name__ == '__main__':
version = sys.argv[1]
filepath = sys.argv[2]
filename = filepath.split('/')[-1]
github_token = os.environ['GITHUB_TOKEN']
auth = (github_token, 'x-oauth-basic')
commit_sha = os.environ['CIRCLE_SHA1']
commit_body = capture_output(["git", "log", "--format=%b", "-n", "1", commit_sha])
file_md5_checksum = capture_output(["md5sum", filepath]).split()[0]
file_sha256_checksum = capture_output(["sha256sum", filepath]).split()[0]
version_body = "%s\n\nMD5: %s\nSHA256: %s" % (commit_body, file_md5_checksum, file_sha256_checksum)
params = json.dumps({
'tag_name': 'v{0}'.format(version),
'name': 're:dash v{0}'.format(version),
'body': version_body,
'target_commitish': commit_sha,
'prerelease': True
})
response = requests.post('https://api.github.com/repos/everythingme/redash/releases',
data=params,
auth=auth)
upload_url = response.json()['upload_url']
upload_url = upload_url.replace('{?name}', '')
with open(filepath) as file_content:
headers = {'Content-Type': 'application/gzip'}
response = requests.post(upload_url, file_content, params={'name': filename}, auth=auth,
headers=headers, verify=False)

View File

@@ -19,6 +19,10 @@ test:
post:
- make pack
deployment:
test:
branch: chore/release_process
commands:
- make upload
github:
branch: master
commands:

View File

@@ -18,6 +18,7 @@
<link rel="stylesheet" href="/bower_components/angular-ui-select/dist/select.css">
<link rel="stylesheet" href="/bower_components/pace/themes/pace-theme-minimal.css">
<link rel="stylesheet" href="/bower_components/font-awesome/css/font-awesome.css">
<link rel="stylesheet" href="/bower_components/codemirror/addon/hint/show-hint.css">
<link rel="stylesheet" href="/styles/redash.css">
<!-- endbuild -->
</head>
@@ -105,9 +106,10 @@
<script src="/bower_components/codemirror/lib/codemirror.js"></script>
<script src="/bower_components/codemirror/addon/edit/matchbrackets.js"></script>
<script src="/bower_components/codemirror/addon/edit/closebrackets.js"></script>
<script src="/bower_components/codemirror/addon/hint/show-hint.js"></script>
<script src="/bower_components/codemirror/addon/hint/anyword-hint.js"></script>
<script src="/bower_components/codemirror/mode/sql/sql.js"></script>
<script src="/bower_components/codemirror/mode/javascript/javascript.js"></script>
<script src="/bower_components/angular-ui-codemirror/ui-codemirror.js"></script>
<script src="/bower_components/highcharts/highcharts.js"></script>
<script src="/bower_components/highcharts/modules/exporting.js"></script>
<script src="/bower_components/gridster/dist/jquery.gridster.js"></script>

View File

@@ -6,7 +6,6 @@ angular.module('redash', [
'redash.services',
'redash.renderers',
'redash.visualization',
'ui.codemirror',
'highchart',
'ui.select2',
'angular-growl',

View File

@@ -19,6 +19,27 @@
}
$scope.query = $route.current.locals.query;
var updateSchema = function() {
$scope.hasSchema = false;
$scope.editorSize = "col-md-12";
var dataSourceId = $scope.query.data_source_id || $scope.dataSources[0].id;
DataSource.getSchema({id: dataSourceId}, function(data) {
if (data && data.length > 0) {
$scope.schema = data;
_.each(data, function(table) {
table.collapsed = true;
});
$scope.editorSize = "col-md-9";
$scope.hasSchema = true;
} else {
$scope.hasSchema = false;
$scope.editorSize = "col-md-12";
}
});
}
Events.record(currentUser, 'view', 'query', $scope.query.id);
getQueryResult();
$scope.queryExecuting = false;
@@ -27,6 +48,7 @@
$scope.canViewSource = currentUser.hasPermission('view_source');
$scope.dataSources = DataSource.get(function(dataSources) {
updateSchema();
$scope.query.data_source_id = $scope.query.data_source_id || dataSources[0].id;
});
@@ -126,6 +148,7 @@
});
}
updateSchema();
$scope.executeQuery();
};

View File

@@ -29,7 +29,7 @@
restrict: 'E',
template: '<span ng-show="query.id && canViewSource">\
<a ng-show="!sourceMode"\
ng-href="{{query.id}}/source#{{selectedTab}}">Show Source\
ng-href="/queries/{{query.id}}/source#{{selectedTab}}">Show Source\
</a>\
<a ng-show="sourceMode"\
ng-href="/queries/{{query.id}}#{{selectedTab}}">Hide Source\
@@ -63,26 +63,80 @@
restrict: 'E',
scope: {
'query': '=',
'lock': '='
'lock': '=',
'schema': '='
},
template: '<textarea\
ui-codemirror="editorOptions"\
ng-model="query.query">',
link: function($scope) {
$scope.editorOptions = {
template: '<textarea></textarea>',
link: {
pre: function ($scope, element) {
var textarea = element.children()[0];
var editorOptions = {
mode: 'text/x-sql',
lineWrapping: true,
lineNumbers: true,
readOnly: false,
matchBrackets: true,
autoCloseBrackets: true
};
autoCloseBrackets: true,
extraKeys: {"Ctrl-Space": "autocomplete"}
};
$scope.$watch('lock', function(locked) {
$scope.editorOptions.readOnly = locked ? 'nocursor' : false;
});
var additionalHints = [];
CodeMirror.commands.autocomplete = function(cm) {
var hinter = function(editor, options) {
var hints = CodeMirror.hint.anyword(editor, options);
var cur = editor.getCursor(), token = editor.getTokenAt(cur).string;
hints.list = _.union(hints.list, _.filter(additionalHints, function (h) {
return h.search(token) === 0;
}));
return hints;
};
// CodeMirror.showHint(cm, CodeMirror.hint.anyword);
CodeMirror.showHint(cm, hinter);
};
var codemirror = CodeMirror.fromTextArea(textarea, editorOptions);
codemirror.on('change', function(instance) {
var newValue = instance.getValue();
if (newValue !== $scope.query.query) {
$scope.$evalAsync(function() {
$scope.query.query = newValue;
});
}
});
$scope.$watch('query.query', function () {
if ($scope.query.query !== codemirror.getValue()) {
codemirror.setValue($scope.query.query);
}
});
$scope.$watch('schema', function (schema) {
if (schema) {
var keywords = [];
_.each(schema, function (table) {
keywords.push(table.name);
_.each(table.columns, function (c) {
keywords.push(c);
});
});
additionalHints = _.unique(keywords);
}
});
$scope.$watch('lock', function (locked) {
var readOnly = locked ? 'nocursor' : false;
codemirror.setOption('readOnly', readOnly);
});
}
}
}
};
}
function queryFormatter($http) {

View File

@@ -480,7 +480,12 @@
var DataSource = function ($resource) {
var DataSourceResource = $resource('/api/data_sources/:id', {id: '@id'}, {'get': {'method': 'GET', 'cache': true, 'isArray': true}});
var actions = {
'get': {'method': 'GET', 'cache': true, 'isArray': true},
'getSchema': {'method': 'GET', 'cache': true, 'isArray': true, 'url': '/api/data_sources/:id/schema'}
};
var DataSourceResource = $resource('/api/data_sources/:id', {id: '@id'}, actions);
return DataSourceResource;
}

View File

@@ -156,7 +156,7 @@ li.widget:hover {
/* CodeMirror */
.CodeMirror {
border: 1px solid #eee;
height: auto;
/*height: auto;*/
min-height: 300px;
margin-bottom: 10px;
}
@@ -308,6 +308,18 @@ counter-renderer counter-name {
height: 100%;
}
.schema-browser {
height: 300px;
overflow: scroll;
}
div.table-name {
overflow: scroll;
text-overflow: ellipsis;
white-space: nowrap;
cursor: pointer;
}
/*
bootstrap's hidden-xs class adds display:block when not hidden
use this class when you need to keep the original display value

View File

@@ -59,9 +59,9 @@
<hr>
<div class="row">
<div class="col-lg-12">
<div ng-show="sourceMode">
<div class="row" ng-if="sourceMode">
<div ng-class="editorSize">
<div>
<p>
<button type="button" class="btn btn-primary btn-xs" ng-disabled="queryExecuting" ng-click="executeQuery()">
<span class="glyphicon glyphicon-play"></span> Execute
@@ -77,19 +77,31 @@
</button>
</span>
</p>
</div>
<!-- code editor -->
<div ng-show="sourceMode">
<p>
<query-editor query="query" lock="queryFormatting"></query-editor>
<query-editor query="query" schema="schema" lock="queryFormatting"></query-editor>
</p>
<hr>
</div>
</div>
<div class="col-md-3" ng-show="hasSchema">
<div>
<input type="text" placeholder="Search schema..." class="form-control" ng-model="schemaFilter">
</div>
<div class="schema-browser">
<div ng-repeat="table in schema | filter:schemaFilter">
<div class="table-name" ng-click="table.collapsed = !table.collapsed">
<i class="fa fa-table"></i> <strong><span title="{{table.name}}">{{table.name}}</span></strong>
</div>
<div collapse="table.collapsed">
<div ng-repeat="column in table.columns | filter:schemaFilter" style="padding-left:16px;">{{column}}</div>
</div>
</div>
</div>
</div>
</div>
</div>
<hr ng-if="sourceMode">
<div class="row">
<div class="col-lg-3 rd-hidden-xs">
<p>
@@ -97,7 +109,7 @@
<span class="text-muted">Created By </span>
<strong>{{query.user.name}}</strong>
</p>
<p ng-if="query.user.id != query.last_modified_by.id">
<p ng-if="query.last_modified_by && query.user.id != query.last_modified_by.id">
<span class="glyphicon glyphicon-user"></span>
<span class="text-muted">Last Modified By </span>
<strong>{{query.last_modified_by.name}}</strong>
@@ -190,7 +202,7 @@
<rd-tab tab-id="{{vis.id}}" name="{{vis.name}}" ng-if="vis.type!='TABLE'" ng-repeat="vis in query.visualizations">
<span class="remove" ng-click="deleteVisualization($event, vis)" ng-show="canEdit"> &times;</span>
</rd-tab>
<rd-tab tab-id="add" name="&plus; New" removeable="true" ng-show="canEdit"></rd-tab>
<rd-tab tab-id="add" name="&plus; New Visualization" removeable="true" ng-show="canEdit"></rd-tab>
<li ng-if="!sourceMode" class="rd-tab-btn"><button class="btn btn-sm btn-default" ng-click="executeQuery()" ng-disabled="queryExecuting" title="Refresh Dataset"><span class="glyphicon glyphicon-refresh"></span></button></li>
</ul>
</div>

View File

@@ -12,7 +12,7 @@
"es5-shim": "2.0.8",
"angular-moment": "0.2.0",
"moment": "2.1.0",
"angular-ui-codemirror": "0.0.5",
"codemirror": "4.8.0",
"highcharts": "3.0.10",
"underscore": "1.5.1",
"pivottable": "~1.1.1",

View File

@@ -1,6 +1,3 @@
from flask import make_response
from functools import update_wrapper
ONE_YEAR = 60 * 60 * 24 * 365.25
headers = {

View File

@@ -219,10 +219,18 @@ class DataSourceListAPI(BaseResource):
return datasource.to_dict()
api.add_resource(DataSourceListAPI, '/api/data_sources', endpoint='data_sources')
class DataSourceSchemaAPI(BaseResource):
    """Expose the table/column schema of a single data source."""

    def get(self, data_source_id):
        source = models.DataSource.get_by_id(data_source_id)
        return source.get_schema()


api.add_resource(DataSourceSchemaAPI, '/api/data_sources/<data_source_id>/schema')
class DashboardRecentAPI(BaseResource):
    """List dashboards recently viewed by the current user (up to 20)."""

    def get(self):
        recent = models.Dashboard.recent(current_user.id).limit(20)
        return [dashboard.to_dict() for dashboard in recent]

View File

@@ -13,7 +13,8 @@ from playhouse.postgres_ext import ArrayField, DateTimeTZField, PostgresqlExtDat
from flask.ext.login import UserMixin, AnonymousUserMixin
import psycopg2
from redash import utils, settings
from redash import utils, settings, redis_connection
from redash.query_runner import get_query_runner
class Database(object):
@@ -241,6 +242,23 @@ class DataSource(BaseModel):
'type': self.type
}
def get_schema(self, refresh=False):
    """Return this data source's schema, backed by a Redis cache.

    With refresh=True the cache is bypassed and repopulated from the
    query runner; otherwise a cached copy is served when available.
    The schema is a list of table dicts, sorted by table name.
    """
    key = "data_source:schema:{}".format(self.id)

    cached_schema = None if refresh else redis_connection.get(key)

    if cached_schema is not None:
        return json.loads(cached_schema)

    runner = get_query_runner(self.type, self.options)
    schema = sorted(runner.get_schema(), key=lambda t: t['name'])
    redis_connection.set(key, json.dumps(schema))
    return schema
@classmethod
def all(cls):
return cls.select().order_by(cls.id.asc())

View File

@@ -57,6 +57,9 @@ class BaseQueryRunner(object):
def run_query(self, query):
raise NotImplementedError()
def get_schema(self):
    """Default schema: empty. Runners supporting introspection override this."""
    return []
@classmethod
def to_dict(cls):
return {

View File

@@ -44,6 +44,41 @@ class Mysql(BaseQueryRunner):
def __init__(self, configuration_json):
super(Mysql, self).__init__(configuration_json)
def get_schema(self):
    """Introspect the MySQL server's tables/columns via information_schema.

    Tables living outside the configured database are qualified as
    "<schema>.<table>"; tables in the configured db keep their bare name.

    :returns: iterable of {'name': table_name, 'columns': [...]} dicts.
    :raises Exception: when the introspection query fails.
    """
    query = """
    SELECT col.table_schema,
           col.table_name,
           col.column_name
    FROM `information_schema`.`columns` col
    INNER JOIN
      (SELECT table_schema,
              TABLE_NAME
       FROM information_schema.tables
       WHERE table_type <> 'SYSTEM VIEW' AND table_schema NOT IN ('performance_schema', 'mysql')) tables ON tables.table_schema = col.table_schema
    AND tables.TABLE_NAME = col.TABLE_NAME;
    """

    results, error = self.run_query(query)

    if error is not None:
        # Bug fix: include the underlying error instead of discarding it.
        raise Exception("Failed getting schema: {}".format(error))

    results = json.loads(results)

    schema = {}
    for row in results['rows']:
        if row['table_schema'] != self.configuration['db']:
            table_name = '{}.{}'.format(row['table_schema'], row['table_name'])
        else:
            table_name = row['table_name']

        # setdefault replaces the manual "if table_name not in schema" dance.
        table = schema.setdefault(table_name, {'name': table_name, 'columns': []})
        table['columns'].append(row['column_name'])

    return schema.values()
def run_query(self, query):
import MySQLdb

View File

@@ -83,6 +83,34 @@ class PostgreSQL(BaseQueryRunner):
self.connection_string = " ".join(values)
def get_schema(self):
    """Introspect the PostgreSQL server's tables/columns.

    Tables outside the default 'public' schema are qualified as
    "<schema>.<table>"; public tables keep their bare name.

    :returns: iterable of {'name': table_name, 'columns': [...]} dicts.
    :raises Exception: when the introspection query fails.
    """
    query = """
    SELECT table_schema, table_name, column_name
    FROM information_schema.columns
    WHERE table_schema NOT IN ('pg_catalog', 'information_schema');
    """

    results, error = self.run_query(query)

    if error is not None:
        # Bug fix: include the underlying error instead of discarding it
        # (kept consistent with the MySQL runner's error message).
        raise Exception("Failed getting schema: {}".format(error))

    results = json.loads(results)

    schema = {}
    for row in results['rows']:
        if row['table_schema'] != 'public':
            table_name = '{}.{}'.format(row['table_schema'], row['table_name'])
        else:
            table_name = row['table_name']

        table = schema.setdefault(table_name, {'name': table_name, 'columns': []})
        table['columns'].append(row['column_name'])

    return schema.values()
def run_query(self, query):
connection = psycopg2.connect(self.connection_string, async=True)
_wait(connection)

View File

@@ -218,6 +218,17 @@ def cleanup_query_results():
logger.info("Deleted %d unused query results out of total of %d." % (deleted_count, total_unused_query_results))
@celery.task(base=BaseTask)
def refresh_schemas():
    """Refresh the cached schema of every data source.

    A failure on one data source is logged and skipped so it does not
    prevent the remaining sources from being refreshed.
    """
    for ds in models.DataSource.all():
        logger.info("Refreshing schema for: {}".format(ds.name))
        try:
            ds.get_schema(refresh=True)
        except Exception:
            # Keep going: one unreachable/broken source must not abort
            # the periodic refresh of all the others.
            logger.exception("Failed refreshing schema for: {}".format(ds.name))
@celery.task(bind=True, base=BaseTask, track_started=True)
def execute_query(self, query, data_source_id):
# TODO: maybe this should be a class?

View File

@@ -15,6 +15,10 @@ celery_schedule = {
'cleanup_tasks': {
'task': 'redash.tasks.cleanup_tasks',
'schedule': timedelta(minutes=5)
},
'refresh_schemas': {
'task': 'redash.tasks.refresh_schemas',
'schedule': timedelta(minutes=30)
}
}

View File

@@ -1,13 +1,17 @@
import os
os.environ['REDASH_REDIS_URL'] = "redis://localhost:6379/5"
import logging
from unittest import TestCase
import datetime
from redash import settings
settings.DATABASE_CONFIG = {
'name': 'circle_test',
'threadlocals': True
}
from redash import models
from redash import models, redis_connection
logging.getLogger('peewee').setLevel(logging.INFO)
@@ -20,6 +24,7 @@ class BaseTestCase(TestCase):
def tearDown(self):
models.db.close_db(None)
models.create_db(False, True)
redis_connection.flushdb()
def assertResponseEqual(self, expected, actual):
for k, v in expected.iteritems():

View File

@@ -47,7 +47,7 @@ user_factory = ModelFactory(redash.models.User,
data_source_factory = ModelFactory(redash.models.DataSource,
name='Test',
type='pg',
options='')
options='{"dbname": "test"}')
dashboard_factory = ModelFactory(redash.models.Dashboard,

View File

@@ -2,10 +2,12 @@
import datetime
import json
from unittest import TestCase
import mock
from tests import BaseTestCase
from redash import models
from factories import dashboard_factory, query_factory, data_source_factory, query_result_factory, user_factory, widget_factory
from redash.utils import gen_query_hash
from redash import query_runner
class DashboardTest(BaseTestCase):
@@ -195,6 +197,44 @@ class QueryArchiveTest(BaseTestCase):
self.assertEqual(None, query.schedule)
class DataSourceTest(BaseTestCase):
    """Tests for DataSource.get_schema and its Redis-backed caching."""

    def test_get_schema(self):
        expected = [{'name': 'table', 'columns': []}]

        with mock.patch('redash.query_runner.pg.PostgreSQL.get_schema') as patched_get_schema:
            patched_get_schema.return_value = expected
            data_source = data_source_factory.create()

            self.assertEqual(expected, data_source.get_schema())

    def test_get_schema_uses_cache(self):
        expected = [{'name': 'table', 'columns': []}]

        with mock.patch('redash.query_runner.pg.PostgreSQL.get_schema') as patched_get_schema:
            patched_get_schema.return_value = expected
            data_source = data_source_factory.create()

            data_source.get_schema()
            self.assertEqual(expected, data_source.get_schema())
            # The runner is hit only once; the second call is served
            # from the cache.
            self.assertEqual(patched_get_schema.call_count, 1)

    def test_get_schema_skips_cache_with_refresh_true(self):
        first_schema = [{'name': 'table', 'columns': []}]

        with mock.patch('redash.query_runner.pg.PostgreSQL.get_schema') as patched_get_schema:
            patched_get_schema.return_value = first_schema
            data_source = data_source_factory.create()
            data_source.get_schema()

            second_schema = [{'name': 'new_table', 'columns': []}]
            patched_get_schema.return_value = second_schema

            # refresh=True bypasses the cache and hits the runner again.
            self.assertEqual(second_schema, data_source.get_schema(refresh=True))
            self.assertEqual(patched_get_schema.call_count, 2)
class QueryResultTest(BaseTestCase):
def setUp(self):