# Copyright (c) 2015 Cloudera, Inc. All rights reserved.
#
# Validates tables and queries that span multiple filesystems.

import pytest
from subprocess import check_call

from tests.common.impala_test_suite import ImpalaTestSuite
from tests.common.test_dimensions import create_single_exec_option_dimension
from tests.common.skip import SkipIf, SkipIfIsilon
from tests.util.filesystem_utils import get_fs_path


@SkipIf.default_fs  # Run only when a non-default filesystem is available.
@SkipIfIsilon.untriaged  # Missing coverage: find out why this is failing.
class TestMultipleFilesystems(ImpalaTestSuite):
  """Tests that tables and queries can span multiple filesystems."""

  TEST_DB = 'multi_fs_db'

  @classmethod
  def get_workload(cls):
    return 'functional-query'

  @classmethod
  def add_test_dimensions(cls):
    super(TestMultipleFilesystems, cls).add_test_dimensions()
    cls.TestMatrix.add_dimension(create_single_exec_option_dimension())

    # Restrict the test matrix to uncompressed text tables.
    cls.TestMatrix.add_constraint(lambda v:
        v.get_value('table_format').file_format == 'text' and
        v.get_value('table_format').compression_codec == 'none')

  def setup_method(self, method):
    self.cleanup_db(self.TEST_DB)
    # Note: purposely creates the database on the default filesystem; do not specify a
    # location.
    self.client.execute("create database %s" % self.TEST_DB)
    self._populate_hdfs_partitions()

  def teardown_method(self, method):
    self.cleanup_db(self.TEST_DB)

  def _populate_hdfs_partitions(self):
    """Copies some data to the default filesystem (HDFS) so that the test can verify
    tables that span the default and a secondary filesystem (e.g. S3A)."""
    check_call(["hadoop", "fs", "-cp",
                get_fs_path("/test-warehouse/alltypes_parquet"),
                "/test-warehouse/%s.db/" % self.TEST_DB], shell=False)

  def test_multiple_filesystems(self, vector):
    self.run_test_case('QueryTest/multiple-filesystems', vector, use_db=self.TEST_DB)