mirror of https://github.com/apache/impala.git
This takes steps to make Python 2 behave like Python 3 as
a way to flush out issues with running on Python 3. Specifically,
it handles two main differences:
1. Python 3 requires absolute imports within packages. This
can be emulated via "from __future__ import absolute_import"
2. Python 3 changed division to "true" division that doesn't
round to an integer. This can be emulated via
"from __future__ import division"
This changes all Python files to add imports for absolute_import
and division. For completeness, this also includes print_function in the
import.
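
A minimal sketch of the header each file now carries and its effect under
Python 2 (the sample values are illustrative, not taken from the diff):

    from __future__ import absolute_import, division, print_function

    # With these imports in place, Python 2 mimics Python 3 semantics:
    print(7 / 2)   # 3.5 -- true division (plain Python 2 would print 3)
    print(7 // 2)  # 3   -- floor division still yields an integer
    print("done")  # print is now a function rather than a statement
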
I scrutinized each old-division location and converted those that need an
integer result (e.g. indices, counts of records) to use the integer
division '//' operator. Some code was also using relative imports and
needed to be adjusted to work under absolute_import.
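
A minimal sketch of both kinds of adjustment; the names and values below
are hypothetical and only illustrate the pattern:

    from __future__ import absolute_import, division, print_function

    # Hypothetical index computation: under true division '/' would yield 3.5,
    # which cannot be used as a list index, so it becomes floor division.
    lo, hi = 0, 7
    mid = (lo + hi) // 2
    print(mid)  # 3

    # Hypothetical import fix for a module inside the tests.common package:
    #   before:  import test_dimensions                    # implicit relative import
    #   after:   from tests.common import test_dimensions  # absolute import
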
This fixes all Pylint warnings about no-absolute-import and old-division,
and these warnings are now banned.
Testing:
- Ran core tests
Change-Id: Idb0fcbd11f3e8791f5951c4944be44fb580e576b
Reviewed-on: http://gerrit.cloudera.org:8080/19588
Reviewed-by: Joe McDonnell <joemcdonnell@cloudera.com>
Tested-by: Joe McDonnell <joemcdonnell@cloudera.com>
69 lines · 3.0 KiB · Python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

# Validates table stored on the LocalFileSystem.
#
from __future__ import absolute_import, division, print_function
import pytest
from subprocess import check_call, call

from tests.common.impala_test_suite import ImpalaTestSuite
from tests.common.skip import SkipIf
from tests.common.test_dimensions import create_single_exec_option_dimension
from tests.util.filesystem_utils import get_secondary_fs_path


@SkipIf.no_secondary_fs
class TestMultipleFilesystems(ImpalaTestSuite):
  """
  Tests that tables and queries can span multiple filesystems.
  """

  @classmethod
  def get_workload(self):
    return 'functional-query'

  @classmethod
  def add_test_dimensions(cls):
    super(TestMultipleFilesystems, cls).add_test_dimensions()
    cls.ImpalaTestMatrix.add_dimension(create_single_exec_option_dimension())

    cls.ImpalaTestMatrix.add_constraint(lambda v:\
        v.get_value('table_format').file_format == 'text' and \
        v.get_value('table_format').compression_codec == 'none')

  def _populate_secondary_fs_partitions(self, db_name):
    # This directory may already exist. So we needn't mind if this call fails.
    call(["hadoop", "fs", "-mkdir", get_secondary_fs_path("/multi_fs_tests/")], shell=False)
    check_call(["hadoop", "fs", "-mkdir",
        get_secondary_fs_path("/multi_fs_tests/%s.db/" % db_name)], shell=False)
    self.filesystem_client.copy("/test-warehouse/alltypes_parquet/",
        get_secondary_fs_path("/multi_fs_tests/%s.db/" % db_name), overwrite=True)
    self.filesystem_client.copy("/test-warehouse/tinytable/", get_secondary_fs_path(
        "/multi_fs_tests/%s.db/" % db_name), overwrite=True)

  @pytest.mark.execute_serially
  def test_multiple_filesystems(self, vector, unique_database):
    try:
      self._populate_secondary_fs_partitions(unique_database)
      self.run_test_case('QueryTest/multiple-filesystems', vector, use_db=unique_database)
    finally:
      # We delete this from the secondary filesystem here because the database was created
      # in HDFS but the queries will create this path in the secondary FS as well. So
      # dropping the database will not delete the directory in the secondary FS.
      check_call(["hadoop", "fs", "-rm", "-r",
          get_secondary_fs_path("/multi_fs_tests/%s.db/" % unique_database)], shell=False)