Files
impala/tests/query_test/test_hive_timestamp_conversion.py
Csaba Ringhofer f98b697c7b IMPALA-13929: Make 'functional-query' the default workload in tests
This change adds get_workload() to ImpalaTestSuite and removes it
from all test suites that already returned 'functional-query'.
get_workload() is also removed from CustomClusterTestSuite which
used to return 'tpch'.

All other changes besides impala_test_suite.py and
custom_cluster_test_suite.py are just mass removals of
get_workload() functions.

The behavior is only changed in custom cluster tests that didn't
override get_workload(). By returning 'functional-query' instead
of 'tpch', exploration_strategy() will no longer return 'core' in
'exhaustive' test runs. See IMPALA-3947 on why workload affected
exploration_strategy. An example of an affected test is
TestCatalogHMSFailures, which was skipped both in core and exhaustive
runs before this change.

get_workload() functions that return a different workload than
'functional-query' are not changed - it is possible that some of
these also don't handle exploration_strategy() as expected, but
individually checking these tests is out of scope in this patch.

Change-Id: I9ec6c41ffb3a30e1ea2de773626d1485c69fe115
Reviewed-on: http://gerrit.cloudera.org:8080/22726
Reviewed-by: Riza Suminto <riza.suminto@cloudera.com>
Reviewed-by: Daniel Becker <daniel.becker@cloudera.com>
Tested-by: Impala Public Jenkins <impala-public-jenkins@cloudera.com>
2025-04-08 07:12:55 +00:00

73 lines
3.8 KiB
Python

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function
from tests.common.impala_test_suite import ImpalaTestSuite
from tests.common.file_utils import create_table_and_copy_files, create_table_from_parquet
class TestHiveParquetTimestampConversion(ImpalaTestSuite):
  """Verifies that Impala reads Parquet files written by older Hive versions, or
  with Hive's legacy timestamp conversion enabled, the same way Hive does. The
  tests exercise the convert_legacy_hive_parquet_utc_timestamps,
  use_legacy_hive_timestamp_conversion, and timezone options."""

  @classmethod
  def add_test_dimensions(cls):
    super(TestHiveParquetTimestampConversion, cls).add_test_dimensions()
    # These tests only make sense for uncompressed Parquet tables.
    cls.ImpalaTestMatrix.add_constraint(
        lambda vec: vec.get_value('table_format').file_format == 'parquet'
        and vec.get_value('table_format').compression_codec == 'none')

  def test_hive_4_legacy(self, vector, unique_database):
    """Checks that legacy conversion matches Hive's timezone conversion when the
    Parquet metadata contains writer.zone.conversion.legacy=true.

    The test data was generated via Hive with TZ=Asia/Kuala_Lumpur:
      create table t (d timestamp) stored as parquet;
      set hive.parquet.timestamp.write.legacy.conversion.enabled=true;
      insert into t values ("1900-01-01 00:00:00"), ("1910-01-01 00:00:00"),
      ("1935-01-01 00:00:00"), ("1940-01-01 00:00:00"), ("1942-01-01 00:00:00"),
      ("1944-01-01 00:00:00"), ("1969-01-29 00:00:00"), ("2000-01-01 00:00:00");
    """
    create_table_from_parquet(self.client, unique_database, "hive_kuala_lumpur_legacy")
    self.run_test_case("QueryTest/timestamp-conversion-hive-4", vector, unique_database)

  def test_hive_313(self, vector, unique_database):
    """Checks a Parquet file written with Hive 3.1.3 using the new Date/Time APIs
    (legacy=false) to convert from US/Pacific to UTC. The presence of
    writer.time.zone in the file metadata allows us to infer that the new
    Date/Time APIs should be used for the conversion; the
    use_legacy_hive_timestamp_conversion property shouldn't be taken into account
    in this case.

    Test file from https://github.com/apache/hive/blob/rel/release-4.0.1/data/files/
    employee_hive_3_1_3_us_pacific.parquet"""
    create_table_from_parquet(
        self.client, unique_database, "employee_hive_3_1_3_us_pacific")
    self.run_test_case("QueryTest/timestamp-conversion-hive-313", vector, unique_database)

  def test_hive_3_mixed(self, vector, unique_database):
    """Checks a table holding Hive legacy timestamps written with Hive before 3.1.3.

    The test files target timezone=Asia/Singapore and are sourced from
    https://github.com/apache/hive/tree/rel/release-4.0.1/data/files/tbl_parq1."""
    create_stmt = "create table %s.t (d timestamp) stored as parquet" % unique_database
    data_files = ["testdata/data/tbl_parq1/" + f
                  for f in ["000000_0", "000000_1", "000000_2"]]
    create_table_and_copy_files(self.client, create_stmt, unique_database, "t",
        data_files)
    self.run_test_case("QueryTest/timestamp-conversion-hive-3m", vector, unique_database)