mirror of
https://github.com/apache/impala.git
synced 2025-12-19 18:12:08 -05:00
IMPALA-14138: Addendum test fix
This patch moves out a query from no-block-locations.test to only run it on HDFS because the queried Iceberg table has Iceberg delete files that contain HDFS-specific URIs. Change-Id: Iea862dd3b73a9aceceeb848d0ac85ac87627c8c2 Reviewed-on: http://gerrit.cloudera.org:8080/23189 Reviewed-by: Daniel Becker <daniel.becker@cloudera.com> Reviewed-by: Csaba Ringhofer <csringhofer@cloudera.com> Tested-by: Impala Public Jenkins <impala-public-jenkins@cloudera.com>
This commit is contained in:
committed by
Impala Public Jenkins
parent
fdad954ce4
commit
d5f7cc3a0b
15
testdata/workloads/functional-query/queries/QueryTest/no-block-locations-hdfs-only.test
vendored
Normal file
15
testdata/workloads/functional-query/queries/QueryTest/no-block-locations-hdfs-only.test
vendored
Normal file
@@ -0,0 +1,15 @@
+====
+---- QUERY
+# The following query needs to run against HDFS because iceberg_lineitem_sixblocks
+# contains position delete files that contain HDFS-specific URIs.
+# 'iceberg_lineitem_sixblocks' contains a single data file with six HDFS blocks. Without
+# block information we schedule the whole data file to a single SCAN operator.
+select count(*) from functional_parquet.iceberg_lineitem_sixblocks where l_orderkey % 2 = 0;
+---- RESULTS
+9805
+---- TYPES
+BIGINT
+---- RUNTIME_PROFILE
+# The following should be in the ExecSummary
+row_regex: 00:SCAN [A-Z0-9]+ +1 +1 +.*
+====
|
||||||
@@ -36,15 +36,3 @@ BIGINT
 # The following should be in the ExecSummary
 row_regex: 00:SCAN [A-Z0-9]+ +1 +1 +.*
 ====
----- QUERY
-# 'iceberg_lineitem_sixblocks' contains a single data file with six HDFS blocks. Without
-# block information we schedule the whole data file to a single SCAN operator.
-select count(*) from functional_parquet.iceberg_lineitem_sixblocks where l_orderkey % 2 = 0;
----- RESULTS
-9805
----- TYPES
-BIGINT
----- RUNTIME_PROFILE
-# The following should be in the ExecSummary
-row_regex: 00:SCAN [A-Z0-9]+ +1 +1 +.*
-====
|
|||||||
@@ -18,6 +18,7 @@
 from __future__ import absolute_import, division, print_function
 import pytest
 from os import getenv
+from tests.util.filesystem_utils import IS_HDFS

 from tests.common.custom_cluster_test_suite import CustomClusterTestSuite

@@ -34,3 +35,5 @@ class TestDisabledBlockLocations(CustomClusterTestSuite):
   @CustomClusterTestSuite.with_args(custom_core_site_dir=CORE_SITE_CONFIG_DIR)
   def test_no_block_locations(self, vector):
     self.run_test_case('QueryTest/no-block-locations', vector)
+    if IS_HDFS:
+      self.run_test_case('QueryTest/no-block-locations-hdfs-only', vector)
|
|||||||
Reference in New Issue
Block a user