IMPALA-3695: Remove KUDU_IS_SUPPORTED
Testing: Ran exhaustive tests.

Change-Id: I059d7a42798c38b570f25283663c284f2fcee517
Reviewed-on: http://gerrit.cloudera.org:8080/16085
Reviewed-by: Impala Public Jenkins <impala-public-jenkins@cloudera.com>
Tested-by: Impala Public Jenkins <impala-public-jenkins@cloudera.com>
parent 23bb0859cc
commit 6ec6aaae8e
committed by Impala Public Jenkins
@@ -402,8 +402,6 @@ else()
    set(kuduClient_DIR "$ENV{IMPALA_KUDU_HOME}/release/share/kuduClient/cmake")
  endif()
endif()
# When KUDU_IS_SUPPORTED is false, the Kudu client is expected to be a non-functional
# stub. It's still needed to link though.
find_package(kuduClient REQUIRED NO_DEFAULT_PATH)
include_directories(SYSTEM ${KUDU_CLIENT_INCLUDE_DIR})

@@ -41,9 +41,6 @@
# DOWNLOAD_CDH_COMPONENTS - When set to true, this script will also download and extract
#   the CDP Hadoop components (i.e. Hadoop, Hive, HBase, Ranger, etc) into
#   CDP_COMPONENTS_HOME as appropriate.
# KUDU_IS_SUPPORTED - If KUDU_IS_SUPPORTED is false, Kudu is disabled and we download
#   the toolchain Kudu and use the symbols to compile a non-functional stub library so
#   that Impala has something to link against.
# IMPALA_<PACKAGE>_VERSION - The version expected for <PACKAGE>. This is typically
#   configured in bin/impala-config.sh and must exist for every package. This is used
#   to construct an appropriate URL and expected archive name.
@@ -405,115 +402,6 @@ def check_custom_toolchain(toolchain_packages_home, packages):
    raise Exception("Toolchain bootstrap failed: required packages were missing")


def build_kudu_stub(kudu_dir, gcc_dir):
  """When Kudu isn't supported, the CentOS 7 Kudu package is downloaded from the
  toolchain. This replaces the client lib with a stubbed client. The
  'kudu_dir' specifies the location of the unpacked CentOS 7 Kudu package.
  The 'gcc_dir' specifies the location of the unpacked GCC/G++."""
  print "Building kudu stub"
  # Find the client lib files in the Kudu dir. There may be several files with
  # various extensions. Also there will be a debug version.
  client_lib_paths = []
  for path, _, files in os.walk(kudu_dir):
    for file in files:
      if not file.startswith("libkudu_client.so"):
        continue
      file_path = os.path.join(path, file)
      if os.path.islink(file_path):
        continue
      client_lib_paths.append(file_path)
  if not client_lib_paths:
    raise Exception("Unable to find Kudu client lib under '%s'" % kudu_dir)

  # The client stub will be created by inspecting a real client and extracting the
  # symbols. The choice of which client file to use shouldn't matter.
  client_lib_path = client_lib_paths[0]

  # Use a newer version of binutils because on older systems the default binutils may
  # not be able to read the newer binary.
  binutils_dir = ToolchainPackage("binutils").pkg_directory()
  nm_path = os.path.join(binutils_dir, "bin", "nm")
  objdump_path = os.path.join(binutils_dir, "bin", "objdump")

  # Extract the symbols and write the stubbed client source. There is a special method
  # kudu::client::GetShortVersionString() that is overridden so that the stub can be
  # identified by the caller.
  get_short_version_sig = "kudu::client::GetShortVersionString()"
  nm_out = check_output([nm_path, "--defined-only", "-D", client_lib_path])
  stub_build_dir = tempfile.mkdtemp()
  stub_client_src_file = open(os.path.join(stub_build_dir, "kudu_client.cc"), "w")
  try:
    stub_client_src_file.write("""
#include <string>

static const std::string kFakeKuduVersion = "__IMPALA_KUDU_STUB__";

static void KuduNotSupported() {
  *((char*)0) = 0;
}

namespace kudu { namespace client {
std::string GetShortVersionString() { return kFakeKuduVersion; }
}}
""")
    found_start_version_symbol = False
    cpp_filt_path = os.path.join(binutils_dir, "bin", "c++filt")
    for line in nm_out.splitlines():
      addr, sym_type, mangled_name = line.split(" ")
      # Skip special functions and anything that isn't a strong symbol. Any symbols that
      # get past this check must be related to Kudu. If a symbol unrelated to Kudu
      # (ex: a boost symbol) gets defined in the stub, there's a chance the symbol could
      # get used and crash Impala.
      if mangled_name in ["_init", "_fini"] or sym_type not in "Tt":
        continue
      demangled_name = check_output([cpp_filt_path, mangled_name]).strip()
      assert "kudu" in demangled_name, \
          "Symbol doesn't appear to be related to Kudu: " + demangled_name
      if demangled_name == get_short_version_sig:
        found_start_version_symbol = True
        continue
      stub_client_src_file.write("""
extern "C" void %s() {
  KuduNotSupported();
}
""" % mangled_name)

    if not found_start_version_symbol:
      raise Exception("Expected to find a symbol corresponding to"
          " %s but it was not found." % get_short_version_sig)
    stub_client_src_file.flush()

    # The soname is needed to avoid problems in packaging builds. Without the soname,
    # the library dependency as listed in the impalad binary will be a full path instead
    # of a short name. Debian in particular has problems with packaging when that happens.
    objdump_out = check_output([objdump_path, "-p", client_lib_path])
    for line in objdump_out.splitlines():
      if "SONAME" not in line:
        continue
      # The line that needs to be parsed should be something like:
      #   "  SONAME               libkudu_client.so.0"
      so_name = line.split()[1]
      break
    else:
      raise Exception("Unable to extract soname from %s" % client_lib_path)

    # Compile the library.
    stub_client_lib_path = os.path.join(stub_build_dir, "libkudu_client.so")
    toolchain_packages_home = os.environ.get("IMPALA_TOOLCHAIN_PACKAGES_HOME")
    gpp = os.path.join(
        toolchain_packages_home, "gcc-%s" % os.environ.get("IMPALA_GCC_VERSION"),
        "bin", "g++")
    subprocess.check_call([gpp, stub_client_src_file.name, "-shared", "-fPIC",
        "-Wl,-soname,%s" % so_name, "-o", stub_client_lib_path])

    # Replace the real libs with the stub.
    for client_lib_path in client_lib_paths:
      shutil.copyfile(stub_client_lib_path, client_lib_path)
  finally:
    shutil.rmtree(stub_build_dir)


def execute_many(f, args):
  """
  Executes f(a) for a in args using a threadpool to execute in parallel.
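The removed stub generator hinges on one idea: list the dynamic symbols a real libkudu_client.so exports, then emit a trivial function for each so the linker stays satisfied. A minimal sketch of that first step, assuming a system nm on PATH and a hypothetical lib_path (not part of this commit):

import subprocess

def exported_function_symbols(lib_path, nm="nm"):
    """Return mangled names of functions defined in a shared library.

    Sketch only: assumes binutils `nm` is on PATH and `lib_path` points at a
    real .so; it mirrors the nm invocation used by the removed build_kudu_stub().
    """
    out = subprocess.check_output([nm, "--defined-only", "-D", lib_path])
    symbols = []
    for line in out.decode().splitlines():
        parts = line.split()
        if len(parts) != 3:
            continue  # skip blank or malformed lines
        _addr, sym_type, name = parts
        # Keep strong/weak text symbols, skip the loader's _init/_fini.
        if sym_type in ("T", "t") and name not in ("_init", "_fini"):
            symbols.append(name)
    return symbols

# Example (hypothetical path): exported_function_symbols("/usr/lib/libz.so.1")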
@@ -581,16 +469,9 @@ def get_hadoop_downloads():
  return cluster_components


def get_kudu_downloads(use_kudu_stub):
  # If Kudu is not supported, we download centos7 kudu to build the kudu stub.
  kudu_downloads = []
  if use_kudu_stub:
    kudu_downloads += [ToolchainKudu("centos7")]
  else:
    # Toolchain Kudu includes Java artifacts.
    kudu_downloads += [ToolchainKudu()]

  return kudu_downloads
def get_kudu_downloads():
  # Toolchain Kudu includes Java artifacts.
  return [ToolchainKudu()]


def main():

@@ -608,8 +489,6 @@ def main():
  Hadoop component packages are only downloaded if $DOWNLOAD_CDH_COMPONENTS is true. The
  versions used for Hadoop components come from the CDP versions based on the
  $CDP_BUILD_NUMBER. CDP Hadoop packages are downloaded into $CDP_COMPONENTS_HOME.
  If Kudu is not supported on this platform (or KUDU_IS_SUPPORTED=false), then this
  builds a Kudu stub to allow for compilation without Kudu support.
  """
  logging.basicConfig(level=logging.INFO,
      format='%(asctime)s %(threadName)s %(levelname)s: %(message)s')

@@ -624,14 +503,12 @@ def main():
  # Create the toolchain directory if necessary
  create_directory_from_env_var("IMPALA_TOOLCHAIN_PACKAGES_HOME")

  use_kudu_stub = os.environ["KUDU_IS_SUPPORTED"] != "true"

  downloads = []
  downloads += get_toolchain_downloads()
  kudu_download = None
  if os.getenv("DOWNLOAD_CDH_COMPONENTS", "false") == "true":
    create_directory_from_env_var("CDP_COMPONENTS_HOME")
    downloads += get_kudu_downloads(use_kudu_stub)
    downloads += get_kudu_downloads()
    downloads += get_hadoop_downloads()

  components_needing_download = [d for d in downloads if d.needs_download()]

@@ -641,11 +518,5 @@ def main():

  execute_many(download, components_needing_download)

  if use_kudu_stub:
    # Find the kudu package directory and the gcc package directory
    kudu_download = [d for d in downloads if d.name == 'kudu'][0]
    gcc_download = [d for d in downloads if d.name == 'gcc'][0]
    build_kudu_stub(kudu_download.pkg_directory(), gcc_download.pkg_directory())


if __name__ == "__main__": main()
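main() funnels every package through execute_many(download, ...), whose docstring above describes a thread pool. A rough, hedged equivalent of that pattern, with a dummy work function standing in for the real download step (names here are illustrative, not the script's):

from multiprocessing.pool import ThreadPool

def execute_many_sketch(f, args, num_threads=8):
    """Run f(a) for each a in args on a small thread pool and return the results.

    Illustrative only; the real script wraps this with its own error handling.
    """
    pool = ThreadPool(processes=num_threads)
    try:
        return pool.map(f, args, 1)
    finally:
        pool.close()
        pool.join()

# Example: "download" three fake packages concurrently.
print(execute_many_sketch(lambda name: "fetched %s" % name, ["gcc", "kudu", "binutils"]))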
@@ -643,19 +643,6 @@ fi
# overall build type) and does not apply when using a local Kudu build.
export USE_KUDU_DEBUG_BUILD=${USE_KUDU_DEBUG_BUILD-false}

# Kudu doesn't compile on some old Linux distros. KUDU_IS_SUPPORTED enables building Kudu
# into the backend.
if [[ -z "${KUDU_IS_SUPPORTED-}" ]]; then
  if [[ -n "$KUDU_BUILD_DIR" ]]; then
    KUDU_IS_SUPPORTED=true
  elif $IS_OSX; then
    KUDU_IS_SUPPORTED=false
  else
    KUDU_IS_SUPPORTED=true
  fi
fi
export KUDU_IS_SUPPORTED

export IMPALA_KUDU_VERSION=${IMPALA_KUDU_VERSION-"d652cab17"}
export IMPALA_KUDU_JAVA_VERSION=${IMPALA_KUDU_JAVA_VERSION-"1.13.0-SNAPSHOT"}
export IMPALA_KUDU_HOME=${IMPALA_TOOLCHAIN_PACKAGES_HOME}/kudu-$IMPALA_KUDU_VERSION
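For reference, the defaulting rule the hunk above deletes is small enough to restate. A minimal Python rendering of that removed bash logic (behaviour paraphrased for illustration, not part of the commit):

import os
import sys

def default_kudu_is_supported(env=os.environ):
    """Paraphrase of the removed bash default: honour an explicit setting,
    otherwise enable Kudu everywhere except macOS without a local Kudu build."""
    explicit = env.get("KUDU_IS_SUPPORTED")
    if explicit:
        return explicit == "true"
    if env.get("KUDU_BUILD_DIR"):   # a local Kudu build always counted as supported
        return True
    if sys.platform == "darwin":    # the $IS_OSX case in the script
        return False
    return True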
@@ -233,7 +233,7 @@ error_codes = (

  ("IMPALA_KUDU_TYPE_MISSING", 73, "Impala type $0 is not available in Kudu."),

  ("KUDU_NOT_SUPPORTED_ON_OS", 74, "Kudu is not supported on this operating system."),
  ("KUDU_NOT_SUPPORTED_ON_OS", 74, "Not in use."),

  ("KUDU_NOT_ENABLED", 75, "Kudu features are disabled by the startup flag "
      "--disable_kudu."),
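The retired error keeps its numeric slot and only its message is blanked, presumably so the codes that follow do not shift. A hedged sketch of that property (entries copied from the hunk; the lookup helper is hypothetical):

# Each entry is (symbolic name, stable numeric code, message template).
error_codes = (
    ("IMPALA_KUDU_TYPE_MISSING", 73, "Impala type $0 is not available in Kudu."),
    ("KUDU_NOT_SUPPORTED_ON_OS", 74, "Not in use."),
    ("KUDU_NOT_ENABLED", 75, "Kudu features are disabled by the startup flag "
     "--disable_kudu."),
)

def message_for(code):
    """Hypothetical helper: map a numeric code back to its message template."""
    return next(msg for _name, num, msg in error_codes if num == code)

# Code 75 still resolves to the same message even though slot 74 is retired.
assert message_for(75).startswith("Kudu features are disabled")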
@@ -198,18 +198,16 @@ function start_minicluster {
  # presumably because there's only one layer involved. See
  # https://issues.apache.org/jira/browse/KUDU-1419.
  set -x
  if [ "true" = $KUDU_IS_SUPPORTED ]; then
    pushd /home/impdev/Impala/testdata
    for x in cluster/cdh*/node-*/var/lib/kudu/*/wal; do
      echo $x
      # This mv takes time, as it's actually copying into the latest layer.
      mv $x $x-orig
      mkdir $x
      mv $x-orig/* $x
      rmdir $x-orig
    done
    popd
  fi
  pushd /home/impdev/Impala/testdata
  for x in cluster/cdh*/node-*/var/lib/kudu/*/wal; do
    echo $x
    # This mv takes time, as it's actually copying into the latest layer.
    mv $x $x-orig
    mkdir $x
    mv $x-orig/* $x
    rmdir $x-orig
  done
  popd

  # Wait for postgresql to really start; if it doesn't, Hive Metastore will fail to start.
  for i in {1..120}; do
@@ -51,9 +51,6 @@ public class RuntimeEnv {
  public void setNumCores(int numCores) { this.numCores_ = numCores; }
  public void setTestEnv(boolean v) { isTestEnv_ = v; }
  public boolean isTestEnv() { return isTestEnv_; }
  public boolean isKuduSupported() {
    return "true".equals(System.getenv("KUDU_IS_SUPPORTED"));
  }
  public boolean isMtDopValidationEnabled() { return enableMtDopValidation_; }
  public void setEnableMtDopValidation(boolean v) { enableMtDopValidation_ = v; }

@@ -1044,10 +1044,8 @@ public class AnalyzeDDLTest extends FrontendTestBase {
        "int_col ('numNulls'='2')");
    AnalyzesOk("alter table functional.alltypes_datasource set column stats " +
        "int_col ('numDVs'='2')");
    if (RuntimeEnv.INSTANCE.isKuduSupported()) {
      AnalyzesOk("alter table functional_kudu.testtbl set column stats " +
          "name ('numNulls'='2')");
    }
    AnalyzesOk("alter table functional_kudu.testtbl set column stats " +
        "name ('numNulls'='2')");

    // Table does not exist.
    AnalysisError("alter table bad_tbl set column stats int_col ('numNulls'='2')",
@@ -72,7 +72,6 @@ public class AnalyzeKuduDDLTest extends FrontendTestBase {
  }

  private void testDDlsOnKuduTable(boolean isExternalPurgeTbl) {
    TestUtils.assumeKuduIsSupported();
    // Test primary keys and partition by clauses
    AnalyzesOk("create table tab (x int primary key) partition by hash(x) " +
        "partitions 8 stored as kudu", isExternalPurgeTbl);

@@ -524,7 +523,6 @@ public class AnalyzeKuduDDLTest extends FrontendTestBase {

  @Test
  public void TestCreateExternalKuduTable() {
    TestUtils.assumeKuduIsSupported();
    final String kuduMasters = catalog_.getDefaultKuduMasterHosts();
    AnalyzesOk("create external table t stored as kudu " +
        "tblproperties('kudu.table_name'='t')");

@@ -592,7 +590,6 @@ public class AnalyzeKuduDDLTest extends FrontendTestBase {

  @Test
  public void TestAlterKuduTable() {
    TestUtils.assumeKuduIsSupported();
    // ALTER TABLE ADD/DROP range partitions
    String[] addDrop = {"add if not exists", "add", "drop if exists", "drop"};
    for (String kw: addDrop) {

@@ -30,7 +30,6 @@ public class AnalyzeModifyStmtsTest extends AnalyzerTest {

  @Test
  public void TestFromListAliases() {
    TestUtils.assumeKuduIsSupported();
    AnalysisError("update a.name set a.name = 'Oskar' from functional_kudu.testtbl a",
        "'a.name' is not a table alias. Using the FROM clause requires the target table" +
        " to be a table alias.");

@@ -83,7 +82,6 @@ public class AnalyzeModifyStmtsTest extends AnalyzerTest {

  @Test
  public void TestUpdate() {
    TestUtils.assumeKuduIsSupported();
    AnalyzesOk("update functional_kudu.dimtbl set name = 'Oskar'");
    // Correct default database resolution
    AnalyzesOk("update dimtbl set name = 'Oskar'",

@@ -142,7 +140,6 @@ public class AnalyzeModifyStmtsTest extends AnalyzerTest {

  @Test
  public void TestWhereClause() {
    TestUtils.assumeKuduIsSupported();
    // With where clause
    AnalyzesOk("update functional_kudu.dimtbl set name = '10' where name = '11'");
    // Complex where clause

@@ -156,7 +153,6 @@ public class AnalyzeModifyStmtsTest extends AnalyzerTest {

  @Test
  public void TestWithSourceStmtRewrite() {
    TestUtils.assumeKuduIsSupported();
    // No subqueries in set statement as we cannot translate them into subqueries in
    // the select list
    AnalysisError(

@@ -178,7 +174,6 @@ public class AnalyzeModifyStmtsTest extends AnalyzerTest {

  @Test
  public void TestWithJoin() {
    TestUtils.assumeKuduIsSupported();
    // Simple Join
    AnalyzesOk(
        "update a set a.name = b.name FROM functional_kudu.testtbl a join functional" +

@@ -214,14 +209,12 @@ public class AnalyzeModifyStmtsTest extends AnalyzerTest {

  @Test
  public void TestNoViewModification() {
    TestUtils.assumeKuduIsSupported();
    AnalysisError("update functional.alltypes_view set id = 10", "Cannot modify view");
    AnalysisError("delete functional.alltypes_view", "Cannot modify view");
  }

  @Test
  public void TestNoNestedTypes() {
    TestUtils.assumeKuduIsSupported();
    AnalysisError(
        "update a set c.item = 10 FROM functional_kudu.testtbl a, functional" +
        ".allcomplextypes b, b.int_array_col c",
@@ -3441,14 +3441,12 @@ public class AnalyzeStmtsTest extends AnalyzerTest {
        "uncorrelated one 'functional.alltypestiny':\n" +
        "SELECT item FROM b.int_array_col, functional.alltypestiny");

    if (RuntimeEnv.INSTANCE.isKuduSupported()) {
      // Key columns missing from permutation
      AnalysisError("insert into functional_kudu.testtbl(zip) values(1)",
          "All primary key columns must be specified for INSERTing into Kudu tables. " +
          "Missing columns are: id");
      // Mixed column name case, on both primary key and non-primary key cols.
      AnalyzesOk("insert into functional_kudu.alltypes (ID, BOOL_COL) values (0, true)");
    }
    // Key columns missing from permutation
    AnalysisError("insert into functional_kudu.testtbl(zip) values(1)",
        "All primary key columns must be specified for INSERTing into Kudu tables. " +
        "Missing columns are: id");
    // Mixed column name case, on both primary key and non-primary key cols.
    AnalyzesOk("insert into functional_kudu.alltypes (ID, BOOL_COL) values (0, true)");

    addTestDb("d", null);
    addTestTable("create table d.dec1 (c decimal(38,37)) location '/'");

@@ -23,7 +23,6 @@ import org.junit.Test;
public class AnalyzeUpsertStmtTest extends AnalyzerTest {
  @Test
  public void TestUpsert() {
    TestUtils.assumeKuduIsSupported();
    // VALUES clause
    AnalyzesOk("upsert into table functional_kudu.testtbl values(1, 'a', 1)");
    AnalyzesOk("upsert into table functional_kudu.testtbl(id) values(1)");

@@ -37,7 +37,6 @@ import org.junit.Test;
public class AuditingKuduTest extends FrontendTestBase {
  @Test
  public void TestKuduStatements() throws AuthorizationException, AnalysisException {
    TestUtils.assumeKuduIsSupported();
    // Select
    Set<TAccessEvent> accessEvents =
        AnalyzeAccessEvents("select * from functional_kudu.testtbl");
@@ -160,18 +160,16 @@ public class ExprRewriterTest extends AnalyzerTest {
    RewritesOk("insert into functional.alltypes (id, int_col, float_col, bigint_col) " +
        "partition(year=2009,month=10) " + stmt_, 23, 11);

    if (RuntimeEnv.INSTANCE.isKuduSupported()) {
      // Update.
      RewritesOk("update t2 set name = 'test' from " +
          "functional.alltypes t1 join functional_kudu.dimtbl t2 on (t1.id = t2.id) " +
          "where t2.id < 10", 10, 5);
      RewritesOk("update functional_kudu.dimtbl set name = 'test', zip = 4711 " +
          "where exists (" + stmt_ + ")", 28, 16);
      // Delete.
      RewritesOk("delete a from " +
          "functional_kudu.testtbl a join functional.testtbl b on a.zip = b.zip", 4, 2);
      RewritesOk("delete functional_kudu.testtbl where exists (" + stmt_ + ")", 24, 12);
    }
    // Update.
    RewritesOk("update t2 set name = 'test' from " +
        "functional.alltypes t1 join functional_kudu.dimtbl t2 on (t1.id = t2.id) " +
        "where t2.id < 10", 10, 5);
    RewritesOk("update functional_kudu.dimtbl set name = 'test', zip = 4711 " +
        "where exists (" + stmt_ + ")", 28, 16);
    // Delete.
    RewritesOk("delete a from " +
        "functional_kudu.testtbl a join functional.testtbl b on a.zip = b.zip", 4, 2);
    RewritesOk("delete functional_kudu.testtbl where exists (" + stmt_ + ")", 24, 12);
  }

  /**

@@ -327,27 +325,25 @@ public class ExprRewriterTest extends AnalyzerTest {
        "INSERT INTO TABLE functional.alltypes(id) " +
        "PARTITION (`year`=2009, `month`=10) SELECT 2");

    if (RuntimeEnv.INSTANCE.isKuduSupported()) {
      // Update.
      assertToSql(ctx,
          "update functional_kudu.alltypes "
          + "set string_col = 'test' where id = (select 1 + 1)",
          "UPDATE functional_kudu.alltypes SET string_col = 'test' "
          + "FROM functional_kudu.alltypes WHERE id = (SELECT 1 + 1)",
          "UPDATE functional_kudu.alltypes SET string_col = 'test' "
          + "FROM functional_kudu.alltypes LEFT SEMI JOIN (SELECT 2) `$a$1` (`$c$1`) "
          + "ON id = `$a$1`.`$c$1` WHERE id = (SELECT 2)");
    // Update.
    assertToSql(ctx,
        "update functional_kudu.alltypes "
        + "set string_col = 'test' where id = (select 1 + 1)",
        "UPDATE functional_kudu.alltypes SET string_col = 'test' "
        + "FROM functional_kudu.alltypes WHERE id = (SELECT 1 + 1)",
        "UPDATE functional_kudu.alltypes SET string_col = 'test' "
        + "FROM functional_kudu.alltypes LEFT SEMI JOIN (SELECT 2) `$a$1` (`$c$1`) "
        + "ON id = `$a$1`.`$c$1` WHERE id = (SELECT 2)");

      // Delete
      assertToSql(ctx,
          "delete functional_kudu.alltypes "
          + "where id = (select 1 + 1)",
          "DELETE FROM functional_kudu.alltypes "
          + "WHERE id = (SELECT 1 + 1)",
          "DELETE functional_kudu.alltypes "
          + "FROM functional_kudu.alltypes LEFT SEMI JOIN (SELECT 2) `$a$1` (`$c$1`) "
          + "ON id = `$a$1`.`$c$1` WHERE id = (SELECT 2)");
    }
    // Delete
    assertToSql(ctx,
        "delete functional_kudu.alltypes "
        + "where id = (select 1 + 1)",
        "DELETE FROM functional_kudu.alltypes "
        + "WHERE id = (SELECT 1 + 1)",
        "DELETE functional_kudu.alltypes "
        + "FROM functional_kudu.alltypes LEFT SEMI JOIN (SELECT 2) `$a$1` (`$c$1`) "
        + "ON id = `$a$1`.`$c$1` WHERE id = (SELECT 2)");

    // We don't do any rewrite for WITH clause.
    StatementBase stmt = (StatementBase) AnalyzesOk("with t as (select 1 + 1) " +
@@ -1872,7 +1872,6 @@ public class ParserTest extends FrontendTestBase {

  @Test
  public void TestKuduUpdate() {
    //TestUtils.assumeKuduIsSupported();
    ParserError("update (select * from functional_kudu.testtbl) a set name = '10'");
  }

@@ -1063,7 +1063,6 @@ public class ToSqlTest extends FrontendTestBase {

  @Test
  public void TestUpdate() {
    TestUtils.assumeKuduIsSupported();
    testToSql("update functional_kudu.dimtbl set name = '10' where name < '11'",
        "UPDATE functional_kudu.dimtbl SET name = '10' FROM functional_kudu.dimtbl " +
        "WHERE name < '11'");

@@ -1086,7 +1085,6 @@ public class ToSqlTest extends FrontendTestBase {

  @Test
  public void TestDelete() {
    TestUtils.assumeKuduIsSupported();
    testToSql("delete functional_kudu.testtbl where zip = 10",
        "DELETE FROM functional_kudu.testtbl WHERE zip = 10");
    testToSql("delete from functional_kudu.testtbl where zip = 10",

@@ -611,7 +611,6 @@ public class PlannerTest extends PlannerTestBase {

  @Test
  public void testKudu() {
    Assume.assumeTrue(RuntimeEnv.INSTANCE.isKuduSupported());
    TQueryOptions options = defaultQueryOptions();
    options.setEnabled_runtime_filter_types(TEnabledRuntimeFilterTypes.ALL);
    addTestDb("kudu_planner_test", "Test DB for Kudu Planner.");
@@ -622,13 +621,11 @@ public class PlannerTest extends PlannerTestBase {

  @Test
  public void testKuduUpsert() {
    Assume.assumeTrue(RuntimeEnv.INSTANCE.isKuduSupported());
    runPlannerTestFile("kudu-upsert");
  }

  @Test
  public void testKuduUpdate() {
    Assume.assumeTrue(RuntimeEnv.INSTANCE.isKuduSupported());
    TQueryOptions options = defaultQueryOptions();
    options.setEnabled_runtime_filter_types(TEnabledRuntimeFilterTypes.ALL);
    runPlannerTestFile("kudu-update", options);

@@ -636,13 +633,11 @@ public class PlannerTest extends PlannerTestBase {

  @Test
  public void testKuduDelete() {
    Assume.assumeTrue(RuntimeEnv.INSTANCE.isKuduSupported());
    runPlannerTestFile("kudu-delete");
  }

  @Test
  public void testKuduSelectivity() {
    Assume.assumeTrue(RuntimeEnv.INSTANCE.isKuduSupported());
    TQueryOptions options = defaultQueryOptions();
    options.setExplain_level(TExplainLevel.VERBOSE);
    runPlannerTestFile("kudu-selectivity", options);

@@ -650,7 +645,6 @@ public class PlannerTest extends PlannerTestBase {

  @Test
  public void testKuduTpch() {
    Assume.assumeTrue(RuntimeEnv.INSTANCE.isKuduSupported());
    TQueryOptions options = defaultQueryOptions();
    options.setEnabled_runtime_filter_types(TEnabledRuntimeFilterTypes.ALL);
    runPlannerTestFile("tpch-kudu", options,
@@ -108,9 +108,7 @@ public class PlannerTestBase extends FrontendTestBase {
    updateReq.setNum_executors(3);
    ExecutorMembershipSnapshot.update(updateReq);

    if (RuntimeEnv.INSTANCE.isKuduSupported()) {
      kuduClient_ = new KuduClient.KuduClientBuilder("127.0.0.1:7051").build();
    }
    kuduClient_ = new KuduClient.KuduClientBuilder("127.0.0.1:7051").build();
    String logDir = System.getenv("IMPALA_FE_TEST_LOGS_DIR");
    if (logDir == null) logDir = "/tmp";
    outDir_ = Paths.get(logDir, "PlannerTest");

@@ -394,10 +394,6 @@ public class TestUtils {
    return sw.toString();
  }

  public static void assumeKuduIsSupported() {
    Assume.assumeTrue(RuntimeEnv.INSTANCE.isKuduSupported());
  }

  /**
   * Returns the hive major version from environment
   */
@@ -277,14 +277,10 @@ def install_adls_deps():
def install_kudu_client_if_possible():
  '''Installs the Kudu python module if possible, which depends on the toolchain and
  the compiled requirements in compiled-requirements.txt. If the toolchain isn't
  available, nothing will be done. Also nothing will be done if the Kudu client lib
  required by the module isn't available (as determined by KUDU_IS_SUPPORTED)'''
  available, nothing will be done.'''
  if reqs_are_installed(KUDU_REQS_PATH):
    LOG.debug("Skipping Kudu: matching kudu-installed-requirements.txt found")
    return
  if os.environ["KUDU_IS_SUPPORTED"] != "true":
    LOG.debug("Skipping Kudu: Kudu is not supported")
    return
  kudu_base_dir = os.environ["IMPALA_KUDU_HOME"]
  if not os.path.exists(kudu_base_dir):
    LOG.debug("Skipping Kudu: %s doesn't exist" % kudu_base_dir)
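After this change the remaining guards are only the requirements file and the toolchain directory; a caller that wants to know whether the Kudu Python client actually ended up installed can simply probe for the module. A small sketch (assumes Python 3 and that the client package imports as "kudu"; not part of the diff):

import importlib.util

def kudu_client_installed():
    """Return True if the Kudu Python client module can be imported.

    Sketch only: probes the 'kudu' module that the virtualenv step installs;
    it does not check that a Kudu cluster is actually reachable.
    """
    return importlib.util.find_spec("kudu") is not None

if __name__ == "__main__":
    print("kudu python client available:", kudu_client_installed())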
testdata/bin/compute-table-stats.sh
@@ -44,7 +44,4 @@ fi
${COMPUTE_STATS_SCRIPT} --db_names=tpch,tpch_parquet,tpch_orc_def \
    --table_names=customer,lineitem,nation,orders,part,partsupp,region,supplier
${COMPUTE_STATS_SCRIPT} --db_names=tpch_nested_parquet,tpcds,tpcds_parquet

if "$KUDU_IS_SUPPORTED"; then
  ${COMPUTE_STATS_SCRIPT} --db_names=functional_kudu,tpch_kudu
fi
${COMPUTE_STATS_SCRIPT} --db_names=functional_kudu,tpch_kudu
testdata/bin/create-load-data.sh
@@ -638,7 +638,7 @@ elif [ "${TARGET_FILESYSTEM}" = "hdfs" ]; then
  load-data "functional-query" "core" "hbase/none"
fi

if [[ $SKIP_METADATA_LOAD -eq 1 && $KUDU_IS_SUPPORTED ]]; then
if [[ $SKIP_METADATA_LOAD -eq 1 ]]; then
  # Tests depend on the kudu data being clean, so load the data from scratch.
  # This is only necessary if this is not a full dataload, because a full dataload
  # already loads Kudu functional and TPC-H tables from scratch.
testdata/cluster/admin
@@ -62,9 +62,7 @@ else
  # the other services could work after the proper configuration changes.
  SUPPORTED_SERVICES=()
fi
if $KUDU_IS_SUPPORTED; then
  SUPPORTED_SERVICES+=(kudu)
fi
SUPPORTED_SERVICES+=(kudu)

# All DataNodes and NodeManagers need a unique but fixed address. The IP is fixed at
# 127.0.0.1, so the only difference is the port. The address must be fixed because it is
@@ -75,9 +75,6 @@ class KuduTestSuite(ImpalaTestSuite):

  @classmethod
  def setup_class(cls):
    if os.environ["KUDU_IS_SUPPORTED"] == "false":
      pytest.skip("Kudu is not supported")

    super(KuduTestSuite, cls).setup_class()

  @classmethod
@@ -106,8 +106,6 @@ class SkipIfADLS:
      reason="The client is slow to realize changes to file metadata")

class SkipIfKudu:
  unsupported_env = pytest.mark.skipif(os.environ["KUDU_IS_SUPPORTED"] == "false",
      reason="Kudu is not supported in this environment")
  no_hybrid_clock = pytest.mark.skipif(
      get_kudu_master_flag("--use_hybrid_clock") == "false",
      reason="Test relies on --use_hybrid_clock=true in Kudu.")

@@ -118,8 +116,6 @@
class SkipIf:
  skip_hbase = pytest.mark.skipif(pytest.config.option.skip_hbase,
      reason="--skip_hbase argument specified")
  kudu_not_supported = pytest.mark.skipif(os.environ["KUDU_IS_SUPPORTED"] == "false",
      reason="Kudu is not supported")
  not_s3 = pytest.mark.skipif(not IS_S3, reason="S3 Filesystem needed")
  not_hdfs = pytest.mark.skipif(not IS_HDFS, reason="HDFS Filesystem needed")
  not_ec = pytest.mark.skipif(not IS_EC, reason="Erasure Coding needed")
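The removed markers follow the standard pytest pattern: the skip condition is evaluated from the environment when the module is imported. A minimal, self-contained sketch of how such a marker works (KUDU_IS_SUPPORTED is the variable this commit retires; the test itself is illustrative):

import os
import pytest

# Condition is evaluated once, at collection time.
kudu_not_supported = pytest.mark.skipif(
    os.environ.get("KUDU_IS_SUPPORTED", "true") == "false",
    reason="Kudu is not supported")

@kudu_not_supported
def test_kudu_roundtrip():
    # Runs only when the environment says Kudu is available.
    assert True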
@@ -29,9 +29,7 @@ WORKLOAD_DIR = os.environ['IMPALA_WORKLOAD_DIR']
# of what specific table format to target along with the exec options (num_nodes, etc)
# to use when running the query.
class TableFormatInfo(object):
  KNOWN_FILE_FORMATS = ['text', 'seq', 'rc', 'parquet', 'orc', 'avro', 'hbase']
  if os.environ['KUDU_IS_SUPPORTED'] == 'true':
    KNOWN_FILE_FORMATS.append('kudu')
  KNOWN_FILE_FORMATS = ['text', 'seq', 'rc', 'parquet', 'orc', 'avro', 'hbase', 'kudu']
  KNOWN_COMPRESSION_CODECS = ['none', 'snap', 'gzip', 'bzip', 'def', 'zstd', 'lz4']
  KNOWN_COMPRESSION_TYPES = ['none', 'block', 'record']

@@ -255,10 +253,6 @@ def load_table_info_dimension(workload_name, exploration_strategy, file_formats=
    vals = dict((key.strip(), value.strip()) for key, value in\
        (item.split(':') for item in line.split(',')))

    # Skip Kudu if Kudu is not supported (IMPALA-4287).
    if os.environ['KUDU_IS_SUPPORTED'] != 'true' and vals['file_format'] == 'kudu':
      continue

    # If only loading specific file formats skip anything that doesn't match
    if file_formats is not None and vals['file_format'] not in file_formats:
      continue
@@ -300,28 +300,27 @@ class ImpalaDockerEnv(object):
    # incompatibility with Kudu. First we have to get test data off the container, store
    # it somewhere, and then start another container using docker -v and mount the test
    # data as a volume to bypass AUFS. See also the README for Leopard.
    if os.environ.get('KUDU_IS_SUPPORTED') == 'true':
      LOG.info('Warming testdata cluster external volume')
      self.start_new_container()
      with settings(
          warn_only=True,
          host_string=self.host,
          user=self.host_username,
      ):
        sudo(
            'mkdir -p {host_testdata_path} && '
            'rsync -e "ssh -i {priv_key} -o StrictHostKeyChecking=no '
            '' '-o UserKnownHostsFile=/dev/null -p {ssh_port}" '
            '--delete --archive --verbose --progress '
            '{user}@127.0.0.1:{container_testdata_path} {host_testdata_path} && '
            'chown -R {uid}:{gid} {host_testdata_path}'.format(
                host_testdata_path=HOST_TESTDATA_EXTERNAL_VOLUME_PATH,
                priv_key=HOST_TO_DOCKER_SSH_KEY,
                ssh_port=self.ssh_port,
                uid=DOCKER_IMPALA_USER_UID,
                gid=DOCKER_IMPALA_USER_GID,
                user=DOCKER_USER_NAME,
                container_testdata_path=DOCKER_TESTDATA_VOLUME_PATH))
    LOG.info('Warming testdata cluster external volume')
    self.start_new_container()
    with settings(
        warn_only=True,
        host_string=self.host,
        user=self.host_username,
    ):
      sudo(
          'mkdir -p {host_testdata_path} && '
          'rsync -e "ssh -i {priv_key} -o StrictHostKeyChecking=no '
          '' '-o UserKnownHostsFile=/dev/null -p {ssh_port}" '
          '--delete --archive --verbose --progress '
          '{user}@127.0.0.1:{container_testdata_path} {host_testdata_path} && '
          'chown -R {uid}:{gid} {host_testdata_path}'.format(
              host_testdata_path=HOST_TESTDATA_EXTERNAL_VOLUME_PATH,
              priv_key=HOST_TO_DOCKER_SSH_KEY,
              ssh_port=self.ssh_port,
              uid=DOCKER_IMPALA_USER_UID,
              gid=DOCKER_IMPALA_USER_GID,
              user=DOCKER_USER_NAME,
              container_testdata_path=DOCKER_TESTDATA_VOLUME_PATH))
    self.stop_docker()
    volume_map = {
        HOST_TESTDATA_EXTERNAL_VOLUME_PATH: DOCKER_TESTDATA_VOLUME_PATH,
@@ -308,7 +308,6 @@ class TestDdlStatements(TestDdlBase):
    self.run_test_case('QueryTest/create-table-as-select', vector,
        use_db=unique_database, multiple_impalad=self._use_multiple_impalad(vector))

  @SkipIf.kudu_not_supported
  @UniqueDatabase.parametrize(sync_ddl=True)
  @SkipIfKudu.no_hybrid_clock
  def test_create_kudu(self, vector, unique_database):

@@ -277,7 +277,6 @@ class TestInfraCompat(ImpalaTestSuite):
      'l_receiptdate', 'l_shipinstruct', 'l_shipmode',
      'l_comment')}]

  @SkipIf.kudu_not_supported
  @pytest.mark.parametrize('table_primary_keys_map', TABLE_PRIMARY_KEYS_MAPS)
  def test_primary_key_parse(self, impala_testinfra_cursor, table_primary_keys_map):
    """

@@ -288,7 +287,6 @@ class TestInfraCompat(ImpalaTestSuite):
    assert impala_testinfra_cursor._fetch_primary_key_names(
        table_primary_keys_map['table']) == table_primary_keys_map['primary_keys']

  @SkipIf.kudu_not_supported
  @pytest.mark.parametrize('table_primary_keys_map', TABLE_PRIMARY_KEYS_MAPS)
  def test_load_table_with_primary_key_attr(self, impala_testinfra_cursor,
      table_primary_keys_map):
@@ -16,7 +16,7 @@
# under the License.

from tests.common.impala_test_suite import ImpalaTestSuite
from tests.common.skip import SkipIfEC, SkipIfKudu, SkipIfLocal, SkipIfS3, SkipIfABFS, \
from tests.common.skip import SkipIfEC, SkipIfLocal, SkipIfS3, SkipIfABFS, \
    SkipIfADLS
from tests.common.test_dimensions import create_parquet_dimension

@@ -52,7 +52,6 @@ class TestResourceLimits(ImpalaTestSuite):
  def test_resource_limits_hbase(self, vector):
    self.run_test_case('QueryTest/query-resource-limits-hbase', vector)

  @SkipIfKudu.unsupported_env
  @SkipIfLocal.multiple_impalad
  def test_resource_limits_kudu(self, vector):
    self.run_test_case('QueryTest/query-resource-limits-kudu', vector)

@@ -754,7 +754,6 @@ class TestImpalaShell(ImpalaTestSuite):
        (expected_rows_modified, expected_row_errors)
    assert expected_output in results.stderr, results.stderr

  @SkipIf.kudu_not_supported
  def test_kudu_dml_reporting(self, vector, unique_database):
    db = unique_database
    run_impala_shell_cmd(vector, [