Files
impala/cmake_modules/FindHDFS.cmake
Martin Grund 81f247b171 Optional Impala Toolchain
This patch allows to optionally enable the new Impala binary
toolchain. For now there are now major version differences in the
toolchain dependencies and what is currently kept in thirdparty.

To enable the toolchain, export the variable IMPALA_TOOLCHAIN to the
folder where the binaries are available.

In addition this patch moves gutil from the thirdparty directory into
the source tree of be/src to allow easy propagation of compiler and
linker flags. Furthermore, the thrift-cpp target was added as a
dependency to all targets that require the generated thrift sources to
be available before the build is started.

What is the new toolchain: The goal of the toolchain is to homogenize
the build environment and to make sure that Impala is build nearly
identical on every platform. To achieve this, we limit the flexibility
of using the systems host libraries and rather rely on a set of custom
produced binaries including the necessary compiler.

Change-Id: If2dac920520e4a18be2a9a75b3184a5bd97a065b
Reviewed-on: http://gerrit.cloudera.org:8080/427
Reviewed-by: Adar Dembo <adar@cloudera.com>
Tested-by: Internal Jenkins
Reviewed-by: Martin Grund <mgrund@cloudera.com>
2015-06-13 03:11:44 +00:00

73 lines
1.9 KiB
CMake

# - Find HDFS (hdfs.h and libhdfs.so)
# This module defines
# Hadoop_VERSION, version string of ant if found
# HDFS_INCLUDE_DIR, directory containing hdfs.h
# HDFS_LIBS, location of libhdfs.so
# HDFS_FOUND, If false, do not try to use ant
# hdfsstatic
exec_program(hadoop ARGS version OUTPUT_VARIABLE Hadoop_VERSION
RETURN_VALUE Hadoop_RETURN)
# currently only looking in HADOOP_HOME
find_path(HDFS_INCLUDE_DIR hdfs.h PATHS
$ENV{HADOOP_HOME}/include/
# make sure we don't accidentally pick up a different version
NO_DEFAULT_PATH
)
if ("${CMAKE_SIZEOF_VOID_P}" STREQUAL "8")
set(arch_hint "x64")
elseif ("$ENV{LIB}" MATCHES "(amd64|ia64)")
set(arch_hint "x64")
else ()
set(arch_hint "x86")
endif()
message(STATUS "Architecture: ${arch_hint}")
if ("${arch_hint}" STREQUAL "x64")
set(HDFS_LIB_PATHS $ENV{HADOOP_HOME}/lib/native)
else ()
set(HDFS_LIB_PATHS $ENV{HADOOP_HOME}/lib/native)
endif ()
message(STATUS "HDFS_LIB_PATHS: ${HDFS_LIB_PATHS}")
find_library(HDFS_LIB NAMES hdfs PATHS
${HDFS_LIB_PATHS}
# make sure we don't accidentally pick up a different version
NO_DEFAULT_PATH
)
if (HDFS_LIB)
set(HDFS_FOUND TRUE)
set(HDFS_LIBRARIES ${HDFS_LIB})
set(HDFS_STATIC_LIB ${HDFS_LIB_PATHS}/libhdfs.a)
add_library(HDFS_STATIC STATIC IMPORTED)
set_target_properties(HDFS_STATIC PROPERTIES IMPORTED_LOCATION ${HDFS_STATIC_LIB})
else ()
set(HDFS_FOUND FALSE)
endif ()
if (HDFS_FOUND)
if (NOT HDFS_FIND_QUIETLY)
message(STATUS "${Hadoop_VERSION}")
message(STATUS "HDFS_INCLUDE_DIR: ${HDFS_INCLUDE_DIR}")
message(STATUS "HDFS_LIBRARIES: ${HDFS_LIBRARIES}")
message(STATUS "HDFS_STATIC: ${HDFS_STATIC_LIB}")
endif ()
else ()
message(FATAL_ERROR "HDFS includes and libraries NOT found."
"Thrift support will be disabled (${Thrift_RETURN}, "
"${HDFS_INCLUDE_DIR}, ${HDFS_LIB})")
endif ()
mark_as_advanced(
HDFS_LIBRARIES
HDFS_INCLUDE_DIR
HDFS_STATIC
)