IMPALA-12401: Support more info types for HS2 GetInfo() API

This patch adds support for 40+ additional TGetInfoType values in the
HiveServer2 GetInfo() API, improving ODBC/JDBC driver compatibility.

Previously, only 3 info types were supported (CLI_SERVER_NAME,
CLI_DBMS_NAME, CLI_DBMS_VER).

The implementation follows the ODBC CLI specification and matches the
behavior of Hive's GetInfo implementation where applicable.

Testing:
- Added unit tests in test_hs2.py for new info types
- Tests verify correct return values and data types for each info type

Change-Id: I1ce5f2b9dcc2e4633b4679b002f57b5b4ea3e8bf
Reviewed-on: http://gerrit.cloudera.org:8080/23528
Tested-by: Impala Public Jenkins <impala-public-jenkins@cloudera.com>
Reviewed-by: Csaba Ringhofer <csringhofer@cloudera.com>
This commit is contained in:
Arnab Karmakar
2025-10-11 23:29:33 +05:30
committed by Csaba Ringhofer
parent f2243b76b5
commit 068158e495
10 changed files with 512 additions and 19 deletions

View File

@@ -36,6 +36,7 @@ import org.apache.hadoop.security.ShellBasedUnixGroupsMapping;
import org.apache.hadoop.security.ShellBasedUnixGroupsNetgroupMapping;
import org.apache.impala.analysis.DescriptorTable;
import org.apache.impala.analysis.ToSqlUtils;
import org.apache.impala.analysis.SqlScanner;
import org.apache.impala.authentication.saml.WrappedWebContext;
import org.apache.impala.authorization.AuthorizationFactory;
import org.apache.impala.authorization.ImpalaInternalAdminUser;
@@ -125,6 +126,7 @@ import java.util.Enumeration;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.TimeZone;
/**
@@ -352,6 +354,34 @@ public class JniFrontend {
}
}
/**
 * Returns a comma-separated list of Impala SQL keywords that are not part of the
 * provided ODBC-reserved keywords CSV.
 *
 * @param odbcKeywordsCsvT serialized TStringLiteral containing the ODBC-reserved
 *     keywords as a comma-separated string; may be unset or empty
 * @return comma-separated, upper-cased Impala keywords absent from the ODBC list
 * @throws ImpalaException if the thrift argument cannot be deserialized
 */
public String getNonOdbcKeywords(byte[] odbcKeywordsCsvT) throws ImpalaException {
  final TStringLiteral odbcCsv = new TStringLiteral();
  JniUtil.deserializeThrift(protocolFactory_, odbcCsv, odbcKeywordsCsvT);
  String csv = odbcCsv.isSetValue()
      ? StandardCharsets.UTF_8.decode(odbcCsv.value).toString()
      : "";
  Set<String> excludes = new HashSet<>();
  // 'csv' is never null here (the ternary above always yields a String), so only
  // an emptiness check is needed.
  if (!csv.isEmpty()) {
    for (String s : csv.split(",")) {
      String trimmed = s.trim();
      // Skip empty tokens (e.g. from ",," or a trailing comma). Upper-case with
      // Locale.ROOT so keyword matching is stable regardless of the JVM default
      // locale (avoids the Turkish dotless-i problem).
      if (!trimmed.isEmpty()) excludes.add(trimmed.toUpperCase(Locale.ROOT));
    }
  }
  StringBuilder sb = new StringBuilder();
  for (String kw : SqlScanner.getKeywords()) {
    String upper = kw.toUpperCase(Locale.ROOT);
    // Exclude symbolic tokens like &&, ||
    if (upper.isEmpty() || !Character.isLetter(upper.charAt(0))) continue;
    if (excludes.contains(upper)) continue;
    if (sb.length() > 0) sb.append(",");
    sb.append(upper);
  }
  return sb.toString();
}
/**
* Returns files info of a table or partition.
* The argument is a serialized TShowFilesParams object.

View File

@@ -27,6 +27,7 @@ import java.util.Set;
import java.util.Iterator;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Collections;
import com.google.common.base.Preconditions;
import org.apache.impala.analysis.SqlParserSymbols;
@@ -444,6 +445,11 @@ import org.apache.impala.thrift.TReservedWordsVersion;
return token != null && keywordMap.containsKey(token.toLowerCase());
}
/** Exposes the scanner's keyword names as a read-only set backed by the keyword map. */
public static Set<String> getKeywords() {
  Set<String> names = keywordMap.keySet();
  return Collections.unmodifiableSet(names);
}
// Builds a parser Symbol for the current token, converting the scanner's
// 0-based yyline/yycolumn coordinates to 1-based positions.
private Symbol newToken(int id, Object value) {
  int line = yyline + 1;
  int col = yycolumn + 1;
  return new Symbol(id, line, col, value);
}