IMPALA-14092 Part 1: Prohibit unsupported operations for Paimon tables

This patch prohibits unsupported operations against Paimon
tables. Checks for all unsupported operations are added in
the analysis stage to prevent misoperation (see the sketch
below). Currently only CREATE/DROP statements are supported;
each prohibition will be removed once the corresponding
operation is actually supported.
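
The checks all follow the same guard pattern. A minimal sketch,
simplified from the analyze() methods changed below (AlterTableStmt
shown; table_ is the statement's resolved target table):

    if (table_ instanceof FePaimonTable) {
      throw new AnalysisException(String.format(
          "ALTER TABLE not allowed on PAIMON table: %s",
          table_.getFullName()));
    }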

TODO:
    - Patches pending submission:
        - Support JNI-based queries for Paimon data tables.
        - Support TPC-DS/TPC-H data loading
          for Paimon data tables.
        - Support virtual column queries on
          Paimon data tables.
        - Support queries with time travel.
        - Support queries on Paimon metadata tables.

Testing:
    - Add unit tests to AnalyzeDDLTest.java.
    - Add unit tests to AnalyzerTest.java.
    - Add test_paimon_negative and test_paimon_query to test_paimon.py.

Change-Id: Ie39fa4836cb1be1b1a53aa62d5c02d7ec8fdc9d7
Reviewed-on: http://gerrit.cloudera.org:8080/23530
Reviewed-by: Impala Public Jenkins <impala-public-jenkins@cloudera.com>
Tested-by: Impala Public Jenkins <impala-public-jenkins@cloudera.com>

@@ -708,6 +708,7 @@ struct TSystemTable {
1: required TSystemTableName table_name
}
// Represents a Paimon Catalog Type
enum TPaimonCatalog {
HADOOP_CATALOG = 0
HIVE_CATALOG = 1


@@ -19,6 +19,7 @@ package org.apache.impala.analysis;
import org.apache.impala.authorization.Privilege;
import org.apache.impala.catalog.FeView;
import org.apache.impala.catalog.paimon.FePaimonTable;
import org.apache.impala.common.AnalysisException;
import org.apache.impala.thrift.TAccessEvent;
import org.apache.impala.thrift.TAlterTableOrViewRenameParams;
@@ -81,6 +82,10 @@ public class AlterTableOrViewRenameStmt extends AlterTableStmt {
if (!(table_ instanceof FeView) && !renameTable_) {
throw new AnalysisException(String.format(
"ALTER VIEW not allowed on a table: %s", table_.getFullName()));
} else if (table_ instanceof FePaimonTable) {
throw new AnalysisException(String.format(
"ALTER TABLE RENAME statement not allowed on PAIMON table: %s",
table_.getFullName()));
}
newDbName_ = analyzer.getTargetDbName(newTableName_);
if (analyzer.dbContainsTable(newDbName_, newTableName_.getTbl(), Privilege.CREATE)) {


@@ -22,6 +22,7 @@ import java.util.List;
import org.apache.impala.authorization.Privilege;
import org.apache.impala.catalog.FeDataSourceTable;
import org.apache.impala.catalog.FeTable;
import org.apache.impala.catalog.paimon.FePaimonTable;
import org.apache.impala.common.AnalysisException;
import org.apache.impala.thrift.TAlterTableParams;
import org.apache.impala.thrift.TTableName;
@@ -119,6 +120,10 @@ public abstract class AlterTableStmt extends StatementBase implements SingleTabl
"ALTER TABLE %s not allowed on a table %s: %s", getOperation(),
(storedByJdbc ? "STORED BY JDBC": "PRODUCED BY DATA SOURCE"), tableName_));
}
if (table_ instanceof FePaimonTable) {
throw new AnalysisException(String.format(
"ALTER TABLE not allowed on PAIMON table: %s", table_.getFullName()));
}
}
protected void checkTransactionalTable() throws AnalysisException {


@@ -42,6 +42,7 @@ import org.apache.impala.catalog.HdfsFileFormat;
import org.apache.impala.catalog.HdfsTable;
import org.apache.impala.catalog.PartitionStatsUtil;
import org.apache.impala.catalog.Type;
import org.apache.impala.catalog.paimon.FePaimonTable;
import org.apache.impala.common.AnalysisException;
import org.apache.impala.common.PrintUtils;
import org.apache.impala.common.RuntimeEnv;
@@ -406,6 +407,10 @@ public class ComputeStatsStmt extends StatementBase implements SingleTableStmt {
throw new AnalysisException(String.format(
"COMPUTE STATS not supported for system table: %s", tableName_));
}
if (tableRef.getTable() instanceof FePaimonTable) {
throw new AnalysisException(String.format(
"COMPUTE STATS not supported for PAIMON table: %s", tableName_));
}
table_ = analyzer.getTable(tableName_, Privilege.ALTER, Privilege.SELECT);
if (!(table_ instanceof FeFsTable)) {


@@ -72,6 +72,9 @@ public class CreateTableLikeFileStmt extends CreateTableStmt {
} else if (getFileFormat() == THdfsFileFormat.JDBC) {
throw new AnalysisException("CREATE TABLE LIKE FILE statement is not supported " +
"for JDBC tables.");
} else if (getFileFormat() == THdfsFileFormat.PAIMON) {
throw new AnalysisException("CREATE TABLE LIKE FILE statement is not supported" +
" for PAIMON tables.");
}
schemaLocation_.analyze(analyzer, Privilege.ALL, FsAction.READ);
switch (schemaFileFormat_) {


@@ -24,6 +24,7 @@ import org.apache.impala.authorization.Privilege;
import org.apache.impala.catalog.FeTable;
import org.apache.impala.catalog.IcebergTable;
import org.apache.impala.catalog.KuduTable;
import org.apache.impala.catalog.paimon.FePaimonTable;
import org.apache.impala.common.AnalysisException;
import org.apache.impala.common.Pair;
import org.apache.impala.thrift.TAccessEvent;
@@ -183,6 +184,10 @@ public class CreateTableLikeStmt extends StatementBase {
+ "Iceberg table because it is not an Iceberg table.");
} else if (fileFormat_ == THdfsFileFormat.JDBC) {
throw new AnalysisException("CREATE TABLE LIKE is not supported for JDBC tables.");
} else if (fileFormat_ == THdfsFileFormat.PAIMON
|| srcTable instanceof FePaimonTable) {
throw new AnalysisException("CREATE TABLE LIKE is not supported for " +
"PAIMON tables.");
}
srcDbName_ = srcTable.getDb().getName();


@@ -23,6 +23,7 @@ import org.apache.impala.authorization.Privilege;
import org.apache.impala.catalog.FeFsTable;
import org.apache.impala.catalog.FeIcebergTable;
import org.apache.impala.catalog.FeTable;
import org.apache.impala.catalog.paimon.FePaimonTable;
import org.apache.impala.common.AnalysisException;
import org.apache.impala.thrift.TDropStatsParams;
import org.apache.impala.thrift.TTableName;
@@ -107,6 +108,10 @@ public class DropStatsStmt extends StatementBase implements SingleTableStmt {
throw new AnalysisException(
String.format("DROP STATS not allowed on a nested collection: %s", tableName_));
}
if (tableRef_.getTable() instanceof FePaimonTable) {
throw new AnalysisException(
String.format("DROP STATS not allowed on a PAIMON table: %s", tableName_));
}
tableRef_.analyze(analyzer);
FeTable table_ = analyzer.getTable(tableName_, Privilege.ALTER);


@@ -50,6 +50,7 @@ import org.apache.impala.catalog.MaterializedViewHdfsTable;
import org.apache.impala.catalog.PrunablePartition;
import org.apache.impala.catalog.Type;
import org.apache.impala.catalog.View;
import org.apache.impala.catalog.paimon.FePaimonTable;
import org.apache.impala.common.AnalysisException;
import org.apache.impala.common.FileSystemUtil;
import org.apache.impala.common.Pair;
@@ -617,6 +618,11 @@ public class InsertStmt extends DmlStatementBase {
if (isHBaseTable && overwrite_) {
throw new AnalysisException("HBase doesn't have a way to perform INSERT OVERWRITE");
}
if (table_ instanceof FePaimonTable) {
throw new AnalysisException(String.format(
"Impala does not support INSERT into PAIMON table: %s", table_.getFullName()));
}
}
/**


@@ -28,7 +28,7 @@ import org.apache.impala.analysis.ShowStatsStmt;
import org.apache.impala.catalog.HdfsFileFormat;
import org.apache.impala.catalog.Table;
import org.apache.impala.catalog.paimon.FePaimonTable;
import org.apache.impala.catalog.paimon.ImpalaTypeUtils;
import org.apache.impala.catalog.paimon.PaimonImpalaTypeUtils;
import org.apache.impala.catalog.paimon.PaimonUtil;
import org.apache.impala.common.AnalysisException;
import org.apache.impala.thrift.TBucketType;
@@ -231,7 +231,7 @@ public class PaimonAnalyzer {
*/
private static void throwIfColumnTypeIsNotSupported(ColumnDef columnDef)
throws AnalysisException {
if (!ImpalaTypeUtils.isSupportedColumnType(columnDef.getType())) {
if (!PaimonImpalaTypeUtils.isSupportedColumnType(columnDef.getType())) {
throw new AnalysisException("Tables stored by Paimon do not support the column "
+ columnDef.getColName() + " type: " + columnDef.getType().toSql());
}


@@ -74,4 +74,10 @@ public class LocalPaimonTable extends LocalTable implements FePaimonTable {
} catch (IOException e) { throw new RuntimeException(e); }
return tableDescriptor;
}
@Override
public String toString() {
return String.format("Local Paimon Table: %s", getFullName());
}
}


@@ -0,0 +1,276 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.impala.catalog.paimon;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
import org.apache.hadoop.hive.serde2.typeinfo.CharTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.DecimalTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.MapTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.hive.serde2.typeinfo.VarcharTypeInfo;
import org.apache.paimon.hive.LocalZonedTimestampTypeUtils;
import org.apache.paimon.types.ArrayType;
import org.apache.paimon.types.BigIntType;
import org.apache.paimon.types.BinaryType;
import org.apache.paimon.types.BooleanType;
import org.apache.paimon.types.CharType;
import org.apache.paimon.types.DataField;
import org.apache.paimon.types.DataType;
import org.apache.paimon.types.DataTypeDefaultVisitor;
import org.apache.paimon.types.DataTypes;
import org.apache.paimon.types.DateType;
import org.apache.paimon.types.DecimalType;
import org.apache.paimon.types.DoubleType;
import org.apache.paimon.types.FloatType;
import org.apache.paimon.types.IntType;
import org.apache.paimon.types.LocalZonedTimestampType;
import org.apache.paimon.types.MapType;
import org.apache.paimon.types.MultisetType;
import org.apache.paimon.types.RowType;
import org.apache.paimon.types.SmallIntType;
import org.apache.paimon.types.TimeType;
import org.apache.paimon.types.TimestampType;
import org.apache.paimon.types.TinyIntType;
import org.apache.paimon.types.VarBinaryType;
import org.apache.paimon.types.VarCharType;
import org.apache.paimon.types.VariantType;
/**
 * Utils for Paimon and Hive type conversions. The class is copied from
 * org.apache.paimon.hive.HiveTypeUtils and refactored to fix the
 * following incompatible conversion:
 * the Paimon type LocalZonedTimestampType used to be converted to
 * org.apache.hadoop.hive.serde2.typeinfo.TimestampLocalTZTypeInfo,
 * which is not supported in Impala, so a TableLoadingException was
 * raised while loading the table in
 * org.apache.impala.catalog.FeCatalogUtils#parseColumnType.
 * To fix the issue, LocalZonedTimestampType is now converted to the
 * Hive timestamp type. (A brief usage sketch follows this class.)
 */
public class PaimonHiveTypeUtils {
public static TypeInfo toTypeInfo(DataType logicalType) {
return (TypeInfo)logicalType.accept(PaimonToHiveTypeVisitor.INSTANCE);
}
public static DataType toPaimonType(String type) {
TypeInfo typeInfo = TypeInfoUtils.getTypeInfoFromTypeString(type);
return toPaimonType(typeInfo);
}
public static DataType toPaimonType(TypeInfo typeInfo) {
return HiveToPaimonTypeVisitor.visit(typeInfo);
}
private static class PaimonToHiveTypeVisitor extends
DataTypeDefaultVisitor<TypeInfo> {
private static final PaimonToHiveTypeVisitor INSTANCE =
new PaimonToHiveTypeVisitor();
public TypeInfo visit(BooleanType booleanType) {
return TypeInfoFactory.booleanTypeInfo;
}
public TypeInfo visit(TinyIntType tinyIntType) {
return TypeInfoFactory.byteTypeInfo;
}
public TypeInfo visit(SmallIntType smallIntType) {
return TypeInfoFactory.shortTypeInfo;
}
public TypeInfo visit(IntType intType) {
return TypeInfoFactory.intTypeInfo;
}
public TypeInfo visit(BigIntType bigIntType) {
return TypeInfoFactory.longTypeInfo;
}
public TypeInfo visit(FloatType floatType) {
return TypeInfoFactory.floatTypeInfo;
}
public TypeInfo visit(DoubleType doubleType) {
return TypeInfoFactory.doubleTypeInfo;
}
public TypeInfo visit(DecimalType decimalType) {
return TypeInfoFactory.getDecimalTypeInfo(decimalType.getPrecision(),
decimalType.getScale());
}
public TypeInfo visit(CharType charType) {
return (TypeInfo)(charType.getLength() > 255
? TypeInfoFactory.stringTypeInfo
: TypeInfoFactory.getCharTypeInfo(charType.getLength()));
}
public TypeInfo visit(VarCharType varCharType) {
return (TypeInfo)(varCharType.getLength() > 65535
? TypeInfoFactory.stringTypeInfo
: TypeInfoFactory.getVarcharTypeInfo(varCharType.getLength()));
}
public TypeInfo visit(BinaryType binaryType) {
return TypeInfoFactory.binaryTypeInfo;
}
public TypeInfo visit(VarBinaryType varBinaryType) {
return TypeInfoFactory.binaryTypeInfo;
}
public TypeInfo visit(DateType dateType) {
return TypeInfoFactory.dateTypeInfo;
}
public TypeInfo visit(TimeType timeType) {
return TypeInfoFactory.stringTypeInfo;
}
public TypeInfo visit(TimestampType timestampType) {
return TypeInfoFactory.timestampTypeInfo;
}
// Changed to map to timestamp without local time zone.
public TypeInfo visit(LocalZonedTimestampType localZonedTimestampType) {
return TypeInfoFactory.timestampTypeInfo;
}
public TypeInfo visit(ArrayType arrayType) {
DataType elementType = arrayType.getElementType();
return TypeInfoFactory.getListTypeInfo((TypeInfo)elementType.accept(this));
}
public TypeInfo visit(MultisetType multisetType) {
return TypeInfoFactory.getMapTypeInfo((TypeInfo)multisetType
.getElementType()
.accept(this), TypeInfoFactory.intTypeInfo);
}
public TypeInfo visit(MapType mapType) {
return TypeInfoFactory.getMapTypeInfo((TypeInfo)mapType
.getKeyType().accept(this), (TypeInfo)mapType.getValueType()
.accept(this));
}
public TypeInfo visit(RowType rowType) {
List<String> fieldNames = (List)rowType.getFields().stream()
.map(DataField::name)
.collect(Collectors.toList());
List<TypeInfo> typeInfos = (List)rowType.getFields().stream()
.map(DataField::type).map((type)
-> (TypeInfo)type.accept(this))
.collect(Collectors.toList());
return TypeInfoFactory.getStructTypeInfo(fieldNames, typeInfos);
}
public TypeInfo visit(VariantType variantType) {
List<String> fieldNames = Arrays.asList("value", "metadata");
List<TypeInfo> typeInfos = Arrays.asList(TypeInfoFactory.binaryTypeInfo,
TypeInfoFactory.binaryTypeInfo);
return TypeInfoFactory.getStructTypeInfo(fieldNames, typeInfos);
}
protected TypeInfo defaultMethod(DataType dataType) {
throw new UnsupportedOperationException("Unsupported type: " + dataType);
}
}
private static class HiveToPaimonTypeVisitor {
static DataType visit(TypeInfo type) {
return visit(type, new HiveToPaimonTypeVisitor());
}
static DataType visit(TypeInfo type, HiveToPaimonTypeVisitor visitor) {
if (!(type instanceof StructTypeInfo)) {
if (type instanceof MapTypeInfo) {
MapTypeInfo mapTypeInfo = (MapTypeInfo)type;
return DataTypes.MAP(visit(mapTypeInfo.getMapKeyTypeInfo(), visitor),
visit(mapTypeInfo.getMapValueTypeInfo(), visitor));
} else if (type instanceof ListTypeInfo) {
ListTypeInfo listTypeInfo = (ListTypeInfo)type;
return DataTypes.ARRAY(
visit(listTypeInfo.getListElementTypeInfo(), visitor));
} else {
return visitor.atomic(type);
}
} else {
StructTypeInfo structTypeInfo = (StructTypeInfo)type;
ArrayList<String> fieldNames = structTypeInfo.getAllStructFieldNames();
ArrayList<TypeInfo> typeInfos = structTypeInfo
.getAllStructFieldTypeInfos();
RowType.Builder builder = RowType.builder();
for(int i = 0; i < fieldNames.size(); ++i) {
builder.field((String)fieldNames.get(i),
visit((TypeInfo)typeInfos.get(i), visitor));
}
return builder.build();
}
}
public DataType atomic(TypeInfo atomic) {
if (LocalZonedTimestampTypeUtils.isHiveLocalZonedTimestampType(atomic)) {
return DataTypes.TIMESTAMP_MILLIS();
} else if (TypeInfoFactory.booleanTypeInfo.equals(atomic)) {
return DataTypes.BOOLEAN();
} else if (TypeInfoFactory.byteTypeInfo.equals(atomic)) {
return DataTypes.TINYINT();
} else if (TypeInfoFactory.shortTypeInfo.equals(atomic)) {
return DataTypes.SMALLINT();
} else if (TypeInfoFactory.intTypeInfo.equals(atomic)) {
return DataTypes.INT();
} else if (TypeInfoFactory.longTypeInfo.equals(atomic)) {
return DataTypes.BIGINT();
} else if (TypeInfoFactory.floatTypeInfo.equals(atomic)) {
return DataTypes.FLOAT();
} else if (TypeInfoFactory.doubleTypeInfo.equals(atomic)) {
return DataTypes.DOUBLE();
} else if (atomic instanceof DecimalTypeInfo) {
DecimalTypeInfo decimalTypeInfo = (DecimalTypeInfo)atomic;
return DataTypes.DECIMAL(decimalTypeInfo.getPrecision(),
decimalTypeInfo.getScale());
} else if (atomic instanceof CharTypeInfo) {
return DataTypes.CHAR(((CharTypeInfo)atomic).getLength());
} else if (atomic instanceof VarcharTypeInfo) {
return DataTypes.VARCHAR(((VarcharTypeInfo)atomic).getLength());
} else if (TypeInfoFactory.stringTypeInfo.equals(atomic)) {
return DataTypes.VARCHAR(Integer.MAX_VALUE);
} else if (TypeInfoFactory.binaryTypeInfo.equals(atomic)) {
return DataTypes.VARBINARY(Integer.MAX_VALUE);
} else if (TypeInfoFactory.dateTypeInfo.equals(atomic)) {
return DataTypes.DATE();
} else if (TypeInfoFactory.timestampTypeInfo.equals(atomic)) {
return DataTypes.TIMESTAMP_MILLIS();
} else {
throw new UnsupportedOperationException("Not a supported type: "
+ atomic.getTypeName());
}
}
}
}
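
A minimal usage sketch of the converter (illustrative only, not part of
the patch; assumes the snippet lives in org.apache.impala.catalog.paimon
so PaimonHiveTypeUtils resolves, and that the Paimon and Hive serde
dependencies are on the classpath):

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.paimon.types.DataType;
import org.apache.paimon.types.DataTypes;

public class PaimonHiveTypeUtilsDemo {
  public static void main(String[] args) {
    // LocalZonedTimestampType now maps to plain Hive TIMESTAMP rather than
    // TIMESTAMP WITH LOCAL TIME ZONE, so Impala can load the column.
    TypeInfo ts = PaimonHiveTypeUtils.toTypeInfo(
        DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE());
    System.out.println(ts.getTypeName()); // prints "timestamp"

    // Round trip back to Paimon: Hive "timestamp" becomes TIMESTAMP_MILLIS.
    DataType back = PaimonHiveTypeUtils.toPaimonType("timestamp");
    System.out.println(back); // prints something like "TIMESTAMP(3)"
  }
}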


@@ -53,7 +53,7 @@ import java.util.List;
import java.util.stream.Collectors;
/** Utils for converting types related classes between Paimon and Impala. */
public class ImpalaTypeUtils {
public class PaimonImpalaTypeUtils {
/**
* Convert paimon data type {@link DataType} to impala data type {@link Type}.
*


@@ -263,4 +263,8 @@ public class PaimonTable extends Table implements FePaimonTable {
return table_;
}
@Override
public String toString() {
return String.format("Paimon Table: %s", getFullName());
}
}


@@ -23,6 +23,7 @@ import static org.apache.paimon.CoreOptions.PARTITION_DEFAULT_NAME;
import static org.apache.paimon.CoreOptions.PARTITION_GENERATE_LEGCY_NAME;
import static org.apache.paimon.utils.HadoopUtils.HADOOP_LOAD_DEFAULT_CONFIG;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import org.apache.commons.lang3.SerializationUtils;
@@ -70,7 +71,6 @@ import org.apache.paimon.data.Timestamp;
import org.apache.paimon.fs.FileIO;
import org.apache.paimon.fs.Path;
import org.apache.paimon.hive.HiveCatalog;
import org.apache.paimon.hive.HiveTypeUtils;
import org.apache.paimon.hive.LocationKeyExtractor;
import org.apache.paimon.hive.utils.HiveUtils;
import org.apache.paimon.options.CatalogOptions;
@@ -104,7 +104,7 @@ import org.apache.paimon.types.SmallIntType;
import org.apache.paimon.types.TinyIntType;
import org.apache.paimon.utils.InternalRowPartitionComputer;
import org.apache.thrift.TException;
import org.postgresql.shaded.com.ongres.scram.common.util.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -123,6 +123,9 @@ import java.util.Set;
import java.util.function.Consumer;
import java.util.stream.Collectors;
/**
 * Utils for common Paimon-related functions.
*/
public class PaimonUtil {
final static Logger LOG = LoggerFactory.getLogger(PaimonUtil.class);
@@ -191,7 +194,7 @@ public class PaimonUtil {
List<FieldSchema> ret = new ArrayList<>();
for (DataField dataField : schema.getFields()) {
ret.add(new FieldSchema(dataField.name().toLowerCase(),
HiveTypeUtils.toTypeInfo(dataField.type()).getTypeName(),
PaimonHiveTypeUtils.toTypeInfo(dataField.type()).getTypeName(),
dataField.description()));
}
return ret;
@@ -205,7 +208,7 @@ public class PaimonUtil {
List<Column> ret = new ArrayList<>();
int pos = 0;
for (DataField dataField : schema.getFields()) {
Type colType = ImpalaTypeUtils.toImpalaType(dataField.type());
Type colType = PaimonImpalaTypeUtils.toImpalaType(dataField.type());
ret.add(new Column(dataField.name().toLowerCase(), colType, pos++));
}
return ret;
@@ -219,7 +222,7 @@ public class PaimonUtil {
Schema.Builder schemaBuilder = Schema.newBuilder();
for (TColumn column : columns) {
schemaBuilder.column(column.getColumnName().toLowerCase(),
ImpalaTypeUtils.fromImpalaType(Type.fromThrift(column.getColumnType())));
PaimonImpalaTypeUtils.fromImpalaType(Type.fromThrift(column.getColumnType())));
}
if (!partitionKeys.isEmpty()) { schemaBuilder.partitionKeys(partitionKeys); }
if (!options.isEmpty()) { schemaBuilder.options(options); }


@@ -74,6 +74,7 @@ import org.apache.impala.catalog.HdfsFileFormat;
import org.apache.impala.catalog.ScalarType;
import org.apache.impala.catalog.TableLoadingException;
import org.apache.impala.catalog.iceberg.IcebergMetadataTable;
import org.apache.impala.catalog.paimon.FePaimonTable;
import org.apache.impala.common.AnalysisException;
import org.apache.impala.common.ImpalaException;
import org.apache.impala.common.InternalException;
@@ -1927,6 +1928,9 @@ public class SingleNodePlanner implements SingleNodePlannerIntf {
scanNode.addConjuncts(conjuncts);
scanNode.init(analyzer);
return scanNode;
} else if (tblRef.getTable() instanceof FePaimonTable) {
// Querying Paimon tables will be supported in a future patch.
throw new NotImplementedException("Query is not supported for PAIMON table now");
} else {
throw new NotImplementedException(
"Planning not implemented for table class: " + table.getClass());


@@ -379,6 +379,15 @@ public class AnalyzeDDLTest extends FrontendTestBase {
AnalyzesOk(partitioned + "(hour(event_time) in ('2020-01-01-9', '2020-01-01-1'))");
AnalyzesOk(evolution + "(truncate(4,date_string_col,4) = '1231')");
AnalyzesOk(evolution + "(month = 12)");
// Paimon ADD/DROP PARTITION Test
String paimon_partitioned =
"ALTER TABLE functional_parquet.paimon_partitioned";
AnalysisError(paimon_partitioned + " ADD PARTITION (userid = 3) ",
"ALTER TABLE not allowed on PAIMON table: " +
"functional_parquet.paimon_partitioned");
AnalysisError(paimon_partitioned + " DROP PARTITION (userid = 3) ",
"ALTER TABLE not allowed on PAIMON table: " +
"functional_parquet.paimon_partitioned");
}
@Test
@@ -518,6 +527,12 @@ public class AnalyzeDDLTest extends FrontendTestBase {
AnalysisError("alter table functional.alltypes add column new_col int not null",
"The specified column options are only supported in Kudu tables: " +
"new_col INT NOT NULL");
// Paimon ADD COLUMN Test
String paimon_partitioned =
"ALTER TABLE functional_parquet.paimon_partitioned";
AnalysisError(paimon_partitioned + " add column new_col int not null",
"ALTER TABLE not allowed on PAIMON table: " +
"functional_parquet.paimon_partitioned");
}
@Test
@@ -605,6 +620,12 @@ public class AnalyzeDDLTest extends FrontendTestBase {
AnalysisError("alter table functional.alltypes add columns(new_col int not null)",
"The specified column options are only supported in Kudu tables: " +
"new_col INT NOT NULL");
// Paimon ADD COLUMNS Test
String paimon_partitioned =
"ALTER TABLE functional_parquet.paimon_partitioned";
AnalysisError(paimon_partitioned + " add columns(new_col int not null)",
"ALTER TABLE not allowed on PAIMON table: " +
"functional_parquet.paimon_partitioned");
}
@Test
@@ -664,6 +685,12 @@ public class AnalyzeDDLTest extends FrontendTestBase {
// Cannot ALTER TABLE REPLACE COLUMNS on a Kudu table.
AnalysisError("alter table functional_kudu.alltypes replace columns (i int)",
"ALTER TABLE REPLACE COLUMNS is not supported on Kudu tables.");
// Paimon REPLACE COLUMNS Test
String paimon_partitioned =
"ALTER TABLE functional_parquet.paimon_partitioned";
AnalysisError(paimon_partitioned + " replace columns (i int)",
"ALTER TABLE not allowed on PAIMON table: " +
"functional_parquet.paimon_partitioned");
}
@Test
@@ -702,6 +729,12 @@ public class AnalyzeDDLTest extends FrontendTestBase {
// Cannot ALTER TABLE DROP COLUMN on an HBase table.
AnalysisError("alter table functional_hbase.alltypes drop column int_col",
"ALTER TABLE DROP COLUMN not currently supported on HBase tables.");
// Paimon DROP COLUMNS Test
String paimon_partitioned =
"ALTER TABLE functional_parquet.paimon_partitioned";
AnalysisError(paimon_partitioned + " drop column userid",
"ALTER TABLE not allowed on PAIMON table: " +
"functional_parquet.paimon_partitioned");
}
@Test
@@ -756,6 +789,12 @@ public class AnalyzeDDLTest extends FrontendTestBase {
// Cannot ALTER TABLE CHANGE COLUMN on an HBase table.
AnalysisError("alter table functional_hbase.alltypes CHANGE COLUMN int_col i int",
"ALTER TABLE CHANGE/ALTER COLUMN not currently supported on HBase tables.");
// Paimon CHANGE COLUMNS Test
String paimon_partitioned =
"ALTER TABLE functional_parquet.paimon_partitioned";
AnalysisError(paimon_partitioned + " change column userid user_id bigint",
"ALTER TABLE not allowed on PAIMON table: " +
"functional_parquet.paimon_partitioned");
}
@Test
@@ -789,6 +828,13 @@ public class AnalyzeDDLTest extends FrontendTestBase {
"delimited fields terminated by ' '",
"ALTER TABLE SET ROW FORMAT not allowed on a table STORED BY JDBC: " +
"functional.alltypes_jdbc_datasource");
// Paimon SET ROW FORMAT Test
String paimon_partitioned =
"ALTER TABLE functional_parquet.paimon_partitioned";
AnalysisError(paimon_partitioned + " set row format " +
"delimited fields terminated by ' '",
"ALTER TABLE not allowed on PAIMON table: " +
"functional_parquet.paimon_partitioned");
}
@Test
@@ -1063,7 +1109,28 @@ public class AnalyzeDDLTest extends FrontendTestBase {
// Cannot ALTER TABLE SET on an HBase table.
AnalysisError("alter table functional_hbase.alltypes set tblproperties('a'='b')",
"ALTER TABLE SET not currently supported on HBase tables.");
// Cannot ALTER TABLE SET on a Paimon table.
AnalysisError("alter table functional_parquet.paimon_partitioned set " +
"serdeproperties ('a'='2')","ALTER TABLE not allowed " +
"on PAIMON table: functional_parquet.paimon_partitioned");
AnalysisError("alter table functional_parquet.paimon_partitioned " +
"PARTITION (year=2010) set tblproperties ('a'='2')",
"ALTER TABLE not allowed on PAIMON table: " +
"functional_parquet.paimon_partitioned");
AnalysisError("alter table functional_parquet.paimon_partitioned set tblproperties" +
" ('__IMPALA_DATA_SOURCE_NAME'='test')","ALTER TABLE not allowed on" +
" PAIMON table: functional_parquet.paimon_partitioned");
AnalysisError("alter table functional_parquet.paimon_partitioned unset " +
"serdeproperties ('a')","ALTER TABLE not allowed on PAIMON table:" +
" functional_parquet.paimon_partitioned");
AnalysisError("alter table functional_parquet.paimon_partitioned PARTITION " +
"(year=2010) unset tblproperties ('a')",
"ALTER TABLE not allowed on PAIMON table: " +
"functional_parquet.paimon_partitioned");
AnalysisError("alter table functional_parquet.paimon_partitioned " +
"unset tblproperties ('__IMPALA_DATA_SOURCE_NAME')",
"ALTER TABLE not allowed on PAIMON table: " +
"functional_parquet.paimon_partitioned");
// serialization.encoding
AnalyzesOk("alter table functional.alltypes set serdeproperties(" +
"'serialization.encoding'='GBK')");
@@ -1192,6 +1259,11 @@ public class AnalyzeDDLTest extends FrontendTestBase {
"'testPool'",
"ALTER TABLE SET CACHED not allowed on a table STORED BY JDBC: " +
"functional.alltypes_jdbc_datasource");
// Cannot ALTER TABLE to set cached for PAIMON tables.
AnalysisError("alter table functional_parquet.paimon_partitioned set cached in " +
"'testPool'",
"ALTER TABLE not allowed on PAIMON table: " +
"functional_parquet.paimon_partitioned");
}
@Test
@@ -1337,6 +1409,11 @@ public class AnalyzeDDLTest extends FrontendTestBase {
AnalysisError(
"alter table functional.alltypes set column stats string_col ('avgSize'='inf')",
"Invalid stats value 'inf' for column stats key: avgSize");
// Cannot ALTER TABLE to set column stats for PAIMON tables.
AnalysisError("alter table functional_parquet.paimon_partitioned set column stats" +
" userid ('avgSize'='8')",
"ALTER TABLE not allowed on PAIMON table: " +
"functional_parquet.paimon_partitioned");
}
@Test
@@ -1482,6 +1559,11 @@ public class AnalyzeDDLTest extends FrontendTestBase {
AnalyzesOk("alter table functional.alltypes_datasource rename to new_datasrc_tbl");
AnalyzesOk("alter table functional.alltypes_jdbc_datasource rename to " +
"new_jdbc_datasrc_tbl");
// Cannot ALTER TABLE rename for PAIMON tables.
AnalysisError("alter table functional_parquet.paimon_partitioned rename to" +
" new_datasrc_tbl",
"ALTER TABLE RENAME statement not allowed on PAIMON table: " +
"functional_parquet.paimon_partitioned");
}
@Test
@@ -1501,6 +1583,10 @@ public class AnalyzeDDLTest extends FrontendTestBase {
AnalysisError("alter table functional_hbase.alltypes recover partitions",
"ALTER TABLE RECOVER PARTITIONS must target an HDFS table: " +
"functional_hbase.alltypes");
// Cannot ALTER TABLE recover partitions for PAIMON tables.
AnalysisError("alter table functional_parquet.paimon_partitioned RECOVER PARTITIONS",
"ALTER TABLE not allowed on PAIMON table: " +
"functional_parquet.paimon_partitioned");
}
@Test
@@ -1519,6 +1605,10 @@ public class AnalyzeDDLTest extends FrontendTestBase {
AnalysisError("alter table functional.alltypes_jdbc_datasource sort by (id)",
"ALTER TABLE SORT BY not allowed on a table STORED BY JDBC: " +
"functional.alltypes_jdbc_datasource");
// Cannot ALTER TABLE to sort by for PAIMON tables.
AnalysisError("alter table functional_parquet.paimon_partitioned sort by (userid)",
"ALTER TABLE not allowed on PAIMON table: " +
"functional_parquet.paimon_partitioned");
}
@Test
@@ -1541,6 +1631,10 @@ public class AnalyzeDDLTest extends FrontendTestBase {
AnalysisError("alter table functional.alltypes_jdbc_datasource sort by zorder (id)",
"ALTER TABLE SORT BY not allowed on a table STORED BY JDBC: " +
"functional.alltypes_jdbc_datasource");
// Cannot ALTER TABLE to sort by for PAIMON tables.
AnalysisError("alter table functional_parquet.paimon_partitioned sort by (userid)",
"ALTER TABLE not allowed on PAIMON table: " +
"functional_parquet.paimon_partitioned");
}
@Test
@@ -1797,6 +1891,11 @@ public class AnalyzeDDLTest extends FrontendTestBase {
"default", "Unsupported column option for non-Kudu table: DROP DEFAULT");
AnalysisError("alter table functional.alltypes_jdbc_datasource alter int_col drop " +
"default", "Unsupported column option for non-Kudu table: DROP DEFAULT");
// Cannot ALTER TABLE ALTER COLUMN for PAIMON tables.
AnalysisError("alter table functional_parquet.paimon_partitioned alter int_col" +
" set comment 'a' ",
"ALTER TABLE not allowed on PAIMON table: " +
"functional_parquet.paimon_partitioned");
}
ComputeStatsStmt checkComputeStatsStmt(String stmt) throws AnalysisException {
@@ -1963,7 +2062,10 @@ public class AnalyzeDDLTest extends FrontendTestBase {
AnalysisError(
"compute stats functional.alltypes_datasource tablesample system (3)",
"TABLESAMPLE is only supported on file-based tables.");
// Cannot COMPUTE STATS for PAIMON tables.
AnalysisError("compute stats functional_parquet.paimon_partitioned",
"COMPUTE STATS not supported for PAIMON table: " +
"functional_parquet.paimon_partitioned");
// Test different COMPUTE_STATS_MIN_SAMPLE_BYTES.
TQueryOptions queryOpts = new TQueryOptions();
@@ -2089,6 +2191,10 @@ public class AnalyzeDDLTest extends FrontendTestBase {
"compute incremental stats functional.alltypes partition(year=2009, month<12)");
BackendConfig.INSTANCE.getBackendCfg().setInc_stats_size_limit_bytes(bytes);
// Cannot COMPUTE INCREMENTAL STATS for PAIMON tables.
AnalysisError("compute incremental stats functional_parquet.paimon_partitioned",
"COMPUTE STATS not supported for PAIMON table: " +
"functional_parquet.paimon_partitioned");
}
@@ -2107,6 +2213,11 @@ public class AnalyzeDDLTest extends FrontendTestBase {
AnalysisError("drop incremental stats functional_parquet.iceberg_partitioned "
+ "partition(year=2010, month=1)", "DROP INCREMENTAL STATS ... PARTITION "
+ "not supported for Iceberg table functional_parquet.iceberg_partitioned");
// Cannot DROP INCREMENTAL STATS for PAIMON tables.
AnalysisError("drop incremental stats functional_parquet.paimon_partitioned" +
" partition(userid=10)",
"DROP STATS not allowed on a PAIMON table: " +
"functional_parquet.paimon_partitioned");
}
@@ -2125,6 +2236,10 @@ public class AnalyzeDDLTest extends FrontendTestBase {
"Syntax error");
AnalysisError("drop stats functional.alltypes partition(year, month)",
"Syntax error");
// Cannot DROP STATS for PAIMON tables.
AnalysisError("drop stats functional_parquet.paimon_partitioned",
"DROP STATS not allowed on a PAIMON table: " +
"functional_parquet.paimon_partitioned");
}
@Test
@@ -2218,6 +2333,10 @@ public class AnalyzeDDLTest extends FrontendTestBase {
// Cannot truncate a non hdfs table.
AnalysisError("truncate table functional.alltypes_view",
"TRUNCATE TABLE not supported on non-HDFS table: functional.alltypes_view");
// Cannot TRUNCATE for PAIMON tables.
AnalysisError("truncate table functional_parquet.paimon_partitioned",
"TRUNCATE TABLE not supported on non-HDFS table: " +
"functional_parquet.paimon_partitioned");
}
@Test
@@ -2356,6 +2475,9 @@ public class AnalyzeDDLTest extends FrontendTestBase {
AnalysisError("create table newtbl_jdbc like parquet " +
"'/test-warehouse/schemas/alltypestiny.parquet' stored as JDBC",
"CREATE TABLE LIKE FILE statement is not supported for JDBC tables.");
AnalysisError("create table newtbl_jdbc like parquet " +
"'/test-warehouse/schemas/alltypestiny.parquet' stored AS PAIMON",
"CREATE TABLE LIKE FILE statement is not supported for PAIMON tables.");
}
@Test
@@ -2760,6 +2882,10 @@ public class AnalyzeDDLTest extends FrontendTestBase {
"functional.alltypes");
AnalysisError("create table tbl sort by zorder (int_col,foo) like " +
"functional.alltypes", "Could not find SORT BY column 'foo' in table.");
// Cannot CREATE TABLE LIKE for PAIMON tables.
AnalysisError("create table test_like like functional_parquet.paimon_partitioned" +
" STORED BY PAIMON",
"CREATE TABLE LIKE is not supported for PAIMON tables.");
}
@Test


@@ -964,4 +964,39 @@ public class AnalyzerTest extends FrontendTestBase {
AnalysisError("alter table functional.bucketed_table add columns(col3 int)",
errorMsgBucketed);
}
@Test
public void testPaimonDML() {
AnalysisError("INSERT INTO functional_parquet.paimon_non_partitioned " +
"VALUES (101, 201, 4.5, 1640995200000)",
"Impala does not support INSERT into PAIMON table: " +
"functional_parquet.paimon_non_partitioned");
AnalysisError("INSERT OVERWRITE TABLE functional_parquet.paimon_non_partitioned " +
"VALUES (102, 202, 3.8, 1640995201000)",
"Impala does not support INSERT into PAIMON table: " +
"functional_parquet.paimon_non_partitioned");
AnalysisError("DELETE FROM functional_parquet.paimon_non_partitioned " +
"WHERE userid = 101",
"Impala only supports modifying Kudu and Iceberg tables, " +
"but the following table is neither: " +
"functional_parquet.paimon_non_partitioned");
AnalysisError("MERGE INTO functional_parquet.paimon_partitioned a USING " +
"functional_parquet.paimon_non_partitioned source ON " +
"a.userid = source.userid WHEN NOT MATCHED THEN INSERT " +
"VALUES(a.userid,a.movieid, source.rating,souce.ts)",
"Target table must be an Iceberg table: " +
"functional_parquet.paimon_partitioned");
AnalysisError("MERGE INTO functional_parquet.paimon_partitioned a USING " +
"functional_parquet.paimon_non_partitioned source ON a.userid = source.id " +
"WHEN MATCHED THEN DELETE",
"Target table must be an Iceberg table: " +
"functional_parquet.paimon_partitioned");
AnalysisError("MERGE INTO functional_parquet.paimon_partitioned a USING " +
"functional_parquet.paimon_non_partitioned source ON " +
"a.userid = source.userid WHEN MATCHED THEN UPDATE " +
"SET movieid = source.movieid",
"Target table must be an Iceberg table: " +
"functional_parquet.paimon_partitioned");
}
}


@@ -39,7 +39,6 @@ import org.apache.paimon.types.SmallIntType;
import org.apache.paimon.types.TimestampType;
import org.apache.paimon.types.TinyIntType;
import org.apache.paimon.types.VarCharType;
import org.junit.Assert;
import org.junit.Test;
import java.util.Arrays;
@@ -58,104 +57,120 @@ public class ImpalaTypeUtilsTest {
@Test
public void testToImpalaType() {
// Test primitive types
assertEquals(Type.BOOLEAN, ImpalaTypeUtils.toImpalaType(new BooleanType()));
assertEquals(Type.TINYINT, ImpalaTypeUtils.toImpalaType(new TinyIntType()));
assertEquals(Type.SMALLINT, ImpalaTypeUtils.toImpalaType(new SmallIntType()));
assertEquals(Type.INT, ImpalaTypeUtils.toImpalaType(new IntType()));
assertEquals(Type.BIGINT, ImpalaTypeUtils.toImpalaType(new BigIntType()));
assertEquals(Type.FLOAT, ImpalaTypeUtils.toImpalaType(new FloatType()));
assertEquals(Type.DOUBLE, ImpalaTypeUtils.toImpalaType(new DoubleType()));
assertEquals(Type.DATE, ImpalaTypeUtils.toImpalaType(DataTypes.DATE()));
assertEquals(Type.TIMESTAMP, ImpalaTypeUtils.toImpalaType(new TimestampType()));
assertEquals(Type.BOOLEAN, PaimonImpalaTypeUtils.toImpalaType(new BooleanType()));
assertEquals(Type.TINYINT, PaimonImpalaTypeUtils.toImpalaType(new TinyIntType()));
assertEquals(Type.SMALLINT, PaimonImpalaTypeUtils.toImpalaType(new SmallIntType()));
assertEquals(Type.INT, PaimonImpalaTypeUtils.toImpalaType(new IntType()));
assertEquals(Type.BIGINT, PaimonImpalaTypeUtils.toImpalaType(new BigIntType()));
assertEquals(Type.FLOAT, PaimonImpalaTypeUtils.toImpalaType(new FloatType()));
assertEquals(Type.DOUBLE, PaimonImpalaTypeUtils.toImpalaType(new DoubleType()));
assertEquals(Type.DATE, PaimonImpalaTypeUtils.toImpalaType(DataTypes.DATE()));
assertEquals(Type.TIMESTAMP, PaimonImpalaTypeUtils
.toImpalaType(new TimestampType()));
// Test decimal type
DecimalType decimalType = new DecimalType(10, 2);
assertEquals(
ScalarType.createDecimalType(10, 2), ImpalaTypeUtils.toImpalaType(decimalType));
ScalarType.createDecimalType(10, 2),
PaimonImpalaTypeUtils.toImpalaType(decimalType));
// Test char and varchar types
assertEquals(
ScalarType.createCharType(10), ImpalaTypeUtils.toImpalaType(new CharType(10)));
ScalarType.createCharType(10),
PaimonImpalaTypeUtils.toImpalaType(new CharType(10)));
assertEquals(ScalarType.createVarcharType(10),
ImpalaTypeUtils.toImpalaType(new VarCharType(10)));
assertEquals(Type.STRING, ImpalaTypeUtils.toImpalaType(new CharType(256)));
assertEquals(Type.STRING, ImpalaTypeUtils.toImpalaType(new VarCharType(65536)));
PaimonImpalaTypeUtils.toImpalaType(new VarCharType(10)));
assertEquals(Type.STRING, PaimonImpalaTypeUtils
.toImpalaType(new CharType(256)));
assertEquals(Type.STRING, PaimonImpalaTypeUtils
.toImpalaType(new VarCharType(65536)));
// Test array type
ArrayType arrayType = new ArrayType(new IntType());
assertEquals(new org.apache.impala.catalog.ArrayType(Type.INT),
ImpalaTypeUtils.toImpalaType(arrayType));
PaimonImpalaTypeUtils.toImpalaType(arrayType));
// Test map type
MapType mapType = new MapType(new IntType(), DataTypes.STRING());
assertEquals(new org.apache.impala.catalog.MapType(Type.INT, Type.STRING),
ImpalaTypeUtils.toImpalaType(mapType));
PaimonImpalaTypeUtils.toImpalaType(mapType));
// Test row type
RowType rowType = new RowType(Arrays.asList(new DataField(0, "id", new IntType()),
new DataField(1, "name", DataTypes.STRING())));
StructType expectedStructType = new StructType(Arrays.asList(
new StructField("id", Type.INT), new StructField("name", Type.STRING)));
assertEquals(expectedStructType, ImpalaTypeUtils.toImpalaType(rowType));
new StructField("id", Type.INT,
rowType.getField(0).description()),
new StructField("name", Type.STRING,
rowType.getField(1).description())));
assertEquals(expectedStructType, PaimonImpalaTypeUtils.toImpalaType(rowType));
// doesn't support time
assertThrowUnexpected(() -> ImpalaTypeUtils.toImpalaType(DataTypes.TIME()));
assertThrowUnexpected(() -> PaimonImpalaTypeUtils.toImpalaType(DataTypes.TIME()));
// doesn't support multiset
assertThrowUnexpected(
() -> ImpalaTypeUtils.toImpalaType(DataTypes.MULTISET(DataTypes.INT())));
() -> PaimonImpalaTypeUtils
.toImpalaType(DataTypes.MULTISET(DataTypes.INT())));
// doesn't support timestamp with local timezone
assertThrowUnexpected(
() -> ImpalaTypeUtils.toImpalaType(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE()));
() -> PaimonImpalaTypeUtils
.toImpalaType(DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE()));
}
@Test
public void testFromImpalaType() {
// Test primitive types
assertEquals(DataTypes.BOOLEAN(), ImpalaTypeUtils.fromImpalaType(Type.BOOLEAN));
assertEquals(DataTypes.TINYINT(), ImpalaTypeUtils.fromImpalaType(Type.TINYINT));
assertEquals(DataTypes.SMALLINT(), ImpalaTypeUtils.fromImpalaType(Type.SMALLINT));
assertEquals(DataTypes.INT(), ImpalaTypeUtils.fromImpalaType(Type.INT));
assertEquals(DataTypes.BIGINT(), ImpalaTypeUtils.fromImpalaType(Type.BIGINT));
assertEquals(DataTypes.FLOAT(), ImpalaTypeUtils.fromImpalaType(Type.FLOAT));
assertEquals(DataTypes.DOUBLE(), ImpalaTypeUtils.fromImpalaType(Type.DOUBLE));
assertEquals(DataTypes.DATE(), ImpalaTypeUtils.fromImpalaType(Type.DATE));
assertEquals(DataTypes.TIMESTAMP(), ImpalaTypeUtils.fromImpalaType(Type.TIMESTAMP));
assertEquals(DataTypes.BOOLEAN(), PaimonImpalaTypeUtils
.fromImpalaType(Type.BOOLEAN));
assertEquals(DataTypes.TINYINT(), PaimonImpalaTypeUtils
.fromImpalaType(Type.TINYINT));
assertEquals(DataTypes.SMALLINT(), PaimonImpalaTypeUtils
.fromImpalaType(Type.SMALLINT));
assertEquals(DataTypes.INT(), PaimonImpalaTypeUtils.fromImpalaType(Type.INT));
assertEquals(DataTypes.BIGINT(), PaimonImpalaTypeUtils.fromImpalaType(Type.BIGINT));
assertEquals(DataTypes.FLOAT(), PaimonImpalaTypeUtils.fromImpalaType(Type.FLOAT));
assertEquals(DataTypes.DOUBLE(), PaimonImpalaTypeUtils.fromImpalaType(Type.DOUBLE));
assertEquals(DataTypes.DATE(), PaimonImpalaTypeUtils.fromImpalaType(Type.DATE));
assertEquals(DataTypes.TIMESTAMP(), PaimonImpalaTypeUtils
.fromImpalaType(Type.TIMESTAMP));
// Test decimal type
ScalarType decimalType = ScalarType.createDecimalType(10, 2);
assertEquals(DataTypes.DECIMAL(10, 2), ImpalaTypeUtils.fromImpalaType(decimalType));
assertEquals(DataTypes.DECIMAL(10, 2),
PaimonImpalaTypeUtils.fromImpalaType(decimalType));
// Test char and varchar types
assertEquals(DataTypes.CHAR(10),
ImpalaTypeUtils.fromImpalaType(ScalarType.createCharType(10)));
PaimonImpalaTypeUtils.fromImpalaType(ScalarType.createCharType(10)));
assertEquals(DataTypes.VARCHAR(255),
ImpalaTypeUtils.fromImpalaType(ScalarType.createVarcharType(255)));
PaimonImpalaTypeUtils.fromImpalaType(ScalarType.createVarcharType(255)));
// Test array type
org.apache.impala.catalog.ArrayType arrayType =
new org.apache.impala.catalog.ArrayType(Type.INT);
assertEquals(
DataTypes.ARRAY(DataTypes.INT()), ImpalaTypeUtils.fromImpalaType(arrayType));
DataTypes.ARRAY(DataTypes.INT()), PaimonImpalaTypeUtils
.fromImpalaType(arrayType));
// Test map type
org.apache.impala.catalog.MapType mapType =
new org.apache.impala.catalog.MapType(Type.INT, Type.STRING);
assertEquals(DataTypes.MAP(DataTypes.INT(), DataTypes.STRING()),
ImpalaTypeUtils.fromImpalaType(mapType));
PaimonImpalaTypeUtils.fromImpalaType(mapType));
// Test struct type
StructType structType = new StructType(Arrays.asList(
new StructField("id", Type.INT), new StructField("name", Type.STRING)));
RowType expectedRowType = RowType.of(new DataField(0, "id", DataTypes.INT()),
new DataField(1, "name", DataTypes.STRING()));
assertEquals(expectedRowType, ImpalaTypeUtils.fromImpalaType(structType));
assertEquals(expectedRowType, PaimonImpalaTypeUtils.fromImpalaType(structType));
// doesn't support datetime
assertThrowUnexpected(() -> ImpalaTypeUtils.fromImpalaType(Type.DATETIME));
assertThrowUnexpected(() -> PaimonImpalaTypeUtils.fromImpalaType(Type.DATETIME));
// doesn't support NULL TYPE
assertThrowUnexpected(() -> ImpalaTypeUtils.fromImpalaType(Type.NULL));
assertThrowUnexpected(() -> PaimonImpalaTypeUtils.fromImpalaType(Type.NULL));
// doesn't support INVALID
assertThrowUnexpected(() -> ImpalaTypeUtils.fromImpalaType(Type.INVALID));
assertThrowUnexpected(() -> PaimonImpalaTypeUtils.fromImpalaType(Type.INVALID));
// doesn't support FIXED_UDA_INTERMEDIATE
assertThrowUnexpected(
() -> ImpalaTypeUtils.fromImpalaType(Type.FIXED_UDA_INTERMEDIATE));
() -> PaimonImpalaTypeUtils.fromImpalaType(Type.FIXED_UDA_INTERMEDIATE));
}
}


@@ -0,0 +1,168 @@
====
---- QUERY
CREATE TABLE paimon_test(
level STRING
)
STORED AS PAIMON;
INSERT INTO paimon_test values('1');
---- CATCH
AnalysisException: Impala does not support INSERT into PAIMON table: $DATABASE.paimon_test
====
---- QUERY
TRUNCATE paimon_test
---- CATCH
AnalysisException: TRUNCATE TABLE not supported on non-HDFS table: $DATABASE.paimon_test
====
---- QUERY
CREATE TABLE paimon_test_ctas STORED AS PAIMON AS SELECT * FROM functional_parquet.alltypes;
---- CATCH
AnalysisException: CREATE TABLE AS SELECT does not support the (PAIMON) file format. Supported formats are: (PARQUET, TEXTFILE, KUDU, ICEBERG)
====
---- QUERY
CREATE TABLE paimon_test_ctas LIKE functional_parquet.alltypes STORED AS PAIMON;
---- CATCH
AnalysisException: CREATE TABLE LIKE is not supported for PAIMON tables.
====
---- QUERY
CREATE TABLE paimon_test_ctas LIKE functional_parquet.paimon_non_partitioned;
---- CATCH
AnalysisException: CREATE TABLE LIKE is not supported for PAIMON tables.
====
---- QUERY
ALTER TABLE paimon_test ADD COLUMN event_time TIMESTAMP
---- CATCH
AnalysisException: ALTER TABLE not allowed on PAIMON table: $DATABASE.paimon_test
====
---- QUERY
ALTER TABLE paimon_test ADD COLUMNS (event_time TIMESTAMP, event_type STRING)
---- CATCH
AnalysisException: ALTER TABLE not allowed on PAIMON table: $DATABASE.paimon_test
====
---- QUERY
ALTER TABLE paimon_test DROP COLUMN event_time
---- CATCH
AnalysisException: ALTER TABLE not allowed on PAIMON table: $DATABASE.paimon_test
====
---- QUERY
ALTER TABLE paimon_test CHANGE COLUMN old_column new_column STRING
---- CATCH
AnalysisException: ALTER TABLE not allowed on PAIMON table: $DATABASE.paimon_test
====
---- QUERY
ALTER TABLE paimon_test SET TBLPROPERTIES ('key'='value')
---- CATCH
AnalysisException: ALTER TABLE not allowed on PAIMON table: $DATABASE.paimon_test
====
---- QUERY
ALTER TABLE paimon_test UNSET TBLPROPERTIES ('key')
---- CATCH
AnalysisException: ALTER TABLE not allowed on PAIMON table: $DATABASE.paimon_test
====
---- QUERY
ALTER TABLE paimon_test SET FILEFORMAT PARQUET
---- CATCH
AnalysisException: ALTER TABLE not allowed on PAIMON table: $DATABASE.paimon_test
====
---- QUERY
ALTER TABLE paimon_test SET LOCATION '/new/location/path'
---- CATCH
AnalysisException: ALTER TABLE not allowed on PAIMON table: $DATABASE.paimon_test
====
---- QUERY
ALTER TABLE paimon_test RECOVER PARTITIONS
---- CATCH
AnalysisException: ALTER TABLE not allowed on PAIMON table: $DATABASE.paimon_test
====
---- QUERY
ALTER TABLE paimon_test ADD PARTITION (dt='2023-01-01')
---- CATCH
AnalysisException: ALTER TABLE not allowed on PAIMON table: $DATABASE.paimon_test
====
---- QUERY
ALTER TABLE paimon_test DROP PARTITION (dt='2023-01-01')
---- CATCH
AnalysisException: ALTER TABLE not allowed on PAIMON table: $DATABASE.paimon_test
====
---- QUERY
ALTER TABLE paimon_test PARTITION (dt='2023-01-01') SET FILEFORMAT TEXTFILE
---- CATCH
AnalysisException: ALTER TABLE not allowed on PAIMON table: $DATABASE.paimon_test
====
---- QUERY
ALTER TABLE paimon_test PARTITION (dt='2023-01-01') SET LOCATION '/specific/partition/path'
---- CATCH
AnalysisException: ALTER TABLE not allowed on PAIMON table: $DATABASE.paimon_test
====
---- QUERY
ALTER TABLE paimon_test SET CACHED IN 'pool_name'
---- CATCH
AnalysisException: ALTER TABLE not allowed on PAIMON table: $DATABASE.paimon_test
====
---- QUERY
ALTER TABLE paimon_test SET UNCACHED
---- CATCH
AnalysisException: ALTER TABLE not allowed on PAIMON table: $DATABASE.paimon_test
====
---- QUERY
COMPUTE STATS paimon_test;
---- CATCH
AnalysisException: COMPUTE STATS not supported for PAIMON table: paimon_test
====
---- QUERY
COMPUTE INCREMENTAL STATS paimon_test(userid);
---- CATCH
AnalysisException: COMPUTE STATS not supported for PAIMON table: paimon_test
====
---- QUERY
INSERT INTO functional_parquet.paimon_non_partitioned VALUES (101, 201, 4.5, 1640995200000);
---- CATCH
AnalysisException: Impala does not support INSERT into PAIMON table: functional_parquet.paimon_non_partitioned
====
---- QUERY
INSERT OVERWRITE TABLE functional_parquet.paimon_non_partitioned VALUES (102, 202, 3.8, 1640995201000);
---- CATCH
AnalysisException: Impala does not support INSERT into PAIMON table: functional_parquet.paimon_non_partitioned
====
---- QUERY
DELETE FROM functional_parquet.paimon_non_partitioned WHERE userid = 101;
---- CATCH
AnalysisException: Impala only supports modifying Kudu and Iceberg tables, but the following table is neither: functional_parquet.paimon_non_partitioned
====
---- QUERY
MERGE INTO functional_parquet.paimon_partitioned a
USING functional_parquet.paimon_non_partitioned source
ON a.userid = source.userid
WHEN NOT MATCHED THEN INSERT VALUES(a.userid,a.movieid, source.rating,source.ts);
---- CATCH
AnalysisException: Target table must be an Iceberg table: functional_parquet.paimon_partitioned
====
---- QUERY
MERGE INTO functional_parquet.paimon_partitioned a
USING functional_parquet.paimon_non_partitioned source
ON a.userid = source.id
WHEN MATCHED THEN DELETE;
---- CATCH
AnalysisException: Target table must be an Iceberg table: functional_parquet.paimon_partitioned
====
---- QUERY
MERGE INTO functional_parquet.paimon_partitioned a
USING functional_parquet.paimon_non_partitioned source
ON a.userid = source.userid
WHEN MATCHED THEN UPDATE SET movieid = source.movieid;
---- CATCH
AnalysisException: Target table must be an Iceberg table: functional_parquet.paimon_partitioned
====
---- QUERY
SELECT * FROM functional_parquet.paimon_partitioned;
---- CATCH
NotImplementedException: Query is not supported for PAIMON table now


@@ -48,3 +48,7 @@ class TestCreatingPaimonTable(ImpalaTestSuite):
def test_create_paimon_ddl_negative(self, vector, unique_database):
self.run_test_case('QueryTest/paimon-ddl-negative',
vector, unique_database)
def test_paimon_negative(self, vector, unique_database):
self.run_test_case('QueryTest/paimon-negative',
vector, unique_database)