The original IMPALA-14552 patch passed precommit tests before
IMPALA-12893: (part 2) (275f03f) was merged. As a consequence, it did
not catch a missing comma in the updated show-create-table.test. This
patch adds that missing comma.

Testing:
Passed metadata/test_show_create_table.py
Change-Id: Ib06e690a81e6b0ca483b3647cc59c73802a0a7b7
Reviewed-on: http://gerrit.cloudera.org:8080/23673
Reviewed-by: Riza Suminto <riza.suminto@cloudera.com>
Tested-by: Impala Public Jenkins <impala-public-jenkins@cloudera.com>
====
---- CREATE_TABLE
# Simple table
CREATE TABLE test1 (
id INT
)
STORED AS TEXTFILE
---- RESULTS-HIVE
CREATE TABLE show_create_table_test_db.test1 (
id INT
)
STORED AS TEXTFILE
LOCATION '$$location_uri$$'
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.test1 (
id INT
)
STORED AS TEXTFILE
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE',
'external.table.purge'='TRUE')
====
---- CREATE_TABLE
# Simple JSON table
CREATE TABLE json_test1 (
year INT,
month INT,
id INT COMMENT 'Add a comment',
bool_col BOOLEAN,
tinyint_col TINYINT,
smallint_col SMALLINT,
int_col INT,
bigint_col BIGINT,
float_col FLOAT,
double_col DOUBLE,
date_string_col STRING,
string_col STRING,
timestamp_col TIMESTAMP
)
STORED AS JSONFILE
---- RESULTS-HIVE
CREATE TABLE show_create_table_test_db.json_test1 (
year INT,
month INT,
id INT COMMENT 'Add a comment',
bool_col BOOLEAN,
tinyint_col TINYINT,
smallint_col SMALLINT,
int_col INT,
bigint_col BIGINT,
float_col FLOAT,
double_col DOUBLE,
date_string_col STRING,
string_col STRING,
timestamp_col TIMESTAMP
)
STORED AS JSONFILE
LOCATION '$$location_uri$$'
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.json_test1 (
year INT,
month INT,
id INT COMMENT 'Add a comment',
bool_col BOOLEAN,
tinyint_col TINYINT,
smallint_col SMALLINT,
int_col INT,
bigint_col BIGINT,
float_col FLOAT,
double_col DOUBLE,
date_string_col STRING,
string_col STRING,
timestamp_col TIMESTAMP
)
STORED AS JSONFILE
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE', 'external.table.purge'='TRUE')
====
---- CREATE_TABLE
# simple table with all types
CREATE TABLE test2 (
year INT,
month INT,
id INT COMMENT 'Add a comment',
bool_col BOOLEAN,
tinyint_col TINYINT,
smallint_col SMALLINT,
int_col INT,
bigint_col BIGINT,
float_col FLOAT,
double_col DOUBLE,
date_string_col STRING,
string_col STRING,
timestamp_col TIMESTAMP
)
STORED AS TEXTFILE
---- RESULTS-HIVE
CREATE TABLE show_create_table_test_db.test2 (
year INT,
month INT,
id INT COMMENT 'Add a comment',
bool_col BOOLEAN,
tinyint_col TINYINT,
smallint_col SMALLINT,
int_col INT,
bigint_col BIGINT,
float_col FLOAT,
double_col DOUBLE,
date_string_col STRING,
string_col STRING,
timestamp_col TIMESTAMP
)
STORED AS TEXTFILE
LOCATION '$$location_uri$$'
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.test2 (
year INT,
month INT,
id INT COMMENT 'Add a comment',
bool_col BOOLEAN,
tinyint_col TINYINT,
smallint_col SMALLINT,
int_col INT,
bigint_col BIGINT,
float_col FLOAT,
double_col DOUBLE,
date_string_col STRING,
string_col STRING,
timestamp_col TIMESTAMP
)
STORED AS TEXTFILE
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE', 'external.table.purge'='TRUE')
====
---- CREATE_TABLE
# all types and partitioned
CREATE TABLE test3 (
year INT,
month INT,
id INT COMMENT 'Add a comment',
bool_col BOOLEAN,
tinyint_col TINYINT,
smallint_col SMALLINT,
int_col INT,
bigint_col BIGINT,
float_col FLOAT,
double_col DOUBLE,
date_string_col STRING,
string_col STRING,
timestamp_col TIMESTAMP
)
PARTITIONED BY (
x INT,
y INT,
a BOOLEAN
)
COMMENT 'This is a test'
STORED AS TEXTFILE
---- RESULTS-HIVE
CREATE TABLE show_create_table_test_db.test3 (
year INT,
month INT,
id INT COMMENT 'Add a comment',
bool_col BOOLEAN,
tinyint_col TINYINT,
smallint_col SMALLINT,
int_col INT,
bigint_col BIGINT,
float_col FLOAT,
double_col DOUBLE,
date_string_col STRING,
string_col STRING,
timestamp_col TIMESTAMP
)
PARTITIONED BY (
x INT,
y INT,
a BOOLEAN
)
COMMENT 'This is a test'
STORED AS TEXTFILE
LOCATION '$$location_uri$$'
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.test3 (
year INT,
month INT,
id INT COMMENT 'Add a comment',
bool_col BOOLEAN,
tinyint_col TINYINT,
smallint_col SMALLINT,
int_col INT,
bigint_col BIGINT,
float_col FLOAT,
double_col DOUBLE,
date_string_col STRING,
string_col STRING,
timestamp_col TIMESTAMP
)
PARTITIONED BY (
x INT,
y INT,
a BOOLEAN
)
COMMENT 'This is a test'
STORED AS TEXTFILE
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE', 'external.table.purge'='TRUE')
====
---- CREATE_TABLE
# With a table comment
CREATE TABLE test4 (
year INT,
month INT,
id INT COMMENT 'Add a comment'
)
COMMENT 'This is a test'
STORED AS TEXTFILE
---- RESULTS-HIVE
CREATE TABLE show_create_table_test_db.test4 (
year INT,
month INT,
id INT COMMENT 'Add a comment'
)
COMMENT 'This is a test'
STORED AS TEXTFILE
LOCATION '$$location_uri$$'
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.test4 (
year INT,
month INT,
id INT COMMENT 'Add a comment'
)
COMMENT 'This is a test'
STORED AS TEXTFILE
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE', 'external.table.purge'='TRUE')
====
---- CREATE_TABLE
# With the row format specified
CREATE TABLE test5 (
year INT,
month INT,
id INT COMMENT 'Add a comment'
)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' ESCAPED BY '\\' LINES TERMINATED BY '\n'
STORED AS TEXTFILE
---- RESULTS-HIVE
CREATE TABLE show_create_table_test_db.test5 (
year INT,
month INT,
id INT COMMENT 'Add a comment'
)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' ESCAPED BY '\\' LINES TERMINATED BY '\n'
WITH SERDEPROPERTIES ('line.delim'='\n', 'field.delim'=',',
'serialization.format'=',', 'escape.delim'='\\')
STORED AS TEXTFILE
LOCATION '$$location_uri$$'
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.test5 (
year INT,
month INT,
id INT COMMENT 'Add a comment'
)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' ESCAPED BY '\\' LINES TERMINATED BY '\n'
WITH SERDEPROPERTIES ('line.delim'='\n', 'field.delim'=',',
'serialization.format'=',', 'escape.delim'='\\')
STORED AS TEXTFILE
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE', 'external.table.purge'='TRUE')
====
---- CREATE_TABLE
# testing with parquet specified
CREATE TABLE test6 (
year INT,
month INT,
id INT COMMENT 'Add a comment'
)
STORED AS PARQUET
---- RESULTS-HIVE
CREATE TABLE show_create_table_test_db.test6 (
year INT,
month INT,
id INT COMMENT 'Add a comment'
)
STORED AS PARQUET
LOCATION '$$location_uri$$'
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.test6 (
year INT,
month INT,
id INT COMMENT 'Add a comment'
)
STORED AS PARQUET
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE', 'external.table.purge'='TRUE')
====
---- CREATE_TABLE
# with extra table properties and sequencefile
CREATE TABLE test7 (
year INT,
month INT,
id INT COMMENT 'Add a comment'
)
STORED AS SEQUENCEFILE
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE', 'key3'='val3',
'key2'='val2', 'key1'='val1')
---- RESULTS-HIVE
CREATE TABLE show_create_table_test_db.test7 (
year INT,
month INT,
id INT COMMENT 'Add a comment'
)
STORED AS SEQUENCEFILE
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE', 'key3'='val3',
'key2'='val2', 'key1'='val1')
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.test7 (
year INT,
month INT,
id INT COMMENT 'Add a comment'
)
STORED AS SEQUENCEFILE
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE', 'key3'='val3',
'key2'='val2', 'key1'='val1', 'external.table.purge'='TRUE')
====
---- CREATE_TABLE
# testing with rcfile specified
CREATE TABLE test8 (
year INT,
month INT,
id INT COMMENT 'Add a comment'
)
STORED AS RCFILE
---- RESULTS-HIVE
CREATE TABLE show_create_table_test_db.test8 (
year INT,
month INT,
id INT COMMENT 'Add a comment'
)
STORED AS RCFILE
LOCATION '$$location_uri$$'
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.test8 (
year INT,
month INT,
id INT COMMENT 'Add a comment'
)
STORED AS RCFILE
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE', 'external.table.purge'='TRUE')
====
---- CREATE_TABLE
# Test create table as select
CREATE TABLE test_as_select (
id INT,
bool_col BOOLEAN,
tinyint_col TINYINT,
smallint_col SMALLINT,
int_col INT,
bigint_col BIGINT,
float_col FLOAT,
double_col DOUBLE,
date_string_col STRING,
string_col STRING,
timestamp_col TIMESTAMP,
year INT,
month INT
)
STORED AS TEXTFILE
---- RESULTS-HIVE
CREATE TABLE show_create_table_test_db.test_as_select (
id INT,
bool_col BOOLEAN,
tinyint_col TINYINT,
smallint_col SMALLINT,
int_col INT,
bigint_col BIGINT,
float_col FLOAT,
double_col DOUBLE,
date_string_col STRING,
string_col STRING,
timestamp_col TIMESTAMP,
year INT,
month INT
)
STORED AS TEXTFILE
LOCATION '$$location_uri$$'
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.test_as_select (
id INT,
bool_col BOOLEAN,
tinyint_col TINYINT,
smallint_col SMALLINT,
int_col INT,
bigint_col BIGINT,
float_col FLOAT,
double_col DOUBLE,
date_string_col STRING,
string_col STRING,
timestamp_col TIMESTAMP,
year INT,
month INT
)
STORED AS TEXTFILE
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE', 'external.table.purge'='TRUE')
====
---- CREATE_TABLE
create table i_1687_p partitioned by (int_col) as
select bigint_col, int_col from functional.alltypessmall;
---- RESULTS-HIVE
CREATE TABLE show_create_table_test_db.i_1687_p (
bigint_col BIGINT
)
PARTITIONED BY (
int_col INT
)
STORED AS TEXTFILE
LOCATION '$$location_uri$$'
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.i_1687_p (
bigint_col BIGINT
)
PARTITIONED BY (
int_col INT
)
STORED AS TEXTFILE
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE', 'external.table.purge'='TRUE')
====
---- QUERY
SHOW CREATE TABLE functional.allcomplextypes
---- RESULTS-HIVE
CREATE EXTERNAL TABLE functional.allcomplextypes (
id INT,
int_array_col ARRAY<INT>,
array_array_col ARRAY<ARRAY<INT>>,
map_array_col ARRAY<MAP<STRING,INT>>,
struct_array_col ARRAY<STRUCT<f1:BIGINT,f2:STRING>>,
int_map_col MAP<STRING,INT>,
array_map_col MAP<STRING,ARRAY<INT>>,
map_map_col MAP<STRING,MAP<STRING,INT>>,
struct_map_col MAP<STRING,STRUCT<f1:BIGINT,f2:STRING>>,
int_struct_col STRUCT<f1:INT,f2:INT>,
complex_struct_col STRUCT<f1:INT,f2:ARRAY<INT>,f3:MAP<STRING,INT>>,
nested_struct_col STRUCT<f1:INT,f2:STRUCT<f11:BIGINT,f12:STRUCT<f21:BIGINT>>>,
complex_nested_struct_col STRUCT<f1:INT,f2:ARRAY<STRUCT<f11:BIGINT,f12:MAP<STRING,STRUCT<f21:BIGINT>>>>>
)
PARTITIONED BY (
year INT,
month INT
)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' ESCAPED BY '\\'
WITH SERDEPROPERTIES ('field.delim'=',', 'serialization.format'=',', 'escape.delim'='\\')
STORED AS TEXTFILE
LOCATION '$$location_uri$$'
====
---- QUERY
SHOW CREATE VIEW functional.alltypes_view
---- RESULTS-HIVE
CREATE VIEW functional.alltypes_view AS
SELECT * FROM functional.alltypes
====
---- QUERY
# SHOW CREATE TABLE should also work for views.
SHOW CREATE TABLE functional.alltypes_view
---- RESULTS-HIVE
CREATE VIEW functional.alltypes_view AS
SELECT * FROM functional.alltypes
====
---- CREATE_VIEW
# View with column aliases
CREATE VIEW column_aliases_view (foo, bar, baz) AS
SELECT tinyint_col, id, bigint_col
FROM functional.alltypes;
---- RESULTS-HIVE
# A view with column aliases is expanded into a SELECT with an inline view.
CREATE VIEW show_create_table_test_db.column_aliases_view AS
SELECT column_aliases_view.tinyint_col foo, column_aliases_view.id bar, column_aliases_view.bigint_col baz FROM (SELECT tinyint_col, id, bigint_col FROM functional.alltypes) column_aliases_view
====
---- QUERY
# Test views referencing views.
SHOW CREATE VIEW functional.view_view;
---- RESULTS-HIVE
CREATE VIEW functional.view_view AS
SELECT * FROM functional.alltypes_view
====
---- QUERY
# Test complex views with multiple tables and Hive-style column aliases.
SHOW CREATE VIEW functional.complex_view
---- RESULTS-HIVE
CREATE VIEW functional.complex_view AS
SELECT complex_view.`_c0` abc, complex_view.string_col xyz FROM (SELECT count(a.bigint_col), b.string_col FROM functional.alltypesagg a INNER JOIN functional.alltypestiny b ON a.id = b.id WHERE a.bigint_col < 50 GROUP BY b.string_col HAVING count(a.bigint_col) > 1 ORDER BY b.string_col ASC LIMIT 100) complex_view
====
---- CREATE_VIEW
# Test that identifiers are correctly quoted.
CREATE VIEW _quote_view (_foo, bar) AS
SELECT tinyint_col, id _id
FROM functional.alltypes;
---- RESULTS-HIVE
CREATE VIEW show_create_table_test_db.`_quote_view` AS
SELECT `_quote_view`.tinyint_col `_foo`, `_quote_view`.`_id` bar FROM (SELECT tinyint_col, id `_id` FROM functional.alltypes) `_quote_view`
====
---- QUERY
# SHOW CREATE VIEW should also work on tables.
SHOW CREATE VIEW functional_parquet.tinytable;
---- RESULTS-HIVE
CREATE EXTERNAL TABLE functional_parquet.tinytable (
a STRING,
b STRING
)
STORED AS PARQUET
LOCATION '$$location_uri$$'
TBLPROPERTIES ()
====
---- CREATE_VIEW
# Create view that contains a subquery (IMPALA-4579)
CREATE VIEW view_with_subquery AS SELECT * FROM functional.alltypestiny
WHERE id IN (SELECT id FROM functional.alltypesagg);
---- RESULTS-HIVE
CREATE VIEW show_create_table_test_db.view_with_subquery
AS SELECT * FROM functional.alltypestiny
WHERE id IN (SELECT id FROM functional.alltypesagg)
====
---- CREATE_TABLE
# Simple table with sort columns.
CREATE TABLE test1 (id INT)
SORT BY (id)
STORED AS TEXTFILE
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.test1 (id INT)
SORT BY LEXICAL (id)
STORED AS TEXTFILE
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE', 'external.table.purge'='TRUE')
---- RESULTS-HIVE
CREATE TABLE show_create_table_test_db.test1 (id INT)
SORT BY LEXICAL (id)
STORED AS TEXTFILE
LOCATION '$$location_uri$$'
====
---- CREATE_TABLE
# Simple partitioned table with sort columns.
CREATE TABLE test1 (id INT)
PARTITIONED BY (x INT, y INT)
SORT BY (id)
STORED AS TEXTFILE
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.test1 (id INT)
PARTITIONED BY (x INT, y INT)
SORT BY LEXICAL (id)
STORED AS TEXTFILE
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE', 'external.table.purge'='TRUE')
---- RESULTS-HIVE
CREATE TABLE show_create_table_test_db.test1 (id INT)
PARTITIONED BY (x INT, y INT)
SORT BY LEXICAL (id)
STORED AS TEXTFILE
LOCATION '$$location_uri$$'
====
---- QUERY
SHOW CREATE TABLE functional.parent_table
---- RESULTS-HIVE
CREATE EXTERNAL TABLE functional.parent_table (
id INT,
year STRING,
PRIMARY KEY (id, year)
)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
WITH SERDEPROPERTIES ('field.delim'=',', 'serialization.format'=',')
STORED AS TEXTFILE
LOCATION '$$location_uri$$'
====
---- QUERY
SHOW CREATE TABLE functional.child_table
---- RESULTS-HIVE
CREATE EXTERNAL TABLE functional.child_table (
seq INT,
id INT,
year STRING,
a INT,
PRIMARY KEY (seq),
FOREIGN KEY(id, year) REFERENCES functional.parent_table(id, year),
FOREIGN KEY(a) REFERENCES functional.parent_table_2(a)
)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
WITH SERDEPROPERTIES ('field.delim'=',', 'serialization.format'=',')
STORED AS TEXTFILE
LOCATION '$$location_uri$$'
====
---- CREATE_TABLE
CREATE TABLE iceberg_test1 (
level STRING
)
STORED AS ICEBERG
TBLPROPERTIES('TRANSLATED_TO_EXTERNAL'='TRUE',
'write.format.default'='parquet',
'write.parquet.compression-codec'='zstd',
'write.parquet.compression-level'='12',
'write.parquet.row-group-size-bytes'='134217728',
'write.parquet.page-size-bytes'='65536',
'write.parquet.dict-size-bytes'='131072',
'iceberg.catalog'='hadoop.tables')
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.iceberg_test1 (
level STRING NULL
)
STORED AS ICEBERG
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE',
'external.table.purge'='TRUE',
'write.delete.mode'='merge-on-read',
'write.update.mode'='merge-on-read',
'write.merge.mode'='merge-on-read',
'write.format.default'='parquet',
'write.parquet.compression-codec'='zstd',
'write.parquet.compression-level'='12',
'write.parquet.row-group-size-bytes'='134217728',
'write.parquet.page-size-bytes'='65536',
'write.parquet.dict-size-bytes'='131072',
'engine.hive.enabled'='true',
'iceberg.catalog'='hadoop.tables')
====
---- CREATE_TABLE
CREATE TABLE iceberg_test2 (
level STRING
)
STORED AS ICEBERG
TBLPROPERTIES('TRANSLATED_TO_EXTERNAL'='TRUE',
'write.format.default'='parquet',
'write.parquet.compression-codec'='zstd',
'write.parquet.compression-level'='12',
'write.parquet.row-group-size-bytes'='134217728',
'write.parquet.page-size-bytes'='65536',
'write.parquet.dict-size-bytes'='131072',
'iceberg.catalog'='hadoop.catalog',
'iceberg.catalog_location'='$$warehouse$$/hadoop_catalog_test')
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.iceberg_test2 (
level STRING NULL
)
STORED AS ICEBERG
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE',
'external.table.purge'='TRUE',
'write.delete.mode'='merge-on-read',
'write.update.mode'='merge-on-read',
'write.merge.mode'='merge-on-read',
'write.format.default'='parquet',
'write.parquet.compression-codec'='zstd',
'write.parquet.compression-level'='12',
'write.parquet.row-group-size-bytes'='134217728',
'write.parquet.page-size-bytes'='65536',
'write.parquet.dict-size-bytes'='131072',
'engine.hive.enabled'='true',
'iceberg.catalog'='hadoop.catalog',
'iceberg.catalog_location'='$$warehouse$$/hadoop_catalog_test')
====
---- CREATE_TABLE
CREATE TABLE iceberg_test3 (
level STRING
)
STORED AS ICEBERG
TBLPROPERTIES('TRANSLATED_TO_EXTERNAL'='TRUE',
'write.format.default'='parquet',
'write.parquet.compression-codec'='zstd',
'write.parquet.compression-level'='12',
'write.parquet.row-group-size-bytes'='134217728',
'write.parquet.page-size-bytes'='65536',
'write.parquet.dict-size-bytes'='131072',
'iceberg.catalog'='hadoop.catalog',
'iceberg.catalog_location'='$$warehouse$$/hadoop_catalog_test',
'iceberg.table_identifier'='org.my_db.my_table')
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.iceberg_test3 (
level STRING NULL
)
STORED AS ICEBERG
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE',
'external.table.purge'='TRUE',
'write.delete.mode'='merge-on-read',
'write.update.mode'='merge-on-read',
'write.merge.mode'='merge-on-read',
'write.format.default'='parquet',
'write.parquet.compression-codec'='zstd',
'write.parquet.compression-level'='12',
'write.parquet.row-group-size-bytes'='134217728',
'write.parquet.page-size-bytes'='65536',
'write.parquet.dict-size-bytes'='131072',
'engine.hive.enabled'='true',
'iceberg.catalog'='hadoop.catalog',
'iceberg.catalog_location'='$$warehouse$$/hadoop_catalog_test',
'iceberg.table_identifier'='org.my_db.my_table')
====
---- CREATE_TABLE
CREATE TABLE iceberg_test1_partitioned (
level STRING,
p1 DATE,
p2 STRING,
p3 TIMESTAMP,
p4 INT
)
PARTITIONED BY SPEC (
YEAR(p1),
p2,
BUCKET(500, p2),
TRUNCATE(15, p2),
HOUR(p3),
BUCKET(10, p4),
TRUNCATE(5, p4)
)
STORED AS ICEBERG
TBLPROPERTIES('TRANSLATED_TO_EXTERNAL'='TRUE',
'write.format.default'='parquet',
'write.parquet.compression-codec'='zstd',
'write.parquet.compression-level'='12',
'write.parquet.row-group-size-bytes'='134217728',
'write.parquet.page-size-bytes'='65536',
'write.parquet.dict-size-bytes'='131072',
'iceberg.catalog'='hadoop.catalog',
'iceberg.catalog_location'='$$warehouse$$/hadoop_catalog_test')
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.iceberg_test1_partitioned (
level STRING NULL,
p1 DATE NULL,
p2 STRING NULL,
p3 TIMESTAMP NULL,
p4 INT NULL
)
PARTITIONED BY SPEC (
YEAR(p1),
p2,
BUCKET(500, p2),
TRUNCATE(15, p2),
HOUR(p3),
BUCKET(10, p4),
TRUNCATE(5, p4)
)
STORED AS ICEBERG
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE',
'external.table.purge'='TRUE',
'write.delete.mode'='merge-on-read',
'write.update.mode'='merge-on-read',
'write.merge.mode'='merge-on-read',
'write.format.default'='parquet',
'write.parquet.compression-codec'='zstd',
'write.parquet.compression-level'='12',
'write.parquet.row-group-size-bytes'='134217728',
'write.parquet.page-size-bytes'='65536',
'write.parquet.dict-size-bytes'='131072',
'engine.hive.enabled'='true',
'iceberg.catalog'='hadoop.catalog',
'iceberg.catalog_location'='$$warehouse$$/hadoop_catalog_test')
====
---- CREATE_TABLE
CREATE TABLE iceberg_test_orc (
level STRING
)
STORED AS ICEBERG
TBLPROPERTIES('TRANSLATED_TO_EXTERNAL'='TRUE',
'write.format.default'='orc', 'iceberg.catalog'='hadoop.tables')
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.iceberg_test_orc (
level STRING NULL
)
STORED AS ICEBERG
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE',
'external.table.purge'='TRUE', 'write.format.default'='orc',
'engine.hive.enabled'='true', 'iceberg.catalog'='hadoop.tables',
'write.delete.mode'='merge-on-read', 'write.update.mode'='merge-on-read',
'write.merge.mode'='merge-on-read')
====
---- CREATE_TABLE
# Default Iceberg table
CREATE TABLE iceberg_default_tbl (
level STRING
)
STORED AS ICEBERG;
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.iceberg_default_tbl (
level STRING NULL
)
STORED AS ICEBERG
LOCATION '$$location_uri$$'
TBLPROPERTIES ('external.table.purge'='TRUE', 'write.format.default'='parquet',
'engine.hive.enabled'='true', 'table_type'='ICEBERG', 'write.delete.mode'='merge-on-read',
'write.update.mode'='merge-on-read', 'write.merge.mode'='merge-on-read',
'write.parquet.compression-codec'='snappy')
====
---- CREATE_TABLE
# Default Iceberg table with ORC format
CREATE TABLE iceberg_default_tbl_orc (
level STRING
)
STORED AS ICEBERG
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE',
'write.format.default'='orc');
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.iceberg_default_tbl_orc (
level STRING NULL
)
STORED AS ICEBERG
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE',
'write.format.default'='orc', 'write.parquet.compression-codec'='snappy',
'engine.hive.enabled'='true', 'external.table.purge'='TRUE', 'table_type'='ICEBERG',
'write.update.mode'='merge-on-read', 'write.merge.mode'='merge-on-read',
'write.delete.mode'='merge-on-read')
====
---- CREATE_TABLE
# Iceberg table in HiveCatalog
CREATE TABLE iceberg_hive_cat_explicit (
level STRING
)
STORED AS ICEBERG
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE',
'iceberg.catalog'='hive.catalog');
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.iceberg_hive_cat_explicit (
level STRING NULL
)
STORED AS ICEBERG
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE',
'write.format.default'='parquet', 'iceberg.catalog'='hive.catalog',
'engine.hive.enabled'='true', 'external.table.purge'='TRUE', 'table_type'='ICEBERG',
'write.parquet.compression-codec'='snappy', 'write.delete.mode'='merge-on-read',
'write.update.mode'='merge-on-read', 'write.merge.mode'='merge-on-read')
====
---- CREATE_TABLE
CREATE TABLE iceberg_nullable_test (
level STRING NOT NULL,
event_time TIMESTAMP NULL,
register_time DATE
)
STORED AS ICEBERG
TBLPROPERTIES('TRANSLATED_TO_EXTERNAL'='TRUE',
'write.format.default'='parquet',
'write.parquet.compression-codec'='zstd',
'write.parquet.compression-level'='12',
'write.parquet.row-group-size-bytes'='134217728',
'write.parquet.page-size-bytes'='65536',
'write.parquet.dict-size-bytes'='131072',
'iceberg.catalog'='hadoop.tables')
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.iceberg_nullable_test (
level STRING NOT NULL,
event_time TIMESTAMP NULL,
register_time DATE NULL
)
STORED AS ICEBERG
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE',
'external.table.purge'='TRUE',
'write.delete.mode'='merge-on-read',
'write.update.mode'='merge-on-read',
'write.merge.mode'='merge-on-read',
'write.format.default'='parquet',
'write.parquet.compression-codec'='zstd',
'write.parquet.compression-level'='12',
'write.parquet.row-group-size-bytes'='134217728',
'write.parquet.page-size-bytes'='65536',
'write.parquet.dict-size-bytes'='131072',
'engine.hive.enabled'='true',
'iceberg.catalog'='hadoop.tables')
====
---- CREATE_TABLE
CREATE TABLE iceberg_old_style_partitions (i INT)
PARTITIONED BY (p STRING, d DATE)
STORED AS ICEBERG
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.iceberg_old_style_partitions (
i INT NULL,
p STRING NULL,
d DATE NULL
)
PARTITIONED BY SPEC (p, d)
STORED AS ICEBERG
LOCATION '$$location_uri$$'
TBLPROPERTIES ('external.table.purge'='TRUE', 'write.format.default'='parquet',
'engine.hive.enabled'='true', 'table_type'='ICEBERG', 'write.parquet.compression-codec'='snappy',
'write.delete.mode'='merge-on-read', 'write.update.mode'='merge-on-read',
'write.merge.mode'='merge-on-read')
====
---- CREATE_TABLE
CREATE TABLE iceberg_ctas
PARTITIONED BY SPEC (bucket(5, id))
STORED AS ICEBERG
AS SELECT id, bool_col, bigint_col FROM functional.alltypes;
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.iceberg_ctas (
id INT NULL,
bool_col BOOLEAN NULL,
bigint_col BIGINT NULL
)
PARTITIONED BY SPEC (BUCKET(5, id))
STORED AS ICEBERG
LOCATION '$$location_uri$$'
TBLPROPERTIES ('external.table.purge'='TRUE', 'write.format.default'='parquet',
'engine.hive.enabled'='true', 'table_type'='ICEBERG', 'write.parquet.compression-codec'='snappy',
'write.delete.mode'='merge-on-read', 'write.update.mode'='merge-on-read',
'write.merge.mode'='merge-on-read')
====
---- CREATE_TABLE
CREATE TABLE iceberg_ctas_ht
PARTITIONED BY SPEC (bucket(5, id))
STORED AS ICEBERG
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE',
'iceberg.catalog'='hadoop.tables')
AS SELECT id, bool_col, bigint_col FROM functional.alltypes;
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.iceberg_ctas_ht (
id INT NULL,
bool_col BOOLEAN NULL,
bigint_col BIGINT NULL
)
PARTITIONED BY SPEC (BUCKET(5, id))
STORED AS ICEBERG
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE',
'external.table.purge'='TRUE', 'write.format.default'='parquet',
'engine.hive.enabled'='true', 'iceberg.catalog'='hadoop.tables', 'write.delete.mode'='merge-on-read',
'write.update.mode'='merge-on-read', 'write.merge.mode'='merge-on-read')
====
---- CREATE_TABLE
CREATE TABLE iceberg_catalogs_hive (i int)
PARTITIONED BY SPEC (bucket(3, i))
STORED AS ICEBERG
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE',
'iceberg.catalog'='ice_hive_cat')
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.iceberg_catalogs_hive (i INT NULL)
PARTITIONED BY SPEC (BUCKET(3, i))
STORED AS ICEBERG
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE',
'external.table.purge'='TRUE', 'write.format.default'='parquet',
'engine.hive.enabled'='true', 'table_type'='ICEBERG', 'write.parquet.compression-codec'='snappy',
'write.delete.mode'='merge-on-read', 'write.update.mode'='merge-on-read',
'write.merge.mode'='merge-on-read')
====
---- CREATE_TABLE
CREATE TABLE iceberg_catalogs_hadoop (i int)
PARTITIONED BY SPEC (bucket(3, i))
STORED AS ICEBERG
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE',
'iceberg.catalog'='ice_hadoop_cat')
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.iceberg_catalogs_hadoop (i INT NULL)
PARTITIONED BY SPEC (BUCKET(3, i))
STORED AS ICEBERG
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE',
'external.table.purge'='TRUE', 'write.format.default'='parquet',
'engine.hive.enabled'='true', 'iceberg.catalog'='ice_hadoop_cat', 'write.delete.mode'='merge-on-read',
'write.update.mode'='merge-on-read', 'write.merge.mode'='merge-on-read')
====
---- CREATE_TABLE
CREATE TABLE iceberg_void_transform (i int, j int)
PARTITIONED BY SPEC (void(i), void(j))
STORED AS ICEBERG
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.iceberg_void_transform (i INT NULL, j INT NULL)
PARTITIONED BY SPEC (VOID(i), VOID(j))
STORED AS ICEBERG
LOCATION '$$location_uri$$'
TBLPROPERTIES ('external.table.purge'='TRUE', 'write.format.default'='parquet',
'engine.hive.enabled'='true', 'table_type'='ICEBERG', 'write.parquet.compression-codec'='snappy',
'write.delete.mode'='merge-on-read', 'write.update.mode'='merge-on-read',
'write.merge.mode'='merge-on-read')
====
---- CREATE_TABLE
# Creating V1 tables explicitly should not set 'merge-on-read' write modes if no write mode is
# specified.
CREATE TABLE ice_explicit_v1 (i int)
STORED AS ICEBERG
TBLPROPERTIES('TRANSLATED_TO_EXTERNAL'='TRUE', 'format-version'='1')
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.ice_explicit_v1 (i INT NULL)
STORED AS ICEBERG
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE',
'format-version'='1', 'engine.hive.enabled'='true', 'external.table.purge'='TRUE',
'table_type'='ICEBERG',
'write.format.default'='parquet', 'write.parquet.compression-codec'='snappy')
====
---- CREATE_TABLE
# Creating V2 tables should set 'merge-on-read' write modes if no write mode is specified.
CREATE TABLE ice_v2 (i int)
STORED AS ICEBERG
TBLPROPERTIES('TRANSLATED_TO_EXTERNAL'='TRUE', 'format-version'='2')
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.ice_v2 (i INT NULL)
STORED AS ICEBERG
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE',
'external.table.purge'='TRUE', 'write.format.default'='parquet',
'engine.hive.enabled'='true', 'table_type'='ICEBERG', 'write.delete.mode'='merge-on-read',
'write.update.mode'='merge-on-read', 'write.merge.mode'='merge-on-read',
'write.parquet.compression-codec'='snappy')
====
---- CREATE_TABLE
# Creating V2 tables should not set write mode if user specified any of it to any value.
CREATE TABLE ice_v2_explicit_delete (i int)
STORED AS ICEBERG
TBLPROPERTIES('TRANSLATED_TO_EXTERNAL'='TRUE',
'format-version'='2', 'write.delete.mode'='merge-on-read')
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.ice_v2_explicit_delete (i INT NULL)
STORED AS ICEBERG
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE',
'external.table.purge'='TRUE', 'write.format.default'='parquet',
'engine.hive.enabled'='true', 'table_type'='ICEBERG', 'write.delete.mode'='merge-on-read',
'write.parquet.compression-codec'='snappy')
====
---- CREATE_TABLE
# Creating V2 tables should not set write mode if user specified any of it to any value.
CREATE TABLE ice_v2_explicit_delete_2 (i int)
STORED AS ICEBERG
TBLPROPERTIES('TRANSLATED_TO_EXTERNAL'='TRUE', 'format-version'='2',
'write.delete.mode'='copy-on-write')
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.ice_v2_explicit_delete_2 (i INT NULL)
STORED AS ICEBERG
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE',
'external.table.purge'='TRUE', 'write.format.default'='parquet',
'engine.hive.enabled'='true', 'table_type'='ICEBERG', 'write.delete.mode'='copy-on-write',
'write.parquet.compression-codec'='snappy')
====
---- CREATE_TABLE
# Creating V2 tables should not set write mode if user specified any of it to any value.
CREATE TABLE ice_v2_explicit_update (i int)
STORED AS ICEBERG
TBLPROPERTIES('TRANSLATED_TO_EXTERNAL'='TRUE',
'format-version'='2', 'write.update.mode'='copy-on-write')
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.ice_v2_explicit_update (i INT NULL)
STORED AS ICEBERG
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE',
'external.table.purge'='TRUE', 'write.format.default'='parquet',
'engine.hive.enabled'='true', 'table_type'='ICEBERG', 'write.update.mode'='copy-on-write',
'write.parquet.compression-codec'='snappy')
====
---- CREATE_TABLE
# Creating V2 tables should not set write mode if user specified any of it to any value.
CREATE TABLE ice_v2_explicit_merge (i int)
STORED AS ICEBERG
TBLPROPERTIES('TRANSLATED_TO_EXTERNAL'='TRUE',
'format-version'='2', 'write.merge.mode'='copy-on-write')
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.ice_v2_explicit_merge (i INT NULL)
STORED AS ICEBERG
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE',
'external.table.purge'='TRUE', 'write.format.default'='parquet',
'engine.hive.enabled'='true', 'table_type'='ICEBERG', 'write.merge.mode'='copy-on-write',
'write.parquet.compression-codec'='snappy')
====
---- CREATE_TABLE
# Test create Bucketed Table
CREATE TABLE bucketed_test (a int, b string) CLUSTERED BY (a) into 4 buckets;
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.bucketed_test (a INT, b STRING)
CLUSTERED BY (a) INTO 4 BUCKETS
STORED AS TEXTFILE
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE', 'bucketing_version'='2',
'external.table.purge'='TRUE')
====
---- CREATE_TABLE
# Test create Bucketed Table
CREATE TABLE bucketed_sort_test (a int, b string) CLUSTERED BY (a) sort by (b) into 4 buckets;
---- RESULTS-HIVE-3
CREATE EXTERNAL TABLE show_create_table_test_db.bucketed_sort_test (a INT, b STRING)
CLUSTERED BY (a) SORT BY LEXICAL (b) INTO 4 BUCKETS
STORED AS TEXTFILE
LOCATION '$$location_uri$$'
TBLPROPERTIES ('TRANSLATED_TO_EXTERNAL'='TRUE', 'bucketing_version'='2',
'external.table.purge'='TRUE')
====