Hive External Table Usage Example

Hive External Table Walkthrough

TEXTFILE (the default file format when creating Hive tables)

Hive

CREATE TABLE test(
  siteid int,
  citycode int,
  username string,
  pv int)
ROW FORMAT SERDE
  'org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe'
WITH SERDEPROPERTIES (
  'field.delim'=',',
  'serialization.format'=',')
STORED AS INPUTFORMAT
  'org.apache.hadoop.mapred.TextInputFormat'
OUTPUTFORMAT
  'org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat'
LOCATION
  'hdfs://xxxx:9002/user/hive/warehouse/test.db/test'
TBLPROPERTIES (
  'transient_lastDdlTime'='1634021246')

StarRocks

-- Create a Hive resource named hive0
CREATE EXTERNAL RESOURCE "hive0"
PROPERTIES (
  "type" = "hive",
  "hive.metastore.uris" = "thrift://xxxx:9082"
);

-- Show the resources created in StarRocks
SHOW RESOURCES;

-- Drop the resource named hive0
DROP RESOURCE "hive0";

-- Create the external table
CREATE EXTERNAL TABLE hive_test(
  siteid int,
  citycode int,
  username string,
  pv int
) ENGINE=HIVE
PROPERTIES (
  "resource" = "hive0",
  "database" = "test",
  "table" = "test"
);

Creating the resource succeeds, but creating the external table fails with the following error:

get current notification event id failed: java.net.SocketException: Broken pipe (Write failed)

Check whether hive.metastore.uris in the resource is correct.

Check the Hive configuration file:

cat /home/disk1/sr/app/apache-hive-2.3.7-bin/conf/hive-site.xml

The port is wrong; change it as follows:

-- Drop the resource named hive0
DROP RESOURCE "hive0";
-- Create a Hive resource named hive0
CREATE EXTERNAL RESOURCE "hive0"
PROPERTIES (
  "type" = "hive",
  "hive.metastore.uris" = "thrift://xxxx:9083"
);

Querying the external table returns an error:

mysql> select * from hive_test;
ERROR 1064 (HY000): get partition detail failed: org.apache.doris.common.DdlException: get hive partition meta data failed: unsupported file format [org.apache.hadoop.mapred.TextInputFormat]

External tables over TEXTFILE are not supported (TEXTFILE is Hive's default format).
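To confirm how an existing Hive table is stored before pointing an external table at it, DESCRIBE FORMATTED in the Hive CLI reports the InputFormat. A quick check, using the table name from this walkthrough:

## Run in Hive: the InputFormat line in the output reveals the storage format.
## org.apache.hadoop.mapred.TextInputFormat indicates TEXTFILE, which this
## external-table path cannot read; ORC and Parquet tables work.
DESCRIBE FORMATTED test;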

ORC format

Hive

## Create the table
CREATE TABLE test_orc(
  siteid int,
  citycode int,
  username string,
  pv int)
row format delimited fields terminated by '\t'
stored as orc

Load data:

insert into test_orc select siteid, citycode, username, pv from test;

show create table test_orc

CREATE TABLE test_orc(
  siteid int,
  citycode int,
  username string,
  pv int)
ROW FORMAT SERDE
  'org.apache.hadoop.hive.ql.io.orc.OrcSerde'
WITH SERDEPROPERTIES (
  'field.delim'='\t',
  'serialization.format'='\t')
STORED AS INPUTFORMAT
  'org.apache.hadoop.hive.ql.io.orc.OrcInputFormat'
OUTPUTFORMAT
  'org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat'
LOCATION
  'hdfs://xxxx:9002/user/hive/warehouse/test.db/test_orc'
TBLPROPERTIES (
  'transient_lastDdlTime'='1634731190')

Query the external table from StarRocks

## Create the Hive external table
CREATE EXTERNAL TABLE hive_test_orc(
  siteid int,
  citycode int,
  username string,
  pv int
) ENGINE=HIVE
PROPERTIES (
  "resource" = "hive0",
  "database" = "test",
  "table" = "test_orc"
);

Query the external table:

select * from hive_test_orc;

Update the data in Hive:

## Insert data
insert into test_orc select siteid, citycode, username, pv from test;

## Query the data
hive> select * from test_orc;
OK
777        100        基本        1
778        101        概念        1
779        102        测试        1
780        103        spark        1
781        104        load        1
782        105        honest        1
777        100        基本        1
778        101        概念        1
779        102        测试        1
780        103        spark        1
781        104        load        1
782        105        honest        1
Time taken: 0.086 seconds, Fetched: 12 row(s)

Query the data from StarRocks:

mysql> select * from hive_test_orc;
+--------+----------+----------+------+
| siteid | citycode | username | pv   |
+--------+----------+----------+------+
|    777 |      100 | 基本     |    1 |
|    778 |      101 | 概念     |    1 |
|    779 |      102 | 测试     |    1 |
|    780 |      103 | spark    |    1 |
|    781 |      104 | load     |    1 |
|    782 |      105 | honest   |    1 |
+--------+----------+----------+------+
6 rows in set (0.01 sec)

The data is out of sync.

refresh

mysql> REFRESH EXTERNAL TABLE hive_test_orc;
Query OK, 0 rows affected (1.31 sec)

mysql> select * from hive_test_orc;
+--------+----------+----------+------+
| siteid | citycode | username | pv   |
+--------+----------+----------+------+
|    777 |      100 | 基本     |    1 |
|    778 |      101 | 概念     |    1 |
|    779 |      102 | 测试     |    1 |
|    780 |      103 | spark    |    1 |
|    781 |      104 | load     |    1 |
|    782 |      105 | honest   |    1 |
|    777 |      100 | 基本     |    1 |
|    778 |      101 | 概念     |    1 |
|    779 |      102 | 测试     |    1 |
|    780 |      103 | spark    |    1 |
|    781 |      104 | load     |    1 |
|    782 |      105 | honest   |    1 |
+--------+----------+----------+------+
12 rows in set (1.03 sec)
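As an aside, REFRESH EXTERNAL TABLE also accepts a PARTITION clause for partitioned Hive tables, so only the changed partitions need refreshing. This is an assumption to verify against your StarRocks version's documentation, and the tables in this walkthrough are unpartitioned, so the partition name below is purely hypothetical:

-- Refresh metadata for specific partitions only (hypothetical partition name)
REFRESH EXTERNAL TABLE hive_test_orc PARTITION ('date=20211020');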

Parquet format

Hive

## Create the table
CREATE TABLE test_parquet(
  siteid int,
  citycode int,
  username string,
  pv int)
stored as parquet

Load data:

insert into test_parquet select siteid, citycode, username, pv from test;

show create table test_parquet

CREATE TABLE test_parquet(
  siteid int,
  citycode int,
  username string,
  pv int)
ROW FORMAT SERDE
  'org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe'
STORED AS INPUTFORMAT
  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'
OUTPUTFORMAT
  'org.apache.hadoop.hive.ql.io.parquet.MapredParquetOutputFormat'
LOCATION
  'hdfs://xxxx:9002/user/hive/warehouse/test.db/test_parquet'
TBLPROPERTIES (
  'transient_lastDdlTime'='1634797148')

Query the external table from StarRocks

## Create the Hive external table
CREATE EXTERNAL TABLE hive_test_parquet(
  siteid int,
  citycode int,
  username string,
  pv int
) ENGINE=HIVE
PROPERTIES (
  "resource" = "hive0",
  "database" = "test",
  "table" = "test_parquet"
);

Query the external table:

mysql> select * from hive_test_parquet;
+--------+----------+----------+------+
| siteid | citycode | username | pv   |
+--------+----------+----------+------+
|    777 |      100 | 基本     |    1 |
|    778 |      101 | 概念     |    1 |
|    779 |      102 | 测试     |    1 |
|    780 |      103 | spark    |    1 |
|    781 |      104 | load     |    1 |
|    782 |      105 | honest   |    1 |
+--------+----------+----------+------+
6 rows in set (0.04 sec)

Schema change

Add columns

Hive

hive> alter table test_orc add columns (name1 varchar(10));
OK
Time taken: 0.055 seconds
hive> select * from test_orc;
OK
777        100        基本        1        NULL
778        101        概念        1        NULL
779        102        测试        1        NULL
780        103        spark        1        NULL
781        104        load        1        NULL
782        105        honest        1        NULL
777        100        基本        1        NULL
778        101        概念        1        NULL
779        102        测试        1        NULL
780        103        spark        1        NULL
781        104        load        1        NULL
782        105        honest        1        NULL
Time taken: 0.079 seconds, Fetched: 12 row(s)

StarRocks query:

mysql> select * from hive_test_orc;
+--------+----------+----------+------+
| siteid | citycode | username | pv   |
+--------+----------+----------+------+
|    777 |      100 | 基本     |    1 |
|    778 |      101 | 概念     |    1 |
|    779 |      102 | 测试     |    1 |
|    780 |      103 | spark    |    1 |
|    781 |      104 | load     |    1 |
|    782 |      105 | honest   |    1 |
|    777 |      100 | 基本     |    1 |
|    778 |      101 | 概念     |    1 |
|    779 |      102 | 测试     |    1 |
|    780 |      103 | spark    |    1 |
|    781 |      104 | load     |    1 |
|    782 |      105 | honest   |    1 |
+--------+----------+----------+------+
12 rows in set (0.01 sec)

refresh

## Refresh the external table
REFRESH EXTERNAL TABLE hive_test_orc;

## Query the external table
mysql> select * from hive_test_orc;
+--------+----------+----------+------+
| siteid | citycode | username | pv   |
+--------+----------+----------+------+
|    777 |      100 | 基本     |    1 |
|    778 |      101 | 概念     |    1 |
|    779 |      102 | 测试     |    1 |
|    780 |      103 | spark    |    1 |
|    781 |      104 | load     |    1 |
|    782 |      105 | honest   |    1 |
|    777 |      100 | 基本     |    1 |
|    778 |      101 | 概念     |    1 |
|    779 |      102 | 测试     |    1 |
|    780 |      103 | spark    |    1 |
|    781 |      104 | load     |    1 |
|    782 |      105 | honest   |    1 |
+--------+----------+----------+------+
12 rows in set (0.03 sec)

REFRESH has no effect here: queries still work, but the newly added column only becomes visible after recreating the external table.
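A minimal recreate sketch, reusing the DDL from earlier in this walkthrough with the new name1 column added:

## Drop the stale external table definition and recreate it with the added column
DROP TABLE hive_test_orc;
CREATE EXTERNAL TABLE hive_test_orc(
  siteid int,
  citycode int,
  username string,
  pv int,
  name1 varchar(10)
) ENGINE=HIVE
PROPERTIES (
  "resource" = "hive0",
  "database" = "test",
  "table" = "test_orc"
);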

Delete columns

Hive

## Drop the pv column (REPLACE COLUMNS keeps only the listed columns)
hive> alter table test_parquet replace columns(
    >  siteid int, 
    >  citycode int, 
    >  username string
    >  );
OK
Time taken: 0.046 seconds
 
## Query the Hive data
hive> select * from test_parquet;
OK
777        100        基本
778        101        概念
779        102        测试
780        103        spark
781        104        load
782        105        honest
Time taken: 0.077 seconds, Fetched: 6 row(s)

StarRocks query:

## Query the data
mysql> select * from hive_test_parquet;
+--------+----------+----------+------+
| siteid | citycode | username | pv   |
+--------+----------+----------+------+
|    777 |      100 | 基本     |    1 |
|    778 |      101 | 概念     |    1 |
|    779 |      102 | 测试     |    1 |
|    780 |      103 | spark    |    1 |
|    781 |      104 | load     |    1 |
|    782 |      105 | honest   |    1 |
+--------+----------+----------+------+
6 rows in set (0.00 sec)

## Refresh the external table
REFRESH EXTERNAL TABLE hive_test_parquet;

## Query the data
mysql> select * from hive_test_parquet;
+--------+----------+----------+------+
| siteid | citycode | username | pv   |
+--------+----------+----------+------+
|    777 |      100 | 基本     |    1 |
|    778 |      101 | 概念     |    1 |
|    779 |      102 | 测试     |    1 |
|    780 |      103 | spark    |    1 |
|    781 |      104 | load     |    1 |
|    782 |      105 | honest   |    1 |
+--------+----------+----------+------+
6 rows in set (0.01 sec)

REFRESH has no effect here either; the external table has to be recreated.
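If the external table should match the reduced schema, the same recreate pattern applies (a sketch, names as above; the rest of this walkthrough keeps the original four-column external table to see what happens):

## Recreate the external table without the dropped pv column
DROP TABLE hive_test_parquet;
CREATE EXTERNAL TABLE hive_test_parquet(
  siteid int,
  citycode int,
  username string
) ENGINE=HIVE
PROPERTIES (
  "resource" = "hive0",
  "database" = "test",
  "table" = "test_parquet"
);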

Load new data into Hive

## Load data
insert into test_parquet select siteid, citycode, username from test;

Query the Hive data:

hive> select * from test_parquet;
OK
777        100        基本
778        101        概念
779        102        测试
780        103        spark
781        104        load
782        105        honest
777        100        基本
778        101        概念
779        102        测试
780        103        spark
781        104        load
782        105        honest
Time taken: 0.087 seconds, Fetched: 12 row(s)

StarRocks query:

## Refresh the external table
REFRESH EXTERNAL TABLE hive_test_parquet;

## Query the data
mysql> select * from hive_test_parquet;
+--------+----------+----------+------+
| siteid | citycode | username | pv   |
+--------+----------+----------+------+
|    777 |      100 | 基本     |    1 |
|    778 |      101 | 概念     |    1 |
|    779 |      102 | 测试     |    1 |
|    780 |      103 | spark    |    1 |
|    781 |      104 | load     |    1 |
|    782 |      105 | honest   |    1 |
|    777 |      100 | 基本     | NULL |
|    778 |      101 | 概念     | NULL |
|    779 |      102 | 测试     | NULL |
|    780 |      103 | spark    | NULL |
|    781 |      104 | load     | NULL |
|    782 |      105 | honest   | NULL |
+--------+----------+----------+------+
12 rows in set (0.02 sec)

The query succeeds; the pv column is NULL for the newly loaded rows, and no error is raised.

Dropping columns works. In production, adding columns in Hive is the more common case; testing shows that after a column is added in Hive, the previously created StarRocks external table can still read the data.

Strange: then why does mine not work at all? I am using CDH Hive (Kerberos disabled), and neither TEXTFILE nor ORC tables work.


------- MySQL client session
mysql>
mysql> DROP RESOURCE "hive3";
Query OK, 0 rows affected (0.00 sec)

mysql>
mysql> CREATE EXTERNAL RESOURCE "hive3"
    -> PROPERTIES (
    -> "type" = "hive",
    -> "hive.metastore.uris" = "thrift://wise3:9083"
    -> );
Query OK, 0 rows affected (0.00 sec)

mysql> CREATE EXTERNAL TABLE export_hive3 (
    -> p_partkey int,
    -> p_name String,
    -> p_mfgr String,
    -> p_category String,
    -> p_brand String,
    -> p_color String,
    -> p_type String,
    -> p_size int,
    -> p_container String
    -> ) ENGINE=HIVE
    -> PROPERTIES (
    -> "resource" = "hive3",
    -> "database" = "wisedata",
    -> "table" = "export_hive_orc"
    -> );
ERROR 1064 (HY000): Could not initialize class org.apache.hadoop.hive.conf.HiveConf
mysql>

-------- FE node log content
2022-05-27 17:52:49,316 [query] |Client=122.14.231.78:46426|User=root|Db=default_cluster:example_db|State=EOF|Time=1|ScanBytes=0|ScanRows=0|ReturnRows=13|StmtId=45|QueryId=c4853f91-dda2-11ec-8ebb-5c546d532d23|IsQuery=false|feIp=122.14.231.78|Stmt=SHOW RESOURCES|Digest=
2022-05-27 17:52:59,429 [query] |Client=122.14.231.78:46426|User=root|Db=default_cluster:example_db|State=OK|Time=5|ScanBytes=0|ScanRows=0|ReturnRows=0|StmtId=46|QueryId=ca8bc262-dda2-11ec-8ebb-5c546d532d23|IsQuery=false|feIp=122.14.231.78|Stmt=DROP RESOURCE "hive3"|Digest=
2022-05-27 17:53:04,560 [query] |Client=122.14.231.78:46426|User=root|Db=default_cluster:example_db|State=OK|Time=4|ScanBytes=0|ScanRows=0|ReturnRows=0|StmtId=47|QueryId=cd9ab013-dda2-11ec-8ebb-5c546d532d23|IsQuery=false|feIp=122.14.231.78|Stmt=CREATE EXTERNAL RESOURCE "hive3" PROPERTIES ( "type" = "hive", "hive.metastore.uris" = "thrift://wise3:9083" )|Digest=
2022-05-27 17:53:11,715 [query] |Client=122.14.231.78:46426|User=root|Db=default_cluster:example_db|State=ERR|Time=4|ScanBytes=0|ScanRows=0|ReturnRows=0|StmtId=48|QueryId=d1de9b54-dda2-11ec-8ebb-5c546d532d23|IsQuery=false|feIp=122.14.231.78|Stmt=CREATE EXTERNAL TABLE export_hive3 (
p_partkey int(11) NULL COMMENT "",
p_name varchar(65533) NULL COMMENT "",
p_mfgr varchar(65533) NULL COMMENT "",
p_category varchar(65533) NULL COMMENT "",
p_brand varchar(65533) NULL COMMENT "",
p_color varchar(65533) NULL COMMENT "",
p_type varchar(65533) NULL COMMENT "",
p_size int(11) NULL COMMENT "",
p_container varchar(65533) NULL COMMENT ""
) ENGINE = HIVE
PROPERTIES ("database" = "wisedata",
"resource" = "hive3",
"table" = "export_hive_orc")|Digest=


The relevant Hive configuration files have already been placed in the FE and BE config directories.

Hi, could you please provide the complete exception stack from fe.info or fe.warn?

hive外部表调试.zip (1.5 MB)
Hi, thanks for your support. The files it contains are shown below.

Hi, the logs you sent do not contain anything related to export_hive4. Please search the logs for the error related to it.

[screenshot: file listing of hive外部表调试.zip]


Search for export_hive4 in the master FE's fe.log, not in fe.audit.log.




The related log, as text ------------------------
2022-05-31 18:02:01,316 WARN (thrift-server-pool-6|155) [StmtExecutor.execute():460] execute Exception, sql CREATE EXTERNAL TABLE export_hive4 (
p_partkey int ,
p_name String ,
p_mfgr String ,
p_category String ,
p_brand String ,
p_color String ,
p_type String ,
p_size int ,
p_container String
) ENGINE=HIVE
PROPERTIES (
"database" = "wisedata",
"resource" = "hive4",
"table" = "export_hive_orc"
)
java.lang.ExceptionInInitializerError: null
at org.apache.hadoop.hive.conf.HiveConf.<clinit>(HiveConf.java:109) ~[hive-jdbc-standalone.jar:1.1.0-cdh5.10.1]
at com.starrocks.external.hive.HiveMetaClient.<init>(HiveMetaClient.java:82) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveRepository.getClient(HiveRepository.java:73) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveRepository.getTable(HiveRepository.java:111) ~[starrocks-fe.jar:?]
at com.starrocks.catalog.HiveTable.validate(HiveTable.java:358) ~[starrocks-fe.jar:?]
at com.starrocks.catalog.HiveTable.<init>(HiveTable.java:110) ~[starrocks-fe.jar:?]
at com.starrocks.catalog.Catalog.createHiveTable(Catalog.java:4258) ~[starrocks-fe.jar:?]
at com.starrocks.catalog.Catalog.createTable(Catalog.java:3075) ~[starrocks-fe.jar:?]
at com.starrocks.qe.DdlExecutor.execute(DdlExecutor.java:115) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.handleDdlStmt(StmtExecutor.java:909) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.execute(StmtExecutor.java:429) [starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.proxyExecute(ConnectProcessor.java:600) [starrocks-fe.jar:?]
at com.starrocks.service.FrontendServiceImpl.forward(FrontendServiceImpl.java:588) [starrocks-fe.jar:?]
at com.starrocks.thrift.FrontendService$Processor$forward.getResult(FrontendService.java:1951) [starrocks-fe.jar:?]
at com.starrocks.thrift.FrontendService$Processor$forward.getResult(FrontendService.java:1931) [starrocks-fe.jar:?]
at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) [libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:38) [libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:313) [libthrift-0.13.0.jar:0.13.0]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) [?:1.8.0_144]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [?:1.8.0_144]
at java.lang.Thread.run(Thread.java:748) [?:1.8.0_144]
Caused by: java.lang.IllegalArgumentException: Unrecognized Hadoop major version number: 3.3.0
at org.apache.hadoop.hive.shims.ShimLoader.getMajorVersion(ShimLoader.java:169) ~[hive-shims-common.jar:1.1.0-cdh5.10.1]
at org.apache.hadoop.hive.shims.ShimLoader.loadShims(ShimLoader.java:134) ~[hive-shims-common.jar:1.1.0-cdh5.10.1]
at org.apache.hadoop.hive.shims.ShimLoader.getHadoopShims(ShimLoader.java:95) ~[hive-shims-common.jar:1.1.0-cdh5.10.1]
at org.apache.hadoop.hive.conf.HiveConf$ConfVars.<clinit>(HiveConf.java:362) ~[hive-jdbc-standalone.jar:1.1.0-cdh5.10.1]
... 21 more
2022-05-31 18:02:05,719 WARN (thrift-server-pool-6|155) [StmtExecutor.execute():460] execute Exception, sql CREATE EXTERNAL TABLE export_hive4 (
p_partkey int ,
p_name String ,
p_mfgr String ,
p_category String ,
p_brand String ,
p_color String ,
p_type String ,
p_size int ,
p_container String
) ENGINE=HIVE
PROPERTIES (
"database" = "wisedata",
"resource" = "hive4",
"table" = "export_hive_orc"
)
java.lang.NoClassDefFoundError: Could not initialize class org.apache.hadoop.hive.conf.HiveConf
at com.starrocks.external.hive.HiveMetaClient.<init>(HiveMetaClient.java:82) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveRepository.getClient(HiveRepository.java:73) ~[starrocks-fe.jar:?]
at com.starrocks.external.hive.HiveRepository.getTable(HiveRepository.java:111) ~[starrocks-fe.jar:?]
at com.starrocks.catalog.HiveTable.validate(HiveTable.java:358) ~[starrocks-fe.jar:?]
at com.starrocks.catalog.HiveTable.<init>(HiveTable.java:110) ~[starrocks-fe.jar:?]
at com.starrocks.catalog.Catalog.createHiveTable(Catalog.java:4258) ~[starrocks-fe.jar:?]
at com.starrocks.catalog.Catalog.createTable(Catalog.java:3075) ~[starrocks-fe.jar:?]
at com.starrocks.qe.DdlExecutor.execute(DdlExecutor.java:115) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.handleDdlStmt(StmtExecutor.java:909) ~[starrocks-fe.jar:?]
at com.starrocks.qe.StmtExecutor.execute(StmtExecutor.java:429) [starrocks-fe.jar:?]
at com.starrocks.qe.ConnectProcessor.proxyExecute(ConnectProcessor.java:600) [starrocks-fe.jar:?]
at com.starrocks.service.FrontendServiceImpl.forward(FrontendServiceImpl.java:588) [starrocks-fe.jar:?]
at com.starrocks.thrift.FrontendService$Processor$forward.getResult(FrontendService.java:1951) [starrocks-fe.jar:?]
at com.starrocks.thrift.FrontendService$Processor$forward.getResult(FrontendService.java:1931) [starrocks-fe.jar:?]
at org.apache.thrift.ProcessFunction.process(ProcessFunction.java:38) [libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.TBaseProcessor.process(TBaseProcessor.java:38) [libthrift-0.13.0.jar:0.13.0]
at org.apache.thrift.server.TThreadPoolServer$WorkerProcess.run(TThreadPoolServer.java:313) [libthrift-0.13.0.jar:0.13.0]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) [?:1.8.0_144]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [?:1.8.0_144]
at java.lang.Thread.run(Thread.java:748) [?:1.8.0_144]

@U_1650944101036_5913 Check whether this file exists: fe/lib/hive-apache-3.0.0-7.jar

Where did this jar come from?

It is a jar from CDH's hive lib directory.

Check whether your BE environment is missing a JDK configuration. JDK 8+ is required; a JRE will not work.