```sql
-- Syntax
CREATE [EXTERNAL] TABLE table_name (
  col_name col_type [NULL | NOT NULL] [COMMENT "comment"]
) ENGINE=HIVE
[COMMENT "comment"]
PROPERTIES (
  'property_name' = 'property_value',
  ...
);

-- Example 1: create the hive_table table under hive_db in a Hive cluster
CREATE TABLE `t_hive` (
  `k1` int NOT NULL COMMENT "",
  `k2` char(10) NOT NULL COMMENT "",
  `k3` datetime NOT NULL COMMENT "",
  `k5` varchar(20) NOT NULL COMMENT "",
  `k6` double NOT NULL COMMENT ""
) ENGINE=HIVE
COMMENT "HIVE"
PROPERTIES (
  'hive.metastore.uris' = 'thrift://192.168.0.1:9083',
  'database' = 'hive_db',
  'table' = 'hive_table'
);

-- Example 2: create the hive_table table under hive_db in a Hive cluster,
-- with HDFS HA configured
CREATE TABLE `t_hive` (
  `k1` int NOT NULL COMMENT "",
  `k2` char(10) NOT NULL COMMENT "",
  `k3` datetime NOT NULL COMMENT "",
  `k5` varchar(20) NOT NULL COMMENT "",
  `k6` double NOT NULL COMMENT ""
) ENGINE=HIVE
COMMENT "HIVE"
PROPERTIES (
  'hive.metastore.uris' = 'thrift://192.168.0.1:9083',
  'database' = 'hive_db',
  'table' = 'hive_table',
  'dfs.nameservices' = 'hacluster',
  'dfs.ha.namenodes.hacluster' = 'n1,n2',
  'dfs.namenode.rpc-address.hacluster.n1' = '192.168.0.1:8020',
  'dfs.namenode.rpc-address.hacluster.n2' = '192.168.0.2:8020',
  'dfs.client.failover.proxy.provider.hacluster' = 'org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider'
);

-- Example 3: create the hive_table table under hive_db in a Hive cluster,
-- with HDFS HA configured and Kerberos authentication enabled
CREATE TABLE `t_hive` (
  `k1` int NOT NULL COMMENT "",
  `k2` char(10) NOT NULL COMMENT "",
  `k3` datetime NOT NULL COMMENT "",
  `k5` varchar(20) NOT NULL COMMENT "",
  `k6` double NOT NULL COMMENT ""
) ENGINE=HIVE
COMMENT "HIVE"
PROPERTIES (
  'hive.metastore.uris' = 'thrift://192.168.0.1:9083',
  'database' = 'hive_db',
  'table' = 'hive_table',
  'dfs.nameservices' = 'hacluster',
  'dfs.ha.namenodes.hacluster' = 'n1,n2',
  'dfs.namenode.rpc-address.hacluster.n1' = '192.168.0.1:8020',
  'dfs.namenode.rpc-address.hacluster.n2' = '192.168.0.2:8020',
  'dfs.client.failover.proxy.provider.hacluster' = 'org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider',
  'dfs.namenode.kerberos.principal' = 'hadoop/_HOST@REALM.COM',
  'hadoop.security.authentication' = 'kerberos',
  'hadoop.kerberos.principal' = 'doris_test@REALM.COM',
  'hadoop.kerberos.keytab' = '/path/to/doris_test.keytab'
);

-- Example 4: create the hive_table table under hive_db in a Hive cluster,
-- with Hive data stored on S3
CREATE TABLE `t_hive` (
  `k1` int NOT NULL COMMENT "",
  `k2` char(10) NOT NULL COMMENT "",
  `k3` datetime NOT NULL COMMENT "",
  `k5` varchar(20) NOT NULL COMMENT "",
  `k6` double NOT NULL COMMENT ""
) ENGINE=HIVE
COMMENT "HIVE"
PROPERTIES (
  'hive.metastore.uris' = 'thrift://192.168.0.1:9083',
  'database' = 'hive_db',
  'table' = 'hive_table',
  'AWS_ACCESS_KEY' = 'your_access_key',
  'AWS_SECRET_KEY' = 'your_secret_key',
  'AWS_ENDPOINT' = 's3.us-east-1.amazonaws.com',
  'AWS_REGION' = 'us-east-1'
);
```
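A quick way to confirm that a mapping was created as intended is to inspect it from the Doris side before running real queries. A minimal sketch, assuming the `t_hive` table from the examples above already exists and the Metastore is reachable:

```sql
-- Check the column mapping that Doris derived for the external table.
DESC t_hive;

-- Show the full DDL, including the PROPERTIES block, for troubleshooting.
SHOW CREATE TABLE t_hive;
```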
The parameters in `PROPERTIES` are:

- `hive.metastore.uris`: address of the Hive Metastore service.
- `database`: name of the Hive database to mount.
- `table`: name of the Hive table to mount.
- `dfs.nameservices`: name of the name service, consistent with hdfs-site.xml.
- `dfs.ha.namenodes.[nameservice ID]`: list of namenode IDs, consistent with hdfs-site.xml.
- `dfs.namenode.rpc-address.[nameservice ID].[name node ID]`: RPC address of each namenode, one entry per namenode, consistent with hdfs-site.xml.
- `dfs.client.failover.proxy.provider.[nameservice ID]`: Java class the HDFS client uses to connect to the active namenode, usually `org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider`.
- `hadoop.security.authentication`: set the authentication method to kerberos; the default is simple.
- `dfs.namenode.kerberos.principal`: Kerberos principal of the HDFS namenode service.
- `hadoop.kerberos.principal`: Kerberos principal Doris uses when connecting to HDFS.
- `hadoop.kerberos.keytab`: local path of the keytab file.
- `yarn.resourcemanager.principal`: Kerberos principal of the ResourceManager that Doris uses when connecting to the Hadoop cluster.
- `AWS_ACCESS_KEY`: SecretId of your Tencent Cloud account.
- `AWS_SECRET_KEY`: SecretKey of your Tencent Cloud account.
- `AWS_ENDPOINT`: endpoint of the COS bucket, e.g. cos.ap-nanjing.myqcloud.com.
- `AWS_REGION`: region of the COS bucket, e.g. ap-nanjing (a COS-based sketch follows this list).

Note that the value of `hadoop.kerberos.keytab` must be the absolute path of a local keytab file, and the Doris process must be allowed to access that file.
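Although Example 4 uses AWS-style values, the `AWS_*` properties above are documented in terms of Tencent Cloud COS, so a COS-backed Hive table is mounted the same way, just with a COS endpoint and region. A minimal sketch; the metastore address, bucket endpoint, region, and credentials are placeholders, not values taken from this document:

```sql
CREATE TABLE `t_hive_cos` (
  `k1` int NOT NULL COMMENT ""
) ENGINE=HIVE
COMMENT "HIVE"
PROPERTIES (
  'hive.metastore.uris' = 'thrift://192.168.0.1:9083',
  'database' = 'hive_db',
  'table' = 'hive_table',
  -- For COS, AWS_ACCESS_KEY / AWS_SECRET_KEY carry the account's SecretId / SecretKey.
  'AWS_ACCESS_KEY' = 'your_secret_id',
  'AWS_SECRET_KEY' = 'your_secret_key',
  'AWS_ENDPOINT' = 'cos.ap-nanjing.myqcloud.com',
  'AWS_REGION' = 'ap-nanjing'
);
```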
Type mapping between Hive and Doris columns:

| Hive | Doris | Description |
| --- | --- | --- |
| BOOLEAN | BOOLEAN | |
| CHAR | CHAR | only UTF-8 encoding is currently supported |
| VARCHAR | VARCHAR | only UTF-8 encoding is currently supported |
| TINYINT | TINYINT | |
| SMALLINT | SMALLINT | |
| INT | INT | |
| BIGINT | BIGINT | |
| FLOAT | FLOAT | |
| DOUBLE | DOUBLE | |
| DECIMAL | DECIMAL | |
| DATE | DATE | |
| TIMESTAMP | DATETIME | converting Timestamp to Datetime loses precision |
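The TIMESTAMP-to-DATETIME row is the one lossy conversion in the table: Hive TIMESTAMP values carry sub-second precision that the mapped DATETIME column does not preserve. An illustrative sketch, assuming `k3` were mapped from a Hive TIMESTAMP column (the sample value is hypothetical):

```sql
-- Hive stores:    2022-01-01 10:00:00.123456  (TIMESTAMP, sub-second precision)
-- Doris returns:  2022-01-01 10:00:00         (DATETIME, fractional seconds lost)
SELECT k3 FROM t_hive LIMIT 1;
```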
The currently supported Hive versions are 2.3.7 and 3.1.2; other versions have not been tested. Support for more versions will be added later.

Once created, the external table can be queried like an ordinary Doris table, for example:

```sql
select * from t_hive where k1 > 1000 and k3 = 'term' or k4 like '%doris';
```