```sql
-- Syntax
CREATE [EXTERNAL] TABLE table_name (
    col_name col_type [NULL | NOT NULL] [COMMENT "comment"]
) ENGINE=HIVE
[COMMENT "comment"]
PROPERTIES (
    'property_name' = 'property_value',
    ...
);

-- Example 1: Create the hive_table in hive_db of the Hive cluster
CREATE TABLE `t_hive` (
    `k1` int NOT NULL COMMENT "",
    `k2` char(10) NOT NULL COMMENT "",
    `k3` datetime NOT NULL COMMENT "",
    `k5` varchar(20) NOT NULL COMMENT "",
    `k6` double NOT NULL COMMENT ""
) ENGINE=HIVE
COMMENT "HIVE"
PROPERTIES (
    'hive.metastore.uris' = 'thrift://192.168.0.1:9083',
    'database' = 'hive_db',
    'table' = 'hive_table'
);

-- Example 2: Create the hive_table in hive_db of the Hive cluster,
-- with HDFS using HA configuration
CREATE TABLE `t_hive` (
    `k1` int NOT NULL COMMENT "",
    `k2` char(10) NOT NULL COMMENT "",
    `k3` datetime NOT NULL COMMENT "",
    `k5` varchar(20) NOT NULL COMMENT "",
    `k6` double NOT NULL COMMENT ""
) ENGINE=HIVE
COMMENT "HIVE"
PROPERTIES (
    'hive.metastore.uris' = 'thrift://192.168.0.1:9083',
    'database' = 'hive_db',
    'table' = 'hive_table',
    'dfs.nameservices' = 'hacluster',
    'dfs.ha.namenodes.hacluster' = 'n1,n2',
    'dfs.namenode.rpc-address.hacluster.n1' = '192.168.0.1:8020',
    'dfs.namenode.rpc-address.hacluster.n2' = '192.168.0.2:8020',
    'dfs.client.failover.proxy.provider.hacluster' = 'org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider'
);

-- Example 3: Create the hive_table in hive_db of the Hive cluster,
-- with HDFS using HA configuration and Kerberos authentication enabled
CREATE TABLE `t_hive` (
    `k1` int NOT NULL COMMENT "",
    `k2` char(10) NOT NULL COMMENT "",
    `k3` datetime NOT NULL COMMENT "",
    `k5` varchar(20) NOT NULL COMMENT "",
    `k6` double NOT NULL COMMENT ""
) ENGINE=HIVE
COMMENT "HIVE"
PROPERTIES (
    'hive.metastore.uris' = 'thrift://192.168.0.1:9083',
    'database' = 'hive_db',
    'table' = 'hive_table',
    'dfs.nameservices' = 'hacluster',
    'dfs.ha.namenodes.hacluster' = 'n1,n2',
    'dfs.namenode.rpc-address.hacluster.n1' = '192.168.0.1:8020',
    'dfs.namenode.rpc-address.hacluster.n2' = '192.168.0.2:8020',
    'dfs.client.failover.proxy.provider.hacluster' = 'org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider',
    'dfs.namenode.kerberos.principal' = 'hadoop/_HOST@REALM.COM',
    'hadoop.security.authentication' = 'kerberos',
    'hadoop.kerberos.principal' = 'doris_test@REALM.COM',
    'hadoop.kerberos.keytab' = '/path/to/doris_test.keytab'
);

-- Example 4: Create the hive_table in hive_db of the Hive cluster,
-- with Hive data stored on S3
CREATE TABLE `t_hive` (
    `k1` int NOT NULL COMMENT "",
    `k2` char(10) NOT NULL COMMENT "",
    `k3` datetime NOT NULL COMMENT "",
    `k5` varchar(20) NOT NULL COMMENT "",
    `k6` double NOT NULL COMMENT ""
) ENGINE=HIVE
COMMENT "HIVE"
PROPERTIES (
    'hive.metastore.uris' = 'thrift://192.168.0.1:9083',
    'database' = 'hive_db',
    'table' = 'hive_table',
    'AWS_ACCESS_KEY' = 'your_access_key',
    'AWS_SECRET_KEY' = 'your_secret_key',
    'AWS_ENDPOINT' = 's3.us-east-1.amazonaws.com',
    'AWS_REGION' = 'us-east-1'
);
```
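Once one of the examples above has been run, the mapping can be sanity-checked with standard Doris statements before querying; a minimal sketch, using the `t_hive` table from Example 1:

```sql
-- inspect the column mapping and the stored PROPERTIES of the mounted table
DESC t_hive;
SHOW CREATE TABLE t_hive;
```

The PROPERTIES used in the examples are described below.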
Parameter description:

- `hive.metastore.uris`: Hive Metastore service address.
- `database`: the name of the corresponding database when Hive is mounted.
- `table`: the name of the corresponding table when Hive is mounted.
- `dfs.nameservices`: the name of the name service, consistent with hdfs-site.xml.
- `dfs.ha.namenodes.[nameservice ID]`: the list of namenode IDs, consistent with hdfs-site.xml.
- `dfs.namenode.rpc-address.[nameservice ID].[name node ID]`: the RPC address of each namenode, one entry per namenode, consistent with hdfs-site.xml.
- `dfs.client.failover.proxy.provider.[nameservice ID]`: the Java class the HDFS client uses to connect to the active namenode, usually org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider.
- `hadoop.security.authentication`: the authentication method; set it to kerberos to enable Kerberos authentication (the default is simple).
- `dfs.namenode.kerberos.principal`: the Kerberos principal of the HDFS namenode service.
- `hadoop.kerberos.principal`: the Kerberos principal Doris uses when connecting to HDFS.
- `hadoop.kerberos.keytab`: the local path to the keytab file.
- `yarn.resourcemanager.principal`: the Kerberos principal of the ResourceManager when Doris connects to a Hadoop cluster; see the sketch after this list.
- `AWS_ACCESS_KEY`: the SecretId of your Tencent Cloud account.
- `AWS_SECRET_KEY`: the SecretKey of your Tencent Cloud account.
- `AWS_ENDPOINT`: the endpoint of the COS bucket, for example cos.ap-nanjing.myqcloud.com.
- `AWS_REGION`: the region of the COS bucket, for example ap-nanjing.

Note: to enable Kerberos authentication, the keytab file must be deployed at the local path specified by `hadoop.kerberos.keytab`, and the Doris process must be allowed to access this local file.
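The Kerberos-related properties above can be combined in a single statement; a minimal sketch based on Example 3, extended with `yarn.resourcemanager.principal` (the table name, principal names, and paths are placeholders to adjust for your cluster):

```sql
CREATE TABLE `t_hive_kerberos` (
    `k1` int NOT NULL COMMENT ""
) ENGINE=HIVE
COMMENT "HIVE"
PROPERTIES (
    'hive.metastore.uris' = 'thrift://192.168.0.1:9083',
    'database' = 'hive_db',
    'table' = 'hive_table',
    'hadoop.security.authentication' = 'kerberos',
    'dfs.namenode.kerberos.principal' = 'hadoop/_HOST@REALM.COM',
    'hadoop.kerberos.principal' = 'doris_test@REALM.COM',
    'hadoop.kerberos.keytab' = '/path/to/doris_test.keytab',
    -- hypothetical ResourceManager principal; adjust to your cluster
    'yarn.resourcemanager.principal' = 'rm/_HOST@REALM.COM'
);
```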
The supported Hive column types map to Doris types as follows:

| Hive | Doris | Description |
|------|-------|-------------|
| BOOLEAN | BOOLEAN | |
| CHAR | CHAR | Only UTF8 encoding is currently supported. |
| VARCHAR | VARCHAR | Only UTF8 encoding is currently supported. |
| TINYINT | TINYINT | |
| SMALLINT | SMALLINT | |
| INT | INT | |
| BIGINT | BIGINT | |
| FLOAT | FLOAT | |
| DOUBLE | DOUBLE | |
| DECIMAL | DECIMAL | |
| DATE | DATE | |
| TIMESTAMP | DATETIME | Converting TIMESTAMP to DATETIME may cause a loss of precision. |
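Since a Hive TIMESTAMP surfaces as DATETIME on the Doris side, filters on such a column use DATETIME literals; a minimal sketch against the `t_hive` table from the examples above, assuming the underlying Hive column for `k3` is a TIMESTAMP:

```sql
-- k3 is DATETIME on the Doris side; any sub-second precision in the
-- underlying Hive TIMESTAMP may have been truncated, so avoid filters
-- that depend on fractional seconds
select * from t_hive where k3 >= '2022-01-01 00:00:00';
```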
Hive versions 2.3.7 and 3.1.2 are supported by default; other versions have not been tested. More versions will be supported in the future.

After the table is created, you can query the data in the Hive table in the same way as a regular Doris table:

```sql
select * from t_hive where k1 > 1000 and k3 = 'term' or k4 like '%doris';
```
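Because a mounted Hive table behaves like a regular Doris table in queries, it can also be joined with internal tables. A minimal sketch, assuming a hypothetical internal Doris table `t_doris` with a `k1` key column and a `v1` value column:

```sql
-- t_doris is a hypothetical internal Doris table used for illustration
select h.k1, h.k6, d.v1
from t_hive h
join t_doris d on h.k1 = d.k1
where h.k1 > 1000;
```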