hive
beeline -u "jdbc:hive2://${hs2_ip}:${hs2_port}" -n hadoop
mvn archetype:generate -DgroupId=${yourgroupID} -DartifactId=${yourartifactID} -DarchetypeArtifactId=maven-archetype-quickstart
<dependencies>
  <dependency>
    <groupId>org.apache.hive</groupId>
    <artifactId>hive-jdbc</artifactId>
    <version>${hive_version}</version>
  </dependency>
  <dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>${hadoop_version}</version>
  </dependency>
</dependencies>
<build>
  <plugins>
    <plugin>
      <groupId>org.apache.maven.plugins</groupId>
      <artifactId>maven-compiler-plugin</artifactId>
      <configuration>
        <source>1.8</source>
        <target>1.8</target>
        <encoding>utf-8</encoding>
      </configuration>
    </plugin>
    <plugin>
      <artifactId>maven-assembly-plugin</artifactId>
      <configuration>
        <descriptorRefs>
          <descriptorRef>jar-with-dependencies</descriptorRef>
        </descriptorRefs>
      </configuration>
      <executions>
        <execution>
          <id>make-assembly</id>
          <phase>package</phase>
          <goals>
            <goal>single</goal>
          </goals>
        </execution>
      </executions>
    </plugin>
  </plugins>
</build>
package org.example;

import java.sql.*;

/**
 * Created by tencent on 2023/8/11.
 */
public class App {
    private static final String DRIVER_NAME = "org.apache.hive.jdbc.HiveDriver";

    public static void main(String[] args) throws SQLException {
        try {
            // Load the hive-jdbc driver
            Class.forName(DRIVER_NAME);
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
            System.exit(1);
        }
        // Get a connection from the connection string, user name, and password
        Connection conn = DriverManager.getConnection("jdbc:hive2://${hs2_ip}:${hs2_port}/default", "hadoop", "");
        // Create a Statement. conn.prepareStatement(sql) precompiles the SQL and guards against SQL injection,
        // but it is mainly used for parameterized execution; for running many different statements,
        // the approach below is recommended.
        Statement stmt = conn.createStatement();
        // The following runs simple table creation, insert, and query operations
        String tableName = "hive_test";
        stmt.execute("drop table if exists " + tableName);
        stmt.execute("create table " + tableName + " (key int, value string)");
        System.out.println("Create table success!");
        // show tables
        String sql = "show tables '" + tableName + "'";
        System.out.println("Running: " + sql);
        ResultSet res = stmt.executeQuery(sql);
        if (res.next()) {
            System.out.println(res.getString(1));
        }
        // describe table
        sql = "describe " + tableName;
        System.out.println("Running: " + sql);
        res = stmt.executeQuery(sql);
        while (res.next()) {
            System.out.println(res.getString(1) + "\t" + res.getString(2));
        }
        sql = "insert into " + tableName + " values (42,\"hello\"),(48,\"world\")";
        stmt.execute(sql);
        sql = "select * from " + tableName;
        System.out.println("Running: " + sql);
        res = stmt.executeQuery(sql);
        while (res.next()) {
            System.out.println(res.getInt(1) + "\t" + res.getString(2));
        }
        sql = "select count(1) from " + tableName;
        System.out.println("Running: " + sql);
        res = stmt.executeQuery(sql);
        while (res.next()) {
            System.out.println(res.getString(1));
        }
    }
}
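The comment in the sample above mentions conn.prepareStatement(sql) as the way to precompile SQL and avoid SQL injection when values come from user input. As a hedged illustration only (not part of the original sample; the class name ParamQuery is made up here, and the JDBC URL placeholders and hive_test table are carried over from the example), a parameterized query could look like this:

package org.example;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;

public class ParamQuery {
    public static void main(String[] args) throws Exception {
        // Load the hive-jdbc driver, same as in App
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        Connection conn = DriverManager.getConnection(
                "jdbc:hive2://${hs2_ip}:${hs2_port}/default", "hadoop", "");
        // The "?" placeholder is bound with setInt instead of concatenating the value into the SQL text
        PreparedStatement ps = conn.prepareStatement(
                "select key, value from hive_test where key = ?");
        ps.setInt(1, 42);
        ResultSet rs = ps.executeQuery();
        while (rs.next()) {
            System.out.println(rs.getInt(1) + "\t" + rs.getString(2));
        }
        rs.close();
        ps.close();
        conn.close();
    }
}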
mvn clean package -DskipTests
scp ${localfile} root@${master_public_ip}:/usr/local/service/hive
yarn jar ./hive-test-1.0-SNAPSHOT-jar-with-dependencies.jar org.example.App
Create table success!
Running: show tables 'hive_test'
hive_test
Running: describe hive_test
key int
value string
Running: select * from hive_test
42 hello
48 world
Running: select count(1) from hive_test
2
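Note that App leaves its Statement, ResultSet, and Connection open when main returns. As a sketch only (not part of the original walkthrough; it assumes the hive_test table created above still exists), the same connection setup can be wrapped in try-with-resources so everything is closed even when a query fails:

package org.example;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class AppWithCleanup {
    public static void main(String[] args) throws Exception {
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        // try-with-resources closes the connection, statement, and result set automatically
        try (Connection conn = DriverManager.getConnection(
                     "jdbc:hive2://${hs2_ip}:${hs2_port}/default", "hadoop", "");
             Statement stmt = conn.createStatement();
             ResultSet res = stmt.executeQuery("select count(1) from hive_test")) {
            while (res.next()) {
                System.out.println(res.getString(1));
            }
        }
    }
}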
pip3 install sasl
pip3 install thrift
pip3 install thrift-sasl
pip3 install pyhive
from pyhive import hive
import sys

default_encoding = 'utf-8'

conn = hive.connect(host='${hs2_host}',
                    port='${hs2_port}',
                    username='hadoop',
                    password='hadoop',
                    database='default',
                    auth="CUSTOM",
                    )

tablename = 'HiveByPython'
cur = conn.cursor()
print("\n")
print('show the tables in default: ')
cur.execute('show tables')
for i in cur.fetchall():
    print(i)

cur.execute('drop table if exists ' + tablename)
cur.execute('create table ' + tablename + ' (key int,value string)')
print("\n")
print('show the new table: ')
cur.execute('show tables ' + "'" + tablename + "'")
for i in cur.fetchall():
    print(i)

print("\n")
print("contents from " + tablename + ":")
cur.execute('insert into ' + tablename + ' values (42,"hello"),(48,"world")')
cur.execute('select * from ' + tablename)
for i in cur.fetchall():
    print(i)
python3 hivetest.py
show the tables in default:

show the new table:
('hivebypython',)

contents from HiveByPython:
(42, 'hello')
(48, 'world')