Accessing Hive from Java via JDBC
Straight to the code (full listing below). One prerequisite: start the Hive JDBC service first with hive --service hiveserver
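Note: recent Hive releases ship HiveServer2 and drop the original HiveServer, so on those versions the service is started with hive --service hiveserver2, and both the driver class and the URL scheme change. A minimal connectivity sketch for that case, assuming the same host and port as the example below (the class name Hive2ConnectionCheck is made up for illustration):

package hive;

import java.sql.Connection;
import java.sql.DriverManager;

public class Hive2ConnectionCheck {
    public static void main(String[] args) throws Exception {
        // HiveServer2 driver class; note the jdbc:hive2:// URL scheme
        Class.forName("org.apache.hive.jdbc.HiveDriver");
        // Host "hadoop" and port 10000 are assumptions carried over from the main example
        try (Connection conn = DriverManager.getConnection(
                "jdbc:hive2://hadoop:10000/default", "", "")) {
            System.out.println("connected: " + !conn.isClosed());
        }
    }
}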
package hive;

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveDemo {

    static {
        // Register the Hive JDBC driver (HiveServer1)
        try {
            Class.forName("org.apache.hadoop.hive.jdbc.HiveDriver");
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        }
    }

    public static void main(String[] args) throws Exception {
        // Open a connection; HiveServer1 ignores the user name and password
        Connection conn = DriverManager.getConnection(
                "jdbc:hive://hadoop:10000/default", "", "");
        Statement st = conn.createStatement();
        String tableName = "u1_data";

        // Drop the table if it already exists (DDL goes through execute(), not executeQuery())
        st.execute("drop table if exists " + tableName);

        // Create the table
        st.execute("create table " + tableName + "("
                + "userid int,"
                + "movieid int,"
                + "rating int,"
                + "city string,"
                + "viewTime string"
                + ") "
                + "row format delimited "
                + "fields terminated by '\t' "
                + "stored as textfile");

        // List all tables
        String sql = "show tables";
        System.out.println("running: " + sql);
        ResultSet rs = st.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(1));
        }

        // Show the table's schema
        sql = "describe " + tableName;
        System.out.println("running: " + sql);
        rs = st.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(1) + "\t" + rs.getString(2));
        }

        // Load data from HDFS into the table
        String filePath = "hdfs://hadoop:9000/input";
        sql = "load data inpath '" + filePath + "' overwrite into table " + tableName;
        System.out.println("running: " + sql);
        st.execute(sql);

        // Query the first five rows
        sql = "select * from " + tableName + " limit 5";
        System.out.println("running: " + sql);
        rs = st.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(3) + "\t" + rs.getString(4));
        }

        // Count the rows (this runs as a MapReduce job)
        sql = "select count(*) from " + tableName;
        System.out.println("running: " + sql);
        rs = st.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(1));
        }

        // Release resources
        rs.close();
        st.close();
        conn.close();
    }
}
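One caveat about the load data step: the file at hdfs://hadoop:9000/input is assumed to already exist as tab-separated text whose columns match u1_data (userid, movieid, rating, city, viewTime). A hypothetical input line, purely for illustration:

196	242	3	Beijing	881250949

Also note that load data ... inpath (without local) moves the file into the table's warehouse directory rather than copying it, so the source path is emptied after a successful load.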