Maven依赖配置
4.0.0
com.volitation.hive
bigdata-data-management
1.0.0-SNAPSHOT
jar UTF-8
1.8 4.11 1.2.1 2.1.1
spring-milestones
http://repo.spring.io/libs-milestone/
junit
junit
${junit.version}
org.apache.hadoop
hadoop-core
${hadoop.core.version}
org.apache.hive
hive-jdbc
${hive.version}
Hive JDBC四大参数定义
/**
 * Code-set definitions: the four Hive JDBC connection parameters
 * (driver class, URL, user name, password).
 *
 * @author volitation
 */
public class PlatformDictionary {

    /** Fully-qualified class name of the HiveServer2 JDBC driver. */
    public static final String DRIVER_NAME = "org.apache.hive.jdbc.HiveDriver";

    /** HiveServer2 JDBC endpoint (host:port of the hive2 service). */
    public static final String URL = "jdbc:hive2://192.168.9.87:10000";

    /** User name used when opening the connection. */
    public static final String USER_NAME = "hadoop";

    /**
     * Password used when opening the connection.
     * SECURITY NOTE(review): credentials are hard-coded in source; move them
     * to external configuration (env var / properties file) before production use.
     */
    public static final String PASSWORD = "hadoop!QWE";

    /** Constants holder — not instantiable. */
    private PlatformDictionary() {
    }
}
异常处理
import java.util.HashMap;
import java.util.Map;
/**
 * Exception-handling utility class.
 *
 * @author volitation
 */
public class AbnormalUtils {

    /** Utility class — not instantiable. */
    private AbnormalUtils() {
    }

    /**
     * Extracts a short description of an exception: its type and message.
     *
     * <p>Uses the exception's cause when one is present, otherwise the
     * exception itself. (The original implementation dereferenced
     * {@code getCause()} unconditionally and threw an NPE for any exception
     * created without a cause; it also NPE'd on a null message.)
     *
     * @param e the exception to describe; must not be {@code null}
     * @return a {@code String} rendering of a map with the type and message
     */
    public static Object getAbnormal(Exception e) {
        // Fall back to the exception itself when there is no cause chain.
        Throwable t = (e.getCause() != null) ? e.getCause() : e;
        Object abnormalType = t.getClass().toString();
        // getMessage() may legitimately be null; String.valueOf avoids an NPE.
        Object abnormalName = String.valueOf(t.getMessage());
        Map<String, Object> map = new HashMap<>();
        map.put("异常类型", abnormalType);
        map.put("异常点信息", abnormalName);
        return map.toString();
    }
}
【大数据Hive系列之Hive API】Hive通过JDBC进行增删查操作
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import com.volitation.hive.platform.PlatformDictionary;
import com.volitation.hive.platform.utils.AbnormalUtils;
/**
* Hive JDBC操作
*
* @author volitation
*
*/
public class HiveJDBC {

// Shared JDBC handles. NOTE(review): static mutable state makes this class
// non-thread-safe; concurrent callers will interleave on the same handles.
private static Connection conn = null;
private static Statement stmt = null;
private static ResultSet rs = null;

/**
 * Loads the Hive JDBC driver and opens a connection and a statement.
 *
 * <p>The original implementation swallowed initialization failures, leaving
 * {@code stmt} null and deferring the error to an NPE at every call site;
 * this version fails fast with the cause preserved.
 *
 * @throws IllegalStateException if the driver cannot be loaded or the
 *         connection cannot be established
 */
private static void init() {
    try {
        Class.forName(PlatformDictionary.DRIVER_NAME);
        conn = DriverManager.getConnection(PlatformDictionary.URL, PlatformDictionary.USER_NAME,
                PlatformDictionary.PASSWORD);
        stmt = conn.createStatement();
    } catch (ClassNotFoundException | SQLException e) {
        System.err.println(AbnormalUtils.getAbnormal(e));
        throw new IllegalStateException("Hive JDBC initialization failed", e);
    }
}

/**
 * Creates a database.
 *
 * @param databaseName name of the database to create
 */
public static void createDatabase(String databaseName) {
    // Identifiers cannot be bound as PreparedStatement parameters, so validate
    // the name instead of concatenating untrusted text straight into the SQL.
    if (databaseName == null || !databaseName.matches("[A-Za-z0-9_]+")) {
        throw new IllegalArgumentException("Invalid database name: " + databaseName);
    }
    try {
        init();
        String sql = "create database " + databaseName;
        System.out.println("Running: " + sql);
        stmt.execute(sql);
    } catch (SQLException e) {
        System.err.println(AbnormalUtils.getAbnormal(e));
    } finally {
        destory(); // sic: "destory" is a typo for "destroy" but is defined elsewhere in this file
    }
}

/**
 * Lists all databases visible to the connected user.
 *
 * @return database names, in the order Hive returns them
 */
public static List<String> showDatabases() {
    // Typed List<String> replaces the original raw List; erasure-compatible
    // with existing callers.
    List<String> list = new ArrayList<>();
    try {
        init();
        String sql = "show databases";
        System.out.println("Running: " + sql);
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            // Column 1 of "show databases" is the database name.
            list.add(rs.getString(1));
        }
    } catch (SQLException e) {
        System.err.println(AbnormalUtils.getAbnormal(e));
    } finally {
        destory();
    }
    return list;
}

/**
 * Drops a database if it exists.
 *
 * @param databaseName name of the database to drop
 */
public static void dropDatabase(String databaseName) {
    // Identifiers cannot be bound as PreparedStatement parameters, so validate
    // the name instead of concatenating untrusted text straight into the SQL.
    if (databaseName == null || !databaseName.matches("[A-Za-z0-9_]+")) {
        throw new IllegalArgumentException("Invalid database name: " + databaseName);
    }
    try {
        init();
        String sql = "drop database if exists " + databaseName;
        System.out.println("Running: " + sql);
        stmt.execute(sql);
    } catch (SQLException e) {
        System.err.println(AbnormalUtils.getAbnormal(e));
    } finally {
        destory();
    }
}

/**
 * Executes a caller-supplied CREATE TABLE statement.
 *
 * @param createTableSql full DDL text to execute
 */
public static void createTable(String createTableSql) {
    try {
        init();
        // Log the DDL before running it, mirroring the other operations.
        System.out.println("Running: " + createTableSql);
        stmt.execute(createTableSql);
    } catch (SQLException sqlEx) {
        Object detail = AbnormalUtils.getAbnormal(sqlEx);
        System.err.println(detail);
    } finally {
        destory();
    }
}

/**
 * Lists the tables of a database.
 *
 * @param databaseName database to switch to before listing
 * @return table names, in the order Hive returns them
 */
public static List<String> showTables(String databaseName) {
    // "use <db>" is built by string concatenation and identifiers cannot be
    // bound as JDBC parameters, so validate the name up front.
    if (databaseName == null || !databaseName.matches("[A-Za-z0-9_]+")) {
        throw new IllegalArgumentException("Invalid database name: " + databaseName);
    }
    // Typed List<String> replaces the original raw List; erasure-compatible
    // with existing callers.
    List<String> list = new ArrayList<>();
    try {
        init();
        String useSql = "use " + databaseName;
        System.out.println("Running: " + useSql);
        stmt.execute(useSql);
        String sql = "show tables";
        System.out.println("Running: " + sql);
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            // Column 1 of "show tables" is the table name.
            list.add(rs.getString(1));
        }
    } catch (SQLException e) {
        System.err.println(AbnormalUtils.getAbnormal(e));
    } finally {
        destory();
    }
    return list;
}

/**
 * Describes a table's structure.
 *
 * @param databaseName database containing the table
 * @param tableName table to describe
 * @return rows of the table-structure description
 */
public static List
推荐阅读
- 大数据|大数据开发技术hive篇
- Hive中order by,sort by,distribute by,cluster by的区别
- Hive 函数使用(一)(datediff,row_number,partition)
- SCD|一种基于SparkSQL的Hive数据仓库拉链表缓慢变化维(SCD2+SCD1)的示例实现
- Hive ORC和Parquet
- HiveQL执行过程
- hive-LanguageManual
- 关于Hive数据查询的18种方法
- hive|使用Hive-JDBC遇到的一些问题解决