Experiment environment
Hadoop 2.8.0
HBase 1.4.9
CentOS 7.2
pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>java_Hadoop</groupId>
    <artifactId>java_Hadoop</artifactId>
    <version>1.0-SNAPSHOT</version>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>6</source>
                    <target>6</target>
                </configuration>
            </plugin>
        </plugins>
    </build>

    <dependencies>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>2.8.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>2.8.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-core</artifactId>
            <version>2.8.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
            <version>2.8.0</version>
        </dependency>
        <dependency>
            <groupId>log4j</groupId>
            <artifactId>log4j</artifactId>
            <version>1.2.17</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>2.8.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hive</groupId>
            <artifactId>hive-jdbc</artifactId>
            <version>2.3.0</version>
        </dependency>
    </dependencies>
</project>
Java code
package Hive;

import Hdfs.*;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

import java.io.File;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.text.SimpleDateFormat;
import java.util.Date;

public class Hive {

    private static Configuration conf = new Configuration();

    static {
        // Point the Hadoop client at the HDFS NameNode (note the hdfs:// scheme).
        conf.set("fs.defaultFS", "hdfs://172.18.74.236:9000");
    }

    FileSystem fs = FileSystem.get(conf);

    // HiveServer2 JDBC driver and connection settings.
    private static String driverName = "org.apache.hive.jdbc.HiveDriver";
    private static String url = "jdbc:hive2://172.18.74.236:10000/default";
    private static String user = "root";
    private static String password = "yourPassword"; // replace with your password

    public static String today = new SimpleDateFormat("yyyy-MM-dd").format(new Date());

    private static Connection conn = null;
    private static Statement stmt = null;
    private static ResultSet rs = null;

    public Hive() throws IOException {
    }

    // Load the JDBC driver and open the HiveServer2 connection.
    private static void init() throws Exception {
        Class.forName(driverName);
        conn = DriverManager.getConnection(url, user, password);
        stmt = conn.createStatement();
    }

    // Release the JDBC resources.
    public static void destroy() throws Exception {
        if (rs != null) {
            rs.close();
        }
        if (stmt != null) {
            stmt.close();
        }
        if (conn != null) {
            conn.close();
        }
    }

    public static void main(String[] args) throws Exception {
        init();
        String Tablename = "Test";

        // Local file to upload and the partitioned HDFS directory it goes to.
        String localFilePath = "E:\\Data.txt";
        String hdfsFilePath = "/Test" + today.substring(0, 7) + "/upload_date=" + today + "/";
        File localfilepath = new File(localFilePath);

        descTable(Tablename);

        destroy();
    }

    // Recreate the table with an (id, name) schema.
    private static void createTable(String Tablename) throws Exception {
        stmt.execute("drop table if exists " + Tablename);
        String sql = "create table " + Tablename + " (id int, name string)";
        stmt.execute(sql);
    }

    private static void showTables() throws Exception {
        String sql = "show tables";
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(1));
        }
    }

    public static void descTable(String Tablename) throws Exception {
        String sql = "desc " + Tablename;
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(1) + "\t" + rs.getString(2));
        }
    }

    // Load a file that already sits in HDFS into the table.
    public static void loadData(String filePath, String Tablename) throws Exception {
        String sql = "load data inpath '" + filePath + "' into table " + Tablename;
        stmt.execute(sql);
    }

    public static void selectData(String Tablename) throws Exception {
        String sql = "select * from " + Tablename;
        rs = stmt.executeQuery(sql);
        while (rs.next()) {
            System.out.println(rs.getString(1) + "\t\t" + rs.getString(2));
        }
    }

    public static void dropTable(String Tablename) throws Exception {
        String sql = "drop table if exists " + Tablename;
        stmt.execute(sql);
    }
}
This code depends in part on the HDFS interface; for that part, see my earlier article on the HDFS interface.
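The main method above only runs descTable. Below is a minimal sketch of how the other helpers could be chained together. It assumes HiveServer2 is reachable at the URL configured above, that Data.txt has already been uploaded to the dated HDFS directory (for example via the HDFS interface mentioned above), and that its field delimiter matches the table's (a table created as above uses Hive's default Ctrl-A delimiter unless ROW FORMAT DELIMITED is specified). The file name and path are illustrative only.

// Sketch of a fuller main for the Hive class above (assumptions noted in the text).
public static void main(String[] args) throws Exception {
    init();
    String Tablename = "Test";

    // Hypothetical HDFS location of Data.txt, assumed to have been uploaded beforehand.
    String hdfsFilePath = "/Test" + today.substring(0, 7) + "/upload_date=" + today + "/Data.txt";

    createTable(Tablename);            // (re)create Test with (id int, name string)
    loadData(hdfsFilePath, Tablename); // LOAD DATA INPATH moves the file into the table
    selectData(Tablename);             // print the loaded rows
    // dropTable(Tablename);           // uncomment to clean up afterwards

    destroy();
}

Note that load data inpath moves, rather than copies, the file from its original HDFS location into the table's warehouse directory, so the upload directory will no longer contain the file afterwards.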