[Gandalf] Setting Up an HBase Development Environment with Eclipse + Maven, Plus an HBaseDAO Code Example
Published: 2019-05-27


Environment:
Windows 7, 64-bit
Eclipse Kepler Service Release 1
java version "1.7.0_40"

Step 1: Create a new Maven project in Eclipse, edit pom.xml, and let Maven download the jars
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
    xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <groupId>fulong.bigdata</groupId>
    <artifactId>myHbase</artifactId>
    <version>0.0.1-SNAPSHOT</version>
    <dependencies>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-client</artifactId>
            <version>0.96.2-hadoop2</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>2.2.0</version>
        </dependency>
        <dependency>
            <groupId>jdk.tools</groupId>
            <artifactId>jdk.tools</artifactId>
            <version>1.7</version>
            <scope>system</scope>
            <systemPath>${JAVA_HOME}/lib/tools.jar</systemPath>
        </dependency>
    </dependencies>
</project>
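After saving pom.xml, let Maven resolve and download the jars (in Eclipse: right-click the project > Maven > Update Project). A quick way to confirm that hbase-client landed on the classpath is a one-off check like the sketch below (the class name SanityCheck is just illustrative):

package myHbase;

import org.apache.hadoop.hbase.util.VersionInfo;

public class SanityCheck {
    public static void main(String[] args) {
        // Should print the client version pulled in by Maven, e.g. 0.96.2-hadoop2
        System.out.println("HBase client version: " + VersionInfo.getVersion());
    }
}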
 

Step 2: Copy the Hadoop and HBase configuration files from the target cluster into the project
This is what lets the client code locate ZooKeeper and the HBase Master. Put the configuration files at these paths inside the project:
/src/main/resources/hadoop
/src/main/resources/hbase
Then add both directories to the project's classpath (in Eclipse: project Build Path > Configure Build Path).
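If you would rather not depend on the classpath setup alone, the configuration files can also be loaded programmatically. A minimal sketch, assuming src/main/resources is on the classpath so the two directories resolve as resource paths (the class name ConfigSetup is just illustrative):

package myHbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class ConfigSetup {
    public static Configuration loadConf() {
        // HBaseConfiguration.create() already reads hbase-site.xml from the classpath;
        // addResource points it at the copied cluster files explicitly.
        Configuration conf = HBaseConfiguration.create();
        conf.addResource("hbase/hbase-site.xml");
        conf.addResource("hadoop/core-site.xml");
        conf.addResource("hadoop/hdfs-site.xml");
        return conf;
    }
}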

Step 3: Add the following property to hbase-site.xml
    <property>
        <name>fs.hdfs.impl</name>
        <value>org.apache.hadoop.hdfs.DistributedFileSystem</value>
    </property>
This explicitly maps the hdfs:// scheme to DistributedFileSystem, which avoids "No FileSystem for scheme: hdfs" errors when the FileSystem service metadata is not picked up from the Hadoop jars.
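The same setting can also be applied in code instead of in the XML file (a minimal sketch; either approach works):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

Configuration conf = HBaseConfiguration.create();
// Same effect as the fs.hdfs.impl property above
conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");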

Step 4: Write a Java program that calls the HBase client API
The class below exercises a selection of common HBase operations: table creation and deletion, put, get, delete, and scans.
package myHbase;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.util.Bytes;

public class HBaseDAO {

    static Configuration conf = HBaseConfiguration.create();

    /**
     * create a table: table_name(columnFamily)
     * @param tablename
     * @param columnFamily
     * @throws Exception
     */
    public static void createTable(String tablename, String columnFamily) throws Exception {
        HBaseAdmin admin = new HBaseAdmin(conf);
        if (admin.tableExists(tablename)) {
            System.out.println("Table exists!");
            System.exit(0);
        } else {
            HTableDescriptor tableDesc = new HTableDescriptor(TableName.valueOf(tablename));
            tableDesc.addFamily(new HColumnDescriptor(columnFamily));
            admin.createTable(tableDesc);
            System.out.println("create table success!");
        }
        admin.close();
    }

    /**
     * delete a table -- caution, destructive!
     * @param tablename
     * @return true if the table was deleted (or did not exist)
     * @throws IOException
     */
    public static boolean deleteTable(String tablename) throws IOException {
        HBaseAdmin admin = new HBaseAdmin(conf);
        if (admin.tableExists(tablename)) {
            try {
                admin.disableTable(tablename);
                admin.deleteTable(tablename);
            } catch (Exception e) {
                e.printStackTrace();
                admin.close();
                return false;
            }
        }
        admin.close();
        return true;
    }

    /**
     * put a cell's data into the row identified by rowKey, columnFamily, identifier
     * @param table created by: HTable table = new HTable(conf, "tablename")
     * @param rowKey
     * @param columnFamily
     * @param identifier
     * @param data
     * @throws Exception
     */
    public static void putCell(HTable table, String rowKey, String columnFamily, String identifier, String data) throws Exception {
        Put p1 = new Put(Bytes.toBytes(rowKey));
        p1.add(Bytes.toBytes(columnFamily), Bytes.toBytes(identifier), Bytes.toBytes(data));
        table.put(p1);
        System.out.println("put '" + rowKey + "', '" + columnFamily + ":" + identifier + "', '" + data + "'");
    }

    /**
     * get the row identified by rowKey
     * @param table created by: HTable table = new HTable(conf, "tablename")
     * @param rowKey
     * @throws Exception
     */
    public static Result getRow(HTable table, String rowKey) throws Exception {
        Get get = new Get(Bytes.toBytes(rowKey));
        Result result = table.get(get);
        System.out.println("Get: " + result);
        return result;
    }

    /**
     * delete the row identified by rowKey
     * @param table created by: HTable table = new HTable(conf, "tablename")
     * @param rowKey
     * @throws Exception
     */
    public static void deleteRow(HTable table, String rowKey) throws Exception {
        Delete delete = new Delete(Bytes.toBytes(rowKey));
        table.delete(delete);
        System.out.println("Delete row: " + rowKey);
    }

    /**
     * return all rows from a table
     * @param table created by: HTable table = new HTable(conf, "tablename")
     * @throws Exception
     */
    public static ResultScanner scanAll(HTable table) throws Exception {
        Scan s = new Scan();
        ResultScanner rs = table.getScanner(s);
        return rs;
    }

    /**
     * return a range of rows specified by startrow and endrow
     * @param table created by: HTable table = new HTable(conf, "tablename")
     * @param startrow
     * @param endrow
     * @throws Exception
     */
    public static ResultScanner scanRange(HTable table, String startrow, String endrow) throws Exception {
        Scan s = new Scan(Bytes.toBytes(startrow), Bytes.toBytes(endrow));
        ResultScanner rs = table.getScanner(s);
        return rs;
    }

    /**
     * return a range of rows filtered by a specified condition
     * @param table created by: HTable table = new HTable(conf, "tablename")
     * @param startrow
     * @param filter
     * @throws Exception
     */
    public static ResultScanner scanFilter(HTable table, String startrow, Filter filter) throws Exception {
        Scan s = new Scan(Bytes.toBytes(startrow), filter);
        ResultScanner rs = table.getScanner(s);
        return rs;
    }

    public static void main(String[] args) throws Exception {
        HTable table = new HTable(conf, "apitable");

//      ResultScanner rs = HBaseDAO.scanRange(table, "2013-07-10*", "2013-07-11*");
//      ResultScanner rs = HBaseDAO.scanRange(table, "100001", "100003");
        ResultScanner rs = HBaseDAO.scanAll(table);
        for (Result r : rs) {
            System.out.println("Scan: " + r);
        }
        table.close();

//      HBaseDAO.createTable("apitable", "testcf");
//      HBaseDAO.putCell(table, "100001", "testcf", "name", "liyang");
//      HBaseDAO.putCell(table, "100003", "testcf", "name", "leon");
//      HBaseDAO.deleteRow(table, "100002");
//      HBaseDAO.getRow(table, "100003");
//      HBaseDAO.deleteTable("apitable");
    }
}
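A usage note on scanFilter: it pairs with the filters under org.apache.hadoop.hbase.filter. Below is a minimal sketch using the standard PrefixFilter (the table name "apitable", the prefix "1000", and the class name ScanFilterExample are just illustrative):

package myHbase;

import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class ScanFilterExample {
    public static void main(String[] args) throws Exception {
        HTable table = new HTable(HBaseDAO.conf, "apitable");
        // Keep only rows whose key starts with "1000"
        PrefixFilter filter = new PrefixFilter(Bytes.toBytes("1000"));
        ResultScanner rs = HBaseDAO.scanFilter(table, "1000", filter);
        for (Result r : rs) {
            System.out.println("Filtered scan: " + r);
        }
        rs.close();
        table.close();
    }
}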