
zhangkaipc 2019-11-21 11:40


HBase 2.2.2 Notes
1. Download
https://hbase.apache.org/downloads.html

2. Unpack and set the environment variables

3. Edit the configuration files
Single-node (pseudo-distributed) configuration
zookeeper ==> Mode: standalone
zoo.cfg ==> changed this entry: dataDir=/home/hadoop/zookeeper/tmp
hbase ==> hbase-env.sh
export JAVA_HOME=/home/hadoop/javajdk18/jdk
export HBASE_CLASSPATH=/home/hadoop/hadoop/etc/hadoop
export HBASE_MANAGES_ZK=true
==> hbase-site.xml
<configuration>
    <property>
        <name>hbase.master</name>
        <value>hadoop01:60000</value>
    </property>
    <property>
        <name>hbase.master.maxclockskew</name>
        <value>180000</value>
    </property>
    <property>
        <name>hbase.rootdir</name>
        <value>hdfs://hadoop01:9000/hbase</value>
    </property>
    <property>
        <name>hbase.cluster.distributed</name>
        <value>true</value>
    </property>
    <property>
        <name>hbase.zookeeper.quorum</name>
        <value>hadoop01</value>
    </property>
    <property>
        <name>hbase.zookeeper.property.dataDir</name>
        <value>/home/hadoop/hbase/zookeeperdir</value>
    </property>
</configuration>

4. Restart everything (note: with HBASE_MANAGES_ZK=true HBase manages its own ZooKeeper; the separate zkServer.sh start below is only needed for an externally managed ZooKeeper, so the two settings should match)
zkServer.sh start
start-dfs.sh
start-yarn.sh
start-hbase.sh

5. Java HBase API client jars
the jars under hbase/lib
the jars under hbase/lib/client-facing-thirdparty
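
With these jars on the classpath, a quick connectivity check can be run before writing real code. A minimal sketch (the quorum host hadoop01 and port 2181 match the configuration above; HBaseSmokeTest is just an illustrative name):
=======================================
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class HBaseSmokeTest {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "hadoop01");
        conf.set("hbase.zookeeper.property.clientPort", "2181");
        // If the jars and ZooKeeper settings are right, this lists all table names.
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Admin admin = conn.getAdmin()) {
            for (TableName name : admin.listTableNames()) {
                System.out.println(name.getNameAsString());
            }
        }
    }
}
=======================================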


6. CRUD code
===============================================================================
package com.kizzle.hadoop.hbase;


import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.BufferedMutator;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.CoprocessorDescriptor;
import org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
import org.apache.hadoop.hbase.filter.FamilyFilter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.MultipleColumnPrefixFilter;
import org.apache.hadoop.hbase.filter.RegexStringComparator;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.filter.SubstringComparator;
import org.apache.hadoop.hbase.filter.ValueFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Before;
import org.junit.Test;


public class HbaseDemo {
    Configuration conf = null;
    Admin admin = null;
    Connection conn = null;
    
    @Before
    public void init() throws Exception {
        conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "hadoop01");
        conf.set("hbase.zookeeper.property.clientPort", "2181");
        conn = ConnectionFactory.createConnection(conf);
        admin = conn.getAdmin();
    }
    
    @Test
    public void createTableOldFunction() throws Exception {
        TableName tableName = TableName.valueOf("student");    
        HTableDescriptor desc = new HTableDescriptor(tableName);
        HColumnDescriptor family1 = new HColumnDescriptor("f1");
        desc.addFamily(family1);
        admin.createTable(desc);
    }
    
    @Test
    public void createTableOneFamily() throws Exception{
        TableName tableName = TableName.valueOf("goods");        
        //ColumnFamilyDescriptor family2 = ColumnFamilyDescriptorBuilder.of("info");
        //ModifyableTableDescriptor desc = new ModifyableTableDescriptor(tableName);
        //desc.setColumnFamily(family2);
        //admin.createTable(desc);
        
        boolean tableExists = admin.tableExists(tableName);
        if(!tableExists) {
            TableDescriptorBuilder tableBuilder = TableDescriptorBuilder.newBuilder(tableName);
            ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder.newBuilder("food".getBytes()).build();
            TableDescriptor desc = tableBuilder.setColumnFamily(family).build();
            admin.createTable(desc);
            System.out.println("Create table1 success.");
        }else {
            System.out.println("tatble1 exists.");
        }
    }
    
    @Test
    public void createTableManyFamily() throws IOException {
        TableName tableName = TableName.valueOf("languages");
        boolean tableExists = admin.tableExists(tableName);
        if(!tableExists) {            
            TableDescriptorBuilder table = TableDescriptorBuilder.newBuilder(tableName);
            
            String[] familyNames = new String[] {"java","python","php","golang"};
            for(String familyName: familyNames) {
                ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder.newBuilder(familyName.getBytes()).build();
                table.setColumnFamily(family);
            }
            TableDescriptor desc = table.build();
            admin.createTable(desc);
            System.out.println("Create table2 success.");
        }else {
            System.out.println("table2 exists.");
        }
    }
    
    @Test
    public void insertData() throws IOException {
        ArrayList<Map<String, String>> dataList = new ArrayList<Map<String, String>>();
        
        HashMap<String, String> dataDict1 = new HashMap<String, String>();
        dataDict1.put("rowKey", "ceshi1");
        dataDict1.put("columnFamily", "java");
        dataDict1.put("columnName","bookname");
        dataDict1.put("columnValue", "java book 22222");
        dataList.add(dataDict1);
        
        HashMap<String, String> dataDict2 = new HashMap<String, String>();
        dataDict2.put("rowKey", "ceshi1");
        dataDict2.put("columnFamily", "java");
        dataDict2.put("columnName","author");
        dataDict2.put("columnValue", "wangwu");
        dataList.add(dataDict2);
        
        HashMap<String, String> dataDict3 = new HashMap<String, String>();
        dataDict3.put("rowKey", "ceshi1");
        dataDict3.put("columnFamily", "java1");
        dataDict3.put("columnName","price");
        dataDict3.put("columnValue", "222.8");
        dataList.add(dataDict3);
        
        Table table = conn.getTable(TableName.valueOf("languages"));
        List<Put> puts = new ArrayList<Put>();
        for(Map<String,String> d: dataList) {
            String rowKey = d.get("rowKey");
            Put put = new Put(rowKey.getBytes());
            put.addColumn(d.get("columnFamily").getBytes(), d.get("columnName").getBytes(), d.get("columnValue").getBytes());
            puts.add(put);
        }
        table.put(puts);
        table.close();
    }
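
    // The client also offers BufferedMutator (imported above) for buffered,
    // batched writes. A sketch against the same 'languages' table; the row key
    // "ceshi2" is made up for illustration. close() flushes the buffer.
    @Test
    public void insertWithBufferedMutator() throws IOException {
        try (BufferedMutator mutator = conn.getBufferedMutator(TableName.valueOf("languages"))) {
            Put put = new Put("ceshi2".getBytes());
            put.addColumn("java".getBytes(), "bookname".getBytes(), "head first java".getBytes());
            mutator.mutate(put);
        }
    }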
    
    @Test
    public void updateData() throws IOException {
        Table table = conn.getTable(TableName.valueOf("languages"));
        Put put = new Put("ceshi1".getBytes());
        put.addColumn("java".getBytes(), "price".getBytes(), "188.8".getBytes());
        table.put(put);
        table.close();
    }
    
    @Test
    public void deleteData() throws IOException {
        Table table = conn.getTable(TableName.valueOf("languages"));
        Delete delete = new Delete("ceshi1".getBytes()); //delete one row
        delete.addFamily("java".getBytes()); //delete one family
        //delete.addColumn("java".getBytes(), "price".getBytes()); //delete one column 
        //delete.addColumns(family, qualifier) //delete many columns
        table.delete(delete);
        table.close();
    }
    
    @Test
    public void queryData() throws Exception {
        Table table = conn.getTable(TableName.valueOf("languages"));
        
        Get get = new Get("ceshi1".getBytes());
        //get.setFilter(filter);
        Result result = table.get(get);
        Cell[] rawCells = result.rawCells();
        System.out.println(rawCells.length);
        for(Cell cell: rawCells) {
            String columnName = Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
            String value = Bytes.toString(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
            System.out.println(columnName+":"+value);
        }
        table.close();
    }
    
    @Test
    public void scanTable() throws IOException {
        Table table = conn.getTable(TableName.valueOf("languages"));
        
        Scan scan = new Scan();
        ResultScanner scanner = table.getScanner(scan);
        for(Result res:scanner) {
            byte[] row = res.getRow();
            String rowKey = Bytes.toString(row);
            System.out.println("rowKey:"+rowKey);
            Cell[] rawCells = res.rawCells();
            for(Cell cell:rawCells) {
                String family = Bytes.toString(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
                String columnName = Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
                String value = Bytes.toString(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
                System.out.println(rowKey+"\t\t\t"+"column="+family+":"+columnName+", value="+value);
            }
        }
        table.close();
    }
    
    @Test
    public void rowKeyFilter() throws IOException {
        Table table = conn.getTable(TableName.valueOf("languages"));
        Scan scan = new Scan();
        RowFilter filter = new RowFilter(CompareOperator.EQUAL, new RegexStringComparator("shi1$"));
        scan.setFilter(filter);
        ResultScanner scanner = table.getScanner(scan);
        for(Result res:scanner) {
            byte[] row = res.getRow();
            String rowKey = Bytes.toString(row);
            System.out.println("rowKey:"+rowKey);
            Cell[] cells = res.rawCells();
            for(Cell cell:cells) {
                String family = Bytes.toString(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
                String columnName = Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
                String value = Bytes.toString(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
                System.out.println(rowKey+"\t\t\t"+"column="+family+":"+columnName+", value="+value);
            }
        }
        table.close();
    }
    
    @Test
    public void oneColumnNameFilter() throws IOException {
        Table table = conn.getTable(TableName.valueOf("languages"));
        Scan scan = new Scan();
        ColumnPrefixFilter filter = new ColumnPrefixFilter("author".getBytes());
        scan.setFilter(filter);
        ResultScanner scanner = table.getScanner(scan);
        for(Result res: scanner) {
            byte[] row = res.getRow();
            String rowKey = Bytes.toString(row);
            System.out.println("rowKey:"+rowKey);
            Cell[] cells = res.rawCells();
            for(Cell cell:cells) {
                String family = Bytes.toString(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
                String columnName = Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
                String value = Bytes.toString(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
                System.out.println(rowKey+"\t\t\t"+"column="+family+":"+columnName+",value="+value);
            }
        }
        table.close();
    }
    
    @Test
    public void manyColumnNameFilter() throws IOException {
        Table table = conn.getTable(TableName.valueOf("languages"));
        Scan scan = new Scan();
        byte[][] prefixes = new byte[][] {"author".getBytes(),"bookname".getBytes()};
        MultipleColumnPrefixFilter filter = new MultipleColumnPrefixFilter(prefixes);
        scan.setFilter(filter);
        ResultScanner scanner = table.getScanner(scan);
        for(Result res:scanner) {
            byte[] row = res.getRow();
            String rowKey = Bytes.toString(row);
            System.out.println("rowKey:"+rowKey);
            Cell[] cells = res.rawCells();
            for(Cell cell: cells) {
                String family = Bytes.toString(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
                String columnName = Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
                String value = Bytes.toString(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
                System.out.println(rowKey+"\t\t\t"+"column="+family+":"+columnName+",value="+value);
            }
        }
        table.close();
    }
    
    @Test
    public void columnValueFilter() throws IOException, DeserializationException {
        Table table = conn.getTable(TableName.valueOf("languages"));
        Scan scan = new Scan();
        SingleColumnValueFilter filter = new SingleColumnValueFilter("python".getBytes(), "bookname".getBytes(), CompareOperator.EQUAL, new RegexStringComparator("oo"));
        scan.setFilter(filter);
        ResultScanner scanner = table.getScanner(scan);
        for(Result res: scanner) {
            byte[] row = res.getRow();
            String rowKey = Bytes.toString(row);
            System.out.println("rawKey:"+rowKey);
            Cell[] cells = res.rawCells();
            for(Cell cell: cells) {
                String family = Bytes.toString(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
                String columnName = Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
                String value = Bytes.toString(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
                System.out.println(rowKey+"\t\t\t"+"column="+family+":"+columnName+", value="+value);
            }
        }
        table.close();
    }
    
    @Test
    public void filterListFilter() throws IOException {
        Table table = conn.getTable(TableName.valueOf("languages"));
        Scan scan = new Scan();
        FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL);
        //ValueFilter valueFilter = new ValueFilter(CompareOperator.EQUAL, new SubstringComparator("2"));
        //FamilyFilter familyFilter = new FamilyFilter(CompareOperator.EQUAL, new BinaryComparator("java".getBytes()));
        SingleColumnValueFilter filter1 = new SingleColumnValueFilter("python".getBytes(), "bookname".getBytes(), CompareOperator.EQUAL, new RegexStringComparator("oo"));
        ColumnPrefixFilter filter2 = new ColumnPrefixFilter("price".getBytes());
        filterList.addFilter(filter1);
        filterList.addFilter(filter2);
        scan.setFilter(filterList);
        
        ResultScanner scanner = table.getScanner(scan);
        for(Result res: scanner) {
            byte[] row = res.getRow();
            String rowKey = Bytes.toString(row);
            System.out.println("rawKey:"+rowKey);
            Cell[] cells = res.rawCells();
            for(Cell cell: cells) {
                String family = Bytes.toString(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
                String columnName = Bytes.toString(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
                String value = Bytes.toString(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
                System.out.println(rowKey+"\t\t\tcolumn="+family+":"+columnName+",value="+value);
            }
        }
        table.close();
    }
    
    @Test
    public void counterDemo() throws IOException {
        Table table = conn.getTable(TableName.valueOf("counters"));
        // incrementColumnValue returns the new counter value
        long hits = table.incrementColumnValue("20110101".getBytes(), "daily".getBytes(), "hits".getBytes(), 10);
        System.out.println("hits=" + hits);
        table.close();
    }
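
    // Counter cells are stored as 8-byte longs, so reading one back needs
    // Bytes.toLong rather than Bytes.toString. A sketch for the same counter:
    @Test
    public void readCounter() throws IOException {
        Table table = conn.getTable(TableName.valueOf("counters"));
        Result result = table.get(new Get("20110101".getBytes()));
        byte[] raw = result.getValue("daily".getBytes(), "hits".getBytes());
        if (raw != null) {
            System.out.println("hits=" + Bytes.toLong(raw));
        }
        table.close();
    }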
    
    @Test
    public void coprocessorTest() throws IOException {
        TableName tableName = TableName.valueOf("users");
        String path = "hdfs://hadoop01:9000/user/hadoop/hbase-region-coprocessor3.jar";
        
        admin.disableTable(tableName);
        
        ColumnFamilyDescriptor family1 = ColumnFamilyDescriptorBuilder.newBuilder("personalDet".getBytes()).setMaxVersions(3).build();
        ColumnFamilyDescriptor family2 = ColumnFamilyDescriptorBuilder.newBuilder("salaryDet".getBytes()).setMaxVersions(3).build();
        
        
        //tableDesc.addCoprocessor(RegionObserverExample.class.getCanonicalName(), path, Coprocessor.PRIORITY_USER, null);
//        CoprocessorDescriptorBuilder co = CoprocessorDescriptorBuilder.newBuilder("com.kizzle.hadoop.hbase.RegionObserverExample");
//        co.setJarPath(path);
//        co.setPriority(Coprocessor.PRIORITY_USER);
        //co.setProperties(null);
//        CoprocessorDescriptor coprocessor = co.build();
        
        
        TableDescriptorBuilder tableDesc = TableDescriptorBuilder.newBuilder(tableName);
        tableDesc.setColumnFamily(family1);
        tableDesc.setColumnFamily(family2);
        tableDesc.setValue("COPROCESSOR$1", path + "|"
                + RegionObserverExample.class.getCanonicalName() + "|"
                + Coprocessor.PRIORITY_USER);

        TableDescriptor desc = tableDesc.build();
        
        admin.modifyTable(desc);
        admin.enableTable(tableName);
    
    }
    
    
    @Test
    public void coprocessorDemo() throws IOException {
        Table table = conn.getTable(TableName.valueOf("users".getBytes()));
//        Get get = new Get(Bytes.toBytes("admin"));
//        Result result = table.get(get);
//        Cell[] cells = result.rawCells();
//        for(Cell cell: cells) {
//            //String family = Bytes.toString(cell.getFamilyArray(), cell.getFamilyOffset(), cell.getFamilyLength());
//            //short columnName = Bytes.toShort(cell.getQualifierArray(), cell.getQualifierOffset(), cell.getQualifierLength());
//            //String value = Bytes.toString(cell.getValueArray(), cell.getValueOffset(), cell.getValueLength());
//            //System.out.println(family+":"+columnName+",value="+value);
//            System.out.println(Bytes.toString(CellUtil.cloneRow(cell)));
//            System.out.println(Bytes.toString(CellUtil.cloneFamily(cell)));
//            System.out.println(Bytes.toString(CellUtil.cloneQualifier(cell)));
//        }
        System.out.println("===============================");
        Scan scan = new Scan();
        ResultScanner scanner = table.getScanner(scan);
        for(Result res: scanner) {
            byte[] row = res.getRow();
            String rowKey = Bytes.toString(row);
            System.out.println("rowKey:"+rowKey);
            Cell[] cs = res.rawCells();
            for(Cell ce: cs) {
                System.out.println(ce);
            }
        }
    }
}

===============================================================================

7. Filter summary
RowFilter       --> CompareFilter
FamilyFilter    --> CompareFilter
QualifierFilter --> CompareFilter
ValueFilter     --> CompareFilter
DependentColumnFilter   --> CompareFilter   (equivalent to a ValueFilter combined with a timestamp filter)

Dedicated filters
SingleColumnValueFilter --> FilterBase
SingleColumnValueExcludeFilter  --> SingleColumnValueFilter
PrefixFilter
PageFilter
KeyOnlyFilter
FirstKeyOnlyFilter
InclusiveStopFilter
TimestampsFilter
ColumnCountGetFilter
ColumnPaginationFilter
ColumnPrefixFilter
RandomRowFilter

Decorating filters
SkipFilter
WhileMatchFilter

FilterList
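
A rough sketch of PageFilter plus a decorating SkipFilter, written as another @Test for the HbaseDemo class above (it additionally needs the org.apache.hadoop.hbase.filter.PageFilter and SkipFilter imports; the page size and compared value are arbitrary):
=======================================
    @Test
    public void decoratingFilterDemo() throws IOException {
        Table table = conn.getTable(TableName.valueOf("languages"));
        Scan scan = new Scan();
        FilterList filters = new FilterList(FilterList.Operator.MUST_PASS_ALL);
        filters.addFilter(new PageFilter(10)); // at most 10 rows per region
        // SkipFilter drops the whole row if any cell fails the wrapped filter,
        // instead of just dropping that cell.
        filters.addFilter(new SkipFilter(
                new ValueFilter(CompareOperator.NOT_EQUAL, new BinaryComparator("222.8".getBytes()))));
        scan.setFilter(filters);
        ResultScanner scanner = table.getScanner(scan);
        for (Result res : scanner) {
            System.out.println("rowKey:" + Bytes.toString(res.getRow()));
        }
        table.close();
    }
=======================================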

================================================
RegionObserver
================
package com.kizzle.hadoop.hbase;

import java.io.IOException;
import java.util.Iterator;
import java.util.List;
import java.util.Optional;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.log4j.Logger;

public class RegionObserverExample implements RegionObserver, RegionCoprocessor {
    private static final byte[] ADMIN = Bytes.toBytes("admin");
    private static final byte[] COLUMN_FAMILY = Bytes.toBytes("details");
    private static final byte[] COLUMN = Bytes.toBytes("Admin_det");
    private static final byte[] VALUE = Bytes.toBytes("You can't see Admin details");
    private static final Logger LOG = Logger.getLogger(RegionObserverExample.class);
    
    @Override
    public Optional<RegionObserver> getRegionObserver() {
        return Optional.of(this);
    }
    
    @Override
    public void start(CoprocessorEnvironment env) throws IOException {
        LOG.info("****** start method ******");
    }
    
    @Override
    public void stop(CoprocessorEnvironment env) throws IOException {
        LOG.info("****** stop method ******");
    }
    
    @Override
    public void preGetOp(ObserverContext<RegionCoprocessorEnvironment> c, Get get, List<Cell> result)
            throws IOException {
        if(Bytes.equals(get.getRow(), ADMIN)) {
            Cell cell = CellUtil.createCell(get.getRow(), COLUMN_FAMILY, COLUMN, System.currentTimeMillis(), (byte)4, VALUE);
            //ExtendedCellBuilder cell = ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY);
            //cell.setRow(get.getRow());
            //cell.setFamily(COLUMN_FAMILY);
            //cell.setQualifier(COLUMN);
            //cell.setValue(VALUE);
            //ExtendedCell e = cell.build();
            result.add(cell);
            c.bypass();
        }
    }
    
    @Override
    public void preScannerOpen(ObserverContext<RegionCoprocessorEnvironment> c, Scan scan) throws IOException {
        RowFilter filter = new RowFilter(CompareOperator.NOT_EQUAL, new BinaryComparator(ADMIN));
        scan.setFilter(filter);
        // Note: unlike preGetOp, preScannerOpen does not support bypass() in HBase 2.x,
        // so the injected filter alone does the work here.
    }
    
    @Override
    public boolean postScannerNext(ObserverContext<RegionCoprocessorEnvironment> c, InternalScanner s,
            List<Result> result, int limit, boolean hasNext) throws IOException {
        Iterator<Result> it = result.iterator();
        while(it.hasNext()) {
            Result res = it.next();
            if(Bytes.equals(res.getRow(), ADMIN)) {
                it.remove();
                break;
            }
        }
        return hasNext;
    }
    
}
==================================================

 

Installing a coprocessor at the table level (the 'coprocessor' value here is an HDFS path, so upload the jar to HDFS first)
disable 'tableName'
alter 'tableName' , METHOD =>'table_att','coprocessor'=>'/hbase/coprocessor/hbase-coprocessor-1.0.0.jar|com.xxx.hbase.coprocessor.xxx|1001'
enable 'tableName'

Uninstalling a coprocessor
disable 'tableName'
alter 'tableName', METHOD => 'table_att_unset', NAME => 'COPROCESSOR$1'
enable 'tableName'



MasterObserver steps:
==================================================
package com.kizzle.hadoop.hbase;

import java.io.IOException;
import java.net.URI;
import java.util.Optional;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.MasterObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;


public class MasterObserverExample implements MasterObserver, MasterCoprocessor {
    private static final Logger LOG = LoggerFactory.getLogger(MasterObserverExample.class);
    
    @Override
    public Optional<MasterObserver> getMasterObserver() {
        return Optional.of(this);
    }
    
    @Override
    public void start(CoprocessorEnvironment env) throws IOException {
        //LOG.info("******* start method *****");
        System.out.println("start method ......");
    }
    
    @Override
    public void stop(CoprocessorEnvironment env) throws IOException {
        //LOG.info("****** stop method ******");
        System.out.println("stop method ......");
    }
    
    @Override
    public void postCreateTable(ObserverContext<MasterCoprocessorEnvironment> ctx, TableDescriptor desc,
            RegionInfo[] regions) throws IOException {
        String tableName = desc.getTableName().getNameAsString();
        System.out.println("tableName:"+tableName);
        Configuration conf = ctx.getEnvironment().getConfiguration();
        FileSystem fs = FileSystem.get(URI.create(conf.get("fs.defaultFS")), conf);
        Path path = new Path(tableName+"-blobs-demo");  //hdfs --> /user/hadoop/tableName-blobs-demo
        fs.mkdirs(path);
    }
}
==================================================
Package the jar and put it in the /home/hadoop/hbase/lib directory
==================================================
Configure hbase-site.xml
<property>
  <name>hbase.coprocessor.master.classes</name>
  <value>com.kizzle.hadoop.hbase.MasterObserverExample</value>
</property>
==================================================
Restart HBase
hbase shell
hbase(main):004:0> create 'mytable','info'
Observer output: tail -f /home/hadoop/hbase/logs/hbase-hadoop-master-hadoop01.out
Directory created by the observer: hdfs://hadoop01:9000/user/hadoop/mytable-blobs-demo


REST server
hbase-daemon.sh start rest
curl http://hadoop01:8080/version
-p or --port sets the listening port; the default is 8080.
hbase-daemon.sh stop rest

curl -H 'Accept: application/json' http://hadoop01:8080/users/abc/info:col1

curl -H 'Accept: application/json' http://hadoop01:8080/users/abc/info:col1| hexdump -C
curl -H 'Accept: application/octet-stream' http://hadoop01:8080/users/abc/info:col1| hexdump -C
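
The same endpoint can be called from Java with nothing but the JDK; a minimal sketch (hostname and port as above, RestVersionCheck is an illustrative name):
=======================================
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;

public class RestVersionCheck {
    public static void main(String[] args) throws Exception {
        HttpURLConnection http =
                (HttpURLConnection) new URL("http://hadoop01:8080/version").openConnection();
        http.setRequestProperty("Accept", "application/json");
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(http.getInputStream()))) {
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println(line); // version info as JSON
            }
        }
        http.disconnect();
    }
}
=======================================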


Inspecting an HFile (-v verbose output, -m print file metadata, -p print key/value pairs)
hbase org.apache.hadoop.hbase.io.hfile.HFile -f /hbase/data/default/mytable/c1883148a7f6fa56f2924c0241783343/info/9aa7addd85804557ab50f43898e55728 -v -m -p

Ganglia monitoring
Install the dependencies
yum -y install apr-devel apr-util check-devel cairo-devel pango-devel libxml2-devel rpm-build glib2-devel dbus-devel freetype-devel fontconfig-devel gcc gcc-c++ expat-devel python-devel libXrender-devel
yum install -y libart_lgpl-devel pcre-devel libtool
yum install libconfuse libconfuse-devel -y
yum install  -y rrdtool rrdtool-devel
wget https://sourceforge.net/projects/ganglia/files/ganglia%20monitoring%20core/3.7.2/ganglia-3.7.2.tar.gz
./configure --prefix=/usr/local/ganglia --enable-gexec --enable-status --with-gmetad
Tutorial: https://blog.51cto.com/net881004/2437590

Deploying an HBase cluster on Hadoop in HA mode
1. Unpack hbase-2.2.2-bin.tar.gz into /home/hadoop
tar -zxf /tmp/download/hbase-2.2.2-bin.tar.gz -C /home/hadoop

2. Configure hbase-env.sh
cd hbase-2.2.2/
vi conf/hbase-env.sh 
=======================================
export JAVA_HOME=/home/hadoop/jdk1.8.0_201
export HBASE_CLASSPATH=/home/hadoop/hadoop-2.7.7/etc/hadoop
export HBASE_MANAGES_ZK=true
=======================================

3. Configure hbase-site.xml
=======================================
<configuration>
    <property>
        <name>hbase.rootdir</name>
        <value>hdfs://ns1/hbase</value>
    </property>
    <property>
        <name>hbase.cluster.distributed</name>
        <value>true</value>
    </property>
    <property>
        <name>hbase.zookeeper.quorum</name>
        <value>hadoop01,hadoop02,hadoop03</value>
    </property>
    <property>
        <name>hbase.zookeeper.property.dataDir</name>
        <value>/home/hadoop/hbase/zookeeperdir</value>
    </property>
</configuration>
=======================================
Create the /home/hadoop/hbase/zookeeperdir directory


4. Configure the node lists
vi regionservers 
=======================================
hadoop01
hadoop02
hadoop03
=======================================
vi backup-masters  (this file does not exist by default; create it yourself)
=======================================
hadoop01
=======================================

5. Copy core-site.xml and hdfs-site.xml from the Hadoop config into hbase/conf
cp ~/hadoop-2.7.7/etc/hadoop/{core-site.xml,hdfs-site.xml} hbase/conf
rm -rf hbase/docs/

6. Distribute the files to the other nodes
scp -rq hbase-2.2.2 hadoop@hadoop02:/home/hadoop
scp -rq hbase-2.2.2 hadoop@hadoop03:/home/hadoop

7. On every node, set the environment variables that put HBase on the PATH (e.g. export HBASE_HOME=/home/hadoop/hbase-2.2.2 and add $HBASE_HOME/bin to PATH)
vi ~/.bashrc
source ~/.bashrc
scp ~/.bashrc hadoop@hadoop02:~
scp ~/.bashrc hadoop@hadoop03:~

8. Start HBase
start-hbase.sh

9. Test HBase
hbase shell
create 'mytable','info'
