Querying HBase with Java

fanpc 2020-11-20 15:06

Having already looked at HBase's storage structure and query model, the next question is how to connect from Java and run queries against it; one convenient way is a JMeter Java sampler script.
HBase queries are essentially a Scan driven through filters. Depending on whether you are matching on the column family, the column, or the rowkey, there are several filter types to choose from, and they can be combined to express the concrete query conditions. A fairly complete overview of the available filters is here for reference: https://www.jianshu.com/p/bcc54f63abe4
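As a quick illustration before the full JMeter sampler, the sketch below combines a rowkey-prefix filter and a column-prefix filter in a FilterList (a minimal sketch against the HBase 1.x client API used in this article; the class name and prefix values are made up for the example):

package hbasetest;

import java.util.Arrays;

import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.util.Bytes;

public class FilterCombinationSketch {

        // Build a Scan that returns, from rows whose rowkey starts with rowkeyPrefix,
        // only the columns whose qualifier starts with columnPrefix.
        public static Scan prefixScan(String rowkeyPrefix, String columnPrefix) {
                Filter rowPrefix = new RowFilter(
                                CompareFilter.CompareOp.EQUAL,
                                new BinaryPrefixComparator(Bytes.toBytes(rowkeyPrefix)));
                Filter colPrefix = new ColumnPrefixFilter(Bytes.toBytes(columnPrefix));

                // MUST_PASS_ALL = logical AND; MUST_PASS_ONE would give logical OR
                FilterList filters = new FilterList(FilterList.Operator.MUST_PASS_ALL,
                                Arrays.asList(rowPrefix, colPrefix));

                Scan scan = new Scan();
                scan.setFilter(filters);
                return scan;
        }
}

Note that the list-only constructor used later in the sampler, new FilterList(lf), defaults to MUST_PASS_ALL, i.e. every filter in the list must match.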
The full sampler code follows. It covers several combined queries, including rowkey prefix match + column prefix match + value range. Multiple filters go into a single list; for a single-filter query, just pull the filter you need out and use it on its own:

package hbasetest;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.BinaryPrefixComparator;
import org.apache.hadoop.hbase.filter.ColumnPrefixFilter;
import org.apache.hadoop.hbase.filter.CompareFilter;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.FilterList;
import org.apache.hadoop.hbase.filter.RowFilter;
import org.apache.hadoop.hbase.filter.SingleColumnValueFilter;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.jmeter.config.Arguments;
import org.apache.jmeter.protocol.java.sampler.AbstractJavaSamplerClient;
import org.apache.jmeter.protocol.java.sampler.JavaSamplerContext;
import org.apache.jmeter.samplers.SampleResult;

public class ValueRange extends AbstractJavaSamplerClient {

        public Connection connection;

        public Arguments getDefaultParameters() {
                Arguments args = new Arguments();
                // Example values only; replace them with your own environment
                args.addArgument("hbase.zookeeper.quorum", "192.168.1.1,192.168.1.2,192.168.1.3");
                args.addArgument("hbase.zookeeper.property.clientPort","2020");
                args.addArgument("zookeeper.znode.parent","/hbase-unsecure");
                args.addArgument("hbase.master", "hbasetest.com:16000");
                args.addArgument("TableName","P_test");
                args.addArgument("family","info");
                args.addArgument("column","Str");
                args.addArgument("value","7A");
                args.addArgument("rowkey","BH");
                args.addArgument("range","HXMFNQFOTXW9024ZJ");
                return args;
        }

        public void setupTest(JavaSamplerContext context)
        {
                Configuration conf = HBaseConfiguration.create();
                conf.set("hbase.zookeeper.quorum", context.getParameter("hbase.zookeeper.quorum"));
                conf.set("hbase.zookeeper.property.clientPort", context.getParameter("hbase.zookeeper.property.clientPort"));
                conf.set("zookeeper.znode.parent", context.getParameter("zookeeper.znode.parent"));
                conf.set("hbase.master", context.getParameter("hbase.master"));
                try {
                         connection = ConnectionFactory.createConnection(conf);
                } catch (IOException e) {

                        e.printStackTrace();
                }
        }
        public SampleResult runTest(JavaSamplerContext arg0) {
                SampleResult sr = new SampleResult();

                try {
                        String tableName = arg0.getParameter("TableName");
                        if (StringUtils.isBlank(tableName)) {
                                // No table name supplied: fail fast instead of passing a blank name to HBase
                                sr.setSuccessful(false);
                                return sr;
                        }

                        Table table = connection.getTable(TableName.valueOf(tableName));
                        Admin admin = connection.getAdmin();

                        if (!admin.isTableAvailable(TableName.valueOf(tableName))) {
                                admin.close();
                                connection.close();
                                sr.setSuccessful(false);
                                return sr;
                        }
                        admin.close();

                        Scan scan = new Scan();
                        // Large caching value so each scanner RPC fetches many rows during the load test
                        scan.setCaching(60000);
                        List<Filter> lf = new ArrayList<Filter>();

                        // 1. Rowkey prefix match + column prefix match
                        Filter filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new BinaryPrefixComparator(arg0.getParameter("rowkey").getBytes()));
                        Filter filter1 = new ColumnPrefixFilter(arg0.getParameter("column").getBytes());

                        // 2. Rowkey prefix match + column prefix match + value range
                        //Filter filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new BinaryPrefixComparator(arg0.getParameter("rowkey").getBytes()));
                        //Filter filter1 = new ColumnPrefixFilter(arg0.getParameter("column").getBytes());
                        //Filter filter2 = new SingleColumnValueFilter(arg0.getParameter("family").getBytes(), Bytes.toBytes("Str"), CompareFilter.CompareOp.GREATER_OR_EQUAL, arg0.getParameter("range").getBytes());

                        // 3. Rowkey prefix match + value range
                        //Filter filter = new RowFilter(CompareFilter.CompareOp.EQUAL, new BinaryPrefixComparator(arg0.getParameter("rowkey").getBytes()));
                        //Filter filter1 = new SingleColumnValueFilter(arg0.getParameter("family").getBytes(), Bytes.toBytes("Str"), CompareFilter.CompareOp.GREATER_OR_EQUAL, arg0.getParameter("range").getBytes());

                        // 4. Column prefix match + value range
                        //Filter filter = new ColumnPrefixFilter(arg0.getParameter("column").getBytes());
                        //Filter filter1 = new SingleColumnValueFilter(arg0.getParameter("family").getBytes(), Bytes.toBytes("Str"), CompareFilter.CompareOp.GREATER_OR_EQUAL, arg0.getParameter("range").getBytes());
                        lf.add(filter);
                        lf.add(filter1);
                        //lf.add(filter2);

                        FilterList fl = new FilterList(lf);
                        scan.setFilter(fl);

                        sr.sampleStart();
                        ResultScanner scanner = table.getScanner(scan);
                        // getScanner() only opens the scanner; iterating the results is what
                        // actually drives the scan, so keep the iteration inside the timed section
                        int rowCount = 0;
                        for (Result result : scanner) {
                                rowCount++;
                        }
                        sr.sampleEnd();

                        scanner.close();
                        table.close();
                        sr.setResponseMessage("rows returned: " + rowCount);
                        sr.setSuccessful(true);
                } catch (Exception e) {
                        e.printStackTrace();
                        sr.setSuccessful(false);
                }
                return sr;
        }

        public void teardownTest(JavaSamplerContext context) {
                // Close the shared connection once this thread has finished the test
                try {
                        if (connection != null) {
                                connection.close();
                        }
                } catch (IOException e) {
                        e.printStackTrace();
                }
        }

}
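To run this in JMeter, the class is typically packaged into a jar and placed in JMeter's lib/ext directory, with the HBase client and Hadoop dependency jars also on the classpath (for example under lib); a Java Request sampler can then select hbasetest.ValueRange and override the default parameters for the target environment.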

Server resources can be monitored through Ambari for the HBase cluster, including CPU, I/O, memory, and the HDFS disk usage behind HBase:
[Figure: Ambari resource monitoring dashboard]
The tool also supports displaying the charts over a selectable time range:
[Figures: Ambari metric charts shown for a selected time range]
