首页 > 解决方案 > 引起: java.lang.ClassNotFoundException: org.apache.hbase.thirdparty.com.google.common.cache.CacheLoader

问题描述

我想使用 Spark DStreams 将一些数据写入 HBase,但是在运行到 `Configuration conf1 = HBaseConfiguration.create();` 及随后建立连接的代码时程序失败了。我该如何解决这个问题?

当我运行 Java 代码时:




import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.log4j.BasicConfigurator;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.function.Function;
import org.apache.spark.api.java.function.Function2;
import org.apache.spark.api.java.function.Function3;
import org.apache.spark.api.java.function.PairFunction;
import org.apache.spark.rdd.JdbcRDD;
import org.apache.spark.sql.execution.datasources.jdbc.JDBCRDD;
import org.apache.spark.streaming.State;
import org.apache.spark.streaming.StateSpec;
import org.apache.spark.streaming.api.java.*;
import org.apache.spark.streaming.dstream.ReceiverInputDStream;
import org.apache.spark.streaming.kafka.KafkaUtils;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.kafka.KafkaUtils;
import org.apache.spark.api.java.Optional;
import scala.Tuple2;
import scala.util.control.Exception;

import java.io.IOException;
import java.util.*;

........
.........

        // connect to hbase
        Configuration conf1 = HBaseConfiguration.create();
        Connection connection = ConnectionFactory.createConnection(conf1);

.......
.......

错误日志:

21/10/19 17:16:49 INFO ContextHandler: 启动 osjsServletContextHandler@8a589a2{/stages/stage/kill,null,AVAILABLE,@Spark}

21/10/19 17:16:49 INFO ContextHandler: 启动 osjsServletContextHandler@775594f2{/metrics/json,null,AVAILABLE,@Spark}

Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/hbase/thirdparty/com/google/common/cache/CacheLoader

    at org.apache.hadoop.hbase.AuthUtil.loginClient(AuthUtil.java:106)

    at org.apache.hadoop.hbase.client.ConnectionFactory.createConnection(ConnectionFactory.java:128)

    at wordcountspark.SparkStreamingCount.main(SparkStreamingCount.java:133)

Caused by: java.lang.ClassNotFoundException: 

org.apache.hbase.thirdparty.com.google.common.cache.CacheLoader

    at java.net.URLClassLoader.findClass(URLClassLoader.java:381)

    at java.lang.ClassLoader.loadClass(ClassLoader.java:424)

    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:338)

    at java.lang.ClassLoader.loadClass(ClassLoader.java:357)

21/10/19 17:16:50 INFO AbstractConnector: Stopped Spark@464649c{HTTP/1.1,[http/1.1]}{0.0.0.0:4041}

版本:

spark version: 2.4.0
hadoop version:2.7.1
hbase version:2.4.7

maven依赖:

    <dependencies>
        <dependency> <!-- Spark dependency -->
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.11</artifactId>
            <version>2.4.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.11</artifactId>
            <version>2.4.0</version>
        </dependency>

        <!--<dependency>-->
        <!--<groupId>mysql</groupId>-->
        <!--<artifactId>mysql-connector-java</artifactId>-->
        <!--<version>5.1.40</version>-->
        <!--</dependency>-->

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming_2.11</artifactId>
            <version>2.4.0</version>
        </dependency>

        <dependency>
            <groupId>com.googlecode.json-simple</groupId>
            <artifactId>json-simple</artifactId>
            <version>1.1.1</version>
        </dependency>

        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>5.1.43</version>
        </dependency>

        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
            <version>0.8.2.1</version>
        </dependency>

        <dependency>
            <groupId>com.github.sgroschupf</groupId>
            <artifactId>zkclient</artifactId>
            <version>0.1</version>
        </dependency>

        <dependency>
            <groupId>org.scala-lang</groupId>
            <artifactId>scala-library</artifactId>
            <version>2.10.0</version>
        </dependency>

        <dependency>
            <groupId>com.yammer.metrics</groupId>
            <artifactId>metrics-core</artifactId>
            <version>2.1.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-common</artifactId>
            <version>2.4.6</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-spark</artifactId>
            <version>2.0.0-alpha3</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-protocol-shaded</artifactId>
            <version>2.4.6</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-client</artifactId>
            <version>2.4.6</version>
        </dependency>
    </dependencies>

标签: javamavenapache-sparkhbase

解决方案

报错中缺失的类 `org.apache.hbase.thirdparty.com.google.common.cache.CacheLoader` 来自 HBase 的第三方重定位(relocated)依赖包 hbase-thirdparty,它并不包含在 `hbase-client` 自身的 jar 中。解决方法是在 Maven 中显式添加该依赖(或改用自带全部依赖的 `hbase-shaded-client`):

    <dependency>
        <groupId>org.apache.hbase.thirdparty</groupId>
        <artifactId>hbase-shaded-miscellaneous</artifactId>
        <version>3.5.1</version>
    </dependency>

另外注意:集群的 HBase 版本是 2.4.7,而 pom 中声明的 HBase 依赖版本是 2.4.6 —— 建议将 `hbase-common`、`hbase-client`、`hbase-protocol-shaded` 统一升级为 2.4.7,使客户端依赖与服务端版本保持一致。如果作业通过 `spark-submit` 提交,还需确保这些 jar 被打进 uber jar(例如使用 maven-shade-plugin)或通过 `--jars` 参数随作业分发,否则驱动端/执行端的 classpath 上仍会缺少该类。


推荐阅读