Java – Spark: java.lang.NoClassDefFoundError: com/mongodb/hadoop/MongoInputFormat

Spark: java.lang.NoClassDefFoundError: com/mongodb/hadoop/MongoInputFormat… here is a solution to the problem.

Spark: java.lang.NoClassDefFoundError: com/mongodb/hadoop/MongoInputFormat

I’m trying to read data from MongoDB in Spark using the mongo-hadoop connector.

I tried different versions of the mongo-hadoop connector jar and I still get this error.

The code compiles without errors; the exception is only thrown at runtime.

What can I do to fix this?

Thanks in advance.

Exception in thread "main" java.lang.NoClassDefFoundError: com/mongodb/hadoop/MongoInputFormat
    at com.geekcap.javaworld.wordcount.Mongo.main(Mongo.java:47)
Caused by: java.lang.ClassNotFoundException: com.mongodb.hadoop.MongoInputFormat
    at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
    ... 1 more
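
A NoClassDefFoundError at this point means the code compiled against com.mongodb.hadoop.MongoInputFormat but the class cannot be found on the classpath when the program actually runs. As a quick sanity check (a small diagnostic sketch, not part of the original code), you can try loading the class by name at the top of main; if it fails, the mongo-hadoop jar is simply not on the runtime classpath:

    // Hypothetical diagnostic: fails fast if the mongo-hadoop connector classes
    // are missing from the runtime classpath, independent of any Spark setup.
    try {
        Class.forName("com.mongodb.hadoop.MongoInputFormat");
    } catch (ClassNotFoundException e) {
        System.err.println("mongo-hadoop-core is not on the runtime classpath: " + e);
    }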

My code

    import com.mongodb.hadoop.BSONFileOutputFormat;
    import com.mongodb.hadoop.MongoInputFormat;
    import com.mongodb.hadoop.MongoOutputFormat;

    import java.util.Arrays;
    import java.util.Collections;
    import java.util.LinkedList;
    import java.util.Queue;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaPairRDD;
    import org.apache.spark.api.java.JavaRDD;
    import org.apache.spark.api.java.JavaSparkContext;
    import org.apache.spark.api.java.function.FlatMapFunction;
    import org.bson.BSONObject;

    public class MongoTest {

        public static void main(String[] args) {
            SparkConf conf = new SparkConf().setMaster("local").setAppName("App1");
            JavaSparkContext sc = new JavaSparkContext(conf);

            // Set configuration options for the MongoDB Hadoop Connector.
            Configuration mongodbConfig = new Configuration();
            mongodbConfig.set("mongo.job.input.format", "com.mongodb.hadoop.MongoInputFormat");
            mongodbConfig.set("mongo.input.uri", "mongodb://localhost:27017/MyCollectionName.collection");

            JavaPairRDD<Object, BSONObject> documents = sc.newAPIHadoopRDD(
                    mongodbConfig,          // Configuration
                    MongoInputFormat.class, // InputFormat: read from a live cluster
                    Object.class,           // Key class
                    BSONObject.class        // Value class
            );

            documents.saveAsTextFile("b.txt");
        }
    }

pom.xml dependencies:

    <dependencies>
        <!-- Import Spark -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.11</artifactId>
            <version>1.4.0</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.11</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.mongodb</groupId>
            <artifactId>mongodb-driver</artifactId>
            <version>3.0.4</version>
        </dependency>
        <dependency>
            <groupId>hadoopCom</groupId>
            <artifactId>com.sample</artifactId>
            <version>1.0</version>
            <scope>system</scope>
            <systemPath>/home/sys6002/NetBeansProjects/WordCount/lib/hadoop-common-2.7.1.jar</systemPath>
        </dependency>
        <dependency>
            <groupId>hadoopCon1</groupId>
            <artifactId>com.sample1</artifactId>
            <version>1.0</version>
            <scope>system</scope>
            <systemPath>/home/sys6002/Downloads/mongo-hadoop-core-1.3.0.jar</systemPath>
        </dependency>
    </dependencies>
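
The likely culprit is visible in this pom.xml: mongo-hadoop-core 1.3.0 is declared as a system-scoped dependency pointing at a local jar. System-scoped dependencies behave like provided ones: they are available at compile time but are not packaged with the application, so they can easily be missing when the job actually runs, which matches the compiles-cleanly-but-fails-at-runtime symptom. One way to confirm (a hypothetical check, not in the original code) is to print the classpath the JVM started with and look for the connector jar:

    // Hypothetical check: list classpath entries and flag the mongo-hadoop jar if present.
    String classpath = System.getProperty("java.class.path");
    for (String entry : classpath.split(java.io.File.pathSeparator)) {
        if (entry.contains("mongo-hadoop")) {
            System.out.println("Connector jar found on the classpath: " + entry);
        }
    }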

Solution

After many trials and modifications, the setup below worked. The key change is declaring mongo-hadoop-core as a regular Maven dependency (org.mongodb.mongo-hadoop:mongo-hadoop-core:1.4.1) instead of a system-scoped local jar, so the connector classes end up on the runtime classpath.

    <dependencies>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.11</artifactId>
            <version>1.5.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.11</artifactId>
            <version>1.5.1</version>
        </dependency>
        <dependency>
            <groupId>log4j</groupId>
            <artifactId>log4j</artifactId>
            <version>1.2.14</version>
        </dependency>
        <dependency>
            <groupId>org.mongodb.mongo-hadoop</groupId>
            <artifactId>mongo-hadoop-core</artifactId>
            <version>1.4.1</version>
        </dependency>
    </dependencies>

Java code

    Configuration conf = new Configuration();
    conf.set("mongo.job.input.format", "com.mongodb.hadoop.MongoInputFormat");
    conf.set("mongo.input.uri", "mongodb://localhost:27017/databasename.collectionname");

    SparkConf sconf = new SparkConf().setMaster("local").setAppName("Spark UM Jar");
    JavaSparkContext sc = new JavaSparkContext(sconf);

    JavaRDD<User> UserMaster = sc.newAPIHadoopRDD(conf, MongoInputFormat.class, Object.class, BSONObject.class)
            .map(new Function<Tuple2<Object, BSONObject>, User>() {
                @Override
                public User call(Tuple2<Object, BSONObject> v1) throws Exception {
                    // construct and return a User built from the BSONObject in v1._2()
                    return User
                }
            });
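
For completeness, the same connector can also write results back to MongoDB. The sketch below is an illustration rather than part of the original answer: it assumes a JavaPairRDD<Object, BSONObject> named documents (like the one built in the question's code) and a placeholder output URI, and hands the RDD to MongoOutputFormat through saveAsNewAPIHadoopFile, whose path argument the Mongo output format does not use:

    // Illustrative write-back sketch; "documents" and the output URI are assumptions.
    Configuration outputConfig = new Configuration();
    outputConfig.set("mongo.output.uri", "mongodb://localhost:27017/databasename.outputcollection");

    documents.saveAsNewAPIHadoopFile(
            "file:///this-is-unused",   // required by the API, ignored by MongoOutputFormat
            Object.class,               // key class
            BSONObject.class,           // value class
            MongoOutputFormat.class,    // writes to the collection named in mongo.output.uri
            outputConfig);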
