mapreduce - Error in Hadoop: "Exception in thread "main" java.lang.ClassNotFoundException"
I have downloaded Hadoop 2.7.1 (Apache Hadoop) on Ubuntu 14.04.3 LTS (GNU/Linux 3.19.0-25-generic x86_64) running in an Oracle VM VirtualBox.
I'm using the following commands to compile and run the code.
Compile:
hduser@dt-virtualbox:~/desktop/project/try1$ javac -classpath $HADOOP_HOME/share/hadoop/common/hadoop-common-2.7.1.jar:$HADOOP_HOME/share/hadoop/mapreduce/hadoop-mapreduce-client-core-2.7.1.jar:$HADOOP_HOME/share/hadoop/common/lib/commons-cli-1.2.jar -d /home/hduser/desktop/project/try1 *.java
Then I make a jar file of the class files and run the jar using:
hduser@dt-virtualbox:~/desktop/project/try1$ hadoop jar table_one.jar DriverMap /trial/trial/ output_tryy1
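(The jar itself is built from the compiled class files before this step; the exact command used is not shown in the question, but it would typically be something along these lines, run from the directory that holds the .class files:)

hduser@dt-virtualbox:~/desktop/project/try1$ jar -cvf table_one.jar *.class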
Please find the error below:
Exception in thread "main" java.lang.ClassNotFoundException: DriverMap
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    at java.lang.Class.forName0(Native Method)
    at java.lang.Class.forName(Class.java:278)
    at org.apache.hadoop.util.RunJar.run(RunJar.java:214)
    at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
This is the DriverMap.java file:
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;

public class DriverMap {

    // The mapper is declared static so Hadoop can instantiate it by reflection.
    // TextInputFormat supplies LongWritable byte offsets as keys and the line text as values.
    public static class MyMapper extends Mapper<LongWritable, Text, Text, Text> {

        String str, token = null;
        List<String> tokens = new ArrayList<String>();
        String productId, userId, score;
        private Text word = new Text();

        public void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException {
            str = value.toString();
            Pattern p = Pattern.compile(".*productId.*$|.*userId.*$|.*score.*$");
            Matcher m = p.matcher(str);
            while (m.find()) {
                token = m.group(0); // group 0 is the entire match
                tokens.add(token);
            }
            String[] a = tokens.toString().split(":|\\,|]");
            for (int j = 0; j < a.length; j = j + 6) {
                productId = a[j + 1];
                userId = a[j + 3];
                score = a[j + 5];
                word.set(productId + "|" + userId);
                context.write(word, new Text(score));
            }
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = new Job(conf, "recommendation");

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);

        job.setMapperClass(MyMapper.class);
        // job.setReducerClass(Reduce.class);
        job.setNumReduceTasks(0); // no reducer: the mapper output is written directly to the output files

        job.setInputFormatClass(TextInputFormat.class);
        job.setOutputFormatClass(TextOutputFormat.class);
        job.setJarByClass(DriverMap.class);

        /*
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        */
        TextInputFormat.setInputPaths(job, new Path(args[0]));
        TextOutputFormat.setOutputPath(job, new Path(args[1]));

        job.waitForCompletion(true);
    }
}
You have to give DriverMap's package name, i.e. use "packagename.DriverMap" as the class name while running the jar file.
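For example, if DriverMap.java started with a package declaration (the package name below is purely illustrative):

package recommendation;   // hypothetical package; DriverMap.class then lives under recommendation/ inside the jar

then the job would have to be launched with the fully qualified class name:

hduser@dt-virtualbox:~/desktop/project/try1$ hadoop jar table_one.jar recommendation.DriverMap /trial/trial/ output_tryy1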
Also, if you are creating the jar file (for example by exporting it from an IDE), you do not need to compile the source code manually first.
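Either way, you can check the exact (case-sensitive) class name and any package directories actually stored in the jar before running it:

hduser@dt-virtualbox:~/desktop/project/try1$ jar tf table_one.jar

The class name passed to hadoop jar must match one of the listed .class entries, with the directory separators replaced by dots and the .class suffix dropped.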