2013-11-04 39 views
0

演示非常简单。当使用 hadoop jar hia-1.0-SNAPSHOT.jar cha1.CharCount 运行它时,CDH Hadoop 因 ClassNotFoundException 而执行作业失败,异常如下:

Error: java.lang.RuntimeException: java.lang.ClassNotFoundException: Class cha1.CharCount$CMapper not found 
    at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:1587) 
    at org.apache.hadoop.mapreduce.task.JobContextImpl.getMapperClass(JobContextImpl.java:186) 
    at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:715) 
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:338) 
    at org.apache.hadoop.mapred.YarnChild$2.run(YarnChild.java:157) 
    at java.security.AccessController.doPrivileged(Native Method) 
    at javax.security.auth.Subject.doAs(Subject.java:396) 
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1408) 
    at org.apache.hadoop.mapred.YarnChild.main(YarnChild.java:152) 
Caused by: java.lang.ClassNotFoundException: Class cha1.CharCount$CMapper not found 
    at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:1493) 
    at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:1585) 
    ... 8 more 

这里是演示代码

package cha1; 

import org.apache.hadoop.conf.Configuration; 
import org.apache.hadoop.conf.Configured; 
import org.apache.hadoop.fs.Path; 
import org.apache.hadoop.io.ByteWritable; 
import org.apache.hadoop.io.LongWritable; 
import org.apache.hadoop.io.Text; 
import org.apache.hadoop.mapreduce.Job; 
import org.apache.hadoop.mapreduce.Mapper; 
import org.apache.hadoop.mapreduce.Reducer; 
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; 
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; 
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat; 
import org.apache.hadoop.util.Tool; 
import org.apache.hadoop.util.ToolRunner; 

import java.io.IOException; 
import java.util.HashMap; 
import java.util.Map; 

/** 
* User: mzang 
* Date: 10/31/13 
* Time: 4:21 PM 
*/ 
public class CharCount extends Configured implements Tool { 

    class CMapper extends Mapper<LongWritable, Text, ByteWritable, LongWritable> { 

     @Override 
     protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException { 
      byte[] content = value.getBytes(); 
      Map<ByteWritable, Integer> map = new HashMap<ByteWritable, Integer>(); 
      for (byte b : content) { 
       ByteWritable bw = new ByteWritable(b); 
       Integer c = map.get(bw); 
       if (c == null) { 
        map.put(bw, 1); 
       } else { 
        c++; 
        map.put(bw, c); 
       } 
      } 
      for (Map.Entry<ByteWritable, Integer> entry : map.entrySet()) { 
       context.write(entry.getKey(), new LongWritable(entry.getValue())); 
      } 
     } 
    } 

    class CCombiner extends Reducer<ByteWritable, LongWritable, ByteWritable, LongWritable> { 

     @Override 
     protected void reduce(ByteWritable key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException { 

      long sum = 0; 
      for (LongWritable longWritable : values) { 
       sum += longWritable.get(); 
      } 

      context.write(key, new LongWritable(sum)); 

     } 
    } 

    class CReducer extends Reducer<ByteWritable, LongWritable, Text, Text> { 

     @Override 
     protected void reduce(ByteWritable key, Iterable<LongWritable> values, Context context) throws IOException, InterruptedException { 

      long sum = 0; 
      for (LongWritable longWritable : values) { 
       sum += longWritable.get(); 
      } 

      context.write(new Text(String.valueOf(key.get())), new Text(String.valueOf(sum))); 
     } 
    } 

    public int run(String[] args) throws IOException, ClassNotFoundException, InterruptedException { 


     Job job = Job.getInstance(this.getConf()); 
     FileOutputFormat.setOutputPath(job, new Path("/tmp/test/output")); 

     FileInputFormat.setInputPaths(job, new Path("/tmp/test/input")); 

     job.setOutputKeyClass(Text.class); 
     job.setOutputValueClass(Text.class); 
     job.setOutputFormatClass(TextOutputFormat.class); 
     job.setMapperClass(CMapper.class); 
     job.setCombinerClass(CCombiner.class); 
     job.setReducerClass(CReducer.class); 

     job.submit(); 

     return job.waitForCompletion(true) ? 0 : 1; 
    } 

    public static void main(String[] args) throws Exception { 
     Configuration conf = new Configuration(); 
     conf.addResource("hdfs-site.xml"); 
     int res = ToolRunner.run(conf, new CharCount(), args); 

     System.exit(res); 
    } 

} 

这段代码在原生 Apache Hadoop 上应该是可以正常运行的。

我检查了hadoop日志。它只说了很多尝试失败。

然后我试着检查hadoop fs -ls /tmp/hadoop-yarn/staging/history/。但是done_intermediate和done的两个目录是空的。

该jar应分发给hdfs并由每个任务跟踪器下载,对不对?类CharCount $ CMapper应该在jar中。

+2

您是否使用job.setJarByClass(CharCount.class)? –

+0

看[http://stackoverflow.com/questions/8488554/classnotfoundexception-while-running-example-job-of-hadoop][1] [1]:http://stackoverflow.com/questions/8488554/classnotfoundexception-while-running-example-work-of-hadoop – pasha701

回答

1

把 CMapper 改为 static 和 public。事实上,它不是 static 意味着 Hadoop 无法创建它的实例,因为它是 CharCount 的(非静态)内部类,实例化时需要一个外部类实例。

+0

我认为他必须为CReducer添加公共访问权限,CCombiner类也是对的? –

+0

这是正确的 –

+0

我尝试过添加 'public static',也尝试过把这些类移到外层。除了类名不同之外,仍然是相同的异常:Class cha1.CharCount$CMapper not found 或 Class cha1.CMapper not found。在加上 job.setJarByClass(CharCount.class) 之后问题解决了。 – DeepNightTwo