
I am running JavaCV code for template matching on Hadoop, but I get the following error. Please help with image processing (template matching) in JavaCV code on Hadoop.

[email protected]:/usr/local/hadoop$ bin/hadoop dfs -rmr /user/hduser/out 
Warning: $HADOOP_HOME is deprecated. 

Deleted hdfs://master:54310/user/hduser/out 
[email protected]:/usr/local/hadoop$ bin/hadoop jar back.jar back /user/hduser/in  /user/hduser/out 
Warning: $HADOOP_HOME is deprecated. 

12/11/27 19:42:47 WARN mapred.JobClient: Use GenericOptionsParser for parsing the  arguments. Applications should implement Tool for the same. 
12/11/27 19:42:47 INFO util.NativeCodeLoader: Loaded the native-hadoop library 
12/11/27 19:42:47 WARN snappy.LoadSnappy: Snappy native library not loaded 
12/11/27 19:42:47 INFO mapred.FileInputFormat: Total input paths to process : 1 
12/11/27 19:42:47 INFO mapred.JobClient: Running job: job_201211271649_0022 
12/11/27 19:42:48 INFO mapred.JobClient: map 0% reduce 0% 
12/11/27 19:43:01 INFO mapred.JobClient: Task Id : attempt_201211271649_0022_m_000000_0, Status : FAILED 
java.lang.RuntimeException: Error in configuring object 
at org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:93) 
at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:64) 
at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:117) 
at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:432) 
at org.apache.hadoop.mapred.MapTask.run(MapTask.java:372) 
at org.apache.hadoop.mapred.Child$4.run(Child.java:255) 
at java.security.AccessController.doPrivileged(Native Method) 
at javax.security.auth.Subject.doAs(Subject.java:396) 
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1121) 
at org.apache.hadoop.mapred.Child.main(Child.java:249) 
Caused by: java.lang.reflect.InvocationTargetException 
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39) 
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25) 
at java.lang.reflect.Method.invoke(Method.java:597) 
at org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:88) 
... 9 more 
Caused by: java.lang.NoClassDefFoundError: com/googlecode/javacv/cpp/opencv_core$CvArr 
at java.lang.Class.forName0(Native Method) 
at java.lang.Class.forName(Class.java:247) 
at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:820) 
at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:865) 
at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:891) 
at org.apache.hadoop.mapred.JobConf.getMapperClass(JobConf.java:947) 
at org.apache.hadoop.mapred.MapRunner.configure(MapRunner.java:34) 
... 14 more 
Caused by: java.lang.ClassNotFoundException: com.googlecode.javacv.cpp.opencv_core$CvArr 
at java.net.URLClassLoader$1.run(URLClassLoader.java:202) 
at java.security.AccessController.doPrivileged(Native Method) 
at java.net.URLClassLoader.findClass(URLClassLoader.java:190) 
at java.lang.ClassLoader.loadClass(ClassLoader.java:306) 
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:301) 
at java.lang.ClassLoader.loadClass(ClassLoader.java:247) 
... 21 more 

12/11/27 19:43:02 INFO mapred.JobClient: Task Id : attempt_201211271649_0022_m_000001_0, Status : FAILED 
12/11/27 19:43:26 INFO mapred.JobClient: Job complete: job_201211271649_0022 
12/11/27 19:43:26 INFO mapred.JobClient: Counters: 7 
12/11/27 19:43:26 INFO mapred.JobClient: Job Counters 
12/11/27 19:43:26 INFO mapred.JobClient:  SLOTS_MILLIS_MAPS=42747 
12/11/27 19:43:26 INFO mapred.JobClient:  Total time spent by all reduces waiting after reserving slots (ms)=0 
12/11/27 19:43:26 INFO mapred.JobClient:  Total time spent by all maps waiting after reserving slots (ms)=0 
12/11/27 19:43:26 INFO mapred.JobClient:  Launched map tasks=8 
12/11/27 19:43:26 INFO mapred.JobClient:  Data-local map tasks=8 
12/11/27 19:43:26 INFO mapred.JobClient:  SLOTS_MILLIS_REDUCES=0 
12/11/27 19:43:26 INFO mapred.JobClient:  Failed map tasks=1 
12/11/27 19:43:26 INFO mapred.JobClient: Job Failed: # of failed Map Tasks exceeded allowed limit. FailedCount: 1. LastFailedTask: task_201211271649_0022_m_000000 
Exception in thread "main" java.io.IOException: Job failed! 
at org.apache.hadoop.mapred.JobClient.runJob(JobClient.java:1265) 
at back.main(back.java:192) 
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39) 
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25) 
at java.lang.reflect.Method.invoke(Method.java:597) 
at org.apache.hadoop.util.RunJar.main(RunJar.java:156) 

My JavaCV code is as follows:

    import static com.googlecode.javacv.cpp.opencv_core.*;
    import static com.googlecode.javacv.cpp.opencv_highgui.*;
    import static com.googlecode.javacv.cpp.opencv_imgproc.*;

    import java.io.IOException;
    import java.util.Iterator;

    import org.apache.hadoop.conf.*;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.*;
    import org.apache.hadoop.mapred.*;

    import com.googlecode.javacv.cpp.opencv_core.CvPoint;
    import com.googlecode.javacv.cpp.opencv_core.CvSize;
    import com.googlecode.javacv.cpp.opencv_core.IplImage;
    import com.googlecode.javacv.cpp.opencv_imgproc.CvHistogram;

    public class back {

        // Directory (relative to the task's working directory) holding the images.
        private static String testFilespath = "images";

        public static class Map extends MapReduceBase implements Mapper<LongWritable, Text, Text, IntWritable> {

            private Text image = new Text();
            private final static IntWritable one = new IntWritable(1);
            private final static IntWritable zero = new IntWritable(0);

            public void map(LongWritable key, Text value, OutputCollector<Text, IntWritable> output,
                    Reporter reporter) throws IOException {
                // Each input line names one search image; the template is always my1.jpg.
                String line = value.toString();
                IplImage src_img = cvLoadImage(testFilespath + "/" + line, CV_LOAD_IMAGE_COLOR);
                IplImage tmp_img = cvLoadImage(testFilespath + "/" + "my1.jpg", CV_LOAD_IMAGE_COLOR);

                IplImage[] src_planes = new IplImage[3];
                IplImage[] tmp_planes = new IplImage[3];
                int[] hist_size = { 90 };
                float[] h_ranges = { 0, 180 };
                float[][] ranges = { h_ranges };
                CvPoint min_loc = new CvPoint();
                CvPoint max_loc = new CvPoint();

                // One single-channel plane per HSV channel, for source and template.
                for (int i = 0; i < 3; i++) {
                    src_planes[i] = cvCreateImage(cvGetSize(src_img), IPL_DEPTH_8U, 1);
                    tmp_planes[i] = cvCreateImage(cvGetSize(tmp_img), IPL_DEPTH_8U, 1);
                }

                // Convert both images to HSV and split them into their planes.
                IplImage src_hsv = cvCreateImage(cvGetSize(src_img), IPL_DEPTH_8U, 3);
                IplImage tmp_hsv = cvCreateImage(cvGetSize(tmp_img), IPL_DEPTH_8U, 3);
                cvCvtColor(src_img, src_hsv, CV_BGR2HSV);
                cvCvtColor(tmp_img, tmp_hsv, CV_BGR2HSV);
                cvSplit(src_hsv, src_planes[0], src_planes[1], src_planes[2], null);
                cvSplit(tmp_hsv, tmp_planes[0], tmp_planes[1], tmp_planes[2], null);

                // Build a hue histogram of the template.
                CvHistogram hist = cvCreateHist(1, hist_size, CV_HIST_ARRAY, ranges, 1);
                cvCalcHist(tmp_planes, hist, 0, null);

                // (5) Compute, over the whole search image, the distance
                // (method-dependent) to the template's histogram.
                CvSize dst_size = cvSize(src_img.width() - tmp_img.width() + 1,
                        src_img.height() - tmp_img.height() + 1);
                int n = src_img.width() - tmp_img.width() + 1;
                int g = src_img.height() - tmp_img.height() + 1;

                image.set(line);
                int flag = 0;
                // The template only fits if it is no larger than the search image.
                if (n > 0 && g > 0) {
                    IplImage dst_img = cvCreateImage(dst_size, IPL_DEPTH_32F, 1);
                    cvCalcBackProjectPatch(src_planes, dst_img, cvGetSize(tmp_img), hist,
                            CV_COMP_CORREL, 1.0);

                    double[] min_val = { 0 };
                    double[] max_val = { 0 };
                    cvMinMaxLoc(dst_img, min_val, max_val, min_loc, max_loc, null);

                    // (6) Draw a rectangle at the position matching the template.
                    if (max_loc.x() != 0 && max_loc.y() != 0) {
                        cvRectangle(src_img, max_loc,
                                cvPoint(max_loc.x() + tmp_img.width(),
                                        max_loc.y() + tmp_img.height()),
                                CV_RGB(255, 0, 0), 3, 8, 0);
                        flag = 1;
                    }
                    cvReleaseImage(dst_img);
                }

                // Release the intermediate images in every case, not only on a match.
                cvReleaseImage(tmp_img);
                cvReleaseImage(src_hsv);
                cvReleaseImage(tmp_hsv);
                for (int i = 0; i < 3; i++) {
                    cvReleaseImage(src_planes[i]);
                    cvReleaseImage(tmp_planes[i]);
                }

                if (flag == 1) {
                    output.collect(image, one);
                    // Note: cvSaveImage writes to the task node's local filesystem,
                    // not to HDFS, even though the path looks like an HDFS path.
                    cvSaveImage("/user/hduser/output/" + line, src_img);
                } else {
                    output.collect(image, zero);
                }
                cvReleaseImage(src_img);
            }
        }

        public static class Reduce extends MapReduceBase implements Reducer<Text, IntWritable, Text, IntWritable> {
            public void reduce(Text key, Iterator<IntWritable> values,
                    OutputCollector<Text, IntWritable> output, Reporter reporter) throws IOException {
                // Sum the match flags emitted for each image name.
                int sum = 0;
                while (values.hasNext()) {
                    sum += values.next().get();
                }
                output.collect(key, new IntWritable(sum));
            }
        }

        public static void main(String[] args) throws Exception {
            JobConf conf = new JobConf(back.class);
            conf.setJobName("back");

            conf.setOutputKeyClass(Text.class);
            conf.setOutputValueClass(IntWritable.class);

            conf.setMapperClass(Map.class);
            conf.setCombinerClass(Reduce.class);
            conf.setReducerClass(Reduce.class);

            conf.setInputFormat(TextInputFormat.class);
            conf.setOutputFormat(TextOutputFormat.class);

            FileInputFormat.setInputPaths(conf, new Path(args[0]));
            FileOutputFormat.setOutputPath(conf, new Path(args[1]));

            JobClient.runJob(conf);
        }
    }

You could start with the layout of the code and keep an overview. Making the code much tidier helps you avoid mistakes – 2pietjuh2


Have you tried including the OpenCV libraries in the JAR file, similar to the way it is done for applets: http://code.google.com/p/javacv/wiki/HowToMakeAnApplet? –

Answer


The error you are getting looks like your jar file does not include all of the required dependencies, in particular the OpenCV/JavaCV jars. There are two ways to resolve this error:

  1. Put the required jars in the distributed cache (see Including Third-Party Libraries in my Map-Reduce Job (using distributed cache)); a command-line sketch follows this list.
  2. Create a single jar with all of the dependencies included. The second way is easy to do with Maven (see the build snippet after this list). Maven will also resolve all of your dependency issues for you before it lets you do anything. This is often misinterpreted as Maven not "just working". The point is that if Maven does not "just work", there is complexity in your environment that you do not understand, and that complexity will cause you problems later.
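For the first option, here is a minimal sketch of the job submission, assuming the JavaCV jars sit next to back.jar; the jar names below are illustrative and depend on the JavaCV/OpenCV build you actually use. Note that -libjars is only honored when the driver goes through GenericOptionsParser (ideally by implementing Tool), which is exactly what the first WARN line in your log is hinting at:

    # Jar names are hypothetical; substitute the ones from your JavaCV download.
    export HADOOP_CLASSPATH=javacv.jar:javacpp.jar:opencv-linux-x86_64.jar
    bin/hadoop jar back.jar back \
        -libjars javacv.jar,javacpp.jar,opencv-linux-x86_64.jar \
        /user/hduser/in /user/hduser/out

For the second option, here is a sketch of a maven-assembly-plugin configuration that packages back.jar together with everything it depends on; the plugin settings are standard, but you still need to get the JavaCV jars into a repository Maven can reach:

    <plugin>
      <groupId>org.apache.maven.plugins</groupId>
      <artifactId>maven-assembly-plugin</artifactId>
      <configuration>
        <descriptorRefs>
          <descriptorRef>jar-with-dependencies</descriptorRef>
        </descriptorRefs>
        <archive>
          <manifest>
            <mainClass>back</mainClass>
          </manifest>
        </archive>
      </configuration>
      <executions>
        <execution>
          <phase>package</phase>
          <goals>
            <goal>single</goal>
          </goals>
        </execution>
      </executions>
    </plugin>

The assembled jar (something like target/back-1.0-jar-with-dependencies.jar) can then be submitted exactly as before, with no extra classpath setup. Either way, keep in mind that JavaCV also needs its native OpenCV bindings at runtime; depending on your JavaCV distribution these are bundled in the platform-specific cppjars or must be installed on every task node.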