
Standalone HBase with Spark: HBaseTest.scala gives an error

Hi, I am using standalone HBase and I want to test Spark with it. I do not have Hadoop installed on my machine.

When I try to get the row count of a table using HBaseTest.scala (the Scala example that ships with Spark), I get the following error:

ERROR TableInputFormat: java.io.IOException: java.lang.reflect.InvocationTargetException 
    at org.apache.hadoop.hbase.client.HConnectionManager.createConnection(HConnectionManager.java:416) 
    at org.apache.hadoop.hbase.client.HConnectionManager.createConnection(HConnectionManager.java:393) 
    at org.apache.hadoop.hbase.client.HConnectionManager.getConnection(HConnectionManager.java:274) 
    at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:194) 
    at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:156) 
    at org.apache.hadoop.hbase.mapreduce.TableInputFormat.setConf(TableInputFormat.java:101) 
    at org.apache.spark.rdd.NewHadoopRDD.getPartitions(NewHadoopRDD.scala:91) 
    at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:219) 
    at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:217) 
    at scala.Option.getOrElse(Option.scala:120) 
    at org.apache.spark.rdd.RDD.partitions(RDD.scala:217) 
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1632) 
    at org.apache.spark.rdd.RDD.count(RDD.scala:1012) 
    at org.apache.spark.examples.HBaseTest$.main(HBaseTest.scala:59) 
    at org.apache.spark.examples.HBaseTest.main(HBaseTest.scala) 
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) 
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 
    at java.lang.reflect.Method.invoke(Method.java:606) 
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:607) 
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:167) 
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:190) 
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:111) 
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) 
Caused by: java.lang.reflect.InvocationTargetException 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57) 
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 
    at java.lang.reflect.Constructor.newInstance(Constructor.java:526) 
    at org.apache.hadoop.hbase.client.HConnectionManager.createConnection(HConnectionManager.java:414) 
    ... 23 more 
Caused by: java.lang.VerifyError: class org.apache.hadoop.hbase.protobuf.generated.ClientProtos$Result overrides final method getUnknownFields.()Lcom/google/protobuf/UnknownFieldSet; 
    at java.lang.ClassLoader.defineClass1(Native Method) 
    at java.lang.ClassLoader.defineClass(ClassLoader.java:800) 
    at java.security.SecureClassLoader.defineClass(SecureClassLoader.java:142) 
    at java.net.URLClassLoader.defineClass(URLClassLoader.java:449) 
    at java.net.URLClassLoader.access$100(URLClassLoader.java:71) 
    at java.net.URLClassLoader$1.run(URLClassLoader.java:361) 
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355) 
    at java.security.AccessController.doPrivileged(Native Method) 
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354) 
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425) 
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308) 
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358) 
    at org.apache.hadoop.hbase.protobuf.ProtobufUtil.<clinit>(ProtobufUtil.java:176) 
    at org.apache.hadoop.hbase.ClusterId.parseFrom(ClusterId.java:64) 
    at org.apache.hadoop.hbase.zookeeper.ZKClusterId.readClusterIdZNode(ZKClusterId.java:69) 
    at org.apache.hadoop.hbase.client.ZooKeeperRegistry.getClusterId(ZooKeeperRegistry.java:83) 
    at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.retrieveClusterId(HConnectionManager.java:857) 
    at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.<init>(HConnectionManager.java:662) 
    ... 28 more 

Exception in thread "main" java.io.IOException: No table was provided. 
    at org.apache.hadoop.hbase.mapreduce.TableInputFormatBase.getSplits(TableInputFormatBase.java:154) 
    at org.apache.spark.rdd.NewHadoopRDD.getPartitions(NewHadoopRDD.scala:95) 
    at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:219) 
    at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:217) 
    at scala.Option.getOrElse(Option.scala:120) 
    at org.apache.spark.rdd.RDD.partitions(RDD.scala:217) 
    at org.apache.spark.SparkContext.runJob(SparkContext.scala:1632) 
    at org.apache.spark.rdd.RDD.count(RDD.scala:1012) 
    at org.apache.spark.examples.HBaseTest$.main(HBaseTest.scala:59) 
    at org.apache.spark.examples.HBaseTest.main(HBaseTest.scala) 
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) 
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 
    at java.lang.reflect.Method.invoke(Method.java:606) 
    at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:607) 
    at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:167) 
    at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:190) 
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:111) 
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) 

I am not able to figure out what the problem is here. HBaseTest.scala:

import org.apache.hadoop.fs.Path
import org.apache.hadoop.hbase.{HBaseConfiguration, HTableDescriptor, TableName}
import org.apache.hadoop.hbase.client.HBaseAdmin
import org.apache.hadoop.hbase.mapreduce.TableInputFormat
import org.apache.spark.{SparkConf, SparkContext}

object HBaseTest {
  def main(args: Array[String]) {
    val sparkConf = new SparkConf().setAppName("HBaseTest").setMaster("local")
    val sc = new SparkContext(sparkConf)
    val conf = HBaseConfiguration.create()
    // Other options for configuring scan behavior are available. More information available at
    // http://hbase.apache.org/apidocs/org/apache/hadoop/hbase/mapreduce/TableInputFormat.html
    conf.set("zookeeper.znode.parent", "/hbase-unsecure")
    conf.set("hbase.zookeeper.quorum", "localhost")
    conf.set("hbase.zookeeper.property.clientPort", "2181")
    conf.addResource(new Path("/usr/lib/hbase/hbase-0.94.8/conf/hbase-site.xml"))
    conf.set(TableInputFormat.INPUT_TABLE, "test")
    // Initialize the HBase table if necessary
    val admin = new HBaseAdmin(conf)
    if (!admin.isTableAvailable("test")) {
      print("inside if statement")
      val tableDesc = new HTableDescriptor(TableName.valueOf("test"))
      admin.createTable(tableDesc)
    }
    val hBaseRDD = sc.newAPIHadoopRDD(conf, classOf[TableInputFormat],
      classOf[org.apache.hadoop.hbase.io.ImmutableBytesWritable],
      classOf[org.apache.hadoop.hbase.client.Result])
    hBaseRDD.count()

    sc.stop()
  }
}
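For reference, once the connection problem is solved, here is a minimal sketch (my own addition, not part of the Spark example) of reading the row keys back out of hBaseRDD; Bytes.toString is only used to turn HBase's raw bytes into printable strings:

import org.apache.hadoop.hbase.util.Bytes

// Each RDD element is an (ImmutableBytesWritable, Result) pair produced by TableInputFormat.
val rowKeys = hBaseRDD.map { case (key, _) =>
  Bytes.toString(key.get()) // ImmutableBytesWritable wraps the raw row-key bytes
}
rowKeys.take(10).foreach(println)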

I am facing the same issue; did you ever find a solution to it? –

Answer


You are using the class TableInputFormat as the input format. The TableInputFormat class belongs to the Hadoop MapReduce API, so you need Hadoop (and its jars on the application classpath) in order to use TableInputFormat.
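Concretely, that means getting the Hadoop and matching HBase artifacts onto the classpath of the Spark application. A minimal build.sbt sketch, assuming sbt with HBase 0.98 built for Hadoop 2; the version strings here are illustrative and must match whatever is actually installed:

// build.sbt (sketch): versions are illustrative; mismatched Hadoop/HBase/protobuf
// versions are a common cause of VerifyErrors like the one in the question.
libraryDependencies ++= Seq(
  "org.apache.spark"  %% "spark-core"    % "1.2.0" % "provided",
  "org.apache.hadoop" %  "hadoop-client" % "2.4.0",          // the Hadoop MapReduce API that TableInputFormat builds on
  "org.apache.hbase"  %  "hbase-client"  % "0.98.7-hadoop2",
  "org.apache.hbase"  %  "hbase-server"  % "0.98.7-hadoop2"  // contains org.apache.hadoop.hbase.mapreduce.TableInputFormat
)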