2016-07-26

I am running the following Hive DROP TABLE query from Apache Spark:

this.queryExecutor.executeQuery("Drop table user") 

and I am getting the following exception:

java.lang.LinkageError: ClassCastException: attempting to cast jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-assembly-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/javax/ws/rs/ext/RuntimeDelegate.class to jar:file:/usr/hdp/2.4.2.0-258/spark/lib/spark-assembly-1.6.1.2.4.2.0-258-hadoop2.7.1.2.4.2.0-258.jar!/javax/ws/rs/ext/RuntimeDelegate.class 
    at javax.ws.rs.ext.RuntimeDelegate.findDelegate(RuntimeDelegate.java:116) 
    at javax.ws.rs.ext.RuntimeDelegate.getInstance(RuntimeDelegate.java:91) 
    at javax.ws.rs.core.MediaType.<clinit>(MediaType.java:44) 
    at com.sun.jersey.core.header.MediaTypes.<clinit>(MediaTypes.java:64) 
    at com.sun.jersey.core.spi.factory.MessageBodyFactory.initReaders(MessageBodyFactory.java:182) 
    at com.sun.jersey.core.spi.factory.MessageBodyFactory.initReaders(MessageBodyFactory.java:175) 
    at com.sun.jersey.core.spi.factory.MessageBodyFactory.init(MessageBodyFactory.java:162) 
    at com.sun.jersey.api.client.Client.init(Client.java:342) 
    at com.sun.jersey.api.client.Client.access$000(Client.java:118) 
    at com.sun.jersey.api.client.Client$1.f(Client.java:191) 
    at com.sun.jersey.api.client.Client$1.f(Client.java:187) 
    at com.sun.jersey.spi.inject.Errors.processWithErrors(Errors.java:193) 
    at com.sun.jersey.api.client.Client.<init>(Client.java:187) 
    at com.sun.jersey.api.client.Client.<init>(Client.java:170) 
    at org.apache.hadoop.yarn.client.api.impl.TimelineClientImpl.serviceInit(TimelineClientImpl.java:340) 
    at org.apache.hadoop.service.AbstractService.init(AbstractService.java:163) 
    at org.apache.hadoop.hive.ql.hooks.ATSHook.<init>(ATSHook.java:67) 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) 
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) 
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) 
    at java.lang.reflect.Constructor.newInstance(Constructor.java:423) 
    at java.lang.Class.newInstance(Class.java:442) 
    at org.apache.hadoop.hive.ql.hooks.HookUtils.getHooks(HookUtils.java:60) 
    at org.apache.hadoop.hive.ql.Driver.getHooks(Driver.java:1309) 
    at org.apache.hadoop.hive.ql.Driver.getHooks(Driver.java:1293) 
    at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1347) 
    at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1195) 
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1059) 
    at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1049) 
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:495) 
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$runHive$1.apply(ClientWrapper.scala:484) 
    at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$withHiveState$1.apply(ClientWrapper.scala:290) 
    at org.apache.spark.sql.hive.client.ClientWrapper.liftedTree1$1(ClientWrapper.scala:237) 
    at org.apache.spark.sql.hive.client.ClientWrapper.retryLocked(ClientWrapper.scala:236) 
    at org.apache.spark.sql.hive.client.ClientWrapper.withHiveState(ClientWrapper.scala:279) 
    at org.apache.spark.sql.hive.client.ClientWrapper.runHive(ClientWrapper.scala:484) 
    at org.apache.spark.sql.hive.client.ClientWrapper.runSqlHive(ClientWrapper.scala:474) 
    at org.apache.spark.sql.hive.HiveContext.runSqlHive(HiveContext.scala:613) 
    at org.apache.spark.sql.hive.execution.DropTable.run(commands.scala:89) 
    at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:58) 
    at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:56) 
    at org.apache.spark.sql.execution.ExecutedCommand.doExecute(commands.scala:70) 
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132) 
    at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130) 
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150) 
    at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130) 
    at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:55) 
    at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:55) 
    at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:145) 
    at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:130) 
    at org.apache.spark.sql.DataFrame$.apply(DataFrame.scala:52) 
    at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:817) 
    at com.accenture.aa.dmah.spark.core.QueryExecutor.executeQuery(QueryExecutor.scala:35) 
    at com.accenture.aa.dmah.attribution.transformer.MulltipleUserJourneyTransformer.transform(MulltipleUserJourneyTransformer.scala:32) 
    at com.accenture.aa.dmah.attribution.userjourney.UserJourneyBuilder$$anonfun$buildUserJourney$1.apply$mcVI$sp(UserJourneyBuilder.scala:31) 
    at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141) 
    at com.accenture.aa.dmah.attribution.userjourney.UserJourneyBuilder.buildUserJourney(UserJourneyBuilder.scala:29) 
    at com.accenture.aa.dmah.attribution.core.AttributionHub.executeAttribution(AttributionHub.scala:47) 
    at com.accenture.aa.dmah.attribution.jobs.AttributionJob.process(AttributionJob.scala:33) 
    at com.accenture.aa.dmah.core.DMAHJob.processJob(DMAHJob.scala:73) 
    at com.accenture.aa.dmah.core.DMAHJob.execute(DMAHJob.scala:27) 
    at com.accenture.aa.dmah.core.JobRunner.<init>(JobRunner.scala:17) 
    at com.accenture.aa.dmah.core.ApplicationInstance.initilize(ApplicationInstance.scala:48) 
    at com.accenture.aa.dmah.core.Bootstrap.boot(Bootstrap.scala:112) 
    at com.accenture.aa.dmah.core.BootstrapObj$.main(Bootstrap.scala:134) 
    at com.accenture.aa.dmah.core.BootstrapObj.main(Bootstrap.scala) 
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 
    at java.lang.reflect.Method.invoke(Method.java:498) 
    at scala.tools.nsc.util.ScalaClassLoader$$anonfun$run$1.apply(ScalaClassLoader.scala:71) 
    at scala.tools.nsc.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31) 
    at scala.tools.nsc.util.ScalaClassLoader$URLClassLoader.asContext(ScalaClassLoader.scala:139) 
    at scala.tools.nsc.util.ScalaClassLoader$class.run(ScalaClassLoader.scala:71) 
    at scala.tools.nsc.util.ScalaClassLoader$URLClassLoader.run(ScalaClassLoader.scala:139) 
    at scala.tools.nsc.CommonRunner$class.run(ObjectRunner.scala:28) 
    at scala.tools.nsc.ObjectRunner$.run(ObjectRunner.scala:45) 
    at scala.tools.nsc.CommonRunner$class.runAndCatch(ObjectRunner.scala:35) 
    at scala.tools.nsc.ObjectRunner$.runAndCatch(ObjectRunner.scala:45) 
    at scala.tools.nsc.MainGenericRunner.runTarget$1(MainGenericRunner.scala:74) 
    at scala.tools.nsc.MainGenericRunner.process(MainGenericRunner.scala:96) 
    at scala.tools.nsc.MainGenericRunner$.main(MainGenericRunner.scala:105) 
    at scala.tools.nsc.MainGenericRunner.main(MainGenericRunner.scala) 

I have seen similar posts here and here, but they have had no responses so far. I also looked here, but I don't think that is a valid course of action in my case.

Interestingly, this happens specifically when we try a DROP TABLE (or DROP TABLE IF EXISTS) query.

Hoping to find a solution for this.

+0

Did you get any answer to this question? –

+0

Did you manage to solve this issue? – Edge7

+0

@Edge7 Hi, no... we were not able to find a solution for this. It turned out that the requirement to drop the table was phased out, so we could not look into this any further. – hbabbar

Answer

0

As far as I can tell, the above error is most likely caused by the same class, under the same package structure, i.e. 'javax.ws.rs.ext.RuntimeDelegate', being present in different JARs. The class object is created and cast at runtime, so when the code responsible for triggering the DROP syntax runs, there is every chance it resolves the class from the wrong JAR and breaks, because the class is found more than once on the classpath.
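
As a quick sanity check (a sketch of my own, not something from the original post), you can ask the classloader for every location that provides this class; seeing more than one URL would confirm the duplicate-class theory:

// Minimal sketch, runnable in spark-shell: list every classpath location
// that provides javax.ws.rs.ext.RuntimeDelegate. More than one URL means
// the class is duplicated on the classpath, as described above.
import scala.collection.JavaConverters._

val urls = getClass.getClassLoader
  .getResources("javax/ws/rs/ext/RuntimeDelegate.class")
  .asScala
  .toList

urls.foreach(println)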

I tried both DROP and DROP IF EXISTS on CDH 5 and they ran without issue. Below are the details of my runs:

First run: Hadoop 2.6, Hive 1.1.0 and Spark 1.3.1 (Hive libraries included in the Spark lib).
Second run: Hadoop 2.6, Hive 1.1.0 and Spark 1.6.1.
Mode of run: CLI.

scala> sqlContext.sql("DROP TABLE SAMPLE"); 
16/08/04 11:31:39 INFO parse.ParseDriver: Parsing command: DROP TABLE SAMPLE 
16/08/04 11:31:39 INFO parse.ParseDriver: Parse Completed 
...... 
scala> sqlContext.sql("DROP TABLE IF EXISTS SAMPLE"); 
16/08/04 11:40:34 INFO parse.ParseDriver: Parsing command: DROP TABLE IF EXISTS SAMPLE 
16/08/04 11:40:35 INFO parse.ParseDriver: Parse Completed 
..... 

If possible, please try the DROP command against different versions of the Spark libs to narrow down the problem.

In the meantime, I am analyzing the jars to find out how two copies of the same RuntimeDelegate class ended up being linked. I will check whether removing one of the jars fixes the problem, and whether adding it back reproduces the same issue.
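
If you want to run the same scan on your side, here is a rough sketch (the lib path is copied from the stack trace in the question; adjust it for your installation) that prints every jar in a directory bundling the RuntimeDelegate class:

import java.io.File
import java.util.jar.JarFile

// Rough sketch: list every jar under the Spark lib directory that bundles
// javax/ws/rs/ext/RuntimeDelegate.class. The path is taken from the stack
// trace above; change it to match your cluster layout.
val libDir = new File("/usr/hdp/2.4.2.0-258/spark/lib")
val jars = Option(libDir.listFiles).getOrElse(Array.empty[File])
  .filter(_.getName.endsWith(".jar"))

jars.foreach { jar =>
  val jf = new JarFile(jar)
  try {
    if (jf.getEntry("javax/ws/rs/ext/RuntimeDelegate.class") != null)
      println(jar.getName)
  } finally jf.close()
}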

+0

Also, if possible, please list the Jersey and javax.ws.rs-api jar references in your environment so we can check for conflicts. – Aditya