
java.lang.NoSuchMethodError when running a Java program with Spark

I am trying to run the following program:

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;

public class SparkMain {
    public static void main(String[] args) {
        String logFile = "~/useful/json.json "; // Should be some file on your system
        SparkConf conf = new SparkConf().setAppName("Simple Application").setMaster("local");
        JavaSparkContext sc = new JavaSparkContext(conf);
        JavaRDD<String> logData = sc.textFile(logFile).cache();

        long numAs = logData.filter(new Function<String, Boolean>() {
            public Boolean call(String s) { return s.contains("a"); }
        }).count();

        long numBs = logData.filter(new Function<String, Boolean>() {
            public Boolean call(String s) { return s.contains("b"); }
        }).count();

        System.out.println("Lines with a: " + numAs + ", lines with b: " + numBs);
    }
}

The pom.xml file is:

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>com.company</groupId>
    <artifactId>artid</artifactId>
    <version>1.0-SNAPSHOT</version>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
        </plugins>
    </build>

    <dependencies>
        <dependency>
            <groupId>com.sparkjava</groupId>
            <artifactId>spark-core</artifactId>
            <version>2.5</version>
        </dependency>

        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-simple</artifactId>
            <version>1.7.12</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>2.2.0</version>
        </dependency>

        <dependency>
            <groupId>org.scala-lang</groupId>
            <artifactId>scala-library</artifactId>
            <version>2.11.0</version>
        </dependency>

        <dependency>
            <groupId>org.scala-lang</groupId>
            <artifactId>scala-compiler</artifactId>
            <version>2.11.0</version>
        </dependency>

        <dependency>
            <groupId>org.scala-lang</groupId>
            <artifactId>scala-reflect</artifactId>
            <version>2.11.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.11</artifactId>
            <version>1.6.2</version>
        </dependency>
    </dependencies>
</project>

I am using IntelliJ, and when I debug the main function I get the following error:

Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties 
SLF4J: Class path contains multiple SLF4J bindings. 
SLF4J: Found binding in [jar:file:/Users/sudeep/spark/lib/slf4j-log4j12-1.7.5.jar!/org/slf4j/impl/StaticLoggerBinder.class] 
SLF4J: Found binding in [jar:file:/Users/sudeep/.m2/repository/org/slf4j/slf4j-simple/1.7.12/slf4j-simple-1.7.12.jar!/org/slf4j/impl/StaticLoggerBinder.class] 
SLF4J: Found binding in [jar:file:/Users/sudeep/.m2/repository/org/slf4j/slf4j-log4j12/1.7.10/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class] 
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation. 
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory] 
16/07/06 00:55:57 INFO SparkContext: Running Spark version 1.6.2 
16/07/06 00:55:57 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable 
16/07/06 00:55:58 INFO SecurityManager: Changing view acls to: sudeep 
16/07/06 00:55:58 INFO SecurityManager: Changing modify acls to: sudeep 
16/07/06 00:55:58 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(sudeep); users with modify permissions: Set(sudeep) 
Exception in thread "main" java.lang.NoSuchMethodError: org.apache.spark.network.util.JavaUtils.timeStringAsSec(Ljava/lang/String;)J 
    at org.apache.spark.util.Utils$.timeStringAsSeconds(Utils.scala:993) 
    at org.apache.spark.rpc.RpcTimeout$.apply(RpcTimeout.scala:128) 
    at org.apache.spark.util.RpcUtils$.lookupRpcTimeout(RpcUtils.scala:61) 
    at org.apache.spark.rpc.RpcEnv.<init>(RpcEnv.scala:69) 
    at org.apache.spark.rpc.netty.NettyRpcEnv.<init>(NettyRpcEnv.scala:43) 
    at org.apache.spark.rpc.netty.NettyRpcEnvFactory.create(NettyRpcEnv.scala:447) 
    at org.apache.spark.rpc.RpcEnv$.create(RpcEnv.scala:53) 
    at org.apache.spark.SparkEnv$.create(SparkEnv.scala:253) 
    at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:193) 
    at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:288) 
    at org.apache.spark.SparkContext.<init>(SparkContext.scala:457) 
    at org.apache.spark.api.java.JavaSparkContext.<init>(JavaSparkContext.scala:59) 
    at SparkMain.main(SparkMain.java:14) 
Disconnected from the target VM, address: '127.0.0.1:51826', transport: 'socket' 

Process finished with exit code 1 

Answer


Whenever you get a "ClassNotFound"-style error (or, as here, a NoSuchMethodError) in an IDE, you first need to confirm that it is not an IDE fault. This error typically occurs when the version of Spark the program was compiled against differs from the version on the runtime classpath.
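The SLF4J warnings in your log already hint at this: bindings are being picked up both from /Users/sudeep/spark/lib (what looks like a standalone Spark installation) and from the local Maven repository, so two different Spark builds are likely ending up on the classpath. One way to see which versions Maven itself resolves is the dependency tree (a diagnostic sketch; the -Dincludes filter is optional):

mvn dependency:tree -Dincludes=org.apache.spark
mvn dependency:tree -Dincludes=org.slf4j

If the tree shows a single consistent Spark version but the IDE run still fails, the IDE run configuration is adding extra jars from outside the pom.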

To rule out an IDE fault, run your program with the build tool itself (e.g., with Maven) instead of the IDE. If it works, then at least you know your pom file is correct. In that case, I would suggest creating a fresh project with fresh dependencies; your IDE may be using an old version of Spark.
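As a concrete starting point, here is a minimal sketch of a cleaned-up dependencies section, under two assumptions worth verifying against your project: com.sparkjava:spark-core is the Spark Java web framework, not Apache Spark, and can be dropped if unused; and spark-core_2.11 already pulls in matching Scala 2.11 and SLF4J artifacts transitively, so the explicit scala-* and slf4j-simple entries are redundant and can themselves conflict:

<dependencies>
    <!-- Apache Spark; transitively brings in matching Scala 2.11 and SLF4J jars -->
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-core_2.11</artifactId>
        <version>1.6.2</version>
    </dependency>

    <!-- Keep only if HDFS access is needed; the 2.2.0 pin is carried over from the original pom -->
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>2.2.0</version>
    </dependency>
</dependencies>

After trimming the pom, re-import the Maven project in IntelliJ so the module classpath is rebuilt from the pom rather than from any previously cached libraries.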