用 Java 写的 Spark 项目,代码如下。用 spark-submit 运行打包后的 jar 时,提示"缺少应用程序资源"(missing application resource)错误:
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.Function;
/**
 * Minimal Spark word-filter example: reads a local text file and counts
 * the lines containing "yes", the lines containing "ywq", and all lines.
 *
 * Run locally (master = "local") or package with Maven and submit via
 * spark-submit, in which case the master set here is overridden.
 */
public class SimpleApp {
    public static void main(String[] args) {
        // "local" runs Spark in-process with one worker thread.
        SparkConf conf = new SparkConf().setAppName("wordCount").setMaster("local");
        JavaSparkContext sc = new JavaSparkContext(conf);
        try {
            JavaRDD<String> input =
                    sc.textFile("/bigdata/softwares/spark-2.1.0-bin-hadoop2.7/testdata/a.txt");

            // Count lines containing "yes".
            long bCount = input.filter(new Function<String, Boolean>() {
                public Boolean call(String s) {
                    return s.contains("yes");
                }
            }).count();

            // Count lines containing "ywq".
            long cCount = input.filter(new Function<String, Boolean>() {
                public Boolean call(String s) {
                    return s.contains("ywq");
                }
            }).count();

            // Total number of lines — the original printed "all:" with no value.
            long allCount = input.count();

            System.out.println("yes:" + bCount + " ywq:" + cCount + " all:" + allCount);
        } finally {
            // Always release the SparkContext, even if the job throws;
            // the original left this commented out and leaked the context.
            sc.stop();
        }
    }
}
pom.xml 的相关配置如下:
<dependencies>
    <!-- Spark dependency. Scope "provided" keeps Spark out of the shaded jar:
         spark-submit already puts the Spark runtime on the classpath, and
         bundling it bloats the jar and can cause classpath conflicts. -->
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-core_2.11</artifactId>
        <version>2.1.0</version>
        <scope>provided</scope>
    </dependency>
</dependencies>
<build>
    <plugins>
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-shade-plugin</artifactId>
            <version>2.3</version>
            <executions>
                <execution>
                    <phase>package</phase>
                    <goals>
                        <goal>shade</goal>
                    </goals>
                    <configuration>
                        <!-- Strip signature files copied in from signed dependency
                             jars; if left in, running the shaded jar fails with
                             "Invalid signature file digest for Manifest main
                             attributes". -->
                        <filters>
                            <filter>
                                <artifact>*:*</artifact>
                                <excludes>
                                    <exclude>META-INF/*.SF</exclude>
                                    <exclude>META-INF/*.DSA</exclude>
                                    <exclude>META-INF/*.RSA</exclude>
                                </excludes>
                            </filter>
                        </filters>
                    </configuration>
                </execution>
            </executions>
        </plugin>
    </plugins>
</build>
用 Maven 把所有资源打包成 jar 文件后,运行时报出了以下错误(错误截图见下)。我刚开始学习 Spark,请各位指教,谢谢。