<dependencies>
    <dependency>
        <groupId>org.apache.spark</groupId>
        <!-- artifactId and version assumed here; use the Scala build and Spark version that match your cluster -->
        <artifactId>spark-core_2.12</artifactId>
        <version>3.5.0</version>
    </dependency>
</dependencies>
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;

public class MySparkApp {
    public static void main(String[] args) {
        // Configure the application name and run Spark locally on all available cores
        SparkConf conf = new SparkConf().setAppName("My Spark App").setMaster("local[*]");
        JavaSparkContext sparkContext = new JavaSparkContext(conf);
        // Shut the context down once the job is finished
        sparkContext.stop();
    }
}
import java.util.Arrays;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class MySparkApp {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("My Spark App").setMaster("local[*]");
        JavaSparkContext sparkContext = new JavaSparkContext(conf);
        // Read the input file, split each line into words, and keep only words starting with "S"
        JavaRDD<String> inputRDD = sparkContext.textFile("input.txt");
        JavaRDD<String> wordsRDD = inputRDD.flatMap(line -> Arrays.asList(line.split(" ")).iterator());
        JavaRDD<String> filteredRDD = wordsRDD.filter(word -> word.startsWith("S"));
        // saveAsTextFile writes a directory named "output.txt" containing one part file per partition
        filteredRDD.saveAsTextFile("output.txt");
        sparkContext.stop();
    }
}
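For a quick local check of the same pipeline, here is a minimal sketch (the class name PrintFilteredWords and the input.txt path are assumptions for illustration) that collects the filtered words back to the driver and prints them instead of writing an output directory; collect() is only appropriate when the result is small enough to fit in driver memory.

import java.util.Arrays;
import java.util.List;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class PrintFilteredWords {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf().setAppName("Print Filtered Words").setMaster("local[*]");
        JavaSparkContext sparkContext = new JavaSparkContext(conf);
        // Same transformations as above: split lines into words and keep words starting with "S"
        JavaRDD<String> filteredWords = sparkContext.textFile("input.txt")
                .flatMap(line -> Arrays.asList(line.split(" ")).iterator())
                .filter(word -> word.startsWith("S"));
        // collect() returns the RDD contents to the driver as a local List (small results only)
        List<String> result = filteredWords.collect();
        result.forEach(System.out::println);
        sparkContext.stop();
    }
}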