import java.util.Arrays;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class SparkExample {
    public static void main(String[] args) {
        // Configure Spark to run locally with the application name "SparkExample".
        SparkConf conf = new SparkConf().setAppName("SparkExample").setMaster("local");
        JavaSparkContext sc = new JavaSparkContext(conf);

        // Read the input file as an RDD of lines.
        JavaRDD<String> lines = sc.textFile("input.txt");

        // Split each line on spaces, then parse every token as an integer.
        JavaRDD<Integer> numbers = lines
                .flatMap(line -> Arrays.asList(line.split(" ")).iterator())
                .map(Integer::parseInt);

        // Sum all values with a reduce and print the result.
        Integer sum = numbers.reduce((a, b) -> a + b);
        System.out.println("Sum: " + sum);

        sc.close();
    }
}
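As a usage note: assuming input.txt contains whitespace-separated integers, for example a single line reading 1 2 3 4 5, the job prints Sum: 15. Be aware that an empty line or a non-numeric token will cause Integer.parseInt to throw a NumberFormatException when the job executes. Once the class is packaged into a jar (spark-example.jar is a hypothetical name here), it could be run with spark-submit roughly like this:

spark-submit --class SparkExample --master local spark-example.jar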

