import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

public class SparkHadoopIntegrationExample {
    public static void main(String[] args) {
        // Properties prefixed with "spark.hadoop." are forwarded to the underlying Hadoop Configuration,
        // so fs.defaultFS here points Spark at the local HDFS NameNode.
        SparkConf conf = new SparkConf()
                .setAppName("SparkHadoopIntegrationExample")
                .set("spark.hadoop.fs.defaultFS", "hdfs://localhost:9000");
        JavaSparkContext sc = new JavaSparkContext(conf);

        // Read a text file from HDFS as an RDD of lines.
        JavaRDD<String> lines = sc.textFile("hdfs://localhost:9000/path/to/data.txt");

        // Count the lines and print the result.
        long numLines = lines.count();
        System.out.println("Number of lines: " + numLines);

        sc.stop();
    }
}
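
The example above only counts lines. As a minimal sketch of a fuller read-process-write cycle against HDFS, the following word-count variant splits each line into words, aggregates counts with reduceByKey, and writes the result back to HDFS with saveAsTextFile. The class name and the output path are hypothetical placeholders; adjust them to your cluster layout.

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import scala.Tuple2;

import java.util.Arrays;

public class SparkHadoopWordCountExample {
    public static void main(String[] args) {
        SparkConf conf = new SparkConf()
                .setAppName("SparkHadoopWordCountExample")
                .set("spark.hadoop.fs.defaultFS", "hdfs://localhost:9000");
        JavaSparkContext sc = new JavaSparkContext(conf);

        // Read the input file from HDFS.
        JavaRDD<String> lines = sc.textFile("hdfs://localhost:9000/path/to/data.txt");

        // Split lines into words and count occurrences of each word.
        JavaPairRDD<String, Integer> counts = lines
                .flatMap(line -> Arrays.asList(line.split("\\s+")).iterator())
                .mapToPair(word -> new Tuple2<>(word, 1))
                .reduceByKey(Integer::sum);

        // Write the results back to HDFS (output path is a placeholder; it must not already exist).
        counts.saveAsTextFile("hdfs://localhost:9000/path/to/wordcount-output");

        sc.stop();
    }
}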

