import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.RowFactory;
import org.apache.spark.sql.SparkSession;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.Metadata;
import org.apache.spark.sql.types.StructField;
import org.apache.spark.sql.types.StructType;

import java.util.ArrayList;
import java.util.List;
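
/**
 * Minimal example: build a DataFrame from a JavaRDD&lt;Row&gt; with an explicit
 * schema and print it. Each of the 10 input integers is mapped to the same
 * sample row, so show() prints 10 identical rows.
 */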
public class Demo {
    public static void main(String[] args) {
        // Single entry point: build the session first, then derive the
        // JavaSparkContext from it instead of constructing a second context
        // from a separate SparkConf.
        SparkSession spark = SparkSession
                .builder()
                .appName("Java Spark SQL basic example")
                .master("local[*]")
                .getOrCreate();
        JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext());
        List<Integer> list = new ArrayList<>();
        for (int i = 0; i < 10; i++) {
            list.add(i);
        }
        JavaRDD<Integer> s1 = jsc.parallelize(list);
        // Map every integer to the same sample row; JavaRDD<Row> (not the
        // raw JavaRDD type) is what createDataFrame expects.
        JavaRDD<Row> r1 = s1.map(i -> RowFactory.create("zhangsan", "10", "beijing"));
        // Schema matching the three fields of each row; age is kept as a
        // string to match the sample data.
        StructType schema = new StructType(new StructField[]{
                new StructField("name", DataTypes.StringType, false, Metadata.empty()),
                new StructField("age", DataTypes.StringType, false, Metadata.empty()),
                new StructField("area", DataTypes.StringType, false, Metadata.empty())
        });
        spark.createDataFrame(r1, schema).show();
        spark.stop();
    }
}
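
// To run locally (a sketch; the jar path assumes a typical Maven layout):
//   spark-submit --class Demo target/demo.jar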