package com.chb.shopanalysis;
import java.util.Properties;
import org.apache.log4j.Logger;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.DataFrame;
import org.apache.spark.sql.SQLContext;
import org.apache.spark.sql.SaveMode;
public class Spark2Mysql {

    // File already imports org.apache.log4j.Logger (previously unused).
    private static final Logger LOG = Logger.getLogger(Spark2Mysql.class);

    /**
     * Demo: read a MySQL table into a DataFrame and append it back via JDBC
     * (Spark 1.x DataFrame API).
     *
     * @param args optional overrides: [url, table, user, password]; each
     *             falls back to the original demo default when absent.
     */
    public static void main(String[] args) {
        SparkConf sparkConf = new SparkConf();
        sparkConf.setAppName("Spark2Mysql");
        sparkConf.setMaster("local[5]"); // local mode, 5 worker threads
        JavaSparkContext sc = null;
        try {
            sc = new JavaSparkContext(sparkConf);
            SQLContext sqlContext = new SQLContext(sc);

            // JDBC connection parameters. CLI args may override the demo
            // defaults so real credentials never have to be hard-coded.
            String url = args.length > 0 ? args[0]
                    : "jdbc:mysql://localhost:3307/shopanalysis?useSSL=false";
            String table = args.length > 1 ? args[1] : "demo3";
            Properties connProps = new Properties();
            connProps.setProperty("user", args.length > 2 ? args[2] : "root");
            connProps.setProperty("password", args.length > 3 ? args[3] : "root");
            // The table name is already passed as the jdbc() argument, so the
            // former redundant "dbtable" connection property was dropped.

            // Read the table into a DataFrame.
            DataFrame jdbcDF = sqlContext.read().jdbc(url, table, connProps);
            jdbcDF.show();

            // =================== write data ===================
            // Save modes:
            //   SaveMode.Append:        append rows to the existing data source;
            //   SaveMode.Overwrite:     replace existing records;
            //   SaveMode.ErrorIfExists: throw if records already exist;
            //   SaveMode.Ignore:        skip the write if records already exist.
            // NOTE(review): appending the rows just read back into the SAME table
            // duplicates its contents on every run — acceptable only as a demo.
            jdbcDF.write().mode(SaveMode.Append).jdbc(url, table, connProps);
        } catch (Exception e) {
            // Log with the cause preserved instead of printStackTrace().
            LOG.error("Spark2Mysql job failed", e);
        } finally {
            if (sc != null) {
                sc.stop(); // always release the SparkContext
            }
        }
    }
}
// Spark reading/writing data against a MySQL database.