// This sample demonstrates reading data from MySQL using Spark (JdbcRDD).
import java.io.Serializable;
import java.sql.*;
import java.util.Properties;
import com.alibaba.fastjson.JSONObject;
import com.yhxd.einvoicegd.InvoiceAPI;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.api.java.function.VoidFunction;
import org.apache.spark.rdd.JdbcRDD;
import scala.reflect.ClassManifestFactory$;
import scala.runtime.AbstractFunction0;
import scala.runtime.AbstractFunction1;
public class Main {
private static final org.apache.log4j.Logger LOGGER = org.apache.log4j.Logger.getLogger(Main.class);
private static final JavaSparkContext sc =
new JavaSparkContext(new SparkConf().setAppName("SparkJdbc").setMaster("local[*]"));
<