Spark 作为客户端
代码:
import org.apache.spark.SparkConf
import org.apache.spark.streaming.Seconds
import org.apache.spark.streaming.StreamingContext
/**
 * Spark Streaming word-count client.
 *
 * Connects to a text socket on localhost:9999, splits each received
 * line into words, and prints a per-batch word count every 10 seconds.
 */
object sparkStreaming extends Serializable {

  def main(args: Array[String]): Unit = {
    // Local mode with 2 threads: one for the receiver, one for processing.
    val sparkConf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("NetworkWordCount")

    // Micro-batch interval of 10 seconds.
    val streamingContext = new StreamingContext(sparkConf, Seconds(10))

    // Receive lines from the socket server, then count words per batch.
    val counts = streamingContext
      .socketTextStream("localhost", 9999)
      .flatMap(line => line.split(" "))
      .map(word => (word, 1))
      .reduceByKey((a, b) => a + b)

    counts.print()

    // Start the streaming job and block until it is terminated.
    streamingContext.start()
    streamingContext.awaitTermination()
  }
}
Java socket 作为服务器端
代码:
import java.io.IOException;
import java.io.PrintWriter;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
/**
 * Socket server for the Spark Streaming word-count client: accepts
 * connections on port 9999 and writes one line of random words to each.
 */
public class MysocketService {

    public static void main(String[] args) throws IOException, InterruptedException {
        MysocketService socketService = new MysocketService();
        socketService.oneServer();
    }

    /**
     * Server loop: every 10 seconds, submits a worker that accepts one
     * client connection and writes a single line of three random words
     * (space-separated) to it, then closes the connection.
     *
     * @throws IOException          if the server socket cannot be opened
     * @throws InterruptedException if the pacing sleep is interrupted
     */
    @SuppressWarnings("resource")
    public void oneServer() throws IOException, InterruptedException {
        final ServerSocket server = new ServerSocket(9999);
        Executor executor = Executors.newFixedThreadPool(100);
        // Vocabulary the server draws random words from.
        final List<String> list = new ArrayList<String>();
        list.add("aa");
        list.add("bb");
        list.add("cc");
        list.add("dd");
        list.add("ee");
        for (int i = 0; i < 100; i++) {
            // Pace the server: roughly one client connection every 10 seconds.
            Thread.sleep(10000);
            executor.execute(new Runnable() {
                @Override
                public void run() {
                    // try-with-resources closes both the socket and the writer;
                    // the original also called close() explicitly, which was redundant.
                    try (
                        // 与客户端建立连接 (accept a client connection)
                        Socket conn = server.accept();
                        PrintWriter writer = new PrintWriter(conn.getOutputStream());
                    ) {
                        System.out.println("启动线程");
                        Random r = new Random();
                        // BUG FIX: the original used nextInt(4), which yields 0..3 and
                        // could never select the last word ("ee"). Bound by list size
                        // so every word is reachable (and the code survives list edits).
                        String outPut = list.get(r.nextInt(list.size()))
                                + " " + list.get(r.nextInt(list.size()))
                                + " " + list.get(r.nextInt(list.size()));
                        // Send the generated line to the client.
                        writer.write(outPut);
                        writer.flush();
                        System.out.println("Server:" + outPut);
                    } catch (IOException e) {
                        // Was catch (Throwable): too broad — it would also swallow
                        // Errors like OutOfMemoryError. Only I/O failures are expected.
                        e.printStackTrace();
                    }
                }
            });
        }
    }
}