The relationship between Spark and Java

1. Spark depends on Java and Python.

(1) I tried to check the Python version with python -v and did not expect such a big reaction: it dropped me into the Python interactive prompt. (The lowercase -v is actually the verbose flag, which starts the interpreter and traces every import; python -V prints only the version.)
    [root@localhost spark]# python -v
# installing zipimport hook
import zipimport # builtin
# installed zipimport hook
# /usr/lib/python2.6/site.pyc matches /usr/lib/python2.6/site.py
import site # precompiled from /usr/lib/python2.6/site.pyc
# /usr/lib/python2.6/os.pyc matches /usr/lib/python2.6/os.py
import os # precompiled from /usr/lib/python2.6/os.pyc
import errno # builtin
import posix # builtin
# /usr/lib/python2.6/posixpath.pyc matches /usr/lib/python2.6/posixpath.py
import posixpath # precompiled from /usr/lib/python2.6/posixpath.pyc
# /usr/lib/python2.6/stat.pyc matches /usr/lib/python2.6/stat.py
import stat # precompiled from /usr/lib/python2.6/stat.pyc
# /usr/lib/python2.6/genericpath.pyc matches /usr/lib/python2.6/genericpath.py
import genericpath # precompiled from /usr/lib/python2.6/genericpath.pyc
# /usr/lib/python2.6/warnings.pyc matches /usr/lib/python2.6/warnings.py
import warnings # precompiled from /usr/lib/python2.6/warnings.pyc
# /usr/lib/python2.6/linecache.pyc matches /usr/lib/python2.6/linecache.py
import linecache # precompiled from /usr/lib/python2.6/linecache.pyc
# /usr/lib/python2.6/types.pyc matches /usr/lib/python2.6/types.py
import types # precompiled from /usr/lib/python2.6/types.pyc
# /usr/lib/python2.6/UserDict.pyc matches /usr/lib/python2.6/UserDict.py
import UserDict # precompiled from /usr/lib/python2.6/UserDict.pyc
# /usr/lib/python2.6/_abcoll.pyc matches /usr/lib/python2.6/_abcoll.py
import _abcoll # precompiled from /usr/lib/python2.6/_abcoll.pyc
# /usr/lib/python2.6/abc.pyc matches /usr/lib/python2.6/abc.py
import abc # precompiled from /usr/lib/python2.6/abc.pyc
# /usr/lib/python2.6/copy_reg.pyc matches /usr/lib/python2.6/copy_reg.py
import copy_reg # precompiled from /usr/lib/python2.6/copy_reg.pyc
# /usr/lib/python2.6/site-packages/abrt_exception_handler.pyc matches /usr/lib/python2.6/site-packages/abrt_exception_handler.py
import abrt_exception_handler # precompiled from /usr/lib/python2.6/site-packages/abrt_exception_handler.pyc
import encodings # directory /usr/lib/python2.6/encodings
# /usr/lib/python2.6/encodings/__init__.pyc matches /usr/lib/python2.6/encodings/__init__.py
import encodings # precompiled from /usr/lib/python2.6/encodings/__init__.pyc
# /usr/lib/python2.6/codecs.pyc matches /usr/lib/python2.6/codecs.py
import codecs # precompiled from /usr/lib/python2.6/codecs.pyc
import _codecs # builtin
# /usr/lib/python2.6/encodings/aliases.pyc matches /usr/lib/python2.6/encodings/aliases.py
import encodings.aliases # precompiled from /usr/lib/python2.6/encodings/aliases.pyc
# /usr/lib/python2.6/encodings/gb18030.pyc matches /usr/lib/python2.6/encodings/gb18030.py
import encodings.gb18030 # precompiled from /usr/lib/python2.6/encodings/gb18030.pyc
dlopen("/usr/lib/python2.6/lib-dynload/_codecs_cn.so", 2);
import _codecs_cn # dynamically loaded from /usr/lib/python2.6/lib-dynload/_codecs_cn.so
dlopen("/usr/lib/python2.6/lib-dynload/_multibytecodecmodule.so", 2);
import _multibytecodec # dynamically loaded from /usr/lib/python2.6/lib-dynload/_multibytecodecmodule.so
Python 2.6.6 (r266:84292, Jun 18 2012, 14:10:23)
[GCC 4.4.6 20110731 (Red Hat 4.4.6-3)] on linux2
Type "help", "copyright", "credits" or "license" for more information.
dlopen("/usr/lib/python2.6/lib-dynload/readline.so", 2);
import readline # dynamically loaded from /usr/lib/python2.6/lib-dynload/readline.so
>>>
(2) Ctrl+D exits the Python interactive mode; on the way out, the verbose interpreter also traces its cleanup:
# clear __builtin__._
# clear sys.path
# clear sys.argv
# clear sys.ps1
# clear sys.ps2
# clear sys.exitfunc
# clear sys.exc_type
# clear sys.exc_value
# clear sys.exc_traceback
# clear sys.last_type
# clear sys.last_value
# clear sys.last_traceback
# clear sys.path_hooks
# clear sys.path_importer_cache
# clear sys.meta_path
# clear sys.flags
# clear sys.float_info
# restore sys.stdin
# restore sys.stdout
# restore sys.stderr
# cleanup __main__
# cleanup[1] encodings
# cleanup[1] site
# cleanup[1] abc
# cleanup[1] _codecs
# cleanup[1] _warnings
# cleanup[1] zipimport
# cleanup[1] readline
# cleanup[1] encodings.gb18030
# cleanup[1] signal
# cleanup[1] posix
# cleanup[1] encodings.aliases
# cleanup[1] exceptions
# cleanup[1] abrt_exception_handler
# cleanup[1] _multibytecodec
# cleanup[1] codecs
# cleanup[1] _codecs_cn
# cleanup[2] copy_reg
# cleanup[2] posixpath
# cleanup[2] errno
# cleanup[2] _abcoll
# cleanup[2] types
# cleanup[2] genericpath
# cleanup[2] stat
# cleanup[2] warnings
# cleanup[2] UserDict
# cleanup[2] os.path
# cleanup[2] linecache
# cleanup[2] os
# cleanup sys
# cleanup __builtin__
# cleanup ints: 19 unfreed ints
# cleanup floats
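
A quicker way to check the versions Spark depends on, without starting the interpreter, is the capital -V flag (the Python version shown is the one this session reported above; the java -version output depends on the installed JDK):

python -V        # prints just the version string, Python 2.6.6 on this machine
java -version    # prints the installed JDK version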


2. Installing Spark is simple: on a single machine with a single node, unpacking the archive is essentially the whole deployment. It can run in many modes, though: the traditional ones such as local mode, pseudo-distributed mode and the standalone distributed cluster, plus newer ones such as running on Hadoop YARN. How the mode is picked at submit time is sketched below.
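
The mode is chosen when a job is submitted: run-example honours a MASTER environment variable, and spark-submit takes a --master option. A rough sketch of the common values (the host name, thread counts and application jar below are placeholders, and the YARN line assumes a configured Hadoop cluster):

MASTER=local[4] ./bin/run-example SparkPi 10                         # local mode, 4 worker threads
./bin/spark-submit --master local[2] <application jar>               # local mode via spark-submit
./bin/spark-submit --master spark://master:7077 <application jar>    # standalone cluster
./bin/spark-submit --master yarn-client <application jar>            # on Hadoop YARN (client deploy mode)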

3. The example code shipped with the Spark download all lives under the examples/src/main directory and can be invoked directly. Below, the SparkPi program (which estimates the value of pi) is run and its printed result is redirected to a file.

(1) The example code directory

[root@localhost bin]# ls
beeline             pyspark.cmd       spark-class.cmd  spark-shell2.cmd
beeline.cmd         run-example       Sparkpilog.txt   spark-shell.cmd
load-spark-env.cmd  run-example2.cmd  sparkR           spark-sql
load-spark-env.sh   run-example.cmd   sparkR2.cmd      spark-submit
pyspark             spark-class       sparkR.cmd       spark-submit2.cmd
pyspark2.cmd        spark-class2.cmd  spark-shell      spark-submit.cmd
[root@localhost bin]# cd ..
[root@localhost spark-1.6.1-bin-hadoop1]# ls
bin          data      lib       logs    R          sbin
CHANGES.txt  ec2       LICENSE   NOTICE  README.md  work
conf         examples  licenses  python  RELEASE
[root@localhost spark-1.6.1-bin-hadoop1]# cd examples
[root@localhost examples]# ls
src
[root@localhost examples]# cd src
[root@localhost src]# ls
main
[root@localhost src]# cd main
[root@localhost main]# ls
java  python  r  resources  scala
[root@localhost main]# cd java
[root@localhost java]# ls
org
[root@localhost java]# cd org
[root@localhost org]# ls
apache
[root@localhost org]# cd apache
[root@localhost apache]# ls
spark
[root@localhost apache]# cd saprk
bash: cd: saprk: No such file or directory
[root@localhost apache]# cd spark
[root@localhost spark]# ls
examples
[root@localhost spark]# ce examples
bash: ce: command not found

[root@localhost spark]# cd examples
[root@localhost examples]# ls
JavaHdfsLR.java    JavaSparkPi.java            JavaWordCount.java  sql
JavaLogQuery.java  JavaStatusTrackerDemo.java  ml                  streaming
JavaPageRank.java  JavaTC.java                 mllib

(2) Run the SparkPi program (which estimates pi) and redirect the printed result to a file: run-example SparkPi 10 > Sparkpilog.txt

[root@localhost spark-1.6.1-bin-hadoop1]# cd bin

[root@localhost bin]# ls
beeline             pyspark2.cmd      spark-class.cmd   spark-sql
beeline.cmd         pyspark.cmd       sparkR            spark-submit
derby.log           run-example       sparkR2.cmd       spark-submit2.cmd
load-spark-env.cmd  run-example2.cmd  sparkR.cmd        spark-submit.cmd
load-spark-env.sh   run-example.cmd   spark-shell
metastore_db        spark-class       spark-shell2.cmd
pyspark             spark-class2.cmd  spark-shell.cmd
[root@localhost bin]# run-example
Usage: ./bin/run-example <example-class> [example-args]
  - set MASTER=XX to use a specific master
  - can use abbreviated example class name relative to com.apache.spark.examples
     (e.g. SparkPi, mllib.LinearRegression, streaming.KinesisWordCountASL)
[root@localhost bin]# ls
beeline             pyspark2.cmd      spark-class.cmd   spark-sql
beeline.cmd         pyspark.cmd       sparkR            spark-submit
derby.log           run-example       sparkR2.cmd       spark-submit2.cmd
load-spark-env.cmd  run-example2.cmd  sparkR.cmd        spark-submit.cmd
load-spark-env.sh   run-example.cmd   spark-shell
metastore_db        spark-class       spark-shell2.cmd
pyspark             spark-class2.cmd  spark-shell.cmd


gedit Sparkpilog.txt
Result:
Pi is roughly 3.142888
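
run-example is essentially a thin wrapper that hands the bundled examples jar to spark-submit, so the same run can also be written out explicitly. A rough equivalent, using the examples jar shipped with this distribution (the path is the one reported in the logs of the next section) and an assumed local[2] master:

./bin/spark-submit --class org.apache.spark.examples.SparkPi --master local[2] \
  /usr/lib/spark/spark-1.6.1-bin-hadoop1/lib/spark-examples-1.6.1-hadoop1.2.1.jar 10 > Sparkpilog.txt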

4. Running WordCount in local mode
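
Before running it, it is worth seeing what JavaWordCount actually does. The sketch below reproduces the same pipeline against the Spark 1.6 Java API: textFile, flatMap, mapToPair, reduceByKey and collect, the same calls the log further down points at. It is a simplified sketch using Java 8 lambdas, not a copy of the shipped example file:

import java.util.Arrays;
import java.util.List;

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaPairRDD;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.api.java.JavaSparkContext;

import scala.Tuple2;

public class WordCountSketch {
  public static void main(String[] args) {
    SparkConf conf = new SparkConf().setAppName("WordCountSketch");
    JavaSparkContext sc = new JavaSparkContext(conf);

    // Read the input file (args[0], e.g. input/0.txt) as an RDD of lines.
    JavaRDD<String> lines = sc.textFile(args[0]);

    // Split every line into words, pair each word with 1, then sum the 1s per word.
    JavaRDD<String> words = lines.flatMap(line -> Arrays.asList(line.split(" ")));
    JavaPairRDD<String, Integer> counts =
        words.mapToPair(word -> new Tuple2<>(word, 1))
             .reduceByKey((a, b) -> a + b);

    // Collect the result to the driver and print it as "word: count" lines,
    // which is the format seen in output.txt below.
    List<Tuple2<String, Integer>> output = counts.collect();
    for (Tuple2<String, Integer> pair : output) {
      System.out.println(pair._1() + ": " + pair._2());
    }
    sc.stop();
  }
}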

[root@localhost bin]# run-example JavaWordCount input/0.txt > output.txt

16/04/30 20:18:47 INFO spark.SparkContext: Running Spark version 1.6.1
16/04/30 20:18:49 WARN util.Utils: Your hostname, localhost.localdomain resolves to a loopback address: 127.0.0.1; using 192.168.86.139 instead (on interface eth0)
16/04/30 20:18:49 WARN util.Utils: Set SPARK_LOCAL_IP if you need to bind to another address
16/04/30 20:18:49 INFO spark.SecurityManager: Changing view acls to: root
16/04/30 20:18:49 INFO spark.SecurityManager: Changing modify acls to: root
16/04/30 20:18:49 INFO spark.SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(root); users with modify permissions: Set(root)
16/04/30 20:18:52 INFO util.Utils: Successfully started service 'sparkDriver' on port 37197.
16/04/30 20:18:54 INFO slf4j.Slf4jLogger: Slf4jLogger started
16/04/30 20:18:54 INFO Remoting: Starting remoting
16/04/30 20:18:55 INFO Remoting: Remoting started; listening on addresses :[akka.tcp://sparkDriverActorSystem@192.168.86.139:52398]
16/04/30 20:18:55 INFO util.Utils: Successfully started service 'sparkDriverActorSystem' on port 52398.
16/04/30 20:18:55 INFO spark.SparkEnv: Registering MapOutputTracker
16/04/30 20:18:55 INFO spark.SparkEnv: Registering BlockManagerMaster
16/04/30 20:18:56 INFO storage.DiskBlockManager: Created local directory at /tmp/blockmgr-1538b230-e118-4cb8-ae03-69220d4f59e6
16/04/30 20:18:56 INFO storage.MemoryStore: MemoryStore started with capacity 517.4 MB
16/04/30 20:18:56 INFO spark.SparkEnv: Registering OutputCommitCoordinator
16/04/30 20:19:07 INFO server.Server: jetty-8.y.z-SNAPSHOT
16/04/30 20:19:08 INFO server.AbstractConnector: Started SelectChannelConnector@0.0.0.0:4040
16/04/30 20:19:08 INFO util.Utils: Successfully started service 'SparkUI' on port 4040.
16/04/30 20:19:08 INFO ui.SparkUI: Started SparkUI at http://192.168.86.139:4040
16/04/30 20:19:08 INFO spark.HttpFileServer: HTTP File server directory is /tmp/spark-debe3a1f-bbdf-40c3-981d-da572fea6a03/httpd-80fad9c1-c148-49e2-861c-e6fd1b17de5d
16/04/30 20:19:08 INFO spark.HttpServer: Starting HTTP Server
16/04/30 20:19:08 INFO server.Server: jetty-8.y.z-SNAPSHOT
16/04/30 20:19:08 INFO server.AbstractConnector: Started SocketConnector@0.0.0.0:52998
16/04/30 20:19:08 INFO util.Utils: Successfully started service 'HTTP file server' on port 52998.
16/04/30 20:19:12 INFO spark.SparkContext: Added JAR file:/usr/lib/spark/spark-1.6.1-bin-hadoop1/lib/spark-examples-1.6.1-hadoop1.2.1.jar at http://192.168.86.139:52998/jars/spark-examples-1.6.1-hadoop1.2.1.jar with timestamp 1462072752175
16/04/30 20:19:12 INFO executor.Executor: Starting executor ID driver on host localhost
16/04/30 20:19:12 INFO util.Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 34480.
16/04/30 20:19:12 INFO netty.NettyBlockTransferService: Server created on 34480
16/04/30 20:19:12 INFO storage.BlockManagerMaster: Trying to register BlockManager
16/04/30 20:19:12 INFO storage.BlockManagerMasterEndpoint: Registering block manager localhost:34480 with 517.4 MB RAM, BlockManagerId(driver, localhost, 34480)
16/04/30 20:19:12 INFO storage.BlockManagerMaster: Registered BlockManager
16/04/30 20:19:17 WARN util.SizeEstimator: Failed to check whether UseCompressedOops is set; assuming yes
16/04/30 20:19:17 INFO storage.MemoryStore: Block broadcast_0 stored as values in memory (estimated size 41.9 KB, free 41.9 KB)
16/04/30 20:19:17 INFO storage.MemoryStore: Block broadcast_0_piece0 stored as bytes in memory (estimated size 4.2 KB, free 46.1 KB)
16/04/30 20:19:17 INFO storage.BlockManagerInfo: Added broadcast_0_piece0 in memory on localhost:34480 (size: 4.2 KB, free: 517.4 MB)
16/04/30 20:19:17 INFO spark.SparkContext: Created broadcast 0 from textFile at JavaWordCount.java:45
16/04/30 20:19:29 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
16/04/30 20:19:29 WARN snappy.LoadSnappy: Snappy native library not loaded
16/04/30 20:19:29 INFO mapred.FileInputFormat: Total input paths to process : 1
16/04/30 20:19:38 INFO spark.SparkContext: Starting job: collect at JavaWordCount.java:68
16/04/30 20:19:39 INFO scheduler.DAGScheduler: Registering RDD 3 (mapToPair at JavaWordCount.java:54)
16/04/30 20:19:39 INFO scheduler.DAGScheduler: Got job 0 (collect at JavaWordCount.java:68) with 1 output partitions
16/04/30 20:19:39 INFO scheduler.DAGScheduler: Final stage: ResultStage 1 (collect at JavaWordCount.java:68)
16/04/30 20:19:39 INFO scheduler.DAGScheduler: Parents of final stage: List(ShuffleMapStage 0)
16/04/30 20:19:39 INFO scheduler.DAGScheduler: Missing parents: List(ShuffleMapStage 0)
16/04/30 20:19:39 INFO scheduler.DAGScheduler: Submitting ShuffleMapStage 0 (MapPartitionsRDD[3] at mapToPair at JavaWordCount.java:54), which has no missing parents
16/04/30 20:19:39 INFO storage.MemoryStore: Block broadcast_1 stored as values in memory (estimated size 4.8 KB, free 50.9 KB)
16/04/30 20:19:39 INFO storage.MemoryStore: Block broadcast_1_piece0 stored as bytes in memory (estimated size 2.6 KB, free 53.5 KB)
16/04/30 20:19:39 INFO storage.BlockManagerInfo: Added broadcast_1_piece0 in memory on localhost:34480 (size: 2.6 KB, free: 517.4 MB)
16/04/30 20:19:39 INFO spark.SparkContext: Created broadcast 1 from broadcast at DAGScheduler.scala:1006
16/04/30 20:19:39 INFO scheduler.DAGScheduler: Submitting 1 missing tasks from ShuffleMapStage 0 (MapPartitionsRDD[3] at mapToPair at JavaWordCount.java:54)
16/04/30 20:19:39 INFO scheduler.TaskSchedulerImpl: Adding task set 0.0 with 1 tasks
16/04/30 20:19:40 INFO scheduler.TaskSetManager: Starting task 0.0 in stage 0.0 (TID 0, localhost, partition 0,PROCESS_LOCAL, 2219 bytes)
16/04/30 20:19:40 INFO executor.Executor: Running task 0.0 in stage 0.0 (TID 0)
16/04/30 20:19:40 INFO executor.Executor: Fetching http://192.168.86.139:52998/jars/spark-examples-1.6.1-hadoop1.2.1.jar with timestamp 1462072752175
16/04/30 20:19:40 INFO util.Utils: Fetching http://192.168.86.139:52998/jars/spark-examples-1.6.1-hadoop1.2.1.jar to /tmp/spark-debe3a1f-bbdf-40c3-981d-da572fea6a03/userFiles-0a6c7248-3bcd-4b94-b5ed-cb3f1394e1e9/fetchFileTemp4496955273132858252.tmp
16/04/30 20:19:47 INFO executor.Executor: Adding file:/tmp/spark-debe3a1f-bbdf-40c3-981d-da572fea6a03/userFiles-0a6c7248-3bcd-4b94-b5ed-cb3f1394e1e9/spark-examples-1.6.1-hadoop1.2.1.jar to class loader
16/04/30 20:19:47 INFO rdd.HadoopRDD: Input split: hdfs://192.168.86.139:9000/user/root/input/0.txt:0+0
16/04/30 20:19:48 INFO executor.Executor: Finished task 0.0 in stage 0.0 (TID 0). 2253 bytes result sent to driver
16/04/30 20:19:49 INFO scheduler.TaskSetManager: Finished task 0.0 in stage 0.0 (TID 0) in 9059 ms on localhost (1/1)
16/04/30 20:19:49 INFO scheduler.TaskSchedulerImpl: Removed TaskSet 0.0, whose tasks have all completed, from pool
16/04/30 20:19:49 INFO scheduler.DAGScheduler: ShuffleMapStage 0 (mapToPair at JavaWordCount.java:54) finished in 9.298 s
16/04/30 20:19:49 INFO scheduler.DAGScheduler: looking for newly runnable stages
16/04/30 20:19:49 INFO scheduler.DAGScheduler: running: Set()
16/04/30 20:19:49 INFO scheduler.DAGScheduler: waiting: Set(ResultStage 1)
16/04/30 20:19:49 INFO scheduler.DAGScheduler: failed: Set()
16/04/30 20:19:49 INFO scheduler.DAGScheduler: Submitting ResultStage 1 (ShuffledRDD[4] at reduceByKey at JavaWordCount.java:61), which has no missing parents
16/04/30 20:19:49 INFO storage.MemoryStore: Block broadcast_2 stored as values in memory (estimated size 2.9 KB, free 56.4 KB)
16/04/30 20:19:49 INFO storage.MemoryStore: Block broadcast_2_piece0 stored as bytes in memory (estimated size 1752.0 B, free 58.2 KB)
16/04/30 20:19:49 INFO storage.BlockManagerInfo: Added broadcast_2_piece0 in memory on localhost:34480 (size: 1752.0 B, free: 517.4 MB)
16/04/30 20:19:49 INFO spark.SparkContext: Created broadcast 2 from broadcast at DAGScheduler.scala:1006
16/04/30 20:19:49 INFO scheduler.DAGScheduler: Submitting 1 missing tasks from ResultStage 1 (ShuffledRDD[4] at reduceByKey at JavaWordCount.java:61)
16/04/30 20:19:49 INFO scheduler.TaskSchedulerImpl: Adding task set 1.0 with 1 tasks
16/04/30 20:19:49 INFO scheduler.TaskSetManager: Starting task 0.0 in stage 1.0 (TID 1, localhost, partition 0,PROCESS_LOCAL, 1973 bytes)
16/04/30 20:19:49 INFO executor.Executor: Running task 0.0 in stage 1.0 (TID 1)
16/04/30 20:19:49 INFO storage.ShuffleBlockFetcherIterator: Getting 0 non-empty blocks out of 1 blocks
16/04/30 20:19:49 INFO storage.ShuffleBlockFetcherIterator: Started 0 remote fetches in 89 ms
16/04/30 20:19:49 INFO executor.Executor: Finished task 0.0 in stage 1.0 (TID 1). 1161 bytes result sent to driver
16/04/30 20:19:50 INFO scheduler.DAGScheduler: ResultStage 1 (collect at JavaWordCount.java:68) finished in 0.554 s
16/04/30 20:19:50 INFO scheduler.TaskSetManager: Finished task 0.0 in stage 1.0 (TID 1) in 555 ms on localhost (1/1)
16/04/30 20:19:50 INFO scheduler.TaskSchedulerImpl: Removed TaskSet 1.0, whose tasks have all completed, from pool
16/04/30 20:19:50 INFO scheduler.DAGScheduler: Job 0 finished: collect at JavaWordCount.java:68, took 11.299262 s
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/metrics/json,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage/kill,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/api,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/static,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/threadDump,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors/json,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/executors,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment/json,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/environment,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/rdd/json,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/rdd,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage/json,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/storage,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/pool/json,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/pool,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage/json,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/stage,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages/json,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/stages,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/job/json,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/job,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs/json,null}
16/04/30 20:19:50 INFO handler.ContextHandler: stopped o.s.j.s.ServletContextHandler{/jobs,null}
16/04/30 20:19:50 INFO ui.SparkUI: Stopped Spark web UI at http://192.168.86.139:4040
16/04/30 20:19:50 INFO spark.MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
16/04/30 20:19:50 INFO storage.MemoryStore: MemoryStore cleared
16/04/30 20:19:50 INFO storage.BlockManager: BlockManager stopped
16/04/30 20:19:50 INFO storage.BlockManagerMaster: BlockManagerMaster stopped
16/04/30 20:19:50 INFO scheduler.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
16/04/30 20:19:50 INFO spark.SparkContext: Successfully stopped SparkContext
16/04/30 20:19:50 INFO remote.RemoteActorRefProvider$RemotingTerminator: Shutting down remote daemon.
16/04/30 20:19:50 INFO util.ShutdownHookManager: Shutdown hook called
16/04/30 20:19:51 INFO util.ShutdownHookManager: Deleting directory /tmp/spark-debe3a1f-bbdf-40c3-981d-da572fea6a03
16/04/30 20:19:51 INFO remote.RemoteActorRefProvider$RemotingTerminator: Remote daemon shut down; proceeding with flushing remote transports.
16/04/30 20:19:51 INFO util.ShutdownHookManager: Deleting directory /tmp/spark-debe3a1f-bbdf-40c3-981d-da572fea6a03/httpd-80fad9c1-c148-49e2-861c-e6fd1b17de5d


[root@localhost spark-1.6.1-bin-hadoop1]# cd bin
[root@localhost bin]# ls
beeline             pyspark.cmd       Sparkpilog.txt    spark-sql
beeline.cmd         run-example       sparkR            spark-submit
load-spark-env.cmd  run-example2.cmd  sparkR2.cmd       spark-submit2.cmd
load-spark-env.sh   run-example.cmd   sparkR.cmd        spark-submit.cmd
output.txt          spark-class       spark-shell
pyspark             spark-class2.cmd  spark-shell2.cmd
pyspark2.cmd        spark-class.cmd   spark-shell.cmd
[root@localhost bin]# gedit output.txt


started;: 1
scala: 1
created: 1
pyspark.cmd: 2
load-spark-env.cmd: 2
spark: 2
sparkR2.cmd: 2
listening: 1
'sparkDriver': 1
[root@localhost: 24
MapOutputTracker: 1
Remoting: 1
spark-submit2.cmd: 2
o.s.j.s.ServletContextHandler{/executors,null}: 1
o.s.j.s.ServletContextHandler{/stages/stage,null}: 1
WARN: 5
broadcast_0: 1
ce:: 1
ID: 1
instead: 1
47539.: 1
o.s.j.s.ServletContextHandler{/static,null}: 1
version: 1
file: 2
Unable: 1
stop(): 1
address:: 1
org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206): 1
cd:: 1
host: 1
shut: 1
timestamp: 1
server.Server:: 2
46.1: 1
JavaWordCount: 1
org.apache.hadoop.mapred.FileInputFormat.listStatus(FileInputFormat.java:197): 1
values: 1
http://192.168.86.139:43838/jars/spark-examples-1.6.1-hadoop1.2.1.jar: 1
ec2: 1
org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237): 4
java: 2
Added: 2
JAR: 1
ml: 1
localhost: 1
/tmp/blockmgr-77d5002a-93cb-4d9c-849d-51a41b7c927f: 1
SelectChannelConnector@0.0.0.0:4040: 1
proceeding: 1
size: 2
Sparkpilog.txt: 3
spark]#: 3
o.s.j.s.ServletContextHandler{/storage,null}: 1
localhost:34253: 2
found: 1
KB): 2
Successfully: 6
CHANGES.txt: 1
Deleting: 2
517.4: 3
19:58:55: 3
Registering: 4
spark-class.cmd: 2
examples: 5
at: 5
o.s.j.s.ServletContextHandler{/jobs/job,null}: 1
util.Utils:: 7
localhost,: 1
Block: 2
JavaWordCount.java:45: 1
o.s.j.s.ServletContextHandler{/api,null}: 1
'sparkDriverActorSystem': 1
UI: 1
mllib: 1
o.s.j.s.ServletContextHandler{/storage/rdd,null}: 1
org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239): 4
spark-sql: 2
stopped!: 2
disabled;: 2
(estimated: 2
command: 1
main: 2
cd: 11
spark-shell: 2
or: 1
org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181): 1
need: 1
storage.BlockManager:: 1
Remoting:: 2
http://192.168.86.139:4040: 2
eth0): 1
o.s.j.s.ServletContextHandler{/jobs/job/json,null}: 1
does: 1
JavaStatusTrackerDemo.java: 1
'org.apache.spark.network.netty.NettyBlockTransferService': 1
spark-submit.cmd: 2
RELEASE: 1
storage.BlockManagerMaster:: 3
native: 1
    at: 34
remote: 2
remoting: 1
127.0.0.1;: 1
JavaPageRank.java: 1
util.ShutdownHookManager:: 3
Stopped: 1
INFO: 72
No: 1
org: 2
main]#: 2
for: 1
o.s.j.s.ServletContextHandler{/stages/pool/json,null}: 1
Slf4jLogger: 1
capacity: 1
resources: 1
OutputCommitCoordinator: 2
/tmp/spark-a9679d37-7536-45fd-87de-12917189c3e7: 1
localhost.localdomain: 1
BlockManagerId(driver,: 1
loaded: 1
SocketConnector@0.0.0.0:43838: 1
run-example: 3
broadcast: 1
resolves: 1
executor.Executor:: 1
19:59:10: 2
34253): 1
57283.: 1
sun.reflect.NativeMethodAccessorImpl.invoke0(Native: 1
started: 7
MB): 1
file:/usr/lib/spark/spark-1.6.1-bin-hadoop1/lib/spark-examples-1.6.1-hadoop1.2.1.jar: 1
..: 1
Method): 1
builtin-java: 1
r: 1
free:: 1
exist:: 1
broadcast_0_piece0: 2
org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35): 3
Snappy: 1
ls: 11
Invoking: 1
Registered: 1
bind: 1
spark.SecurityManager:: 3
spark-shell.cmd: 2
modify: 2
scheduler.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint:: 1
classes: 1
RAM,: 1
storage.DiskBlockManager:: 1
1462071550387: 1
service: 5
in: 4
'HTTP: 1
19:58:52: 1
JavaLogQuery.java: 1
Failed: 1
server': 1
shutdown: 1
from: 2
Running: 1
spark.SparkContext:: 5
src]#: 2
Exception: 1
Set: 1
34253: 1
UseCompressedOops: 1
interface: 1
textFile: 1
spark.HttpFileServer:: 1
./wordcountdata.txt: 1
o.s.j.s.ServletContextHandler{/jobs/json,null}: 1
storage.BlockManagerInfo:: 1
pyspark: 2
JavaHdfsLR.java: 1
"main": 1
bin: 2
Input: 1
org.apache.spark.api.java.JavaPairRDD.reduceByKey(JavaPairRDD.scala:526): 1
org.apache.spark.rdd.HadoopRDD.getPartitions(HadoopRDD.scala:199): 1
util.SizeEstimator:: 1
Remote: 1
applicable: 1
beeline: 2
: 304
apache]#: 3
SPARK_LOCAL_IP: 1
users: 2
19:59:11: 5
flushing: 1
o.s.j.s.ServletContextHandler{/,null}: 1
16/04/30: 77
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43): 1
org.apache.spark.Partitioner$.defaultPartitioner(Partitioner.scala:65): 1
19:58:49: 1
netty.NettyBlockTransferService:: 1
o.s.j.s.ServletContextHandler{/metrics/json,null}: 1
o.s.j.s.ServletContextHandler{/stages/pool,null}: 1
org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala): 1
conf: 1
register: 1
such: 1
4040.: 1
spark-class: 2
JavaSparkPi.java: 1
sparkR: 2
scala.Option.getOrElse(Option.scala:120): 4
hook: 2
Trying: 1
org]#: 2
ui.SparkUI:: 2
Changing: 2
set;: 1
o.s.j.s.ServletContextHandler{/executors/json,null}: 1
remote.RemoteActorRefProvider$RemotingTerminator:: 2
called: 1
sbin: 1
acls: 3
free: 2
licenses: 1
not: 3
java]#: 2
address: 1
if: 1
transports.: 1
ce: 1
thread: 1
as: 2
o.s.j.s.ServletContextHandler{/stages/json,null}: 1
File: 1
slf4j.Slf4jLogger:: 1
storage.MemoryStore:: 4
41.9: 2
o.s.j.s.ServletContextHandler{/storage/json,null}: 1
stored: 2
o.s.j.s.ServletContextHandler{/jobs,null}: 1
authentication: 1
is: 2
R: 1
spark-class2.cmd: 2
sparkR.cmd: 2
on: 9
check: 1
JavaTC.java: 1
4.2: 2
down;: 1
spark-1.6.1-bin-hadoop1]#: 3
spark.MapOutputTrackerMasterEndpoint:: 1
org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731): 1
using: 2
beeline.cmd: 2
LICENSE: 1
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57): 1
/tmp/spark-a9679d37-7536-45fd-87de-12917189c3e7/httpd-11d327ec-8c66-4c85-bcd2-698a68f3a5c4: 2
root: 2
src: 2
native-hadoop: 1
BlockManager: 3
o.s.j.s.ServletContextHandler{/environment,null}: 1
saprk:: 1
HTTP: 2
to:: 2
ui: 1
directory: 5
executor: 1
NOTICE: 1
Started: 3
lib: 1
bin]#: 4
logs: 1
MapOutputTrackerMasterEndpoint: 1
apache: 2
(on: 1
192.168.86.139: 1
server.AbstractConnector:: 2
spark-shell2.cmd: 2
'SparkUI': 1
Starting: 3
:[akka.tcp://sparkDriverActorSystem@192.168.86.139:47539]: 1
o.s.j.s.ServletContextHandler{/stages/stage/json,null}: 1
Spark: 2
jetty-8.y.z-SNAPSHOT: 2
platform...: 1
hostname,: 1
permissions:: 2
addresses: 1
library: 2
saprk: 1
19:59:14: 5
README.md: 1
1.6.1: 1
19:59:07: 9
loopback: 1
whether: 1
19:58:56: 4
org.apache.hadoop.mapred.FileInputFormat.getSplits(FileInputFormat.java:208): 1
bash:: 2
Server: 2
o.s.j.s.ServletContextHandler{/executors/threadDump/json,null}: 1
cleared: 1
sql: 1
your: 1
SparkContext: 1
load-spark-env.sh: 2
org.apache.hadoop.mapred.InvalidInputException:: 1
load: 1
Shutdown: 1
run-example.cmd: 2
spark.HttpServer:: 1
>: 1
Shutting: 1
19:58:54: 2
assuming: 1
path: 1
JavaWordCount.java: 1
o.s.j.s.ServletContextHandler{/executors/threadDump,null}: 1
streaming: 1
snappy.LoadSnappy:: 1
43838.: 1
Created: 2
driver: 1
BlockManagerMaster: 2
o.s.j.s.ServletContextHandler{/stages,null}: 1
util.NativeCodeLoader:: 1
o.s.j.s.ServletContextHandler{/storage/rdd/json,null}: 1
stopped: 28
storage.BlockManagerMasterEndpoint:: 1
19:58:50: 5
handler.ContextHandler:: 25
with: 6
data: 1
java.lang.reflect.Method.invoke(Method.java:606): 1
org.apache.spark.rdd.RDD.partitions(RDD.scala:237): 4
SparkUI: 1
Set(root): 1
19:59:26: 34
block: 1
hdfs://192.168.86.139:9000/user/root/Sparkpilog.txt: 1
19:59:27: 6
bytes: 1
port: 5
Set(root);: 1
view: 2
(size:: 1
daemon.: 1
server: 1
you: 1
org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121): 1
yes: 1
another: 1
spark-submit: 2
a: 1
memory: 3
SecurityManager:: 1
down: 1
0: 1
work: 1
local: 1
manager: 1
python: 2
to: 6
spark.SparkEnv:: 3
org.apache.spark.examples.JavaWordCount.main(JavaWordCount.java:61): 1
web: 1
MB: 2
run-example2.cmd: 2
Your: 1
where: 1
o.s.j.s.ServletContextHandler{/environment/json,null}: 1
daemon: 1
pyspark2.cmd: 2
KB,: 3
MemoryStore: 2
o.s.j.s.ServletContextHandler{/stages/stage/kill,null}: 1
examples]#: 3
34253.: 1

