[hadoop@master Desktop]$ cd
[hadoop@master ~]$ cd /usr/local/spark
[hadoop@master spark]$ bin/run-example SparkPi
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/usr/local/spark/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/local/hadoop/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
20/04/15 13:08:01 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
20/04/15 13:08:02 INFO spark.SparkContext: Running Spark version 2.4.5
20/04/15 13:08:03 INFO spark.SparkContext: Submitted application: Spark Pi
20/04/15 13:08:03 INFO spark.SecurityManager: Changing view acls to: hadoop
20/04/15 13:08:03 INFO spark.SecurityManager: Changing modify acls to: hadoop
20/04/15 13:08:03 INFO spark.SecurityManager: Changing view acls groups to:
20/04/15 13:08:03 INFO spark.SecurityManager: Changing modify acls groups to:
20/04/15 13:08:03 INFO spark.SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(hadoop); groups with view permissions: Set(); users with modify permissions: Set(hadoop); groups with modify permissions: Set()
20/04/15 13:08:03 INFO util.Utils: Successfully started service 'sparkDriver' on port 60073.
20/04/15 13:08:03 INFO spark.SparkEnv: Registering MapOutputTracker
20/04/15 13:08:04 INFO spark.SparkEnv: Registering BlockManagerMaster
20/04/15 13:08:04 INFO storage.BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information
20/04/15 13:08:04 INFO storage.BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up
20/04/15 13:08:04 INFO storage.DiskBlockManager: Created local directory at /tmp/blockmgr-c6b26eca-e3a2-42e1-8e19-114ad90224c9
20/04/15 13:08:04 INFO memory.MemoryStore: MemoryStore started with capacity 413.9 MB
20/04/15 13:08:04 INFO spark.SparkEnv: Registering OutputCommitCoordinator
20/04/15 13:08:04 INFO util.log: Logging initialized @4870ms
20/04/15 13:08:04 INFO server.Server: jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown
20/04/15 13:08:04 INFO server.Server: Started @5043ms
20/04/15 13:08:04 INFO server.AbstractConnector: Started ServerConnector@43cf6ea3{HTTP/1.1,[http/1.1]}{192.168.58.132:4040}
20/04/15 13:08:04 INFO util.Utils: Successfully started service 'SparkUI' on port 4040.
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@63e5e5b4{/jobs,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@b558294{/jobs/json,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@bb095{/jobs/job,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@27aae97b{/jobs/job/json,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@4c9e38{/stages,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@5d1e09bc{/stages/json,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@4bdc8b5d{/stages/stage,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@726a17c4{/stages/stage/json,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@5dc3fcb7{/stages/pool,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@c4c0b41{/stages/pool/json,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@76911385{/storage,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@5467eea4{/storage/json,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@160396db{/storage/rdd,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@7a799159{/storage/rdd/json,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@40ab8a8{/environment,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@6ff37443{/environment/json,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@65cc8228{/executors,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@53093491{/executors/json,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@21719a0{/executors/threadDump,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@76b224cd{/executors/threadDump/json,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@77ee25f1{/static,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@340b7ef6{/,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@30404dba{/api,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@11841b15{/jobs/job/kill,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@73877e19{/stages/stage/kill,null,AVAILABLE,@Spark}
20/04/15 13:08:04 INFO ui.SparkUI: Bound SparkUI to 192.168.58.132, and started at http://master:4040
20/04/15 13:08:04 INFO spark.SparkContext: Added JAR file:///usr/local/spark/examples/jars/spark-examples_2.11-2.4.5.jar at spark://master:60073/jars/spark-examples_2.11-2.4.5.jar with timestamp 1586927284748
20/04/15 13:08:04 INFO spark.SparkContext: Added JAR file:///usr/local/spark/examples/jars/scopt_2.11-3.7.0.jar at spark://master:60073/jars/scopt_2.11-3.7.0.jar with timestamp 1586927284748
20/04/15 13:08:04 INFO executor.Executor: Starting executor ID driver on host localhost
20/04/15 13:08:05 INFO util.Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 35188.
20/04/15 13:08:05 INFO netty.NettyBlockTransferService: Server created on master:35188
20/04/15 13:08:05 INFO storage.BlockManager: Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy
20/04/15 13:08:05 INFO storage.BlockManagerMaster: Registering BlockManager BlockManagerId(driver, master, 35188, None)
20/04/15 13:08:05 INFO storage.BlockManagerMasterEndpoint: Registering block manager master:35188 with 413.9 MB RAM, BlockManagerId(driver, master, 35188, None)
20/04/15 13:08:05 INFO storage.BlockManagerMaster: Registered BlockManager BlockManagerId(driver, master, 35188, None)
20/04/15 13:08:05 INFO storage.BlockManager: Initialized BlockManager: BlockManagerId(driver, master, 35188, None)
20/04/15 13:08:05 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@5e5073ab{/metrics/json,null,AVAILABLE,@Spark}
20/04/15 13:08:07 INFO spark.SparkContext: Starting job: reduce at SparkPi.scala:38
20/04/15 13:08:07 INFO scheduler.DAGScheduler: Got job 0 (reduce at SparkPi.scala:38) with 2 output partitions
20/04/15 13:08:07 INFO scheduler.DAGScheduler: Final stage: ResultStage 0 (reduce at SparkPi.scala:38)
20/04/15 13:08:07 INFO scheduler.DAGScheduler: Parents of final stage: List()
20/04/15 13:08:07 INFO scheduler.DAGScheduler: Missing parents: List()
20/04/15 13:08:07 INFO scheduler.DAGScheduler: Submitting ResultStage 0 (MapPartitionsRDD[1] at map at SparkPi.scala:34), which has no missing parents
20/04/15 13:08:07 INFO memory.MemoryStore: Block broadcast_0 stored as values in memory (estimated size 2.0 KB, free 413.9 MB)
20/04/15 13:08:07 INFO memory.MemoryStore: Block broadcast_0_piece0 stored as bytes in memory (estimated size 1381.0 B, free 413.9 MB)
20/04/15 13:08:07 INFO storage.BlockManagerInfo: Added broadcast_0_piece0 in memory on master:35188 (size: 1381.0 B, free: 413.9 MB)
20/04/15 13:08:07 INFO spark.SparkContext: Created broadcast 0 from broadcast at DAGScheduler.scala:1163
20/04/15 13:08:07 INFO scheduler.DAGScheduler: Submitting 2 missing tasks from ResultStage 0 (MapPartitionsRDD[1] at map at SparkPi.scala:34) (first 15 tasks are for partitions Vector(0, 1))
20/04/15 13:08:07 INFO scheduler.TaskSchedulerImpl: Adding task set 0.0 with 2 tasks
20/04/15 13:08:07 INFO scheduler.TaskSetManager: Starting task 0.0 in stage 0.0 (TID 0, localhost, executor driver, partition 0, PROCESS_LOCAL, 7866 bytes)
20/04/15 13:08:07 INFO executor.Executor: Running task 0.0 in stage 0.0 (TID 0)
20/04/15 13:08:07 INFO executor.Executor: Fetching spark://master:60073/jars/scopt_2.11-3.7.0.jar with timestamp 1586927284748
20/04/15 13:08:08 INFO client.TransportClientFactory: Successfully created connection to master/192.168.58.132:60073 after 114 ms (0 ms spent in bootstraps)
20/04/15 13:08:08 INFO util.Utils: Fetching spark://master:60073/jars/scopt_2.11-3.7.0.jar to /tmp/spark-1222468a-edf8-4362-a819-39abf498c13d/userFiles-ab011cfe-c9e5-4b77-92c7-7a63374a7fac/fetchFileTemp5705332323635684398.tmp
20/04/15 13:08:08 INFO executor.Executor: Adding file:/tmp/spark-1222468a-edf8-4362-a819-39abf498c13d/userFiles-ab011cfe-c9e5-4b77-92c7-7a63374a7fac/scopt_2.11-3.7.0.jar to class loader
20/04/15 13:08:08 INFO executor.Executor: Fetching spark://master:60073/jars/spark-examples_2.11-2.4.5.jar with timestamp 1586927284748
20/04/15 13:08:08 INFO util.Utils: Fetching spark://master:60073/jars/spark-examples_2.11-2.4.5.jar to /tmp/spark-1222468a-edf8-4362-a819-39abf498c13d/userFiles-ab011cfe-c9e5-4b77-92c7-7a63374a7fac/fetchFileTemp456694376781202604.tmp
20/04/15 13:08:08 INFO executor.Executor: Adding file:/tmp/spark-1222468a-edf8-4362-a819-39abf498c13d/userFiles-ab011cfe-c9e5-4b77-92c7-7a63374a7fac/spark-examples_2.11-2.4.5.jar to class loader
20/04/15 13:08:08 INFO executor.Executor: Finished task 0.0 in stage 0.0 (TID 0). 867 bytes result sent to driver
20/04/15 13:08:08 INFO scheduler.TaskSetManager: Starting task 1.0 in stage 0.0 (TID 1, localhost, executor driver, partition 1, PROCESS_LOCAL, 7866 bytes)
20/04/15 13:08:08 INFO executor.Executor: Running task 1.0 in stage 0.0 (TID 1)
20/04/15 13:08:09 INFO executor.Executor: Finished task 1.0 in stage 0.0 (TID 1). 824 bytes result sent to driver
20/04/15 13:08:09 INFO scheduler.TaskSetManager: Finished task 0.0 in stage 0.0 (TID 0) in 1145 ms on localhost (executor driver) (1/2)
20/04/15 13:08:09 INFO scheduler.TaskSetManager: Finished task 1.0 in stage 0.0 (TID 1) in 104 ms on localhost (executor driver) (2/2)
20/04/15 13:08:09 INFO scheduler.TaskSchedulerImpl: Removed TaskSet 0.0, whose tasks have all completed, from pool
20/04/15 13:08:09 INFO scheduler.DAGScheduler: ResultStage 0 (reduce at SparkPi.scala:38) finished in 1.648 s
20/04/15 13:08:09 INFO scheduler.DAGScheduler: Job 0 finished: reduce at SparkPi.scala:38, took 1.902123 s
Pi is roughly 3.1409357046785233
20/04/15 13:08:09 INFO server.AbstractConnector: Stopped Spark@43cf6ea3{HTTP/1.1,[http/1.1]}{192.168.58.132:4040}
20/04/15 13:08:09 INFO ui.SparkUI: Stopped Spark web UI at http://master:4040
20/04/15 13:08:09 INFO spark.MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
20/04/15 13:08:09 INFO memory.MemoryStore: MemoryStore cleared
20/04/15 13:08:09 INFO storage.BlockManager: BlockManager stopped
20/04/15 13:08:09 INFO storage.BlockManagerMaster: BlockManagerMaster stopped
20/04/15 13:08:09 INFO scheduler.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
20/04/15 13:08:09 INFO spark.SparkContext: Successfully stopped SparkContext
20/04/15 13:08:09 INFO util.ShutdownHookManager: Shutdown hook called
20/04/15 13:08:09 INFO util.ShutdownHookManager: Deleting directory /tmp/spark-1222468a-edf8-4362-a819-39abf498c13d
20/04/15 13:08:09 INFO util.ShutdownHookManager: Deleting directory /tmp/spark-b4feadce-0b1a-4a7c-b8c3-0ed38b9c8e99
[hadoop@master spark]$ bin/run-example SparkPi 2>&1 | grep "Pi is"
Pi is roughly 3.13839569197846
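Note: run-example is a thin wrapper around spark-submit. An equivalent direct invocation, using the jar path the driver logged above (the class name follows the examples' package convention; the trailing argument is the optional slice count), would look like:

    bin/spark-submit --class org.apache.spark.examples.SparkPi \
      examples/jars/spark-examples_2.11-2.4.5.jar 100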
[hadoop@master spark]$ ./bin/spark-shell --master local[4]
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/usr/local/spark/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/local/hadoop/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
20/04/15 13:09:28 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Setting default log level to "WARN".
To adjust logging level use sc.setLogLevel(newLevel). For SparkR, use setLogLevel(newLevel).
Spark context Web UI available at http://master:4040
Spark context available as 'sc' (master = local[4], app id = local-1586927386597).
Spark session available as 'spark'.
Welcome to
      ____              __
     / __/__  ___ _____/ /__
    _\ \/ _ \/ _ `/ __/ '_/
   /___/ .__/\_,_/_/ /_/\_\   version 2.4.5
      /_/
Using Scala version 2.11.12 (Java HotSpot(TM) 64-Bit Server VM, Java 1.8.0_101)
Type in expressions to have them evaluated.
Type :help for more information.
scala> 8*2+5
res0: Int = 21
scala> :quit
[hadoop@master spark]$ cd
[hadoop@master ~]$ sudo mkdir /usr/local/sbt
[sudo] password for hadoop:
hadoop is not in the sudoers file. This incident will be reported.
[hadoop@master ~]$ sudo su
[sudo] password for hadoop:
hadoop is not in the sudoers file. This incident will be reported.
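Note: both failures above are expected — the hadoop account was never granted sudo rights. A one-time fix, run as root, would be something like the sketch below (on CentOS/RHEL this assumes the %wheel line in /etc/sudoers is enabled):

    usermod -aG wheel hadoop          # add hadoop to the wheel (sudoers) group
    # or run visudo and append:  hadoop  ALL=(ALL)  ALL

The session below sidesteps this by switching to root with su instead.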
[hadoop@master ~]$ su root
Password:
[root@master hadoop]# sudo mkdir /usr/local/sbt
mkdir: cannot create directory ‘/usr/local/sbt’: File exists
[root@master hadoop]# sudo chown -R hadoop /usr/local/sbt
[root@master hadoop]# cd /usr/local/sbt
[root@master sbt]# cp ~/下载/sbt-launch.jar .
cp: cannot stat ‘/root/下载/sbt-launch.jar’: No such file or directory
[root@master sbt]# vim ./sbt
[root@master sbt]# ./sbt sbt-version
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256M; support was removed in 8.0
[info] Set current project to sbt (in build file:/usr/local/sbt/)
[info] 0.13.11
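Note: the ./sbt file created in vim above is a small launcher wrapper whose contents were never echoed. A sketch consistent with the MaxPermSize=256M warning printed above:

    #!/bin/bash
    SBT_OPTS="-Xms512M -Xmx1536M -Xss1M -XX:+CMSClassUnloadingEnabled -XX:MaxPermSize=256M"
    java $SBT_OPTS -jar `dirname $0`/sbt-launch.jar "$@"

(MaxPermSize was removed in Java 8, hence the warning; it is harmless.)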
[root@master sbt]# cd ~
[root@master ~]# mkdir ./sparkapp
[root@master ~]# mkdir -p ./sparkapp/src/main/scala
[root@master ~]# vim ./sparkapp/src/main/scala/SimpleApp.scala
[root@master ~]# vim ./sparkapp/simple.sbt
[root@master ~]# vim ./sparkapp/simple.sbt
[root@master ~]# cd ~/sparkapp
[root@master sparkapp]# find .
.
./src
./src/main
./src/main/scala
./src/main/scala/SimpleApp.scala
./simple.sbt
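Note: SimpleApp.scala was edited in vim and never echoed. This lab appears to follow the standard Spark quick-start example; a sketch of that program (the input path is an assumption — any local text file works):

    /* SimpleApp.scala */
    import org.apache.spark.{SparkConf, SparkContext}

    object SimpleApp {
      def main(args: Array[String]): Unit = {
        val logFile = "file:///usr/local/spark/README.md"  // assumed input file
        val conf = new SparkConf().setAppName("Simple Application")
        val sc = new SparkContext(conf)
        val logData = sc.textFile(logFile, 2).cache()
        val numAs = logData.filter(_.contains("a")).count()
        val numBs = logData.filter(_.contains("b")).count()
        println(s"Lines with a: $numAs, Lines with b: $numBs")
        sc.stop()
      }
    }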
[root@master sparkapp]# /usr/local/sbt/sbt package
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256M; support was removed in 8.0
/root/sparkapp/simple.sbt:1: error: not found: value me
me := "Simple Project"
^
[error] Type error in expression
Project loading failed: (r)etry, (q)uit, (l)ast, or (i)gnore? r
/root/sparkapp/simple.sbt:1: error: not found: value me
me := "Simple Project"
^
[error] Type error in expression
Project loading failed: (r)etry, (q)uit, (l)ast, or (i)gnore? i
[warn] Ignoring load failure: no project loaded.
[error] Not a valid command: package
[error] package
[error] ^
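Note: the root cause of the failure above is a typo on the first line of simple.sbt — `me :=` should read `name :=`. A corrected simple.sbt, with the Spark and Scala versions taken from the session above (the project version 1.0 is an assumption):

    name := "Simple Project"
    version := "1.0"
    scalaVersion := "2.11.12"
    libraryDependencies += "org.apache.spark" %% "spark-core" % "2.4.5"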
[root@master sparkapp]# cd
[root@master ~]# bin/spark-shell
bash: bin/spark-shell: No such file or directory
[root@master ~]# vi spark-env.sh
[root@master ~]# su hadoop
[hadoop@master root]$ cd
[hadoop@master ~]$ vi spark-env.sh
[hadoop@master ~]$ cd /usr/local/spark/conf
[hadoop@master conf]$ vi spark-env.sh
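Note: spark-env.sh is edited three times above, but only this last edit touches the real file in /usr/local/spark/conf; the first two created stray copies in the home directories. The contents were not echoed; for a Spark build that relies on an existing Hadoop install (as /usr/local/hadoop suggests here), the usual single line added is — treat this as an assumption about what was entered:

    export SPARK_DIST_CLASSPATH=$(/usr/local/hadoop/bin/hadoop classpath)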
[hadoop@master conf]$ /usr/local/sbt/sbt package
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256M; support was removed in 8.0
Getting org.scala-sbt sbt 0.13.11 ...
You probably access the destination server through a proxy server that is not well configured.
You probably access the destination server through a proxy server that is not well configured.
You probably access the destination server through a proxy server that is not well configured.
You probably access the destination server through a proxy server that is not well configured.
:: problems summary ::
:::: WARNINGS
        Host repo1.maven.org not found. url=https://repo1.maven.org/maven2/org/scala-sbt/sbt/0.13.11/sbt-0.13.11.pom
        Host repo1.maven.org not found. url=https://repo1.maven.org/maven2/org/scala-sbt/sbt/0.13.11/sbt-0.13.11.jar
        Host repo.typesafe.com not found. url=https://repo.typesafe.com/typesafe/ivy-releases/org.scala-sbt/sbt/0.13.11/ivys/ivy.xml
        Host repo.scala-sbt.org not found. url=https://repo.scala-sbt.org/scalasbt/ivy-snapshots/org.scala-sbt/sbt/0.13.11/ivys/ivy.xml
        module not found: org.scala-sbt#sbt;0.13.11
        ==== local: tried
          /home/hadoop/.ivy2/local/org.scala-sbt/sbt/0.13.11/ivys/ivy.xml
          -- artifact org.scala-sbt#sbt;0.13.11!sbt.jar:
          /home/hadoop/.ivy2/local/org.scala-sbt/sbt/0.13.11/jars/sbt.jar
        ==== Maven Central: tried
          https://repo1.maven.org/maven2/org/scala-sbt/sbt/0.13.11/sbt-0.13.11.pom
          -- artifact org.scala-sbt#sbt;0.13.11!sbt.jar:
          https://repo1.maven.org/maven2/org/scala-sbt/sbt/0.13.11/sbt-0.13.11.jar
        ==== typesafe-ivy-releases: tried
          https://repo.typesafe.com/typesafe/ivy-releases/org.scala-sbt/sbt/0.13.11/ivys/ivy.xml
        ==== sbt-ivy-snapshots: tried
          https://repo.scala-sbt.org/scalasbt/ivy-snapshots/org.scala-sbt/sbt/0.13.11/ivys/ivy.xml
        ::::::::::::::::::::::::::::::::::::::::::::::
        ::          UNRESOLVED DEPENDENCIES         ::
        ::::::::::::::::::::::::::::::::::::::::::::::
        :: org.scala-sbt#sbt;0.13.11: not found
        ::::::::::::::::::::::::::::::::::::::::::::::
:: USE VERBOSE OR DEBUG MESSAGE LEVEL FOR MORE DETAILS
unresolved dependency: org.scala-sbt#sbt;0.13.11: not found
Error during sbt execution: Error retrieving required libraries
  (see /home/hadoop/.sbt/boot/update.log for complete log)
Error: Could not retrieve sbt 0.13.11
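Note: sbt-launch.jar is only a bootstrapper — on first run it downloads the sbt 0.13.11 core from the repositories listed above, so the build cannot proceed until the host resolves external names. (The earlier ./sbt sbt-version run succeeded as root, presumably because root's ~/.sbt/boot already held a cached copy; hadoop's home has none.) A quick check — a diagnostic sketch, not part of the original session:

    getent hosts repo1.maven.org    # prints nothing while DNS is broken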
[hadoop@master conf]$ cd /usr/share/sbt/bin/
bash: cd: /usr/share/sbt/bin/: No such file or directory
[hadoop@master conf]$ cd
[hadoop@master ~]$ cd /usr/share/sbt/bin/
bash: cd: /usr/share/sbt/bin/: No such file or directory
[hadoop@master ~]$ cd /usr/local/sbt
[hadoop@master sbt]$ wget https://repo.typesafe.com/typesafe/ivy-releases/org.scala-sbt/sbt-launch/0.13.9/sbt-launch.jar -O ./sbt-launch.jar
--2020-04-15 13:37:20-- https://repo.typesafe.com/typesafe/ivy-releases/org.scala-sbt/sbt-launch/0.13.9/sbt-launch.jar
Resolving repo.typesafe.com (repo.typesafe.com)... failed: Temporary failure in name resolution.
wget: unable to resolve host address ‘repo.typesafe.com’
[hadoop@master sbt]$ cd
[hadoop@master ~]$ su root
Password:
[root@master hadoop]# vim /etc/resolv.conf
[root@master hadoop]# su hadoop
[hadoop@master ~]$ cd /usr/local/sbt
[hadoop@master sbt]$ wget https://repo.typesafe.com/typesafe/ivy-releases/org.scala-sbt/sbt-launch/0.13.9/sbt-launch.jar -O ./sbt-launch.jar
--2020-04-15 13:40:44-- https://repo.typesafe.com/typesafe/ivy-releases/org.scala-sbt/sbt-launch/0.13.9/sbt-launch.jar
Resolving repo.typesafe.com (repo.typesafe.com)... failed: Temporary failure in name resolution.
wget: unable to resolve host address ‘repo.typesafe.com’
[hadoop@master sbt]$ vim ./sbt/sbt.boot.properties
[hadoop@master sbt]$ cd
[hadoop@master ~]$ su root
Password:
[root@master hadoop]# cd
[root@master ~]# vim ./sbt/sbt.boot.properties
[root@master ~]# cd /usr/local/sbt
[root@master sbt]# vim ./sbt/sbt.boot.properties
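Note: the edits above are presumably aimed at the [repositories] section of sbt.boot.properties, which lives inside the launcher jar. But /usr/local/sbt/sbt is the wrapper script, not a directory, so ./sbt/sbt.boot.properties opens a new empty buffer rather than the real file (this is also why cd into it fails below). Once unpacked from the jar, a sketch of that section pointing at an additional reachable mirror (the mirror URL is an assumption, not from the session):

    [repositories]
      local
      huaweicloud-maven: https://repo.huaweicloud.com/repository/maven/
      maven-central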
[root@master sbt]# su hadoop
[hadoop@master sbt]$ cd
[hadoop@master ~]$ cd /usr/share/sbt/bin/
bash: cd: /usr/share/sbt/bin/: No such file or directory
[hadoop@master ~]$ mkdir cd /usr/share/sbt/bin/
mkdir: cannot create directory ‘/usr/share/sbt/bin/’: No such file or directory
[hadoop@master ~]$ mkdir /usr/share/sbt/bin
mkdir: cannot create directory ‘/usr/share/sbt/bin’: No such file or directory
[hadoop@master ~]$ mkdir /usr/share/sbt/bin/
mkdir: cannot create directory ‘/usr/share/sbt/bin/’: No such file or directory
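Note: two separate problems here — /usr/share/sbt does not exist (mkdir without -p refuses to create missing parents), and the hadoop user has no write permission under /usr/share anyway. The stray "cd" argument in the first attempt also quietly created an unwanted ~/cd directory. As root it would be:

    mkdir -p /usr/share/sbt/bin    # -p creates missing parent directories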
[hadoop@master ~]$ vim conf/repo.properties
[hadoop@master ~]$ cd /usr/local/sbt/bin
bash: cd: /usr/local/sbt/bin: No such file or directory
[hadoop@master ~]$ cd /usr/local/sbt
[hadoop@master sbt]$ cd
[hadoop@master ~]$ cd /usr/local/sbt/bin
bash: cd: /usr/local/sbt/bin: No such file or directory
[hadoop@master ~]$ cd /usr/local/sbt
[hadoop@master sbt]$ ls
sbt sbt-launch.jar
[hadoop@master sbt]$ cd /usr/local/sbt
[hadoop@master sbt]$ mkdir 123 && cd 123
[hadoop@master 123]$ mv ../sbt-launch.jar .
[hadoop@master 123]$ unzip -q sbt-launch.jar
[sbt-launch.jar]
  End-of-central-directory signature not found.  Either this file is not
  a zipfile, or it constitutes one disk of a multi-part archive.  In the
  latter case the central directory and zipfile comment will be found on
  the last disk(s) of this archive.
unzip:  cannot find zipfile directory in one of sbt-launch.jar or
        sbt-launch.jar.zip, and cannot find sbt-launch.jar.ZIP, period.
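Note: this unzip error means sbt-launch.jar is not a valid zip archive at all — wget -O creates the output file even when the download fails, so the DNS failures above left an empty or truncated file behind under the jar's name. Two quick sanity checks (a sketch; the size is approximate for the 0.13.x launcher):

    file sbt-launch.jar     # a good launcher reports: Java archive data (JAR)
    ls -l sbt-launch.jar    # should be on the order of 1 MB, not a few bytes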
[hadoop@master 123]$ rm sbt-launch.jar
[hadoop@master 123]$ vim conf/repo.properties
[hadoop@master 123]$ vim ./sbt/sbt.boot.properties
[hadoop@master 123]$ cd
[hadoop@master ~]$ vim ./sbt/sbt.boot.properties
[hadoop@master ~]$ cd /usr/local/sbt/sbt
bash: cd: /usr/local/sbt/sbt: Not a directory
[hadoop@master ~]$ /usr/local/sbt
bash: /usr/local/sbt: Is a directory
[hadoop@master ~]$ cd /usr/local/sbt
[hadoop@master sbt]$ ls
123 sbt
[hadoop@master sbt]$
[hadoop@master sbt]$ sudo mkdir /usr/local/sbt
[sudo] password for hadoop:
Sorry, try again.
[sudo] password for hadoop:
Sorry, try again.
[sudo] password for hadoop:
Sorry, try again.
sudo: 3 incorrect password attempts
[hadoop@master sbt]$ cd
[hadoop@master ~]$ sudo mkdir /usr/local/sbt
[sudo] password for hadoop:
hadoop is not in the sudoers file. This incident will be reported.
[hadoop@master ~]$ su root
Password:
[root@master hadoop]# sudo mkdir /usr/local/sbt
mkdir: cannot create directory ‘/usr/local/sbt’: File exists
[root@master hadoop]# cd
[root@master ~]# sudo chown -R hadoop /usr/local/sbt
[root@master ~]# cd /usr/local/sbt
[root@master sbt]# mkdir sbtlaunch
[root@master sbt]# cd /usr/local/sbt/sbtlaunch
[root@master sbtlaunch]# mv sbt-launch-1.1.1.jar sbt-launch.jar
mv: cannot stat ‘sbt-launch-1.1.1.jar’: No such file or directory
[root@master sbtlaunch]# vim ./sbt/sbt.boot.properties
[root@master sbtlaunch]# cd
[root@master ~]# su root
[root@master ~]# su hadoop
[hadoop@master root]$ cd
[hadoop@master ~]$ vim ./sbt/sbt.boot.properties
[hadoop@master ~]$ unzip -q ./sbt-launch.jar
unzip: cannot find or open ./sbt-launch.jar, ./sbt-launch.jar.zip or ./sbt-launch.jar.ZIP.
[hadoop@master ~]$ wget https://jaist.dl.sourceforge.net/project/p7zip/p7zip/16.02/p7zip_16.02_src_all.tar.bz2
--2020-04-15 14:02:00-- https://jaist.dl.sourceforge.net/project/p7zip/p7zip/16.02/p7zip_16.02_src_all.tar.bz2
Resolving jaist.dl.sourceforge.net (jaist.dl.sourceforge.net)... failed: Temporary failure in name resolution.
wget: unable to resolve host address ‘jaist.dl.sourceforge.net’
[hadoop@master ~]$ /etc/resolv.conf
bash: /etc/resolv.conf: Permission denied
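Note: "Permission denied" here is because the shell is trying to execute /etc/resolv.conf (it has no execute bit), not because it is unreadable. To view or edit it:

    cat /etc/resolv.conf          # readable by everyone
    su -c 'vi /etc/resolv.conf'   # editing requires root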
[hadoop@master ~]$ su root
Password:
hadoop
su: Authentication failure
[hadoop@master ~]$ hadoop
Usage: hadoop [--config confdir] [COMMAND | CLASSNAME]
  CLASSNAME            run the class named CLASSNAME
 or
  where COMMAND is one of:
  fs                   run a generic filesystem user client
  version              print the version
  jar <jar>            run a jar file
                       note: please use "yarn jar" to launch
                             YARN applications, not this command.
  checknative [-a|-h]  check native hadoop and compression libraries availability
  distcp <srcurl> <desturl> copy file or directories recursively
  archive -archiveName NAME -p <parent path> <src>* <dest> create a hadoop archive
  classpath            prints the class path needed to get the
                       Hadoop jar and the required libraries
  credential           interact with credential providers
  daemonlog            get/set the log level for each daemon
  trace                view and modify Hadoop tracing settings

Most commands print help when invoked w/o parameters.
[hadoop@master ~]$ su root
Password:
[root@master hadoop]# vim /etc/resolv.conf
[root@master hadoop]# ping www.baidu.com
ping: unknown host www.baidu.com
[root@master hadoop]# cd
[root@master ~]# ping www.baidu.com
ping: unknown host www.baidu.com
[root@master ~]# ./sbt sbt-version
bash: ./sbt: No such file or directory
[root@master ~]# cd /usr/local/sbt
[root@master sbt]# ./sbt sbt-version
Error: Unable to access jarfile ./sbt-launch.jar
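Note: the wrapper still points at sbt-launch.jar next to itself, but the launcher was moved into 123/ and then deleted above, so there is nothing left to run. Once name resolution works it has to be fetched again, e.g. with the same URL used earlier:

    wget https://repo.typesafe.com/typesafe/ivy-releases/org.scala-sbt/sbt-launch/0.13.9/sbt-launch.jar -O /usr/local/sbt/sbt-launch.jar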
[root@master sbt]# cd /usr/local/spark
[root@master spark]# ./bin/spark-shell --master local[4] --jars code.jar
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/usr/local/spark/jars/slf4j-log4j12-1.7.16.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/usr/local/hadoop/share/hadoop/common/lib/slf4j-log4j12-1.7.10.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
20/04/15 14:14:30 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
20/04/15 14:14:31 WARN deploy.DependencyUtils: Local jar /usr/local/spark/code.jar does not exist, skipping.
20/04/15 14:14:31 INFO util.SignalUtils: Registered signal handler for INT
20/04/15 14:14:43 INFO spark.SparkContext: Running Spark version 2.4.5
20/04/15 14:14:43 INFO spark.SparkContext: Submitted application: Spark shell
20/04/15 14:14:44 INFO spark.SecurityManager: Changing view acls to: root
20/04/15 14:14:44 INFO spark.SecurityManager: Changing modify acls to: root
20/04/15 14:14:44 INFO spark.SecurityManager: Changing view acls groups to:
20/04/15 14:14:44 INFO spark.SecurityManager: Changing modify acls groups to:
20/04/15 14:14:44 INFO spark.SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(root); groups with view permissions: Set(); users with modify permissions: Set(root); groups with modify permissions: Set()
20/04/15 14:14:44 INFO util.Utils: Successfully started service 'sparkDriver' on port 57273.
20/04/15 14:14:44 INFO spark.SparkEnv: Registering MapOutputTracker
20/04/15 14:14:44 INFO spark.SparkEnv: Registering BlockManagerMaster
20/04/15 14:14:45 INFO storage.BlockManagerMasterEndpoint: Using org.apache.spark.storage.DefaultTopologyMapper for getting topology information
20/04/15 14:14:45 INFO storage.BlockManagerMasterEndpoint: BlockManagerMasterEndpoint up
20/04/15 14:14:45 INFO storage.DiskBlockManager: Created local directory at /tmp/blockmgr-f687ebd9-556d-4ee6-94e6-36523b5e8657
20/04/15 14:14:45 INFO memory.MemoryStore: MemoryStore started with capacity 413.9 MB
20/04/15 14:14:45 INFO spark.SparkEnv: Registering OutputCommitCoordinator
20/04/15 14:14:45 INFO util.log: Logging initialized @17202ms
20/04/15 14:14:45 INFO server.Server: jetty-9.3.z-SNAPSHOT, build timestamp: unknown, git hash: unknown
20/04/15 14:14:45 INFO server.Server: Started @17438ms
20/04/15 14:14:45 INFO server.AbstractConnector: Started ServerConnector@44aa5585{HTTP/1.1,[http/1.1]}{192.168.58.132:4040}
20/04/15 14:14:45 INFO util.Utils: Successfully started service 'SparkUI' on port 4040.
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@cae4952{/jobs,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@53153d5e{/jobs/json,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@5b407336{/jobs/job,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@6a262980{/jobs/job/json,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@10230657{/stages,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@48a21ea6{/stages/json,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@394e504d{/stages/stage,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@32e9c3af{/stages/stage/json,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@796e2187{/stages/pool,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@632d1b1b{/stages/pool/json,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@3c2fa57a{/storage,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@4027edeb{/storage/json,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@48cd8e71{/storage/rdd,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@43b3b1b0{/storage/rdd/json,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@2f6d8c9{/environment,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@7d4135c9{/environment/json,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@59db8216{/executors,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@7bb86ac{/executors/json,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@33bf2602{/executors/threadDump,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@4724b2c1{/executors/threadDump/json,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@426a4301{/static,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@1a06b95{/,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@7fb02606{/api,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@5c8d631{/jobs/job/kill,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@3d42b756{/stages/stage/kill,null,AVAILABLE,@Spark}
20/04/15 14:14:45 INFO ui.SparkUI: Bound SparkUI to 192.168.58.132, and started at http://master:4040
20/04/15 14:14:45 ERROR spark.SparkContext: Failed to add file:/usr/local/spark/code.jar to Spark environment
java.io.FileNotFoundException: Jar /usr/local/spark/code.jar not found
at org.apache.spark.SparkContext.addJarFile$1(SparkContext.scala:1838)
at org.apache.spark.SparkContext.addJar(SparkContext.scala:1868)
at org.apache.spark.SparkContext$$anonfun$12.apply(SparkContext.scala:458)
at org.apache.spark.SparkContext$$anonfun$12.apply(SparkContext.scala:458)
at scala.collection.immutable.List.foreach(List.scala:392)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:458)
at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2520)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:935)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:926)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:926)
at org.apache.spark.repl.Main$.createSparkSession(Main.scala:106)
at $line3.$read$$iw$$iw.<init>(<console>:15)
at $line3.$read$$iw.<init>(<console>:43)
at $line3.$read.<init>(<console>:45)
at $line3.$read$.<init>(<console>:49)
at $line3.$read$.<clinit>(<console>)
at $line3.$eval$.$print$lzycompute(<console>:7)
at $line3.$eval$.$print(<console>:6)
at $line3.$eval.$print(<console>)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:793)
at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1054)
at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:645)
at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:644)
at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19)
at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:644)
at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:576)
at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:572)
at scala.tools.nsc.interpreter.IMain$$anonfun$quietRun$1.apply(IMain.scala:231)
at scala.tools.nsc.interpreter.IMain$$anonfun$quietRun$1.apply(IMain.scala:231)
at scala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:221)
at scala.tools.nsc.interpreter.IMain.quietRun(IMain.scala:231)
at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1$$anonfun$apply$mcV$sp$1.apply(SparkILoop.scala:109)
at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1$$anonfun$apply$mcV$sp$1.apply(SparkILoop.scala:109)
at scala.collection.immutable.List.foreach(List.scala:392)
at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply$mcV$sp(SparkILoop.scala:109)
at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply(SparkILoop.scala:109)
at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply(SparkILoop.scala:109)
at scala.tools.nsc.interpreter.ILoop.savingReplayStack(ILoop.scala:91)
at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:108)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$org$apache$spark$repl$SparkILoop$$anonfun$$loopPostInit$1$1.apply$mcV$sp(SparkILoop.scala:211)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$org$apache$spark$repl$SparkILoop$$anonfun$$loopPostInit$1$1.apply(SparkILoop.scala:199)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$org$apache$spark$repl$SparkILoop$$anonfun$$loopPostInit$1$1.apply(SparkILoop.scala:199)
at scala.tools.nsc.interpreter.ILoop$$anonfun$mumly$1.apply(ILoop.scala:189)
at scala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:221)
at scala.tools.nsc.interpreter.ILoop.mumly(ILoop.scala:186)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1.org$apache$spark$repl$SparkILoop$$anonfun$$loopPostInit$1(SparkILoop.scala:199)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$startup$1$1.apply(SparkILoop.scala:267)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1$$anonfun$startup$1$1.apply(SparkILoop.scala:247)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1.withSuppressedSettings$1(SparkILoop.scala:235)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1.startup$1(SparkILoop.scala:247)
at org.apache.spark.repl.SparkILoop$$anonfun$process$1.apply$mcZ$sp(SparkILoop.scala:282)
at org.apache.spark.repl.SparkILoop.runClosure(SparkILoop.scala:159)
at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:182)
at org.apache.spark.repl.Main$.doMain(Main.scala:78)
at org.apache.spark.repl.Main$.main(Main.scala:58)
at org.apache.spark.repl.Main.main(Main.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:845)
at org.apache.spark.deploy.SparkSubmit.doRunMain$1(SparkSubmit.scala:161)
at org.apache.spark.deploy.SparkSubmit.submit(SparkSubmit.scala:184)
at org.apache.spark.deploy.SparkSubmit.doSubmit(SparkSubmit.scala:86)
at org.apache.spark.deploy.SparkSubmit$$anon$2.doSubmit(SparkSubmit.scala:920)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:929)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
20/04/15 14:14:46 INFO executor.Executor: Starting executor ID driver on host localhost
20/04/15 14:14:46 INFO executor.Executor: Using REPL class URI: spark://master:57273/classes
20/04/15 14:14:46 INFO util.Utils: Successfully started service 'org.apache.spark.network.netty.NettyBlockTransferService' on port 33829.
20/04/15 14:14:46 INFO netty.NettyBlockTransferService: Server created on master:33829
20/04/15 14:14:46 INFO storage.BlockManager: Using org.apache.spark.storage.RandomBlockReplicationPolicy for block replication policy
20/04/15 14:14:46 INFO storage.BlockManagerMaster: Registering BlockManager BlockManagerId(driver, master, 33829, None)
20/04/15 14:14:46 INFO storage.BlockManagerMasterEndpoint: Registering block manager master:33829 with 413.9 MB RAM, BlockManagerId(driver, master, 33829, None)
20/04/15 14:14:46 INFO storage.BlockManagerMaster: Registered BlockManager BlockManagerId(driver, master, 33829, None)
20/04/15 14:14:46 INFO storage.BlockManager: Initialized BlockManager: BlockManagerId(driver, master, 33829, None)
20/04/15 14:14:46 INFO handler.ContextHandler: Started o.s.j.s.ServletContextHandler@18ad085a{/metrics/json,null,AVAILABLE,@Spark}
20/04/15 14:14:46 INFO repl.Main: Created Spark session with Hive support
Spark context Web UI available at http://master:4040
Spark context available as 'sc' (master = local[4], app id = local-1586931286039).
Spark session available as 'spark'.
Welcome to
      ____              __
     / __/__  ___ _____/ /__
    _\ \/ _ \/ _ `/ __/ '_/
   /___/ .__/\_,_/_/ /_/\_\   version 2.4.5
      /_/
Using Scala version 2.11.12 (Java HotSpot(TM) 64-Bit Server VM, Java 1.8.0_101)
Type in expressions to have them evaluated.
Type :help for more information.
scala> :quit
20/04/15 14:24:14 INFO server.AbstractConnector: Stopped Spark@44aa5585{HTTP/1.1,[http/1.1]}{192.168.58.132:4040}
20/04/15 14:24:14 INFO ui.SparkUI: Stopped Spark web UI at http://master:4040
20/04/15 14:24:14 INFO spark.MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
20/04/15 14:24:14 INFO memory.MemoryStore: MemoryStore cleared
20/04/15 14:24:14 INFO storage.BlockManager: BlockManager stopped
20/04/15 14:24:14 INFO storage.BlockManagerMaster: BlockManagerMaster stopped
20/04/15 14:24:14 INFO scheduler.OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
20/04/15 14:24:14 INFO spark.SparkContext: Successfully stopped SparkContext
20/04/15 14:24:14 INFO util.ShutdownHookManager: Shutdown hook called
20/04/15 14:24:14 INFO util.ShutdownHookManager: Deleting directory /tmp/spark-5b89d251-0a33-4dbe-bef6-ff29d037fd36/repl-8d8e9121-5a88-44a9-b35c-6cdd6e9aff84
20/04/15 14:24:14 INFO util.ShutdownHookManager: Deleting directory /tmp/spark-e7fb2f79-8f74-481d-931c-7881b52dad11
20/04/15 14:24:14 INFO util.ShutdownHookManager: Deleting directory /tmp/spark-5b89d251-0a33-4dbe-bef6-ff29d037fd36
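Note: the ERROR in the session above is non-fatal — spark-shell still starts — but --jars only accepts jars that actually exist, and /usr/local/spark/code.jar was a placeholder. With the SimpleApp project built, the real invocation would be something like this (the target path follows sbt's standard naming for name := "Simple Project"; an assumption):

    ./bin/spark-shell --master local[4] --jars ~/sparkapp/target/scala-2.11/simple-project_2.11-1.0.jar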
[root@master spark]# sudo mkdir /usr/local/sbt
mkdir: cannot create directory ‘/usr/local/sbt’: File exists
[root@master spark]# cd /usr/local/sbt
[root@master sbt]# ls
123 sbt sbtlaunch
[root@master sbt]# cp ~/Downloads/sbt-launch.jar .
cp: cannot stat ‘/root/Downloads/sbt-launch.jar’: No such file or directory
[root@master sbt]# cd /Downloads
bash: cd: /Downloads: No such file or directory
[root@master sbt]# cd
[root@master ~]# cd /Downloads/
bash: cd: /Downloads/: No such file or directory
[root@master ~]# cd ~/Downloads
[root@master Downloads]# ls
[root@master Downloads]# ls -l
total 0
[root@master Downloads]# cd
[root@master ~]# /etc/resolv.conf
bash: /etc/resolv.conf: Permission denied
[root@master ~]# cat /etc/resolv.conf
# Generated by NetworkManager
search localdomain
nameserver 192.168.85.2
nameserver 8.8.8.8 # Google DNS server
nameserver 8.8.4.4 # Google DNS server
[root@master ~]# vi /etc/resolv.conf
[root@master ~]#
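Note: with a reachable nameserver finally in /etc/resolv.conf, the failed steps above can be retried in order — a sketch:

    ping -c 3 repo.typesafe.com                  # confirm DNS now resolves
    wget https://repo.typesafe.com/typesafe/ivy-releases/org.scala-sbt/sbt-launch/0.13.9/sbt-launch.jar -O /usr/local/sbt/sbt-launch.jar
    cd ~/sparkapp && /usr/local/sbt/sbt package  # rebuild with the corrected simple.sbt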