spark worker 启动失败 failed to launch org.apache.spark.deploy.worker.Worker
2016-06-18 00:08
651 查看
[root@cdh1 src_old]# ls -lh total 17M -rwxr-xr-x 1 root root 17M Jun 15 08:20 hadoop-2.6.0-src.tar.gz [root@cdh1 src_old]# pwd /user/local/src_old [root@cdh1 src_old]# export SCALA_HOME=/user/local/scala-2.9.3 export PATH=$PATH:$SCALA_HOME/bin [root@cdh1 src_old]# pwd /user/local/src_old [root@cdh1 src_old]# cd .. [root@cdh1 local]# ls -l total 176016 drwxr-xr-x 10 root root 4096 Jun 16 16:19 hadoop-2.6.0 -rw-r--r-- 1 root root 180047399 Jun 16 15:38 hadoop-2.6.0.tar.gz drwxr-xr-x 8 root root 4096 Jun 16 02:50 jdk drwxr-xr-x 2 root root 4096 Jun 17 08:09 src_old [root@cdh1 local]# cp /mnt/hgfs/hive/spark/scala-2.9.3.tgz . [root@cdh1 local]# ls -l total 200140 drwxr-xr-x 10 root root 4096 Jun 16 16:19 hadoop-2.6.0 -rw-r--r-- 1 root root 180047399 Jun 16 15:38 hadoop-2.6.0.tar.gz drwxr-xr-x 8 root root 4096 Jun 16 02:50 jdk -rwxr-xr-x 1 root root 24699008 Jun 17 08:13 scala-2.9.3.tgz drwxr-xr-x 2 root root 4096 Jun 17 08:09 src_old [root@cdh1 local]# tar -zxvf scala-2.9.3.tgz scala-2.9.3/ scala-2.9.3/bin/ scala-2.9.3/bin/scala.bat scala-2.9.3/bin/scalap scala-2.9.3/bin/fsc.bat scala-2.9.3/bin/scalac scala-2.9.3/bin/scala scala-2.9.3/bin/scaladoc scala-2.9.3/bin/scalap.bat scala-2.9.3/bin/fsc scala-2.9.3/bin/scaladoc.bat scala-2.9.3/bin/scalac.bat scala-2.9.3/lib/ scala-2.9.3/lib/scala-partest.jar scala-2.9.3/lib/scala-swing.jar scala-2.9.3/lib/scala-dbc.jar scala-2.9.3/lib/jline.jar scala-2.9.3/lib/scalacheck.jar scala-2.9.3/lib/scala-library.jar scala-2.9.3/lib/scala-compiler.jar scala-2.9.3/lib/scalap.jar scala-2.9.3/src/ scala-2.9.3/src/scala-partest-src.jar scala-2.9.3/src/scala-dbc-src.jar scala-2.9.3/src/scalap-src.jar scala-2.9.3/src/scala-compiler-src.jar scala-2.9.3/src/scala-swing-src.jar scala-2.9.3/src/scala-library-src.jar scala-2.9.3/misc/ scala-2.9.3/misc/scala-devel/ scala-2.9.3/misc/scala-devel/plugins/ scala-2.9.3/misc/scala-devel/plugins/continuations.jar scala-2.9.3/doc/ scala-2.9.3/doc/tools/ scala-2.9.3/doc/tools/scala.html 
scala-2.9.3/doc/tools/fsc.html scala-2.9.3/doc/tools/scalap.html scala-2.9.3/doc/tools/index.html scala-2.9.3/doc/tools/scalac.html scala-2.9.3/doc/tools/css/ scala-2.9.3/doc/tools/css/style.css scala-2.9.3/doc/tools/scaladoc.html scala-2.9.3/doc/tools/images/ scala-2.9.3/doc/tools/images/scala_logo.png scala-2.9.3/doc/tools/images/external.gif scala-2.9.3/doc/LICENSE scala-2.9.3/doc/README scala-2.9.3/man/ scala-2.9.3/man/man1/ scala-2.9.3/man/man1/scalac.1 scala-2.9.3/man/man1/scala.1 scala-2.9.3/man/man1/scalap.1 scala-2.9.3/man/man1/scaladoc.1 scala-2.9.3/man/man1/fsc.1 [root@cdh1 local]# ls -l total 200144 drwxr-xr-x 10 root root 4096 Jun 16 16:19 hadoop-2.6.0 -rw-r--r-- 1 root root 180047399 Jun 16 15:38 hadoop-2.6.0.tar.gz drwxr-xr-x 8 root root 4096 Jun 16 02:50 jdk drwxr-xr-x 8 119 129 4096 Feb 25 2013 scala-2.9.3 -rwxr-xr-x 1 root root 24699008 Jun 17 08:13 scala-2.9.3.tgz drwxr-xr-x 2 root root 4096 Jun 17 08:09 src_old [root@cdh1 local]# rm scala-2.9.3.tgz rm: remove regular file `scala-2.9.3.tgz'? y [root@cdh1 local]# ls -l total 176020 drwxr-xr-x 10 root root 4096 Jun 16 16:19 hadoop-2.6.0 -rw-r--r-- 1 root root 180047399 Jun 16 15:38 hadoop-2.6.0.tar.gz drwxr-xr-x 8 root root 4096 Jun 16 02:50 jdk drwxr-xr-x 8 119 129 4096 Feb 25 2013 scala-2.9.3 drwxr-xr-x 2 root root 4096 Jun 17 08:09 src_old [root@cdh1 local]# cd scala-2.9.3/ [root@cdh1 scala-2.9.3]# pwd /user/local/scala-2.9.3 [root@cdh1 scala-2.9.3]# vi /etc/profile [root@cdh1 scala-2.9.3]# source /etc/profile [root@cdh1 scala-2.9.3]# scala -version Scala code runner version 2.9.3 -- Copyright 2002-2011, LAMP/EPFL [root@cdh1 scala-2.9.3]# [root@cdh1 scala-2.9.3]# scala Welcome to Scala version 2.9.3 (Java HotSpot(TM) Client VM, Java 1.7.0_67). Type in expressions to have them evaluated. Type :help for more information. 
scala> print("I LOVE YOU,ZhengWeiHong"); I LOVE YOU,ZhengWeiHong scala> scala> :q [root@cdh1 scala-2.9.3]# --------------------------- export SPARK_HOME=/user/local/spark-1.4.0-bin-hadoop2.6 export PATH=$PATH:$SPARK_HOME/bin export SCALA_HOME=/user/local/scala-2.9.3 export JAVA_HOME=$JAVA_HOME export HADOOP_HOME=/user/local/hadoop-2.6.0 export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop export SPARK_MASTER_IP=cdh1 export SPARK_DRIVER_MEMORY=512M Exception in thread "main" java.lang.NoSuchMethodError: method java.util.regex.Pattern.quote with signature (Ljava.lang.String;)Ljava.lang.String /user/local/spark-1.4.0-bin-hadoop2.6/sbin [root@cdh1 sbin]# cd ../logs/ [root@cdh1 logs]# less spark-root-org.apache.spark.deploy.worker.Worker-1-cdh1.out [root@cdh1 logs]# jps 4706 Jps 4384 Master 3902 ResourceManager 3507 NameNode 4000 NodeManager 3710 SecondaryNameNode 3595 DataNode [root@cdh1 logs]# less spark-root-org.apache.spark.deploy.worker.Worker-1-cdh1.out [root@cdh1 logs]# cd - /user/local/spark-1.4.0-bin-hadoop2.6/sbin [root@cdh1 sbin]# cd ../conf/ [root@cdh1 conf]# source spark-env.sh [root@cdh1 conf]# cd .. [root@cdh1 spark-1.4.0-bin-hadoop2.6]# sbin/start-all.sh org.apache.spark.deploy.master.Master running as process 4384. Stop it first. 
localhost: starting org.apache.spark.deploy.worker.Worker, logging to /user/local/spark-1.4.0-bin-hadoop2.6/sbin/../logs/spark-root-org.apache.spark.deploy.worker.Worker-1-cdh1.out localhost: failed to launch org.apache.spark.deploy.worker.Worker: localhost: at org.apache.spark.launcher.SparkClassCommandBuilder.buildCommand(SparkClassCommandBuilder.java:98) localhost: at org.apache.spark.launcher.Main.main(Main.java:74) localhost: full log in /user/local/spark-1.4.0-bin-hadoop2.6/sbin/../logs/spark-root-org.apache.spark.deploy.worker.Worker-1-cdh1.out [root@cdh1 spark-1.4.0-bin-hadoop2.6]# jps 4384 Master 3902 ResourceManager 3507 NameNode 4000 NodeManager 3710 SecondaryNameNode 3595 DataNode 4939 Jps [root@cdh1 spark-1.4.0-bin-hadoop2.6]# spark-1.4.0-bin-hadoop2.6/bin/spark-class.cmd spark-1.4.0-bin-hadoop2.6/bin/pyspark.cmd spark-1.4.0-bin-hadoop2.6/bin/sparkR spark-1.4.0-bin-hadoop2.6/bin/beeline.cmd spark-1.4.0-bin-hadoop2.6/bin/sparkR2.cmd spark-1.4.0-bin-hadoop2.6/bin/run-example2.cmd spark-1.4.0-bin-hadoop2.6/bin/load-spark-env.sh spark-1.4.0-bin-hadoop2.6/bin/load-spark-env.cmd spark-1.4.0-bin-hadoop2.6/lib/ spark-1.4.0-bin-hadoop2.6/lib/datanucleus-core-3.2.10.jar spark-1.4.0-bin-hadoop2.6/lib/datanucleus-api-jdo-3.2.6.jar spark-1.4.0-bin-hadoop2.6/lib/spark-examples-1.4.0-hadoop2.6.0.jar spark-1.4.0-bin-hadoop2.6/lib/datanucleus-rdbms-3.2.9.jar spark-1.4.0-bin-hadoop2.6/lib/spark-assembly-1.4.0-hadoop2.6.0.jar spark-1.4.0-bin-hadoop2.6/lib/spark-1.4.0-yarn-shuffle.jar spark-1.4.0-bin-hadoop2.6/README.md [root@cdh1 local]# ls -l total 420600 drwxr-xr-x 10 root root 4096 Jun 16 16:19 hadoop-2.6.0 -rw-r--r-- 1 root root 180047399 Jun 16 15:38 hadoop-2.6.0.tar.gz drwxr-xr-x 8 root root 4096 Jun 16 02:50 jdk drwxr-xr-x 8 119 129 4096 Jun 17 08:35 scala-2.9.3 drwxr-xr-x 11 hadoop hadoop 4096 Jun 2 2015 spark-1.4.0-bin-hadoop2.6 -rwxr-xr-x 1 root root 250194134 Jun 17 08:35 spark-1.4.0-bin-hadoop2.6.tgz.tgz drwxr-xr-x 2 root root 4096 Jun 17 08:09 src_old 
[root@cdh1 local]# cd spark-1.4.0-bin-hadoop2.6 [root@cdh1 spark-1.4.0-bin-hadoop2.6]# pwd /user/local/spark-1.4.0-bin-hadoop2.6 [root@cdh1 spark-1.4.0-bin-hadoop2.6]# cat /etc/profile # /etc/profile #Hadoop Env export HADOOP_HOME_WARN_SUPPRESS=1 export JAVA_HOME=/user/local/jdk #export HADOOP_HOME=/user/local/hadoop #export PATH=$JAVA_HOME/bin:$HADOOP_HOME:/bin:$PATH export PATH=$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$PATH #Hadoop Env export HADOOP_HOME_WARN_SUPPRESS=1 export JAVA_HOME=/user/local/jdk export HADOOP_HOME=/user/local/hadoop-2.6.0 export HIVE_HOME=/user/local/hive export PATH=$JAVA_HOME/bin:$HADOOP_HOME:/bin:$PATH export JRE_HOME=$JAVA_HOME/jre export CLASSPATH=.:$JAVA_HOME/lib:$JRE_HOME/lib #export TOMCAT_HOME=/root/solr/apache-tomcat-6.0.37 #export JRE_HOME=$JAVA_HOME/jre #export PATH=$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$HIVE_HOME/bin:$PATH export PATH=$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$HIVE_HOME/bin:$PATH #FLUME #export FLUME_HOME=/usr/local/hadoop/flume/apache-flume-1.5.0-bin #export FLUME_CONF_DIR=$FLUME_HOME/conf #export PATH=$PATH:$FLUME_HOME/bin #mvn export MAVEN_HOME=/usr/local/apache-maven-3.3.9 export PATH=$PATH:$MAVEN_HOME/bin #scala export SCALA_HOME=/user/local/scala-2.9.3 export PATH=$PATH:$SCALA_HOME/bin [root@cdh1 spark-1.4.0-bin-hadoop2.6]# cat /etc/profile # /etc/profile #Hadoop Env export HADOOP_HOME_WARN_SUPPRESS=1 export JAVA_HOME=/user/local/jdk #export HADOOP_HOME=/user/local/hadoop #export PATH=$JAVA_HOME/bin:$HADOOP_HOME:/bin:$PATH export PATH=$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$PATH #Hadoop Env export HADOOP_HOME_WARN_SUPPRESS=1 export JAVA_HOME=/user/local/jdk export HADOOP_HOME=/user/local/hadoop-2.6.0 export HIVE_HOME=/user/local/hive export PATH=$JAVA_HOME/bin:$HADOOP_HOME:/bin:$PATH export JRE_HOME=$JAVA_HOME/jre export CLASSPATH=.:$JAVA_HOME/lib:$JRE_HOME/lib #export TOMCAT_HOME=/root/solr/apache-tomcat-6.0.37 #export JRE_HOME=$JAVA_HOME/jre #export 
PATH=$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$HIVE_HOME/bin:$PATH export PATH=$JAVA_HOME/bin:$HADOOP_HOME/bin:$HADOOP_HOME/sbin:$HIVE_HOME/bin:$PATH #FLUME #export FLUME_HOME=/usr/local/hadoop/flume/apache-flume-1.5.0-bin #export FLUME_CONF_DIR=$FLUME_HOME/conf #export PATH=$PATH:$FLUME_HOME/bin #mvn export MAVEN_HOME=/usr/local/apache-maven-3.3.9 export PATH=$PATH:$MAVEN_HOME/bin #scala export SCALA_HOME=/user/local/scala-2.9.3 export PATH=$PATH:$SCALA_HOME/bin [root@cdh1 spark-1.4.0-bin-hadoop2.6]# vi /etc/profile [root@cdh1 spark-1.4.0-bin-hadoop2.6]# source /etc/profile [root@cdh1 spark-1.4.0-bin-hadoop2.6]# spark -version bash: spark: command not found [root@cdh1 spark-1.4.0-bin-hadoop2.6]# spark --version bash: spark: command not found [root@cdh1 spark-1.4.0-bin-hadoop2.6]# ls -l total 676 drwxr-xr-x 2 hadoop hadoop 4096 Jun 2 2015 bin -rw-r--r-- 1 hadoop hadoop 561149 Jun 2 2015 CHANGES.txt drwxr-xr-x 2 hadoop hadoop 4096 Jun 2 2015 conf drwxr-xr-x 3 hadoop hadoop 4096 Jun 2 2015 data drwxr-xr-x 3 hadoop hadoop 4096 Jun 2 2015 ec2 drwxr-xr-x 3 hadoop hadoop 4096 Jun 2 2015 examples drwxr-xr-x 2 hadoop hadoop 4096 Jun 2 2015 lib -rw-r--r-- 1 hadoop hadoop 50902 Jun 2 2015 LICENSE -rw-r--r-- 1 hadoop hadoop 22559 Jun 2 2015 NOTICE drwxr-xr-x 6 hadoop hadoop 4096 Jun 2 2015 python drwxr-xr-x 3 hadoop hadoop 4096 Jun 2 2015 R -rw-r--r-- 1 hadoop hadoop 3624 Jun 2 2015 README.md -rw-r--r-- 1 hadoop hadoop 134 Jun 2 2015 RELEASE drwxr-xr-x 2 hadoop hadoop 4096 Jun 2 2015 sbin [root@cdh1 spark-1.4.0-bin-hadoop2.6]# cd conf/ [root@cdh1 conf]# ls -l total 32 -rw-r--r-- 1 hadoop hadoop 202 Jun 2 2015 docker.properties.template -rw-r--r-- 1 hadoop hadoop 303 Jun 2 2015 fairscheduler.xml.template -rw-r--r-- 1 hadoop hadoop 632 Jun 2 2015 log4j.properties.template -rw-r--r-- 1 hadoop hadoop 5565 Jun 2 2015 metrics.properties.template -rw-r--r-- 1 hadoop hadoop 80 Jun 2 2015 slaves.template -rw-r--r-- 1 hadoop hadoop 507 Jun 2 2015 spark-defaults.conf.template 
-rwxr-xr-x 1 hadoop hadoop 3318 Jun 2 2015 spark-env.sh.template [root@cdh1 conf]# cp spark-env.sh..template spark-env.sh cp: cannot stat `spark-env.sh..template': No such file or directory [root@cdh1 conf]# cp spark-env.sh.template spark-env.sh [root@cdh1 conf]# cp slaves.template slaves [root@cdh1 conf]# ls -l total 40 -rw-r--r-- 1 hadoop hadoop 202 Jun 2 2015 docker.properties.template -rw-r--r-- 1 hadoop hadoop 303 Jun 2 2015 fairscheduler.xml.template -rw-r--r-- 1 hadoop hadoop 632 Jun 2 2015 log4j.properties.template -rw-r--r-- 1 hadoop hadoop 5565 Jun 2 2015 metrics.properties.template -rw-r--r-- 1 root root 80 Jun 17 08:40 slaves -rw-r--r-- 1 hadoop hadoop 80 Jun 2 2015 slaves.template -rw-r--r-- 1 hadoop hadoop 507 Jun 2 2015 spark-defaults.conf.template -rwxr-xr-x 1 root root 3318 Jun 17 08:40 spark-env.sh -rwxr-xr-x 1 hadoop hadoop 3318 Jun 2 2015 spark-env.sh.template [root@cdh1 conf]# vi spark-env.sh [root@cdh1 conf]# cat spark-env.sh #!/usr/bin/env bash # This file is sourced when running various Spark programs. # Copy it as spark-env.sh and edit that to configure Spark for your site. 
# Options read when launching programs locally with # ./bin/run-example or ./bin/spark-submit # - HADOOP_CONF_DIR, to point Spark towards Hadoop configuration files # - SPARK_LOCAL_IP, to set the IP address Spark binds to on this node # - SPARK_PUBLIC_DNS, to set the public dns name of the driver program # - SPARK_CLASSPATH, default classpath entries to append # Options read by executors and drivers running inside the cluster # - SPARK_LOCAL_IP, to set the IP address Spark binds to on this node # - SPARK_PUBLIC_DNS, to set the public DNS name of the driver program # - SPARK_CLASSPATH, default classpath entries to append # - SPARK_LOCAL_DIRS, storage directories to use on this node for shuffle and RDD data # - MESOS_NATIVE_JAVA_LIBRARY, to point to your libmesos.so if you use Mesos # Options read in YARN client mode # - HADOOP_CONF_DIR, to point Spark towards Hadoop configuration files # - SPARK_EXECUTOR_INSTANCES, Number of workers to start (Default: 2) # - SPARK_EXECUTOR_CORES, Number of cores for the workers (Default: 1). # - SPARK_EXECUTOR_MEMORY, Memory per Worker (e.g. 1000M, 2G) (Default: 1G) # - SPARK_DRIVER_MEMORY, Memory for Master (e.g. 1000M, 2G) (Default: 512 Mb) # - SPARK_YARN_APP_NAME, The name of your application (Default: Spark) # - SPARK_YARN_QUEUE, The hadoop queue to use for allocation requests (Default: ‘default’) # - SPARK_YARN_DIST_FILES, Comma separated list of files to be distributed with the job. # - SPARK_YARN_DIST_ARCHIVES, Comma separated list of archives to be distributed with the job. # Options for the daemons used in the standalone deploy mode # - SPARK_MASTER_IP, to bind the master to a different IP address or hostname # - SPARK_MASTER_PORT / SPARK_MASTER_WEBUI_PORT, to use non-default ports for the master # - SPARK_MASTER_OPTS, to set config properties only for the master (e.g. 
"-Dx=y") # - SPARK_WORKER_CORES, to set the number of cores to use on this machine # - SPARK_WORKER_MEMORY, to set how much total memory workers have to give executors (e.g. 1000m, 2g) # - SPARK_WORKER_PORT / SPARK_WORKER_WEBUI_PORT, to use non-default ports for the worker # - SPARK_WORKER_INSTANCES, to set the number of worker processes per node # - SPARK_WORKER_DIR, to set the working directory of worker processes # - SPARK_WORKER_OPTS, to set config properties only for the worker (e.g. "-Dx=y") # - SPARK_HISTORY_OPTS, to set config properties only for the history server (e.g. "-Dx=y") # - SPARK_SHUFFLE_OPTS, to set config properties only for the external shuffle service (e.g. "-Dx=y") # - SPARK_DAEMON_JAVA_OPTS, to set config properties for all daemons (e.g. "-Dx=y") # - SPARK_PUBLIC_DNS, to set the public dns name of the master or workers # Generic options for the daemons used in the standalone deploy mode # - SPARK_CONF_DIR Alternate conf dir. (Default: ${SPARK_HOME}/conf) # - SPARK_LOG_DIR Where log files are stored. (Default: ${SPARK_HOME}/logs) # - SPARK_PID_DIR Where the pid file is stored. (Default: /tmp) # - SPARK_IDENT_STRING A string representing this instance of spark. (Default: $USER) # - SPARK_NICENESS The scheduling priority for daemons. (Default: 0) export SCALA_HOME=/user/local/scala-2.9.3 export JAVA_HOME=$JAVA_HOME export HADOOP_HOME=/user/local/hadoop-2.6.0 export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop export SPARK_MASTER_IP=cdh1 export SPARK_DRIVER_MEMORY=512M [root@cdh1 conf]# vi slaves [root@cdh1 conf]# jps 3382 Jps [root@cdh1 conf]# cd .. [root@cdh1 spark-1.4.0-bin-hadoop2.6]# jps 3391 Jps [root@cdh1 spark-1.4.0-bin-hadoop2.6]# start-all.sh This script is Deprecated. 
Instead use start-dfs.sh and start-yarn.sh Starting namenodes on [cdh1] cdh1: starting namenode, logging to /user/local/hadoop-2.6.0/logs/hadoop-root-namenode-cdh1.out localhost: starting datanode, logging to /user/local/hadoop-2.6.0/logs/hadoop-root-datanode-cdh1.out Starting secondary namenodes [cdh1] cdh1: starting secondarynamenode, logging to /user/local/hadoop-2.6.0/logs/hadoop-root-secondarynamenode-cdh1.out starting yarn daemons starting resourcemanager, logging to /user/local/hadoop-2.6.0/logs/yarn-root-resourcemanager-cdh1.out localhost: starting nodemanager, logging to /user/local/hadoop-2.6.0/logs/yarn-root-nodemanager-cdh1.out [root@cdh1 spark-1.4.0-bin-hadoop2.6]# jps 4265 Jps 3902 ResourceManager 3507 NameNode 4000 NodeManager 3710 SecondaryNameNode 3595 DataNode [root@cdh1 spark-1.4.0-bin-hadoop2.6]# cd sbin/start-all.sh bash: cd: sbin/start-all.sh: Not a directory [root@cdh1 spark-1.4.0-bin-hadoop2.6]# sbin/start-all.sh starting org.apache.spark.deploy.master.Master, logging to /user/local/spark-1.4.0-bin-hadoop2.6/sbin/../logs/spark-root-org.apache.spark.deploy.master.Master-1-cdh1.out localhost: starting org.apache.spark.deploy.worker.Worker, logging to /user/local/spark-1.4.0-bin-hadoop2.6/sbin/../logs/spark-root-org.apache.spark.deploy.worker.Worker-1-cdh1.out localhost: failed to launch org.apache.spark.deploy.worker.Worker: localhost: full log in /user/local/spark-1.4.0-bin-hadoop2.6/sbin/../logs/spark-root-org.apache.spark.deploy.worker.Worker-1-cdh1.out [root@cdh1 spark-1.4.0-bin-hadoop2.6]# jps 4384 Master 3902 ResourceManager 3507 NameNode 4000 NodeManager 3710 SecondaryNameNode 3595 DataNode 4583 Jps [root@cdh1 spark-1.4.0-bin-hadoop2.6]# cd sbin/ [root@cdh1 sbin]# ls slaves.sh start-all.sh start-shuffle-service.sh stop-all.sh stop-shuffle-service.sh spark-config.sh start-history-server.sh start-slave.sh stop-history-server.sh stop-slave.sh spark-daemon.sh start-master.sh start-slaves.sh stop-master.sh stop-slaves.sh spark-daemons.sh 
start-mesos-dispatcher.sh start-thriftserver.sh stop-mesos-dispatcher.sh stop-thriftserver.sh [root@cdh1 sbin]# pwd /user/local/spark-1.4.0-bin-hadoop2.6/sbin [root@cdh1 sbin]# cd ../logs/ [root@cdh1 logs]# less spark-root-org.apache.spark.deploy.worker.Worker-1-cdh1.out [root@cdh1 logs]# jps 4706 Jps 4384 Master 3902 ResourceManager 3507 NameNode 4000 NodeManager 3710 SecondaryNameNode 3595 DataNode [root@cdh1 logs]# less spark-root-org.apache.spark.deploy.worker.Worker-1-cdh1.out [root@cdh1 logs]# cd - /user/local/spark-1.4.0-bin-hadoop2.6/sbin [root@cdh1 sbin]# cd ../conf/ [root@cdh1 conf]# source spark-env.sh [root@cdh1 conf]# cd .. [root@cdh1 spark-1.4.0-bin-hadoop2.6]# sbin/start-all.sh org.apache.spark.deploy.master.Master running as process 4384. Stop it first. localhost: starting org.apache.spark.deploy.worker.Worker, logging to /user/local/spark-1.4.0-bin-hadoop2.6/sbin/../logs/spark-root-org.apache.spark.deploy.worker.Worker-1-cdh1.out localhost: failed to launch org.apache.spark.deploy.worker.Worker: localhost: at org.apache.spark.launcher.SparkClassCommandBuilder.buildCommand(SparkClassCommandBuilder.java:98) localhost: at org.apache.spark.launcher.Main.main(Main.java:74) localhost: full log in /user/local/spark-1.4.0-bin-hadoop2.6/sbin/../logs/spark-root-org.apache.spark.deploy.worker.Worker-1-cdh1.out [root@cdh1 spark-1.4.0-bin-hadoop2.6]# jps 4384 Master 3902 ResourceManager 3507 NameNode 4000 NodeManager 3710 SecondaryNameNode 3595 DataNode 4939 Jps [root@cdh1 spark-1.4.0-bin-hadoop2.6]# rpm -qa | grep java java-1.5.0-gcj-1.5.0.0-29.1.el6.i686 java_cup-0.10k-5.el6.i686 gcc-java-4.4.7-17.el6.i686 java-1.5.0-gcj-devel-1.5.0.0-29.1.el6.i686 [root@cdh1 spark-1.4.0-bin-hadoop2.6]# rpm -e --nodes java_cup-0.10k-5.el6.i686 --nodes: unknown option [root@cdh1 spark-1.4.0-bin-hadoop2.6]# rpm -e --nodeps java_cup-0.10k-5.el6.x86_64 error: package java_cup-0.10k-5.el6.x86_64 is not installed [root@cdh1 spark-1.4.0-bin-hadoop2.6]# rpm -e --nodeps 
java_cup-0.10k-5.el6.i686 [root@cdh1 spark-1.4.0-bin-hadoop2.6]# rpm -e --nodeps java-1.5.0-gcj-devel-1.5.0.0-29.1.el6.i686 [root@cdh1 spark-1.4.0-bin-hadoop2.6]# jps 4384 Master 3902 ResourceManager 3507 NameNode 4000 NodeManager 5101 Jps 3710 SecondaryNameNode 3595 DataNode [root@cdh1 spark-1.4.0-bin-hadoop2.6]# sbin/start-all.sh org.apache.spark.deploy.master.Master running as process 4384. Stop it first. localhost: starting org.apache.spark.deploy.worker.Worker, logging to /user/local/spark-1.4.0-bin-hadoop2.6/sbin/../logs/spark-root-org.apache.spark.deploy.worker.Worker-1-cdh1.out localhost: failed to launch org.apache.spark.deploy.worker.Worker: localhost: at org.apache.spark.launcher.SparkClassCommandBuilder.buildCommand(SparkClassCommandBuilder.java:98) localhost: at org.apache.spark.launcher.Main.main(Main.java:74) localhost: full log in /user/local/spark-1.4.0-bin-hadoop2.6/sbin/../logs/spark-root-org.apache.spark.deploy.worker.Worker-1-cdh1.out [root@cdh1 spark-1.4.0-bin-hadoop2.6]# rpm -qa | grep java java-1.5.0-gcj-1.5.0.0-29.1.el6.i686 gcc-java-4.4.7-17.el6.i686 [root@cdh1 spark-1.4.0-bin-hadoop2.6]# rpm -e --nodeps java-1.5.0-gcj-1.5.0.0-29.1.el6.i686 [root@cdh1 spark-1.4.0-bin-hadoop2.6]# jps 4384 Master 5335 Jps 3902 ResourceManager 3507 NameNode 4000 NodeManager 3710 SecondaryNameNode 3595 DataNode [root@cdh1 spark-1.4.0-bin-hadoop2.6]# sbin/start-all.sh org.apache.spark.deploy.master.Master running as process 4384. Stop it first. 
localhost: starting org.apache.spark.deploy.worker.Worker, logging to /user/local/spark-1.4.0-bin-hadoop2.6/sbin/../logs/spark-root-org.apache.spark.deploy.worker.Worker-1-cdh1.out localhost: failed to launch org.apache.spark.deploy.worker.Worker: localhost: JAVA_HOME is not set localhost: full log in /user/local/spark-1.4.0-bin-hadoop2.6/sbin/../logs/spark-root-org.apache.spark.deploy.worker.Worker-1-cdh1.out [root@cdh1 spark-1.4.0-bin-hadoop2.6]# less conf/spark-env.sh [root@cdh1 spark-1.4.0-bin-hadoop2.6]# echo $JAVA_HOME /user/local/jdk [root@cdh1 spark-1.4.0-bin-hadoop2.6]# vi conf/spark-env.sh [root@cdh1 spark-1.4.0-bin-hadoop2.6]# source conf/spark-env.sh [root@cdh1 spark-1.4.0-bin-hadoop2.6]# jps 4384 Master 3902 ResourceManager 3507 NameNode 4000 NodeManager 3710 SecondaryNameNode 3595 DataNode 5594 Jps [root@cdh1 spark-1.4.0-bin-hadoop2.6]# sbin/start-all.sh org.apache.spark.deploy.master.Master running as process 4384. Stop it first. localhost: starting org.apache.spark.deploy.worker.Worker, logging to /user/local/spark-1.4.0-bin-hadoop2.6/sbin/../logs/spark-root-org.apache.spark.deploy.worker.Worker-1-cdh1.out [root@cdh1 spark-1.4.0-bin-hadoop2.6]# jps 4384 Master 3902 ResourceManager 3507 NameNode 4000 NodeManager 3710 SecondaryNameNode 3595 DataNode 5832 Jps 5768 Worker [root@cdh1 spark-1.4.0-bin-hadoop2.6]#
相关文章推荐
- Spark RDD API详解(一) Map和Reduce
- 使用spark和spark mllib进行股票预测
- Spark随谈——开发指南(译)
- Spark,一种快速数据分析替代方案
- eclipse 开发 spark Streaming wordCount
- Understanding Spark Caching
- ClassNotFoundException: scala.Predef$
- Windows 下Spark 快速搭建Spark源码阅读环境
- Spark中将对象序列化存储到hdfs
- 使用java代码提交Spark的hive sql任务,run as java application
- Spark机器学习(一) -- Machine Learning Library (MLlib)
- Spark机器学习(二) 局部向量 Local Vector -- Data Types - MLlib
- Spark机器学习(三) 标注点 Labeled Point -- Data Types
- Spark初探
- Spark Streaming初探
- Spark本地开发环境搭建
- 搭建hadoop/spark集群环境
- Spark HA部署方案
- Spark HA原理架构图
- spark内存概述