
Installing Spark and Scala

  1. Download Spark
  

  http://mirrors.cnnic.cn/apache/spark/spark-1.3.0/spark-1.3.0-bin-hadoop2.3.tgz
  

  

  2. Download Scala
  

  http://www.scala-lang.org/download/2.10.5.html
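  Both archives can also be fetched from the command line. A minimal sketch: the Spark mirror URL is the one from step 1, while the exact scala-2.10.5.tgz link should be taken from the download page above.

  # Spark binary distribution (mirror URL from step 1)
  wget http://mirrors.cnnic.cn/apache/spark/spark-1.3.0/spark-1.3.0-bin-hadoop2.3.tgz
  # scala-2.10.5.tgz: use the archive link listed on the download page above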
  

  

  3. Install Scala
  mkdir /usr/lib/scala
  tar -zxvf scala-2.10.5.tgz
  mv scala-2.10.5 /usr/lib/scala
  This leaves the distribution at /usr/lib/scala/scala-2.10.5, which is what SCALA_HOME points to in the next step.
  

  

  4. Set the Scala path
  vim /etc/bashrc
  export SCALA_HOME=/usr/lib/scala/scala-2.10.5
  export PATH=$SCALA_HOME/bin:$PATH
  

  source /etc/bashrc
  

  scala -version
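  If the new PATH took effect, the version check above should succeed. A quick extra sanity check (a minimal sketch, assuming the paths above):

  # confirm the shell now resolves scala from the new SCALA_HOME
  which scala        # expect /usr/lib/scala/scala-2.10.5/bin/scala
  echo $SCALA_HOME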
  

  

  5. Distribute to the other nodes
  scp -r /usr/lib/scala/ hd2:/usr/lib/scala
  scp -r /usr/lib/scala/ hd3:/usr/lib/scala
  scp -r /usr/lib/scala/ hd4:/usr/lib/scala
  scp -r /usr/lib/scala/ hd5:/usr/lib/scala
  

  scp /etc/bashrc hd2:/etc/bashrc
  scp /etc/bashrc hd3:/etc/bashrc
  scp /etc/bashrc hd4:/etc/bashrc
  scp /etc/bashrc hd5:/etc/bashrc
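  The four pairs of scp commands above can also be written as one loop (a sketch, assuming passwordless SSH from hd1 to hd2-hd5 is already set up):

  # push the Scala install and the updated bashrc to every other node
  for h in hd2 hd3 hd4 hd5; do
    scp -r /usr/lib/scala/ $h:/usr/lib/scala
    scp /etc/bashrc $h:/etc/bashrc
  done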
  

  

  6. Install Spark
  tar -zxvf spark-1.3.0-bin-hadoop2.3.tgz
  mkdir /usr/local/spark
  mv spark-1.3.0-bin-hadoop2.3 /usr/local/spark
  

  

  vim /etc/bashrc
  export SPARK_HOME=/usr/local/spark/spark-1.3.0-bin-hadoop2.3
  export PATH=$SCALA_HOME/bin:$SPARK_HOME/bin:$PATH
  

  

  source /etc/bashrc
  

  cd /usr/local/spark/spark-1.3.0-bin-hadoop2.3/conf/
  cp spark-env.sh.template spark-env.sh
  

  

  vim spark-env.sh
  

  export JAVA_HOME=/java
  export SCALA_HOME=/usr/lib/scala/scala-2.10.5
  export SPARK_HOME=/usr/local/spark/spark-1.3.0-bin-hadoop2.3
  # the standalone master runs on hd1
  export SPARK_MASTER_IP=192.168.137.101
  # memory available to each worker and to the driver
  export SPARK_WORKER_MEMORY=10g
  export SPARK_DRIVER_MEMORY=9g
  # lets Spark pick up the Hadoop configuration (HDFS, YARN)
  export HADOOP_CONF_DIR=/home/hadoop/hadoop/etc/hadoop
  export SPARK_LIBRARY_PATH=$SPARK_HOME/lib
  export SCALA_LIBRARY_PATH=$SPARK_LIBRARY_PATH
  

  cp slaves.template slaves
  

  

  vim slaves
  

  hd1
  hd2
  hd3
  hd4
  hd5
  

  

  7. Distribute to the other nodes
  scp /etc/bashrc hd2:/etc
  scp /etc/bashrc hd3:/etc
  scp /etc/bashrc hd4:/etc
  scp /etc/bashrc hd5:/etc
  

  scp -r /usr/local/spark/spark-1.3.0-bin-hadoop2.3 hd2:/usr/local/spark/
  scp -r /usr/local/spark/spark-1.3.0-bin-hadoop2.3 hd3:/usr/local/spark/
  scp -r /usr/local/spark/spark-1.3.0-bin-hadoop2.3 hd4:/usr/local/spark/
  scp -r /usr/local/spark/spark-1.3.0-bin-hadoop2.3 hd5:/usr/local/spark/
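  As in step 5, a loop keeps this shorter, and it is worth checking each node afterwards (a sketch, assuming root SSH access and that the remote shell is bash so /etc/bashrc can be sourced):

  # confirm every node now sees the Spark binaries on its PATH
  for h in hd2 hd3 hd4 hd5; do
    ssh $h 'source /etc/bashrc; which spark-submit'
  done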
  

  

  8. Start the cluster
  On hd1:
  cd $SPARK_HOME/sbin
  ./start-all.sh
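  To confirm the cluster came up, a minimal smoke test (a sketch; 8080 is the default port of the standalone master's web UI, and hd1 runs a Worker as well since it is listed in slaves):

  # on hd1: a Master process should be running (plus a Worker, since hd1 is in slaves)
  jps
  # run the bundled SparkPi example as a smoke test
  $SPARK_HOME/bin/run-example SparkPi 10

  The master web UI should also be reachable at http://192.168.137.101:8080 and list the five workers.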
  



