Build Hadoop, Spark, HBase cluster
1, arc land (git + arc review workflow, shell history)
git branch
git status
git reset multicloud/qcloud/cluster_management/spark_cluster/dependent_file/spark-1.6.2-bin-hadoop2.6.tgz
git status
git add multicloud/qcloud/tools/ssh.py
git status
git log
git commit -a
git status
git log
git pull --rebase origin master
git log
arc diff
arc diff
history
git checkout -b test1
git status
git branch
ls
cd core/
ls
cd ..
cd multicloud/
ls
cd qcloud/
ls
cd ..
cd ..
ls
ls multicloud/qcloud/spark_cluster/README.md
vim multicloud/qcloud/spark_cluster/README.md
git branch
git status
git commit -a 'test1'
git commit -a
git log
clear
git pull --rebase origin master
arc diff
pwd
arc help
source ~/.bashrc
arc
arc help
ls ~/.bashrc
arc diff
arc diff
arc land
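Condensed, the history above is the usual git + Arcanist review loop (the branch name and file path are the ones from the log):
git checkout -b test1                      # work on a feature branch
git add multicloud/qcloud/tools/ssh.py     # stage the changed file
git commit -a                              # commit tracked changes (opens the editor for a message)
git pull --rebase origin master            # rebase the branch onto the latest master
arc diff                                   # create/update the Phabricator revision for review
arc land                                   # after acceptance, land the change onto master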
2, ssh aliyun
chmod 600 aliyun.pem    (ssh refuses a world-readable key, so set permissions first)
ssh -i aliyun.pem root@101.201.233.28
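Optionally, a host alias in ~/.ssh/config avoids retyping the key path and IP; the alias name "aliyun" below is just an example, not from the original notes:
# ~/.ssh/config
Host aliyun
    HostName 101.201.233.28
    User root
    IdentityFile ~/aliyun.pem
# afterwards:
ssh aliyun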
3, ssh passwordless login
(1) on the local machine:
ssh-keygen -t rsa
scp -i ~/aliyun.pem ...   (copy the generated public key to 172.17.148.92)
(2) on 172.17.148.92:
cat ... >> authorized_keys
chmod 600 authorized_keys
http://blog.csdn.net/m15851813962/article/details/53150967
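Putting the two steps together, a minimal sketch; the key path ~/.ssh/id_rsa.pub, the remote user root, and the /tmp staging path are assumptions, not from the notes:
# local machine: generate the key pair (default path ~/.ssh/id_rsa)
ssh-keygen -t rsa
# copy the public key to the remote host (user and target path assumed)
scp -i ~/aliyun.pem ~/.ssh/id_rsa.pub root@172.17.148.92:/tmp/id_rsa.pub
# on 172.17.148.92: append the key and tighten permissions
mkdir -p ~/.ssh && chmod 700 ~/.ssh
cat /tmp/id_rsa.pub >> ~/.ssh/authorized_keys
chmod 600 ~/.ssh/authorized_keys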
4, docker
docker exec -it ssss /bin/bash
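Here "ssss" stands for a container name; docker ps shows which names/ids are available:
# list running containers (see the NAMES / CONTAINER ID columns)
docker ps
# open an interactive shell in the chosen container
docker exec -it <container_name_or_id> /bin/bash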
5, scp:
scp -i ~/work/keys/alicloud.pem * hadoopmaster:/root
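hadoopmaster has to resolve to the master node; a sketch assuming an /etc/hosts entry (the <master-ip> placeholder is hypothetical):
# map the hostname to the master's address (replace <master-ip>)
echo "<master-ip>  hadoopmaster" >> /etc/hosts
# copy files; add -r when directories are included
scp -i ~/work/keys/alicloud.pem -r * hadoopmaster:/root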
6, firewall
systemctl stop firewalld
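systemctl stop only lasts until the next reboot; to keep the firewall off permanently and verify:
systemctl stop firewalld       # stop the running service
systemctl disable firewalld    # do not start it again on boot
systemctl status firewalld     # confirm it is inactive / disabled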
7, auto build command
spark:
python auto_build_spark.py -f /Users/chunyueli/work/github/infra/multicloud/qcloud/cluster_management/spark_cluster/dev_conf/alicloud_dev_spark_cluster_conf.json -k /Users/chunyueli/work/keys/alicloud.pem -F /Users/chunyueli/work/github/infra/multicloud/qcloud/cluster_management/spark_cluster/restart_qcloud_cluster.py -i 114.240.80.157 -a restart
hadoop:
python auto_build_hadoop.py -f /Users/chunyueli/work/github/infra/multicloud/qcloud/cluster_management/spark_cluster/dev_conf/alicloud_dev_hadoop_cluster_conf.json -k /Users/chunyueli/work/keys/alicloud.pem -F /Users/chunyueli/work/github/infra/multicloud/qcloud/cluster_management/spark_cluster/restart_qcloud_cluster.py -i 114.240.85.26
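A commented template for the invocations above; the flag meanings are inferred from the argument values and are assumptions (auto_build_spark.py / auto_build_hadoop.py are not shown here):
# assumed flag meanings, not confirmed by the scripts themselves:
#   -f  cluster configuration JSON
#   -k  SSH private key used to reach the cluster nodes
#   -F  path to the restart helper script
#   -i  master node public IP
#   -a  action to perform (only used in the spark invocation above)
python auto_build_spark.py \
    -f <cluster_conf.json> \
    -k <ssh_key.pem> \
    -F <restart_qcloud_cluster.py> \
    -i <master_ip> \
    -a restart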
8, docker command