配置虚拟机集群环境
修改主机名,获取IP地址
1 2 3 4 5 6 7 8 9 10
| [root@localhost ~]# hostnamectl set-hostname hadoop01 [root@localhost ~]# ifconfig ens33: flags=4163<UP,BROADCAST,RUNNING,MULTICAST> mtu 1500 inet 192.168.213.128 netmask 255.255.255.0 broadcast 192.168.213.255 inet6 fe80::3422:fe22:194f:42dd prefixlen 64 scopeid 0x20<link> ether 00:0c:29:39:5b:d4 txqueuelen 1000 (Ethernet) RX packets 20228 bytes 28822793 (27.4 MiB) RX errors 0 dropped 0 overruns 0 frame 0 TX packets 5069 bytes 310726 (303.4 KiB) TX errors 0 dropped 0 overruns 0 carrier 0 collisions 0
|
关闭防火墙
1 2 3 4 5 6 7 8 9 10
| [root@localhost ~]# systemctl stop firewalld.service [root@localhost ~]# systemctl disable firewalld.service Removed symlink /etc/systemd/system/multi-user.target.wants/firewalld.service. Removed symlink /etc/systemd/system/dbus-org.fedoraproject.FirewallD1.service. [root@localhost ~]# systemctl status firewalld.service ● firewalld.service - firewalld - dynamic firewall daemon Loaded: loaded (/usr/lib/systemd/system/firewalld.service; disabled; vendor preset: enabled) Active: inactive (dead) Docs: man:firewalld(1)
|
新建工作目录
1 2 3
| [root@localhost ~]# mkdir /opt/packages [root@localhost ~]# mkdir /opt/programs
|
安装和配置JDK
- 卸载掉原有的JDK
1 2 3 4 5 6 7 8 9 10 11
| [root@localhost ~]# rpm -qa|grep openjdk java-1.8.0-openjdk-headless-1.8.0.262.b10-1.el7.x86_64 java-1.7.0-openjdk-headless-1.7.0.261-2.6.22.2.el7_8.x86_64 java-1.8.0-openjdk-1.8.0.262.b10-1.el7.x86_64 java-1.7.0-openjdk-1.7.0.261-2.6.22.2.el7_8.x86_64 [root@localhost ~]# rpm -e --nodeps rpm: no packages given for erase [root@localhost ~]# rpm -e --nodeps java-1.8.0-openjdk-headless-1.8.0.262.b10-1.el7.x86_64 [root@localhost ~]# rpm -e --nodeps java-1.7.0-openjdk-headless-1.7.0.261-2.6.22.2.el7_8.x86_64 [root@localhost ~]# rpm -e --nodeps java-1.8.0-openjdk-1.8.0.262.b10-1.el7.x86_64 [root@localhost ~]# rpm -e --nodeps java-1.7.0-openjdk-1.7.0.261-2.6.22.2.el7_8.x86_64
|
- 重新进行安装，在 /etc/profile 中添加环境变量后刷新，验证是否安装成功
1 2 3 4 5 6 7 8
| [root@hadoop01 ~]# rpm -qa|grep openjdk [root@hadoop01 ~]# tar -zxvf /opt/packages/jdk-8u211-linux-x64.tar.gz -C /opt/programs/ [root@hadoop01 ~]# vim /etc/profile [root@hadoop01 ~]# source /etc/profile [root@hadoop01 ~]# java -version java version "1.8.0_211" Java(TM) SE Runtime Environment (build 1.8.0_211-b12) Java HotSpot(TM) 64-Bit Server VM (build 25.211-b12, mixed mode)
|
1 2 3
| export JAVA_HOME=/opt/programs/jdk1.8.0_211 export PATH=$PATH:$JAVA_HOME/bin
|
克隆虚拟机
在/etc/hosts文件中修改如下:
1 2 3 4 5 6
| 127.0.0.1 localhost localhost.localdomain localhost4 localhost4.localdomain4 ::1 localhost localhost.localdomain localhost6 localhost6.localdomain6 192.168.213.128 hadoop01 192.168.213.129 hadoop02 192.168.213.130 hadoop03
|
配置集群各节点SSH免密码登录
1 2 3 4 5
| ssh-keygen ssh-copy-id hadoop01 ssh-copy-id hadoop02 ssh-copy-id hadoop03
|
安装与部署ZooKeeper
1 2 3 4 5 6 7 8 9 10 11
| tar -zxvf /opt/packages/zookeeper-3.4.12.tar.gz -C /opt/programs/ [root@hadoop01 ~]# cd /opt/programs/zookeeper-3.4.12/ [root@hadoop01 zookeeper-3.4.12]# mkdir data logs [root@hadoop01 zookeeper-3.4.12]# cd data [root@hadoop01 data]# echo '1'>myid [root@hadoop01 data]# cd ../conf [root@hadoop01 conf]# cp zoo_sample.cfg zoo.cfg [root@hadoop01 conf]# vim zoo.cfg [root@hadoop01 conf]# scp -r /opt/programs/zookeeper-3.4.12/ root@hadoop02:/opt/programs/ [root@hadoop01 conf]# scp -r /opt/programs/zookeeper-3.4.12/ root@hadoop03:/opt/programs/
|
1 2 3 4 5 6 7
| dataDir=/opt/programs/zookeeper-3.4.12/data
dataLogDir=/opt/programs/zookeeper-3.4.12/logs server.1=hadoop01:2888:3888 server.2=hadoop02:2888:3888 server.3=hadoop03:2888:3888
|
1 2 3 4 5 6 7 8 9 10 11
| [root@hadoop01 conf]# ssh hadoop02 Last login: Fri Mar 31 03:43:51 2023 from hadoop01 [root@hadoop02 ~]# echo '2'>/opt/programs/zookeeper-3.4.12/data/myid [root@hadoop02 ~]# ssh hadoop03 Last login: Fri Mar 31 03:43:57 2023 from hadoop02 [root@hadoop03 ~]# echo '3'>/opt/programs/zookeeper-3.4.12/data/myid [root@hadoop01 conf]# zkServer.sh start
|
1 2
| export ZOOKEEPER_HOME=/opt/programs/zookeeper-3.4.12 export PATH=$PATH:$JAVA_HOME/bin:$ZOOKEEPER_HOME/bin
|
安装与部署Hadoop（高可用集群）
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17
| [root@hadoop01 ~]# tar -zxvf /opt/packages/hadoop-2.7.6.tar.gz -C /opt/programs/ [root@hadoop01 ~]# cd /opt/programs/hadoop-2.7.6/etc/hadoop/ [root@hadoop01 hadoop]# vim core-site.xml [root@hadoop01 hadoop]# vim hdfs-site.xml [root@hadoop01 hadoop]# mv mapred-site.xml.template mapred-site.xml [root@hadoop01 hadoop]# vim mapred-site.xml [root@hadoop01 hadoop]# vim yarn-site.xml [root@hadoop01 hadoop]# vim slaves [root@hadoop01 hadoop]# vim hadoop-env.sh [root@hadoop01 hadoop]# vim mapred-env.sh [root@hadoop01 hadoop]# vim yarn-env.sh [root@hadoop01 hadoop]# scp -r /opt/programs/hadoop-2.7.6/ root@hadoop02:/opt/programs/ [root@hadoop01 hadoop]# scp -r /opt/programs/hadoop-2.7.6/ root@hadoop03:/opt/programs/
[root@hadoop01 hadoop]# vim /etc/profile [root@hadoop01 hadoop]# source /etc/profile
|
1 2 3 4 5
| export JAVA_HOME=/opt/programs/jdk1.8.0_211
export HADOOP_HOME=/opt/programs/hadoop-2.7.6 export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
|
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54
| [root@hadoop01 hadoop]# hadoop-daemon.sh start journalnode starting journalnode, logging to /opt/programs/hadoop-2.7.6/logs/hadoop-root-journalnode-hadoop01.out [root@hadoop01 hadoop]# ssh hadoop02 Last login: Fri Mar 31 04:15:59 2023 from hadoop01 [root@hadoop02 ~]# hadoop-daemon.sh start journalnode starting journalnode, logging to /opt/programs/hadoop-2.7.6/logs/hadoop-root-journalnode-hadoop02.out [root@hadoop02 ~]# ssh hadoop03 Last login: Fri Mar 31 04:16:33 2023 from hadoop02 [root@hadoop03 ~]# hadoop-daemon.sh start journalnode starting journalnode, logging to /opt/programs/hadoop-2.7.6/logs/hadoop-root-journalnode-hadoop03.out [root@hadoop01 hadoop]# hdfs namenode -format ... 23/03/31 04:19:26 INFO common.Storage: Storage directory /opt/programs/hadoop-2.7.6/tmp/dfs/name has been successfully formatted. ... [root@hadoop01 hadoop]# cd /opt/programs/hadoop-2.7.6/ [root@hadoop01 hadoop-2.7.6]# scp -r tmp/ root@hadoop02:/opt/programs/hadoop-2.7.6/ [root@hadoop01 hadoop-2.7.6]# hdfs zkfc -formatZK [root@hadoop01 hadoop-2.7.6]# start-dfs.sh [root@hadoop01 hadoop-2.7.6]# start-yarn.sh
[root@hadoop01 zookeeper-3.4.12]# jps 2692 QuorumPeerMain 3045 DataNode 3224 JournalNode 3387 DFSZKFailoverController 2941 NameNode 3549 ResourceManager 3662 NodeManager 3775 Jps [root@hadoop01 zookeeper-3.4.12]# ssh hadoop02 Last login: Fri Mar 31 05:11:46 2023 [root@hadoop02 ~]# jps 2770 NameNode 2917 JournalNode 2840 DataNode 3144 NodeManager 3321 Jps 2635 QuorumPeerMain 3005 DFSZKFailoverController [root@hadoop02 ~]# ssh hadoop03 Last login: Fri Mar 31 05:11:42 2023 [root@hadoop03 ~]# jps 2789 JournalNode 3077 Jps 2712 DataNode 2586 QuorumPeerMain 2910 NodeManager [root@hadoop03 ~]#
|
安装Scala与部署Spark
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17
| [root@hadoop01 ~]# tar -zxvf /opt/packages/scala-2.11.8.tgz -C /opt/programs/ [root@hadoop01 ~]# source /etc/profile [root@hadoop01 ~]# scala -version Scala code runner version 2.11.8 -- Copyright 2002-2016, LAMP/EPFL [root@hadoop01 ~]# tar -zxvf /opt/packages/spark-2.3.3-bin-hadoop2.7.tgz -C /opt/programs/
[root@hadoop01 ~]# cd /opt/programs/spark-2.3.3-bin-hadoop2.7/conf [root@hadoop01 conf]# cp spark-env.sh.template spark-env.sh [root@hadoop01 conf]# vim spark-env.sh [root@hadoop01 conf]# cp slaves.template slaves [root@hadoop01 conf]# vim slaves [root@hadoop01 conf]# scp -r /opt/programs/spark-2.3.3-bin-hadoop2.7 root@hadoop02:/opt/programs/
[root@hadoop01 conf]# scp -r /opt/programs/spark-2.3.3-bin-hadoop2.7 root@hadoop03:/opt/programs/
|
1 2 3 4 5 6 7 8 9 10
| export SCALA_HOME=/opt/programs/scala-2.11.8 export PATH=$PATH:$SCALA_HOME/bin
export JAVA_HOME=/opt/programs/jdk1.8.0_211 export SCALA_HOME=/opt/programs/scala-2.11.8 export HADOOP_HOME=/opt/programs/hadoop-2.7.6 export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop export SPARK_MASTER_HOST=hadoop01
|
安装与部署HBase
1 2 3 4 5 6 7 8 9
| [root@hadoop01 ~]# tar -zxvf /opt/packages/hbase-2.0.2-bin.tar.gz -C /opt/programs/ [root@hadoop01 ~]# cd /opt/programs/hbase-2.0.2/conf/ [root@hadoop01 conf]# vim hbase-env.sh [root@hadoop01 conf]# vim hbase-site.xml [root@hadoop01 conf]# vim regionservers [root@hadoop01 conf]# scp -r /opt/programs/hbase-2.0.2/ root@hadoop02:/opt/programs/ [root@hadoop01 conf]# scp -r /opt/programs/hbase-2.0.2/ root@hadoop03:/opt/programs/
|
1 2 3 4 5 6 7
| export JAVA_HOME=/opt/programs/jdk1.8.0_211 export HBASE_MANAGES_ZK=false
export HBASE_HOME=/opt/programs/hbase-2.0.2 export PATH=$PATH:$HBASE_HOME/bin
|
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32
| [root@hadoop01 conf]# jps 3392 ResourceManager 3089 JournalNode 3250 DFSZKFailoverController 3506 NodeManager 2803 NameNode 7748 HRegionServer 2540 QuorumPeerMain 2909 DataNode 7613 HMaster 7807 Jps [root@hadoop01 conf]# ssh hadoop02 Last login: Fri Mar 31 19:07:15 2023 from hadoop01 [root@hadoop02 ~]# jps 4292 HRegionServer 1977 DataNode 4442 Jps 1899 NameNode 1788 QuorumPeerMain 2317 NodeManager 2062 JournalNode 2175 DFSZKFailoverController [root@hadoop02 ~]# ssh hadoop03 Last login: Fri Mar 31 19:03:58 2023 from hadoop02 [root@hadoop03 ~]# jps 1970 JournalNode 1783 QuorumPeerMain 3753 HRegionServer 3913 Jps 2093 NodeManager 1887 DataNode
|
安装Kafka
1 2
| [root@hadoop01 spark-2.3.3-bin-hadoop2.7]# tar -zxvf /opt/packages/kafka_2.11-2.0.0.tgz -C /opt/programs/
|
评论区
欢迎你留下宝贵的意见,昵称输入QQ号会显示QQ头像哦~