Setting up Hadoop and MySQL 8.0
Reference: install-hadoop-on-ubuntu-operating-system-6e0ca4ef9689 (Medium)
export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
export PATH=$PATH:/usr/lib/jvm/java-8-openjdk-amd64/bin
export HADOOP_HOME=~/hadoop-3.3.5/
export PATH=$PATH:$HADOOP_HOME/bin
export PATH=$PATH:$HADOOP_HOME/sbin
export HADOOP_MAPRED_HOME=$HADOOP_HOME
export YARN_HOME=$HADOOP_HOME
export HADOOP_CONF_DIR=$HADOOP_HOME/etc/hadoop
export HADOOP_COMMON_LIB_NATIVE_DIR=$HADOOP_HOME/lib/native
export HADOOP_OPTS="-Djava.library.path=$HADOOP_HOME/lib/native"
export HADOOP_STREAMING=$HADOOP_HOME/share/hadoop/tools/lib/hadoop-streaming-3.3.5.jar
export HADOOP_LOG_DIR=$HADOOP_HOME/logs
export PDSH_RCMD_TYPE=ssh
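These exports typically go at the end of ~/.bashrc (the earlier steps, presumably downloading Hadoop 3.3.5 and installing OpenJDK 8, are not shown here). A minimal sanity check after saving the file, assuming those paths exist:
✍️ source ~/.bashrc      # reload the profile so the new variables take effect
✍️ java -version         # should report OpenJDK 1.8
✍️ hadoop version        # should report Hadoop 3.3.5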
3. Install SSH
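No package command is listed for this step; on Ubuntu the usual packages are ssh and pdsh (pdsh is why PDSH_RCMD_TYPE is exported above), e.g.:
✍️ sudo apt-get update
✍️ sudo apt-get install -y ssh pdsh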
4. Go to the Hadoop configuration directory
✍️ cd hadoop-3.3.5/etc/hadoop
5. Open core-site.xml
✍️ sudo nano core-site.xml (or gedit core-site.xml)
<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://localhost:9000</value>
  </property>
  <property>
    <name>hadoop.proxyuser.dataflair.groups</name>
    <value>*</value>
  </property>
  <property>
    <name>hadoop.proxyuser.dataflair.hosts</name>
    <value>*</value>
  </property>
  <property>
    <name>hadoop.proxyuser.server.hosts</name>
    <value>*</value>
  </property>
  <property>
    <name>hadoop.proxyuser.server.groups</name>
    <value>*</value>
  </property>
</configuration>
6. Open hdfs-site.xml
✍️ sudo nano hdfs-site.xml (or gedit hdfs-site.xml)
<configuration>
  <property>
    <name>dfs.replication</name>
    <value>1</value>
  </property>
  <property>
    <name>dfs.client.block.write.locateFollowingBlock.retries</name>
    <value>10</value>
  </property>
</configuration>
7. Open mapred-site.xml
✍️ sudo nano mapred-site.xml (or gedit mapred-site.xml)
<configuration>
  <property>
    <name>mapreduce.framework.name</name>
    <value>yarn</value>
  </property>
  <property>
    <name>mapreduce.application.classpath</name>
    <value>$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*:$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*</value>
  </property>
</configuration>
8. Open yarn-site.xml
✍️ sudo nano yarn-site.xml (or gedit yarn-site.xml)
<configuration>
  <property>
    <name>yarn.nodemanager.aux-services</name>
    <value>mapreduce_shuffle</value>
  </property>
  <property>
    <name>yarn.nodemanager.env-whitelist</name>
    <value>JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,CLASSPATH_PREPEND_DISTCACHE,HADOOP_YARN_HOME,HADOOP_MAPRED_HOME</value>
  </property>
</configuration>
9. Set up passwordless SSH to localhost
✍️ ssh localhost
✍️ ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
✍️ cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
✍️ chmod 0600 ~/.ssh/authorized_keys
10. Format the HDFS NameNode
✍️ hadoop-3.3.5/bin/hdfs namenode -format
✍️ export PDSH_RCMD_TYPE=ssh
11. Start the Hadoop daemons
✍️ start-all.sh
(This runs start-dfs.sh and start-yarn.sh, starting the NameNode, DataNode, and SecondaryNameNode daemons along with the YARN ResourceManager and NodeManager.)
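A quick way to confirm everything came up, assuming the default Hadoop 3.x ports:
✍️ jps
# Expect NameNode, DataNode, SecondaryNameNode, ResourceManager, and NodeManager in the output.
# The NameNode web UI is then at http://localhost:9870 and the YARN ResourceManager UI at http://localhost:8088.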