Hadoop Installation
2. # switch to root and give the user sudo rights
su
sudo visudo
# add this line (here the user is "usuario")
usuario ALL=(ALL:ALL) ALL
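The notes assume OpenJDK 11 is already installed at the path used below; if it is not, a minimal sketch (package name assumed for Debian/Ubuntu):
sudo apt install openjdk-11-jdk
java -version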
# set JAVA_HOME in ~/.bashrc (add these lines at the end)
nano ~/.bashrc
export JAVA_HOME=/usr/lib/jvm/java-11-openjdk-amd64
export PATH=$PATH:$JAVA_HOME/bin
# reload the environment variables
source ~/.bashrc
7. # verify JAVA_HOME
echo $JAVA_HOME
8. # install ssh and pdsh (pdsh is used by Hadoop's start/stop scripts)
sudo apt install ssh pdsh
# set pdsh's default remote command to ssh
cd /etc/pdsh/
sudo nano rcmd_default
# the file must contain only this line
ssh
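The notes do not show it, but start-dfs.sh needs passwordless ssh to localhost; a minimal sketch following the standard single-node setup:
ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa
cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
chmod 0600 ~/.ssh/authorized_keys
# should log in without asking for a password
ssh localhost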
12. Hadoop installation
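These steps assume hadoop-3.3.6.tar.gz is already in Documentos; if not, it could be downloaded first (mirror URL assumed from the Apache archive layout):
wget -P ~/Documentos https://archive.apache.org/dist/hadoop/common/hadoop-3.3.6/hadoop-3.3.6.tar.gz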
# extract hadoop
cd Documentos
tar -xzvf hadoop-3.3.6.tar.gz
mv hadoop-3.3.6 hadoop
14. Verify where Hadoop was installed
cd hadoop/
pwd
./bin/hadoop
# set HADOOP_HOME in ~/.bashrc (add these lines at the end)
nano ~/.bashrc
export HADOOP_HOME=/home/usuario/Documentos/hadoop
export PATH=$PATH:$HADOOP_HOME/bin
source ~/.bashrc
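To check that the new PATH entry works, the version can be printed from any directory:
hadoop version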
nano etc/hadoop/hadoop-env.sh
# add the following line below the commented-out export JAVA_HOME
export JAVA_HOME=/usr/lib/jvm/java-11-openjdk-amd64
nano etc/hadoop/core-site.xml
# inside <configuration>:
<property>
  <name>fs.defaultFS</name>
  <value>hdfs://localhost:9000</value>
</property>
# dfs.replication belongs in hdfs-site.xml, not core-site.xml
nano etc/hadoop/hdfs-site.xml
# inside <configuration>:
<property>
  <name>dfs.replication</name>
  <value>1</value>
</property>
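Before the first start, HDFS also has to be formatted; the notes skip this step, but the standard single-node setup runs it from the hadoop directory:
bin/hdfs namenode -format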
sbin/start-dfs.sh
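To confirm the daemons started, jps (included with the JDK) should list NameNode, DataNode and SecondaryNameNode:
jps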
23. # check the NameNode web UI in the browser
http://localhost:9870/
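To actually put data into HDFS, the user's home directory is normally created next (username "usuario" assumed, matching HADOOP_HOME above):
bin/hdfs dfs -mkdir -p /user/usuario
bin/hdfs dfs -ls /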
nano etc/hadoop/mapred-site.xml
# inside <configuration>:
<property>
  <name>mapreduce.framework.name</name>
  <value>yarn</value>
</property>
<property>
  <name>mapreduce.application.classpath</name>
  <value>$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/*:$HADOOP_MAPRED_HOME/share/hadoop/mapreduce/lib/*</value>
</property>
nano etc/hadoop/yarn-site.xml
# inside <configuration>: (CLUSTER is the hostname of the machine running the ResourceManager)
<property>
  <name>yarn.resourcemanager.hostname</name>
  <value>CLUSTER</value>
</property>
<property>
  <name>yarn.resourcemanager.address</name>
  <value>CLUSTER:8032</value>
</property>
<property>
  <name>yarn.resourcemanager.webapp.address</name>
  <value>0.0.0.0:8088</value>
</property>
<property>
  <name>yarn.nodemanager.aux-services</name>
  <value>mapreduce_shuffle</value>
</property>
<property>
  <name>yarn.nodemanager.env-whitelist</name>
  <value>JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,CLASSPATH_PREPEND_DISTCACHE,HADOOP_YARN_HOME,HADOOP_HOME,PATH,LANG,TZ,HADOOP_MAPRED_HOME</value>
</property>
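The notes end here; with this configuration, YARN would normally be started and checked like this (a sketch, assuming the same single-node layout):
sbin/start-yarn.sh
# ResourceManager web UI
http://localhost:8088/
# optional smoke test with the bundled examples jar (name assumed to match the 3.3.6 release)
bin/hadoop jar share/hadoop/mapreduce/hadoop-mapreduce-examples-3.3.6.jar pi 2 10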