BDA Lab Manual
BDA Lab Manual
Result
50 40 30 20 10
c) QUEUES:
package queues;
import java.util.*;
class GenQueue<E>
{
    /** Backing deque: elements enter at the tail and leave from the head (FIFO). */
    private final Deque<E> items = new LinkedList<E>();

    /** Appends {@code item} to the tail of the queue. */
    public void enqueue(E item)
    {
        items.addLast(item);
    }

    /** Removes and returns the head of the queue, or {@code null} if the queue is empty. */
    public E dequeue()
    {
        return items.poll();
    }

    /** Returns {@code true} while at least one element remains queued. */
    public boolean hasItems()
    {
        return !items.isEmpty();
    }

    /** Returns the number of elements currently queued. */
    public int size()
    {
        return items.size();
    }

    /**
     * Drains every element of {@code q} into this queue, preserving order.
     * The bounded wildcard lets a queue of any subtype of E be drained here.
     */
    public void addItems(GenQueue<? extends E> q)
    {
        while (q.hasItems())
        {
            items.addLast(q.dequeue());
        }
    }
}
public class GenQueueTest
{
    /**
     * Demonstrates generic queues with bounded wildcards: a queue of the
     * subtype HourlyEmployee is drained into a queue of the supertype
     * Employee, then each entry is printed in FIFO order.
     */
    public static void main(String[] args)
    {
        // Build a queue of hourly workers.
        GenQueue<HourlyEmployee> hourly = new GenQueue<HourlyEmployee>();
        hourly.enqueue(new HourlyEmployee("Trump", "Donald"));
        hourly.enqueue(new HourlyEmployee("Gates", "Bill"));
        hourly.enqueue(new HourlyEmployee("Forbes", "Steve"));

        // addItems accepts GenQueue<? extends Employee>, so the subtype
        // queue can be drained straight into the supertype queue.
        GenQueue<Employee> employees = new GenQueue<Employee>();
        employees.addItems(hourly);

        // Print "first last" for each employee, head first.
        while (employees.hasItems())
        {
            Employee current = employees.dequeue();
            System.out.println(current.firstName + " " + current.lastName);
        }
    }
}
class Employee
{
    // Name fields are public to keep the manual's original design;
    // production code would encapsulate them behind accessors.
    public String lastName;
    public String firstName;

    /** Creates an employee with both name fields unset (null). */
    public Employee()
    {
    }

    /** Creates an employee from a last name and a first name. */
    public Employee(String last, String first)
    {
        lastName = last;
        firstName = first;
    }

    /** Returns the display form {@code "first last"}. */
    @Override
    public String toString()
    {
        return firstName + " " + lastName;
    }
}
class HourlyEmployee extends Employee
{
    // Pay rate per hour; left at the default 0.0 — no constructor sets it.
    public double hourlyRate;

    /** Creates an hourly employee, delegating name storage to Employee. */
    public HourlyEmployee(String last, String first)
    {
        super(last, first);
    }
}
d) SETS:
package sets;
import java.util.*;
public class SetDemo {
    /**
     * Builds a HashSet from the given int array. Duplicate values collapse to
     * a single entry because a Set never holds repeated elements.
     *
     * @param values the values to add; may be empty
     * @return the set of distinct values
     */
    static Set<Integer> buildSet(int[] values) {
        Set<Integer> set = new HashSet<Integer>();
        // The original manual looped with i < 5, silently skipping the final
        // element (the duplicate 22) — so the demo never actually showed
        // duplicate elimination. Iterating the whole array fixes that while
        // printing the same set for this sample data.
        for (int value : values) {
            set.add(value);
        }
        return set;
    }

    /** Demonstrates duplicate elimination: 22 appears twice in the input but once in the set. */
    public static void main(String args[]) {
        int count[] = {34, 22, 10, 60, 30, 22};
        System.out.println(buildSet(count));
    }
}
e) MAPS
package maps;
import java.awt.Color;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
/**
This program demonstrates a map that maps names to colors.
*/
public class MapDemo
{
    /**
     * Demonstrates a Map from person names to favorite colors: populate the
     * map, then print every entry as {@code "key : value"}.
     */
    public static void main(String[] args)
    {
        Map<String, Color> favoriteColors = new HashMap<String, Color>();
        favoriteColors.put("Juliet", Color.BLUE);
        favoriteColors.put("Romeo", Color.GREEN);
        favoriteColors.put("Adam", Color.RED);
        favoriteColors.put("Eve", Color.BLUE);

        // Walk the key set and look up each value; Color prints via its
        // built-in toString (e.g. java.awt.Color[r=0,g=0,b=255]).
        for (String name : favoriteColors.keySet())
        {
            System.out.println(name + " : " + favoriteColors.get(name));
        }
    }
}
// NOTE(review): this fragment is truncated in the manual — the push() call is
// cut off mid-argument, and the MyStack class, the remaining statements, and
// the closing braces are missing. It will not compile as printed; the full
// stack example should be restored from the original source.
public static void main(String[] args) {
MyStack theStack = new MyStack(10);
theStack.push(1
export JAVA_HOME=/home/user/Downloads/jdk1.8.0_77
export PATH=$PATH:$JAVA_HOME/bin
Step 11: Now apply all the changes into the current running system.
Close terminal. And open again or
user@user-Thinkceter-E73:- $ source ~/.bashrc
Step 12: To verify the Java path, type echo $JAVA_HOME in the terminal (note: shell variable names are case-sensitive, so $java_home will not work).
HADOOP INSTALLATION
Step 13: Open hadoop/etc/hadoop/hadoop-env.sh file and add this line at the end
of the file.
export JAVA_HOME=/home/user/Downloads/jdk1.8.0_77
save and exit.
Step 14: open bash rc file using the following command.
user@user-Thinkceter-E73:- $ sudo gedit .bashrc
user@user-Thinkceter-E73:~/input$ vi f2.txt
f1.txt
user@user-Thinkceter-E73:~/input$ vi f1.txt
inception
requirements
analysis
design
development
implementation
testing
deployment
f2.txt
user@user-Thinkceter-E73:~/input$ vi f2.txt
inception
eloboration
elicitaion
implemenation
inception
development
tesing
step 18: Now execute the wordcount program using the following command
hadoop jar /home/user/Downloads/hadoop/share/hadoop/mapreduce/hadoop- mapreduce-examples-2.7.2.jar
wordcount input op
It displays the output as:
user@user-Thinkceter-E73:- $ cat op/*
inception 3
requirements 1
analysis 1
design 1
development 2
implemenatation 2
deployment 1
testing 2
4. Create another file in the same directory containing a list of some other names, repeating some
names from the first file.
$ vi f2.txt
inception
eloboration
elicitaion
implemenation
inception
development
tesing
7. Transfer local directory with two files into hdfs file system
$ hdfs dfs -put /home/user/input/f1 /kits/input/f1
$ hdfs dfs -put /home/user/input/f2 /kits/input/f2
inception 3
requirements 1
analysis 1
design 1
development 2
implemenatation 2
deployment 1
testing 2
Week 5:
3. Implement the following file management tasks in Hadoop
Adding files and directories
Retrieving files
Deleting files
syntax:
$ hadoop fs -mkdir path
example:
qisit@qisit-Vostro-1500:~$ hadoop fs -mkdir /kits/cse
to view directories created:
syntax:
$ hadoop fs -ls path
example:
$hadoop fs -ls kits
Found 1 items
drwxr-xr-x - qisit supergroup 0 2016-06-17 14:45 /kits/cse
i)copyFromLocal: This command copies files from Local File System to HDFS
Syntax:
$hadoop fs -copyFromLocal <localsystempath> <HDFSPath>
Example:
hadoop fs -copyFromLocal /home/qisit/input.txt /kits/cse
ii) put: This command is also used to copy file from Local File System to HDFS.
Syntax:
$hadoop fs -put <localsystempath> <HDFSPath>
Example:
hadoop fs -put /home/qisit/input1.txt /kits/cse
To view files in HDFS:
Example: qisit@qisit-Vostro-1500:~$ hadoop fs -ls -R /kits
drwxr-xr-x - qisit supergroup 0 2016-06-17 15:27 /kits/cse
-rw-r--r-- 1 qisit supergroup 0 2016-06-17 15:21 /kits/cse/input.txt
-rw-r--r-- 1 qisit supergroup 78 2016-06-17 15:27 /kits/cse/input1.txt
qisit@qisit-Vostro-1500:~$ ls
qisit@qisit-Vostro-1500:~$ ls
Desktop dsjava input1 jhansi Music Templates
Documents examples.desktop input1.txt jout Pictures Videos
Downloads input input.txt kits Public
iii) DELETING FILES:
To delete file in HDFS:
Syntax:
$hadoop fs -rm filepath
Example:
qisit@qisit-Vostro-1500:~$ hadoop fs -rm /kits/cse/input.txt
Deletion interval = 0 minutes, Emptier interval = 0 minutes.
Deleted /kits/cse/input.txt
To delete directories in HDFS:
Syntax:
$hadoop fs -rmr directorypath
Example:
qisit@qisit-Vostro-1500:~$ hadoop fs -rmr /kits