Soft Computing File
Soft Computing File
1
Output-
2
2. Generate ANDNOT function using McCulloch-Pitts neural
net.
/**
 * McCulloch-Pitts neuron implementing the ANDNOT function (A AND NOT B).
 *
 * The neuron uses a fixed excitatory weight (+1) on input A, a fixed
 * inhibitory weight (-1) on input B, and fires when the weighted sum of
 * its inputs reaches the threshold of 0.5.
 */
public class ANDNOTFunction {

    /**
     * Computes A AND NOT B with a single McCulloch-Pitts neuron.
     *
     * @param inputA first binary input (0 or 1)
     * @param inputB second binary input (0 or 1)
     * @return 1 when the neuron fires (only for A=1, B=0), otherwise 0
     */
    public static int andNot(int inputA, int inputB) {
        final int excitatoryWeight = 1;   // weight on A
        final int inhibitoryWeight = -1;  // weight on B
        final double threshold = 0.5;

        double net = inputA * excitatoryWeight + inputB * inhibitoryWeight;
        if (net >= threshold) {
            return 1;
        }
        return 0;
    }

    /** Prints the ANDNOT truth table for all four binary input pairs. */
    public static void main(String[] args) {
        System.out.println("A B Output (A AND NOT B)");
        // Nested loops enumerate (0,0), (0,1), (1,0), (1,1) in the same
        // order as the original hard-coded truth table.
        for (int a = 0; a <= 1; a++) {
            for (int b = 0; b <= 1; b++) {
                System.out.printf("%d %d %d%n", a, b, andNot(a, b));
            }
        }
    }
}
Output-
3
3. Generate XOR function using McCulloch-Pitts neural net.
public class XORFunction {
Output-
5
4. Write a program to implement Hebb's Net to classify two
dimensional input patterns in bipolar with given targets.
public class HebbsNet {
private double[] weights;
private double bias;
6
{1, 1},
{1, -1},
{-1, 1},
{-1, -1}
};
Output-
7
5. Perceptron net for an AND function with bipolar inputs
and targets.
/**
 * Perceptron that learns the two-input AND function with bipolar (-1/+1)
 * inputs and targets.
 *
 * NOTE(review): the original listing was fragmented by document extraction —
 * the field declarations, the training-loop header and the activation
 * method were missing. They are reconstructed here so the class compiles;
 * the learning rule (w += lr * error * x, bias += lr * error) and the
 * convergence messages are taken verbatim from the surviving fragment.
 */
public class PerceptronAND {
    private static final int NUM_INPUTS = 2;
    private static final double LEARNING_RATE = 0.1;
    private static final int MAX_EPOCHS = 100;

    private final double[] weights = new double[NUM_INPUTS];
    private double bias;

    /** Initializes each weight and the bias with a random value in [-1, 1). */
    public PerceptronAND() {
        for (int i = 0; i < NUM_INPUTS; i++) {
            weights[i] = Math.random() * 2 - 1;
        }
        bias = Math.random() * 2 - 1;
    }

    /**
     * Bipolar step activation on the net input.
     *
     * @param input one bipolar input pattern of length NUM_INPUTS
     * @return 1 when bias + w·x >= 0, otherwise -1
     */
    public int calculateOutput(int[] input) {
        double net = bias;
        for (int i = 0; i < NUM_INPUTS; i++) {
            net += weights[i] * input[i];
        }
        return (net >= 0) ? 1 : -1;
    }

    /**
     * Trains with the perceptron learning rule until one full epoch produces
     * no errors, or MAX_EPOCHS is exhausted.
     *
     * @param inputs  bipolar input patterns, one row per sample
     * @param targets bipolar target (+1 / -1) for each row of inputs
     */
    public void train(int[][] inputs, int[] targets) {
        int epoch = 0;
        boolean isConverged = false;
        while (!isConverged && epoch < MAX_EPOCHS) {
            isConverged = true;  // falsified by any misclassified sample below
            for (int i = 0; i < inputs.length; i++) {
                int[] input = inputs[i];
                int error = targets[i] - calculateOutput(input);
                if (error != 0) {
                    isConverged = false;
                    for (int j = 0; j < NUM_INPUTS; j++) {
                        weights[j] += LEARNING_RATE * error * input[j];
                    }
                    bias += LEARNING_RATE * error;
                }
            }
            epoch++;
        }
        if (isConverged) {
            System.out.println("Training completed in " + epoch + " epochs.");
        }
        else {
            System.out.println("Training did not converge in the given epochs.");
        }
    }

    /** Prints the trained perceptron's output for each given input pattern. */
    public void test(int[][] inputs) {
        System.out.println("Testing the perceptron:");
        for (int[] input : inputs) {
            int output = calculateOutput(input);
            System.out.println("Input: [" + input[0] + ", " + input[1] + "] => Output: "
                + output);
        }
    }
}
Output-
10
6. Write a program of Perceptron Training Algorithm.
/**
 * Perceptron Training Algorithm: learns a linearly separable two-class
 * function with bipolar targets (+1 / -1) and real-valued inputs.
 *
 * NOTE(review): the constructor and the train(...) header were lost in
 * extraction; they are reconstructed from the call sites in main
 * (new Perceptron(2, 0.1), train(inputs, targets, 100)) and the surviving
 * do-while / update-rule fragment. predict and main survive verbatim.
 */
public class Perceptron {
    private double[] weights;
    private double bias;
    private double learningRate;

    /**
     * @param numInputs    number of input features (length of weight vector)
     * @param learningRate step size for the weight/bias updates
     */
    public Perceptron(int numInputs, double learningRate) {
        // Zero initialization keeps training deterministic; the learning
        // rule moves the weights wherever they need to go.
        this.weights = new double[numInputs];
        this.bias = 0.0;
        this.learningRate = learningRate;
    }

    /**
     * Runs the perceptron learning rule over the training set until one
     * full pass produces no errors, or maxEpochs is exhausted.
     *
     * @param inputs    training patterns, one row per sample
     * @param targets   bipolar target (+1 / -1) per sample
     * @param maxEpochs upper bound on training passes
     */
    public void train(double[][] inputs, int[] targets, int maxEpochs) {
        int epoch = 0;
        boolean hasError;
        do {
            hasError = false;
            for (int i = 0; i < inputs.length; i++) {
                int error = targets[i] - predict(inputs[i]);
                if (error != 0) {
                    for (int j = 0; j < weights.length; j++) {
                        weights[j] += learningRate * error * inputs[i][j];
                    }
                    bias += learningRate * error;
                    hasError = true;
                }
            }
            epoch++;
        }
        while (hasError && epoch < maxEpochs);
        System.out.println("Training completed in " + epoch + " epochs.");
    }

    /**
     * Bipolar step activation on the net input.
     *
     * @param input one input pattern
     * @return 1 when bias + w·x >= 0, otherwise -1
     */
    public int predict(double[] input) {
        double netInput = bias;
        for (int i = 0; i < input.length; i++) {
            netInput += weights[i] * input[i];
        }
        return (netInput >= 0) ? 1 : -1;
    }

    /** Trains on the OR-like truth table and prints predictions vs. targets. */
    public static void main(String[] args) {
        double[][] inputs = {
            {0, 0},
            {0, 1},
            {1, 0},
            {1, 1}
        };
        int[] targets = {-1, 1, 1, 1};
        Perceptron perceptron = new Perceptron(2, 0.1);
        perceptron.train(inputs, targets, 100);
        System.out.println("Testing Perceptron:");
        for (int i = 0; i < inputs.length; i++) {
            int output = perceptron.predict(inputs[i]);
            System.out.printf("Input: (%.1f, %.1f) -> Predicted Output: %d, Target: %d%n",
                inputs[i][0], inputs[i][1], output, targets[i]);
        }
    }
}
Output-
12
7. Write a program for Back Propagation Algorithm.
import java.util.Random;
public class BackPropagation {
private static final int INPUT_NEURONS = 2;
private static final int HIDDEN_NEURONS = 2;
private static final int OUTPUT_NEURONS = 1;
private static final double LEARNING_RATE = 0.5;
public BackPropagation() {
initializeWeights();
}
13
for (int i = 0; i < HIDDEN_NEURONS; i++) {
hiddenBias[i] = random.nextDouble() - 0.5;
for (int j = 0; j < OUTPUT_NEURONS; j++) {
hiddenToOutputWeights[i][j] = random.nextDouble() - 0.5;
}
}
for (int i = 0; i < OUTPUT_NEURONS; i++) {
outputBias[i] = random.nextDouble() - 0.5;
}
}
14
double[] hiddenErrors = new double[HIDDEN_NEURONS];
double[] hiddenDeltas = new double[HIDDEN_NEURONS];
for (int i = 0; i < HIDDEN_NEURONS; i++) {
hiddenErrors[i] = 0.0;
for (int j = 0; j < OUTPUT_NEURONS; j++) {
hiddenErrors[i] += outputDeltas[j] * hiddenToOutputWeights[i][j];
}
hiddenDeltas[i] = hiddenErrors[i] * sigmoidDerivative(hiddenLayer[i]);
for (int i = 0; i < HIDDEN_NEURONS; i++) {
hiddenBias[i] += LEARNING_RATE * hiddenDeltas[i];
}
}
System.out.printf("Epoch %d: Total Error = %.6f%n", epoch + 1, totalError / 2);
}
}
outputLayer[i] += outputBias[i];
outputLayer[i] = sigmoid(outputLayer[i]);
}
}
Output-
16
8. Write a program to implement logic gates.
import java.util.Scanner;
public class LogicGates {
17
System.out.println("Welcome to the Logic Gates Simulator!");
System.out.print("Enter the first binary value (0 or 1): ");
int a = scanner.nextInt();
System.out.print("Enter the second binary value (0 or 1): ");
int b = scanner.nextInt();
Output-
19
9. To perform Union, Intersection and Complement
operations.
import java.util.HashSet;
import java.util.Scanner;
import java.util.Set;
public class SetOperations {
public static Set<Integer> union(Set<Integer> setA, Set<Integer> setB) {
Set<Integer> resultSet = new HashSet<>(setA);
resultSet.addAll(setB);
return resultSet;
}
public static Set<Integer> intersection(Set<Integer> setA, Set<Integer> setB) {
Set<Integer> resultSet = new HashSet<>(setA);
resultSet.retainAll(setB);
return resultSet;
}
Input-
Output-
21
10. To plot various membership functions.
import java.io.FileWriter;
import java.io.IOException;
writer.write(String.format("%.2f,%.4f,%.4f,%.4f,%.4f\n", x, triangular,
trapezoidal, gaussian, sigmoidal));
}
System.out.println("Membership function data exported to
'membership_functions.csv'");
}
catch (IOException e) {
e.printStackTrace();
}
}
}
Output-
23
11. To implement Genetic Algorithm.
import java.util.Random;
public class GeneticAlgorithm {
24
for (int j = 0; j < CHROMOSOME_LENGTH; j++) {
population[i][j] = random.nextInt(2);
}
}
return population;
}
/**
 * Computes the fitness of every chromosome in the population.
 * Fitness is f(x) = x^2, where x is the decimal value of the chromosome's
 * binary encoding (decoded by binaryToDecimal).
 *
 * @param population binary chromosomes, one per row, POPULATION_SIZE rows
 * @return fitness score for each chromosome, in the same order
 */
private static int[] evaluateFitness(int[][] population) {
    int[] fitnessScores = new int[POPULATION_SIZE];
    for (int idx = 0; idx < POPULATION_SIZE; idx++) {
        int decoded = binaryToDecimal(population[idx]);
        fitnessScores[idx] = decoded * decoded;
    }
    return fitnessScores;
}
Output-
27