1. Aim: Write a Python program to calculate compound interest.
Program:
# Compound Interest calculation
def compound_interest(principal, rate, time):
    amount = principal * (1 + rate / 100) ** time
    ci = amount - principal
    return round(ci, 2)

# User input
principal = float(input("Enter the principal amount: "))
rate = float(input("Enter the annual interest rate (in %): "))
time = float(input("Enter the time (in years): "))

# Output
ci = compound_interest(principal, rate, time)
print(f"Compound Interest: {ci}")
Output:
Enter the principal amount: 1000
Enter the annual interest rate (in %): 5
Enter the time (in years): 2
Compound Interest: 102.5
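Note: the program uses the standard compound interest formula CI = P*(1 + r/100)**t - P. For the sample input, 1000 * (1.05)**2 = 1102.5, so CI = 1102.5 - 1000 = 102.5, which matches the output.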
2. Aim: Write a Python program to generate Fibonacci series
up to n terms.
Program:
# Fibonacci series
def fibonacci(n):
    fib_series = []
    a, b = 0, 1
    for _ in range(n):
        fib_series.append(a)
        a, b = b, a + b
    return fib_series

# User input
n = int(input("Enter the number of terms: "))

# Output
fib_series = fibonacci(n)
print(f"Fibonacci Series: {fib_series}")
Output:
Enter the number of terms: 5
Fibonacci Series: [0, 1, 1, 2, 3]
3. Aim: Write a Python program to calculate the factorial of a
number.
Program:
# Factorial calculation
def factorial(n):
    if n == 0 or n == 1:
        return 1
    return n * factorial(n - 1)

# User input
n = int(input("Enter a number: "))

# Output
fact = factorial(n)
print(f"Factorial of {n}: {fact}")
Output:
Enter a number: 5
Factorial of 5: 120
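If very large inputs are expected, recursion depth can become a limitation; a minimal iterative sketch of the same calculation (illustrative only, not part of the original record) is:
# Iterative factorial (avoids recursion depth limits)
def factorial_iterative(n):
    result = 1
    for i in range(2, n + 1):
        result *= i
    return result
For n = 5 this also returns 120.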
4. Aim: Write a Python program to find all prime numbers in
a given range.
Program:
# Check for prime
def is_prime(num):
    if num <= 1:
        return False
    for i in range(2, int(num**0.5) + 1):
        if num % i == 0:
            return False
    return True

def prime_in_range(start, end):
    return [num for num in range(start, end + 1) if is_prime(num)]
start = int(input("Enter the start of the range: "))
end = int(input("Enter the end of the range: "))
# Output
primes = prime_in_range(start, end)
print(f"Prime numbers between {start} and {end}:
{primes}")
Output:
Enter the start of the range: 10
Enter the end of the range: 20
Prime numbers between 10 and 20: [11, 13, 17, 19]
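Note: testing divisors only up to int(num**0.5) is sufficient, because any divisor larger than the square root of num must be paired with one smaller than it, which would already have been found.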
5. Largest of Three Numbers
Aim: Write a Python program to find the largest of three
numbers.
Program:
# Largest of three numbers
def largest_of_three(a, b, c):
    return max(a, b, c)

# User input
a = float(input("Enter the first number: "))
b = float(input("Enter the second number: "))
c = float(input("Enter the third number: "))

# Output
largest = largest_of_three(a, b, c)
print(f"The largest of {a}, {b}, and {c} is: {largest}")
Output:
Enter the first number: 10
Enter the second number: 20
Enter the third number: 15
The largest of 10.0, 20.0, and 15.0 is: 20.0
6. Student Grades Based on Logical Rules
Aim: Write a Python program to calculate student grades
based on logical rules.
Program:
# Calculate grade
def calculate_grade(marks):
    if marks >= 90:
        return 'A'
    elif marks >= 80:
        return 'B'
    elif marks >= 70:
        return 'C'
    elif marks >= 60:
        return 'D'
    else:
        return 'F'

marks = float(input("Enter the student's marks (out of 100): "))
# Output
grade = calculate_grade(marks)
print(f"The grade for marks {marks} is: {grade}")
Output:
Enter the student's marks (out of 100): 85
The grade for marks 85.0 is: B
7. GCD of Numbers
Aim: Write a Python program to calculate the GCD of two
numbers.
Program:
import math

# GCD calculation
def gcd_of_numbers(a, b):
    return math.gcd(a, b)

# User input
a = int(input("Enter the first number: "))
b = int(input("Enter the second number: "))

# Output
gcd = gcd_of_numbers(a, b)
print(f"The GCD of {a} and {b} is: {gcd}")
Output:
Enter the first number: 56
Enter the second number: 98
The GCD of 56 and 98 is: 14
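math.gcd returns the same result as the classical Euclidean algorithm. A minimal hand-written sketch of that idea (illustrative only, assuming non-negative integers) is:
# Euclidean algorithm: repeatedly replace (a, b) with (b, a % b)
def gcd_euclid(a, b):
    while b:
        a, b = b, a % b
    return a
For 56 and 98 this also returns 14.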
8. Linear Regression
Aim: Write a Python program to calculate RMSE, MSE, and
other errors for a linear regression model.
Program:
import numpy as np
def calculate_errors(y_actual, y_predicted):
    mse = np.mean((y_actual - y_predicted) ** 2)
    rmse = np.sqrt(mse)
    mae = np.mean(np.abs(y_actual - y_predicted))
    r2 = 1 - (np.sum((y_actual - y_predicted) ** 2) / np.sum((y_actual - np.mean(y_actual)) ** 2))
    return mse, rmse, mae, r2

y_actual = np.array(list(map(float, input("Enter actual values separated by spaces: ").split())))
y_predicted = np.array(list(map(float, input("Enter predicted values separated by spaces: ").split())))
mse, rmse, mae, r2 = calculate_errors(y_actual, y_predicted)
print(f"MSE: {mse}, RMSE: {rmse}, MAE: {mae}, R2 Score: {r2}")
Output:
Enter actual values separated by spaces: 3 4 5
Enter predicted values separated by spaces: 2.5 4.5 5.2
MSE: 0.18, RMSE: 0.4243, MAE: 0.4, R2 Score: 0.73 (values rounded)
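As a quick cross-check (a sketch assuming scikit-learn is available; not part of the original program), the same metrics can be obtained from sklearn.metrics:
from sklearn.metrics import mean_squared_error, mean_absolute_error, r2_score
import numpy as np

y_actual = np.array([3.0, 4.0, 5.0])
y_predicted = np.array([2.5, 4.5, 5.2])
mse = mean_squared_error(y_actual, y_predicted)   # 0.18
rmse = np.sqrt(mse)                               # about 0.4243
mae = mean_absolute_error(y_actual, y_predicted)  # 0.4
r2 = r2_score(y_actual, y_predicted)              # 0.73
print(mse, rmse, mae, r2)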
9. Logistic Regression
Aim: Write a Python program to calculate accuracy, precision,
recall, and F1-score for a logistic regression model.
Program:
from sklearn.metrics import accuracy_score, precision_score, recall_score, f1_score

def calculate_metrics(y_true, y_pred):
    accuracy = accuracy_score(y_true, y_pred)
    precision = precision_score(y_true, y_pred)
    recall = recall_score(y_true, y_pred)
    f1 = f1_score(y_true, y_pred)
    return accuracy, precision, recall, f1

y_true = list(map(int, input("Enter actual values (0 or 1) separated by spaces: ").split()))
y_pred = list(map(int, input("Enter predicted values (0 or 1) separated by spaces: ").split()))
accuracy, precision, recall, f1 = calculate_metrics(y_true, y_pred)
print(f"Accuracy: {accuracy}, Precision: {precision}, Recall: {recall}, F1-Score: {f1}")
Output:
Enter actual values (0 or 1) separated by spaces: 1 0 1 1 0
Enter predicted values (0 or 1) separated by spaces: 1 0 0 1 0
Accuracy: 0.8, Precision: 1.0, Recall: 0.6666666666666666, F1-Score: 0.8
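Note: these values can be verified by hand from the confusion matrix. For the sample input there are TP = 2, TN = 2, FP = 0 and FN = 1, so accuracy = 4/5 = 0.8, precision = 2/2 = 1.0, recall = 2/3 ≈ 0.667 and F1 = 2*precision*recall/(precision + recall) = 0.8, matching the printed output.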
10. Linear Regression – Best Fit Straight Line
Aim: Write a Python program to calculate and plot the best
fit line for linear regression.
Program:
import numpy as np
import matplotlib.pyplot as plt

# Best fit line
def best_fit_line(x, y):
    m = np.cov(x, y, bias=True)[0][1] / np.var(x)
    c = np.mean(y) - m * np.mean(x)
    return m, c

# User input
x = np.array(list(map(float, input("Enter X values separated by spaces: ").split())))
y = np.array(list(map(float, input("Enter Y values separated by spaces: ").split())))

# Calculation and plot
m, c = best_fit_line(x, y)
y_pred = m * x + c
plt.scatter(x, y, color="blue", label="Actual Data")
plt.plot(x, y_pred, color="red", label="Best Fit Line")
plt.legend()
plt.xlabel("X")
plt.ylabel("Y")
plt.title("Linear Regression - Best Fit Line")
plt.show()

# Output
print(f"Slope: {m}, Intercept: {c}")
Output:
Enter X values separated by spaces: 1 2 3 4
Enter Y values separated by spaces: 2 4 6 8
Slope: 2.0, Intercept: 0.0
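Note: best_fit_line uses the closed-form least-squares estimates m = cov(x, y) / var(x) and c = mean(y) - m * mean(x). For the sample input, cov(x, y) = 2.5 and var(x) = 1.25, giving m = 2.0 and c = 5.0 - 2.0 * 2.5 = 0.0, as printed.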
11. Logistic Regression - Best Fit Straight Line
Aim: Write a Python program to visualize the logistic
regression sigmoid curve.
Program:
import numpy as np
import matplotlib.pyplot as plt

# Sigmoid function
def sigmoid(x):
    return 1 / (1 + np.exp(-x))
# User input
x = np.array(list(map(float, input("Enter X values separated by spaces: ").split())))

# Calculate and plot
y = sigmoid(x)
plt.plot(x, y, color="green", label="Sigmoid Curve")
plt.xlabel("X")
plt.ylabel("Sigmoid(X)")
plt.title("Logistic Regression - Sigmoid Curve")
plt.grid()
plt.legend()
plt.show()
Output:
Enter X values separated by spaces: -10 -5 0 5 10
Sigmoid curve plotted on the graph.
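Note: sigmoid(0) = 0.5, sigmoid(5) ≈ 0.993 and sigmoid(10) ≈ 0.99995, with sigmoid(-5) and sigmoid(-10) being the corresponding complements, so the plotted curve rises smoothly from near 0 to near 1 as X increases.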
12. Linear Regression Using Gradient Descent
Aim: Write a Python program to implement linear
regression using gradient descent.
Program:
# Implementation of gradient descent in linear regression
import numpy as np
import matplotlib.pyplot as plt

class Linear_Regression:
    def __init__(self, X, Y):
        self.X = X
        self.Y = Y
        self.b = [0, 0]

    def update_coeffs(self, learning_rate):
        Y_pred = self.predict()
        Y = self.Y
        m = len(Y)
        self.b[0] = self.b[0] - (learning_rate * ((1/m) * np.sum(Y_pred - Y)))
        self.b[1] = self.b[1] - (learning_rate * ((1/m) * np.sum((Y_pred - Y) * self.X)))

    def predict(self, X=None):
        Y_pred = np.array([])
        if X is None:
            X = self.X
        b = self.b
        for x in X:
            Y_pred = np.append(Y_pred, b[0] + (b[1] * x))
        return Y_pred

    def get_current_accuracy(self, Y_pred):
        p, e = Y_pred, self.Y
        n = len(Y_pred)
        return 1 - sum(
            [abs(p[i] - e[i]) / e[i]
             for i in range(n)
             if e[i] != 0]
        ) / n

    def compute_cost(self, Y_pred):
        m = len(self.Y)
        J = (1 / (2 * m)) * np.sum((Y_pred - self.Y) ** 2)
        return J

    def plot_best_fit(self, Y_pred, fig):
        f = plt.figure(fig)
        plt.scatter(self.X, self.Y, color='b')
        plt.plot(self.X, Y_pred, color='g')
        f.show()

def main():
    X = np.array([i for i in range(11)])
    Y = np.array([2*i for i in range(11)])

    regressor = Linear_Regression(X, Y)

    iterations = 0
    steps = 100
    learning_rate = 0.01
    costs = []

    # original best-fit line
    Y_pred = regressor.predict()
    regressor.plot_best_fit(Y_pred, 'Initial Best Fit Line')

    while 1:
        Y_pred = regressor.predict()
        cost = regressor.compute_cost(Y_pred)
        costs.append(cost)
        regressor.update_coeffs(learning_rate)

        iterations += 1
        if iterations % steps == 0:
            print(iterations, "epochs elapsed")
            print("Current accuracy is :",
                  regressor.get_current_accuracy(Y_pred))

            stop = input("Do you want to stop (y/*)??")
            if stop == "y":
                break

    # final best-fit line
    regressor.plot_best_fit(Y_pred, 'Final Best Fit Line')

    # plot to verify cost function decreases
    h = plt.figure('Verification')
    plt.plot(range(iterations), costs, color='b')
    h.show()

    # if user wants to predict using the regressor:
    regressor.predict([i for i in range(10)])

if __name__ == '__main__':
    main()
Output:
100 epochs elapsed
Current accuracy is : 0.9836456109008862
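Note: each pass of update_coeffs applies the standard gradient-descent updates for a line y = b0 + b1*x:
b0 = b0 - learning_rate * (1/m) * sum(Y_pred - Y)
b1 = b1 - learning_rate * (1/m) * sum((Y_pred - Y) * X)
where m is the number of samples; the cost J = (1/(2*m)) * sum((Y_pred - Y)**2) should decrease as the iterations proceed, which is what the Verification plot shows.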
13. Logistic Regression Using Gradient Descent
Aim: Write a Python program to implement logistic
regression using gradient descent.
Program:
import numpy as np
import matplotlib.pyplot as plt

class LogisticRegression:
    def __init__(self, X, Y):
        self.X = np.hstack((np.ones((len(X), 1)), X.reshape(-1, 1)))  # Adding intercept term
        self.Y = Y
        self.b = np.zeros(self.X.shape[1])  # Initialize coefficients as zeros

    def sigmoid(self, z):
        return 1 / (1 + np.exp(-z))

    def update_coeffs(self, learning_rate):
        Y_pred = self.predict_proba()
        m = len(self.Y)
        gradient = (1/m) * np.dot(self.X.T, (Y_pred - self.Y))
        self.b -= learning_rate * gradient

    def predict_proba(self, X=None):
        if X is None:
            X = self.X
        z = np.dot(X, self.b)
        return self.sigmoid(z)

    def predict(self, X=None):
        probas = self.predict_proba(X)
        return np.array([1 if p >= 0.5 else 0 for p in probas])

    def compute_cost(self):
        m = len(self.Y)
        Y_pred = self.predict_proba()
        cost = (-1/m) * (np.sum(self.Y * np.log(Y_pred) + (1 - self.Y) * np.log(1 - Y_pred)))
        return cost

    def plot_decision_boundary(self):
        plt.scatter(self.X[:, 1], self.Y, color="blue", label="Data")
        x_boundary = np.linspace(min(self.X[:, 1]), max(self.X[:, 1]), 100)
        y_boundary = self.sigmoid(self.b[0] + self.b[1] * x_boundary)
        plt.plot(x_boundary, y_boundary, color="red", label="Decision Boundary")
        plt.xlabel("Feature")
        plt.ylabel("Probability")
        plt.legend()
        plt.show()

def main():
    # Example data
    n = 11
    X = np.array([i for i in range(n)])  # Generates feature values
    Y = np.array([0 if i < n//2 else 1 for i in range(n)])  # Binary labels (0 for first half, 1 for second half)

    regressor = LogisticRegression(X, Y)

    iterations = 0
    steps = 100
    learning_rate = 0.1
    costs = []

    while 1:
        cost = regressor.compute_cost()
        costs.append(cost)
        regressor.update_coeffs(learning_rate)
        iterations += 1

        if iterations % steps == 0:
            print(f"{iterations} iterations elapsed")
            print(f"Current cost: {cost}")

            stop = input("Do you want to stop (y/*)? ")
            if stop == "y":
                break

    # Final decision boundary
    regressor.plot_decision_boundary()

    # Plot cost vs iterations
    plt.figure("Cost Reduction")
    plt.plot(range(iterations), costs, color="blue")
    plt.title("Cost Reduction Over Iterations")
    plt.xlabel("Iterations")
    plt.ylabel("Cost")
    plt.show()

    # Predictions
    test_X = np.array([i for i in range(15)])  # Test inputs
    test_X_with_intercept = np.hstack((np.ones((len(test_X), 1)), test_X.reshape(-1, 1)))
    predictions = regressor.predict(test_X_with_intercept)
    print(f"Predictions for test data {test_X}: {predictions}")

if __name__ == "__main__":
    main()
Output:
100 iterations elapsed
Current cost: 0.32733039684291076
Predictions for test data [ 0 1 2 3 4 5 6 7 8 9 10 11 12
13 14]: [0 0 0 0 1 1 1 1 1 1 1 1 1 1 1]
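Note: the model minimises the cross-entropy cost J = -(1/m) * sum(Y*log(Y_pred) + (1 - Y)*log(1 - Y_pred)) using the vectorised gradient-descent update b = b - learning_rate * (1/m) * np.dot(X.T, Y_pred - Y), which is what compute_cost and update_coeffs implement; predicted labels are 1 wherever the fitted probability is at least 0.5.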
