0% found this document useful (0 votes)
2 views

Python File

The document contains a series of programming experiments that demonstrate various tasks in Python, including file manipulation, regular expressions, exception handling, GUI creation, web scraping, data visualization, machine learning image classification, and server communication. Each experiment includes the aim, code, and expected output. The experiments cover a range of topics suitable for learning Python programming and its libraries.

Uploaded by

Piyush Mittal
Copyright
© All Rights Reserved
Available Formats
Download as PDF, TXT or read online on Scribd
0% found this document useful (0 votes)
2 views

Python File

The document contains a series of programming experiments that demonstrate various tasks in Python, including file manipulation, regular expressions, exception handling, GUI creation, web scraping, data visualization, machine learning image classification, and server communication. Each experiment includes the aim, code, and expected output. The experiments cover a range of topics suitable for learning Python programming and its libraries.

Uploaded by

Piyush Mittal
Copyright
© All Rights Reserved
Available Formats
Download as PDF, TXT or read online on Scribd
You are on page 1/ 11

Experiment - 7

Aim : Create a program that reads data from one file and writes it to another file in a
different format.

Code :

# Experiment 7: read data from one file and write it to another in a
# different format (here: upper-cased text).
# NOTE(review): the extracted source had lost its indentation; the with-bodies
# are restored. Explicit encoding avoids platform-dependent defaults.

# Create the sample input file.
with open("input.txt", "w", encoding="utf-8") as file:
    file.write("Hello, this is a sample file content.")

# Read the content back.
with open("input.txt", "r", encoding="utf-8") as file:
    data = file.read()

# Write the transformed ("different format") copy.
with open("output.txt", "w", encoding="utf-8") as file:
    file.write(data.upper())

Output :
Experiment - 8

Aim : Create a program that uses regular expressions to find all instances of a specific
pattern in a text.

Code :
import re

# Experiment 8: find all e-mail addresses in a text with a regular expression.
# NOTE(review): the scraped source had the sample addresses scrubbed to
# "[email protected]" placeholders (which the pattern cannot match) and the
# string literal was split across lines; concrete example addresses are
# restored so the demo actually extracts something.
text = ("My email is john.doe@example.com and "
        "my friend's email is jane_smith@test.org.")
# local part, '@', domain labels, a dot, then a TLD of 2+ alphanumerics.
pattern = r'[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z0-9]{2,}'
emails = re.findall(pattern, text)
print("Extracted Emails:", emails)

Output :
Experiment - 9

Aim : Create a program that prompts the user for two numbers and divides them, handling
any exceptions.

Code :

def divide_numbers(num1, num2):
    """Return num1 / num2 as a float.

    Raises ZeroDivisionError when num2 is 0; the interactive caller below
    handles it.
    """
    return num1 / num2


def main():
    """Prompt for two integers, divide them, and report any input errors."""
    try:
        num1 = int(input("Enter the first number: "))
        num2 = int(input("Enter the second number: "))
        print("Result:", divide_numbers(num1, num2))
    except ZeroDivisionError:
        print("Error: Cannot divide by zero!")
    except ValueError:
        print("Error: Invalid input! Please enter numbers only.")


# Guarding the prompt lets the module be imported (and tested) without
# blocking on input(); the original ran the prompts at import time.
if __name__ == "__main__":
    main()

Output :
Experiment - 10

Aim : Create a program that uses a graphical user interface (GUI) to allow the user to
perform simple calculations.

Code :
import tkinter as tk


def calculate():
    """Parse both entry fields and display their sum, or an error message."""
    try:
        num1 = float(entry1.get())
        num2 = float(entry2.get())
    except ValueError:
        result.set("Invalid input")
    else:
        result.set(f"Result: {num1 + num2}")


root = tk.Tk()
root.title("Simple Calculator")
root.geometry("300x200")
root.resizable(False, False)

FONT = ("Helvetica", 12)
PAD = {'padx': 10, 'pady': 10}

# Two identical label/entry rows, built in a loop.
boxes = []
for row, caption in enumerate(("Enter first number:", "Enter second number:")):
    tk.Label(root, text=caption, font=FONT).grid(
        row=row, column=0, sticky="w", **PAD)
    box = tk.Entry(root, font=FONT)
    box.grid(row=row, column=1, **PAD)
    boxes.append(box)
entry1, entry2 = boxes

# Result display and the Add button underneath the entry rows.
result = tk.StringVar()
tk.Label(root, textvariable=result, font=FONT, fg="blue").grid(
    row=2, column=0, columnspan=2, **PAD)
tk.Button(root, text="Add", command=calculate, font=FONT, bg="#4CAF50",
          fg="white").grid(row=3, column=0, columnspan=2, **PAD)

root.mainloop()
Output :
Experiment - 11

Aim : Create a Python program that uses a web scraping library to extract data
(hyperlinks) from a website and then stores it in a SQLite database.

Code :
import requests
from bs4 import BeautifulSoup
import sqlite3

def scrape_data(url):
    """Return the non-empty text of every anchor (<a>) tag on the page at url.

    Prints a diagnostic and returns an empty list on any non-200 response.
    """
    response = requests.get(url)
    if response.status_code != 200:
        print(f"Failed to retrieve the page: {response.status_code}")
        return []
    soup = BeautifulSoup(response.text, 'html.parser')
    anchor_texts = (anchor.get_text(strip=True) for anchor in soup.find_all('a'))
    return [text for text in anchor_texts if text]

def create_database():
    """Open (or create) scraped_data.db and ensure the scraped_data table exists.

    Returns the open connection together with a cursor on it so the caller
    can both insert rows and close the connection when done.
    """
    connection = sqlite3.connect('scraped_data.db')
    cur = connection.cursor()
    cur.execute('''
    CREATE TABLE IF NOT EXISTS scraped_data (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        content TEXT
    )''')
    connection.commit()
    return connection, cur

def insert_data(cursor, data):
    """Insert every scraped string in *data* as a row and commit once.

    executemany performs the same per-item INSERT the original loop did,
    in a single call.
    """
    cursor.executemany('INSERT INTO scraped_data (content) VALUES (?)',
                       ((item,) for item in data))
    cursor.connection.commit()

def main():
    """Scrape anchor text from a page and persist it to the SQLite database.

    Orchestrates scrape_data -> create_database -> insert_data and prints a
    summary; prints a notice instead when nothing was scraped.
    """
    # NOTE(review): the scraped PDF wrapped this URL in a fanyv88.com proxy
    # ("https://..."); the direct URL is restored here.
    url = 'https://www.google.com'
    data = scrape_data(url)
    if data:
        print(f"Scraped {len(data)} items")
        print("Scraped Data: ", data)
        conn, cursor = create_database()
        insert_data(cursor, data)
        print("Data stored in the database successfully.")
        conn.close()
    else:
        print("No data to store.")


if __name__ == "__main__":
    main()
Output :
Experiment - 12

Aim : Create a program that reads data from a file and then creates a visualization of that
data using a data visualization library.

Code :

import pandas as pd
import matplotlib.pyplot as plt

def read_data(file_path):
    """Load the CSV at *file_path* into a pandas DataFrame and return it."""
    return pd.read_csv(file_path)

def create_visualization(df):
    """Render and display a bar chart of the 'Value' column keyed by 'Category'.

    Expects *df* to contain 'Category' and 'Value' columns; blocks on
    plt.show() until the window is closed.
    """
    categories, values = df['Category'], df['Value']
    plt.figure(figsize=(8, 6))
    plt.bar(categories, values, color='skyblue')
    plt.xlabel('Category')
    plt.ylabel('Value')
    plt.title('Category vs Value')
    plt.show()

def main():
    """Load data.csv and chart it; bail out with a message if it is empty."""
    frame = read_data('data.csv')
    if frame.empty:
        print("Error: No data found in the file.")
        return
    create_visualization(frame)


if __name__ == "__main__":
    main()
Output :
Experiment - 13

Aim : Create a program that uses a machine learning library to classify images based on
their content using a pre-trained model.

Code :
import torch
import torch.nn as nn
from torchvision import models, transforms
from PIL import Image
import requests
from io import BytesIO
import json

# Load pre-trained ResNet-18 model

# eval() switches the network to inference mode (e.g. fixes batch-norm /
# dropout behavior) so predictions are deterministic.
model = models.resnet18(weights=models.ResNet18_Weights.IMAGENET1K_V1)
model.eval()

# Image preprocessing pipeline

# Resize/center-crop to the 224x224 input the model expects, convert to a
# tensor, and normalize with the standard ImageNet channel means/stds that
# match the pre-trained weights.
transform = transforms.Compose([
transforms.Resize(256),
transforms.CenterCrop(224),
transforms.ToTensor(),
transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
])

# Function to load and preprocess an image


def load_and_preprocess_image(image_path):
    """Open *image_path* (local path or http(s) URL) as RGB and return a
    preprocessed tensor with a leading batch dimension of 1."""
    if image_path.startswith('http'):
        payload = requests.get(image_path).content
        img = Image.open(BytesIO(payload)).convert('RGB')
    else:
        img = Image.open(image_path).convert('RGB')
    # unsqueeze(0) adds the batch dimension the model expects.
    return transform(img).unsqueeze(0)

# Function to classify the image


def classify_image(image_path):
    """Run the pre-trained model on *image_path* and print the ImageNet label.

    Downloads the class-index mapping to turn the argmax index into a
    human-readable label name.
    """
    img_tensor = load_and_preprocess_image(image_path)

    with torch.no_grad():
        outputs = model(img_tensor)
        _, predicted_idx = torch.max(outputs, 1)

    # NOTE(review): in the scraped source this literal was split across three
    # lines (a syntax error) and proxy-wrapped by fanyv88.com; the direct URL
    # is restored as one implicitly-concatenated string.
    LABELS_URL = ("https://storage.googleapis.com/download.tensorflow.org/"
                  "data/imagenet_class_index.json")
    labels = requests.get(LABELS_URL).json()
    predicted_label = labels[str(predicted_idx.item())][1]

    print(f"Predicted label: {predicted_label}")

# Main entry point: prompt for an image location and classify it.
# (The extracted source had lost the indentation under the guard.)
if __name__ == "__main__":
    image_path = input("Enter the image path (or URL): ")
    classify_image(image_path)

Output :
Experiment - 14

Aim : Create a program that uses a networking library to communicate with a server and
retrieve data from it.

Code :
import requests

def get_data_from_server(url):
    """GET *url* and print the response body, or a diagnostic on failure.

    Network/connection problems are caught as RequestException and reported
    rather than raised. (The scraped source had this function's failure
    f-string split across two lines — a syntax error — rejoined here.)
    """
    try:
        response = requests.get(url)
        if response.status_code == 200:
            print("✅ Data received successfully:")
            print(response.text)  # Raw HTML or JSON/text response
        else:
            print(f"❌ Failed to retrieve data. "
                  f"HTTP Status code: {response.status_code}")
    except requests.exceptions.RequestException as e:
        # Handle network or connection errors
        print(f"⚠ An error occurred: {e}")

def main():
    """Prompt for a server URL and fetch/print its response."""
    # NOTE(review): the prompt string was split across two lines in the
    # scraped source and its example URL was proxy-wrapped by fanyv88.com;
    # both are repaired here with implicit string concatenation.
    url = input("Enter the server URL to retrieve data from "
                "(e.g., https://jsonplaceholder.typicode.com/posts): ")
    get_data_from_server(url)


if __name__ == "__main__":
    main()
Output :

You might also like