DCshortCodes.ipynb - Colab
# Entropy of a discrete source
from math import log

while True:
    num = int(input('\nEnter the number of probabilities: '))
    probabilities = [float(input(f'Enter Probability {i+1}: ')) for i in range(num)]
    total = sum(probabilities)
    if abs(total - 1) < 1e-9:  # tolerate floating-point rounding in the sum
        H = sum(p * log(1 / p, 2) for p in probabilities)
        print('\nEntropy = %.3f' % H)
        break
    else:
        print('\nThe sum of the probabilities is %.3f, which is not equal to 1. Retry!' % total)
Entropy = 1.522
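# Worked check (assumed inputs; the notebook does not record them): for
# probabilities 0.4, 0.4, 0.2, H = 2*(0.4*log2(2.5)) + 0.2*log2(5)
#   ≈ 0.529 + 0.529 + 0.464 ≈ 1.522 bits, matching the printed result above.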
#6 Shannon-Fano coding
import math

def shannon_fano_lengths(probs):
    # Recursive Shannon-Fano split: divide the sorted probabilities into two
    # groups of nearly equal total probability; each split adds one bit to
    # every code in both groups.
    if len(probs) <= 1:
        return [0] * len(probs)
    best_split, best_diff = 1, float('inf')
    for i in range(1, len(probs)):
        diff = abs(sum(probs[:i]) - sum(probs[i:]))
        if diff < best_diff:
            best_diff, best_split = diff, i
    return [length + 1 for length in
            shannon_fano_lengths(probs[:best_split]) + shannon_fano_lengths(probs[best_split:])]

n = int(input("Enter number of probabilities: "))
# Input probabilities
probabilities = [float(input(f"Enter probability {i+1}: ")) for i in range(n)]
sorted_prob = sorted(probabilities, reverse=True)
# Calculate entropy
entropy = 0  # Initialize entropy
for p in sorted_prob:
    entropy -= p * math.log2(p)
# Shannon-Fano code lengths for the sorted probabilities
code_lengths = shannon_fano_lengths(sorted_prob)
# Calculate average code length
avg_code_length = sum(sorted_prob[i] * code_lengths[i] for i in range(n))
# Calculate efficiency
efficiency = (entropy / avg_code_length) * 100
# Display results
print(f"\nEntropy: {entropy:.2f}")
print(f"Average Code Length: {avg_code_length:.2f}")
print(f"Efficiency: {efficiency:.2f}%")
Entropy: 1.87
Average Code Length: 1.88
Efficiency: 99.81%
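# Worked check with assumed probabilities 0.4, 0.3, 0.2, 0.1 (the actual inputs
# are not recorded): the Shannon-Fano splits give code lengths [1, 2, 3, 3],
# so the average length is 0.4*1 + 0.3*2 + 0.2*3 + 0.1*3 = 1.9 bits,
# the entropy is about 1.846 bits, and the efficiency is about 97.2%.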
#7 LBC
import numpy as np

# Generator and message are read as bit strings, e.g. "1011"
generator = np.array([int(bit) for bit in input("Enter the generator bits: ")])
msg = np.array([int(bit) for bit in input("Enter the message bits: ")])
print("Generator:", generator)
print("Message:", msg)
# Codeword = message polynomial multiplied by generator polynomial over GF(2)
result = np.convolve(generator, msg, mode='full') % 2
print("Codeword:", result)
#9 Convolution
import numpy as np
# Generator vectors
g1 = np.array([1, 0, 1])
g2 = np.array([1, 1, 1])
# Message
m = np.array([1, 0, 1, 0, 1])
# Convolution and modulo 2
v1 = np.convolve(g1, m) % 2
v2 = np.convolve(g2, m) % 2
# Interleave the two path outputs to form the rate-1/2 encoder output
encoded = np.empty(v1.size + v2.size, dtype=int)
encoded[0::2] = v1
encoded[1::2] = v2
# Print outputs
print("OUTPUT of PATH1: {}".format(v1))
print("OUTPUT of PATH2: {}".format(v2))
print("CONVOLUTION ENCODER OUTPUT: {}".format(encoded))
OUTPUT of PATH1: [1 0 0 0 0 0 1]
OUTPUT of PATH2: [1 1 0 1 0 1 1]
CONVOLUTION ENCODER OUTPUT: [1 1 0 1 0 0 0 1 0 0 0 1 1 1]
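# Check: the encoder output alternates bits from the two paths
# (v1[0], v2[0], v1[1], v2[1], ...), so it starts 1 1 0 1 ... and has
# 2 * 7 = 14 bits for the 5-bit message plus the encoder memory tail.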