Algorithms For Optimization
Algorithms For Optimization
1978
G.R. PATIL COLLEGE DOMBIVLI
OF ARTS, SCIENCE, COMMERCE & B.M.S
Affiliated to Mumbai University
Certificate
This is to certify that Dasuri Vicky of Master of Science (Computer
Science) Part-1 (Sem-1) has completed the practical work in the
subject of Algorithms for Optimization during the academic year
2022-2023, as per the requirement of the University of Mumbai, in partial
fulfilment of the requirements for the PG Degree of Master of Science
(Computer Science).
Roll No:- 01
Code :-
# Practical 1: contour plot of objective values sampled on a 5x5 grid.
using PlotlyJS

# Grid of sampled function values; transposed below so rows map to the
# y-axis in the contour trace.
zvals = [
    10     10.625 12.5   15.625 20
    5.625  6.25   8.125  11.25  15.625
    2.5    3.125  5.     8.125  12.5
    0.62   1.25   3.125  6.25   10.625
    0      0.625  2.5    5.625  10
]

plot(contour(z = zvals'))
Output:-
Practical 2
Result=fibonacci_section_search(f,-1,1,10)
Output:-
(-0.011235955056179792 , 0.011235955056179796)
Code:-
function golden_section_search(f, a, b, n)
    # Golden-section search: shrink the bracket [a, b] around a local
    # minimiser of the unimodal function f using n - 1 interior evaluations.
    #
    # Arguments: f (objective), a/b (bracket endpoints), n (evaluation budget).
    # Returns the final bracket as an ordered tuple (lo, hi).
    p = Base.MathConstants.golden - 1   # exact 1/phi = 0.6180339887...;
                                        # the original hard-coded 1.618 - 1
    d = p*b + (1 - p)*a
    yd = f(d)
    for i = 1:n-1
        c = p*a + (1 - p)*b             # new interior point
        yc = f(c)
        if yc < yd
            # Minimum lies left of d: keep [a, d], with c the new probe.
            b, d, yd = d, c, yc
        else
            # Minimum lies right of c: drop the left part of the bracket.
            a, b = b, c
        end
    end
    return a < b ? (a, b) : (b, a)
end
# golden_section_search (generic function with 1 method)
# Quadratic test objective for the golden-section demo.
f(x) = x^2
# f (generic function with 1 method)
result=golden_section_search(f,2,8,5)
Output :-
(2, 2.8755439999999997)
Practical 3
function quadratic_fit_search(f,a,b,c,n)
    # Quadratic-fit search: bracket a minimiser of f by repeatedly fitting
    # a parabola through the three points (a, b, c) and jumping to its vertex.
    #
    # Arguments: f (objective), a < b < c (initial bracket), n (evaluation
    # budget; n - 3 refinement iterations are performed).
    # Returns the final bracket (a, b, c).
    ya, yb, yc = f(a), f(b), f(c)
    for i in 1:n-3
        # Debug trace of the current bracket.  The original wrote "/n",
        # a broken escape that printed literal slashes instead of newlines.
        print(a, "\n", b, "\n", c, "\n")
        # Vertex of the interpolating parabola through the three points.
        x = 0.5*(ya*(b^2-c^2) + yb*(c^2-a^2) + yc*(a^2-b^2)) /
            (ya*(b-c) + yb*(c-a) + yc*(a-b))
        yx = f(x)
        if x > b
            if yx > yb
                c, yc = x, yx              # vertex is a worse right point
            else
                a, ya, b, yb = b, yb, x, yx  # shift bracket right
            end
        elseif x < b
            if yx > yb
                a, ya = x, yx              # vertex is a worse left point
            else
                c, yc, b, yb = b, yb, x, yx  # shift bracket left
            end
        end
        # x == b: parabola vertex coincides with the middle point; no update.
    end
    return (a, b, c)
end
# quadratic_fit_search (generic function with 1 method)
# Shifted quadratic n^2 + 2n - 1; its exact minimum is at n = -1 (value -2).
f(n) = n^2 + 2n - 1
# f (generic function with 1 method)
result=quadratic_fit_search(f,1,6,10,5)
Output :-
1/n6/n10/n1/n-1.0/n6/n(1, -1.0, 6)
Practical 4
Aim :- Implement Gradient descent
Code:-
function gradient_descent(p, q, x1; a=0.1, maxiter=1000, g=1e-5)
    # Steepest descent on the quadratic (1/2)x'Px + q'x, i.e. iterate
    # toward the solution of P x = -q.
    #
    # Arguments: p (symmetric positive-definite matrix), q (vector),
    #            x1 (starting point, not mutated).
    # Keywords:  a = step size, maxiter = iteration cap, g = gradient-norm
    #            tolerance.
    # Returns the final iterate x.
    x = copy(x1)           # was `x-copy(x1)`: a subtraction typo, not an assignment
    f = x -> p * x + q     # gradient of the quadratic (was `0 * x + q`,
                           # which made the "gradient" a constant)
    x2 = -f(x)             # descent direction
    iter = 0
    # `&&` (not `||`): stop once the gradient is small OR the budget is
    # spent; the original `||` made neither condition terminate the loop.
    while norm(x2) > g && iter <= maxiter
        iter += 1
        x += a * x2
        x2 = -f(x)
    end
    return x
end
# gradient_descent (generic function with 1 method)
# Data for the gradient_descent demo: minimising (1/2)x'Px + q'x, whose
# solution P x = -q is approximately [10/9, 100/9] -- matching the printed
# output below.  NOTE(review): x2 here appears to be the starting point
# passed as the x1 argument; confirm against the missing call line.
p=[10.0 -1.0;
-1.0 1.0 ];
q=[0; -10.0];
x2=zeros(2);
Output :-
1.111111111111103
11.11111111111104
Practical 5
Code:-
Output :-
Iteration : 0, Current Guess: 0.0
Practical 6
ada_grad(3,20000,0.01)
Output :-
Iteration : 10043, current Guess : 1.0101584845215472
Iteration : 10044, current Guess : 1.0100587087650088
Iteration : 10045, current Guess : 1.0099589379745384
#RMSprop
Code :-
function rms_prop(x_guess, max_iter, alpha, beta)
    # RMSprop on f(x) = (x - 1)^2, whose gradient is 2x - 2.
    #
    # Arguments: x_guess (start point), max_iter (iteration cap),
    #            alpha (learning rate), beta (decay of the squared-gradient
    #            moving average).
    # Prints each iterate and returns the final guess.
    converged = false      # was `fasle`: an UndefVarError at runtime
    iter = 0
    prev_sgs = 0
    while converged == false
        # Recompute the gradient at the CURRENT point each iteration; the
        # original evaluated it once before the loop, freezing the descent
        # direction forever.
        fd = 2 * x_guess - 2
        sgs = (prev_sgs * beta) + ((fd)^2) * (1 - beta)
        if sgs == 0
            # Exact minimum: fd == 0 and sgs == 0 would give 0/0 = NaN.
            converged = true
        else
            delta = alpha * fd
            x_optimum = x_guess - delta / sqrt(sgs)
            x_guess = x_optimum
            prev_sgs = sgs
            println("Iteration : $iter, Current_Guess : $x_guess")
            if x_guess - 1 < 0.01
                converged = true
            end
            if iter > max_iter
                converged = true
            end
            iter = iter + 1
        end
    end
    return x_guess
end
# rms_prop (generic function with 1 method)
# Adadelta
Code:-
prev_sgs = sgs
x_optimum = x_guess - x
x_guess = x_optimum
println("Iteration : $iter, Current_Guess : $x_guess")
Output :-
Code:-
# Practical 7: radial-basis surrogate of f(x) = log(x)*x^2 + x^3 on [1, 10].
using Surrogates
using Plots
f = x -> log(x) * x^2 + x^3
ib = 1.0                        # lower bound of the sampling interval
ub = 10.0                       # upper bound
# 50 Sobol' quasi-random sample sites and their function values.
x = sample(50, ib, ub, SobolSample())
y = f.(x)
my_radial_basis = RadialBasis(x, y, ib, ub)
approx = my_radial_basis(5.4)   # point evaluation of the surrogate
using Plots
plot(x, y, seriestype=:scatter, label="Sample points",
    xlims=(ib, ub), legend=:top)
plot!(f, label="True function", xlims=(ib, ub), legend=:top)
# Fixed legend label: this curve is the surrogate, not the sample points
# (matches the labelling used in Practical 8).
plot!(my_radial_basis, label="Surrogate function", xlims=(ib, ub),
    legend=:top)
Output:-
Practical 8
Aim:- Apply Random Forest in surrogate Model.
Code:-
using Surrogates
using Plots
Output:-
# NOTE(review): f, x, y, lower_bound and upper_bound are not defined in
# this excerpt -- presumably sampled as in Practical 7; confirm against
# the full listing.
num_round = 2
# Fit a random-forest surrogate to the sampled points.
randomforest_surrogate = RandomForestSurrogate(x ,y ,lower_bound,
upper_bound, num_round = 2)
# Overlay the samples, the true function and the surrogate on one plot.
plot(x, y, seriestype=:scatter, label="Sampled points",
xlims=(lower_bound, upper_bound), legend=:top)
plot!(f, label="True function", xlims=(lower_bound, upper_bound),
legend=:top)
plot!(randomforest_surrogate, label="Surrogate function",
xlims=(lower_bound, upper_bound), legend=:top)
Output:
Practical 9
Code:-
using GaussianProcesses
using Random
# Practical 9: fit a Gaussian process to noisy samples of sin(x).
Random.seed!(20140430)
n=10
# n random abscissae in [0, 2*3.14) (3.14 approximates pi).
x=2*3.14*rand(n)
y=sin.(x)+0.05*rand(n);
# Zero mean function and a squared-exponential kernel (log-scale params 0, 0).
mZero=MeanZero()
kern=SE(0.0, 0.0)
logObsNoise=-1.0
gp=GP(x, y, mZero, kern, logObsNoise)
# Dense grid, rebound to x; reused below for posterior sampling.
x=0:0.1:2*3.14
Method1
using Plots
# Method 1: plot the prior fit, optimise the hyperparameters in place,
# re-plot, then draw 5 posterior sample paths over the grid x.
plot(gp; obsv=false)
optimize!(gp)
plot(gp; obsv=false, label="Gaussian Process", fmt=:png)
samples=rand(gp, x, 5)
plot!(x,samples)
Output:-
Method2
Code:-
using Plots
# Method 2: single labelled plot of the fitted GP, rendered as PNG.
plot(gp; xlabel="x", ylabel="y", title="Gaussian Process",
legend=false, fmt=:png)
Practical 10
Code:-
using AntColony
# Random 10-node cost matrix for the ant-colony demo.
distance_matrix = rand(10,10)
# NOTE(review): AntColony.jl keyword semantics assumed from the names --
# presumably a closed tour over all nodes; confirm against the package docs.
aco(distance_matrix, is_tour=true)
# Presumably a shortest path from node 1 to node 5.
aco(distance_matrix, start_node=1, end_node=5)
Output:-