Intro To Algo HW 2

2.1

Ordered from slowest- to fastest-growing:
Vanishing:
2/N (approaches 0 as N grows)
Constant:
37 (any constant value)
Sublinear:
√N
Linear:
N
Linearithmic:
N log log N (slowest of this group, since log log N grows more slowly than log N)
N log N
N log(N²) (log(N²) = 2 log N, so this equals 2 N log N)
Poly-logarithmic:
N log² N
Polynomial:
N^1.5
N^2
Exponential:
2^(N/2)
2^N (fastest growing function; 2^N = (2^(N/2))², so it outgrows 2^(N/2))
Functions that grow at the same rate:
N log N and N log(N²) (they differ only by the constant factor 2)
Code:
#include <stdio.h>
#include <math.h>
void print_growth_rate(int n, const char *function_name, double (*function)(int)) {
    double result = function(n);
    printf("Function: %s, Input: %d, Result: %.2lf\n", function_name, n, result);
}

double f1(int n) {
    return n;            /* N */
}

double f2(int n) {
    return sqrt(n);      /* sqrt(N) */
}

double f3(int n) {
    return pow(n, 1.5);  /* N^1.5 */
}

double f4(int n) {
    return pow(n, 2);    /* N^2 */
}

double f5(int n) {
    return n * log(n);   /* N log N */
}

int main() {
    int n = 10; // Change this value to see growth for different inputs
    print_growth_rate(n, "N", f1);
    print_growth_rate(n, "sqrt(N)", f2);
    print_growth_rate(n, "N^1.5", f3);
    print_growth_rate(n, "N^2", f4);
    print_growth_rate(n, "N log N", f5);
    return 0;
}
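As a quick numerical check on the same-rate claim for N log N and N log(N²) (a sketch of my own, not part of the submitted program): since log(N²) = 2 log N, their ratio is the constant 2 for every N > 1.
Code:
#include <stdio.h>
#include <math.h>

int main(void) {
    /* N log(N^2) = 2 N log N, so the ratio of the two functions is
       constant: they grow at the same rate. */
    printf("N\tN log N\t\tN log(N^2)\tratio\n");
    for (int n = 10; n <= 100000; n *= 10) {
        double a = n * log(n);              /* N log N    */
        double b = n * log((double)n * n);  /* N log(N^2) */
        printf("%d\t%.2f\t%.2f\t%.2f\n", n, a, b, b / a);
    }
    return 0;
}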
2.2
a. T₁(N) + T₂(N) = O(f(N))   True: each term is at most a constant times f(N), so their sum is too.
b. T₁(N) – T₂(N) = o(f(N))   Not necessarily true: with f(N) = N, T₁(N) = 2N and T₂(N) = N give a difference of N, which is not o(N).
c. T₁(N) = O(1)              Not necessarily true: T₁(N) may grow with N, e.g. T₁(N) = N with f(N) = N.
d. T₁(N) = O(T₂(N))          Not necessarily true: with f(N) = N, T₁(N) = N and T₂(N) = 1 are both O(N), but N is not O(1).
Code:
#include <stdio.h>
#include <math.h>

/* Here T1(n) and T2(n) are both modeled by the same summation f(n),
   so by construction T1(n) = O(f(n)) and T2(n) = O(f(n)). The checks
   below only illustrate parts (a) and (b); they are not proofs. */
double f(int n) {
    double sum = 0;
    for (int i = 0; i < n; i++) {
        sum += i;
    }
    return sum;
}

int main() {
    int n = 10;
    double result1 = f(n);  /* T1(n) */
    double result2 = f(n);  /* T2(n) */
    double sum = result1 + result2;
    double difference = result1 - result2;
    printf("Input size (n): %d\n", n);
    printf("T1(n) result: %.2lf\n", result1);
    printf("T2(n) result: %.2lf\n", result2);
    printf("T1(n) + T2(n) result: %.2lf\n", sum);
    printf("T1(n) - T2(n) result: %.2lf\n", difference);
    printf("\nAnalysis (not definitive proof):\n");
    if (sum > (2 * result1) - (result1 / n)) {
        printf("T1(n) + T2(n) stays within a constant factor of f(n), i.e. O(f(n))\n");
    } else {
        printf("T1(n) + T2(n) growth rate might be affected by constant terms\n");
    }
    if (fabs(difference) < (result1 / n)) {
        printf("T1(n) - T2(n) growth rate might be lower than O(f(n))\n");
    } else {
        printf("T1(n) - T2(n) growth rate might still be O(f(n))\n");
    }
    return 0;
}
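The two "not necessarily true" answers can also be checked with concrete counterexamples. A small sketch of my own (the choices of T1, T2, and f below are illustrative, not from the assignment): with f(N) = N, T1(N) = N and T2(N) = 1 are both O(N), yet T1(N) − T2(N) = N − 1 is not o(N) and T1(N) is not O(T2(N)).
Code:
#include <stdio.h>

int main(void) {
    /* Counterexample for (b) and (d): with f(N) = N,
       T1(N) = N and T2(N) = 1 are both O(N), yet
       T1(N) - T2(N) = N - 1 is Theta(N), not o(N), and
       T1(N) is not O(T2(N)), since T1(N)/T2(N) = N is unbounded. */
    printf("N\tT1-T2\t(T1-T2)/N\tT1/T2\n");
    for (int n = 10; n <= 100000; n *= 10) {
        double t1 = n;    /* T1(N) = N */
        double t2 = 1.0;  /* T2(N) = 1 */
        printf("%d\t%.0f\t%.4f\t\t%.0f\n", n, t1 - t2, (t1 - t2) / n, t1 / t2);
    }
    return 0;
}
The (T1 − T2)/N column approaches 1 rather than 0, and T1/T2 grows without bound, which is why neither (b) nor (d) follows from the assumptions.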
2.3
N log N grows slower than N^(1 + ε/√log N) for any ε > 0. Writing the second function as N · N^(ε/√log N) = N · 2^(ε√log N) (taking logs base 2), the comparison reduces to log N versus 2^(ε√log N). Since ε√log N grows faster than log log N, the term 2^(ε√log N) eventually exceeds log N = 2^(log log N), so N^(1 + ε/√log N) is the faster-growing function.
Code:
#include <stdio.h>
#include <math.h>
#define EPSILON 0.01
double f1(int n) {
    return n * log(n);                          /* N log N               */
}

double f2(int n) {
    return pow(n, 1 + EPSILON / sqrt(log(n)));  /* N^(1 + ε/√log N)      */
}

int main() {
    int max_n = 1000;
    printf("Input size (n)\tN log N\t\tN^(1 + ε/√log N)\n");
    /* Start at n = 10: at n = 1, log(1) = 0 and the exponent would divide by zero. */
    for (int n = 10; n <= max_n; n *= 10) {
        double result1 = f1(n);
        double result2 = f2(n);
        printf("%d\t\t%.2lf\t\t%.2lf\n", n, result1, result2);
    }
    printf("\nAnalysis:\n");
    printf("Asymptotically, N^(1 + ε/√log N) outgrows N log N for any ε > 0.\n");
    printf("Because ε = %.2f is tiny, the crossover lies far beyond this table,\n", EPSILON);
    printf("so the small inputs above still show N log N ahead.\n");
    return 0;
}
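Because ε = 0.01 is so small, the crossover point lies far beyond any N a double can hold. A rough sketch of my own (comparing logarithms of the two functions, with the logs in the exponent taken as natural logs exactly as in the program above) makes the asymptotic claim visible: ln(N ln N) = ln N + ln(ln N), while ln(N^(1 + ε/√ln N)) = ln N + ε√(ln N), so the comparison reduces to ln(ln N) versus ε√(ln N).
Code:
#include <stdio.h>
#include <math.h>

#define EPSILON 0.01

int main(void) {
    /* Compare the extra terms of the two logarithms:
       ln(ln N) for N log N  versus  ε·sqrt(ln N) for N^(1 + ε/√ln N).
       Working with x = ln N lets us probe values of N far too large
       to represent directly. */
    printf("x = ln N\tln(x)\t\teps*sqrt(x)\tfaster-growing side\n");
    for (double x = 1e3; x <= 1e7; x *= 10) {
        double log_term = log(x);            /* contribution of the log N factor      */
        double exp_term = EPSILON * sqrt(x); /* contribution of the ε/√log N exponent */
        printf("%.0e\t\t%.2f\t\t%.2f\t\t%s\n", x, log_term, exp_term,
               exp_term > log_term ? "N^(1 + ε/√log N)" : "N log N");
    }
    return 0;
}
The last rows show ε·√(ln N) overtaking ln(ln N), which is why N^(1 + ε/√log N) wins asymptotically even though the small table above still shows N log N ahead.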
