"""
Orders of Growth Clarifications
@author: Alvin Wan
@site: alvinwan.com
"""
# Fundamental runtimes: O(1), O(n), O(n^2), O(log n), O(2^n)
def f(x):
    """Ignore x entirely and return 1 — a single step, so O(1) constant time."""
    return 1
def g(x):
    """Still constant time: a fixed number of constant-time steps is O(1)."""
    x = 1 + 1  # each assignment is one constant-time step
    x = 1 + 2
    x = 3      # the final binding is what gets returned
    return x
def surprising(x):  # tricky! doesn't depend on x in any way
    """Print 0..9999999 regardless of x.

    The loop runs a constant number of times, even if that constant is
    massive — so this is O(1) with respect to x.

    Bug fix: the original ``for`` statement was missing its trailing colon
    (a SyntaxError).
    """
    for i in range(10000000):
        print(i)
def loop(x):
    """Print 0..x-1: linear, O(n) in the number of elements of range(x)."""
    for item in range(x):
        print(item)
def looploop(x):
    """Nested linear loops: x * x prints in total, so O(n^2)."""
    for outer in range(x):
        for inner in range(x):
            print(inner)
def loopval(n):
    """Print 0..n-1 — O(n), where n is the VALUE of the argument,
    not the length of some container."""
    counter = 0
    while counter < n:
        print(counter)
        counter += 1
def loopval(n):  # NOTE: redefines the loopval above — the earlier def is shadowed
    """Print 0..n**2-1 — the loop runs n**2 times, so O(n^2) in the value of n."""
    counter = 0
    while counter < n ** 2:
        print(counter)
        counter += 1
students = [1, 2, 3, 4, 5, 6, 7]
# O(n) where n is the number of elements in the list of students
def loop_students(lst):
    """Print each element of lst — O(n) where n is the number of elements.

    Bug fix: the original printed the entire list on every iteration
    (``print(lst)``); the loop variable makes clear the intent was to
    print each student individually.
    """
    for student in lst:
        print(student)
def fib(n):
    """Naive doubly-recursive Fibonacci — exponential O(2^n) time,
    since each call fans out into two more calls."""
    return n if n < 2 else fib(n - 1) + fib(n - 2)
def looooop(n):
    """Print 0..(n//2 - 1).

    Still O(n): we drop coefficients like 1/2 and lower-order terms when
    classifying growth.

    Bug fix: ``range()`` requires an int; the original ``range(n/2)`` used
    true division, which returns a float and raises TypeError in Python 3.
    Floor division ``n // 2`` keeps the argument an int.
    """
    for i in range(n // 2):
        print(i)
def recurse(n):  # O(log n)
    """Repeatedly halve n until it reaches 1, then print it.

    Each call halves the remaining work, so there are O(log n) calls —
    e.g. recurse(64) => recurse(32) => recurse(16) => ... => recurse(1).

    Bug fixes: the base case printed ``x``, which is undefined here
    (NameError) — it should print ``n``; and the recursive call now uses
    integer division so n stays an int.
    """
    if n <= 1:
        print(n)
    else:
        recurse(n // 2)
# what happens in each call, for recurse?
# say we call recurse(64) => recurse(32) => recurse(16) ...
# each time, we halve the number we're working with
# i.e., we halve the number of iterations we need
# Important distinctions: (that you might not need to know for this course)
# Big O is an upper bound
# Big theta is an upper bound AND a lower bound
# Big Omega is a lower bound (which we never really care about)