-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathisd.py
70 lines (50 loc) · 1.55 KB
/
isd.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
import numpy as np
from utilities import *
from test_functions import *
def isd(f, tol=1e-8, max_iter=1000, loc=None, verbose=False, plotting=False):
    """Minimize a 2-D function with steepest descent and an adaptive step size.

    Takes fixed-length steps down the (negative) numerical gradient, growing
    the step length after an improving step and shrinking it after a worsening
    or out-of-bounds step.

    Parameters
    ----------
    f : callable
        Objective, called as ``f(x, y)``.
    tol : float
        Convergence tolerance on the gradient norm, the change in ``f``, and
        the per-coordinate step length; also passed to the partial-derivative
        helpers as their finite-difference tolerance.
    max_iter : int
        Maximum number of descent iterations.
    loc : sequence of 2 floats, optional
        Starting point ``(x0, y0)``; a random point inside the function's
        range (from ``get_range``) is drawn when omitted.
    verbose : bool
        When True, return ``(best_value, (x, y))`` instead of just the value.
    plotting : bool
        Accepted for interface compatibility; currently unused here.

    Returns
    -------
    float, or (float, tuple)
        Best objective value found (and its location when ``verbose``).
    """
    # Step-size adaptation constants.
    alpha = 1.1  # grow factor after an accepted, improving step
    beta = 0.5   # shrink factor after a worsening or out-of-bounds step
    ds = 0.5     # current step length

    # xy_range is assumed to be (x_min, x_max, y_min, y_max) — matches the
    # uniform draws and the bounds test below; defined in utilities/test_functions.
    xy_range = get_range(f)
    if loc:
        x0 = loc[0]
        y0 = loc[1]
    else:
        x0 = np.random.uniform(xy_range[0], xy_range[1])
        y0 = np.random.uniform(xy_range[2], xy_range[3])

    current = f(x0, y0)
    last = current
    # Initialize the candidate point so (x, y) is defined even if the loop
    # body never runs (e.g. max_iter == 0).
    x = x0
    y = y0

    for _ in range(max_iter):
        # Descent direction: negative numerical gradient.
        gradx = -1 * x_partial(x0, y0, f, tol)
        grady = -1 * y_partial(x0, y0, f, tol)
        grad = np.sqrt(gradx * gradx + grady * grady)
        if np.abs(grad) < tol:
            # Gradient (numerically) vanished — stationary point.
            x = x0
            y = y0
            break

        # Step of length ds along the unit descent direction.
        coeff = ds / grad
        x = x0 + coeff * gradx
        y = y0 + coeff * grady

        if (x < xy_range[0] or x > xy_range[1]
                or y < xy_range[2] or y > xy_range[3]):
            # BUGFIX: the original set constraint = True here (flag was dead)
            # and then broke out via the |current - last| test, terminating at
            # an out-of-bounds point. Instead: reject the step, shrink ds, and
            # retry from the last in-bounds point.
            ds = ds * beta
            x = x0
            y = y0
            continue

        current = f(x, y)

        # Converged: objective change or step length below tolerance.
        if np.abs(current - last) <= tol:
            break
        if np.abs(x - x0) <= tol or np.abs(y - y0) <= tol:
            break

        if current > last:
            # Worsening step: shrink the step length.
            ds = ds * beta
        else:
            # Improving step: grow the step length.
            ds = ds * alpha

        last = current
        x0 = x
        y0 = y

    if verbose:
        return current, (x, y)
    else:
        return current