-
Notifications
You must be signed in to change notification settings - Fork 0
/
coordinatedescent.py
110 lines (93 loc) · 4.17 KB
/
coordinatedescent.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
"""Example implementation of the Coordinate Descent algorithm in Python
- The minimum value of F(x) is computed iteratively by minimizing individual coordinates in a cyclic manner
- On each iteration the active coordinate is updated as follows
X_k+1 = X_k - alpha * [del_F(xk)] * e
Where
* alpha : step length,
* e : the active coordinate unit vector (required to make the vector addition to X_k possible),
* del_F(xk) : component of the gradient corresponding to the active coordinate
"""
import logging
import numpy as np
import math
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger(__name__)
__author__ = "@NdamuleloNemakh"
__version__ = 0.01
class CoordinateDescent:
    """Cyclic coordinate descent on the 2-D example function

        F(x1, x2) = x1 - x2 + 2*x1**2 + 2*x1*x2 + x2**2

    Each iteration k updates one coordinate of the current point:

        X_{k+1} = X_k - alpha * del_F(X_k)[i] * e_i

    where alpha is the step length, e_i the active unit coordinate vector,
    and del_F(X_k)[i] the gradient component of the active coordinate.
    """

    def __init__(self, max_iterations=500, no_of_dimensions=2, step_length=1, fixed_step_length=True):
        self.Dimensions = no_of_dimensions
        self.StepLength = step_length
        self.FixedStepLength = fixed_step_length
        self.MaxIterations = max_iterations
        self.CurrentIteration = 0
        # Start at the origin, sized to the requested dimension count
        # (the original hard-coded a 2-vector regardless of no_of_dimensions).
        self.Minimiser: np.ndarray = np.zeros(no_of_dimensions)
        # NOTE(review): the original literal was 10e-4 (== 1e-3); 1e-4 may
        # have been intended. Kept at 1e-3 to preserve behaviour.
        self.ConvergenceThreshold = 1e-3

    def __preview_config(self):
        """Log and return a human-readable summary of the run configuration."""
        configs = f'''
        Configuration Preview
        ========================
        n = {self.Dimensions}
        Step Length = {self.StepLength}
        Fixed Step Length = {self.FixedStepLength}
        Max Iterations = {self.MaxIterations}
        '''
        log.debug(configs)
        return configs

    @property
    def _current_coordinate_idx(self):
        """1-based index of the coordinate to optimize on this iteration.

        Coordinates are visited cyclically; generalized from the original
        hard-coded `i % 2` so it tracks self.Dimensions (identical for 2-D).
        """
        return self.CurrentIteration % self.Dimensions + 1

    @property
    def _ith_coordinate_vector(self):
        """Unit vector e_i with a 1 in the active coordinate, 0 elsewhere."""
        v = np.zeros(self.Dimensions)
        active_idx = self._current_coordinate_idx - 1
        v[active_idx] = 1
        return v

    @staticmethod
    def __current_gradient(xk) -> np.ndarray:
        """Gradient of the example function at xk.

        Example: F(x1, x2) = x1 - x2 + 2*x1**2 + 2*x1*x2 + x2**2
        """
        x1 = xk[0]
        x2 = xk[1]
        del_fx1 = 1 + 4 * x1 + 2 * x2
        del_fx2 = 2 * x1 + 2 * x2 - 1
        del_f = np.array([del_fx1, del_fx2])
        return del_f

    @staticmethod
    def f(xk, decimal_places=3):
        """Evaluate F(x1, x2) = x1 - x2 + 2*x1**2 + 2*x1*x2 + x2**2 at xk.

        BUG FIX: the original computed `2*x1*x2 * x2**2` (a product) instead
        of `2*x1*x2 + x2**2`, disagreeing with the gradient implemented in
        __current_gradient and with the documented objective.
        """
        x1 = xk[0]
        x2 = xk[1]
        fn_value = x1 - x2 + 2 * math.pow(x1, 2) + 2 * x1 * x2 + math.pow(x2, 2)
        return round(fn_value, decimal_places)

    @property
    def __current_gradient_value(self):
        """Euclidean norm of the gradient at the current minimiser."""
        del_f = self.__current_gradient(self.Minimiser)
        return np.sqrt(del_f.dot(del_f))

    @property
    def __current_coordinate_gradient(self):
        """Gradient component corresponding to the active coordinate."""
        del_f = self.__current_gradient(self.Minimiser)
        return del_f[self._current_coordinate_idx - 1]

    def __find_next_candidate(self):
        """Compute the next potential minimiser point, i.e. X_{k+1}."""
        xk = self.Minimiser
        del_f = self.__current_coordinate_gradient
        xk_plus_1 = xk - self.StepLength * del_f * self._ith_coordinate_vector
        log.debug(f"X**{self.CurrentIteration} = {xk} - {self.StepLength} * {del_f} * {self._ith_coordinate_vector}")
        return xk_plus_1

    def execute(self):
        """Run the descent until the gradient norm falls below the threshold
        or MaxIterations is reached.

        Returns:
            (minimiser, f(minimiser), rounded gradient norm)
        """
        log.info('Coordinate descent iteration started')
        self.__preview_config()
        for k in range(self.MaxIterations):
            log.debug(f"-----------Iteration {k}--------------")
            self.CurrentIteration = k
            self.Minimiser = self.__find_next_candidate()
            log.debug(f"------Minimiser = {self.Minimiser}. Gradient = {self.__current_gradient(self.Minimiser)} or "
                      f"{self.__current_gradient_value}--------\n")
            if self.__current_gradient_value <= self.ConvergenceThreshold:
                log.warning('Convergence condition satisfied, STOP!')
                break
        log.info(f"------Minimiser = {self.Minimiser}. Gradient = {self.__current_gradient_value}--------")
        log.info(f'Coordinate descent completed successfully. Total Iterations={self.CurrentIteration}')
        return self.Minimiser, self.f(self.Minimiser), round(self.__current_gradient_value, 3)