 Wrappers/Python/ccpi/optimisation/algorithms/PDHG.py | 25 +++++++++++--------------
 1 file changed, 11 insertions(+), 14 deletions(-)
diff --git a/Wrappers/Python/ccpi/optimisation/algorithms/PDHG.py b/Wrappers/Python/ccpi/optimisation/algorithms/PDHG.py
index 086e322..2ac3eba 100644
--- a/Wrappers/Python/ccpi/optimisation/algorithms/PDHG.py
+++ b/Wrappers/Python/ccpi/optimisation/algorithms/PDHG.py
@@ -66,24 +66,22 @@ class PDHG(Algorithm):
 
             #self.y = self.f.proximal_conjugate(self.y_old, self.sigma)
             self.f.proximal_conjugate(self.y_old, self.sigma, out=self.y)
-            
+
             # Gradient ascent, Primal problem solution
             self.operator.adjoint(self.y, out=self.x_tmp)
             self.x_tmp *= self.tau
             self.x_old -= self.x_tmp
-            
+
             self.g.proximal(self.x_old, self.tau, out=self.x)
-            
+
             #Update
             self.x.subtract(self.x_old, out=self.xbar)
-            #self.xbar -= self.x_old
             self.xbar *= self.theta
             self.xbar += self.x
-            
+
             self.x_old.fill(self.x)
             self.y_old.fill(self.y)
-            #self.y_old = self.y.copy()
-            #self.x_old = self.x.copy()
+
         else:
             # Gradient descent, Dual problem solution
             self.y_old += self.sigma * self.operator.direct(self.xbar)
@@ -92,19 +90,18 @@ class PDHG(Algorithm):
             # Gradient ascent, Primal problem solution
             self.x_old -= self.tau * self.operator.adjoint(self.y)
             self.x = self.g.proximal(self.x_old, self.tau)
-            
+
             #Update
             #xbar = x + theta * (x - x_old)
             self.xbar.fill(self.x)
             self.xbar -= self.x_old
             self.xbar *= self.theta
             self.xbar += self.x
-
-            self.x_old.fill(self.x)
-            self.y_old.fill(self.y)
-            #self.y_old = self.y.copy()
-            #self.x_old = self.x.copy()
-            #self.y = self.y_old
+
+            #self.x_old.fill(self.x)
+            #self.y_old.fill(self.y)
+            self.x_old = self.x
+            self.y_old = self.y
 
     def update_objective(self):
         p1 = self.f(self.operator.direct(self.x)) + self.g(self.x)
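For readers less familiar with the method, the update() body above is one iteration of the Chambolle-Pock primal-dual hybrid gradient (PDHG) scheme: a proximal ascent step on the dual variable y, a proximal descent step on the primal variable x, and the over-relaxation xbar = x + theta * (x - x_old). Below is a minimal standalone sketch of the same update order, written against plain NumPy rather than the ccpi DataContainer API; the names pdhg_step, prox_f_conj and prox_g are illustrative placeholders, and the small least-squares-plus-L1 problem at the end uses made-up data.

import numpy as np

def pdhg_step(x, y, xbar, K, prox_f_conj, prox_g, sigma, tau, theta=1.0):
    # Dual update: ascend along K(xbar), then apply the prox of f*
    # (mirrors f.proximal_conjugate(y_old + sigma*K(xbar), sigma) above).
    y = prox_f_conj(y + sigma * (K @ xbar), sigma)

    # Primal update: descend along K^T y, then apply the prox of g
    # (mirrors g.proximal(x_old - tau*operator.adjoint(y), tau) above).
    x_new = prox_g(x - tau * (K.T @ y), tau)

    # Over-relaxation: xbar = x_new + theta * (x_new - x).
    xbar = x_new + theta * (x_new - x)
    return x_new, y, xbar

# Toy problem: min_x 0.5*||K x - b||^2 + lam*||x||_1 (hypothetical data).
rng = np.random.default_rng(0)
K = rng.standard_normal((20, 10))
b = rng.standard_normal(20)
lam = 0.1

# prox of f* for f(z) = 0.5*||z - b||^2, and prox of g = lam*||.||_1
# (soft thresholding).
prox_f_conj = lambda v, s: (v - s * b) / (1.0 + s)
prox_g = lambda v, t: np.sign(v) * np.maximum(np.abs(v) - t * lam, 0.0)

L = np.linalg.norm(K, 2)   # operator norm; convergence needs sigma*tau*L**2 <= 1
sigma = tau = 0.99 / L

x = np.zeros(10); xbar = x.copy(); y = np.zeros(20)
for _ in range(500):
    x, y, xbar = pdhg_step(x, y, xbar, K, prox_f_conj, prox_g, sigma, tau)

The memopt branch of the diff computes exactly these quantities but writes into preallocated containers through out= arguments and fill(), avoiding per-iteration allocations; the non-memopt branch allocates fresh objects each iteration and, after this change, rebinds x_old and y_old to the current iterates rather than copying into them.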