diff options
| author | Edoardo Pasca <edo.paskino@gmail.com> | 2019-04-11 15:49:24 +0100 | 
|---|---|---|
| committer | Edoardo Pasca <edo.paskino@gmail.com> | 2019-04-11 15:49:24 +0100 | 
| commit | 6ce64e15b13cf7c6ae55cf9bc891980679268ac4 (patch) | |
| tree | d8fb13fbabf49d1e53593b59129407da953f4e90 /Wrappers | |
| parent | 350a889c38805dcda98a299315af1ab64510fa5b (diff) | |
| download | framework-6ce64e15b13cf7c6ae55cf9bc891980679268ac4.tar.gz framework-6ce64e15b13cf7c6ae55cf9bc891980679268ac4.tar.bz2 framework-6ce64e15b13cf7c6ae55cf9bc891980679268ac4.tar.xz framework-6ce64e15b13cf7c6ae55cf9bc891980679268ac4.zip | |
minor code beautification
Diffstat (limited to 'Wrappers')
| -rw-r--r-- | Wrappers/Python/ccpi/optimisation/algorithms/PDHG.py | 25 | 
1 file changed, 11 insertions, 14 deletions
| diff --git a/Wrappers/Python/ccpi/optimisation/algorithms/PDHG.py b/Wrappers/Python/ccpi/optimisation/algorithms/PDHG.py index 086e322..2ac3eba 100644 --- a/Wrappers/Python/ccpi/optimisation/algorithms/PDHG.py +++ b/Wrappers/Python/ccpi/optimisation/algorithms/PDHG.py @@ -66,24 +66,22 @@ class PDHG(Algorithm):              #self.y = self.f.proximal_conjugate(self.y_old, self.sigma)              self.f.proximal_conjugate(self.y_old, self.sigma, out=self.y) -             +              # Gradient ascent, Primal problem solution              self.operator.adjoint(self.y, out=self.x_tmp)              self.x_tmp *= self.tau              self.x_old -= self.x_tmp -             +              self.g.proximal(self.x_old, self.tau, out=self.x) -             +              #Update              self.x.subtract(self.x_old, out=self.xbar) -            #self.xbar -= self.x_old               self.xbar *= self.theta              self.xbar += self.x -                             +              self.x_old.fill(self.x)              self.y_old.fill(self.y) -            #self.y_old = self.y.copy() -            #self.x_old = self.x.copy() +          else:              # Gradient descent, Dual problem solution              self.y_old += self.sigma * self.operator.direct(self.xbar) @@ -92,19 +90,18 @@ class PDHG(Algorithm):              # Gradient ascent, Primal problem solution              self.x_old -= self.tau * self.operator.adjoint(self.y)              self.x = self.g.proximal(self.x_old, self.tau) -             +              #Update              #xbar = x + theta * (x - x_old)              self.xbar.fill(self.x)              self.xbar -= self.x_old               self.xbar *= self.theta              self.xbar += self.x -                             -            self.x_old.fill(self.x) -            self.y_old.fill(self.y) -            #self.y_old = self.y.copy() -            #self.x_old = self.x.copy() -            #self.y = self.y_old + +            #self.x_old.fill(self.x) +      
      #self.y_old.fill(self.y) +            self.x_old = self.x +            self.y_old = self.y      def update_objective(self):          p1 = self.f(self.operator.direct(self.x)) + self.g(self.x) | 
