author | epapoutsellis <epapoutsellis@gmail.com> | 2019-04-23 09:43:00 +0100
committer | epapoutsellis <epapoutsellis@gmail.com> | 2019-04-23 09:43:00 +0100
commit | ad31397888520977a21a8cd05578aba2f10c832d (patch)
tree | 89aedfc7eaf985a1bf4cc76c5545f372181ec23a
parent | 27774e3f370db3366dfaa0a4653df8b41349e62c (diff)
PDGAP for TV
-rw-r--r-- | Wrappers/Python/ccpi/optimisation/algorithms/PDHG.py | 25 |
1 file changed, 13 insertions, 12 deletions
diff --git a/Wrappers/Python/ccpi/optimisation/algorithms/PDHG.py b/Wrappers/Python/ccpi/optimisation/algorithms/PDHG.py
index 6360ac1..7631e29 100644
--- a/Wrappers/Python/ccpi/optimisation/algorithms/PDHG.py
+++ b/Wrappers/Python/ccpi/optimisation/algorithms/PDHG.py
@@ -13,6 +13,7 @@ import time
 from ccpi.optimisation.operators import BlockOperator
 from ccpi.framework import BlockDataContainer
 from ccpi.optimisation.functions import FunctionOperatorComposition
+import matplotlib.pyplot as plt
 
 class PDHG(Algorithm):
     '''Primal Dual Hybrid Gradient'''
@@ -104,7 +105,7 @@ class PDHG(Algorithm):
 
     def update_objective(self):
         p1 = self.f(self.operator.direct(self.x)) + self.g(self.x)
-        d1 = -(self.f.convex_conjugate(self.y) + self.g(-1*self.operator.adjoint(self.y)))
+        d1 = -(self.f.convex_conjugate(self.y) + self.g.convex_conjugate(-1*self.operator.adjoint(self.y)))
 
         self.loss.append([p1,d1,p1-d1])
 
@@ -152,7 +153,7 @@ def PDHG_old(f, g, operator, tau = None, sigma = None, opt = None, **kwargs):
 
         if not memopt:
 
-            y_tmp = y_old + sigma * operator.direct(xbar)
+            y_tmp = y_old + sigma * operator.direct(xbar)
             y = f.proximal_conjugate(y_tmp, sigma)
 
             x_tmp = x_old - tau*operator.adjoint(y)
@@ -164,15 +165,15 @@ def PDHG_old(f, g, operator, tau = None, sigma = None, opt = None, **kwargs):
 
             x_old.fill(x)
             y_old.fill(y)
-            
-            
+
             if i%10==0:
-#                
+
                 p1 = f(operator.direct(x)) + g(x)
-                d1 = - ( f.convex_conjugate(y) + g(-1*operator.adjoint(y)) )
+                d1 = - ( f.convex_conjugate(y) + g.convex_conjugate(-1*operator.adjoint(y)) )
                 primal.append(p1)
                 dual.append(d1)
-                pdgap.append(p1-d1)
+                pdgap.append(p1-d1)
+                print(p1, d1, p1-d1)
 
         else:
 
@@ -190,18 +191,18 @@ def PDHG_old(f, g, operator, tau = None, sigma = None, opt = None, **kwargs):
             x.subtract(x_old, out=xbar)
             xbar *= theta
             xbar += x
-            
+
             x_old.fill(x)
             y_old.fill(y)
 
             if i%10==0:
-#                
+
                 p1 = f(operator.direct(x)) + g(x)
-                d1 = - ( f.convex_conjugate(y) + g(-1*operator.adjoint(y)) )
+                d1 = - ( f.convex_conjugate(y) + g.convex_conjugate(-1*operator.adjoint(y)) )
                 primal.append(p1)
                 dual.append(d1)
-                pdgap.append(p1-d1)
-#                print(p1, d1, p1-d1)
+                pdgap.append(p1-d1)
+                print(p1, d1, p1-d1)
 
 
     t_end = time.time()
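For context: the substantive change replaces g(-1*operator.adjoint(y)) with g.convex_conjugate(-1*operator.adjoint(y)) in the dual objective, so that the logged quantity is the usual primal-dual gap for min_x f(Kx) + g(x), i.e. p(x) = f(Kx) + g(x), d(y) = -(f*(y) + g*(-K^T y)), gap = p(x) - d(y). The sketch below is a minimal, self-contained illustration of why the convex conjugate of g is needed; it uses plain NumPy on a toy quadratic problem rather than the CCPi Function/Operator classes, and the names A, b, alpha, primal, dual and dual_pre_fix are hypothetical. With the corrected formula the gap vanishes at the optimum; with the pre-fix formula it does not.

```python
# Toy problem:  min_x  f(Ax) + g(x)  with
#   f(z) = 0.5*||z - b||^2    (conjugate: f*(y) = 0.5*||y||^2 + <b, y>)
#   g(x) = 0.5*alpha*||x||^2  (conjugate: g*(v) = ||v||^2 / (2*alpha))
import numpy as np

rng = np.random.default_rng(0)
A = rng.standard_normal((20, 10))
b = rng.standard_normal(20)
alpha = 0.1

# Closed-form primal solution and the corresponding optimal dual variable y* = A x* - b
x_star = np.linalg.solve(A.T @ A + alpha * np.eye(10), A.T @ b)
y_star = A @ x_star - b

def primal(x):
    # p(x) = f(Ax) + g(x), same structure as p1 in the patch
    return 0.5 * np.sum((A @ x - b) ** 2) + 0.5 * alpha * np.sum(x ** 2)

def dual(y):
    # d(y) = -( f*(y) + g*(-A^T y) ), same structure as the patched d1
    f_conj = 0.5 * np.sum(y ** 2) + b @ y          # f*(y)
    g_conj = np.sum((A.T @ y) ** 2) / (2 * alpha)  # g*(-A^T y)
    return -(f_conj + g_conj)

def dual_pre_fix(y):
    # the pre-fix formula evaluated g itself at -A^T y instead of its conjugate
    f_conj = 0.5 * np.sum(y ** 2) + b @ y
    g_val = 0.5 * alpha * np.sum((A.T @ y) ** 2)   # g(-A^T y)
    return -(f_conj + g_val)

print("gap with g.convex_conjugate:", primal(x_star) - dual(y_star))          # ~ 0
print("gap with g (pre-fix)       :", primal(x_star) - dual_pre_fix(y_star))  # nonzero
```

The same reasoning applies to both changed call sites (PDHG.update_objective and the i%10==0 logging block in PDHG_old): only with g's convex conjugate does p1 - d1 measure a true duality gap and tend to zero as the iterates converge.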