-rw-r--r--   Wrappers/Python/ccpi/optimisation/functions/L2NormSquared.py    | 12
-rwxr-xr-x   Wrappers/Python/ccpi/optimisation/functions/MixedL21Norm.py     |  3
-rwxr-xr-x   Wrappers/Python/wip/pdhg_TV_denoising.py                        |  6
-rw-r--r--   Wrappers/Python/wip/pdhg_tv_denoising_poisson.py                | 71
4 files changed, 58 insertions, 34 deletions
diff --git a/Wrappers/Python/ccpi/optimisation/functions/L2NormSquared.py b/Wrappers/Python/ccpi/optimisation/functions/L2NormSquared.py
index 1946d67..6d3bf86 100644
--- a/Wrappers/Python/ccpi/optimisation/functions/L2NormSquared.py
+++ b/Wrappers/Python/ccpi/optimisation/functions/L2NormSquared.py
@@ -93,12 +93,12 @@ class L2NormSquared(Function):
        if self.b is None:
            return x/(1+2*tau)
        else:
-            tmp = x
-            tmp -= self.b
-            tmp /= (1+2*tau)
-            tmp += self.b
-            return tmp
-#            return (x-self.b)/(1+2*tau) + self.b
+#            tmp = x
+#            tmp -= self.b
+#            tmp /= (1+2*tau)
+#            tmp += self.b
+#            return tmp
+            return (x-self.b)/(1+2*tau) + self.b
# if self.b is not None:
# out=x
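
The change above reverts the proximal map of the shifted squared L2 norm to the out-of-place expression: with in-place container arithmetic, tmp = x does not copy, so the deleted sequence would overwrite the caller's input. A minimal NumPy sketch of the same closed form (not the CCPi implementation), assuming F(x) = ||x - b||^2 with no 1/2 factor, which matches the 1/(1+2*tau) scaling above:

import numpy as np

def prox_l2_squared(x, tau, b=None):
    # Proximal map of tau * ||x - b||^2 (b defaults to 0):
    # argmin_u ||u - b||^2 + ||u - x||^2 / (2*tau)  =  (x + 2*tau*b) / (1 + 2*tau)
    if b is None:
        return x / (1 + 2 * tau)
    return (x - b) / (1 + 2 * tau) + b   # same value, written as in the patch

x = np.random.rand(4, 4)
b = np.random.rand(4, 4)
assert np.allclose(prox_l2_squared(x, 0.5, b), (x + b) / 2)   # tau = 0.5 averages x and b
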
diff --git a/Wrappers/Python/ccpi/optimisation/functions/MixedL21Norm.py b/Wrappers/Python/ccpi/optimisation/functions/MixedL21Norm.py
index 7e6b6e7..2004e5f 100755
--- a/Wrappers/Python/ccpi/optimisation/functions/MixedL21Norm.py
+++ b/Wrappers/Python/ccpi/optimisation/functions/MixedL21Norm.py
@@ -96,7 +96,8 @@ class MixedL21Norm(Function):
tmp = [ el*el for el in x.containers]
res = sum(tmp).sqrt().maximum(1.0)
frac = [el/res for el in x.containers]
- return BlockDataContainer(*frac)
+ return BlockDataContainer(*frac)
+
#TODO this is slow, why???
# return x.divide(x.pnorm().maximum(1.0))
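
The loop above divides, per pixel, each gradient component by the Euclidean norm across components clipped from below at 1, i.e. the projection onto the unit L2 ball that acts as the resolvent of the convex conjugate of the mixed L2,1 norm; the commented-out one-liner x.divide(x.pnorm().maximum(1.0)) expresses the same operation. A small NumPy sketch (not the CCPi BlockDataContainer code), assuming the components are stacked along axis 0 of a single array:

import numpy as np

def project_l2_unit_ball(g):
    # Pixelwise projection onto {y : ||y||_2 <= 1}; points already inside the ball are unchanged.
    norm = np.sqrt((g ** 2).sum(axis=0))   # Euclidean norm over the component axis
    return g / np.maximum(norm, 1.0)

g = np.random.randn(2, 100, 100)           # e.g. vertical and horizontal finite differences
p = project_l2_unit_ball(g)
assert np.sqrt((p ** 2).sum(axis=0)).max() <= 1.0 + 1e-12
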
diff --git a/Wrappers/Python/wip/pdhg_TV_denoising.py b/Wrappers/Python/wip/pdhg_TV_denoising.py
index a5cd1bf..e6d38e9 100755
--- a/Wrappers/Python/wip/pdhg_TV_denoising.py
+++ b/Wrappers/Python/wip/pdhg_TV_denoising.py
@@ -20,10 +20,10 @@ from ccpi.optimisation.functions import ZeroFunction, L2NormSquared, \
from skimage.util import random_noise
from timeit import default_timer as timer
-def dt(steps):
-    return steps[-1] - steps[-2]
+#def dt(steps):
+#    return steps[-1] - steps[-2]
-# Create phantom for TV denoising
+# Create phantom for TV Gaussian denoising
N = 100
diff --git a/Wrappers/Python/wip/pdhg_tv_denoising_poisson.py b/Wrappers/Python/wip/pdhg_tv_denoising_poisson.py
index 9fad6f8..fbe0d9b 100644
--- a/Wrappers/Python/wip/pdhg_tv_denoising_poisson.py
+++ b/Wrappers/Python/wip/pdhg_tv_denoising_poisson.py
@@ -6,24 +6,25 @@ Created on Fri Feb 22 14:53:03 2019
@author: evangelos
"""
-from ccpi.framework import ImageData, ImageGeometry, BlockDataContainer
-
import numpy as np
import matplotlib.pyplot as plt
+from ccpi.framework import ImageData, ImageGeometry
+
from ccpi.optimisation.algorithms import PDHG, PDHG_old
from ccpi.optimisation.operators import BlockOperator, Identity, Gradient
-from ccpi.optimisation.functions import ZeroFun, KullbackLeibler, \
- MixedL21Norm, FunctionOperatorComposition, BlockFunction
+from ccpi.optimisation.functions import ZeroFunction, KullbackLeibler, \
+ MixedL21Norm, BlockFunction
from skimage.util import random_noise
+from timeit import default_timer as timer
# ############################################################################
-# Create phantom for TV denoising
+# Create phantom for TV Poisson denoising
N = 100
data = np.zeros((N,N))
@@ -41,13 +42,11 @@ plt.imshow(noisy_data.as_array())
plt.colorbar()
plt.show()
-#%%
-
# Regularisation Parameter
-alpha = 10
+alpha = 2
#method = input("Enter structure of PDHG (0=Composite or 1=NotComposite): ")
-method = '1'
+method = '0'
if method == '0':
    # Create operators
@@ -57,15 +56,11 @@ if method == '0':
    # Form Composite Operator
    operator = BlockOperator(op1, op2, shape=(2,1) )
-    #### Create functions
-#    f = FunctionComposition_new(operator, mixed_L12Norm(alpha), \
-#                                L2NormSq(0.5, b = noisy_data) )
-
    f1 = alpha * MixedL21Norm()
-    f2 = KullbackLeibler(b = noisy_data)
+    f2 = KullbackLeibler(noisy_data)
    f = BlockFunction(f1, f2 )
-    g = ZeroFun()
+    g = ZeroFunction()
else:
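
In both branches the Poisson data term is the Kullback-Leibler fidelity: f2 = KullbackLeibler(noisy_data) in the composite branch above, g = KullbackLeibler(noisy_data) in the non-composite branch below. As an illustration only (not the CCPi KullbackLeibler class), the generalised KL divergence commonly used for Poisson denoising can be written and evaluated as follows; the data b and candidate image u are hypothetical:

import numpy as np

def kl_fidelity(u, b):
    # Generalised KL divergence sum(u - b + b*log(b/u)), with the convention 0*log(0) = 0.
    mask = b > 0
    val = (u - b).sum()
    val += (b[mask] * np.log(b[mask] / u[mask])).sum()
    return val

b = np.random.poisson(5.0, size=(100, 100)).astype(float)   # hypothetical Poisson-noisy data
u = np.full_like(b, b.mean())                                # a constant candidate image
print(kl_fidelity(u, b))                                     # nonnegative; zero only when u == b
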
@@ -73,29 +68,57 @@ else:
    # No Composite #
    ###########################################################################
    operator = Gradient(ig)
-    f = alpha * FunctionOperatorComposition(operator, MixedL21Norm())
+    f = alpha * MixedL21Norm()
    g = KullbackLeibler(noisy_data)
    ###########################################################################
#%%
# Compute operator Norm
normK = operator.norm()
-print ("normK", normK)
-# Primal & dual stepsizes
-#sigma = 1
-#tau = 1/(sigma*normK**2)
-sigma = 1/normK
-tau = 1/normK
+# Primal & dual stepsizes
+sigma = 1
+tau = 1/(sigma*normK**2)
opt = {'niter':2000}
+opt1 = {'niter':2000, 'memopt': True}
+t1 = timer()
res, time, primal, dual, pdgap = PDHG_old(f, g, operator, tau = tau, sigma = sigma, opt = opt)
-
-plt.figure(figsize=(5,5))
+t2 = timer()
+
+print(" Run memopt")
+
+t3 = timer()
+res1, time1, primal1, dual1, pdgap1 = PDHG_old(f, g, operator, tau = tau, sigma = sigma, opt = opt1)
+t4 = timer()
+
+#%%
+plt.figure(figsize=(15,15))
+plt.subplot(3,1,1)
plt.imshow(res.as_array())
+plt.title('no memopt')
+plt.colorbar()
+plt.subplot(3,1,2)
+plt.imshow(res1.as_array())
+plt.title('memopt')
+plt.colorbar()
+plt.subplot(3,1,3)
+plt.imshow((res1 - res).abs().as_array())
+plt.title('diff')
plt.colorbar()
plt.show()
+#
+plt.plot(np.linspace(0,N,N), res1.as_array()[int(N/2),:], label = 'memopt')
+plt.plot(np.linspace(0,N,N), res.as_array()[int(N/2),:], label = 'no memopt')
+plt.legend()
+plt.show()
+
+print ("Time: No memopt in {}s, \n Time: Memopt in {}s ".format(t2-t1, t4 -t3))
+diff = (res1 - res).abs().as_array().max()
+
+print(" Max of abs difference is {}".format(diff))
+
#pdhg = PDHG(f=f,g=g,operator=operator, tau=tau, sigma=sigma)
#pdhg.max_iteration = 2000
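
The stepsize edit above replaces sigma = tau = 1/normK with sigma = 1 and tau = 1/(sigma*normK**2); both choices satisfy the standard PDHG convergence condition sigma*tau*||K||^2 <= 1. A quick illustrative check (the value of normK below is hypothetical; in the script it comes from operator.norm()):

normK = 8.0                                # hypothetical operator norm

sigma_old = tau_old = 1.0 / normK          # previous choice
sigma_new = 1.0
tau_new = 1.0 / (sigma_new * normK ** 2)   # new choice

for sigma, tau in [(sigma_old, tau_old), (sigma_new, tau_new)]:
    assert sigma * tau * normK ** 2 <= 1.0 + 1e-12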