Commit 11331f7

fix SGD TomographicImaging#1345, update SAG, SAGA

epapoutsellis committed Aug 23, 2022
1 parent 135b01b
Showing 4 changed files with 15 additions and 44 deletions.
19 changes: 5 additions & 14 deletions Wrappers/Python/cil/optimisation/functions/SAGAFunction.py
@@ -54,7 +54,7 @@ def __init__(self, functions, sampling = "random", precond=None, replacement = F
         self.allocate_memory = False
         super(SAGFunction, self).__init__(functions, sampling = sampling, replacement=replacement)
 
-    def gradient(self, x, out=None):
+    def gradient(self, x, out):
         """
         Returns a variance-reduced approximate gradient, defined below.
         For f = 1/num_subsets \sum_{i=1}^num_subsets F_{i}, the output is computed as follows:
@@ -98,26 +98,17 @@ def gradient(self, x, out=None):
         self.tmp1.sapyb(1., self.subset_gradients[self.subset_num], -1., out=self.tmp2)
 
         # Compute the output : tmp2 + full_gradient
-        if out is None:
-            ret = 0.0 * self.tmp2
-            self.tmp2.sapyb(1., self.full_gradient, 1., out=ret)
-        else:
-            self.tmp2.sapyb(1., self.full_gradient, 1., out=out)
+        self.tmp2.sapyb(1., self.full_gradient, 1., out=out)
 
         # Apply preconditioning
-        if self.precond is not None:
-            if out is None:
-                ret.multiply(self.precond,out=ret)
-            else:
-                out.multiply(self.precond,out=out)
+        if self.precond is not None:
+            out.multiply(self.precond(self.subset_num, x), out=out)
 
         # Update subset gradients in memory: store the computed gradient F_{subset_num} (x) in self.subset_gradients[self.subset_num]
         self.subset_gradients[self.subset_num].fill(self.tmp1)
 
         # Update the full gradient estimator: add 1/num_subsets * (gradient F_{subset_num} (x) - subset_gradient_in_memory_{subset_num}) to the current full_gradient
         self.full_gradient.sapyb(1., self.tmp2, 1./self.num_subsets, out=self.full_gradient)
 
-        if out is None:
-            return ret



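The SAGA update in this diff is compact but easy to misread with the bookkeeping interleaved. The sketch below is a minimal, self-contained NumPy re-implementation of the same logic, for illustration only: the names saga_gradient, funcs_grad and state are hypothetical, and the preconditioner follows the new callable signature precond(subset_num, x) introduced by this commit.

    import numpy as np

    def saga_gradient(funcs_grad, x, state, precond=None):
        # funcs_grad[i](x) returns the gradient of F_i at x;
        # state holds 'subset_gradients' (list), 'full_gradient', 'subset_num'
        i = state['subset_num']
        n = len(funcs_grad)

        # fresh gradient of the selected subset (tmp1 in the diff)
        tmp1 = funcs_grad[i](x)
        # difference with the gradient held in memory (tmp2 in the diff)
        tmp2 = tmp1 - state['subset_gradients'][i]
        # SAGA output: tmp2 + full_gradient (no 1/n damping, unlike SAG below)
        out = tmp2 + state['full_gradient']
        if precond is not None:
            # new signature from this commit: precond(subset_num, x)
            out = out * precond(i, x)

        # store the fresh subset gradient and refresh the running average
        state['subset_gradients'][i] = tmp1
        state['full_gradient'] = state['full_gradient'] + tmp2 / n
        return out

    # toy usage with two quadratic subsets F_i(x) = 0.5 * a_i * ||x||^2
    grads = [lambda x: 1.0 * x, lambda x: 2.0 * x]
    state = {'subset_gradients': [np.zeros(3), np.zeros(3)],
             'full_gradient': np.zeros(3), 'subset_num': 0}
    g = saga_gradient(grads, np.ones(3), state,
                      precond=lambda i, x: np.full(3, 0.5))

Since full_gradient is kept equal to the average of the stored subset gradients, the in-place update full_gradient += tmp2 / n above matches the final sapyb call in the diff.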
18 changes: 4 additions & 14 deletions Wrappers/Python/cil/optimisation/functions/SAGFunction.py
@@ -54,7 +54,7 @@ def __init__(self, functions, sampling = "random", precond=None, replacement = F
         self.allocate_memory = False
         super(SAGFunction, self).__init__(functions, sampling = sampling, replacement=replacement)
 
-    def gradient(self, x, out=None):
+    def gradient(self, x, out):
 
         """
         Returns a variance-reduced approximate gradient.
@@ -78,28 +78,18 @@ def gradient(self, x, out=None):
         self.tmp1.sapyb(1., self.subset_gradients[self.subset_num], -1., out=self.tmp2)
 
         # Compute the output : 1/num_subsets * tmp2 + full_gradient
-        if out is None:
-            ret = 0.0 * self.tmp2
-            self.tmp2.sapyb(1./self.num_subsets, self.full_gradient, 1., out=ret)
-        else:
-            self.tmp2.sapyb(1./self.num_subsets, self.full_gradient, 1., out=out)
+        self.tmp2.sapyb(1./self.num_subsets, self.full_gradient, 1., out=out)
 
         # Apply preconditioning
-        if self.precond is not None:
-            if out is None:
-                ret.multiply(self.precond,out=ret)
-            else:
-                out.multiply(self.precond,out=out)
+        if self.precond is not None:
+            out.multiply(self.precond(self.subset_num, x), out=out)
 
         # Update subset gradients in memory: store the computed gradient F_{subset_num} (x) in self.subset_gradients[self.subset_num]
         self.subset_gradients[self.subset_num].fill(self.tmp1)
 
         # Update the full gradient estimator: add 1/num_subsets * (gradient F_{subset_num} (x) - subset_gradient_in_memory_{subset_num}) to the current full_gradient
         self.full_gradient.sapyb(1., self.tmp2, 1./self.num_subsets, out=self.full_gradient)
 
-        if out is None:
-            return ret
 
     def initialise_memory(self, x):
 
         r"""Initialize subset gradients :math:`v_{i}` and full gradient that are stored in memory.
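SAGFunction differs from the SAGA version above in one place only: the correction (gradient F_i(x) minus the stored gradient) is damped by 1/num_subsets before the full-gradient estimate is added, which lowers the variance of the estimator at the cost of bias. Against the hypothetical saga_gradient sketch above, only the output line changes:

    # SAGA (previous file): out = tmp2 + full_gradient
    # SAG  (this file):     out = tmp2 / n + full_gradient
    out = tmp2 / n + state['full_gradient']

Everything else, including the new precond(subset_num, x) call and the memory updates, is identical.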
18 changes: 4 additions & 14 deletions Wrappers/Python/cil/optimisation/functions/SGDFunction.py
@@ -51,7 +51,7 @@ def __init__(self, functions, sampling = "random", replacement = False, precond=
         super(SGDFunction, self).__init__(functions, sampling = sampling, replacement = replacement)
         self.precond = precond
 
-    def gradient(self, x, out=None):
+    def gradient(self, x, out):
 
         """ Returns the gradient of the selected function at :code:`x`. The function is selected using the :meth:`~SubsetSumFunction.next_subset`
         """
@@ -60,19 +60,9 @@ def gradient(self, x, out=None):
         self.next_subset()
 
         # Compute new gradient for current subset
-        if out is None:
-            ret = 0.0 * x
-            self.functions[self.subset_num].gradient(x, out=ret)
-        else:
-            self.functions[self.subset_num].gradient(x, out=out)
+        self.functions[self.subset_num].gradient(x, out=out)
 
         # Apply preconditioning
-        if self.precond is not None:
-            if out is None:
-                ret.multiply(self.precond,out=ret)
-            else:
-                out.multiply(self.precond,out=out)
-
-        if out is None:
-            return ret
+        if self.precond is not None:
+            out.multiply(self.precond(self.subset_num, x), out=out)
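Two things change here for TomographicImaging#1345: gradient now writes into a required out argument instead of optionally allocating and returning a result, and the preconditioner is applied as a callable precond(subset_num, x) instead of being multiplied in as a fixed array. A hedged usage sketch, assuming SGDFunction is importable from this branch and that ig, subset_functions and x exist with the obvious meanings (none of this setup is shown in the diff):

    from cil.optimisation.functions import SGDFunction  # assumed import path

    # constant preconditioner: ignores the subset index and the iterate
    precond = lambda subset_num, x: ig.allocate(0.5)

    f = SGDFunction(subset_functions, sampling="random",
                    replacement=False, precond=precond)

    out = ig.allocate()
    f.gradient(x, out=out)  # 'out' is required now; nothing is returned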

4 changes: 2 additions & 2 deletions Wrappers/Python/test/test_SAG_function.py
@@ -45,7 +45,7 @@ def setUp(self):
 
         self.F = LeastSquares(self.Aop, b=self.bop, c = 0.5/self.n_subsets)
         self.ig = self.Aop.domain
-        self.precond = self.ig.allocate(1.0)
+        self.precond = lambda i, x: 3./self.ig.allocate(2.5)
         self.F_SAG = SAGFunction(self.fi_cil, replacement = True, precond = self.precond)
 
         self.initial = self.ig.allocate()
@@ -69,7 +69,7 @@ def test_gradient(self):
         tmp_sag.functions[tmp_sag.subset_num].gradient(x, out=tmp_sag.tmp1)
         tmp_sag.tmp1.sapyb(1., tmp_sag.subset_gradients[tmp_sag.subset_num], -1., out=tmp_sag.tmp2)
         tmp_sag.tmp2.sapyb(1./tmp_sag.num_subsets, tmp_sag.full_gradient, 1., out=out2)
-        out2.multiply(self.precond,out=out2)
+        out2 *= self.precond(tmp_sag.tmp2.subset_num, 3./self.ig.allocate(2.5))
 
         # update subset_gradient in the subset_num
         # update full gradient
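In the updated test, the preconditioner lambda i, x: 3./self.ig.allocate(2.5) ignores both of its arguments and evaluates to a container filled with 3/2.5 = 1.2, so the assertion reduces to checking that the hand-built SAG gradient is rescaled by that constant. A self-contained NumPy analogue of the check (all names illustrative, not CIL API):

    import numpy as np

    shape = (4,)
    n = 5                                             # num_subsets
    precond = lambda i, x: 3.0 / np.full(shape, 2.5)  # constant field of 1.2

    rng = np.random.default_rng(0)
    tmp1 = rng.standard_normal(shape)       # fresh subset gradient
    stored = rng.standard_normal(shape)     # subset gradient held in memory
    full_gradient = rng.standard_normal(shape)

    # SAG gradient assembled by hand, as in test_gradient
    out = (tmp1 - stored) / n + full_gradient
    out *= precond(0, tmp1)

    assert np.allclose(out, ((tmp1 - stored) / n + full_gradient) * 1.2)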
