sync test_SGD from TomographicImaging#1345, rename SAG test
epapoutsellis committed Aug 25, 2022
1 parent 11331f7 commit 4695760
Showing 2 changed files with 13 additions and 13 deletions.
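
Both changed files move the preconditioner from a fixed array to a callable of the form precond(subset_num, x). Below is a minimal sketch of that convention, not part of the diff; NumPy arrays stand in for the CIL DataContainers used in the real tests, and make_precond is an illustrative helper, not a CIL API.

import numpy as np

def make_precond(shape):
    # Same constant preconditioner the tests use: 3. divided elementwise
    # by an array filled with 2.5; both arguments are ignored, but the
    # precond(subset_num, x) signature is kept.
    return lambda subset_num, x: 3. / np.full(shape, 2.5)

precond = make_precond((4,))
grad_i = np.ones(4)                   # stand-in for one subset gradient
grad_i = grad_i * precond(0, grad_i)  # the preconditioner is applied multiplicatively
print(grad_i)                         # [1.2 1.2 1.2 1.2]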
@@ -60,27 +60,27 @@ def test_gradient(self):
         # use the gradient method for one iteration
         self.F_SAG.gradient(x, out=out1)

-        # run all steps of the SAG gradient method, one iteration
-        tmp_sag = SAGFunction(self.fi_cil, replacement = True)
+        # run all steps of the SAG gradient method for one iteration
+        tmp_sag = SAGFunction(self.fi_cil, replacement = True, precond = self.precond)

         # x is passed but the gradient initial point = None, hence initial is 0
         tmp_sag.initialise_memory(self.ig.allocate())
         tmp_sag.next_subset()
         tmp_sag.functions[tmp_sag.subset_num].gradient(x, out=tmp_sag.tmp1)
         tmp_sag.tmp1.sapyb(1., tmp_sag.subset_gradients[tmp_sag.subset_num], -1., out=tmp_sag.tmp2)
         tmp_sag.tmp2.sapyb(1./tmp_sag.num_subsets, tmp_sag.full_gradient, 1., out=out2)
-        out2 *= self.precond(tmp_sag.tmp2.subset_num, 3./self.ig.allocate(2.5))
+        out2 *= self.precond(tmp_sag.subset_num, 3./self.ig.allocate(2.5))

         # update subset_gradient in the subset_num
         # update full gradient
         tmp_sag.subset_gradients[tmp_sag.subset_num].fill(tmp_sag.tmp1)
         tmp_sag.full_gradient.sapyb(1., tmp_sag.tmp2, 1./tmp_sag.num_subsets, out=tmp_sag.full_gradient)

         np.testing.assert_allclose(tmp_sag.subset_gradients[tmp_sag.subset_num].array,
-                                    self.n_subsets * out1.array, atol=1e-3)
+                                    tmp_sag.tmp1.array, atol=1e-3)

         np.testing.assert_allclose(tmp_sag.full_gradient.array,
-                                    out1.array, atol=1e-3)
+                                    self.F_SAG.full_gradient.array, atol=1e-3)

         np.testing.assert_allclose(out1.array, out2.array, atol=1e-3)

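The hunk above replays one SAG gradient evaluation by hand: compute the current subset gradient, subtract the stored gradient for that subset, scale the difference by 1/num_subsets, add the running full gradient, apply the preconditioner, then refresh both memories. The following rough NumPy sketch mirrors that arithmetic as read from the test; it is not CIL's own SAGFunction code, and the name sag_step is illustrative.

import numpy as np

def sag_step(new_subset_grad, subset_gradients, full_gradient, i, num_subsets, precond=None):
    # Arithmetic mirrored from the test above, not CIL's implementation.
    diff = new_subset_grad - subset_gradients[i]          # tmp2 in the test
    out = diff / num_subsets + full_gradient              # SAG gradient estimate (out2)
    if precond is not None:
        out = out * precond(i, out)                       # optional preconditioning
    subset_gradients[i] = new_subset_grad.copy()          # refresh the memory for subset i
    full_gradient = full_gradient + diff / num_subsets    # keep the aggregated gradient in sync
    return out, full_gradient

# toy usage: 2 subsets, zero initial memory (matching initialise_memory on a zero image)
subset_grads = [np.zeros(3), np.zeros(3)]
full_grad = np.zeros(3)
out, full_grad = sag_step(np.array([1., 2., 3.]), subset_grads, full_grad, i=0, num_subsets=2)
print(out)        # [0.5 1.  1.5]
print(full_grad)  # [0.5 1.  1.5]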
Wrappers/Python/test/test_SGDFunction.py: 16 changes (8 additions, 8 deletions)
@@ -26,8 +26,8 @@ def setUp(self):

         self.Aop = MatrixOperator(Anp)
         self.bop = VectorData(bnp)
-        ig = self.Aop.domain
-        self.x_cil = ig.allocate('random')
+        self.ig = self.Aop.domain
+        self.x_cil = self.ig.allocate('random')

         self.fi_cil = []
         for i in range(self.n_subsets):
@@ -38,8 +38,8 @@ def setUp(self):
         self.f = (1/self.n_subsets) * LeastSquares(self.Aop, b=self.bop, c=1.0)
         self.f_SGD = SGDFunction(self.fi_cil, sampling="sequential")

-        precond = ig.allocate(1.0)
-        self.f_SGD_precond = SGDFunction(self.fi_cil, sampling="sequential", precond=precond)
+        self.precond = lambda i, x: 3./self.ig.allocate(2.5)
+        self.f_SGD_precond = SGDFunction(self.fi_cil, sampling="sequential", precond=self.precond)

     def test_gradient(self):

@@ -56,11 +56,11 @@ def test_gradient(self):
         out4 = self.x_cil.geometry.allocate()

         # With preconditioning
-        self.f_SGD_precond.gradient(self.x_cil, out=out1)
+        self.f_SGD_precond.gradient(self.x_cil, out=out3)

-        self.f_SGD_precond[self.f_SGD.subset_num].gradient(self.x_cil, out=out2)
-        out2.multiply(self.f_SGD_precond.precond, out=out2)
-        np.testing.assert_allclose(out1.array, out2.array, atol=1e-3)
+        self.f_SGD_precond[self.f_SGD.subset_num].gradient(self.x_cil, out=out4)
+        out4*=self.precond(self.f_SGD_precond.subset_num, 3./self.ig.allocate(2.5))
+        np.testing.assert_allclose(out3.array, out4.array, atol=1e-3)



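The updated assertions recompute the preconditioned SGD gradient by hand: take the gradient of the current subset function and multiply it elementwise by precond(subset_num, ...), then compare against the output of SGDFunction.gradient. A small NumPy sketch of that check under the same assumptions; preconditioned_subset_gradient is an illustrative helper, not a CIL API.

import numpy as np

def preconditioned_subset_gradient(grad_fi_x, subset_num, precond):
    # What the updated assertions expect, as read from the test:
    # the subset gradient scaled elementwise by the callable
    # preconditioner evaluated for this subset.
    return grad_fi_x * precond(subset_num, grad_fi_x)

precond = lambda i, x: 3. / np.full(3, 2.5)   # same constant preconditioner as setUp
manual = preconditioned_subset_gradient(np.array([1., 2., 3.]), 0, precond)
np.testing.assert_allclose(manual, np.array([1.2, 2.4, 3.6]), atol=1e-3)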
