diff --git a/README.md b/README.md
index fcc2da2..4cf773e 100644
--- a/README.md
+++ b/README.md
@@ -10,7 +10,7 @@ SVRG with a projected gradient descent warm start and armijo line searches.
 SVRG minimises a sum of differentiable functions, $x^* = \underset{x}{\text{arg min}}\sum_n f_n(x)$. It approximates the gradient at each iteration by
 $$
 \begin{equation}
-g^{(k)} = n\left(\nabla_n f(x^{(k)}) - \nabla_n f(\tilde{x})\right) + \nabla f(\tilde{x})
+g^{(k)} = N\left(\nabla_n f(x^{(k)}) - \nabla_n f(\tilde{x})\right) + \nabla f(\tilde{x})
 \end{equation}
 $$
 where N is the number of sub-functions and $\tilde{x}$ is an anchor point where a full gradient has been calculated.
diff --git a/main.py b/main.py
index e3afdb3..37c1e09 100644
--- a/main.py
+++ b/main.py
@@ -8,14 +8,12 @@
 >>> algorithm.run(np.inf, callbacks=metrics + submission_callbacks)
 """
 #%%
-from sirf.STIR import AcquisitionData
-AcquisitionData.set_storage_scheme('memory')
 from cil.optimisation.algorithms import ISTA, Algorithm
 from cil.optimisation.functions import IndicatorBox, SVRGFunction
 from cil.optimisation.utilities import (Preconditioner, Sampler,
                                         StepSizeRule)
-from petric import Dataset
 from sirf.contrib.partitioner import partitioner
+from petric import Dataset
 import numpy as np
 import numbers
 
@@ -240,11 +238,15 @@ def __init__(self, data: Dataset, update_objective_interval=10):
         decay_perc = 0.1
         decay = (1/(1-decay_perc) - 1)/update_interval
         beta = 0.5
+
+        print(f"Using {self.num_subsets} subsets")
         _, _, obj_funs = partitioner.data_partition(data.acquired_data,
                                                     data.additive_term,
                                                     data.mult_factors,
                                                     self.num_subsets,
                                                     mode='staggered', initial_image=data.OSEM_image)
+        print("made it past partitioner")
+
         data.prior.set_penalisation_factor(data.prior.get_penalisation_factor() / len(obj_funs))
         data.prior.set_up(data.OSEM_image)
 