19 #ifndef PISM_IPTAOTIKHONOVPROBLEM_HH
20 #define PISM_IPTAOTIKHONOVPROBLEM_HH
24 #include "pism/inverse/TaoUtil.hh"
25 #include "pism/inverse/functional/IPFunctional.hh"
26 #include "pism/util/ConfigInterface.hh"
27 #include "pism/util/Context.hh"
28 #include "pism/util/Grid.hh"
29 #include "pism/util/Logger.hh"
30 #include "pism/util/array/Array.hh"
31 #include "pism/util/petscwrappers/DM.hh"
32 #include "pism/util/petscwrappers/Vec.hh"
40 template <
class ForwardProblem>
41 class IPTaoTikhonovProblem;
54 template <
class ForwardProblem>
57 typedef std::shared_ptr<IPTaoTikhonovProblemListener>
Ptr;
59 typedef std::shared_ptr<typename ForwardProblem::DesignVec>
DesignVecPtr;
60 typedef std::shared_ptr<typename ForwardProblem::StateVec>
StateVecPtr;
61 typedef std::shared_ptr<typename ForwardProblem::StateVec1>
StateVec1Ptr;
70 double objectiveValue,
double designValue,
const DesignVecPtr d,
168 template <
class ForwardProblem>
178 typedef std::shared_ptr<typename ForwardProblem::DesignVec>
DesignVecPtr;
179 typedef std::shared_ptr<typename ForwardProblem::StateVec>
StateVecPtr;
180 typedef std::shared_ptr<typename ForwardProblem::StateVec1>
StateVec1Ptr;
289 std::vector<typename IPTaoTikhonovProblemListener<ForwardProblem>::Ptr>
m_listeners;
303 template <
class ForwardProblem>
309 m_dGlobal(d0.grid(),
"design variable (global)"),
312 m_adjointRHS(d0.grid(),
"work vector"),
314 m_designFunctional(designFunctional),
315 m_stateFunctional(stateFunctional) {
320 m_d = std::make_shared<DesignVecGhosted>(
m_grid,
"design variable");
326 m_d_diff = std::make_shared<DesignVecGhosted>(
m_grid,
"design residual");
332 m_grad = std::make_shared<DesignVec>(
m_grid,
"gradient");
335 template<
class ForwardProblem>
340 template<
class ForwardProblem>
345 ObjGradCallback::connect(tao,*
this);
352 gatol = PETSC_DEFAULT,
353 grtol = PETSC_DEFAULT,
354 gttol = PETSC_DEFAULT;
356 #if PETSC_VERSION_LT(3,7,0)
357 double fatol = 1e-10, frtol = 1e-20;
358 PetscErrorCode ierr = TaoSetTolerances(tao, fatol, frtol, gatol, grtol, gttol);
361 PetscErrorCode ierr = TaoSetTolerances(tao, gatol, grtol, gttol);
366 template<
class ForwardProblem>
369 TaoGetSolutionStatus(tao, &its, NULL, NULL, NULL, NULL, NULL);
371 int nListeners = m_listeners.size();
372 for (
int k=0;
k<nListeners;
k++) {
373 m_listeners[
k]->iteration(*
this, m_eta,
374 its, m_val_design, m_val_state,
375 m_d, m_d_diff, m_grad_design,
376 m_forward.solution(), m_u_diff, m_grad_state,
382 double designNorm, stateNorm, sumNorm;
383 double dWeight, sWeight;
387 designNorm = m_grad_design->norm(NORM_2)[0];
388 stateNorm = m_grad_state->norm(NORM_2)[0];
389 sumNorm = m_grad->norm(NORM_2)[0];
391 designNorm *= dWeight;
392 stateNorm *= sWeight;
394 if (sumNorm < m_tikhonov_atol) {
395 TaoSetConvergedReason(tao, TAO_CONVERGED_GATOL);
396 }
else if (sumNorm < m_tikhonov_rtol*
std::max(designNorm,stateNorm)) {
397 TaoSetConvergedReason(tao, TAO_CONVERGED_USER);
399 TaoDefaultConvergenceTest(tao, NULL);
403 template<
class ForwardProblem>
405 double *value, Vec gradient) {
409 ierr = DMGlobalToLocalBegin(*m_d->dm(), x, INSERT_VALUES, m_d->vec());
410 PISM_CHK(ierr,
"DMGlobalToLocalBegin");
412 ierr = DMGlobalToLocalEnd(*m_d->dm(), x, INSERT_VALUES, m_d->vec());
413 PISM_CHK(ierr,
"DMGlobalToLocalEnd");
416 auto reason = m_forward.linearize_at(*m_d);
417 if (reason->failed()) {
420 "IPTaoTikhonovProblem::evaluateObjectiveAndGradient"
421 " failure in forward solve\n%s\n", reason->description().c_str());
422 ierr = TaoSetConvergedReason(tao, TAO_DIVERGED_USER);
423 PISM_CHK(ierr,
"TaoSetConvergedReason");
427 m_d_diff->copy_from(*m_d);
428 m_d_diff->add(-1, m_d0);
429 m_designFunctional.gradientAt(*m_d_diff, *m_grad_design);
431 m_u_diff->copy_from(*m_forward.solution());
432 m_u_diff->add(-1, m_u_obs);
435 m_stateFunctional.gradientAt(*m_u_diff, m_adjointRHS);
436 m_forward.apply_linearization_transpose(m_adjointRHS, *m_grad_state);
438 m_grad->copy_from(*m_grad_design);
439 m_grad->scale(1.0 / m_eta);
440 m_grad->add(1, *m_grad_state);
442 ierr = VecCopy(m_grad->vec(), gradient);
445 double valDesign, valState;
446 m_designFunctional.valueAt(*m_d_diff, &valDesign);
447 m_stateFunctional.valueAt(*m_u_diff, &valState);
449 m_val_design = valDesign;
450 m_val_state = valState;
452 *value = valDesign / m_eta + valState;
static std::shared_ptr< TerminationReason > success()
std::shared_ptr< const Logger > ConstPtr
Abstract base class for functions from ice model vectors to the real numbers (functionals).
std::shared_ptr< IPTaoTikhonovProblemListener > Ptr
virtual void iteration(IPTaoTikhonovProblem< ForwardProblem > &problem, double eta, int iter, double objectiveValue, double designValue, const DesignVecPtr d, const DesignVecPtr diff_d, const DesignVecPtr grad_d, const StateVecPtr u, const StateVecPtr diff_u, const DesignVecPtr grad_u, const DesignVecPtr gradient)=0
The method called after each minimization iteration.
IPTaoTikhonovProblemListener()
virtual ~IPTaoTikhonovProblemListener()
std::shared_ptr< typename ForwardProblem::StateVec1 > StateVec1Ptr
std::shared_ptr< typename ForwardProblem::StateVec > StateVecPtr
std::shared_ptr< typename ForwardProblem::DesignVec > DesignVecPtr
Iteration callback class for IPTaoTikhonovProblem.
double m_eta
Penalty parameter/Lagrange multiplier.
std::shared_ptr< typename ForwardProblem::DesignVec > DesignVecPtr
std::vector< typename IPTaoTikhonovProblemListener< ForwardProblem >::Ptr > m_listeners
List of iteration callbacks.
virtual void convergenceTest(Tao tao)
Callback from TAO to detect convergence. Allows us to implement a custom convergence check.
std::shared_ptr< typename ForwardProblem::StateVec1 > StateVec1Ptr
double m_val_state
Value of the state misfit functional J_S(F(d) - u_obs) at the current iterate.
ForwardProblem & m_forward
virtual void addListener(typename IPTaoTikhonovProblemListener< ForwardProblem >::Ptr listener)
Add an object to the list of objects to be called after each iteration.
double m_tikhonov_atol
Convergence parameter: convergence stops when the norm of the combined gradient falls below m_tikhonov_rtol times the larger of the design- and state-gradient norms.
DesignVecPtr m_grad_state
Gradient of the state misfit term J_S at the current iterate.
virtual void setInitialGuess(DesignVec &d)
Sets the initial guess for minimization iterations. If this isn't set explicitly, the a-priori estimate d0 is used.
DesignVecPtr m_d_diff
Storage for (m_d-m_d0)
virtual void connect(Tao tao)
Callback from TaoBasicSolver, used to wire the connections between a Tao and this object.
std::shared_ptr< typename ForwardProblem::StateVec > StateVecPtr
IPFunctional< array::Scalar > & m_designFunctional
Implementation of the design functional J_D, which measures the size of d - d0.
virtual void evaluateObjectiveAndGradient(Tao tao, Vec x, double *value, Vec gradient)
Callback provided to TAO for objective evaluation.
StateVec1Ptr m_u_diff
Storage for F(d)-u_obs.
ForwardProblem::DesignVec DesignVec
DesignVecGhostedPtr m_d
Current iterate of design parameter.
DesignVec & m_d0
A-priori estimate of design parameter.
std::shared_ptr< const Grid > m_grid
double m_val_design
Value of the design functional J_D(d - d0) at the current iterate.
IPFunctional< array::Vector > & m_stateFunctional
Implementation of the state functional J_S, which measures the misfit F(d) - u_obs.
ForwardProblem::StateVec StateVec
virtual std::shared_ptr< TerminationReason > formInitialGuess(Vec *v)
Callback from TaoBasicSolver to form the starting iterate for the minimization. See also setInitialGuess().
virtual DesignVecPtr designSolution()
Value of d, the solution of the minimization problem.
std::shared_ptr< typename ForwardProblem::DesignVecGhosted > DesignVecGhostedPtr
StateVec & m_u_obs
State parameter to match via F(d)=u_obs.
DesignVecPtr m_grad_design
Gradient of the design term J_D at the current iterate.
virtual ~IPTaoTikhonovProblem()
DesignVec m_dGlobal
Initial iterate of design parameter, stored without ghosts for the benefit of TAO.
StateVec m_adjointRHS
Temporary storage used in gradient computation.
ForwardProblem::DesignVecGhosted DesignVecGhosted
ForwardProblem::StateVec1 StateVec1
virtual void monitorTao(Tao tao)
Callback from TAO after each iteration. The call is forwarded to each element of our list of listeners.
IPTaoTikhonovProblem(ForwardProblem &forward, DesignVec &d0, StateVec &u_obs, double eta, IPFunctional< DesignVec > &designFunctional, IPFunctional< StateVec > &stateFunctional)
virtual StateVecPtr stateSolution()
Final value of F(d), where d is the solution of the minimization.
Defines a Tikhonov minimization problem to be solved with a TaoBasicSolver.
Adaptor to connect a TAO objective function callback to a C++ object method.
Adaptor to connect a TAO monitoring callback to a C++ object method.
Adaptor to connect a TAO objective and gradient function callback to a C++ object method.
#define PISM_CHK(errcode, name)
double max(const array::Scalar &input)
Finds maximum over all the values in an array::Scalar object. Ignores ghosts.