11 #ifndef OOPS_ASSIMILATION_PRIMALMINIMIZER_H_
12 #define OOPS_ASSIMILATION_PRIMALMINIMIZER_H_
18 #include "eckit/config/Configuration.h"
24 #include "oops/util/FloatCompare.h"
25 #include "oops/util/Logger.h"
56 const int,
const double) = 0;
63 template<
typename MODEL,
typename OBS>
66 int ninner = config.getInt(
"ninner");
67 double gnreduc = config.getDouble(
"gradient norm reduction");
69 bool runOnlineAdjTest = config.getBool(
"online diagnostics.online adj test",
false);
71 Log::info() << classname() <<
": max iter = " << ninner
72 <<
", requested norm reduction = " << gnreduc << std::endl;
75 Hessian_ hessian(J_, runOnlineAdjTest);
83 if (config.has(
"fsoi")) {
84 const eckit::LocalConfiguration FcSensitivityConfig(config,
"fsoi.input forecast sensitivity");
85 rhs.
read(FcSensitivityConfig);
86 Log::info() << classname() <<
" rhs has forecast sensitivity" << std::endl;
88 J_.computeGradientFG(rhs);
89 J_.jb().addGradientFG(rhs);
92 Log::info() << classname() <<
" rhs" << rhs << std::endl;
95 const double gnorm = dot_product(rhs, rhs);
96 const double epsilon = config.getDouble(
"epsilon", std::numeric_limits<double>::epsilon());
97 Log::info() <<
"Initial RHS squared norm = " << gnorm << std::endl;
98 if (gnorm < epsilon) {
99 Log::info() <<
"RHS smaller than " << epsilon <<
", returning." << std::endl;
104 double reduc = this->solve(*dx, rhs, hessian, B, ninner, gnreduc);
106 Log::test() << classname() <<
": reduction in residual norm = " << reduc << std::endl;
107 Log::info() << classname() <<
" output" << *dx << std::endl;
109 if (config.has(
"fsoi")) {
110 Log::info() << classname() <<
" Entering Observation Sensitivity Calculation" << std::endl;
118 const std::string osensname =
"ObsSensitivity";
121 bool runFSOIincTest = config.getBool(
"fsoi.increment test",
false);
122 if (runFSOIincTest) {
125 for (
unsigned jj = 0; jj < J_.nterms(); ++jj) {
126 std::unique_ptr<GeneralizedDepartures> ww(J_.jterm(jj).newGradientFG());
127 dp.
append(J_.jterm(jj).multiplyCovar(*ww));
131 double adj_tst_fwd = dot_product(rhs, rhs);
133 double adj_tst_bwd = dot_product(ys, dp);
135 Log::info() <<
"Online FSOI increment test: " << std::endl
136 << util::PrintAdjTest(adj_tst_fwd, adj_tst_bwd,
"K") << std::endl;
138 double fsoi_inctest_tolerance = config.getDouble(
"fsoi.increment test tolerance", 1.0e-5);
139 bool passed = oops::is_close_absolute(adj_tst_fwd, adj_tst_bwd, fsoi_inctest_tolerance);
141 Log::test() <<
"FSOI increment test within tolerance." << std::endl;
143 Log::test() <<
"FSOI increment test fails tolerance bound." << std::endl;
void zero()
Linear algebra operators.
void read(const eckit::Configuration &)
I/O and diagnostics.
Container of dual-space vectors for all terms of the cost function.
void append(std::unique_ptr< GeneralizedDepartures > &&)
void saveDep(const std::string &) const
A Minimizer knows how to minimize a cost function.
virtual double solve(CtrlInc_ &, const CtrlInc_ &, const Hessian_ &, const Bmat_ &, const int, const double)=0
BMatrix< MODEL, OBS > Bmat_
ControlIncrement< MODEL, OBS > CtrlInc_
const std::string classname() const override=0
CtrlInc_ * doMinimize(const eckit::Configuration &) override
Minimizer< MODEL, OBS > Minimizer_
HessianMatrix< MODEL, OBS > Hessian_
CostFunction< MODEL, OBS > CostFct_
RinvHMatrix< MODEL, OBS > RinvH_
PrimalMinimizer(const CostFct_ &J)
void multiply(const CtrlInc_ &dx, Dual_ &zz) const
The namespace for the main oops code.