using namespace Ipopt;

// Create the Ipopt application and use a limited-memory (quasi-Newton)
// Hessian approximation by default, so no second derivatives are required.
fIpotApp = IpoptApplicationFactory();
fIpotApp->Options()->SetStringValue("hessian_approximation", "limited-memory");

// Constructor overload that also receives the linear solver type.
fIpotApp->Options()->SetStringValue("hessian_approximation", "limited-memory");
fIpotApp->Options()->SetStringValue("linear_solver", type);
IpoptMinimizer::InternalTNLP::InternalTNLP(IpoptMinimizer *minimizer)
{
   nlp_lower_bound_inf = -1e19;
   nlp_upper_bound_inf = 1e19;
   fMinimizer = minimizer;
}

IpoptMinimizer::InternalTNLP::~InternalTNLP()
{
}
bool IpoptMinimizer::InternalTNLP::get_nlp_info(Index &n, Index &m, Index &nnz_jac_g, Index &nnz_h_lag,
                                                IndexStyleEnum &index_style)
{
   n = fMinimizer->NDim();
   m = 0; // no general constraints are defined by this wrapper
   nnz_jac_g = fNNZerosJacobian;
   nnz_h_lag = fNNZerosHessian;
   // use 0-based (C style) indexing for the sparse index arrays
   index_style = TNLP::C_STYLE;
   return true;
}
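The numbers of non-zero entries of the constraint Jacobian and of the Lagrangian Hessian come straight from the counters exposed through SetNNZerosJacobian and SetNNZerosHessian. With the default limited-memory Hessian approximation and no general constraints they can simply stay at zero; a sketch, with `min` again standing for a hypothetical IpoptMinimizer instance:

min.SetNNZerosJacobian(0); // empty constraint Jacobian: no general constraints
min.SetNNZerosHessian(0);  // Hessian entries not supplied: it is approximated (limited-memory)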
bool IpoptMinimizer::InternalTNLP::get_bounds_info(Index n, Number *x_l, Number *x_u, Index, Number *, Number *)
{
   R__ASSERT(n == (Index)fMinimizer->NDim());

   for (Index i = 0; i < n; i++) {
      ROOT::Fit::ParameterSettings varsettings;
      if (fMinimizer->GetVariableSettings(i, varsettings)) {
         // parameters with limits use them; the others get Ipopt's "infinite" bounds
         x_l[i] = varsettings.HasLowerLimit() ? varsettings.LowerLimit() : nlp_lower_bound_inf;
         x_u[i] = varsettings.HasUpperLimit() ? varsettings.UpperLimit() : nlp_upper_bound_inf;
      } else {
         MATH_ERROR_MSG("IpoptMinimizer::InternalTNLP::get_bounds_info", Form("Variable index = %d not found", i));
      }
   }
   return true;
}
bool IpoptMinimizer::InternalTNLP::get_starting_point(Index n, bool, Number *x, bool, Number *, Number *, Index,
                                                      bool, Number *)
{
   R__ASSERT(n == (Index)fMinimizer->NDim());
   for (Index i = 0; i < n; i++) {
      ROOT::Fit::ParameterSettings varsettings;
      if (fMinimizer->GetVariableSettings(i, varsettings)) {
         // start Ipopt from the current value of each minimizer parameter
         x[i] = varsettings.Value();
      } else {
         MATH_ERROR_MSG("IpoptMinimizer::InternalTNLP::get_starting_point", Form("Variable index = %d not found", i));
      }
   }
   return true;
}
bool IpoptMinimizer::InternalTNLP::eval_f(Index n, const Number *x, bool, Number &obj_value)
{
   auto fun = fMinimizer->ObjFunction();
   R__ASSERT(n == (Index)fun->NDim());
   // evaluate the wrapped ROOT objective function at the current point
   obj_value = (*fun)(x);
   return true;
}
bool IpoptMinimizer::InternalTNLP::eval_grad_f(Index n, const Number *x, bool, Number *grad_f)
{
   auto gfun = fMinimizer->GradObjFunction();
   R__ASSERT(n == (Index)gfun->NDim());
   // fill grad_f with the gradient of the objective function at x
   gfun->Gradient(x, grad_f);
   return true;
}
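eval_grad_f assumes that GradObjFunction() returns a function object able to compute its own gradient. When the objective is a plain function, ROOT can wrap it in a numerical-gradient adapter; the sketch below is not from this file and only assumes that ROOT::Math::Functor and ROOT::Math::MultiNumGradFunction are available in the build:

#include "Math/Functor.h"
#include "Math/MultiNumGradFunction.h"

// Plain objective without analytic derivatives (illustration only).
double rosenbrock(const double *x)
{
   const double a = x[1] - x[0] * x[0];
   const double b = 1.0 - x[0];
   return 100.0 * a * a + b * b;
}

ROOT::Math::Functor f(&rosenbrock, 2);  // generic multi-dimensional function wrapper
ROOT::Math::MultiNumGradFunction gf(f); // adds a finite-difference Gradient(x, grad)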
// Constraint callbacks: this wrapper defines no general constraints and relies on
// the limited-memory Hessian approximation, so these methods have nothing to fill in.
bool IpoptMinimizer::InternalTNLP::eval_g(Index, const Number *, bool, Index, Number *)
{
   return true;
}

bool IpoptMinimizer::InternalTNLP::eval_jac_g(Index, const Number *, bool, Index, Index, Index *, Index *, Number *)
{
   return true;
}

bool IpoptMinimizer::InternalTNLP::eval_h(Index, const Number *, bool, Number, Index, const Number *, bool, Index,
                                          Index *, Index *, Number *)
{
   return true;
}
void IpoptMinimizer::InternalTNLP::finalize_solution(SolverReturn, Index n, const Number *x, const Number *z_L,
                                                     const Number *z_U, Index m, const Number *g, const Number *,
                                                     Number obj_value, const IpoptData *,
                                                     IpoptCalculatedQuantities *)
{
   std::cout << std::endl << std::endl << "Solution of the primal variables, x" << std::endl;
   for (Index i = 0; i < n; i++) {
      std::cout << "x[" << i << "] = " << x[i] << std::endl;
   }

   std::cout << std::endl << std::endl << "Solution of the bound multipliers, z_L and z_U" << std::endl;
   for (Index i = 0; i < n; i++) {
      std::cout << "z_L[" << i << "] = " << z_L[i] << std::endl;
   }
   for (Index i = 0; i < n; i++) {
      std::cout << "z_U[" << i << "] = " << z_U[i] << std::endl;
   }

   std::cout << std::endl << std::endl << "Objective value" << std::endl;
   std::cout << "f(x*) = " << obj_value << std::endl;

   std::cout << std::endl << "Final value of the constraints:" << std::endl;
   for (Index i = 0; i < m; i++) {
      std::cout << "g(" << i << ") = " << g[i] << std::endl;
   }

   fMinimizer->SetFinalValues(x);
   fMinimizer->SetMinValue(obj_value);
}
void IpoptMinimizer::SetOptionStringValue(const char *var, const char *value)
{
   // forward any Ipopt string option to the underlying application object
   fIpotApp->Options()->SetStringValue(var, value);
}

bool IpoptMinimizer::Minimize()
{
   ApplicationReturnStatus status;
   status = fIpotApp->Initialize();
   if (status != Solve_Succeeded) {
      std::cout << std::endl << std::endl << "*** Error during initialization!" << std::endl;
      return false;
   }

   // run the optimization on the internal TNLP that wraps the ROOT objective function
   status = fIpotApp->OptimizeTNLP(fInternalTNLP);
   if (status == Solve_Succeeded) {
      // retrieve some statistics about the solve
      Index iter_count = fIpotApp->Statistics()->IterationCount();
      std::cout << std::endl << std::endl << "*** The problem solved in " << iter_count << " iterations!" << std::endl;

      Number final_obj = fIpotApp->Statistics()->FinalObjective();
      std::cout << std::endl
                << "*** The final value of the objective function is " << final_obj << '.' << std::endl;
      return true;
   }
   return false;
}
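Taken together, InternalTNLP feeds the ROOT objective function, its bounds and its starting values to Ipopt, and finalize_solution copies the result back, so the class can be driven through the usual ROOT::Math::Minimizer interface. The sketch below is not from this file: the header and the exact namespace of IpoptMinimizer are assumptions, while SetFunction, SetVariable, SetLimitedVariable, Minimize, X and MinValue are the standard Minimizer methods.

#include <iostream>
#include "Math/Functor.h"
// #include "Math/IpoptMinimizer.h"   // hypothetical header; adjust to the actual build

// Simple unconstrained objective used only for illustration.
double sphere(const double *x) { return x[0] * x[0] + x[1] * x[1]; }

void runIpoptMinimizer()
{
   IpoptMinimizer min;                                  // assumed visible here (namespace omitted)
   ROOT::Math::Functor f(&sphere, 2);
   min.SetFunction(f);                                  // objective; gradient obtained numerically if needed
   min.SetLimitedVariable(0, "x0", 0.5, 0.01, -5., 5.); // bounded -> limits end up in get_bounds_info
   min.SetVariable(1, "x1", 1.0, 0.01);                 // unbounded -> nlp_lower/upper_bound_inf
   if (min.Minimize()) {
      const double *xs = min.X();
      std::cout << "f(" << xs[0] << ", " << xs[1] << ") = " << min.MinValue() << std::endl;
   }
}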