source: branches/dev/Algorithm/IpRestoIterationOutput.cpp @ 546

Last change on this file since 546 was 546, checked in by andreasw, 15 years ago
  • added missing svn:keyword for a few files
  • increased factor for theta reduction in restoration convergence check from 1e1 to 1e2
  • trigger restoration phase in expect_infeasible_problem mode if multipliers become larger than 1e8
  • revert to regular restoration phase if more than max_soft_resto_iters (new option) iterations are taken in soft restoration phase
  • made quality_function default for adaptive barrier strategy
  • added new print level to avoid one line of output per iteration
  • fixed TNLPAdapter, so that only some parts of the problem (objective, constraints, variables) are scaled if desired
  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 11.7 KB
Line 
1// Copyright (C) 2004, 2005 International Business Machines and others.
2// All Rights Reserved.
3// This code is published under the Common Public License.
4//
5// $Id: IpRestoIterationOutput.cpp 546 2005-10-20 22:40:15Z andreasw $
6//
7// Authors:  Carl Laird, Andreas Waechter              IBM    2004-09-23
8
9#include "IpRestoIterationOutput.hpp"
10#include "IpRestoIpoptNLP.hpp"
11
12#ifdef HAVE_CMATH
13# include <cmath>
14#else
15# ifdef HAVE_MATH_H
16#  include <math.h>
17# else
18#  error "don't have header file for math"
19# endif
20#endif
21
22namespace Ipopt
23{
24#ifdef IP_DEBUG
25  static const Index dbg_verbosity = 0;
26#endif
27
28  RestoIterationOutput::RestoIterationOutput(const SmartPtr<OrigIterationOutput>& resto_orig_iteration_output)
29      :
30      resto_orig_iteration_output_(resto_orig_iteration_output)
31  {}
32
33  RestoIterationOutput::~RestoIterationOutput()
34  {}
35
36  bool RestoIterationOutput::InitializeImpl(const OptionsList& options,
37      const std::string& prefix)
38  {
39    options.GetBoolValue("print_info_string", print_info_string_, prefix);
40
41    bool retval = true;
42    if (IsValid(resto_orig_iteration_output_)) {
43      retval = resto_orig_iteration_output_->Initialize(Jnlst(), IpNLP(),
44               IpData(), IpCq(),
45               options, prefix);
46    }
47    return retval;
48  }
49
  // Print the per-iteration output while the algorithm is inside the
  // restoration phase.  The summary line shows quantities of the *original*
  // NLP (objective, primal infeasibility) evaluated at the current
  // restoration-phase iterate, while the dual-infeasibility column carries
  // the restoration-phase problem's value.  Depending on the journalist's
  // print level, additional detail (iterate norms, step norms, vectors,
  // matrices) is dumped afterwards.
  void RestoIterationOutput::WriteOutput()
  {
    // Get pointers to the Original NLP objects.  While this output object is
    // active, IpNLP() must actually be a RestoIpoptNLP; the downcast is only
    // checked in debug builds via DBG_ASSERT.
    const RestoIpoptNLP* resto_ipopt_nlp =
      dynamic_cast<const RestoIpoptNLP*>(&IpNLP());
    DBG_ASSERT(resto_ipopt_nlp);

    SmartPtr<IpoptData> orig_ip_data = &resto_ipopt_nlp->OrigIpData();
    SmartPtr<IpoptNLP> orig_ip_nlp = &resto_ipopt_nlp->OrigIpNLP();
    SmartPtr<IpoptCalculatedQuantities> orig_ip_cq =
      &resto_ipopt_nlp->OrigIpCq();

    // Set the iteration counter for the original NLP to the current value,
    // so both problems report the same iteration number.
    Index iter = IpData().iter_count();
    orig_ip_data->Set_iter_count(iter);

    // If a resto_orig_iteration_output object was given, first do the
    // WriteOutput method with that one (it produces its own output,
    // including a column header line).
    if (IsValid(resto_orig_iteration_output_)) {
      resto_orig_iteration_output_->WriteOutput();
    }

    //////////////////////////////////////////////////////////////////////
    //         First print the summary line for the iteration           //
    //////////////////////////////////////////////////////////////////////

    std::string header =
      "iter    objective    inf_pr   inf_du lg(mu)  ||d||  lg(rg) alpha_du alpha_pr  ls\n";
    Jnlst().Printf(J_DETAILED, J_MAIN,
                   "\n\n**************************************************\n");
    Jnlst().Printf(J_DETAILED, J_MAIN,
                   "*** Summary of Iteration %d for original NLP:", IpData().iter_count());
    Jnlst().Printf(J_DETAILED, J_MAIN,
                   "\n**************************************************\n\n");
    // Repeat the column header every 10 iterations at summary level, but
    // only if the delegated output object did not already print one above.
    // NOTE(review): header is passed as the printf-style format string; this
    // is safe only because it contains no '%' characters.
    if (iter%10 == 0 && !IsValid(resto_orig_iteration_output_)) {
      // output the header
      Jnlst().Printf(J_ITERSUMMARY, J_MAIN, header.c_str());
    }
    else {
      Jnlst().Printf(J_DETAILED, J_MAIN, header.c_str());
    }

    // For now, just print the total NLP error for the restoration
    // phase problem in the dual infeasibility column
    Number inf_du =
      IpCq().curr_dual_infeasibility(NORM_MAX);

    Number mu = IpData().curr_mu();
    // ||d||: max-norm over the (x,s) components of the search direction;
    // stays 0 if no step has been computed yet (e.g. first iteration).
    Number dnrm = 0.;
    if (IsValid(IpData().delta()) && IsValid(IpData().delta()->x()) && IsValid(IpData().delta()->s())) {
      dnrm = Max(IpData().delta()->x()->Amax(), IpData().delta()->s()->Amax());
    }

    // Set  the trial  values  for  the original  Data  object to  the
    // current restoration phase values.  The restoration-phase x is a
    // compound vector; its component 0 holds the original problem's x.
    SmartPtr<const Vector> x = IpData().curr()->x();
    const CompoundVector* cx =
      dynamic_cast<const CompoundVector*>(GetRawPtr(x));
    DBG_ASSERT(cx);

    SmartPtr<IteratesVector> trial = orig_ip_data->trial()->MakeNewContainer();
    trial->Set_x(*cx->GetComp(0));
    trial->Set_s(*IpData().curr()->s());
    orig_ip_data->set_trial(trial);

    // Compute primal infeasibility and objective of the original NLP at the
    // trial point installed just above (order matters here).
    Number inf_pr = orig_ip_cq->trial_primal_infeasibility(NORM_MAX);
    Number f = orig_ip_cq->unscaled_trial_f();

    // Retrieve some information set in the different parts of the algorithm.
    // The 'r' tag marks this line as a restoration-phase iteration.
    char info_iter='r';

    Number alpha_primal = IpData().info_alpha_primal();
    char alpha_primal_char = IpData().info_alpha_primal_char();
    Number alpha_dual = IpData().info_alpha_dual();
    Number regu_x = IpData().info_regu_x();
    // lg(rg) column: dashes when no regularization was used, otherwise
    // log10 of the regularization value formatted into a small buffer.
    // NOTE(review): sprintf into regu_x_buf[8] assumes the formatted value
    // fits in 7 characters; "%5.1f" of log10(regu_x) does for realistic
    // regularization magnitudes, but snprintf would be safer — confirm.
    char regu_x_buf[8];
    char dashes[]="   - ";
    char *regu_x_ptr;
    if (regu_x==.0) {
      regu_x_ptr = dashes;
    }
    else {
      sprintf(regu_x_buf, "%5.1f", log10(regu_x));
      regu_x_ptr = regu_x_buf;
    }
    Index ls_count = IpData().info_ls_count();
    const std::string info_string = IpData().info_string();

    Jnlst().Printf(J_ITERSUMMARY, J_MAIN,
                   "%4d%c%14.7e %7.2e %7.2e %5.1f %7.2e %5s %7.2e %7.2e%c%3d",
                   iter, info_iter, f, inf_pr, inf_du, log10(mu), dnrm, regu_x_ptr,
                   alpha_dual, alpha_primal, alpha_primal_char,
                   ls_count);
    // The info string is appended to the summary line if requested via the
    // print_info_string option; otherwise it only appears at detailed level.
    if (print_info_string_) {
      Jnlst().Printf(J_ITERSUMMARY, J_MAIN, " %s", info_string.c_str());
    }
    else {
      Jnlst().Printf(J_DETAILED, J_MAIN, " %s", info_string.c_str());
    }
    Jnlst().Printf(J_ITERSUMMARY, J_MAIN, "\n");

    //////////////////////////////////////////////////////////////////////
    //           Now if desired more detail on the iterates             //
    //////////////////////////////////////////////////////////////////////

    // J_DETAILED: infinity norms of the current restoration-phase iterate.
    if (Jnlst().ProduceOutput(J_DETAILED, J_MAIN)) {
      Jnlst().Printf(J_DETAILED, J_MAIN,
                     "\n**************************************************\n");
      Jnlst().Printf(J_DETAILED, J_MAIN,
                     "*** Beginning Iteration %d from the following point:",
                     IpData().iter_count());
      Jnlst().Printf(J_DETAILED, J_MAIN,
                     "\n**************************************************\n\n");

      Jnlst().Printf(J_DETAILED, J_MAIN,
                     "Primal infeasibility for restoration phase problem = %.16e\n",
                     IpCq().curr_primal_infeasibility(NORM_MAX));
      Jnlst().Printf(J_DETAILED, J_MAIN,
                     "Dual infeasibility for restoration phase problem   = %.16e\n",
                     IpCq().curr_dual_infeasibility(NORM_MAX));

      Jnlst().Printf(J_DETAILED, J_MAIN,
                     "||curr_x||_inf   = %.16e\n", IpData().curr()->x()->Amax());
      Jnlst().Printf(J_DETAILED, J_MAIN,
                     "||curr_s||_inf   = %.16e\n", IpData().curr()->s()->Amax());
      Jnlst().Printf(J_DETAILED, J_MAIN,
                     "||curr_y_c||_inf = %.16e\n", IpData().curr()->y_c()->Amax());
      Jnlst().Printf(J_DETAILED, J_MAIN,
                     "||curr_y_d||_inf = %.16e\n", IpData().curr()->y_d()->Amax());
      Jnlst().Printf(J_DETAILED, J_MAIN,
                     "||curr_z_L||_inf = %.16e\n", IpData().curr()->z_L()->Amax());
      Jnlst().Printf(J_DETAILED, J_MAIN,
                     "||curr_z_U||_inf = %.16e\n", IpData().curr()->z_U()->Amax());
      Jnlst().Printf(J_DETAILED, J_MAIN,
                     "||curr_v_L||_inf = %.16e\n", IpData().curr()->v_L()->Amax());
      Jnlst().Printf(J_DETAILED, J_MAIN,
                     "||curr_v_U||_inf = %.16e\n", IpData().curr()->v_U()->Amax());
    }
    // J_MOREDETAILED: infinity norms of the search direction, if one exists.
    if (Jnlst().ProduceOutput(J_MOREDETAILED, J_MAIN)) {
      if (IsValid(IpData().delta())) {
        Jnlst().Printf(J_MOREDETAILED, J_MAIN,
                       "\n||delta_x||_inf   = %.16e\n", IpData().delta()->x()->Amax());
        Jnlst().Printf(J_MOREDETAILED, J_MAIN,
                       "||delta_s||_inf   = %.16e\n", IpData().delta()->s()->Amax());
        Jnlst().Printf(J_MOREDETAILED, J_MAIN,
                       "||delta_y_c||_inf = %.16e\n", IpData().delta()->y_c()->Amax());
        Jnlst().Printf(J_MOREDETAILED, J_MAIN,
                       "||delta_y_d||_inf = %.16e\n", IpData().delta()->y_d()->Amax());
        Jnlst().Printf(J_MOREDETAILED, J_MAIN,
                       "||delta_z_L||_inf = %.16e\n", IpData().delta()->z_L()->Amax());
        Jnlst().Printf(J_MOREDETAILED, J_MAIN,
                       "||delta_z_U||_inf = %.16e\n", IpData().delta()->z_U()->Amax());
        Jnlst().Printf(J_MOREDETAILED, J_MAIN,
                       "||delta_v_L||_inf = %.16e\n", IpData().delta()->v_L()->Amax());
        Jnlst().Printf(J_MOREDETAILED, J_MAIN,
                       "||delta_v_U||_inf = %.16e\n", IpData().delta()->v_U()->Amax());
      }
      else {
        Jnlst().Printf(J_MOREDETAILED, J_MAIN,
                       "\nNo search direction has been computed yet.\n");
      }
    }
    // J_VECTOR: full dumps of the iterate and slack vectors.
    if (Jnlst().ProduceOutput(J_VECTOR, J_MAIN)) {
      IpData().curr()->x()->Print(Jnlst(), J_VECTOR, J_MAIN, "curr_x");
      IpData().curr()->s()->Print(Jnlst(), J_VECTOR, J_MAIN, "curr_s");

      IpData().curr()->y_c()->Print(Jnlst(), J_VECTOR, J_MAIN, "curr_y_c");
      IpData().curr()->y_d()->Print(Jnlst(), J_VECTOR, J_MAIN, "curr_y_d");

      IpCq().curr_slack_x_L()->Print(Jnlst(), J_VECTOR, J_MAIN, "curr_slack_x_L");
      IpCq().curr_slack_x_U()->Print(Jnlst(), J_VECTOR, J_MAIN, "curr_slack_x_U");
      IpData().curr()->z_L()->Print(Jnlst(), J_VECTOR, J_MAIN, "curr_z_L");
      IpData().curr()->z_U()->Print(Jnlst(), J_VECTOR, J_MAIN, "curr_z_U");

      IpCq().curr_slack_s_L()->Print(Jnlst(), J_VECTOR, J_MAIN, "curr_slack_s_L");
      IpCq().curr_slack_s_U()->Print(Jnlst(), J_VECTOR, J_MAIN, "curr_slack_s_U");
      IpData().curr()->v_L()->Print(Jnlst(), J_VECTOR, J_MAIN, "curr_v_L");
      IpData().curr()->v_U()->Print(Jnlst(), J_VECTOR, J_MAIN, "curr_v_U");
    }
    // J_MOREVECTOR: Lagrangian gradients and the full step vector.
    if (Jnlst().ProduceOutput(J_MOREVECTOR, J_MAIN)) {
      IpCq().curr_grad_lag_x()->Print(Jnlst(), J_MOREVECTOR, J_MAIN, "curr_grad_lag_x");
      IpCq().curr_grad_lag_s()->Print(Jnlst(), J_MOREVECTOR, J_MAIN, "curr_grad_lag_s");
      if (IsValid(IpData().delta())) {
        IpData().delta()->Print(Jnlst(), J_MOREVECTOR, J_MAIN, "delta");
      }
    }

    // J_DETAILED: scaled vs. unscaled NLP error quantities for the
    // restoration-phase problem.
    if (Jnlst().ProduceOutput(J_DETAILED, J_MAIN)) {
      Jnlst().Printf(J_DETAILED, J_MAIN,
                     "\n\n***Current NLP Values for Iteration (Restoration phase problem) %d:\n",
                     IpData().iter_count());
      Jnlst().Printf(J_DETAILED, J_MAIN, "\n                                   (scaled)                 (unscaled)\n");
      Jnlst().Printf(J_DETAILED, J_MAIN, "Objective...............: %24.16e  %24.16e\n", IpCq().curr_f(), IpCq().unscaled_curr_f());
      Jnlst().Printf(J_DETAILED, J_MAIN, "Dual infeasibility......: %24.16e  %24.16e\n", IpCq().curr_dual_infeasibility(NORM_MAX), IpCq().unscaled_curr_dual_infeasibility(NORM_MAX));
      Jnlst().Printf(J_DETAILED, J_MAIN, "Constraint violation....: %24.16e  %24.16e\n", IpCq().curr_nlp_constraint_violation(NORM_MAX), IpCq().unscaled_curr_nlp_constraint_violation(NORM_MAX));
      Jnlst().Printf(J_DETAILED, J_MAIN, "Complementarity.........: %24.16e  %24.16e\n", IpCq().curr_complementarity(0., NORM_MAX), IpCq().unscaled_curr_complementarity(0., NORM_MAX));
      Jnlst().Printf(J_DETAILED, J_MAIN, "Overall NLP error.......: %24.16e  %24.16e\n\n", IpCq().curr_nlp_error(), IpCq().unscaled_curr_nlp_error());
    }
    // J_VECTOR: objective gradient and constraint values/residuals.
    if (Jnlst().ProduceOutput(J_VECTOR, J_MAIN)) {
      IpCq().curr_grad_f()->Print(Jnlst(), J_VECTOR, J_MAIN, "grad_f");
      IpCq().curr_c()->Print(Jnlst(), J_VECTOR, J_MAIN, "curr_c");
      IpCq().curr_d()->Print(Jnlst(), J_VECTOR, J_MAIN, "curr_d");
      IpCq().curr_d_minus_s()->Print(Jnlst(), J_VECTOR, J_MAIN,
                                     "curr_d - curr_s");
    }

    // J_MATRIX: Jacobians and exact Hessian of the restoration problem.
    if (Jnlst().ProduceOutput(J_MATRIX, J_MAIN)) {
      IpCq().curr_jac_c()->Print(Jnlst(), J_MATRIX, J_MAIN, "jac_c");
      IpCq().curr_jac_d()->Print(Jnlst(), J_MATRIX, J_MAIN, "jac_d");
      IpCq().curr_exact_hessian()->Print(Jnlst(), J_MATRIX, J_MAIN, "h");
    }

    Jnlst().Printf(J_DETAILED, J_MAIN, "\n\n");
  }
265
266} // namespace Ipopt
Note: See TracBrowser for help on using the repository browser.