source: trunk/Bonmin/src/IpoptInterface/TNLP2FPNLP.hpp @ 1

Last change on this file since 1 was 1, checked in by andreasw, 13 years ago

imported initial code

// Copyright (C) 2004, International Business Machines and others.
// All Rights Reserved.
// This code is published under the Common Public License.
//
//
// Authors:  Pierre Bonami 06/10/2005

#ifndef _TNLP2FPNLP_HPP_
#define _TNLP2FPNLP_HPP_

#include "IpTNLP.hpp"
#include "TMINLP.hpp"
#include "IpSmartPtr.hpp"

namespace Ipopt
{
  /** This is an adapter class that converts an NLP into a Feasibility Pump NLP
   *  by replacing the objective function with the (2-norm) distance to a point.
   *  The point is set through the extra method
   *  set_dist2point_obj(int n, const Number * vals, const Index * inds).
   */
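  /* A note on the modified objective (an assumption drawn from the class
   * description above, not from the implementation, which lives in the
   * corresponding .cpp file): with a point given by (inds, vals) and a
   * scaling factor s = objectiveScalingFactor_, the objective presumably
   * takes the form
   *
   *   f_FP(x) = s * sum_i ( x[inds[i]] - vals[i] )^2
   *
   * i.e. the (squared) 2-norm distance to the point, restricted to the
   * selected coordinates, while the constraints of the original TNLP are
   * kept unchanged.
   */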
  class TNLP2FPNLP : public TNLP
  {
  public:
    /**@name Constructors/Destructors */
    //@{
    TNLP2FPNLP(const SmartPtr<TNLP> tnlp, double objectiveScalingFactor = 100);

    /** Default destructor */
    virtual ~TNLP2FPNLP();
    //@}

    /**@name Methods to change the objective function */
    //@{
    /** \brief Set the point to which distance is minimized.
     * The distance is minimized in a subspace defined by a subset of the coordinates.
     * \param n number of coordinates on which distance is minimized
     * \param vals values of the point for the coordinates in inds
     * \param inds indices of the coordinates on which distance is minimized
     */
    void set_dist2point_obj(int n, const Number * vals, const Index * inds);
    //@}
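
    /* Illustrative only: a hypothetical call that makes the adapter minimize the
     * distance to the values 1. and 3. of variables 2 and 5 (say, the roundings
     * of two integer variables). The indices and values are made up for the
     * example; fp_nlp is assumed to be a SmartPtr<TNLP2FPNLP>.
     *
     *   Index inds[2] = {2, 5};
     *   Number vals[2] = {1., 3.};
     *   fp_nlp->set_dist2point_obj(2, vals, inds);
     */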

    /**@name Methods to gather information about the NLP */
    //@{
    /** Get the problem information from tnlp_ and add the Hessian entries
     *  needed for the distance objective. */
    virtual bool get_nlp_info(Index& n, Index& m, Index& nnz_jac_g,
        Index& nnz_h_lag, TNLP::IndexStyleEnum& index_style);

    /** This call is just passed on to tnlp_.
     */
    virtual bool get_bounds_info(Index n, Number* x_l, Number* x_u,
        Index m, Number* g_l, Number* g_u)
    {
      return tnlp_->get_bounds_info(n, x_l, x_u, m, g_l, g_u);
    }

    /** Passed on to tnlp_.
     */
    virtual bool get_starting_point(Index n, bool init_x, Number* x,
        bool init_z, Number* z_L, Number* z_U,
        Index m, bool init_lambda,
        Number* lambda)
    {
      return tnlp_->get_starting_point(n, init_x, x,
          init_z, z_L, z_U, m, init_lambda, lambda);
    }

    /** Overloaded to return the value of the modified (distance) objective function. */
    virtual bool eval_f(Index n, const Number* x, bool new_x,
        Number& obj_value);

    /** Overloaded to return the gradient of the modified objective w.r.t. x. */
    virtual bool eval_grad_f(Index n, const Number* x, bool new_x,
        Number* grad_f);

    /** Passed on to tnlp_. */
    virtual bool eval_g(Index n, const Number* x, bool new_x,
        Index m, Number* g)
    {
      return tnlp_->eval_g(n, x, new_x, m, g);
    }

    /** Passed on to tnlp_. */
    virtual bool eval_jac_g(Index n, const Number* x, bool new_x,
        Index m, Index nele_jac, Index* iRow,
        Index *jCol, Number* values)
    {
      return tnlp_->eval_jac_g(n, x, new_x, m, nele_jac, iRow, jCol, values);
    }

    /** Evaluate the modified Hessian of the Lagrangian. */
    virtual bool eval_h(Index n, const Number* x, bool new_x,
        Number obj_factor, Index m, const Number* lambda,
        bool new_lambda, Index nele_hess,
        Index* iRow, Index* jCol, Number* values);
    //@}

    /** @name Solution Methods */
    //@{
    /** This method is called when the algorithm is complete so the TNLP can store/write the solution */
    virtual void finalize_solution(SolverReturn status,
        Index n, const Number* x, const Number* z_L, const Number* z_U,
        Index m, const Number* g, const Number* lambda,
        Number obj_value);
    //@}

    /** @name Scaling of the objective function */
    //@{
    void setObjectiveScaling(double value)
    {
      objectiveScalingFactor_ = value;
    }
    double getObjectiveScaling() const
    {
      return objectiveScalingFactor_;
    }
    //@}

  private:
    /** @name Internal methods to help compute the distance, its gradient and Hessian */
    //@{
    /** Compute the 2-norm distance from x to the point to which distance is minimized. */
    double dist2point(const Number *x);
    //@}
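
    /* A minimal sketch of what dist2point() plausibly computes, inferred from
     * the description above (the real implementation is in the .cpp file and
     * may differ, e.g. by taking the square root):
     *
     *   double d = 0.;
     *   for (Index i = 0; i < n_; i++) {
     *     Number diff = x[inds_[i]] - vals_[i];
     *     d += diff * diff;
     *   }
     *   return d;
     */
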
    /**@name Default Compiler Generated Methods
     * (Hidden to avoid implicit creation/calling).
     * These methods are not implemented and
     * we do not want the compiler to implement
     * them for us, so we declare them private
     * and do not define them. This ensures that
     * they will not be implicitly created/called. */
    //@{
    /** Default Constructor */
    TNLP2FPNLP();

    /** Copy Constructor */
    TNLP2FPNLP(const TNLP2FPNLP&);

    /** Overloaded Equals Operator */
    void operator=(const TNLP2FPNLP&);
    //@}

    /** Pointer to the TNLP that is being adapted. */
    SmartPtr<TNLP> tnlp_;

    /** @name Data for storing the point to which distance is minimized */
    //@{
    /// Number of coordinates stored in inds_ and vals_
    Index n_;
    /// Size of the inds_ and vals_ arrays
    Index nMax_;
    /// Indices of the variables for which distance is minimized (i.e. indices of the integer variables in a feasibility pump setting)
    Index * inds_;
    /// Values of the point to which distance is minimized (if x is that point, vals_[i] should be x[inds_[i]])
    Number * vals_;
    //@}

    /// Scaling factor for the objective
    double objectiveScalingFactor_;

  };

} // namespace Ipopt

#endif /*_TNLP2FPNLP_HPP_*/
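
For orientation, here is a hypothetical usage sketch, not taken from Bonmin itself. It assumes the standard Ipopt application interface from IpIpoptApplication.hpp (which may postdate this revision), and the indices, values, and function name are made up for the example: the adapter wraps an existing TNLP, the target point is set, and the resulting distance problem is handed to Ipopt.

// Hypothetical sketch: wrap an existing TNLP and solve the distance problem.
#include "IpIpoptApplication.hpp"
#include "TNLP2FPNLP.hpp"

using namespace Ipopt;

int solve_fp_subproblem(const SmartPtr<TNLP>& orig_nlp)
{
  // Adapter that replaces the objective by the scaled distance to a point.
  SmartPtr<TNLP2FPNLP> fp_nlp = new TNLP2FPNLP(orig_nlp);

  // Illustrative point: variables 2 and 5 should be close to 1. and 3.
  Index inds[2] = {2, 5};
  Number vals[2] = {1., 3.};
  fp_nlp->set_dist2point_obj(2, vals, inds);
  fp_nlp->setObjectiveScaling(100.);

  // Solve with a plain Ipopt application (assumed interface).
  SmartPtr<IpoptApplication> app = new IpoptApplication();
  app->Initialize();
  SmartPtr<TNLP> tnlp_to_solve = GetRawPtr(fp_nlp);
  ApplicationReturnStatus status = app->OptimizeTNLP(tnlp_to_solve);
  return (status == Solve_Succeeded) ? 0 : 1;
}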
Note: See TracBrowser for help on using the repository browser.