Changeset 467


Ignore:
Timestamp:
Aug 19, 2005 11:49:13 AM (14 years ago)
Author:
andreasw
Message:

avoid illegal partial overwrite of methods

Location:
branches/dev/Algorithm
Files:
5 edited

Legend:

Unmodified
Added
Removed
  • branches/dev/Algorithm/IpIpoptNLP.hpp

    r430 r467  
    173173      return false;
    174174    }
     175
    175176    /** Replacement for the default objective function method which
    176177     *  knows about the barrier parameter */
    177     virtual Number f(const Vector& x, Number mu)
    178     {
    179       DBG_ASSERT(false && "ERROR: This method is only a placeholder for f(mu) and should not be called");
    180       return 0.;
    181     }
     178    virtual Number f(const Vector& x, Number mu) = 0;
     179
    182180    /** Replacement for the default objective gradient method which
    183181     *  knows about the barrier parameter  */
    184     virtual SmartPtr<const Vector> grad_f(const Vector& x, Number mu)
    185     {
    186       DBG_ASSERT(false && "ERROR: This method is only a placeholder for grad_f(mu) and should not be called");
    187       return NULL;
    188     }
     182    virtual SmartPtr<const Vector> grad_f(const Vector& x, Number mu) = 0;
     183
    189184    /** Replacement for the default Lagrangian Hessian method which
    190185     *  knows about the barrier parameter */
     
    193188                                        const Vector& yc,
    194189                                        const Vector& yd,
    195                                         Number mu)
    196     {
    197       DBG_ASSERT(false && "ERROR: This method is only a placeholder for h(mu) and should not be called");
    198       return NULL;
    199     }
     190                                        Number mu) = 0;
    200191    //@}
    201192
  • branches/dev/Algorithm/IpOrigIpoptNLP.cpp

    r465 r467  
    1616# else
    1717#  error "don't have header file for math"
     18# endif
     19#endif
     20
     21#ifdef HAVE_CASSERT
     22# include <cassert>
     23#else
     24# ifdef HAVE_ASSERT_H
     25#  include <assert.h>
     26# else
     27#  error "don't have header file for assert"
    1828# endif
    1929#endif
     
    268278  }
    269279
     280  Number OrigIpoptNLP::f(const Vector& x, Number mu)
     281  {
     282    assert(false && "ERROR: This method is only a placeholder for f(mu) and should not be called");
     283    return 0.;
     284  }
     285
    270286  SmartPtr<const Vector> OrigIpoptNLP::grad_f(const Vector& x)
    271287  {
     
    285301
    286302    return retValue;
     303  }
     304
     305  SmartPtr<const Vector> OrigIpoptNLP::grad_f(const Vector& x, Number mu)
     306  {
     307    assert(false && "ERROR: This method is only a placeholder for grad_f(mu) and should not be called");
     308    return NULL;
    287309  }
    288310
     
    444466    return retValue;
    445467  }
     468
     469  SmartPtr<const SymMatrix> OrigIpoptNLP::h(const Vector& x,
     470      Number obj_factor,
     471      const Vector& yc,
     472      const Vector& yd,
     473      Number mu)
     474  {
     475    assert(false &&
     476           "ERROR: This method is only a placeholder for h(mu) and should not be called");
     477    return NULL;
     478  }
     479
    446480
    447481  void OrigIpoptNLP::GetSpaces(SmartPtr<const VectorSpace>& x_space,
  • branches/dev/Algorithm/IpOrigIpoptNLP.hpp

    r460 r467  
    6161    virtual Number f(const Vector& x);
    6262
     63    /** Objective value (depending on mu) - incorrect version for
     64     *  OrigIpoptNLP */
     65    virtual Number f(const Vector& x, Number mu);
     66
    6367    /** Gradient of the objective */
    6468    virtual SmartPtr<const Vector> grad_f(const Vector& x);
     69
     70    /** Gradient of the objective (depending on mu) - incorrect
     71     *  version for OrigIpoptNLP */
     72    virtual SmartPtr<const Vector> grad_f(const Vector& x, Number mu);
    6573
    6674    /** Equality constraint residual */
     
    8391                                        const Vector& yd
    8492                                       );
     93
     94    /** Hessian of the Lagrangian (depending on mu) - incorrect
     95     *  version for OrigIpoptNLP */
     96    virtual SmartPtr<const SymMatrix> h(const Vector& x,
     97                                        Number obj_factor,
     98                                        const Vector& yc,
     99                                        const Vector& yd,
     100                                        Number mu);
    85101
    86102    /** Lower bounds on x */
  • branches/dev/Algorithm/IpRestoIpoptNLP.cpp

    r465 r467  
    2020# else
    2121#  error "don't have header file for math"
     22# endif
     23#endif
     24
     25#ifdef HAVE_CASSERT
     26# include <cassert>
     27#else
     28# ifdef HAVE_ASSERT_H
     29#  include <assert.h>
     30# else
     31#  error "don't have header file for assert"
    2232# endif
    2333#endif
     
    431441  }
    432442
     443  Number RestoIpoptNLP::f(const Vector& x)
     444  {
     445    assert(false && "ERROR: In RestoIpoptNLP f() is called without mu!");
     446    return 0.;
     447  }
     448
    433449  Number RestoIpoptNLP::f(const Vector& x, Number mu)
    434450  {
     
    503519  }
    504520
     521  SmartPtr<const Vector> RestoIpoptNLP::grad_f(const Vector& x)
     522  {
     523    assert(false && "ERROR: In RestoIpoptNLP grad_f() is called without mu!");
     524    return NULL;
     525  }
    505526
    506527  SmartPtr<const Vector> RestoIpoptNLP::d(const Vector& x)
     
    606627
    607628    return GetRawPtr(retPtr);
     629  }
     630
     631  SmartPtr<const SymMatrix> RestoIpoptNLP::h(const Vector& x,
     632      Number obj_factor,
     633      const Vector& yc,
     634      const Vector& yd
     635                                            )
     636  {
     637    assert(false && "ERROR: In RestoIpoptNLP h() is called without mu!");
     638    return NULL;
    608639  }
    609640
  • branches/dev/Algorithm/IpRestoIpoptNLP.hpp

    r460 r467  
    8484
    8585    /** Objective value (incorrect version for restoration phase) */
    86     virtual Number f(const Vector& x)
    87     {
    88       DBG_ASSERT(false && "ERROR: In RestoIpoptNLP f() is called without mu!");
    89       return 0.;
    90     }
     86    virtual Number f(const Vector& x);
    9187
    9288    /** Objective value */
     
    9490
    9591    /** Gradient of the objective (incorrect version for restoration phase) */
    96     virtual SmartPtr<const Vector> grad_f(const Vector& x)
    97     {
    98       DBG_ASSERT(false && "ERROR: In RestoIpoptNLP grad_f() is called without mu!");
    99       return NULL;
    100     }
     92    virtual SmartPtr<const Vector> grad_f(const Vector& x);
    10193
    10294    /** Gradient of the objective */
     
    122114                                        const Vector& yc,
    123115                                        const Vector& yd
    124                                        )
    125     {
    126       DBG_ASSERT(false && "ERROR: In RestoIpoptNLP h() is called without mu!");
    127       return NULL;
    128     }
     116                                       );
    129117
    130118    /** Hessian of the Lagrangian */
Note: See TracChangeset for help on using the changeset viewer.