Timestamp: Oct 15, 2003 5:34:57 PM
Author: forrest
Message: This should break everything
File: 1 edited
  • trunk/ClpDualRowSteepest.cpp

--- trunk/ClpDualRowSteepest.cpp (r137)
+++ trunk/ClpDualRowSteepest.cpp (r225)
@@ -24 +24 @@
     infeasible_(NULL),
     alternateWeights_(NULL),
-    savedWeights_(NULL)
+    savedWeights_(NULL),
+    dubiousWeights_(NULL)
 {
   type_=2+64*mode;
     
@@ -61 +62 @@
     savedWeights_=NULL;
   }
+  if (rhs.dubiousWeights_) {
+    assert(model_);
+    int number = model_->numberRows();
+    dubiousWeights_= new int[number];
+    ClpDisjointCopyN(rhs.dubiousWeights_,number,dubiousWeights_);
+  } else {
+    dubiousWeights_=NULL;
+  }
 }
 
     
@@ -69 +78 @@
 {
   delete [] weights_;
+  delete [] dubiousWeights_;
   delete infeasible_;
   delete alternateWeights_;
     
@@ -86 +96 @@
     model_ = rhs.model_;
     delete [] weights_;
+    delete [] dubiousWeights_;
     delete infeasible_;
     delete alternateWeights_;
     
@@ -111 +122 @@
     } else {
       savedWeights_=NULL;
+    }
+    if (rhs.dubiousWeights_) {
+      assert(model_);
+      int number = model_->numberRows();
+      dubiousWeights_= new int[number];
+      ClpDisjointCopyN(rhs.dubiousWeights_,number,dubiousWeights_);
+    } else {
+      dubiousWeights_=NULL;
     }
   }
     
@@ -135 +154 @@
   // allow tolerance at least slightly bigger than standard
   tolerance = tolerance +  error;
+  // But cap
+  tolerance = min(1000.0,tolerance);
   tolerance *= tolerance; // as we are using squares
   double * solution = model_->solutionRegion();
     
@@ -156 +177 @@
       if (iPivot<numberColumns)
         value *= COLUMN_BIAS; // bias towards columns
+k
 #endif
       // store square in list
     
@@ -181 +203 @@
     number = infeasible_->getNumElements();
   }
-  for (i=0;i<number;i++) {
-    iRow = index[i];
-    double value = infeas[iRow];
-    if (value>largest*weights_[iRow]&&value>tolerance) {
-      // make last pivot row last resort choice
-      if (iRow==lastPivotRow) {
-        if (value*1.0e-10<largest*weights_[iRow])
-          continue;
-        else
-          value *= 1.0e-10;
-      }
-      int iSequence = pivotVariable[iRow];
-      if (!model_->flagged(iSequence)) {
-        //#define CLP_DEBUG 1
+  if(model_->numberIterations()<model_->lastBadIteration()+200) {
+    // we can't really trust infeasibilities if there is dual error
+    double checkTolerance = 1.0e-8;
+    if (!model_->factorization()->pivots())
+      checkTolerance = 1.0e-6;
+    if (model_->largestPrimalError()>checkTolerance)
+      tolerance *= model_->largestPrimalError()/checkTolerance;
+  }
+  int numberWanted;
+  if (mode_<2 ) {
+    numberWanted = number+1;
+  } else if (mode_==2) {
+    numberWanted = max(2000,number/8);
+  } else {
+    int numberElements = model_->factorization()->numberElements();
+    double ratio = (double) numberElements/(double) model_->numberRows();
+    numberWanted = max(2000,number/8);
+    if (ratio<1.0) {
+      numberWanted = max(2000,number/20);
+    } else if (ratio>10.0) {
+      ratio = number * (ratio/80.0);
+      if (ratio>number)
+        numberWanted=number+1;
+      else
+        numberWanted = max(2000,(int) ratio);
+    }
+  }
+  int iPass;
+  // Setup two passes
+  int start[4];
+  start[1]=number;
+  start[2]=0;
+  double dstart = ((double) number) * CoinDrand48();
+  start[0]=(int) dstart;
+  start[3]=start[0];
+  //double largestWeight=0.0;
+  //double smallestWeight=1.0e100;
+  for (iPass=0;iPass<2;iPass++) {
+    int end = start[2*iPass+1];
+    for (i=start[2*iPass];i<end;i++) {
+      iRow = index[i];
+      double value = infeas[iRow];
+      if (value>tolerance) {
+        //#define OUT_EQ
+#ifdef OUT_EQ
+        {
+          int iSequence = pivotVariable[iRow];
+          if (upper[iSequence]==lower[iSequence])
+            value *= 2.0;
+        }
+#endif
+        double weight = weights_[iRow];
+        //largestWeight = max(largestWeight,weight);
+        //smallestWeight = min(smallestWeight,weight);
+        //double dubious = dubiousWeights_[iRow];
+        //weight *= dubious;
+        //if (value>2.0*largest*weight||(value>0.5*largest*weight&&value*largestWeight>dubious*largest*weight)) {
+        if (value>largest*weight) {
+          // make last pivot row last resort choice
+          if (iRow==lastPivotRow) {
+            if (value*1.0e-10<largest*weight)
+              continue;
+            else
+              value *= 1.0e-10;
+          }
+          int iSequence = pivotVariable[iRow];
+          if (!model_->flagged(iSequence)) {
+            //#define CLP_DEBUG 1
 #ifdef CLP_DEBUG
-        double value2=0.0;
-        if (solution[iSequence]>upper[iSequence]+tolerance)
-          value2=solution[iSequence]-upper[iSequence];
-        else if (solution[iSequence]<lower[iSequence]-tolerance)
-          value2=solution[iSequence]-lower[iSequence];
-        assert(fabs(value2*value2-infeas[iRow])<1.0e-8*min(value2*value2,infeas[iRow]));
-#endif
-        if (solution[iSequence]>upper[iSequence]+tolerance||
-            solution[iSequence]<lower[iSequence]-tolerance) {
-          chosenRow=iRow;
-          largest=value/weights_[iRow];
+            double value2=0.0;
+            if (solution[iSequence]>upper[iSequence]+tolerance)
+              value2=solution[iSequence]-upper[iSequence];
+            else if (solution[iSequence]<lower[iSequence]-tolerance)
+              value2=solution[iSequence]-lower[iSequence];
+            assert(fabs(value2*value2-infeas[iRow])<1.0e-8*min(value2*value2,infeas[iRow]));
+#endif
+            if (solution[iSequence]>upper[iSequence]+tolerance||
+                solution[iSequence]<lower[iSequence]-tolerance) {
+              chosenRow=iRow;
+              largest=value/weight;
+              //largestWeight = dubious;
+            }
+          } else {
+            // just to make sure we don't exit before got something
+            numberWanted++;
+          }
         }
-      }
-    }
-  }
+        numberWanted--;
+        if (!numberWanted)
+          break;
+      }
+    }
+    if (!numberWanted)
+      break;
+  }
+  //printf("smallest %g largest %g\n",smallestWeight,largestWeight);
   return chosenRow;
 }
 #define TRY_NORM 1.0e-4
-// Updates weights
-void
+// Updates weights and returns pivot alpha
+double
 ClpDualRowSteepest::updateWeights(CoinIndexedVector * input,
                                   CoinIndexedVector * spare,
                                   CoinIndexedVector * updatedColumn)
 {
-  // clear other region
-  alternateWeights_->clear();
-  double norm = 0.0;
-  double * work = input->denseVector();
-  int number = input->getNumElements();
-  int * which = input->getIndices();
-  double * work2 = spare->denseVector();
-  int * which2 = spare->getIndices();
-  double * work3 = alternateWeights_->denseVector();
-  int * which3 = alternateWeights_->getIndices();
-  int i;
 #if CLP_DEBUG>2
   // Very expensive debug
     
@@ -240 +317 @@
     double * array = alternateWeights_->denseVector();
     int * which = alternateWeights_->getIndices();
+    int i;
     for (i=0;i<numberRows;i++) {
       double value=0.0;
     
@@ -257 +335 @@
       if (fabs(weights_[i]-value)>1.0e-4)
         printf("%d old %g, true %g\n",i,weights_[i],value);
+      //else
+      //printf("%d matches %g\n",i,value);
     }
     delete temp;
   }
 #endif
-  for (i=0;i<number;i++) {
-    int iRow = which[i];
-    double value = work[iRow];
-    norm += value*value;
-    work2[iRow]=value;
-    which2[i]=iRow;
-  }
-  spare->setNumElements(number);
-  // ftran
-  model_->factorization()->updateColumn(alternateWeights_,spare);
-  // alternateWeights_ should still be empty
-  int pivotRow = model_->pivotRow();
+  assert (input->packedMode());
+  assert (updatedColumn->packedMode());
+  double alpha=0.0;
+  if (!model_->factorization()->networkBasis()) {
+    // clear other region
+    alternateWeights_->clear();
+    double norm = 0.0;
+    int i;
+    double * work = input->denseVector();
+    int numberNonZero = input->getNumElements();
+    int * which = input->getIndices();
+    double * work2 = spare->denseVector();
+    int * which2 = spare->getIndices();
+    // ftran
+    //permute and move indices into index array
+    //also compute norm
+    //int *regionIndex = alternateWeights_->getIndices (  );
+    const int *permute = model_->factorization()->permute();
+    //double * region = alternateWeights_->denseVector();
+    for ( i = 0; i < numberNonZero; i ++ ) {
+      int iRow = which[i];
+      double value = work[i];
+      norm += value*value;
+      iRow = permute[iRow];
+      work2[iRow] = value;
+      which2[i] = iRow;
+    }
+    spare->setNumElements ( numberNonZero );
+    // Only one array active as already permuted
+    model_->factorization()->updateColumn(spare,spare,true);
+    // permute back
+    numberNonZero = spare->getNumElements();
+    // alternateWeights_ should still be empty
+    int pivotRow = model_->pivotRow();
 #ifdef CLP_DEBUG
-  if ( model_->logLevel (  ) >4  &&
-       fabs(norm-weights_[pivotRow])>1.0e-3*(1.0+norm))
-    printf("on row %d, true weight %g, old %g\n",
-           pivotRow,sqrt(norm),sqrt(weights_[pivotRow]));
-#endif
-  // could re-initialize here (could be expensive)
-  norm /= model_->alpha() * model_->alpha();
-
-  assert(norm);
-  if (norm < TRY_NORM)
-    norm = TRY_NORM;
-  if (norm != 0.) {
+    if ( model_->logLevel (  ) >4  &&
+         fabs(norm-weights_[pivotRow])>1.0e-3*(1.0+norm))
+      printf("on row %d, true weight %g, old %g\n",
+             pivotRow,sqrt(norm),sqrt(weights_[pivotRow]));
+#endif
+    // could re-initialize here (could be expensive)
+    norm /= model_->alpha() * model_->alpha();
+
+    assert(norm);
+    // pivot element
+    alpha=0.0;
+    double multiplier = 2.0 / model_->alpha();
+    // look at updated column
+    work = updatedColumn->denseVector();
+    numberNonZero = updatedColumn->getNumElements();
+    which = updatedColumn->getIndices();
+
+    int nSave=0;
+    double * work3 = alternateWeights_->denseVector();
+    int * which3 = alternateWeights_->getIndices();
+    const int * pivotColumn = model_->factorization()->pivotColumn();
+    for (i =0; i < numberNonZero; i++) {
+      int iRow = which[i];
+      double theta = work[i];
+      if (iRow==pivotRow)
+        alpha = theta;
+      double devex = weights_[iRow];
+      work3[nSave]=devex; // save old
+      which3[nSave++]=iRow;
+      // transform to match spare
+      int jRow = pivotColumn[iRow];
+      double value = work2[jRow];
+      devex +=  theta * (theta*norm+value * multiplier);
+      if (devex < TRY_NORM)
+        devex = TRY_NORM;
+      weights_[iRow]=devex;
+    }
+    assert (alpha);
+    alternateWeights_->setPackedMode(true);
+    alternateWeights_->setNumElements(nSave);
+    if (norm < TRY_NORM)
+      norm = TRY_NORM;
+    weights_[pivotRow] = norm;
+    spare->clear();
+#ifdef CLP_DEBUG
+    spare->checkClear();
+#endif
+  } else {
+    // clear other region
+    alternateWeights_->clear();
+    double norm = 0.0;
+    int i;
+    double * work = input->denseVector();
+    int number = input->getNumElements();
+    int * which = input->getIndices();
+    double * work2 = spare->denseVector();
+    int * which2 = spare->getIndices();
+    for (i=0;i<number;i++) {
+      int iRow = which[i];
+      double value = work[i];
+      norm += value*value;
+      work2[iRow]=value;
+      which2[i]=iRow;
+    }
+    spare->setNumElements(number);
+    // ftran
+    model_->factorization()->updateColumn(alternateWeights_,spare);
+    // alternateWeights_ should still be empty
+    int pivotRow = model_->pivotRow();
+#ifdef CLP_DEBUG
+    if ( model_->logLevel (  ) >4  &&
+         fabs(norm-weights_[pivotRow])>1.0e-3*(1.0+norm))
+      printf("on row %d, true weight %g, old %g\n",
+             pivotRow,sqrt(norm),sqrt(weights_[pivotRow]));
+#endif
+    // could re-initialize here (could be expensive)
+    norm /= model_->alpha() * model_->alpha();
+
+    assert(norm);
+    //if (norm < TRY_NORM)
+    //norm = TRY_NORM;
+    // pivot element
+    alpha=0.0;
     double multiplier = 2.0 / model_->alpha();
     // look at updated column
     
@@ -291 +464 @@
     number = updatedColumn->getNumElements();
     which = updatedColumn->getIndices();
-
+
     int nSave=0;
-
+    double * work3 = alternateWeights_->denseVector();
+    int * which3 = alternateWeights_->getIndices();
     for (i =0; i < number; i++) {
       int iRow = which[i];
-      double theta = work[iRow];
-      if (theta) {
-        double devex = weights_[iRow];
-        work3[iRow]=devex; // save old
-        which3[nSave++]=iRow;
-        double value = work2[iRow];
-        devex +=  theta * (theta*norm+value * multiplier);
-        if (devex < TRY_NORM)
-          devex = TRY_NORM;
-        weights_[iRow]=devex;
-      }
-    }
-#ifdef CLP_DEBUG
-    assert(work3[pivotRow]&&work[pivotRow]);
-#endif
+      double theta = work[i];
+      if (iRow==pivotRow)
+        alpha = theta;
+      double devex = weights_[iRow];
+      work3[nSave]=devex; // save old
+      which3[nSave++]=iRow;
+      double value = work2[iRow];
+      devex +=  theta * (theta*norm+value * multiplier);
+      if (devex < TRY_NORM)
+        devex = TRY_NORM;
+      weights_[iRow]=devex;
+    }
+    assert (alpha);
+    alternateWeights_->setPackedMode(true);
     alternateWeights_->setNumElements(nSave);
     if (norm < TRY_NORM)
       norm = TRY_NORM;
     weights_[pivotRow] = norm;
-  }
-  spare->clear();
+    spare->clear();
+  }
 #ifdef CLP_DEBUG
   spare->checkClear();
 #endif
+  return alpha;
 }
 
     
@@ -345 +519 @@
   int numberColumns = model_->numberColumns();
 #endif
-  for (i=0;i<number;i++) {
-    int iRow=which[i];
-    int iPivot=pivotVariable[iRow];
-    double value = solution[iPivot];
-    double cost = model_->cost(iPivot);
-    double change = primalRatio*work[iRow];
-    value -= change;
-    changeObj -= change*cost;
-    solution[iPivot] = value;
-    double lower = model_->lower(iPivot);
-    double upper = model_->upper(iPivot);
-    // But if pivot row then use value of incoming
-    // Although it is safer to recompute before next selection
-    // in case something odd happens
-    if (iRow==pivotRow) {
-      iPivot = model_->sequenceIn();
-      lower = model_->lower(iPivot);
-      upper = model_->upper(iPivot);
-      value = model_->valueIncomingDual();
-    }
-    if (value<lower-tolerance) {
-      value -= lower;
-      value *= value;
-#ifdef COLUMN_BIAS
-      if (iPivot<numberColumns)
-        value *= COLUMN_BIAS; // bias towards columns
+  if (primalUpdate->packedMode()) {
+    for (i=0;i<number;i++) {
+      int iRow=which[i];
+      int iPivot=pivotVariable[iRow];
+      double value = solution[iPivot];
+      double cost = model_->cost(iPivot);
+      double change = primalRatio*work[i];
+      work[i]=0.0;
+      value -= change;
+      changeObj -= change*cost;
+      solution[iPivot] = value;
+      double lower = model_->lower(iPivot);
+      double upper = model_->upper(iPivot);
+      // But if pivot row then use value of incoming
+      // Although it is safer to recompute before next selection
+      // in case something odd happens
+      if (iRow==pivotRow) {
+        iPivot = model_->sequenceIn();
+        lower = model_->lower(iPivot);
+        upper = model_->upper(iPivot);
+        value = model_->valueIncomingDual();
+      }
+      if (value<lower-tolerance) {
+        value -= lower;
+        value *= value;
+#ifdef COLUMN_BIAS
+        if (iPivot<numberColumns)
+          value *= COLUMN_BIAS; // bias towards columns
 #endif
 #ifdef FIXED_BIAS
-      if (lower==upper)
-        value *= FIXED_BIAS; // bias towards taking out fixed variables
-#endif
-      // store square in list
-      if (infeas[iRow])
-        infeas[iRow] = value; // already there
-      else
-        infeasible_->quickAdd(iRow,value);
-    } else if (value>upper+tolerance) {
-      value -= upper;
-      value *= value;
-#ifdef COLUMN_BIAS
-      if (iPivot<numberColumns)
-        value *= COLUMN_BIAS; // bias towards columns
+        if (lower==upper)
+          value *= FIXED_BIAS; // bias towards taking out fixed variables
+#endif
+        // store square in list
+        if (infeas[iRow])
+          infeas[iRow] = value; // already there
+        else
+          infeasible_->quickAdd(iRow,value);
+      } else if (value>upper+tolerance) {
+        value -= upper;
+        value *= value;
+#ifdef COLUMN_BIAS
+        if (iPivot<numberColumns)
+          value *= COLUMN_BIAS; // bias towards columns
 #endif
 #ifdef FIXED_BIAS
-      if (lower==upper)
-        value *= FIXED_BIAS; // bias towards taking out fixed variables
-#endif
-      // store square in list
-      if (infeas[iRow])
-        infeas[iRow] = value; // already there
-      else
-        infeasible_->quickAdd(iRow,value);
-    } else {
-      // feasible - was it infeasible - if so set tiny
-      if (infeas[iRow])
-        infeas[iRow] = 1.0e-100;
-    }
-    work[iRow]=0.0;
+        if (lower==upper)
+          value *= FIXED_BIAS; // bias towards taking out fixed variables
+#endif
+        // store square in list
+        if (infeas[iRow])
+          infeas[iRow] = value; // already there
+        else
+          infeasible_->quickAdd(iRow,value);
+      } else {
+        // feasible - was it infeasible - if so set tiny
+        if (infeas[iRow])
+          infeas[iRow] = 1.0e-100;
+      }
+    }
+  } else {
+    for (i=0;i<number;i++) {
+      int iRow=which[i];
+      int iPivot=pivotVariable[iRow];
+      double value = solution[iPivot];
+      double cost = model_->cost(iPivot);
+      double change = primalRatio*work[iRow];
+      value -= change;
+      changeObj -= change*cost;
+      solution[iPivot] = value;
+      double lower = model_->lower(iPivot);
+      double upper = model_->upper(iPivot);
+      // But if pivot row then use value of incoming
+      // Although it is safer to recompute before next selection
+      // in case something odd happens
+      if (iRow==pivotRow) {
+        iPivot = model_->sequenceIn();
+        lower = model_->lower(iPivot);
+        upper = model_->upper(iPivot);
+        value = model_->valueIncomingDual();
+      }
+      if (value<lower-tolerance) {
+        value -= lower;
+        value *= value;
+#ifdef COLUMN_BIAS
+        if (iPivot<numberColumns)
+          value *= COLUMN_BIAS; // bias towards columns
+#endif
+#ifdef FIXED_BIAS
+        if (lower==upper)
+          value *= FIXED_BIAS; // bias towards taking out fixed variables
+#endif
+        // store square in list
+        if (infeas[iRow])
+          infeas[iRow] = value; // already there
+        else
+          infeasible_->quickAdd(iRow,value);
+      } else if (value>upper+tolerance) {
+        value -= upper;
+        value *= value;
+#ifdef COLUMN_BIAS
+        if (iPivot<numberColumns)
+          value *= COLUMN_BIAS; // bias towards columns
+#endif
+#ifdef FIXED_BIAS
+        if (lower==upper)
+          value *= FIXED_BIAS; // bias towards taking out fixed variables
+#endif
+        // store square in list
+        if (infeas[iRow])
+          infeas[iRow] = value; // already there
+        else
+          infeasible_->quickAdd(iRow,value);
+      } else {
+        // feasible - was it infeasible - if so set tiny
+        if (infeas[iRow])
+          infeas[iRow] = 1.0e-100;
+      }
+      work[iRow]=0.0;
+    }
   }
   primalUpdate->setNumElements(0);
     
@@ -442 +678 @@
       alternateWeights_->reserve(numberRows+
                                  model_->factorization()->maximumPivots());
-      if (!mode_||mode==5) {
+      if (!mode_||mode_==2||mode==5) {
         // initialize to 1.0 (can we do better?)
         for (i=0;i<numberRows;i++) {
     
@@ -455 +691 @@
         for (i=0;i<numberRows;i++) {
           double value=0.0;
-          array[i] = 1.0;
+          array[0] = 1.0;
           which[0] = i;
           alternateWeights_->setNumElements(1);
+          alternateWeights_->setPackedMode(true);
           model_->factorization()->updateColumnTranspose(temp,
                                                          alternateWeights_);
     
@@ -463 +700 @@
           int j;
           for (j=0;j<number;j++) {
-            int iRow=which[j];
-            value+=array[iRow]*array[iRow];
-            array[iRow]=0.0;
+            value+=array[j]*array[j];
+            array[j]=0.0;
           }
           alternateWeights_->setNumElements(0);
     
@@ -519 +755 @@
   }
   if (mode>=2) {
+    // Get dubious weights
+    //if (!dubiousWeights_)
+    //dubiousWeights_=new int[numberRows];
+    //model_->factorization()->getWeights(dubiousWeights_);
     infeasible_->clear();
     int iRow;
     
@@ -566 +806 @@
   int * which = alternateWeights_->getIndices();
   int i;
-  for (i=0;i<number;i++) {
-    int iRow = which[i];
-    weights_[iRow]=saved[iRow];
-    saved[iRow]=0.0;
+  if (alternateWeights_->packedMode()) {
+    for (i=0;i<number;i++) {
+      int iRow = which[i];
+      weights_[iRow]=saved[i];
+      saved[i]=0.0;
+    }
+  } else {
+    for (i=0;i<number;i++) {
+      int iRow = which[i];
+      weights_[iRow]=saved[iRow];
+      saved[iRow]=0.0;
+    }
   }
   alternateWeights_->setNumElements(0);
     
@@ -590 +838 @@
   delete [] weights_;
   weights_=NULL;
+  delete [] dubiousWeights_;
+  dubiousWeights_=NULL;
   delete infeasible_;
   infeasible_ = NULL;
     
@@ -598 +848 @@
   state_ =-1;
 }
+// Returns true if would not find any row
+bool
+ClpDualRowSteepest::looksOptimal() const
+{
+  int iRow;
+  const int * pivotVariable = model_->pivotVariable();
+  double tolerance=model_->currentPrimalTolerance();
+  // we can't really trust infeasibilities if there is primal error
+  // this coding has to mimic coding in checkPrimalSolution
+  double error = min(1.0e-3,model_->largestPrimalError());
+  // allow tolerance at least slightly bigger than standard
+  tolerance = tolerance +  error;
+  // But cap
+  tolerance = min(1000.0,tolerance);
+  int numberRows = model_->numberRows();
+  int numberInfeasible=0;
+  for (iRow=0;iRow<numberRows;iRow++) {
+    int iPivot=pivotVariable[iRow];
+    double value = model_->solution(iPivot);
+    double lower = model_->lower(iPivot);
+    double upper = model_->upper(iPivot);
+    if (value<lower-tolerance) {
+      numberInfeasible++;
+    } else if (value>upper+tolerance) {
+      numberInfeasible++;
+    }
+  }
+  return (numberInfeasible==0);
+}
 
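For orientation, the heart of the change to updateWeights() is the steepest-edge reference-weight recurrence it applies before returning the pivot element alpha. The sketch below is a hypothetical standalone illustration of that recurrence, not Clp code: the names (updateSteepestWeights, column, tau, kMinWeight) and the dense-array layout are assumptions made for clarity, whereas Clp itself operates on packed CoinIndexedVectors as shown in the diff.

#include <algorithm>
#include <vector>

// Minimal sketch of the weight update used above (assumed names, dense arrays).
// weights[i] : current steepest-edge weight of basic row i
// column[i]  : updated pivot column entry, "theta" in the diff
// tau[i]     : ftran'd pivot row entry, "value" (work2) in the diff
// pivotNorm  : exact weight of the leaving row, before dividing by alpha^2
// alpha      : pivot element, the value the revised updateWeights() now returns
void updateSteepestWeights(std::vector<double> &weights,
                           const std::vector<double> &column,
                           const std::vector<double> &tau,
                           double pivotNorm, double alpha, int pivotRow)
{
  const double kMinWeight = 1.0e-4;          // plays the role of TRY_NORM
  double norm = pivotNorm / (alpha * alpha); // rescaled weight of the pivot row
  double multiplier = 2.0 / alpha;
  for (int i = 0; i < (int)weights.size(); i++) {
    double theta = column[i];
    if (i == pivotRow || theta == 0.0)
      continue;                              // pivot row is reset separately below
    double devex = weights[i] + theta * (theta * norm + tau[i] * multiplier);
    weights[i] = std::max(devex, kMinWeight);
  }
  weights[pivotRow] = std::max(norm, kMinWeight);
}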