Changeset 3073
- Timestamp:
- Oct 4, 2010 9:35:24 PM (10 years ago)
- Location:
- coopr.pysp/trunk/coopr/pysp
- Files:
-
- 9 edited
Legend:
- Unmodified
- Added
- Removed
-
coopr.pysp/trunk/coopr/pysp/asynchph.py
r2573 r3073 369 369 new_w_index = reference_variable._index # TBD - need to be careful with the shallow copy here 370 370 new_w_parameter_name = "PHWEIGHT_"+reference_variable.name 371 new_w_parameter = Param(new_w_index, name=new_w_parameter_name)371 new_w_parameter = Param(new_w_index, name=new_w_parameter_name, mutable=True) 372 372 setattr(instance,new_w_parameter_name,new_w_parameter) 373 373 … … 381 381 new_avg_index = reference_variable._index # TBD - need to be careful with the shallow copy here 382 382 new_avg_parameter_name = "PHAVG_"+reference_variable.name 383 new_avg_parameter = Param(new_avg_index, name=new_avg_parameter_name)383 new_avg_parameter = Param(new_avg_index, name=new_avg_parameter_name, mutable=True) 384 384 setattr(instance,new_avg_parameter_name,new_avg_parameter) 385 385 … … 389 389 new_rho_index = reference_variable._index # TBD - need to be careful with the shallow copy here 390 390 new_rho_parameter_name = "PHRHO_"+reference_variable.name 391 new_rho_parameter = Param(new_rho_index, name=new_rho_parameter_name)391 new_rho_parameter = Param(new_rho_index, name=new_rho_parameter_name, mutable=True) 392 392 setattr(instance,new_rho_parameter_name,new_rho_parameter) 393 393 … … 426 426 new_min_index = reference_variable._index # TBD - need to be careful with the shallow copy here (and below) 427 427 new_min_parameter_name = "NODEMIN_"+reference_variable.name 428 new_min_parameter = Param(new_min_index, name=new_min_parameter_name)428 new_min_parameter = Param(new_min_index, name=new_min_parameter_name, mutable=True) 429 429 for index in new_min_index: 430 430 new_min_parameter[index] = 0.0 … … 433 433 new_avg_index = reference_variable._index 434 434 new_avg_parameter_name = "NODEAVG_"+reference_variable.name 435 new_avg_parameter = Param(new_avg_index, name=new_avg_parameter_name)435 new_avg_parameter = Param(new_avg_index, name=new_avg_parameter_name, mutable=True) 436 436 for index in new_avg_index: 437 437 new_avg_parameter[index] = 
0.0 … … 440 440 new_max_index = reference_variable._index 441 441 new_max_parameter_name = "NODEMAX_"+reference_variable.name 442 new_max_parameter = Param(new_max_index, name=new_max_parameter_name)442 new_max_parameter = Param(new_max_index, name=new_max_parameter_name, mutable=True) 443 443 for index in new_max_index: 444 444 new_max_parameter[index] = 0.0 … … 619 619 for index in variable_indices: 620 620 621 tree_node_average = tree_node._averages[variable.name][index]()621 tree_node_average = value(tree_node._averages[variable.name][index]) 622 622 623 623 for scenario in tree_node._scenarios: … … 627 627 if getattr(instance,variable.name)[index].status != VarStatus.unused: 628 628 629 current_variable_weight = getattr(instance, weight_parameter_name)[index].value629 current_variable_weight = value(getattr(instance, weight_parameter_name)[index]) 630 630 # if I'm maximizing, invert value prior to adding (hack to implement negatives) 631 631 if self._is_minimizing is False: … … 636 636 if self._is_minimizing is False: 637 637 new_variable_weight = (-new_variable_weight) 638 getattr(instance, weight_parameter_name)[index] .value= new_variable_weight638 getattr(instance, weight_parameter_name)[index] = new_variable_weight 639 639 640 640 # we shouldn't have to re-simplify the expression, as we aren't adding any constant-variable terms - just modifying parameters. 
… … 663 663 for index in variable_indices: 664 664 665 tree_node_average = tree_node._averages[variable.name][index]()665 tree_node_average = value(tree_node._averages[variable.name][index]) 666 666 667 667 if getattr(instance,variable.name)[index].status != VarStatus.unused: 668 668 669 current_variable_weight = getattr(instance, weight_parameter_name)[index].value669 current_variable_weight = value(getattr(instance, weight_parameter_name)[index]) 670 670 # if I'm maximizing, invert value prior to adding (hack to implement negatives) 671 671 if self._is_minimizing is False: … … 676 676 if self._is_minimizing is False: 677 677 new_variable_weight = (-new_variable_weight) 678 getattr(instance, weight_parameter_name)[index] .value= new_variable_weight678 getattr(instance, weight_parameter_name)[index] = new_variable_weight 679 679 680 680 # we shouldn't have to re-simplify the expression, as we aren't adding any constant-variable terms - just modifying parameters. … … 986 986 if is_used is True: 987 987 988 minimum_value = tree_node._minimums[variable_name][index]()989 maximum_value = tree_node._maximums[variable_name][index]()988 minimum_value = value(tree_node._minimums[variable_name][index]) 989 maximum_value = value(tree_node._maximums[variable_name][index]) 990 990 991 991 num_outputs_this_stage = num_outputs_this_stage + 1 … … 1006 1006 1007 1007 if output_values is True: 1008 average_value = tree_node._averages[variable_name][index]()1008 average_value = value(tree_node._averages[variable_name][index]) 1009 1009 print "\t\t\t\tValues: ", 1010 1010 for scenario in tree_node._scenarios: … … 1020 1020 for scenario in tree_node._scenarios: 1021 1021 instance = self._instances[scenario._name] 1022 print "%12.4f" % getattr(instance,weight_parameter_name)[index].value,1022 print "%12.4f" % value(getattr(instance,weight_parameter_name)[index]) 1023 1023 if scenario == tree_node._scenarios[-1]: 1024 1024 print "" 1025 1025 if output_averages: 1026 print 
"\t\t\t\tAverage: %12.4f" % ( tree_node._averages[variable_name][index].value)1026 print "\t\t\t\tAverage: %12.4f" % (value(tree_node._averages[variable_name][index])) 1027 1027 1028 1028 if num_outputs_this_stage == 0: -
coopr.pysp/trunk/coopr/pysp/convergence.py
r3072 r3073 141 141 142 142 instance = instances[scenario._name] 143 this_value = value(getattr(instance, reference_variable_name)[index])143 this_value = getattr(instance, reference_variable_name)[index].value 144 144 term_diff += scenario._probability * fabs(this_value - value(node_variable_average[index])) 145 145 … … 185 185 186 186 # should think about nixing the magic constant below (not sure how to best pararamterize it). 187 if fabs( node_variable_average[index]()) > 0.0001:187 if fabs(value(node_variable_average[index])) > 0.0001: 188 188 189 189 is_used = True # until proven otherwise … … 198 198 if is_used is True: 199 199 200 average_value = node_variable_average[index]()200 average_value = value(node_variable_average[index]) 201 201 202 202 for scenario in tree_node._scenarios: -
coopr.pysp/trunk/coopr/pysp/csvsolutionwriter.py
r3048 r3073 49 49 for var_name, var in tree_node._solutions.items(): 50 50 for idx in var: 51 print >>output_file, stage_name, ",", tree_node_name, ",", var_name, ",", index_to_string(idx), ",", var[idx] ()51 print >>output_file, stage_name, ",", tree_node_name, ",", var_name, ",", index_to_string(idx), ",", var[idx].value 52 52 53 53 output_file.close() -
coopr.pysp/trunk/coopr/pysp/ph.py
r3072 r3073 706 706 new_min_parameter = None 707 707 if (len(new_min_index) is 1) and (None in new_min_index): 708 new_min_parameter = Param(name=new_min_parameter_name )708 new_min_parameter = Param(name=new_min_parameter_name, mutable=True) 709 709 else: 710 new_min_parameter = Param(new_min_index, name=new_min_parameter_name)710 new_min_parameter = Param(new_min_index, name=new_min_parameter_name, mutable=True) 711 711 for index in new_min_index: 712 712 new_min_parameter[index] = 0.0 … … 717 717 new_avg_parameter = None 718 718 if (len(new_avg_index) is 1) and (None in new_avg_index): 719 new_avg_parameter = Param(name=new_avg_parameter_name )719 new_avg_parameter = Param(name=new_avg_parameter_name, mutable=True) 720 720 else: 721 new_avg_parameter = Param(new_avg_index, name=new_avg_parameter_name)721 new_avg_parameter = Param(new_avg_index, name=new_avg_parameter_name, mutable=True) 722 722 for index in new_avg_index: 723 723 new_avg_parameter[index] = 0.0 … … 728 728 new_max_parameter = None 729 729 if (len(new_max_index) is 1) and (None in new_max_index): 730 new_max_parameter = Param(name=new_max_parameter_name )730 new_max_parameter = Param(name=new_max_parameter_name, mutable=True) 731 731 else: 732 new_max_parameter = Param(new_max_index, name=new_max_parameter_name)732 new_max_parameter = Param(new_max_index, name=new_max_parameter_name, mutable=True) 733 733 for index in new_max_index: 734 734 new_max_parameter[index] = 0.0 … … 1417 1417 maximum_value = tree_node._maximums[variable_name] 1418 1418 else: 1419 minimum_value = tree_node._minimums[variable_name][index]()1420 maximum_value = tree_node._maximums[variable_name][index]()1419 minimum_value = value(tree_node._minimums[variable_name][index]) 1420 maximum_value = value(tree_node._maximums[variable_name][index]) 1421 1421 1422 1422 # there really isn't a need to output variables whose … … 1448 1448 1449 1449 if output_values is True: 1450 average_value = 
tree_node._averages[variable_name][index]()1450 average_value = value(tree_node._averages[variable_name][index]) 1451 1451 if output_only_statistics is False: 1452 1452 print "\t\t\t\tValues: ", … … 1472 1472 for scenario in tree_node._scenarios: 1473 1473 instance = self._instances[scenario._name] 1474 print "%12.4f" % getattr(instance,weight_parameter_name)[index].value,1474 print "%12.4f" % value(getattr(instance,weight_parameter_name)[index]), 1475 1475 if scenario == tree_node._scenarios[-1]: 1476 1476 print "" -
coopr.pysp/trunk/coopr/pysp/phobjective.py
r2818 r3073 196 196 def create_piecewise_constraint_expression(lb, ub, instance_variable, variable_average, quad_variable, tolerance): 197 197 198 penalty_at_lb = (lb - va riable_average()) * (lb - variable_average())199 penalty_at_ub = (ub - va riable_average()) * (ub - variable_average())198 penalty_at_lb = (lb - value(variable_average)) * (lb - value(variable_average)) 199 penalty_at_ub = (ub - value(variable_average)) * (ub - value(variable_average)) 200 200 slope = None 201 201 if fabs(ub-lb) > tolerance: … … 339 339 ub = x.ub() 340 340 341 node_min = node_min_parameter[index]()342 node_max = node_max_parameter[index]()341 node_min = value(node_min_parameter[index]) 342 node_max = value(node_max_parameter[index]) 343 343 344 344 # compute the breakpoint sequence according to the specified strategy. … … 350 350 compute_exponential_from_mean_breakpoints, 351 351 )[ breakpoint_strategy ] 352 args = ( lb, node_min, xavg(), node_max, ub, \352 args = ( lb, node_min, value(xavg), node_max, ub, \ 353 353 linearize_nonbinary_penalty_terms, tolerance ) 354 354 breakpoints = strategy( *args ) -
coopr.pysp/trunk/coopr/pysp/phserver.py
r2772 r3073 142 142 143 143 for index in weight_index: 144 target_weight_parameter[index] = weight_update[index]()144 target_weight_parameter[index] = value(weight_update[index]) 145 145 146 146 for average_update in new_averages: … … 151 151 152 152 for index in average_index: 153 target_average_parameter[index] = average_update[index]()153 target_average_parameter[index] = value(average_update[index]) 154 154 155 155 def update_rhos(self, scenario_name, new_rhos): … … 169 169 170 170 for index in rho_index: 171 target_rho_parameter[index] = rho_update[index]() # the value operator is crucial!171 target_rho_parameter[index] = value(rho_update[index]) 172 172 173 173 def update_tree_node_statistics(self, scenario_name, new_node_minimums, new_node_maximums): -
coopr.pysp/trunk/coopr/pysp/phutils.py
r3014 r3073 270 270 new_w_parameter = None 271 271 if (len(new_w_index) is 1) and (None in new_w_index): 272 new_w_parameter = Param(name=new_w_parameter_name )273 else: 274 new_w_parameter = Param(new_w_index, name=new_w_parameter_name)272 new_w_parameter = Param(name=new_w_parameter_name, mutable=True) 273 else: 274 new_w_parameter = Param(new_w_index, name=new_w_parameter_name, mutable=True) 275 275 setattr(instance,new_w_parameter_name,new_w_parameter) 276 276 … … 288 288 new_avg_parameter = None 289 289 if (len(new_avg_index) is 1) and (None in new_avg_index): 290 new_avg_parameter = Param(name=new_avg_parameter_name )291 else: 292 new_avg_parameter = Param(new_avg_index, name=new_avg_parameter_name)290 new_avg_parameter = Param(name=new_avg_parameter_name, mutable=True) 291 else: 292 new_avg_parameter = Param(new_avg_index, name=new_avg_parameter_name, mutable=True) 293 293 setattr(instance,new_avg_parameter_name,new_avg_parameter) 294 294 … … 302 302 new_rho_parameter = None 303 303 if (len(new_avg_index) is 1) and (None in new_avg_index): 304 new_rho_parameter = Param(name=new_rho_parameter_name )305 else: 306 new_rho_parameter = Param(new_rho_index, name=new_rho_parameter_name)304 new_rho_parameter = Param(name=new_rho_parameter_name, mutable=True) 305 else: 306 new_rho_parameter = Param(new_rho_index, name=new_rho_parameter_name, mutable=True) 307 307 setattr(instance,new_rho_parameter_name,new_rho_parameter) 308 308 … … 334 334 new_blend_parameter = None 335 335 if (len(new_avg_index) is 1) and (None in new_avg_index): 336 new_blend_parameter = Param(name=new_blend_parameter_name, within=Binary )337 else: 338 new_blend_parameter = Param(new_blend_index, name=new_blend_parameter_name, within=Binary )336 new_blend_parameter = Param(name=new_blend_parameter_name, within=Binary, mutable=True) 337 else: 338 new_blend_parameter = Param(new_blend_index, name=new_blend_parameter_name, within=Binary, mutable=True) 339 339 
setattr(instance,new_blend_parameter_name,new_blend_parameter) 340 340 -
coopr.pysp/trunk/coopr/pysp/scenariotree.py
r3072 r3073 135 135 for index in variable._index: 136 136 if variable[index].active is True: 137 variable[index] = average_parameter[index]()137 variable[index] = value(average_parameter[index]) 138 138 139 139 # … … 172 172 # IMPT: This implicitly assumes convergence across the scenarios - if not, garbage results. 173 173 instance = scenario_instance_map[self._scenarios[0]._name] 174 my_cost = instance.active_components(Var)[self._stage._cost_variable[0].name][self._stage._cost_variable[1]] ()174 my_cost = instance.active_components(Var)[self._stage._cost_variable[0].name][self._stage._cost_variable[1]].value 175 175 child_cost = 0.0 176 176 for child in self._children: … … 529 529 aggregate_cost = 0.0 530 530 for stage in self._stages: 531 instance_cost_variable = instance.active_components(Var)[stage._cost_variable[0].name][stage._cost_variable[1]] ()531 instance_cost_variable = instance.active_components(Var)[stage._cost_variable[0].name][stage._cost_variable[1]].value 532 532 aggregate_cost += instance_cost_variable 533 533 return aggregate_cost … … 811 811 # if this is a singleton variable, then it should necessarily be active - 812 812 # otherwise, it wouldn't be referenced in the stage!!! 
813 value = solution_variable[None] ()813 value = solution_variable[None].value 814 814 if fabs(value) > epsilon: 815 815 print "\t\t"+variable.name+"="+str(value) … … 817 817 for index in indices: 818 818 if (solution_variable[index].active is True) and (index in solution_variable): 819 value = solution_variable[index] ()819 value = solution_variable[index].value 820 820 if (value is not None) and (fabs(value) > epsilon): 821 821 print "\t\t"+variable.name+indexToString(index)+"="+str(value) … … 890 890 aggregate_cost = 0.0 891 891 for stage in self._stages: 892 instance_cost_variable = instance.active_components(Var)[stage._cost_variable[0].name][stage._cost_variable[1]] ()892 instance_cost_variable = instance.active_components(Var)[stage._cost_variable[0].name][stage._cost_variable[1]].value 893 893 print "\tStage=%20s Cost=%10.4f" % (stage._name, instance_cost_variable) 894 894 aggregate_cost += instance_cost_variable -
coopr.pysp/trunk/coopr/pysp/wwphextension.py
r2587 r3073 281 281 # is a singleton. this case be cleaned up when the source issue in Pyomo is fixed. 282 282 if (len(new_stat_index) is 1) and (None in new_stat_index): 283 new_stat_parameter = Param(name=new_stat_parameter_name )283 new_stat_parameter = Param(name=new_stat_parameter_name, mutable=True) 284 284 else: 285 new_stat_parameter = Param(new_stat_index, name=new_stat_parameter_name)285 new_stat_parameter = Param(new_stat_index, name=new_stat_parameter_name, mutable=True) 286 286 for newindex in new_stat_index: 287 287 new_stat_parameter[newindex] = 0 … … 293 293 new_conv_parameter = None 294 294 if (len(new_conv_index) is 1) and (None in new_conv_index): 295 new_conv_parameter = Param(name=new_conv_parameter_name )295 new_conv_parameter = Param(name=new_conv_parameter_name, mutable=True) 296 296 else: 297 new_conv_parameter = Param(new_conv_index, name=new_conv_parameter_name)297 new_conv_parameter = Param(new_conv_index, name=new_conv_parameter_name, mutable=True) 298 298 for newindex in new_conv_index: 299 299 new_conv_parameter[newindex] = 0.5 # not an int, so harmless … … 305 305 new_fix_parameter = None 306 306 if (len(new_fix_index) is 1) and (None in new_fix_index): 307 new_fix_parameter = Param(name=new_fix_parameter_name )307 new_fix_parameter = Param(name=new_fix_parameter_name, mutable=True) 308 308 else: 309 new_fix_parameter = Param(new_fix_index, name=new_fix_parameter_name)309 new_fix_parameter = Param(new_fix_index, name=new_fix_parameter_name, mutable=True) 310 310 for newindex in new_fix_index: 311 311 new_fix_parameter[newindex] = False … … 317 317 new_hash_parameter = None 318 318 if (len(new_hash_index) is 1) and (None in new_hash_index): 319 new_hash_parameter = Param(ph._iteration_index_set, name=new_hash_parameter_name )319 new_hash_parameter = Param(ph._iteration_index_set, name=new_hash_parameter_name, mutable=True) 320 320 else: 321 new_hash_parameter = Param(new_hash_index, ph._iteration_index_set, 
name=new_hash_parameter_name )321 new_hash_parameter = Param(new_hash_index, ph._iteration_index_set, name=new_hash_parameter_name, mutable=True) 322 322 for newindex in new_hash_index: 323 323 for i in range(0, ph._max_iterations+1): … … 400 400 401 401 if isinstance(variable_type, IntegerSet) or isinstance(variable_type, BooleanSet): 402 node_min = self.Int_If_Close_Enough(ph, tree_node._minimums[variable_name][index]())403 node_max = self.Int_If_Close_Enough(ph, tree_node._maximums[variable_name][index]())402 node_min = self.Int_If_Close_Enough(ph, value(tree_node._minimums[variable_name][index])) 403 node_max = self.Int_If_Close_Enough(ph, value(tree_node._maximums[variable_name][index])) 404 404 405 405 # update convergence prior to checking for fixing. … … 425 425 else: 426 426 427 node_min = tree_node._minimums[variable_name][index]()428 node_max = tree_node._maximums[variable_name][index]()427 node_min = value(tree_node._minimums[variable_name][index]) 428 node_max = value(tree_node._maximums[variable_name][index]) 429 429 430 430 self._continuous_convergence_tracking(ph, tree_node, variable_name, index, node_min, node_max) … … 492 492 493 493 if isinstance(variable_type, IntegerSet) or isinstance(variable_type, BooleanSet): 494 node_min = self.Int_If_Close_Enough(ph, tree_node._minimums[variable_name][index]())495 node_max = self.Int_If_Close_Enough(ph, tree_node._maximums[variable_name][index]())494 node_min = self.Int_If_Close_Enough(ph, value(tree_node._minimums[variable_name][index])) 495 node_max = self.Int_If_Close_Enough(ph, value(tree_node._maximums[variable_name][index])) 496 496 497 497 # update convergence prior to checking for fixing. … … 553 553 554 554 # obviously don't round in the continuous case. 
555 node_min = tree_node._minimums[variable_name][index]()556 node_max = tree_node._maximums[variable_name][index]()555 node_min = value(tree_node._minimums[variable_name][index]) 556 node_max = value(tree_node._maximums[variable_name][index]) 557 557 558 558 # update convergence prior to checking for fixing. … … 614 614 # keep track of cumulative iters of convergence to the same int 615 615 if (node_min == node_max) and (type(node_min) is types.IntType): 616 if node_min == tree_node._last_converged_val[variable_name][index]():617 tree_node._num_iters_converged[variable_name][index] .value = tree_node._num_iters_converged[variable_name][index].value+ 1616 if node_min == value(tree_node._last_converged_val[variable_name][index]): 617 tree_node._num_iters_converged[variable_name][index] = value(tree_node._num_iters_converged[variable_name][index]) + 1 618 618 else: 619 tree_node._num_iters_converged[variable_name][index] .value= 1619 tree_node._num_iters_converged[variable_name][index] = 1 620 620 tree_node._last_converged_val[variable_name][index] = node_min 621 621 else: 622 tree_node._num_iters_converged[variable_name][index] .value= 0623 tree_node._last_converged_val[variable_name][index] .value= 0.5622 tree_node._num_iters_converged[variable_name][index] = 0 623 tree_node._last_converged_val[variable_name][index] = 0.5 624 624 625 625 #========================= … … 627 627 # keep track of cumulative iters of convergence to the same value within tolerance. 
628 628 if abs(node_max - node_min) <= ph._integer_tolerance: 629 if abs(node_min - tree_node._last_converged_val[variable_name][index]()) <= ph._integer_tolerance:630 tree_node._num_iters_converged[variable_name][index] .value = tree_node._num_iters_converged[variable_name][index].value+ 1629 if abs(node_min - value(tree_node._last_converged_val[variable_name][index])) <= ph._integer_tolerance: 630 tree_node._num_iters_converged[variable_name][index] = value(tree_node._num_iters_converged[variable_name][index]) + 1 631 631 else: 632 tree_node._num_iters_converged[variable_name][index] .value= 1632 tree_node._num_iters_converged[variable_name][index] = 1 633 633 tree_node._last_converged_val[variable_name][index] = node_min 634 634 else: 635 tree_node._num_iters_converged[variable_name][index] .value= 0635 tree_node._num_iters_converged[variable_name][index] = 0 636 636 tree_node._last_converged_val[variable_name][index] = 0.2342343243223423 # TBD - avoid the magic constant! 637 637 … … 645 645 weight_parameter_name = "PHWEIGHT_"+variable_name 646 646 if index is None: 647 tree_node._w_hash[variable_name][ph._current_iteration] .value += getattr(instance,weight_parameter_name)[index].value* self.W_hash_rand_val647 tree_node._w_hash[variable_name][ph._current_iteration] += value(getattr(instance,weight_parameter_name)[index]) * self.W_hash_rand_val 648 648 else: 649 tree_node._w_hash[variable_name][index,ph._current_iteration] .value += getattr(instance,weight_parameter_name)[index].value* self.W_hash_rand_val649 tree_node._w_hash[variable_name][index,ph._current_iteration] += value(getattr(instance,weight_parameter_name)[index]) * self.W_hash_rand_val 650 650 self.W_hash_rand_val = (self.W_hash_b + self.W_hash_a * self.W_hash_rand_val) % self.W_hash_c 651 651 … … 664 664 for index in variable_index: 665 665 if index is None: 666 print "%4d %50ls %20.5f" % (ph._current_iteration, tree_node._w_hash[variable_name][ph._current_iteration], 
tree_node._w_hash[variable_name][ph._current_iteration]())666 print "%4d %50ls %20.5f" % (ph._current_iteration, tree_node._w_hash[variable_name][ph._current_iteration], value(tree_node._w_hash[variable_name][ph._current_iteration])) 667 667 else: 668 print "%4d %50ls %20.5f" % (ph._current_iteration, tree_node._w_hash[variable_name][index,ph._current_iteration], tree_node._w_hash[variable_name][index,ph._current_iteration]())668 print "%4d %50ls %20.5f" % (ph._current_iteration, tree_node._w_hash[variable_name][index,ph._current_iteration], value(tree_node._w_hash[variable_name][index,ph._current_iteration])) 669 669 670 670 #========================= … … 674 674 # if the values are converged, then don't report a cycle - often, the weights at convergence are 0s, and even 675 675 # if they aren't, they won't move if the values are uniform. 676 if ( tree_node._num_iters_converged[variable_name][index].value == 0) and (tree_node._fixed_var_flag[variable_name][index].valueis False):676 if (value(tree_node._num_iters_converged[variable_name][index]) == 0) and (value(tree_node._fixed_var_flag[variable_name][index]) is False): 677 677 current_hash_value = None 678 678 if index is None: 679 current_hash_value = tree_node._w_hash[variable_name][ph._current_iteration]()679 current_hash_value = value(tree_node._w_hash[variable_name][ph._current_iteration]) 680 680 else: 681 current_hash_value = tree_node._w_hash[variable_name][index,ph._current_iteration]()681 current_hash_value = value(tree_node._w_hash[variable_name][index,ph._current_iteration]) 682 682 # scan starting from the farthest point back in history to the closest - this is required to 683 683 # identify the longest possible cycles, which is what we want. 
… … 685 685 this_hash_value = None 686 686 if index is None: 687 this_hash_value = tree_node._w_hash[variable_name][i]()687 this_hash_value = value(tree_node._w_hash[variable_name][i]) 688 688 else: 689 this_hash_value = tree_node._w_hash[variable_name][index,i]()689 this_hash_value = value(tree_node._w_hash[variable_name][index,i]) 690 690 if abs(this_hash_value - current_hash_value) <= ph._integer_tolerance: 691 691 if report_possible_cycles is True: … … 719 719 getattr(instance,variable.name)[index].fixed = True 720 720 getattr(instance,variable.name)[index].value = fix_value 721 tree_node._fixed_var_flag[variable.name][index] .value= True721 tree_node._fixed_var_flag[variable.name][index] = True 722 722 723 723 variable_type = variable.domain … … 726 726 # pretty-print the index, string the trailing spaces from the strings. 727 727 if index is None: 728 print "Fixing variable="+variable.name+" at tree node="+tree_node._name+" to value="+str(fix_value)+"; converged for "+str( tree_node._num_iters_converged[variable.name][index]())+" iterations"728 print "Fixing variable="+variable.name+" at tree node="+tree_node._name+" to value="+str(fix_value)+"; converged for "+str(value(tree_node._num_iters_converged[variable.name][index]))+" iterations" 729 729 else: 730 print "Fixing variable="+variable.name+indexToString(index)+" at tree node="+tree_node._name+" to value="+str(fix_value)+"; converged for "+str( tree_node._num_iters_converged[variable.name][index]())+" iterations"730 print "Fixing variable="+variable.name+indexToString(index)+" at tree node="+tree_node._name+" to value="+str(fix_value)+"; converged for "+str(value(tree_node._num_iters_converged[variable.name][index]))+" iterations" 731 731 fixing_reported = True 732 732 if isinstance(variable_type, IntegerSet) or isinstance(variable_type, BooleanSet): … … 744 744 # jpw: i don't think this logic is correct - shouldn't "non-bound" be moved after the lb/ub checks - this doesn't check a bound! 
745 745 # dlw reply: i meant it to mean "without regard to bound" so i have updated the document 746 if nb_iters > 0 and tree_node._num_iters_converged[variable_name][index]() >= nb_iters:746 if nb_iters > 0 and value(tree_node._num_iters_converged[variable_name][index]) >= nb_iters: 747 747 return True 748 748 else: … … 756 756 if variable[index].ub is not None: 757 757 ub = variable[index].ub() 758 conval = tree_node._last_converged_val[variable_name][index]()758 conval = value(tree_node._last_converged_val[variable_name][index]) 759 759 # note: if they are converged node_max == node_min 760 if (lb is not None) and (lb_iters > 0) and ( tree_node._num_iters_converged[variable_name][index]() >= lb_iters) and (conval == lb):760 if (lb is not None) and (lb_iters > 0) and (value(tree_node._num_iters_converged[variable_name][index]) >= lb_iters) and (conval == lb): 761 761 return True 762 elif (ub is not None) and (ub_iters > 0) and ( tree_node._num_iters_converged[variable_name][index]() >= ub_iters) and (conval == ub):762 elif (ub is not None) and (ub_iters > 0) and (value(tree_node._num_iters_converged[variable_name][index]) >= ub_iters) and (conval == ub): 763 763 return True 764 764 # if we are still here, nothing triggered fixing … … 769 769 770 770 if self.fix_continuous_variables is True: 771 if self.FixWhenItersConvergedContinuous > 0 and tree_node._num_iters_converged[variable.name][index]() >= self.FixWhenItersConvergedContinuous:771 if self.FixWhenItersConvergedContinuous > 0 and value(tree_node._num_iters_converged[variable.name][index]) >= self.FixWhenItersConvergedContinuous: 772 772 return True 773 773 … … 788 788 variable_name = variable.name 789 789 if isinstance(variable_type, IntegerSet) or isinstance(variable_type, BooleanSet): 790 node_min = self.Int_If_Close_Enough(ph, tree_node._minimums[variable_name][index]())791 node_max = self.Int_If_Close_Enough(ph, tree_node._maximums[variable_name][index]())792 anywhere = round( 
tree_node._averages[variable.name][index].value)790 node_min = self.Int_If_Close_Enough(ph, value(tree_node._minimums[variable_name][index])) 791 node_max = self.Int_If_Close_Enough(ph, value(tree_node._maximums[variable_name][index])) 792 anywhere = round(value(tree_node._averages[variable.name][index])) 793 793 else: 794 node_min = tree_node._minimums[variable_name][index]()795 node_max = tree_node._maximums[variable_name][index]()796 anywhere = tree_node._averages[variable.name][index].value794 node_min = value(tree_node._minimums[variable_name][index]) 795 node_max = value(tree_node._maximums[variable_name][index]) 796 anywhere = value(tree_node._averages[variable.name][index]) 797 797 798 798 slam_basis_string = "" … … 856 856 for tree_node in variable[full_index]._stage._tree_nodes: 857 857 # determine if the variable is already fixed (the trusting version...). 858 if tree_node._fixed_var_flag[variable_name][full_index].valueis False:858 if value(tree_node._fixed_var_flag[variable_name][full_index]) is False: 859 859 didone = self._slam(ph, tree_node, variable, full_index) 860 860 if didone: … … 908 908 909 909 if isinstance(variable_type, IntegerSet) or isinstance(variable_type, BooleanSet): 910 node_min = self.Int_If_Close_Enough(ph, tree_node._minimums[variable_name][index]())911 node_max = self.Int_If_Close_Enough(ph, tree_node._maximums[variable_name][index]())910 node_min = self.Int_If_Close_Enough(ph, value(tree_node._minimums[variable_name][index])) 911 node_max = self.Int_If_Close_Enough(ph, value(tree_node._maximums[variable_name][index])) 912 912 913 913 if node_min == node_max:
Note: See TracChangeset for help on using the changeset viewer.