25 return context->GetLiteralRepresentative(ref_);
29 return context->GetVariableRepresentative(ref_);
46 IntegerVariableProto* const var_proto = working_model->add_variables();
47 var_proto->add_domain(cst);
48 var_proto->add_domain(cst);
51 return constant_to_ref_[cst].Get(this);
57 ct->add_enforcement_literal(a);
58 ct->mutable_bool_and()->add_literals(b);
63 ConstraintProto* const imply = working_model->add_constraints();
67 imply->mutable_enforcement_literal()->Resize(1, b);
68 LinearConstraintProto* mutable_linear = imply->mutable_linear();
69 mutable_linear->mutable_vars()->Resize(1, x);
70 mutable_linear->mutable_coeffs()->Resize(1, 1);
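The lines above show the half-reified pattern used by AddImplyInDomain(): the literal b becomes the enforcement literal of a one-variable linear constraint on x whose allowed values are the given domain. A minimal standalone sketch of the same pattern, assuming only the public protos and the FillDomainInProto() helper listed at the end of this page (the name AddImplyInDomainSketch is ours, not part of the library):

#include "ortools/sat/cp_model.pb.h"
#include "ortools/sat/cp_model_utils.h"
#include "ortools/util/sorted_interval_list.h"

namespace operations_research {
namespace sat {

// Adds "b => (x takes a value in domain)" to the given model proto.
void AddImplyInDomainSketch(int b, int x, const Domain& domain,
                            CpModelProto* working_model) {
  ConstraintProto* const imply = working_model->add_constraints();
  imply->add_enforcement_literal(b);
  LinearConstraintProto* const linear = imply->mutable_linear();
  linear->add_vars(x);
  linear->add_coeffs(1);
  FillDomainInProto(domain, linear);
}

}  // namespace sat
}  // namespace operations_research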
86 return domains[var].Min() >= 0 && domains[var].Max() <= 1;
92 return domains[lit].Min() == 1;
101 return domains[lit].Max() == 0;
120 int64 result = expr.offset();
121 for (int i = 0; i < expr.vars_size(); ++i) {
122 const int64 coeff = expr.coeffs(i);
124 result += coeff * MinOf(expr.vars(i));
126 result += coeff * MaxOf(expr.vars(i));
133 int64 result = expr.offset();
134 for (int i = 0; i < expr.vars_size(); ++i) {
135 const int64 coeff = expr.coeffs(i);
137 result += coeff * MaxOf(expr.vars(i));
139 result += coeff * MinOf(expr.vars(i));
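The two loops above compute the bounds of a LinearExpressionProto: for the minimum, a positive coefficient contributes through the variable's lower bound and a negative one through its upper bound, and symmetrically for the maximum. A self-contained sketch of the lower-bound computation, with the per-variable bounds passed as callables instead of the context's MinOf()/MaxOf() (the function name LinearExpressionMin is ours):

#include <cstdint>
#include <functional>

#include "ortools/sat/cp_model.pb.h"

namespace operations_research {
namespace sat {

int64_t LinearExpressionMin(const LinearExpressionProto& expr,
                            const std::function<int64_t(int)>& min_of_var,
                            const std::function<int64_t(int)>& max_of_var) {
  int64_t result = expr.offset();
  for (int i = 0; i < expr.vars_size(); ++i) {
    const int64_t coeff = expr.coeffs(i);
    if (coeff > 0) {
      result += coeff * min_of_var(expr.vars(i));  // Smallest contribution.
    } else {
      result += coeff * max_of_var(expr.vars(i));  // coeff <= 0.
    }
  }
  return result;
}

}  // namespace sat
}  // namespace operations_research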
147 bool PresolveContext::VariableIsNotRepresentativeOfEquivalenceClass(
168 return var_to_constraints_[var].size() == 1 &&
169 VariableIsNotRepresentativeOfEquivalenceClass(var) &&
179 var_to_constraints_[var].size() == 2 &&
180 VariableIsNotRepresentativeOfEquivalenceClass(var);
187 return var_to_constraints_[PositiveRef(ref)].empty();
199 if (IsFixed(ref)) return false;
200 if (!removed_variables_.contains(PositiveRef(ref))) return false;
201 if (!var_to_constraints_[PositiveRef(ref)].empty()) {
203 << " was removed, yet it appears in some constraints!";
204 LOG(INFO) << "affine relation: "
206 for (const int c : var_to_constraints_[PositiveRef(ref)]) {
207 LOG(INFO) << "constraint #" << c << " : "
208 << (c >= 0 ? working_model->constraints(c).ShortDebugString()
218 return var_to_num_linear1_[var] == var_to_constraints_[var].size();
224 result = domains[ref];
235 return domains[ref].Contains(value);
239 int ref, const Domain& domain, bool* domain_modified) {
244 if (domains[var].IsIncludedIn(domain)) {
247 domains[var] = domains[var].IntersectionWith(domain);
250 if (domains[var].IsIncludedIn(temp)) {
253 domains[var] = domains[var].IntersectionWith(temp);
256 if (domain_modified != nullptr) {
257 *domain_modified = true;
260 if (domains[var].IsEmpty()) {
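IntersectDomainWith() above first checks for inclusion (nothing to change), otherwise intersects the stored domain, reports the change through *domain_modified, and finally detects emptiness. A hypothetical caller, written only against the member signatures listed at the end of this page (TightenLowerBound is our name and not part of the library):

// Returns false iff the bound change proves the model infeasible.
bool TightenLowerBound(PresolveContext* context, int var, int64 new_lb) {
  bool domain_modified = false;
  if (!context->IntersectDomainWith(var, Domain(new_lb, kint64max),
                                    &domain_modified)) {
    return false;  // The intersection is empty, the model is UNSAT.
  }
  if (domain_modified) {
    context->UpdateRuleStats("sketch: tightened lower bound");
  }
  return true;
}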
293 void PresolveContext::UpdateLinear1Usage(const ConstraintProto& ct, int c) {
294 const int old_var = constraint_to_linear1_var_[c];
296 var_to_num_linear1_[old_var]--;
298 if (ct.constraint_case() == ConstraintProto::ConstraintCase::kLinear &&
299 ct.linear().vars().size() == 1) {
301 constraint_to_linear1_var_[c] = var;
302 var_to_num_linear1_[var]++;
306 void PresolveContext::AddVariableUsage(int c) {
310 for (const int v : constraint_to_vars_[c]) {
312 var_to_constraints_[v].insert(c);
314 for (const int i : constraint_to_intervals_[c]) interval_usage_[i]++;
315 UpdateLinear1Usage(ct, c);
319 if (is_unsat) return;
324 for (const int i : constraint_to_intervals_[c]) interval_usage_[i]--;
326 for (const int i : constraint_to_intervals_[c]) interval_usage_[i]++;
331 const std::vector<int>& old_usage = constraint_to_vars_[c];
332 const int old_size = old_usage.size();
334 for (const int var : tmp_new_usage_) {
336 while (i < old_size && old_usage[i] < var) {
337 var_to_constraints_[old_usage[i]].erase(c);
340 if (i < old_size && old_usage[i] == var) {
343 var_to_constraints_[var].insert(c);
346 for (; i < old_size; ++i) var_to_constraints_[old_usage[i]].erase(c);
347 constraint_to_vars_[c] = tmp_new_usage_;
349 UpdateLinear1Usage(ct, c);
353 return constraint_to_vars_.size() == working_model->constraints_size();
357 if (is_unsat) return;
358 const int old_size = constraint_to_vars_.size();
361 constraint_to_vars_.resize(new_size);
362 constraint_to_linear1_var_.resize(new_size, -1);
363 constraint_to_intervals_.resize(new_size);
364 interval_usage_.resize(new_size);
365 for (int c = old_size; c < new_size; ++c) {
372 if (is_unsat) return true;
373 if (constraint_to_vars_.size() != working_model->constraints_size()) {
374 LOG(INFO) << "Wrong constraint_to_vars size!";
377 for (int c = 0; c < constraint_to_vars_.size(); ++c) {
378 if (constraint_to_vars_[c] !=
380 LOG(INFO) << "Wrong variables usage for constraint: \n"
382 << "old_size: " << constraint_to_vars_[c].size();
386 int num_in_objective = 0;
387 for (int v = 0; v < var_to_constraints_.size(); ++v) {
390 if (!objective_map_.contains(v)) {
392 << " is marked as part of the objective but isn't.";
397 if (num_in_objective != objective_map_.size()) {
398 LOG(INFO) << "Not all variables are marked as part of the objective";
415 bool PresolveContext::AddRelation(int x, int y, int64 c, int64 o,
419 if (std::abs(c) != 1) return repo->TryAdd(x, y, c, o);
436 bool allow_rep_x = m_x < m_y;
437 bool allow_rep_y = m_y < m_x;
444 return repo->TryAdd(x, y, c, o, allow_rep_x, allow_rep_y);
452 const int rep = constant_to_ref_[min].Get(this);
455 AddRelation(var, rep, 1, 0, &affine_relations_);
456 AddRelation(var, rep, 1, 0, &var_equiv_relations_);
494 for (auto& ref_map : var_to_constraints_) {
520 if (affine_relations_.ClassSize(rep) == 1 &&
521 var_equiv_relations_.ClassSize(rep) == 1) {
533 if (is_unsat) return false;
542 if (lhs % std::abs(coeff) != 0) {
571 if (b != 0) is_unsat = true;
579 const int64 unique_value = -b / a;
600 bool added = AddRelation(x, y, c, o, &affine_relations_);
601 if ((c == 1 || c == -1) && o == 0) {
602 added |= AddRelation(x, y, c, o, &var_equiv_relations_);
615 if (x != rep) encoding_remap_queue_.push_back(x);
616 if (y != rep) encoding_remap_queue_.push_back(y);
632 LOG(INFO) << "Cannot add relation " << DomainOf(ref_x) << " = " << coeff
633 << " * " << DomainOf(ref_y) << " + " << offset
634 << " because of incompatibilities with existing relation: ";
635 for (const int ref : {ref_x, ref_y}) {
638 << DomainOf(r.representative) << " + " << r.offset;
646 if (is_unsat) return;
655 if (ref_a == ref_b) return;
672 const auto insert_status = abs_relations_.insert(
674 if (!insert_status.second) {
676 const int candidate = insert_status.first->second.Get(this);
677 if (removed_variables_.contains(candidate)) {
687 auto it = abs_relations_.find(target_ref);
688 if (it == abs_relations_.end()) return false;
695 const int candidate = it->second.Get(this);
696 if (removed_variables_.contains(candidate)) {
697 abs_relations_.erase(it);
725 DCHECK_NE(positive_possible, negative_possible);
768 for (int i = domains.size(); i < working_model->variables_size(); ++i) {
770 if (domains.back().IsEmpty()) {
777 var_to_constraints_.resize(domains.size());
778 var_to_num_linear1_.resize(domains.size());
783bool PresolveContext::RemapEncodingMaps() {
792 encoding_remap_queue_.clear();
798 for (const int var : encoding_remap_queue_) {
802 int num_remapping = 0;
806 const absl::flat_hash_map<int64, SavedLiteral>& var_map = encoding_[var];
807 for (const auto& entry : var_map) {
808 const int lit = entry.second.Get(this);
809 if (removed_variables_.contains(PositiveRef(lit))) continue;
810 if ((entry.first - r.offset) % r.coeff != 0) continue;
815 if (is_unsat) return false;
817 encoding_.erase(var);
822 const absl::flat_hash_map<int64, absl::flat_hash_set<int>>& var_map =
823 eq_half_encoding_[var];
824 for (const auto& entry : var_map) {
825 if ((entry.first - r.offset) % r.coeff != 0) continue;
827 for (int literal : entry.second) {
832 if (is_unsat) return false;
835 eq_half_encoding_.erase(var);
840 const absl::flat_hash_map<int64, absl::flat_hash_set<int>>& var_map =
841 neq_half_encoding_[var];
842 for (const auto& entry : var_map) {
843 if ((entry.first - r.offset) % r.coeff != 0) continue;
845 for (int literal : entry.second) {
850 if (is_unsat) return false;
853 neq_half_encoding_.erase(var);
856 if (num_remapping > 0) {
857 VLOG(1) << "Remapped " << num_remapping << " encodings due to " << var
861 encoding_remap_queue_.clear();
871 if (is_unsat) return;
873 absl::flat_hash_map<int64, SavedLiteral>& var_map = encoding_[var];
876 auto min_it = var_map.find(var_min);
877 if (min_it != var_map.end()) {
878 const int old_var = PositiveRef(min_it->second.Get(this));
879 if (removed_variables_.contains(old_var)) {
880 var_map.erase(min_it);
881 min_it = var_map.end();
886 auto max_it = var_map.find(var_max);
887 if (max_it != var_map.end()) {
888 const int old_var = PositiveRef(max_it->second.Get(this));
889 if (removed_variables_.contains(old_var)) {
890 var_map.erase(max_it);
891 max_it = var_map.end();
898 if (min_it != var_map.end() && max_it != var_map.end()) {
899 min_literal = min_it->second.Get(this);
900 max_literal = max_it->second.Get(this);
904 if (is_unsat) return;
909 } else if (min_it != var_map.end() && max_it == var_map.end()) {
911 min_literal = min_it->second.Get(this);
914 } else if (min_it == var_map.end() && max_it != var_map.end()) {
916 max_literal = max_it->second.Get(this);
943 var_max - var_min, var_min));
946 var_min - var_max, var_max));
951 void PresolveContext::InsertVarValueEncodingInternal(int literal, int var,
953 bool add_constraints) {
956 absl::flat_hash_map<int64, SavedLiteral>& var_map = encoding_[var];
962 const auto it = var_map.find(value);
963 if (it != var_map.end()) {
964 const int old_var = PositiveRef(it->second.Get(this));
965 if (removed_variables_.contains(old_var)) {
971 var_map.insert(std::make_pair(value, SavedLiteral(literal)));
974 if (!insert.second) {
975 const int previous_literal = insert.first->second.Get(this);
977 if (literal != previous_literal) {
979 "variables: merge equivalent var value encoding literals");
992 if (add_constraints) {
1000 bool PresolveContext::InsertHalfVarValueEncoding(int literal, int var,
1002 if (is_unsat) return false;
1009 if (!direct_set.insert(literal).second) return false;
1012 << (imply_eq ? ") == " : ") != ") << value;
1019 for (const int other : other_set) {
1024 InsertVarValueEncodingInternal(imply_eq_literal, var, value,
1032 bool PresolveContext::CanonicalizeEncoding(int* ref, int64* value) {
1034 if ((*value - r.offset) % r.coeff != 0) return false;
1035 *ref = r.representative;
1042 if (!RemapEncodingMaps()) return;
1043 if (!CanonicalizeEncoding(&ref, &value)) return;
1045 InsertVarValueEncodingInternal(literal, ref, value, true);
1050 if (!RemapEncodingMaps()) return false;
1051 if (!CanonicalizeEncoding(&var, &value)) return false;
1058 if (!RemapEncodingMaps()) return false;
1059 if (!CanonicalizeEncoding(&var, &value)) return false;
1065 if (!RemapEncodingMaps()) return false;
1066 if (!CanonicalizeEncoding(&ref, &value)) return false;
1067 const absl::flat_hash_map<int64, SavedLiteral>& var_map = encoding_[ref];
1068 const auto it = var_map.find(value);
1069 if (it != var_map.end()) {
1071 *literal = it->second.Get(this);
1083 const int var = ref;
1086 if (!domains[var].Contains(value)) {
1091 absl::flat_hash_map<int64, SavedLiteral>& var_map = encoding_[var];
1092 auto it = var_map.find(value);
1093 if (it != var_map.end()) {
1094 return it->second.Get(this);
1098 if (domains[var].Size() == 1) {
1101 return true_literal;
1107 if (domains[var].Size() == 2) {
1109 const int64 other_value = value == var_min ? var_max : var_min;
1110 auto other_it = var_map.find(other_value);
1111 if (other_it != var_map.end()) {
1119 if (var_min == 0 && var_max == 1) {
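GetOrCreateVarValueEncoding() above returns, and caches in encoding_, a literal that is true exactly when the variable takes the given value; for a domain of size one it returns the true literal, and for a domain of size two it reuses the encoding of the other value. A hypothetical wrapper (LiteralForValue is our name) using only the listed signatures:

// Returns a literal equivalent to (var == value); repeated calls return the
// same cached literal.
int LiteralForValue(PresolveContext* context, int var, int64 value) {
  const int lit = context->GetOrCreateVarValueEncoding(var, value);
  // NegatedRef(lit) is then equivalent to (var != value).
  return lit;
}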
1140 objective_offset_ = obj.offset();
1141 objective_scaling_factor_ = obj.scaling_factor();
1142 if (objective_scaling_factor_ == 0.0) {
1143 objective_scaling_factor_ = 1.0;
1145 if (!obj.domain().empty()) {
1148 objective_domain_is_constraining_ = true;
1151 objective_domain_is_constraining_ = false;
1158 objective_overflow_detection_ = 0;
1160 objective_map_.clear();
1161 for (int i = 0; i < obj.vars_size(); ++i) {
1162 const int ref = obj.vars(i);
1163 int64 coeff = obj.coeffs(i);
1167 objective_overflow_detection_ +=
1170 objective_map_[var] += coeff;
1171 if (objective_map_[var] == 0) {
1172 objective_map_.erase(var);
1181 int64 offset_change = 0;
1187 tmp_entries_.clear();
1188 for (const auto& entry : objective_map_) {
1189 tmp_entries_.push_back(entry);
1195 for (const auto& entry : tmp_entries_) {
1196 const int var = entry.first;
1197 const auto it = objective_map_.find(var);
1198 if (it == objective_map_.end()) continue;
1199 const int64 coeff = it->second;
1206 var_to_constraints_[var].size() == 1 &&
1221 offset_change += coeff * MinOf(var);
1223 objective_map_.erase(var);
1230 objective_map_.erase(var);
1234 offset_change += coeff * r.offset;
1238 if (new_coeff == 0) {
1251 Domain implied_domain(0);
1255 tmp_entries_.clear();
1256 for (const auto& entry : objective_map_) {
1257 tmp_entries_.push_back(entry);
1259 std::sort(tmp_entries_.begin(), tmp_entries_.end());
1260 for (const auto& entry : tmp_entries_) {
1261 const int var = entry.first;
1262 const int64 coeff = entry.second;
1277 objective_offset_ += offset_change;
1281 for (auto& entry : objective_map_) {
1282 entry.second /= gcd;
1285 objective_offset_ /= static_cast<double>(gcd);
1286 objective_scaling_factor_ *= static_cast<double>(gcd);
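Dividing every objective coefficient and the offset by their common divisor gcd while multiplying the scaling factor by the same gcd leaves the reported objective value unchanged, since scaling * (sum_i c_i * x_i + offset) == (scaling * gcd) * (sum_i (c_i / gcd) * x_i + offset / gcd); for example, with gcd = 2, 1 * (4x + 6y + 2) == 2 * (2x + 3y + 1).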
1289 if (objective_domain_.IsEmpty()) return false;
1294 objective_domain_is_constraining_ =
1302 int var_in_equality, int64 coeff_in_equality,
1303 const ConstraintProto& equality, std::vector<int>* new_vars_in_objective) {
1304 CHECK(equality.enforcement_literal().empty());
1307 if (new_vars_in_objective != nullptr) new_vars_in_objective->clear();
1311 const int64 coeff_in_objective =
1314 CHECK_EQ(coeff_in_objective % coeff_in_equality, 0);
1315 const int64 multiplier = coeff_in_objective / coeff_in_equality;
1319 for (int i = 0; i < equality.linear().vars().size(); ++i) {
1320 int var = equality.linear().vars(i);
1322 int64 coeff = equality.linear().coeffs(i);
1326 const int64 new_value =
1328 objective_overflow_detection_ -
1329 std::abs(coeff_in_equality) *
1331 std::abs(MaxOf(var_in_equality))));
1332 if (new_value == kint64max) return false;
1333 objective_overflow_detection_ = new_value;
1335 for (int i = 0; i < equality.linear().vars().size(); ++i) {
1336 int var = equality.linear().vars(i);
1337 int64 coeff = equality.linear().coeffs(i);
1342 if (var == var_in_equality) continue;
1344 int64& map_ref = objective_map_[var];
1345 if (map_ref == 0 && new_vars_in_objective != nullptr) {
1346 new_vars_in_objective->push_back(var);
1348 map_ref -= coeff * multiplier;
1351 objective_map_.erase(var);
1358 objective_map_.erase(var_in_equality);
1370 objective_offset_ += static_cast<double>(offset.Min());
1376 objective_domain_is_constraining_ = true;
1378 if (objective_domain_.IsEmpty()) {
1386 std::vector<std::pair<int, int64>> entries;
1387 for (const auto& entry : objective_map_) {
1388 entries.push_back(entry);
1390 std::sort(entries.begin(), entries.end());
1392 CpObjectiveProto* mutable_obj = working_model->mutable_objective();
1393 mutable_obj->set_offset(objective_offset_);
1394 mutable_obj->set_scaling_factor(objective_scaling_factor_);
1396 mutable_obj->clear_vars();
1397 mutable_obj->clear_coeffs();
1398 for (const auto& entry : entries) {
1399 mutable_obj->add_vars(entry.first);
1400 mutable_obj->add_coeffs(entry.second);
1408 if (active_j < active_i) std::swap(active_i, active_j);
1410 const std::tuple<int, int, int, int> key =
1411 std::make_tuple(time_i, time_j, active_i, active_j);
1412 const auto& it = reified_precedences_cache_.find(key);
1413 if (it != reified_precedences_cache_.end()) return it->second;
1416 reified_precedences_cache_[key] = result;
1419 ConstraintProto* const lesseq = working_model->add_constraints();
1420 lesseq->add_enforcement_literal(result);
1421 lesseq->mutable_linear()->add_vars(time_i);
1422 lesseq->mutable_linear()->add_vars(time_j);
1423 lesseq->mutable_linear()->add_coeffs(-1);
1424 lesseq->mutable_linear()->add_coeffs(1);
1425 lesseq->mutable_linear()->add_domain(0);
1426 lesseq->mutable_linear()->add_domain(kint64max);
1435 ConstraintProto* const greater = working_model->add_constraints();
1436 greater->mutable_linear()->add_vars(time_i);
1437 greater->mutable_linear()->add_vars(time_j);
1438 greater->mutable_linear()->add_coeffs(-1);
1439 greater->mutable_linear()->add_coeffs(1);
1440 greater->mutable_linear()->add_domain(kint64min);
1441 greater->mutable_linear()->add_domain(-1);
1444 greater->add_enforcement_literal(NegatedRef(result));
1445 greater->add_enforcement_literal(active_i);
1446 greater->add_enforcement_literal(active_j);
1453 const auto& rev_it = reified_precedences_cache_.find(
1454 std::make_tuple(time_j, time_i, active_i, active_j));
1455 if (rev_it != reified_precedences_cache_.end()) {
1456 auto* const bool_or = working_model->add_constraints()->mutable_bool_or();
1457 bool_or->add_literals(result);
1458 bool_or->add_literals(rev_it->second);
1467 reified_precedences_cache_.clear();
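The constraints built above give the cached literal its intended meaning: result enforces time_i <= time_j, while NegatedRef(result) together with active_i and active_j enforces time_i > time_j, and the extra bool_or links the literal with the reverse precedence when both are cached. A hypothetical call site for the public entry point (the wrapper name is ours):

// Literal that is true when the event at time_i comes before the one at
// time_j, assuming both activity literals hold.
int PrecedenceLiteral(PresolveContext* context, int time_i, int time_j,
                      int active_i, int active_j) {
  return context->GetOrCreateReifiedPrecedenceLiteral(time_i, time_j, active_i,
                                                      active_j);
}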
#define DCHECK_NE(val1, val2)
#define CHECK_EQ(val1, val2)
#define CHECK_NE(val1, val2)
#define DCHECK_LT(val1, val2)
#define DCHECK(condition)
#define CHECK_LE(val1, val2)
#define DCHECK_EQ(val1, val2)
#define VLOG(verboselevel)
void IgnoreFromClassSize(int x)
bool TryAdd(int x, int y, int64 coeff, int64 offset)
int ClassSize(int x) const
Relation Get(int x) const
We call domain any subset of Int64 = [kint64min, kint64max].
static Domain AllValues()
Returns the full domain Int64.
Domain Negation() const
Returns {x ∈ Int64, ∃ e ∈ D, x = -e}.
bool IsIncludedIn(const Domain &domain) const
Returns true iff D is included in the given domain.
Domain MultiplicationBy(int64 coeff, bool *exact=nullptr) const
Returns {x ∈ Int64, ∃ e ∈ D, x = e * coeff}.
Domain InverseMultiplicationBy(const int64 coeff) const
Returns {x ∈ Int64, ∃ e ∈ D, x * coeff = e}.
Domain AdditionWith(const Domain &domain) const
Returns {x ∈ Int64, ∃ a ∈ D, ∃ b ∈ domain, x = a + b}.
int64 Min() const
Returns the min value of the domain.
int64 Max() const
Returns the max value of the domain.
Domain IntersectionWith(const Domain &domain) const
Returns the intersection of D and domain.
bool IsEmpty() const
Returns true if this is the empty set.
Domain RelaxIfTooComplex() const
If NumIntervals() is too large, this return a superset of the domain.
Domain SimplifyUsingImpliedDomain(const Domain &implied_domain) const
Advanced usage.
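A minimal sketch exercising the Domain operations documented above (variable names are ours); it assumes only ortools/util/sorted_interval_list.h:

#include <cstdint>

#include "ortools/util/sorted_interval_list.h"

void DomainExamples() {
  using operations_research::Domain;
  const Domain d(0, 10);                             // {0, 1, ..., 10}
  const Domain scaled = d.MultiplicationBy(3);       // {0, 3, 6, ..., 30}
  const Domain shifted = d.AdditionWith(Domain(5));  // {5, 6, ..., 15}
  const Domain both = scaled.IntersectionWith(shifted);
  if (!both.IsEmpty()) {
    const int64_t lo = both.Min();  // Extremes of the possibly
    const int64_t hi = both.Max();  // non-contiguous intersection.
    (void)lo;
    (void)hi;
  }
  // Negation() mirrors the set around zero: {-10, ..., 0} here.
  const bool ok = d.Negation().IsIncludedIn(Domain(-10, 10));
  (void)ok;
}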
static int64 GCD64(int64 x, int64 y)
void Set(IntegerType index)
void Resize(IntegerType size)
bool StoreAbsRelation(int target_ref, int ref)
SparseBitset< int64 > modified_domains
bool ConstraintVariableUsageIsConsistent()
int64 num_presolve_operations
void AddImplication(int a, int b)
bool VariableIsOnlyUsedInEncoding(int ref) const
ABSL_MUST_USE_RESULT bool IntersectDomainWith(int ref, const Domain &domain, bool *domain_modified=nullptr)
void InsertVarValueEncoding(int literal, int ref, int64 value)
std::vector< absl::flat_hash_set< int > > var_to_lb_only_constraints
bool ConstraintVariableGraphIsUpToDate() const
int GetOrCreateVarValueEncoding(int ref, int64 value)
bool DomainContains(int ref, int64 value) const
bool StoreLiteralImpliesVarNEqValue(int literal, int var, int64 value)
bool VariableWithCostIsUniqueAndRemovable(int ref) const
void WriteObjectiveToProto() const
int GetLiteralRepresentative(int ref) const
ABSL_MUST_USE_RESULT bool SetLiteralToTrue(int lit)
bool StoreLiteralImpliesVarEqValue(int literal, int var, int64 value)
int64 MaxOf(int ref) const
std::vector< absl::flat_hash_set< int > > var_to_ub_only_constraints
void UpdateNewConstraintsVariableUsage()
bool VariableIsUniqueAndRemovable(int ref) const
void RemoveVariableFromAffineRelation(int var)
ABSL_MUST_USE_RESULT bool NotifyThatModelIsUnsat(const std::string &message="")
bool PropagateAffineRelation(int ref)
bool HasVarValueEncoding(int ref, int64 value, int *literal=nullptr)
Domain DomainOf(int ref) const
void InitializeNewDomains()
int GetVariableRepresentative(int ref) const
std::string AffineRelationDebugString(int ref) const
int NewIntVar(const Domain &domain)
void MarkVariableAsRemoved(int ref)
bool DomainIsEmpty(int ref) const
void CanonicalizeDomainOfSizeTwo(int var)
bool LiteralIsTrue(int lit) const
absl::flat_hash_map< std::string, int > stats_by_rule_name
void StoreBooleanEqualityRelation(int ref_a, int ref_b)
CpModelProto * working_model
bool SubstituteVariableInObjective(int var_in_equality, int64 coeff_in_equality, const ConstraintProto &equality, std::vector< int > *new_vars_in_objective=nullptr)
int GetOrCreateConstantVar(int64 cst)
bool LiteralIsFalse(int lit) const
void UpdateRuleStats(const std::string &name, int num_times=1)
ABSL_MUST_USE_RESULT bool CanonicalizeObjective()
void RemoveAllVariablesFromAffineRelationConstraint()
AffineRelation::Relation GetAffineRelation(int ref) const
bool VariableIsNotUsedAnymore(int ref) const
void UpdateConstraintVariableUsage(int c)
bool keep_all_feasible_solutions
bool IsFixed(int ref) const
ABSL_MUST_USE_RESULT bool SetLiteralToFalse(int lit)
std::string RefDebugString(int ref) const
int64 MinOf(int ref) const
int GetOrCreateReifiedPrecedenceLiteral(int time_i, int time_j, int active_i, int active_j)
void ClearPrecedenceCache()
void ReadObjectiveFromProto()
void AddImplyInDomain(int b, int x, const Domain &domain)
bool CanBeUsedAsLiteral(int ref) const
bool VariableWasRemoved(int ref) const
void ExploitFixedDomain(int var)
bool GetAbsRelation(int target_ref, int *ref)
bool StoreAffineRelation(int ref_x, int ref_y, int64 coeff, int64 offset)
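A hedged usage sketch built only from the members listed above (the function name is ours): create a fresh integer variable and channel a Boolean literal to a restriction of its domain.

// literal => x <= 10, for a new variable x with domain [0, 100].
int NewVarWithHalfReifiedCap(PresolveContext* context, int literal) {
  const int x = context->NewIntVar(Domain(0, 100));
  context->AddImplyInDomain(literal, x, Domain(0, 10));
  return x;
}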
int Get(PresolveContext *context) const
int Get(PresolveContext *context) const
static const int64 kint64max
static const int64 kint64min
const Collection::value_type::second_type & FindOrDie(const Collection &collection, const typename Collection::value_type::first_type &key)
bool ContainsKey(const Collection &collection, const Key &key)
std::vector< int > UsedVariables(const ConstraintProto &ct)
std::vector< int > UsedIntervals(const ConstraintProto &ct)
Domain ReadDomainFromProto(const ProtoWithDomain &proto)
constexpr int kAffineRelationConstraint
constexpr int kObjectiveConstraint
bool RefIsPositive(int ref)
void FillDomainInProto(const Domain &domain, ProtoWithDomain *proto)
int64 CapAdd(int64 x, int64 y)
int64 CapProd(int64 x, int64 y)
std::string ProtobufDebugString(const P &message)
#define VLOG_IS_ON(verboselevel)