Skip to content

Commit

Permalink
polish example
Browse files Browse the repository at this point in the history
  • Loading branch information
lperron committed Dec 6, 2024
1 parent 1ff8120 commit 3cfe81d
Show file tree
Hide file tree
Showing 2 changed files with 37 additions and 48 deletions.
42 changes: 18 additions & 24 deletions ortools/sat/docs/scheduling.md
Original file line number Diff line number Diff line change
Expand Up @@ -2676,7 +2676,6 @@ def sequence_constraints_with_circuit(
all_tasks = range(num_tasks)

arcs: List[cp_model.ArcT] = []
penalty_terms = []
for i in all_tasks:
# if node i is first.
start_lit = model.new_bool_var(f"start_{i}")
Expand All @@ -2696,17 +2695,6 @@ def sequence_constraints_with_circuit(
type_length_min = sequence_length_constraints[task_types[i]][0]
model.add(lengths[i] >= type_length_min).only_enforce_if(end_lit)

# Penalize the cumul of the last task w.r.t. the soft max
soft_max, linear_penalty, hard_max = sequence_cumul_constraints[task_types[i]]
if soft_max < hard_max:
aux = model.new_int_var(0, hard_max - soft_max, f"aux_{i}")
model.add_max_equality(aux, [0, cumuls[i] - soft_max])

excess = model.new_int_var(0, hard_max - soft_max, f"excess_{i}")
model.add(excess == aux).only_enforce_if(end_lit)
model.add(excess == 0).only_enforce_if(~end_lit)
penalty_terms.append((excess, linear_penalty))

for j in all_tasks:
if i == j:
continue
Expand Down Expand Up @@ -2746,19 +2734,25 @@ def sequence_constraints_with_circuit(
# Reset the cumul to the duration of the task.
model.add(cumuls[j] == durations[j]).only_enforce_if(lit)

# Penalize the cumul of the previous task w.r.t. the soft max
if soft_max < hard_max:
aux = model.new_int_var(0, hard_max - soft_max, f"aux_{i}")
model.add_max_equality(aux, [0, cumuls[i] - soft_max])

excess = model.new_int_var(0, hard_max - soft_max, f"excess_{i}")
model.add(excess == aux).only_enforce_if(lit)
model.add(excess == 0).only_enforce_if(~lit)
penalty_terms.append((excess, linear_penalty))

# Add the circuit constraint.
model.add_circuit(arcs)

# Create the penalty terms. We can penalize each cumul locally.
penalty_terms = []
for i in all_tasks:
# Penalize the cumul of each task w.r.t. the soft max of its type.
soft_max, linear_penalty, hard_max = sequence_cumul_constraints[task_types[i]]

# To make it separable per task, and avoid double counting, we use the
# following trick:
# reduced_excess = min(durations[i], max(0, cumul[i] - soft_max))
if soft_max < hard_max:
excess = model.new_int_var(0, hard_max - soft_max, f"excess+_{i}")
model.add_max_equality(excess, [0, cumuls[i] - soft_max])
reduced_excess = model.new_int_var(0, durations[i], f"reduced_excess_{i}")
model.add_min_equality(reduced_excess, [durations[i], excess])
penalty_terms.append((reduced_excess, linear_penalty))

return penalty_terms


Expand Down Expand Up @@ -2826,13 +2820,13 @@ def sequences_in_no_overlap_sample_sat():
lengths = []
for i in all_tasks:
max_hard_length = sequence_length_constraints[task_types[i]][1]
lengths.append(model.new_int_var(0, max_hard_length, f"length_{i}"))
lengths.append(model.new_int_var(1, max_hard_length, f"length_{i}"))

# Create cumul variables for each task.
cumuls = []
for i in all_tasks:
max_hard_cumul = sequence_cumul_constraints[task_types[i]][2]
cumuls.append(model.new_int_var(0, max_hard_cumul, f"cumul_{i}"))
cumuls.append(model.new_int_var(durations[i], max_hard_cumul, f"cumul_{i}"))

# Adds NoOverlap constraint.
model.add_no_overlap(intervals)
Expand Down
43 changes: 19 additions & 24 deletions ortools/sat/samples/sequences_in_no_overlap_sample_sat.py
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,6 @@ def sequence_constraints_with_circuit(
all_tasks = range(num_tasks)

arcs: List[cp_model.ArcT] = []
penalty_terms = []
for i in all_tasks:
# if node i is first.
start_lit = model.new_bool_var(f"start_{i}")
Expand All @@ -85,17 +84,6 @@ def sequence_constraints_with_circuit(
type_length_min = sequence_length_constraints[task_types[i]][0]
model.add(lengths[i] >= type_length_min).only_enforce_if(end_lit)

# Penalize the cumul of the last task w.r.t. the soft max
soft_max, linear_penalty, hard_max = sequence_cumul_constraints[task_types[i]]
if soft_max < hard_max:
aux = model.new_int_var(0, hard_max - soft_max, f"aux_{i}")
model.add_max_equality(aux, [0, cumuls[i] - soft_max])

excess = model.new_int_var(0, hard_max - soft_max, f"excess_{i}")
model.add(excess == aux).only_enforce_if(end_lit)
model.add(excess == 0).only_enforce_if(~end_lit)
penalty_terms.append((excess, linear_penalty))

for j in all_tasks:
if i == j:
continue
Expand Down Expand Up @@ -135,19 +123,25 @@ def sequence_constraints_with_circuit(
# Reset the cumul to the duration of the task.
model.add(cumuls[j] == durations[j]).only_enforce_if(lit)

# Penalize the cumul of the previous task w.r.t. the soft max
if soft_max < hard_max:
aux = model.new_int_var(0, hard_max - soft_max, f"aux_{i}")
model.add_max_equality(aux, [0, cumuls[i] - soft_max])

excess = model.new_int_var(0, hard_max - soft_max, f"excess_{i}")
model.add(excess == aux).only_enforce_if(lit)
model.add(excess == 0).only_enforce_if(~lit)
penalty_terms.append((excess, linear_penalty))

# Add the circuit constraint.
model.add_circuit(arcs)

# Create the penalty terms. We can penalize each cumul locally.
penalty_terms = []
for i in all_tasks:
# Penalize the cumul of each task w.r.t. the soft max of its type.
soft_max, linear_penalty, hard_max = sequence_cumul_constraints[task_types[i]]

# To make it separable per task, and avoid double counting, we use the
# following trick:
# reduced_excess = min(durations[i], max(0, cumul[i] - soft_max))
if soft_max < hard_max:
excess = model.new_int_var(0, hard_max - soft_max, f"excess+_{i}")
model.add_max_equality(excess, [0, cumuls[i] - soft_max])
reduced_excess = model.new_int_var(0, durations[i], f"reduced_excess_{i}")
model.add_min_equality(reduced_excess, [durations[i], excess])
penalty_terms.append((reduced_excess, linear_penalty))

return penalty_terms


Expand Down Expand Up @@ -215,13 +209,13 @@ def sequences_in_no_overlap_sample_sat():
lengths = []
for i in all_tasks:
max_hard_length = sequence_length_constraints[task_types[i]][1]
lengths.append(model.new_int_var(0, max_hard_length, f"length_{i}"))
lengths.append(model.new_int_var(1, max_hard_length, f"length_{i}"))

# Create cumul variables for each task.
cumuls = []
for i in all_tasks:
max_hard_cumul = sequence_cumul_constraints[task_types[i]][2]
cumuls.append(model.new_int_var(0, max_hard_cumul, f"cumul_{i}"))
cumuls.append(model.new_int_var(durations[i], max_hard_cumul, f"cumul_{i}"))

# Adds NoOverlap constraint.
model.add_no_overlap(intervals)
Expand All @@ -244,6 +238,7 @@ def sequences_in_no_overlap_sample_sat():

# Solves the model.
solver = cp_model.CpSolver()
solver.parameters.log_search_progress = True
status = solver.solve(model)

if status == cp_model.OPTIMAL or status == cp_model.FEASIBLE:
Expand Down

0 comments on commit 3cfe81d

Please sign in to comment.