@@ -49,6 +49,7 @@ struct pdlp_warm_start_data_t {
f_t last_restart_kkt_score_{-1};
f_t sum_solution_weight_{-1};
i_t iterations_since_last_restart_{-1};
bool solved_by_pdlp_{false};

// Constructor when building it in the solution object
pdlp_warm_start_data_t(rmm::device_uvector<f_t>& current_primal_solution,
@@ -67,7 +68,8 @@ struct pdlp_warm_start_data_t {
f_t last_candidate_kkt_score,
f_t last_restart_kkt_score,
f_t sum_solution_weight,
i_t iterations_since_last_restart);
i_t iterations_since_last_restart,
bool solved_by_pdlp);

// Empty constructor
pdlp_warm_start_data_t();
@@ -104,6 +106,7 @@ struct pdlp_warm_start_data_view_t {
f_t last_restart_kkt_score_{-1};
f_t sum_solution_weight_{-1};
i_t iterations_since_last_restart_{-1};
bool solved_by_pdlp_{false};
};

} // namespace cuopt::linear_programming
17 changes: 15 additions & 2 deletions cpp/include/cuopt/linear_programming/pdlp/solver_settings.hpp
@@ -160,10 +160,23 @@ class pdlp_solver_settings_t {
f_t last_candidate_kkt_score_,
f_t last_restart_kkt_score_,
f_t sum_solution_weight_,
i_t iterations_since_last_restart_);
i_t iterations_since_last_restart_,
bool solved_by_pdlp_);

/**
* @brief Check if the pdlp warm start data is set
*
* @return true if the pdlp warm start data is set, false otherwise
*/
bool has_pdlp_warm_start_data() const;

/**
* @brief Get the pdlp warm start data
*
* @note PDLP warm start data is an optional field, it is not set by default.
* You need to make sure that the warm start data is set before calling this function.
* You can check if the warm start data is set by calling has_pdlp_warm_start_data().
*
* @return pdlp warm start data
*/
const pdlp_warm_start_data_t<i_t, f_t>& get_pdlp_warm_start_data() const noexcept;
@@ -216,7 +229,7 @@ class pdlp_solver_settings_t {
/** Initial dual solution */
std::shared_ptr<rmm::device_uvector<f_t>> initial_dual_solution_;
// For the C++ interface
pdlp_warm_start_data_t<i_t, f_t> pdlp_warm_start_data_;
std::optional<pdlp_warm_start_data_t<i_t, f_t>> pdlp_warm_start_data_;
// For the Cython interface
pdlp_warm_start_data_view_t<i_t, f_t> pdlp_warm_start_data_view_;

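For orientation, a minimal usage sketch of the optional warm-start accessors introduced above. The `int`/`double` template arguments, the include paths, and the helper function name are illustrative assumptions, not part of this diff.

```cpp
#include <iostream>

#include <cuopt/linear_programming/pdlp/solver_settings.hpp>

// Hypothetical helper: only read the warm start data after confirming it is set.
void inspect_warm_start(
  const cuopt::linear_programming::pdlp_solver_settings_t<int, double>& settings)
{
  if (settings.has_pdlp_warm_start_data()) {
    // Safe to call the getter: a previous PDLP solve populated the optional field.
    const auto& ws = settings.get_pdlp_warm_start_data();
    std::cout << "PDLP iterations carried over: " << ws.total_pdlp_iterations_ << "\n";
  } else {
    // Not set by default; per the documentation above, the getter must not be called here.
    std::cout << "no PDLP warm start data set\n";
  }
}
```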
33 changes: 32 additions & 1 deletion cpp/include/cuopt/linear_programming/solve.hpp
@@ -29,6 +29,11 @@

namespace cuopt::linear_programming {

namespace detail {
template <typename i_t, typename f_t>
class problem_t;
}

/**
* @brief Linear programming solve function.
* @note Both primal and dual solutions are zero-initialized. For custom initialization, see
@@ -54,7 +59,33 @@ optimization_problem_solution_t<i_t, f_t> solve_lp(
bool use_pdlp_solver_mode = true);

/**
* @brief Linear programming solve function.
* @brief Linear programming solve function. Used in the context of a MIP when the input is a
* detail::problem_t.
* @note Both primal and dual solutions are zero-initialized. For custom initialization, see
* op_problem.initial_primal/dual_solution
*
* @tparam i_t Data type of indexes
* @tparam f_t Data type of the variables and their weights in the equations
*
* @param[in] op_problem An optimization_problem_t<i_t, f_t> object with a
[Review comment on line 70 — syntax] Parameter documentation refers to 'op_problem' but the actual parameter is 'problem' - should be consistent.
* representation of a linear program
* @param[in] settings A pdlp_solver_settings_t<i_t, f_t> object with the settings for the PDLP
* solver.
* @param[in] use_pdlp_solver_modes If true, the PDLP hyperparameters coming from the
* pdlp_solver_mode are used (instead of the ones comming from a potential hyper-params file).
[Review comment on line 75 — syntax] Typo: 'comming' should be 'coming'.

* @param[in] inside_mip If true, the problem is being solved in the context of a MIP.
* @return optimization_problem_solution_t<i_t, f_t> owning container for the solver solution
*/
template <typename i_t, typename f_t>
optimization_problem_solution_t<i_t, f_t> solve_lp(
[Review thread on this overload]
Contributor: If this is used purely in a MIP context, it is cleaner to rename this to solve_relaxed_lp or solve_lp_relaxation.
Contributor (author): I agree that for now this is just used in the context of MIP, but some people might want to solve an LP using the detail::problem representation instead of the optimization_problem one. What do you think?
Contributor (author): @akifcorduk, do you have an opinion there?
Contributor: I think renaming it kind of pins it to MIP use-cases; I agree with Nicolas that it is just another LP interface that accepts problem_t. In the MIP context, the naming doesn't matter too much because we have other functions like run_lp_with_vars_fixed.

detail::problem_t<i_t, f_t>& problem,
pdlp_solver_settings_t<i_t, f_t> const& settings = pdlp_solver_settings_t<i_t, f_t>{},
bool use_pdlp_solver_mode = true,
bool inside_mip = false);

/**
* @brief Linear programming solve function. This is a wrapper around the solve_lp function taking a
* detail::problem_t as input.
* @note Both primal and dual solutions are zero-initialized. For custom initialization, see
* op_problem.initial_primal/dual_solution
*
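A hedged usage sketch of the detail::problem_t overload added above. How a detail::problem_t instance is built is not shown in this diff, and the `int`/`double` template arguments and wrapper name are assumptions.

```cpp
#include <cuopt/linear_programming/solve.hpp>

namespace lp = cuopt::linear_programming;

// Hypothetical wrapper: solve the LP relaxation of an already-built internal problem_t,
// in the context of a MIP.
lp::optimization_problem_solution_t<int, double> solve_relaxation(
  lp::detail::problem_t<int, double>& problem)
{
  lp::pdlp_solver_settings_t<int, double> settings{};
  // use_pdlp_solver_mode = true keeps the hyperparameters of the selected pdlp_solver_mode;
  // inside_mip = true tells the solver this LP is being solved within a MIP.
  return lp::solve_lp(problem, settings, /*use_pdlp_solver_mode=*/true, /*inside_mip=*/true);
}
```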
3 changes: 2 additions & 1 deletion cpp/include/cuopt/linear_programming/solver_settings.hpp
@@ -82,7 +82,8 @@ class solver_settings_t {
f_t last_candidate_kkt_score_,
f_t last_restart_kkt_score_,
f_t sum_solution_weight_,
i_t iterations_since_last_restart_);
i_t iterations_since_last_restart_,
bool solved_by_pdlp_);

const rmm::device_uvector<f_t>& get_initial_pdlp_primal_solution() const;
const rmm::device_uvector<f_t>& get_initial_pdlp_dual_solution() const;
@@ -70,6 +70,12 @@ struct linear_programming_ret_t {
double gap_;
int nb_iterations_;
double solve_time_;
// This parameter is stored twice, in both the C++ and the Python layer: inside the solution
// object and in the warm start data. It is required in the solution object to know whether the
// problem was solved by PDLP or Dual Simplex, and hence whether the warm start data was
// populated. It is required in the warm start data because only that object, not the solution
// object, is passed to the solver settings. In this adapter between the C++ and the Python
// layer, we can carry the information through a single field.
bool solved_by_pdlp_;
};

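To make the comment above concrete, a purely illustrative sketch of the single-field carry; the stand-in struct below mirrors only the members shown in this diff, and the function name is hypothetical.

```cpp
// Stand-in declaration mirroring only the fields visible in this diff; the real
// linear_programming_ret_t lives in the cuOpt C++/Python adapter layer.
struct linear_programming_ret_t {
  double gap_;
  int nb_iterations_;
  double solve_time_;
  bool solved_by_pdlp_;
};

// Hypothetical adapter step: the flag is written once, and the Python layer reads it both
// for the solution object (which solver produced the result) and for the warm start data
// (whether PDLP actually populated it).
void propagate_solver_flag(linear_programming_ret_t& ret, bool solved_by_pdlp)
{
  ret.solved_by_pdlp_ = solved_by_pdlp;  // single field carries the information across layers
}
```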
129 changes: 73 additions & 56 deletions cpp/src/linear_programming/pdlp.cu
@@ -128,56 +128,65 @@ pdlp_solver_t<i_t, f_t>::pdlp_solver_t(problem_t<i_t, f_t>& op_problem,
set_initial_dual_solution(dual_sol);
}

if (settings.get_pdlp_warm_start_data().last_restart_duality_gap_dual_solution_.size() != 0) {
set_initial_primal_solution(settings.get_pdlp_warm_start_data().current_primal_solution_);
set_initial_dual_solution(settings.get_pdlp_warm_start_data().current_dual_solution_);
initial_step_size_ = settings.get_pdlp_warm_start_data().initial_step_size_;
initial_primal_weight_ = settings.get_pdlp_warm_start_data().initial_primal_weight_;
total_pdlp_iterations_ = settings.get_pdlp_warm_start_data().total_pdlp_iterations_;
pdhg_solver_.total_pdhg_iterations_ =
settings.get_pdlp_warm_start_data().total_pdhg_iterations_;
pdhg_solver_.get_d_total_pdhg_iterations().set_value_async(
settings.get_pdlp_warm_start_data().total_pdhg_iterations_, stream_view_);
restart_strategy_.last_candidate_kkt_score =
settings.get_pdlp_warm_start_data().last_candidate_kkt_score_;
restart_strategy_.last_restart_kkt_score =
settings.get_pdlp_warm_start_data().last_restart_kkt_score_;
raft::copy(restart_strategy_.weighted_average_solution_.sum_primal_solutions_.data(),
settings.get_pdlp_warm_start_data().sum_primal_solutions_.data(),
settings.get_pdlp_warm_start_data().sum_primal_solutions_.size(),
stream_view_);
raft::copy(restart_strategy_.weighted_average_solution_.sum_dual_solutions_.data(),
settings.get_pdlp_warm_start_data().sum_dual_solutions_.data(),
settings.get_pdlp_warm_start_data().sum_dual_solutions_.size(),
stream_view_);
raft::copy(unscaled_primal_avg_solution_.data(),
settings.get_pdlp_warm_start_data().initial_primal_average_.data(),
settings.get_pdlp_warm_start_data().initial_primal_average_.size(),
stream_view_);
raft::copy(unscaled_dual_avg_solution_.data(),
settings.get_pdlp_warm_start_data().initial_dual_average_.data(),
settings.get_pdlp_warm_start_data().initial_dual_average_.size(),
stream_view_);
raft::copy(pdhg_solver_.get_saddle_point_state().get_current_AtY().data(),
settings.get_pdlp_warm_start_data().current_ATY_.data(),
settings.get_pdlp_warm_start_data().current_ATY_.size(),
stream_view_);
raft::copy(restart_strategy_.last_restart_duality_gap_.primal_solution_.data(),
settings.get_pdlp_warm_start_data().last_restart_duality_gap_primal_solution_.data(),
settings.get_pdlp_warm_start_data().last_restart_duality_gap_primal_solution_.size(),
stream_view_);
raft::copy(restart_strategy_.last_restart_duality_gap_.dual_solution_.data(),
settings.get_pdlp_warm_start_data().last_restart_duality_gap_dual_solution_.data(),
settings.get_pdlp_warm_start_data().last_restart_duality_gap_dual_solution_.size(),
stream_view_);
if (settings.has_pdlp_warm_start_data()) {
const auto& warm_start_data = settings.get_pdlp_warm_start_data();
if (!warm_start_data.solved_by_pdlp_) {
[Review comment on line 133 — style] Should verify the logic: warm start data from non-PDLP solvers might still be partially useful for initialization.

CUOPT_LOG_DEBUG(
"Warm start data coming from a solution which was not solved by PDLP, skipping warm start");
} else if (pdlp_hyper_params::restart_strategy ==
static_cast<int>(
pdlp_restart_strategy_t<i_t, f_t>::restart_strategy_t::TRUST_REGION_RESTART)) {
CUOPT_LOG_DEBUG(
"Trying to use warm start with trust region restart (neither Stable nor Fast1), skipping "
"warm start");
} else {
set_initial_primal_solution(warm_start_data.current_primal_solution_);
set_initial_dual_solution(warm_start_data.current_dual_solution_);
initial_step_size_ = warm_start_data.initial_step_size_;
initial_primal_weight_ = warm_start_data.initial_primal_weight_;
total_pdlp_iterations_ = warm_start_data.total_pdlp_iterations_;
pdhg_solver_.total_pdhg_iterations_ = warm_start_data.total_pdhg_iterations_;
pdhg_solver_.get_d_total_pdhg_iterations().set_value_async(
warm_start_data.total_pdhg_iterations_, stream_view_);
restart_strategy_.last_candidate_kkt_score = warm_start_data.last_candidate_kkt_score_;
restart_strategy_.last_restart_kkt_score = warm_start_data.last_restart_kkt_score_;
raft::copy(restart_strategy_.weighted_average_solution_.sum_primal_solutions_.data(),
warm_start_data.sum_primal_solutions_.data(),
warm_start_data.sum_primal_solutions_.size(),
stream_view_);
raft::copy(restart_strategy_.weighted_average_solution_.sum_dual_solutions_.data(),
warm_start_data.sum_dual_solutions_.data(),
warm_start_data.sum_dual_solutions_.size(),
stream_view_);
raft::copy(unscaled_primal_avg_solution_.data(),
warm_start_data.initial_primal_average_.data(),
warm_start_data.initial_primal_average_.size(),
stream_view_);
raft::copy(unscaled_dual_avg_solution_.data(),
warm_start_data.initial_dual_average_.data(),
warm_start_data.initial_dual_average_.size(),
stream_view_);
raft::copy(pdhg_solver_.get_saddle_point_state().get_current_AtY().data(),
warm_start_data.current_ATY_.data(),
warm_start_data.current_ATY_.size(),
stream_view_);
raft::copy(restart_strategy_.last_restart_duality_gap_.primal_solution_.data(),
warm_start_data.last_restart_duality_gap_primal_solution_.data(),
warm_start_data.last_restart_duality_gap_primal_solution_.size(),
stream_view_);
raft::copy(restart_strategy_.last_restart_duality_gap_.dual_solution_.data(),
warm_start_data.last_restart_duality_gap_dual_solution_.data(),
warm_start_data.last_restart_duality_gap_dual_solution_.size(),
stream_view_);

const auto value = settings.get_pdlp_warm_start_data().sum_solution_weight_;
restart_strategy_.weighted_average_solution_.sum_primal_solution_weights_.set_value_async(
value, stream_view_);
restart_strategy_.weighted_average_solution_.sum_dual_solution_weights_.set_value_async(
value, stream_view_);
restart_strategy_.weighted_average_solution_.iterations_since_last_restart_ =
settings.get_pdlp_warm_start_data().iterations_since_last_restart_;
const auto value = warm_start_data.sum_solution_weight_;
restart_strategy_.weighted_average_solution_.sum_primal_solution_weights_.set_value_async(
value, stream_view_);
restart_strategy_.weighted_average_solution_.sum_dual_solution_weights_.set_value_async(
value, stream_view_);
restart_strategy_.weighted_average_solution_.iterations_since_last_restart_ =
warm_start_data.iterations_since_last_restart_;
}
}
// Checks performed below are assert only
best_primal_quality_so_far_.primal_objective = (op_problem_scaled_.maximize)
@@ -323,7 +332,7 @@ std::optional<optimization_problem_solution_t<i_t, f_t>> pdlp_solver_t<i_t, f_t>
pdhg_solver_,
pdhg_solver_.get_primal_solution(),
pdhg_solver_.get_dual_solution(),
get_filled_warmed_start_data(),
get_filled_warmed_start_data(false),
[Review comment on line 335 — logic] Missing the `true` parameter that was added in other calls to `get_filled_warmed_start_data`.
pdlp_termination_status_t::ConcurrentLimit);
}

@@ -466,7 +475,8 @@ void pdlp_solver_t<i_t, f_t>::record_best_primal_so_far(
}

template <typename i_t, typename f_t>
pdlp_warm_start_data_t<i_t, f_t> pdlp_solver_t<i_t, f_t>::get_filled_warmed_start_data()
pdlp_warm_start_data_t<i_t, f_t> pdlp_solver_t<i_t, f_t>::get_filled_warmed_start_data(
bool solved_by_pdlp)
{
return pdlp_warm_start_data_t<i_t, f_t>(
pdhg_solver_.get_primal_solution(),
@@ -485,7 +495,8 @@ pdlp_warm_start_data_t<i_t, f_t> pdlp_solver_t<i_t, f_t>::get_filled_warmed_star
restart_strategy_.last_candidate_kkt_score,
restart_strategy_.last_restart_kkt_score,
restart_strategy_.weighted_average_solution_.sum_primal_solution_weights_.value(stream_view_),
restart_strategy_.weighted_average_solution_.iterations_since_last_restart_);
restart_strategy_.weighted_average_solution_.iterations_since_last_restart_,
solved_by_pdlp);
}

template <typename i_t, typename f_t>
@@ -545,9 +556,14 @@ std::optional<optimization_problem_solution_t<i_t, f_t>> pdlp_solver_t<i_t, f_t>
#ifdef PDLP_VERBOSE_MODE
RAFT_CUDA_TRY(cudaDeviceSynchronize());
printf("Termination criteria current\n");
current_termination_strategy_.print_termination_criteria();
const auto current_time = std::chrono::high_resolution_clock::now();
const f_t elapsed =
std::chrono::duration_cast<std::chrono::milliseconds>(current_time - start_time).count() /
1000.0;
current_termination_strategy_.print_termination_criteria(total_pdlp_iterations_, elapsed);
RAFT_CUDA_TRY(cudaDeviceSynchronize());
#endif

pdlp_termination_status_t termination_current =
current_termination_strategy_.evaluate_termination_criteria(
pdhg_solver_,
@@ -559,7 +575,7 @@ std::optional<optimization_problem_solution_t<i_t, f_t>> pdlp_solver_t<i_t, f_t>
#ifdef PDLP_VERBOSE_MODE
RAFT_CUDA_TRY(cudaDeviceSynchronize());
std::cout << "Termination criteria average:" << std::endl;
average_termination_strategy_.print_termination_criteria();
average_termination_strategy_.print_termination_criteria(total_pdlp_iterations_, elapsed);
RAFT_CUDA_TRY(cudaDeviceSynchronize());
#endif

@@ -984,6 +1000,8 @@ template <typename i_t, typename f_t>
optimization_problem_solution_t<i_t, f_t> pdlp_solver_t<i_t, f_t>::run_solver(
const std::chrono::high_resolution_clock::time_point& start_time)
{
raft::common::nvtx::range fun_scope("Run PDLP Solver");

bool verbose;
#ifdef PDLP_VERBOSE_MODE
verbose = true;
@@ -1071,8 +1089,7 @@ optimization_problem_solution_t<i_t, f_t> pdlp_solver_t<i_t, f_t>::run_solver(
raft::print_device_vector("Initial dual_step_size", dual_step_size_.data(), 1, std::cout);
}

bool warm_start_was_given =
settings_.get_pdlp_warm_start_data().last_restart_duality_gap_dual_solution_.size() != 0;
bool warm_start_was_given = settings_.has_pdlp_warm_start_data();

if (!inside_mip_) {
CUOPT_LOG_INFO(
2 changes: 1 addition & 1 deletion cpp/src/linear_programming/pdlp.cuh
@@ -171,7 +171,7 @@ class pdlp_solver_t {
// Intentionally take a copy to avoid an unintentional modification in the calling context
const pdlp_solver_settings_t<i_t, f_t> settings_;

pdlp_warm_start_data_t<i_t, f_t> get_filled_warmed_start_data();
pdlp_warm_start_data_t<i_t, f_t> get_filled_warmed_start_data(bool solved_by_pdlp = true);

// Initial scaling strategy
detail::pdlp_initial_scaling_strategy_t<i_t, f_t> initial_scaling_strategy_;
12 changes: 8 additions & 4 deletions cpp/src/linear_programming/pdlp_warm_start_data.cu
@@ -46,7 +46,8 @@ pdlp_warm_start_data_t<i_t, f_t>::pdlp_warm_start_data_t(
f_t last_candidate_kkt_score,
f_t last_restart_kkt_score,
f_t sum_solution_weight,
i_t iterations_since_last_restart)
i_t iterations_since_last_restart,
bool solved_by_pdlp)
: // When initially creating this object, we can move neither the primal/dual solution nor
// the average, since they might be used as a solution by the solution object; they have to be
// copied
@@ -66,7 +67,8 @@ pdlp_warm_start_data_t<i_t, f_t>::pdlp_warm_start_data_t(
last_candidate_kkt_score_(last_candidate_kkt_score),
last_restart_kkt_score_(last_restart_kkt_score),
sum_solution_weight_(sum_solution_weight),
iterations_since_last_restart_(iterations_since_last_restart)
iterations_since_last_restart_(iterations_since_last_restart),
solved_by_pdlp_(solved_by_pdlp)
{
check_sizes();
}
@@ -107,7 +109,8 @@ pdlp_warm_start_data_t<i_t, f_t>::pdlp_warm_start_data_t(
last_candidate_kkt_score_(other.last_candidate_kkt_score_),
last_restart_kkt_score_(other.last_restart_kkt_score_),
sum_solution_weight_(other.sum_solution_weight_),
iterations_since_last_restart_(other.iterations_since_last_restart_)
iterations_since_last_restart_(other.iterations_since_last_restart_),
solved_by_pdlp_(other.solved_by_pdlp_)
{
raft::copy(current_primal_solution_.data(),
other.current_primal_solution_.data(),
@@ -168,7 +171,8 @@ pdlp_warm_start_data_t<i_t, f_t>::pdlp_warm_start_data_t(const pdlp_warm_start_d
last_candidate_kkt_score_(other.last_candidate_kkt_score_),
last_restart_kkt_score_(other.last_restart_kkt_score_),
sum_solution_weight_(other.sum_solution_weight_),
iterations_since_last_restart_(other.iterations_since_last_restart_)
iterations_since_last_restart_(other.iterations_since_last_restart_),
solved_by_pdlp_(other.solved_by_pdlp_)
{
check_sizes();
}