22 #ifndef REGRESSORS_HPP_
23 #define REGRESSORS_HPP_
25 #include "cereal/cereal.hpp"
26 #include "superviseddescent/utils/mat_cerealisation.hpp"
28 #include "Eigen/Dense"
30 #include "opencv2/core/core.hpp"
34 namespace superviseddescent {
// NOTE(review): this extract omits the enclosing abstract base class declaration;
// the three pure-virtual members below form the regressor interface that the
// LinearRegressor further down overrides.
// Learns a model from a row-per-example data matrix and its labels; the bool
// presumably signals whether learning succeeded -- TODO confirm against full source.
57 virtual bool learn(cv::Mat data, cv::Mat labels) = 0;
// Evaluates the learned model on data against labels and returns an error
// measure (the visible override below returns a normalised L2 residual).
68 virtual double test(cv::Mat data, cv::Mat labels) = 0;
// Predicts output value(s) for one input row (the visible override below
// computes values * x).
76 virtual cv::Mat
predict(cv::Mat values) = 0;
// Fragment of Regulariser::get_matrix(cv::Mat data, int num_training_elements):
// builds a diagonal regularisation matrix lambda * I the solvers add to AtA.
// NOTE(review): the switch's case labels, braces and the return statement fall
// in lines missing from this extract.
128 switch (regularisation_type)
// One switch branch scales lambda by the norm of the data matrix, divided by
// the number of training elements -- presumably the "MatrixNorm" regularisation
// type; TODO confirm against the full source.
135 lambda = lambda *
static_cast<float>(cv::norm(data)) / static_cast<float>(num_training_elements);
// The regulariser itself: lambda on the main diagonal, zero elsewhere
// (single-channel float, same size as the data passed in).
141 cv::Mat regulariser = cv::Mat::eye(data.rows, data.cols, CV_32FC1) * lambda;
// Optionally leave the last diagonal entry unregularised -- typically the bias
// (affine offset) row of the model -- by zeroing it out.
143 if (!regularise_last_row) {
145 regulariser.at<
float>(regulariser.rows - 1, regulariser.cols - 1) = 0.0f;
// Whether the last row (bias term) is regularised as well.
153 bool regularise_last_row;
// Grant cereal access to the private serialize() member below.
155 friend class cereal::access;
// Serialises the regulariser's configuration (type, strength, bias flag).
164 template<
class Archive>
165 void serialize(Archive& ar)
167 ar(regularisation_type, lambda, regularise_last_row);
// Fragment of PartialPivLUSolver::solve(data, labels, regulariser) -- solves the
// regularised least-squares system (AtA + R) x = At * labels with a partial-pivot
// LU decomposition. NOTE(review): the function signature and the final return
// (presumably a clone of the Mat header 'x', since x_Eigen owns the data) fall
// in lines missing from this extract.
202 using RowMajorMatrixXf = Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic, Eigen::RowMajor>;
// Zero-copy views: map the OpenCV float matrices as row-major Eigen matrices.
205 Eigen::Map<RowMajorMatrixXf> A_Eigen(data.ptr<
float>(), data.rows, data.cols);
206 Eigen::Map<RowMajorMatrixXf> labels_Eigen(labels.ptr<
float>(), labels.rows, labels.cols);
// Normal-equations matrix AtA = A^T * A.
208 RowMajorMatrixXf AtA_Eigen = A_Eigen.transpose() * A_Eigen;
// Wrap AtA in a cv::Mat header (no copy) so the Regulariser can size/scale from it.
211 Mat AtA_Map(static_cast<int>(AtA_Eigen.rows()), static_cast<int>(AtA_Eigen.cols()), CV_32FC1, AtA_Eigen.data());
212 Mat regularisation_matrix = regulariser.
get_matrix(AtA_Map, data.rows);
213 Eigen::Map<RowMajorMatrixXf> reg_Eigen(regularisation_matrix.ptr<
float>(), regularisation_matrix.rows, regularisation_matrix.cols);
// Copy the regulariser's diagonal into an Eigen diagonal matrix and add it to AtA.
215 Eigen::DiagonalMatrix<float, Eigen::Dynamic> reg_Eigen_diag(regularisation_matrix.rows);
216 Eigen::VectorXf diag_vec(regularisation_matrix.rows);
217 for (
int i = 0; i < diag_vec.size(); ++i) {
218 diag_vec(i) = regularisation_matrix.at<
float>(i, i);
220 reg_Eigen_diag.diagonal() = diag_vec;
221 AtA_Eigen = AtA_Eigen + reg_Eigen_diag.toDenseMatrix();
// Solve (AtA + R) x = A^T * labels via partial-pivot LU (fast; assumes the
// regularised matrix is invertible -- no rank check here, unlike the QR solver).
224 Eigen::PartialPivLU<RowMajorMatrixXf> lu_of_AtA(AtA_Eigen);
225 RowMajorMatrixXf x_Eigen = lu_of_AtA.solve(A_Eigen.transpose() * labels_Eigen);
// cv::Mat header over the Eigen result; the (missing) return presumably clones
// it, as x_Eigen goes out of scope here -- TODO confirm.
229 Mat x(static_cast<int>(x_Eigen.rows()), static_cast<int>(x_Eigen.cols()), CV_32FC1, x_Eigen.data());
// Fragment of the QR-based solve(data, labels, regulariser) -- same regularised
// least-squares setup as the LU solver above, but decomposes with a rank-revealing
// column-pivoting Householder QR so invertibility can be diagnosed.
// NOTE(review): signature and final return are in lines missing from this extract.
267 using RowMajorMatrixXf = Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic, Eigen::RowMajor>;
// Zero-copy Eigen views over the OpenCV inputs.
269 Eigen::Map<RowMajorMatrixXf> A_Eigen(data.ptr<
float>(), data.rows, data.cols);
270 Eigen::Map<RowMajorMatrixXf> labels_Eigen(labels.ptr<
float>(), labels.rows, labels.cols);
// Normal-equations matrix AtA = A^T * A.
272 RowMajorMatrixXf AtA_Eigen = A_Eigen.transpose() * A_Eigen;
275 Mat AtA_Map(static_cast<int>(AtA_Eigen.rows()), static_cast<int>(AtA_Eigen.cols()), CV_32FC1, AtA_Eigen.data());
276 Mat regularisation_matrix = regulariser.
get_matrix(AtA_Map, data.rows);
277 Eigen::Map<RowMajorMatrixXf> reg_Eigen(regularisation_matrix.ptr<
float>(), regularisation_matrix.rows, regularisation_matrix.cols);
// Add the regulariser's diagonal onto AtA (same pattern as the LU solver).
279 Eigen::DiagonalMatrix<float, Eigen::Dynamic> reg_Eigen_diag(regularisation_matrix.rows);
280 Eigen::VectorXf diag_vec(regularisation_matrix.rows);
281 for (
int i = 0; i < diag_vec.size(); ++i) {
282 diag_vec(i) = regularisation_matrix.at<
float>(i, i);
284 reg_Eigen_diag.diagonal() = diag_vec;
285 AtA_Eigen = AtA_Eigen + reg_Eigen_diag.toDenseMatrix();
// Rank-revealing QR: warn (but continue) if the regularised AtA is singular.
288 Eigen::ColPivHouseholderQR<RowMajorMatrixXf> qr_of_AtA(AtA_Eigen);
289 auto rankOfAtA = qr_of_AtA.rank();
290 if (!qr_of_AtA.isInvertible()) {
292 std::cout <<
"The regularised AtA is not invertible. We continued learning, but Eigen may return garbage (their docu is not very specific). (The rank is " << std::to_string(rankOfAtA) <<
", full rank would be " << std::to_string(AtA_Eigen.rows()) <<
"). Increase lambda." << std::endl;
// Explicitly invert via the QR decomposition, then form x = AtA^-1 * A^T * labels.
// (More expensive than the LU .solve() path, but gives the rank diagnostics above.)
294 RowMajorMatrixXf AtAInv_Eigen = qr_of_AtA.inverse();
297 RowMajorMatrixXf x_Eigen = AtAInv_Eigen * A_Eigen.transpose() * labels_Eigen;
// cv::Mat header over the Eigen result; the (missing) return presumably clones
// it before x_Eigen goes out of scope -- TODO confirm.
300 Mat x(static_cast<int>(x_Eigen.rows()), static_cast<int>(x_Eigen.cols()), CV_32FC1, x_Eigen.data());
// Fragments of the LinearRegressor class template: a linear model x learned by
// a pluggable solver (defaults to the LU-based solver above).
// NOTE(review): class header, member declarations and several braces/returns
// fall in lines missing from this extract.
318 template<
class Solver = PartialPivLUSolver>
// learn(): delegates to the solver to fit the linear model from data/labels.
345 bool learn(cv::Mat data, cv::Mat labels)
override
347 cv::Mat
x = solver.solve(data, labels, regulariser);
// test(): predicts each row of data and returns the normalised residual
// ||predictions - labels|| / ||labels|| (L2 norms).
361 double test(cv::Mat data, cv::Mat labels)
override
// NOTE(review): the declaration of 'predictions' (presumably an empty cv::Mat
// that rows are appended to) is in a missing line -- TODO confirm.
364 for (
int i = 0; i < data.rows; ++i) {
365 cv::Mat prediction =
predict(data.row(i));
366 predictions.push_back(prediction);
368 return cv::norm(predictions, labels, cv::NORM_L2) / cv::norm(labels, cv::NORM_L2);
// predict(): applies the learned linear model, prediction = values * x.
379 cv::Mat prediction = values *
x;
// Grant cereal access to the private serialize() member below (its body is in
// lines missing from this extract).
389 friend class cereal::access;
395 template<
class Archive>
396 void serialize(Archive& ar)
cv::Mat predict(cv::Mat values) override
Definition: regressors.hpp:377
double test(cv::Mat data, cv::Mat labels) override
Definition: regressors.hpp:361
A suitable default for param, as suggested by the SDM authors, is 0.5.
virtual double test(cv::Mat data, cv::Mat labels)=0
Definition: regressors.hpp:180
cv::Mat x
The linear model we learn ( ). TODO: Make private member variable.
Definition: regressors.hpp:381
Definition: regressors.hpp:43
cv::Mat solve(cv::Mat data, cv::Mat labels, Regulariser regulariser)
Definition: regressors.hpp:264
Definition: regressors.hpp:319
cv::Mat get_matrix(cv::Mat data, int num_training_elements)
Definition: regressors.hpp:126
virtual bool learn(cv::Mat data, cv::Mat labels)=0
Definition: regressors.hpp:87
Definition: regressors.hpp:245
RegularisationType
Definition: regressors.hpp:93
Regulariser(RegularisationType regularisation_type=RegularisationType::Manual, float param=0.0f, bool regularise_last_row=true)
Definition: regressors.hpp:113
LinearRegressor(Regulariser regulariser=Regulariser())
Definition: regressors.hpp:328
cv::Mat solve(cv::Mat data, cv::Mat labels, Regulariser regulariser)
Definition: regressors.hpp:199
Use the given param value as lambda.
virtual cv::Mat predict(cv::Mat values)=0
bool learn(cv::Mat data, cv::Mat labels) override
Definition: regressors.hpp:345