My solution was to retrain in C++, because I could not find a good way to save the model directly. Here is my code. You will need to adapt it and clean it up a bit. The biggest change you will have to make is to not hard-code the svm_parameter values like I did. You will also have to replace FilePath with std::string. I copied, pasted and made small changes here on SO, so the formatting won't be ideal:
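As a suggestion for that "don't hard-code" change (this helper is my addition, not part of the original code): the parameter values currently set inline in the SupportVectorRegressionModel constructor below could be built outside the class and passed in, e.g. through a factory function like this, with the constructor taking a const svm_parameter& instead of filling param_ itself. The values are the same ones used further down; NU_SVR and RBF are the libsvm enum constants corresponding to the hard-coded 4 and 2.

#include "svm.h" // libsvm

// Hypothetical helper: builds the svm_parameter outside the model class so the
// values are not hard-coded in the constructor. Adjust and pass the struct in.
inline svm_parameter MakeDefaultSvrParameters(size_t numFeatures)
{
    svm_parameter param{};          // zero-initialize all fields first
    param.svm_type = NU_SVR;        // same as the hard-coded 4 below
    param.kernel_type = RBF;        // same as the hard-coded 2 below
    param.C = 0.4;
    param.nu = 0.6;
    param.gamma = 1.0 / (double)numFeatures;
    param.coef0 = 0;
    param.cache_size = 100;         // in MB
    param.degree = 3;
    param.eps = 1e-3;
    param.p = 0.1;
    param.shrinking = 1;
    param.probability = 0;
    param.nr_weight = 0;
    param.weight_label = NULL;
    param.weight = NULL;
    return param;
}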
Used as follows:
auto targetsPath = FilePath("targets.txt");
auto observationsPath = FilePath("observations.txt");
auto targetsMat = MatlabMatrixFileReader::Read(targetsPath, ',');
auto observationsMat = MatlabMatrixFileReader::Read(observationsPath, ',');
auto v = MiscVector::ConvertVecOfVecToVec(targetsMat);
auto model = SupportVectorRegressionModel{ observationsMat, v };
std::vector<double> observation{ {
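The snippet above gets cut off while filling the observation vector. A minimal sketch of how the rest of the call site presumably looks; the feature values here are placeholders, not from the original post:

// hypothetical continuation: fill one observation and predict
std::vector<double> observation{ 0.12, 3.4, 5.6 }; // placeholder feature values
double prediction = model.Predict(observation);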
miscvector.h
static vector<double> ConvertVecOfVecToVec(const vector<vector<double>> &mat)
{
    vector<double> targetsVec;
    targetsVec.reserve(mat.size());
    for (size_t i = 0; i < mat.size(); i++)
    {
        targetsVec.push_back(mat[i][0]);
    }
    return targetsVec;
}
libsvmtargetobjectconvertor.h
#pragma once

#include "machinelearning.h"

struct svm_node;

class LibSvmTargetObservationConvertor
{
public:
    svm_node ** ConvertObservations(const vector<MlObservation> &observations, size_t numFeatures) const
    {
        svm_node **svmObservations = (svm_node **)malloc(sizeof(svm_node *) * observations.size());
        for (size_t rowI = 0; rowI < observations.size(); rowI++)
        {
            // allocate one extra node for the index = -1 terminator libsvm expects
            svm_node *row = (svm_node *)malloc(sizeof(svm_node) * (numFeatures + 1));
            for (size_t colI = 0; colI < numFeatures; colI++)
            {
                row[colI].index = (int)colI;
                row[colI].value = observations[rowI][colI];
            }
            row[numFeatures].index = -1; // index = -1 terminates the node array; libsvm requires this
            svmObservations[rowI] = row;
        }
        return svmObservations;
    }

    svm_node* ConvertMatToSvmNode(const MlObservation &observation) const
    {
        size_t numFeatures = observation.size();
        // allocate one extra node for the index = -1 terminator libsvm expects
        svm_node *obsNode = (svm_node *)malloc(sizeof(svm_node) * (numFeatures + 1));
        for (size_t rowI = 0; rowI < numFeatures; rowI++)
        {
            obsNode[rowI].index = (int)rowI;
            obsNode[rowI].value = observation[rowI];
        }
        obsNode[numFeatures].index = -1; // index = -1 terminates the node array; libsvm requires this
        return obsNode;
    }
};
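Note that these arrays are malloc'd and never freed in the original code. The prediction node from ConvertMatToSvmNode can be freed right after svm_predict, but the training nodes cannot: svm_train does not copy them, the trained model keeps pointers into them, so they must stay alive as long as the model is used. A hedged sketch of a cleanup helper (the name FreeSvmObservations is my addition):

#include <cstdlib>

// Hypothetical helper, not part of the original post: frees the arrays returned by
// ConvertObservations. Only call this after the svm_model trained on these nodes
// has been destroyed, because svm_train keeps pointers into them rather than copying.
inline void FreeSvmObservations(svm_node **svmObservations, size_t numObservations)
{
    for (size_t rowI = 0; rowI < numObservations; rowI++)
        free(svmObservations[rowI]);
    free(svmObservations);
}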
machinelearning.h
#pragma once
#include <vector>
using std::vector;

using MlObservation = vector<double>;
using MlTarget = double;

//machinelearningmodel.h
#pragma once
#include <vector>
#include "machinelearning.h"

class MachineLearningModel
{
public:
    virtual ~MachineLearningModel() {}
    virtual double Predict(const MlObservation &observation) const = 0;
};
matlabmatrixfilereader.h
#pragma once
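The rest of this header is not shown. For reference, here is a minimal sketch of a Read function with the signature the usage code above expects (a static method taking a path and a delimiter and returning the matrix as rows of doubles); this is my own reconstruction, using std::string for the path as suggested at the top, not the original file:

// Hypothetical reconstruction: reads a delimiter-separated text file with one
// matrix row per line into a vector of rows.
#include <fstream>
#include <sstream>
#include <string>
#include <vector>
using std::vector;

class MatlabMatrixFileReader
{
public:
    static vector<vector<double>> Read(const std::string &path, char delimiter)
    {
        vector<vector<double>> mat;
        std::ifstream file(path);
        std::string line;
        while (std::getline(file, line))
        {
            vector<double> row;
            std::stringstream lineStream(line);
            std::string cell;
            while (std::getline(lineStream, cell, delimiter))
            {
                if (!cell.empty()) // skip empty cells from trailing delimiters
                    row.push_back(std::stod(cell));
            }
            mat.push_back(row);
        }
        return mat;
    }
};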
supportvectorregressionmodel.h
#pragma once
#include <vector>
using std::vector;

#include "machinelearningmodel.h"
#include "libsvmtargetobjectconvertor.h"

#include "svm.h" // libsvm

class FilePath;

class SupportVectorRegressionModel : public MachineLearningModel
{
public:
    ~SupportVectorRegressionModel()
    {
        svm_destroy_param(&param_);
        svm_free_and_destroy_model(&model_); // also frees the model content
    }

    SupportVectorRegressionModel(const vector<MlObservation>& observations, const vector<MlTarget>& targets)
    {
        // assumes all observations have the same number of features
        size_t numFeatures = observations[0].size();

        // setup targets
        double *targetsPtr = const_cast<double *>(&targets[0]); // svm_problem stores non-const pointers

        // setup observations
        LibSvmTargetObservationConvertor conv;
        svm_node **observationsPtr = conv.ConvertObservations(observations, numFeatures);

        // setup problem
        svm_problem problem;
        problem.l = static_cast<int>(targets.size());
        problem.y = targetsPtr;
        problem.x = observationsPtr;

        // specific to our training sets
        // TODO: This is hard-coded.
        // Bust out these values for use in the constructor
        param_.C = 0.4;          // cost
        param_.svm_type = 4;     // NU_SVR
        param_.kernel_type = 2;  // RBF (radial basis function)
        param_.nu = 0.6;         // SVR nu
        // These values are the defaults used in the Matlab version
        // as found in svm_model_matlab.c
        param_.gamma = 1.0 / (double)numFeatures;
        param_.coef0 = 0;
        param_.cache_size = 100; // in MB
        param_.shrinking = 1;
        param_.probability = 0;
        param_.degree = 3;
        param_.eps = 1e-3;
        param_.p = 0.1;
        param_.nr_weight = 0;
        param_.weight_label = NULL;
        param_.weight = NULL;

        // suppress command line output
        svm_set_print_string_function([](auto c) {});

        model_ = svm_train(&problem, &param_);
    }

    double Predict(const MlObservation& observation) const override
    {
        LibSvmTargetObservationConvertor conv;
        svm_node *obsNode = conv.ConvertMatToSvmNode(observation);
        double prediction = svm_predict(model_, obsNode);
        free(obsNode); // the prediction node is safe to free right away, unlike the training nodes
        return prediction;
    }

    SupportVectorRegressionModel(const FilePath & modelFile)
    {
        model_ = svm_load_model(modelFile.Path().c_str());
    }

private:
    svm_model *model_;
    svm_parameter param_{}; // value-initialized so svm_destroy_param is safe when loading from a file
};
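Since the class already has a constructor that loads a model via svm_load_model, you can avoid retraining on every run by persisting the trained model once with libsvm's svm_save_model. A possible sketch of such a member method, mirroring the load constructor above (the Save name is my addition; svm_save_model returns 0 on success):

// Sketch of a save method to pair with the svm_load_model constructor above.
bool Save(const FilePath &modelFile) const
{
    return svm_save_model(modelFile.Path().c_str(), model_) == 0;
}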