From 4a16d719182c72da990b29e065b1a2385ac86a01 Mon Sep 17 00:00:00 2001
From: Gil
Date: Thu, 22 Nov 2018 10:32:17 +0000
Subject: [PATCH] sg_add_refactor:

* Replaced the SG_ADD macro calls with the new ParameterProperties enum class
* This was done in the same way as in PR https://github.com/shogun-toolbox/shogun/pull/4417
---
 .../FeatureBlockLogisticRegression.cpp        | 14 ++++++------
 shogun/classifier/svm/SVMLin.cpp              |  8 +++----
 shogun/classifier/svm/SVMSGD.cpp              | 18 +++++++--------
 .../kernel/string/DistantSegmentsKernel.cpp   |  4 ++--
 .../mathematics/SparseInverseCovariance.cpp   | 11 +++++-----
 .../MulticlassLogisticRegression.cpp          |  6 ++---
 ...MulticlassTreeGuidedLogisticRegression.cpp |  6 ++---
 shogun/optimization/NLOPTMinimizer.cpp        |  8 +++----
 shogun/structure/DualLibQPBMSOSVM.cpp         | 22 ++++++++-----------
 .../MultitaskL12LogisticRegression.cpp        |  4 ++--
 .../MultitaskLeastSquaresRegression.cpp       | 12 +++++-----
 .../multitask/MultitaskLinearMachine.cpp      |  2 +-
 .../multitask/MultitaskLogisticRegression.cpp | 12 +++++-----
 .../MultitaskTraceLogisticRegression.cpp      |  2 +-
 14 files changed, 62 insertions(+), 67 deletions(-)

diff --git a/shogun/classifier/FeatureBlockLogisticRegression.cpp b/shogun/classifier/FeatureBlockLogisticRegression.cpp
index 9a19f7d..27880ff 100644
--- a/shogun/classifier/FeatureBlockLogisticRegression.cpp
+++ b/shogun/classifier/FeatureBlockLogisticRegression.cpp
@@ -57,13 +57,13 @@ CFeatureBlockLogisticRegression::~CFeatureBlockLogisticRegression()
 
 void CFeatureBlockLogisticRegression::register_parameters()
 {
-    SG_ADD((CSGObject**)&m_feature_relation, "feature_relation", "feature relation", MS_NOT_AVAILABLE);
-    SG_ADD(&m_z, "z", "regularization coefficient", MS_AVAILABLE);
-    SG_ADD(&m_q, "q", "q of L1/Lq", MS_AVAILABLE);
-    SG_ADD(&m_termination, "termination", "termination", MS_NOT_AVAILABLE);
-    SG_ADD(&m_regularization, "regularization", "regularization", MS_NOT_AVAILABLE);
-    SG_ADD(&m_tolerance, "tolerance", "tolerance", MS_NOT_AVAILABLE);
-    SG_ADD(&m_max_iter, "max_iter", "maximum number of iterations", MS_NOT_AVAILABLE);
+    SG_ADD((CSGObject**)&m_feature_relation, "feature_relation", "feature relation");
+    SG_ADD(&m_z, "z", "regularization coefficient", ParameterProperties::HYPER);
+    SG_ADD(&m_q, "q", "q of L1/Lq", ParameterProperties::HYPER);
+    SG_ADD(&m_termination, "termination", "termination");
+    SG_ADD(&m_regularization, "regularization", "regularization");
+    SG_ADD(&m_tolerance, "tolerance", "tolerance");
+    SG_ADD(&m_max_iter, "max_iter", "maximum number of iterations");
 }
 
 CIndexBlockRelation* CFeatureBlockLogisticRegression::get_feature_relation() const
diff --git a/shogun/classifier/svm/SVMLin.cpp b/shogun/classifier/svm/SVMLin.cpp
index 10192d4..dc82d27 100644
--- a/shogun/classifier/svm/SVMLin.cpp
+++ b/shogun/classifier/svm/SVMLin.cpp
@@ -43,12 +43,12 @@ CSVMLin::~CSVMLin()
 void CSVMLin::init()
 {
     SG_ADD(
-        &use_bias, "use_bias", "Indicates if bias is used.", MS_NOT_AVAILABLE);
+        &use_bias, "use_bias", "Indicates if bias is used.");
     SG_ADD(
-        &C1, "C1", "C constant for negatively labeled examples.", MS_AVAILABLE);
+        &C1, "C1", "C constant for negatively labeled examples.", ParameterProperties::HYPER);
     SG_ADD(
-        &C2, "C2", "C constant for positively labeled examples.", MS_AVAILABLE);
-    SG_ADD(&epsilon, "epsilon", "Convergence precision.", MS_NOT_AVAILABLE);
+        &C2, "C2", "C constant for positively labeled examples.", ParameterProperties::HYPER);
+    SG_ADD(&epsilon, "epsilon", "Convergence precision.");
 }
 
 bool CSVMLin::train_machine(CFeatures* data)
diff --git a/shogun/classifier/svm/SVMSGD.cpp b/shogun/classifier/svm/SVMSGD.cpp
index 885d58b..fe538a0 100644
--- a/shogun/classifier/svm/SVMSGD.cpp
+++ b/shogun/classifier/svm/SVMSGD.cpp
@@ -213,16 +213,16 @@ void CSVMSGD::init()
     loss=new CHingeLoss();
     SG_REF(loss);
 
-    SG_ADD(&C1, "C1", "Cost constant 1.", MS_AVAILABLE);
-    SG_ADD(&C2, "C2", "Cost constant 2.", MS_AVAILABLE);
-    SG_ADD(&wscale, "wscale", "W scale", MS_NOT_AVAILABLE);
-    SG_ADD(&bscale, "bscale", "b scale", MS_NOT_AVAILABLE);
-    SG_ADD(&epochs, "epochs", "epochs", MS_NOT_AVAILABLE);
-    SG_ADD(&skip, "skip", "skip", MS_NOT_AVAILABLE);
-    SG_ADD(&count, "count", "count", MS_NOT_AVAILABLE);
+    SG_ADD(&C1, "C1", "Cost constant 1.", ParameterProperties::HYPER);
+    SG_ADD(&C2, "C2", "Cost constant 2.", ParameterProperties::HYPER);
+    SG_ADD(&wscale, "wscale", "W scale");
+    SG_ADD(&bscale, "bscale", "b scale");
+    SG_ADD(&epochs, "epochs", "epochs");
+    SG_ADD(&skip, "skip", "skip");
+    SG_ADD(&count, "count", "count");
     SG_ADD(
-        &use_bias, "use_bias", "Indicates if bias is used.", MS_NOT_AVAILABLE);
+        &use_bias, "use_bias", "Indicates if bias is used.");
     SG_ADD(
         &use_regularized_bias, "use_regularized_bias",
-        "Indicates if bias is regularized.", MS_NOT_AVAILABLE);
+        "Indicates if bias is regularized.");
 }
diff --git a/shogun/kernel/string/DistantSegmentsKernel.cpp b/shogun/kernel/string/DistantSegmentsKernel.cpp
index a9e9407..a38ee59 100644
--- a/shogun/kernel/string/DistantSegmentsKernel.cpp
+++ b/shogun/kernel/string/DistantSegmentsKernel.cpp
@@ -43,8 +43,8 @@ bool CDistantSegmentsKernel::init(CFeatures* l, CFeatures* r)
 
 void CDistantSegmentsKernel::init()
 {
-    SG_ADD(&m_delta, "delta", "Delta parameter of the DS-Kernel", MS_AVAILABLE);
-    SG_ADD(&m_theta, "theta", "Theta parameter of the DS-Kernel", MS_AVAILABLE);
+    SG_ADD(&m_delta, "delta", "Delta parameter of the DS-Kernel", ParameterProperties::HYPER);
+    SG_ADD(&m_theta, "theta", "Theta parameter of the DS-Kernel", ParameterProperties::HYPER);
 }
 
 float64_t CDistantSegmentsKernel::compute(int32_t idx_a, int32_t idx_b)
diff --git a/shogun/mathematics/SparseInverseCovariance.cpp b/shogun/mathematics/SparseInverseCovariance.cpp
index 9e8a606..86ea210 100644
--- a/shogun/mathematics/SparseInverseCovariance.cpp
+++ b/shogun/mathematics/SparseInverseCovariance.cpp
@@ -31,12 +31,11 @@ CSparseInverseCovariance::~CSparseInverseCovariance()
 void CSparseInverseCovariance::register_parameters()
 {
     SG_ADD(&m_lasso_max_iter,"lasso_max_iter",
-        "maximum iteration of LASSO step",MS_NOT_AVAILABLE);
-    SG_ADD(&m_max_iter,"max_iter","maximum total iteration",
-        MS_NOT_AVAILABLE);
-    SG_ADD(&m_f_gap,"f_gap","f gap",MS_NOT_AVAILABLE);
-    SG_ADD(&m_x_gap,"x_gap","x gap",MS_NOT_AVAILABLE);
-    SG_ADD(&m_xtol,"xtol","xtol",MS_NOT_AVAILABLE);
+        "maximum iteration of LASSO step");
+    SG_ADD(&m_max_iter,"max_iter","maximum total iteration");
+    SG_ADD(&m_f_gap,"f_gap","f gap");
+    SG_ADD(&m_x_gap,"x_gap","x gap");
+    SG_ADD(&m_xtol,"xtol","xtol");
 }
 
 SGMatrix<float64_t> CSparseInverseCovariance::estimate(SGMatrix<float64_t> S, float64_t lambda_c)
diff --git a/shogun/multiclass/MulticlassLogisticRegression.cpp b/shogun/multiclass/MulticlassLogisticRegression.cpp
index 53d269d..430e323 100644
--- a/shogun/multiclass/MulticlassLogisticRegression.cpp
+++ b/shogun/multiclass/MulticlassLogisticRegression.cpp
@@ -42,9 +42,9 @@ void CMulticlassLogisticRegression::init_defaults()
 
 void CMulticlassLogisticRegression::register_parameters()
 {
-    SG_ADD(&m_z, "m_z", "regularization constant",MS_AVAILABLE);
-    SG_ADD(&m_epsilon, "m_epsilon", "tolerance epsilon",MS_NOT_AVAILABLE);
-    SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations",MS_NOT_AVAILABLE);
+    SG_ADD(&m_z, "m_z", "regularization constant", ParameterProperties::HYPER);
+    SG_ADD(&m_epsilon, "m_epsilon", "tolerance epsilon");
+    SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations");
 }
 
 CMulticlassLogisticRegression::~CMulticlassLogisticRegression()
diff --git a/shogun/multiclass/MulticlassTreeGuidedLogisticRegression.cpp b/shogun/multiclass/MulticlassTreeGuidedLogisticRegression.cpp
index d0371d9..76ab6f8 100644
--- a/shogun/multiclass/MulticlassTreeGuidedLogisticRegression.cpp
+++ b/shogun/multiclass/MulticlassTreeGuidedLogisticRegression.cpp
@@ -41,9 +41,9 @@ void CMulticlassTreeGuidedLogisticRegression::init_defaults()
 
 void CMulticlassTreeGuidedLogisticRegression::register_parameters()
 {
-    SG_ADD(&m_z, "m_z", "regularization constant",MS_AVAILABLE);
-    SG_ADD(&m_epsilon, "m_epsilon", "tolerance epsilon",MS_NOT_AVAILABLE);
-    SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations",MS_NOT_AVAILABLE);
+    SG_ADD(&m_z, "m_z", "regularization constant", ParameterProperties::HYPER);
+    SG_ADD(&m_epsilon, "m_epsilon", "tolerance epsilon");
+    SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations");
 }
 
 CMulticlassTreeGuidedLogisticRegression::~CMulticlassTreeGuidedLogisticRegression()
diff --git a/shogun/optimization/NLOPTMinimizer.cpp b/shogun/optimization/NLOPTMinimizer.cpp
index 69ea676..92b1f7a 100644
--- a/shogun/optimization/NLOPTMinimizer.cpp
+++ b/shogun/optimization/NLOPTMinimizer.cpp
@@ -57,13 +57,13 @@ void CNLOPTMinimizer::init()
     m_target_variable=SGVector<float64_t>();
     set_nlopt_parameters();
     SG_ADD(&m_max_iterations, "CNLOPTMinimizer__m_max_iterations",
-        "max_iterations in CNLOPTMinimizer", MS_NOT_AVAILABLE);
+        "max_iterations in CNLOPTMinimizer");
     SG_ADD(&m_variable_tolerance, "CNLOPTMinimizer__m_variable_tolerance",
-        "variable_tolerance in CNLOPTMinimizer", MS_NOT_AVAILABLE);
+        "variable_tolerance in CNLOPTMinimizer");
     SG_ADD(&m_function_tolerance, "CNLOPTMinimizer__m_function_tolerance",
-        "function_tolerance in CNLOPTMinimizer", MS_NOT_AVAILABLE);
+        "function_tolerance in CNLOPTMinimizer");
     SG_ADD(&m_nlopt_algorithm_id, "CNLOPTMinimizer__m_nlopt_algorithm_id",
-        "nlopt_algorithm_id in CNLOPTMinimizer", MS_NOT_AVAILABLE);
+        "nlopt_algorithm_id in CNLOPTMinimizer");
 #endif
 }
 
diff --git a/shogun/structure/DualLibQPBMSOSVM.cpp b/shogun/structure/DualLibQPBMSOSVM.cpp
index cf372db..266fed4 100644
--- a/shogun/structure/DualLibQPBMSOSVM.cpp
+++ b/shogun/structure/DualLibQPBMSOSVM.cpp
@@ -55,21 +55,17 @@ CDualLibQPBMSOSVM::~CDualLibQPBMSOSVM()
 
 void CDualLibQPBMSOSVM::init()
 {
-    SG_ADD(&m_TolRel, "m_TolRel", "Relative tolerance", MS_AVAILABLE);
-    SG_ADD(&m_TolAbs, "m_TolAbs", "Absolute tolerance", MS_AVAILABLE);
-    SG_ADD(&m_BufSize, "m_BuffSize", "Size of CP Buffer", MS_AVAILABLE);
-    SG_ADD(&m_lambda, "m_lambda", "Regularization constant lambda",
-        MS_AVAILABLE);
-    SG_ADD(&m_cleanICP, "m_cleanICP", "Inactive cutting plane removal flag",
-        MS_AVAILABLE);
+    SG_ADD(&m_TolRel, "m_TolRel", "Relative tolerance", ParameterProperties::HYPER);
+    SG_ADD(&m_TolAbs, "m_TolAbs", "Absolute tolerance", ParameterProperties::HYPER);
+    SG_ADD(&m_BufSize, "m_BuffSize", "Size of CP Buffer", ParameterProperties::HYPER);
+    SG_ADD(&m_lambda, "m_lambda", "Regularization constant lambda");
+    SG_ADD(&m_cleanICP, "m_cleanICP", "Inactive cutting plane removal flag");
     SG_ADD(&m_cleanAfter, "m_cleanAfter",
-        "Number of inactive iterations after which ICP will be removed",
-        MS_AVAILABLE);
-    SG_ADD(&m_K, "m_K", "Parameter K", MS_NOT_AVAILABLE);
-    SG_ADD(&m_Tmax, "m_Tmax", "Parameter Tmax", MS_AVAILABLE);
-    SG_ADD(&m_cp_models, "m_cp_models", "Number of cutting plane models",
-        MS_AVAILABLE);
+        "Number of inactive iterations after which ICP will be removed");
+    SG_ADD(&m_K, "m_K", "Parameter K");
+    SG_ADD(&m_Tmax, "m_Tmax", "Parameter Tmax", ParameterProperties::HYPER);
+    SG_ADD(&m_cp_models, "m_cp_models", "Number of cutting plane models");
 
     set_TolRel(0.001);
     set_TolAbs(0.0);
diff --git a/shogun/transfer/multitask/MultitaskL12LogisticRegression.cpp b/shogun/transfer/multitask/MultitaskL12LogisticRegression.cpp
index 4d9c652..8971ffd 100644
--- a/shogun/transfer/multitask/MultitaskL12LogisticRegression.cpp
+++ b/shogun/transfer/multitask/MultitaskL12LogisticRegression.cpp
@@ -49,8 +49,8 @@ CMultitaskL12LogisticRegression::CMultitaskL12LogisticRegression(
 
 void CMultitaskL12LogisticRegression::init()
 {
-    SG_ADD(&self->m_rho1,"rho1","rho L1/L2 regularization parameter",MS_AVAILABLE);
-    SG_ADD(&self->m_rho2,"rho2","rho L2 regularization parameter",MS_AVAILABLE);
+    SG_ADD(&self->m_rho1,"rho1","rho L1/L2 regularization parameter", ParameterProperties::HYPER);
+    SG_ADD(&self->m_rho2,"rho2","rho L2 regularization parameter", ParameterProperties::HYPER);
 }
 
 void CMultitaskL12LogisticRegression::set_rho1(float64_t rho1)
diff --git a/shogun/transfer/multitask/MultitaskLeastSquaresRegression.cpp b/shogun/transfer/multitask/MultitaskLeastSquaresRegression.cpp
index 9b89c7d..2ad83b7 100644
--- a/shogun/transfer/multitask/MultitaskLeastSquaresRegression.cpp
+++ b/shogun/transfer/multitask/MultitaskLeastSquaresRegression.cpp
@@ -41,12 +41,12 @@ CMultitaskLeastSquaresRegression::~CMultitaskLeastSquaresRegression()
 
 void CMultitaskLeastSquaresRegression::register_parameters()
 {
-    SG_ADD(&m_z, "z", "regularization coefficient", MS_AVAILABLE);
-    SG_ADD(&m_q, "q", "q of L1/Lq", MS_AVAILABLE);
-    SG_ADD(&m_termination, "termination", "termination", MS_NOT_AVAILABLE);
-    SG_ADD(&m_regularization, "regularization", "regularization", MS_NOT_AVAILABLE);
-    SG_ADD(&m_tolerance, "tolerance", "tolerance", MS_NOT_AVAILABLE);
-    SG_ADD(&m_max_iter, "max_iter", "maximum number of iterations", MS_NOT_AVAILABLE);
+    SG_ADD(&m_z, "z", "regularization coefficient", ParameterProperties::HYPER);
+    SG_ADD(&m_q, "q", "q of L1/Lq", ParameterProperties::HYPER);
+    SG_ADD(&m_termination, "termination", "termination");
+    SG_ADD(&m_regularization, "regularization", "regularization");
+    SG_ADD(&m_tolerance, "tolerance", "tolerance");
+    SG_ADD(&m_max_iter, "max_iter", "maximum number of iterations");
 }
 
 void CMultitaskLeastSquaresRegression::initialize_parameters()
diff --git a/shogun/transfer/multitask/MultitaskLinearMachine.cpp b/shogun/transfer/multitask/MultitaskLinearMachine.cpp
index 731c571..3a7bf30 100644
--- a/shogun/transfer/multitask/MultitaskLinearMachine.cpp
+++ b/shogun/transfer/multitask/MultitaskLinearMachine.cpp
@@ -46,7 +46,7 @@ CMultitaskLinearMachine::~CMultitaskLinearMachine()
 
 void CMultitaskLinearMachine::register_parameters()
 {
-    SG_ADD((CSGObject**)&m_task_relation, "task_relation", "task relation", MS_NOT_AVAILABLE);
+    SG_ADD((CSGObject**)&m_task_relation, "task_relation", "task relation");
 }
 
 int32_t CMultitaskLinearMachine::get_current_task() const
diff --git a/shogun/transfer/multitask/MultitaskLogisticRegression.cpp b/shogun/transfer/multitask/MultitaskLogisticRegression.cpp
index 063ea8e..af3b472 100644
--- a/shogun/transfer/multitask/MultitaskLogisticRegression.cpp
+++ b/shogun/transfer/multitask/MultitaskLogisticRegression.cpp
@@ -40,12 +40,12 @@ CMultitaskLogisticRegression::~CMultitaskLogisticRegression()
 
 void CMultitaskLogisticRegression::register_parameters()
 {
-    SG_ADD(&m_z, "z", "regularization coefficient", MS_AVAILABLE);
-    SG_ADD(&m_q, "q", "q of L1/Lq", MS_AVAILABLE);
-    SG_ADD(&m_termination, "termination", "termination", MS_NOT_AVAILABLE);
-    SG_ADD(&m_regularization, "regularization", "regularization", MS_NOT_AVAILABLE);
-    SG_ADD(&m_tolerance, "tolerance", "tolerance", MS_NOT_AVAILABLE);
-    SG_ADD(&m_max_iter, "max_iter", "maximum number of iterations", MS_NOT_AVAILABLE);
+    SG_ADD(&m_z, "z", "regularization coefficient", ParameterProperties::HYPER);
+    SG_ADD(&m_q, "q", "q of L1/Lq", ParameterProperties::HYPER);
+    SG_ADD(&m_termination, "termination", "termination");
+    SG_ADD(&m_regularization, "regularization", "regularization");
+    SG_ADD(&m_tolerance, "tolerance", "tolerance");
+    SG_ADD(&m_max_iter, "max_iter", "maximum number of iterations");
 }
 
 void CMultitaskLogisticRegression::initialize_parameters()
diff --git a/shogun/transfer/multitask/MultitaskTraceLogisticRegression.cpp b/shogun/transfer/multitask/MultitaskTraceLogisticRegression.cpp
index a84c909..90ed61c 100644
--- a/shogun/transfer/multitask/MultitaskTraceLogisticRegression.cpp
+++ b/shogun/transfer/multitask/MultitaskTraceLogisticRegression.cpp
@@ -36,7 +36,7 @@ CMultitaskTraceLogisticRegression::CMultitaskTraceLogisticRegression(
 
 void CMultitaskTraceLogisticRegression::init()
 {
-    SG_ADD(&m_rho,"rho","rho",MS_AVAILABLE);
+    SG_ADD(&m_rho,"rho","rho", ParameterProperties::HYPER);
 }
 
 void CMultitaskTraceLogisticRegression::set_rho(float64_t rho)
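
Reviewer note (not part of the patch): the rewrite applies the convention from PR https://github.com/shogun-toolbox/shogun/pull/4417 — parameters previously registered with MS_AVAILABLE now pass ParameterProperties::HYPER as the fourth SG_ADD argument, while parameters previously registered with MS_NOT_AVAILABLE simply drop the fourth argument. The sketch below is a minimal illustration of that mapping, assuming the SG_ADD overloads introduced in that PR; the class and member names (CExampleMachine, m_C, m_max_iter) are illustrative only and do not appear in this patch.

    #include <shogun/base/SGObject.h>

    namespace shogun
    {
    /* Illustrative only: one tunable hyper-parameter and one plain parameter. */
    class CExampleMachine : public CSGObject
    {
    public:
        CExampleMachine() : CSGObject()
        {
            /* was: SG_ADD(&m_C, "C", "regularization constant", MS_AVAILABLE); */
            SG_ADD(&m_C, "C", "regularization constant", ParameterProperties::HYPER);

            /* was: SG_ADD(&m_max_iter, "max_iter", "maximum iterations", MS_NOT_AVAILABLE); */
            SG_ADD(&m_max_iter, "max_iter", "maximum iterations");
        }

        virtual const char* get_name() const { return "ExampleMachine"; }

    private:
        float64_t m_C = 1.0;
        int32_t m_max_iter = 100;
    };
    }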