Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

SG_ADD refactor #8

Merged
merged 1 commit into from
Nov 22, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 7 additions & 7 deletions shogun/classifier/FeatureBlockLogisticRegression.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -57,13 +57,13 @@ CFeatureBlockLogisticRegression::~CFeatureBlockLogisticRegression()

void CFeatureBlockLogisticRegression::register_parameters()
{
SG_ADD((CSGObject**)&m_feature_relation, "feature_relation", "feature relation", MS_NOT_AVAILABLE);
SG_ADD(&m_z, "z", "regularization coefficient", MS_AVAILABLE);
SG_ADD(&m_q, "q", "q of L1/Lq", MS_AVAILABLE);
SG_ADD(&m_termination, "termination", "termination", MS_NOT_AVAILABLE);
SG_ADD(&m_regularization, "regularization", "regularization", MS_NOT_AVAILABLE);
SG_ADD(&m_tolerance, "tolerance", "tolerance", MS_NOT_AVAILABLE);
SG_ADD(&m_max_iter, "max_iter", "maximum number of iterations", MS_NOT_AVAILABLE);
SG_ADD((CSGObject**)&m_feature_relation, "feature_relation", "feature relation");
SG_ADD(&m_z, "z", "regularization coefficient", ParameterProperties::HYPER);
SG_ADD(&m_q, "q", "q of L1/Lq", ParameterProperties::HYPER);
SG_ADD(&m_termination, "termination", "termination");
SG_ADD(&m_regularization, "regularization", "regularization");
SG_ADD(&m_tolerance, "tolerance", "tolerance");
SG_ADD(&m_max_iter, "max_iter", "maximum number of iterations");
}

CIndexBlockRelation* CFeatureBlockLogisticRegression::get_feature_relation() const
Expand Down
8 changes: 4 additions & 4 deletions shogun/classifier/svm/SVMLin.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -43,12 +43,12 @@ CSVMLin::~CSVMLin()
void CSVMLin::init()
{
SG_ADD(
&use_bias, "use_bias", "Indicates if bias is used.", MS_NOT_AVAILABLE);
&use_bias, "use_bias", "Indicates if bias is used.");
SG_ADD(
&C1, "C1", "C constant for negatively labeled examples.", MS_AVAILABLE);
&C1, "C1", "C constant for negatively labeled examples.", ParameterProperties::HYPER);
SG_ADD(
&C2, "C2", "C constant for positively labeled examples.", MS_AVAILABLE);
SG_ADD(&epsilon, "epsilon", "Convergence precision.", MS_NOT_AVAILABLE);
&C2, "C2", "C constant for positively labeled examples.", ParameterProperties::HYPER);
SG_ADD(&epsilon, "epsilon", "Convergence precision.");
}

bool CSVMLin::train_machine(CFeatures* data)
Expand Down
18 changes: 9 additions & 9 deletions shogun/classifier/svm/SVMSGD.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -213,16 +213,16 @@ void CSVMSGD::init()
loss=new CHingeLoss();
SG_REF(loss);

SG_ADD(&C1, "C1", "Cost constant 1.", MS_AVAILABLE);
SG_ADD(&C2, "C2", "Cost constant 2.", MS_AVAILABLE);
SG_ADD(&wscale, "wscale", "W scale", MS_NOT_AVAILABLE);
SG_ADD(&bscale, "bscale", "b scale", MS_NOT_AVAILABLE);
SG_ADD(&epochs, "epochs", "epochs", MS_NOT_AVAILABLE);
SG_ADD(&skip, "skip", "skip", MS_NOT_AVAILABLE);
SG_ADD(&count, "count", "count", MS_NOT_AVAILABLE);
SG_ADD(&C1, "C1", "Cost constant 1.", ParameterProperties::HYPER);
SG_ADD(&C2, "C2", "Cost constant 2.", ParameterProperties::HYPER);
SG_ADD(&wscale, "wscale", "W scale");
SG_ADD(&bscale, "bscale", "b scale");
SG_ADD(&epochs, "epochs", "epochs");
SG_ADD(&skip, "skip", "skip");
SG_ADD(&count, "count", "count");
SG_ADD(
&use_bias, "use_bias", "Indicates if bias is used.", MS_NOT_AVAILABLE);
&use_bias, "use_bias", "Indicates if bias is used.");
SG_ADD(
&use_regularized_bias, "use_regularized_bias",
"Indicates if bias is regularized.", MS_NOT_AVAILABLE);
"Indicates if bias is regularized.");
}
4 changes: 2 additions & 2 deletions shogun/kernel/string/DistantSegmentsKernel.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -43,8 +43,8 @@ bool CDistantSegmentsKernel::init(CFeatures* l, CFeatures* r)

void CDistantSegmentsKernel::init()
{
SG_ADD(&m_delta, "delta", "Delta parameter of the DS-Kernel", MS_AVAILABLE);
SG_ADD(&m_theta, "theta", "Theta parameter of the DS-Kernel", MS_AVAILABLE);
SG_ADD(&m_delta, "delta", "Delta parameter of the DS-Kernel", ParameterProperties::HYPER);
SG_ADD(&m_theta, "theta", "Theta parameter of the DS-Kernel", ParameterProperties::HYPER);
}

float64_t CDistantSegmentsKernel::compute(int32_t idx_a, int32_t idx_b)
Expand Down
11 changes: 5 additions & 6 deletions shogun/mathematics/SparseInverseCovariance.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -31,12 +31,11 @@ CSparseInverseCovariance::~CSparseInverseCovariance()
void CSparseInverseCovariance::register_parameters()
{
SG_ADD(&m_lasso_max_iter,"lasso_max_iter",
"maximum iteration of LASSO step",MS_NOT_AVAILABLE);
SG_ADD(&m_max_iter,"max_iter","maximum total iteration",
MS_NOT_AVAILABLE);
SG_ADD(&m_f_gap,"f_gap","f gap",MS_NOT_AVAILABLE);
SG_ADD(&m_x_gap,"x_gap","x gap",MS_NOT_AVAILABLE);
SG_ADD(&m_xtol,"xtol","xtol",MS_NOT_AVAILABLE);
"maximum iteration of LASSO step");
SG_ADD(&m_max_iter,"max_iter","maximum total iteration");
SG_ADD(&m_f_gap,"f_gap","f gap");
SG_ADD(&m_x_gap,"x_gap","x gap");
SG_ADD(&m_xtol,"xtol","xtol");
}

SGMatrix<float64_t> CSparseInverseCovariance::estimate(SGMatrix<float64_t> S, float64_t lambda_c)
Expand Down
6 changes: 3 additions & 3 deletions shogun/multiclass/MulticlassLogisticRegression.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -42,9 +42,9 @@ void CMulticlassLogisticRegression::init_defaults()

void CMulticlassLogisticRegression::register_parameters()
{
SG_ADD(&m_z, "m_z", "regularization constant",MS_AVAILABLE);
SG_ADD(&m_epsilon, "m_epsilon", "tolerance epsilon",MS_NOT_AVAILABLE);
SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations",MS_NOT_AVAILABLE);
SG_ADD(&m_z, "m_z", "regularization constant", ParameterProperties::HYPER);
SG_ADD(&m_epsilon, "m_epsilon", "tolerance epsilon");
SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations");
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

We should introduce an attribute for parameters like this one — they are hyper-parameters, but not in the classical sense (i.e., not ones we necessarily want to tune), yet they are still free parameters.
@vigsterkr @lisitsyn ?

}

CMulticlassLogisticRegression::~CMulticlassLogisticRegression()
Expand Down
6 changes: 3 additions & 3 deletions shogun/multiclass/MulticlassTreeGuidedLogisticRegression.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -41,9 +41,9 @@ void CMulticlassTreeGuidedLogisticRegression::init_defaults()

void CMulticlassTreeGuidedLogisticRegression::register_parameters()
{
SG_ADD(&m_z, "m_z", "regularization constant",MS_AVAILABLE);
SG_ADD(&m_epsilon, "m_epsilon", "tolerance epsilon",MS_NOT_AVAILABLE);
SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations",MS_NOT_AVAILABLE);
SG_ADD(&m_z, "m_z", "regularization constant", ParameterProperties::HYPER);
SG_ADD(&m_epsilon, "m_epsilon", "tolerance epsilon");
SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations");
}

CMulticlassTreeGuidedLogisticRegression::~CMulticlassTreeGuidedLogisticRegression()
Expand Down
8 changes: 4 additions & 4 deletions shogun/optimization/NLOPTMinimizer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -57,13 +57,13 @@ void CNLOPTMinimizer::init()
m_target_variable=SGVector<float64_t>();
set_nlopt_parameters();
SG_ADD(&m_max_iterations, "CNLOPTMinimizer__m_max_iterations",
"max_iterations in CNLOPTMinimizer", MS_NOT_AVAILABLE);
"max_iterations in CNLOPTMinimizer");
SG_ADD(&m_variable_tolerance, "CNLOPTMinimizer__m_variable_tolerance",
"variable_tolerance in CNLOPTMinimizer", MS_NOT_AVAILABLE);
"variable_tolerance in CNLOPTMinimizer");
SG_ADD(&m_function_tolerance, "CNLOPTMinimizer__m_function_tolerance",
"function_tolerance in CNLOPTMinimizer", MS_NOT_AVAILABLE);
"function_tolerance in CNLOPTMinimizer");
SG_ADD(&m_nlopt_algorithm_id, "CNLOPTMinimizer__m_nlopt_algorithm_id",
"nlopt_algorithm_id in CNLOPTMinimizer", MS_NOT_AVAILABLE);
"nlopt_algorithm_id in CNLOPTMinimizer");
#endif
}

Expand Down
22 changes: 9 additions & 13 deletions shogun/structure/DualLibQPBMSOSVM.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -55,21 +55,17 @@ CDualLibQPBMSOSVM::~CDualLibQPBMSOSVM()

void CDualLibQPBMSOSVM::init()
{
SG_ADD(&m_TolRel, "m_TolRel", "Relative tolerance", MS_AVAILABLE);
SG_ADD(&m_TolAbs, "m_TolAbs", "Absolute tolerance", MS_AVAILABLE);
SG_ADD(&m_BufSize, "m_BuffSize", "Size of CP Buffer", MS_AVAILABLE);
SG_ADD(&m_lambda, "m_lambda", "Regularization constant lambda",
MS_AVAILABLE);
SG_ADD(&m_cleanICP, "m_cleanICP", "Inactive cutting plane removal flag",
MS_AVAILABLE);
SG_ADD(&m_TolRel, "m_TolRel", "Relative tolerance", ParameterProperties::HYPER);
SG_ADD(&m_TolAbs, "m_TolAbs", "Absolute tolerance", ParameterProperties::HYPER);
SG_ADD(&m_BufSize, "m_BuffSize", "Size of CP Buffer", ParameterProperties::HYPER);
SG_ADD(&m_lambda, "m_lambda", "Regularization constant lambda");
SG_ADD(&m_cleanICP, "m_cleanICP", "Inactive cutting plane removal flag");
SG_ADD(&m_cleanAfter,
"m_cleanAfter",
"Number of inactive iterations after which ICP will be removed",
MS_AVAILABLE);
SG_ADD(&m_K, "m_K", "Parameter K", MS_NOT_AVAILABLE);
SG_ADD(&m_Tmax, "m_Tmax", "Parameter Tmax", MS_AVAILABLE);
SG_ADD(&m_cp_models, "m_cp_models", "Number of cutting plane models",
MS_AVAILABLE);
"Number of inactive iterations after which ICP will be removed");
SG_ADD(&m_K, "m_K", "Parameter K");
SG_ADD(&m_Tmax, "m_Tmax", "Parameter Tmax", ParameterProperties::HYPER);
SG_ADD(&m_cp_models, "m_cp_models", "Number of cutting plane models");

set_TolRel(0.001);
set_TolAbs(0.0);
Expand Down
4 changes: 2 additions & 2 deletions shogun/transfer/multitask/MultitaskL12LogisticRegression.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -49,8 +49,8 @@ CMultitaskL12LogisticRegression::CMultitaskL12LogisticRegression(

void CMultitaskL12LogisticRegression::init()
{
SG_ADD(&self->m_rho1,"rho1","rho L1/L2 regularization parameter",MS_AVAILABLE);
SG_ADD(&self->m_rho2,"rho2","rho L2 regularization parameter",MS_AVAILABLE);
SG_ADD(&self->m_rho1,"rho1","rho L1/L2 regularization parameter", ParameterProperties::HYPER);
SG_ADD(&self->m_rho2,"rho2","rho L2 regularization parameter", ParameterProperties::HYPER);
}

void CMultitaskL12LogisticRegression::set_rho1(float64_t rho1)
Expand Down
12 changes: 6 additions & 6 deletions shogun/transfer/multitask/MultitaskLeastSquaresRegression.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -41,12 +41,12 @@ CMultitaskLeastSquaresRegression::~CMultitaskLeastSquaresRegression()

void CMultitaskLeastSquaresRegression::register_parameters()
{
SG_ADD(&m_z, "z", "regularization coefficient", MS_AVAILABLE);
SG_ADD(&m_q, "q", "q of L1/Lq", MS_AVAILABLE);
SG_ADD(&m_termination, "termination", "termination", MS_NOT_AVAILABLE);
SG_ADD(&m_regularization, "regularization", "regularization", MS_NOT_AVAILABLE);
SG_ADD(&m_tolerance, "tolerance", "tolerance", MS_NOT_AVAILABLE);
SG_ADD(&m_max_iter, "max_iter", "maximum number of iterations", MS_NOT_AVAILABLE);
SG_ADD(&m_z, "z", "regularization coefficient", ParameterProperties::HYPER);
SG_ADD(&m_q, "q", "q of L1/Lq", ParameterProperties::HYPER);
SG_ADD(&m_termination, "termination", "termination");
SG_ADD(&m_regularization, "regularization", "regularization");
SG_ADD(&m_tolerance, "tolerance", "tolerance");
SG_ADD(&m_max_iter, "max_iter", "maximum number of iterations");
}

void CMultitaskLeastSquaresRegression::initialize_parameters()
Expand Down
2 changes: 1 addition & 1 deletion shogun/transfer/multitask/MultitaskLinearMachine.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ CMultitaskLinearMachine::~CMultitaskLinearMachine()

void CMultitaskLinearMachine::register_parameters()
{
SG_ADD((CSGObject**)&m_task_relation, "task_relation", "task relation", MS_NOT_AVAILABLE);
SG_ADD((CSGObject**)&m_task_relation, "task_relation", "task relation");
}

int32_t CMultitaskLinearMachine::get_current_task() const
Expand Down
12 changes: 6 additions & 6 deletions shogun/transfer/multitask/MultitaskLogisticRegression.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -40,12 +40,12 @@ CMultitaskLogisticRegression::~CMultitaskLogisticRegression()

void CMultitaskLogisticRegression::register_parameters()
{
SG_ADD(&m_z, "z", "regularization coefficient", MS_AVAILABLE);
SG_ADD(&m_q, "q", "q of L1/Lq", MS_AVAILABLE);
SG_ADD(&m_termination, "termination", "termination", MS_NOT_AVAILABLE);
SG_ADD(&m_regularization, "regularization", "regularization", MS_NOT_AVAILABLE);
SG_ADD(&m_tolerance, "tolerance", "tolerance", MS_NOT_AVAILABLE);
SG_ADD(&m_max_iter, "max_iter", "maximum number of iterations", MS_NOT_AVAILABLE);
SG_ADD(&m_z, "z", "regularization coefficient", ParameterProperties::HYPER);
SG_ADD(&m_q, "q", "q of L1/Lq", ParameterProperties::HYPER);
SG_ADD(&m_termination, "termination", "termination");
SG_ADD(&m_regularization, "regularization", "regularization");
SG_ADD(&m_tolerance, "tolerance", "tolerance");
SG_ADD(&m_max_iter, "max_iter", "maximum number of iterations");
}

void CMultitaskLogisticRegression::initialize_parameters()
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ CMultitaskTraceLogisticRegression::CMultitaskTraceLogisticRegression(

void CMultitaskTraceLogisticRegression::init()
{
SG_ADD(&m_rho,"rho","rho",MS_AVAILABLE);
SG_ADD(&m_rho,"rho","rho", ParameterProperties::HYPER);
}

void CMultitaskTraceLogisticRegression::set_rho(float64_t rho)
Expand Down