Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

SG_ADD refactor #4417

Merged
merged 7 commits into from
Nov 21, 2018
Merged
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
The table of contents is too big for display.
Diff view
Diff view
  •  
  •  
  •  
47 changes: 8 additions & 39 deletions src/shogun/base/SGObject.h
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,8 @@
* Authors: Heiko Strathmann, Soeren Sonnenburg, Sergey Lisitsyn,
* Giovanni De Toni, Jacob Walker, Thoralf Klein, Chiyuan Zhang,
* Fernando Iglesias, Sanuj Sharma, Roman Votyakov, Yuyu Zhang,
* Viktor Gal, Bjoern Esser, Evangelos Anagnostopoulos, Pan Deng
* Viktor Gal, Bjoern Esser, Evangelos Anagnostopoulos, Pan Deng,
* Gil Hoben
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Welcome to SGObject ;)

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Haha yes, I thought I would discreetly add myself to the list :D

*/

#ifndef __SGOBJECT_H__
Expand Down Expand Up @@ -59,50 +60,18 @@ template <class T> class SGStringList;
* Macros for registering parameters/model selection parameters
******************************************************************************/

#ifdef _MSC_VER

#define VA_NARGS(...) INTERNAL_EXPAND_ARGS_PRIVATE(INTERNAL_ARGS_AUGMENTER(__VA_ARGS__))
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

While it would be good to get rid of this thing, I think we still need to keep it (see below on default properties)

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

OK, I will bring this back and add a SG_ADD3!

#define INTERNAL_ARGS_AUGMENTER(...) unused, __VA_ARGS__
#define INTERNAL_EXPAND(x) x
#define INTERNAL_EXPAND_ARGS_PRIVATE(...) INTERNAL_EXPAND(INTERNAL_GET_ARG_COUNT_PRIVATE(__VA_ARGS__, 5, 4, 3, 2, 1, 0))
#define INTERNAL_GET_ARG_COUNT_PRIVATE(_0_, _1_, _2_, _3_, _4_, _5_, count, ...) count

#else

#define VA_NARGS_IMPL(_1, _2, _3, _4, _5, N, ...) N
#define VA_NARGS(...) VA_NARGS_IMPL(__VA_ARGS__, 5, 4, 3, 2, 1)

#endif

#define VARARG_IMPL2(base, count, ...) base##count(__VA_ARGS__)
#define VARARG_IMPL(base, count, ...) VARARG_IMPL2(base, count, __VA_ARGS__)
#define VARARG(base, ...) VARARG_IMPL(base, VA_NARGS(__VA_ARGS__), __VA_ARGS__)

#define SG_ADD4(param, name, description, ms_available) \
#define SG_ADD(param, name, description, param_properties) \
{ \
AnyParameterProperties pprop = \
AnyParameterProperties(description, param_properties); \
this->m_parameters->add(param, name, description); \
this->watch_param( \
name, param, \
AnyParameterProperties( \
description, ms_available, GRADIENT_NOT_AVAILABLE)); \
if (ms_available) \
this->watch_param(name, param, pprop); \
if (pprop.get_model_selection()) \
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Note for later: we can already start to get rid of the model_selection_parameters field and instead make the code work with any. Definitely a sub-task in all this refactoring ... but more on it later

this->m_model_selection_parameters->add(param, name, description); \
}

#define SG_ADD5(param, name, description, ms_available, gradient_available) \
{ \
this->m_parameters->add(param, name, description); \
this->watch_param( \
name, param, AnyParameterProperties( \
description, ms_available, gradient_available)); \
if (ms_available) \
this->m_model_selection_parameters->add(param, name, description); \
if (gradient_available) \
if (pprop.get_gradient()) \
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

same for those gradient parameters

this->m_gradient_parameters->add(param, name, description); \
}

#define SG_ADD(...) VARARG(SG_ADD, __VA_ARGS__)

/*******************************************************************************
* End of macros for registering parameters/model selection parameters
******************************************************************************/
Expand Down
4 changes: 2 additions & 2 deletions src/shogun/classifier/AveragedPerceptron.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -37,8 +37,8 @@ void CAveragedPerceptron::init()
{
max_iter = 1000;
learn_rate = 0.1;
SG_ADD(&max_iter, "max_iter", "Maximum number of iterations.", MS_AVAILABLE);
SG_ADD(&learn_rate, "learn_rate", "Learning rate.", MS_AVAILABLE);
SG_ADD(&max_iter, "max_iter", "Maximum number of iterations.", ParameterProperties::HYPER);
SG_ADD(&learn_rate, "learn_rate", "Learning rate.", ParameterProperties::HYPER);
}

bool CAveragedPerceptron::train_machine(CFeatures* data)
Expand Down
6 changes: 3 additions & 3 deletions src/shogun/classifier/LDA.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -49,9 +49,9 @@ void CLDA::init()

SG_ADD(
(machine_int_t*)&m_method, "m_method",
"Method used for LDA calculation", MS_NOT_AVAILABLE);
SG_ADD(&m_gamma, "m_gamma", "Regularization parameter", MS_AVAILABLE);
SG_ADD(&m_bdc_svd, "m_bdc_svd", "Use BDC-SVD algorithm", MS_NOT_AVAILABLE);
"Method used for LDA calculation", ParameterProperties());
SG_ADD(&m_gamma, "m_gamma", "Regularization parameter", ParameterProperties::HYPER);
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this is quite nice now

SG_ADD(&m_bdc_svd, "m_bdc_svd", "Use BDC-SVD algorithm", ParameterProperties());
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Mmmh. This is not nice; it would be better if this default were applied automatically.
I didn't realise this before, but I think it means we have to have SG_ADD3 (default properties) and SG_ADD4 (user-specified properties), as macros cannot have default parameters.

}

CLDA::~CLDA()
Expand Down
4 changes: 2 additions & 2 deletions src/shogun/classifier/Perceptron.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,8 @@ CPerceptron::CPerceptron() : CIterativeMachine<CLinearMachine>()
m_initialize_hyperplane = true;
SG_ADD(
&m_initialize_hyperplane, "initialize_hyperplane",
"Whether to initialize hyperplane.", MS_AVAILABLE);
SG_ADD(&learn_rate, "learn_rate", "Learning rate.", MS_AVAILABLE);
"Whether to initialize hyperplane.", ParameterProperties::HYPER);
SG_ADD(&learn_rate, "learn_rate", "Learning rate.", ParameterProperties::HYPER);
}

CPerceptron::~CPerceptron()
Expand Down
10 changes: 5 additions & 5 deletions src/shogun/classifier/PluginEstimate.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -22,17 +22,17 @@ CPluginEstimate::CPluginEstimate(float64_t pos_pseudo, float64_t neg_pseudo)
{
SG_ADD(
&m_pos_pseudo, "pos_pseudo", "pseudo count for positive class",
MS_NOT_AVAILABLE);
ParameterProperties());
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

again, it would be good if the default were used when nothing was provided

SG_ADD(
&m_neg_pseudo, "neg_pseudo", "pseudo count for negative class",
MS_NOT_AVAILABLE);
ParameterProperties());
SG_ADD(
&pos_model, "pos_model", "LinearHMM modelling positive class.",
MS_NOT_AVAILABLE);
ParameterProperties());
SG_ADD(
&neg_model, "neg_model", "LinearHMM modelling negative class.",
MS_NOT_AVAILABLE);
SG_ADD(&features, "features", "String Features.", MS_NOT_AVAILABLE);
ParameterProperties());
SG_ADD(&features, "features", "String Features.", ParameterProperties());
}

CPluginEstimate::~CPluginEstimate()
Expand Down
22 changes: 11 additions & 11 deletions src/shogun/classifier/mkl/MKL.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -271,21 +271,21 @@ void CMKL::register_params()
rho = 0;
lp_initialized = false;

SG_ADD((CMachine**)&svm, "svm", "wrapper svm", MS_NOT_AVAILABLE);
SG_ADD(&C_mkl, "C_mkl", "C mkl", MS_NOT_AVAILABLE);
SG_ADD(&mkl_norm, "mkl_norm", "norm used in mkl", MS_NOT_AVAILABLE);
SG_ADD(&ent_lambda, "ent_lambda", "elastic net sparsity trade-off parameter", MS_NOT_AVAILABLE);
SG_ADD(&mkl_block_norm, "mkl_block_norm", "mkl sparse trade-off parameter", MS_NOT_AVAILABLE);
SG_ADD((CMachine**)&svm, "svm", "wrapper svm", ParameterProperties());
SG_ADD(&C_mkl, "C_mkl", "C mkl", ParameterProperties());
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this is HYPER

SG_ADD(&mkl_norm, "mkl_norm", "norm used in mkl", ParameterProperties());
SG_ADD(&ent_lambda, "ent_lambda", "elastic net sparsity trade-off parameter", ParameterProperties());
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

hyper

SG_ADD(&mkl_block_norm, "mkl_block_norm", "mkl sparse trade-off parameter", ParameterProperties());
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

hyper


m_parameters->add_vector(&beta_local, &beta_local_size, "beta_local", "subkernel weights on L1 term of elastic net mkl");
watch_param("beta_local", &beta_local, &beta_local_size);

SG_ADD(&mkl_iterations, "mkl_iterations", "number of mkl steps", MS_NOT_AVAILABLE);
SG_ADD(&mkl_epsilon, "mkl_epsilon", "mkl epsilon", MS_NOT_AVAILABLE);
SG_ADD(&interleaved_optimization, "interleaved_optimization", "whether to use mkl wrapper or interleaved opt.", MS_NOT_AVAILABLE);
SG_ADD(&w_gap, "w_gap", "gap between interactions", MS_NOT_AVAILABLE);
SG_ADD(&rho, "rho", "objective after mkl iterations", MS_NOT_AVAILABLE);
SG_ADD(&lp_initialized, "lp_initialized", "if lp is Initialized", MS_NOT_AVAILABLE);
SG_ADD(&mkl_iterations, "mkl_iterations", "number of mkl steps", ParameterProperties());
SG_ADD(&mkl_epsilon, "mkl_epsilon", "mkl epsilon", ParameterProperties());
SG_ADD(&interleaved_optimization, "interleaved_optimization", "whether to use mkl wrapper or interleaved opt.", ParameterProperties());
SG_ADD(&w_gap, "w_gap", "gap between interactions", ParameterProperties());
SG_ADD(&rho, "rho", "objective after mkl iterations", ParameterProperties());
SG_ADD(&lp_initialized, "lp_initialized", "if lp is Initialized", ParameterProperties());
// Missing: self (3rd party specific, handled in clone())
}

Expand Down
14 changes: 7 additions & 7 deletions src/shogun/classifier/svm/LibLinear.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -50,18 +50,18 @@ void CLibLinear::init()
set_max_iterations();
set_epsilon(1e-5);

SG_ADD(&C1, "C1", "C Cost constant 1.", MS_AVAILABLE);
SG_ADD(&C2, "C2", "C Cost constant 2.", MS_AVAILABLE);
SG_ADD(&C1, "C1", "C Cost constant 1.", ParameterProperties::HYPER);
SG_ADD(&C2, "C2", "C Cost constant 2.", ParameterProperties::HYPER);
SG_ADD(
&use_bias, "use_bias", "Indicates if bias is used.", MS_NOT_AVAILABLE);
SG_ADD(&epsilon, "epsilon", "Convergence precision.", MS_NOT_AVAILABLE);
&use_bias, "use_bias", "Indicates if bias is used.", ParameterProperties());
SG_ADD(&epsilon, "epsilon", "Convergence precision.", ParameterProperties());
SG_ADD(
&max_iterations, "max_iterations", "Max number of iterations.",
MS_NOT_AVAILABLE);
SG_ADD(&m_linear_term, "linear_term", "Linear Term", MS_NOT_AVAILABLE);
ParameterProperties());
SG_ADD(&m_linear_term, "linear_term", "Linear Term", ParameterProperties());
SG_ADD(
(machine_int_t*)&liblinear_solver_type, "liblinear_solver_type",
"Type of LibLinear solver.", MS_NOT_AVAILABLE);
"Type of LibLinear solver.", ParameterProperties());
}

CLibLinear::~CLibLinear()
Expand Down
2 changes: 1 addition & 1 deletion src/shogun/classifier/svm/LibSVM.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ CLibSVM::~CLibSVM()

void CLibSVM::register_params()
{
SG_ADD((machine_int_t*) &solver_type, "libsvm_solver_type", "LibSVM Solver type", MS_NOT_AVAILABLE);
SG_ADD((machine_int_t*) &solver_type, "libsvm_solver_type", "LibSVM Solver type", ParameterProperties());
}

bool CLibSVM::train_machine(CFeatures* data)
Expand Down
6 changes: 3 additions & 3 deletions src/shogun/classifier/svm/OnlineLibLinear.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -63,10 +63,10 @@ void COnlineLibLinear::init()
Cn=1;
use_bias=false;

SG_ADD(&C1, "C1", "C Cost constant 1.", MS_AVAILABLE);
SG_ADD(&C2, "C2", "C Cost constant 2.", MS_AVAILABLE);
SG_ADD(&C1, "C1", "C Cost constant 1.", ParameterProperties::HYPER);
SG_ADD(&C2, "C2", "C Cost constant 2.", ParameterProperties::HYPER);
SG_ADD(
&use_bias, "use_bias", "Indicates if bias is used.", MS_NOT_AVAILABLE);
&use_bias, "use_bias", "Indicates if bias is used.", ParameterProperties());

PG = 0;
PGmax_old = CMath::INFTY;
Expand Down
20 changes: 10 additions & 10 deletions src/shogun/classifier/svm/OnlineSVMSGD.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -206,17 +206,17 @@ void COnlineSVMSGD::init()
loss=new CHingeLoss();
SG_REF(loss);

SG_ADD(&C1, "C1", "Cost constant 1.", MS_AVAILABLE);
SG_ADD(&C2, "C2", "Cost constant 2.", MS_AVAILABLE);
SG_ADD(&lambda, "lambda", "Regularization parameter.", MS_AVAILABLE);
SG_ADD(&wscale, "wscale", "W scale", MS_NOT_AVAILABLE);
SG_ADD(&bscale, "bscale", "b scale", MS_NOT_AVAILABLE);
SG_ADD(&epochs, "epochs", "epochs", MS_NOT_AVAILABLE);
SG_ADD(&skip, "skip", "skip", MS_NOT_AVAILABLE);
SG_ADD(&count, "count", "count", MS_NOT_AVAILABLE);
SG_ADD(&C1, "C1", "Cost constant 1.", ParameterProperties::HYPER);
SG_ADD(&C2, "C2", "Cost constant 2.", ParameterProperties::HYPER);
SG_ADD(&lambda, "lambda", "Regularization parameter.", ParameterProperties::HYPER);
SG_ADD(&wscale, "wscale", "W scale", ParameterProperties());
SG_ADD(&bscale, "bscale", "b scale", ParameterProperties());
SG_ADD(&epochs, "epochs", "epochs", ParameterProperties());
SG_ADD(&skip, "skip", "skip", ParameterProperties());
SG_ADD(&count, "count", "count", ParameterProperties());
SG_ADD(
&use_bias, "use_bias", "Indicates if bias is used.", MS_NOT_AVAILABLE);
&use_bias, "use_bias", "Indicates if bias is used.", ParameterProperties());
SG_ADD(
&use_regularized_bias, "use_regularized_bias",
"Indicates if bias is regularized.", MS_NOT_AVAILABLE);
"Indicates if bias is regularized.", ParameterProperties());
}
10 changes: 5 additions & 5 deletions src/shogun/classifier/svm/SGDQN.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -226,9 +226,9 @@ void CSGDQN::init()
loss=new CHingeLoss();
SG_REF(loss);

SG_ADD(&C1, "C1", "Cost constant 1.", MS_AVAILABLE);
SG_ADD(&C2, "C2", "Cost constant 2.", MS_AVAILABLE);
SG_ADD(&epochs, "epochs", "epochs", MS_AVAILABLE);
SG_ADD(&skip, "skip", "skip", MS_NOT_AVAILABLE);
SG_ADD(&count, "count", "count", MS_NOT_AVAILABLE);
SG_ADD(&C1, "C1", "Cost constant 1.", ParameterProperties::HYPER);
SG_ADD(&C2, "C2", "Cost constant 2.", ParameterProperties::HYPER);
SG_ADD(&epochs, "epochs", "epochs", ParameterProperties::HYPER);
SG_ADD(&skip, "skip", "skip", ParameterProperties());
SG_ADD(&count, "count", "count", ParameterProperties());
}
22 changes: 11 additions & 11 deletions src/shogun/classifier/svm/SVM.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -40,21 +40,21 @@ CSVM::~CSVM()

void CSVM::set_defaults(int32_t num_sv)
{
SG_ADD(&C1, "C1", "", MS_AVAILABLE);
SG_ADD(&C2, "C2", "", MS_AVAILABLE);
SG_ADD(&svm_loaded, "svm_loaded", "SVM is loaded.", MS_NOT_AVAILABLE);
SG_ADD(&epsilon, "epsilon", "", MS_AVAILABLE);
SG_ADD(&C1, "C1", "", ParameterProperties::HYPER);
SG_ADD(&C2, "C2", "", ParameterProperties::HYPER);
SG_ADD(&svm_loaded, "svm_loaded", "SVM is loaded.", ParameterProperties());
SG_ADD(&epsilon, "epsilon", "", ParameterProperties::HYPER);
SG_ADD(&tube_epsilon, "tube_epsilon",
"Tube epsilon for support vector regression.", MS_AVAILABLE);
SG_ADD(&nu, "nu", "", MS_AVAILABLE);
SG_ADD(&objective, "objective", "", MS_NOT_AVAILABLE);
SG_ADD(&qpsize, "qpsize", "", MS_NOT_AVAILABLE);
"Tube epsilon for support vector regression.", ParameterProperties::HYPER);
SG_ADD(&nu, "nu", "", ParameterProperties::HYPER);
SG_ADD(&objective, "objective", "", ParameterProperties());
SG_ADD(&qpsize, "qpsize", "", ParameterProperties());
SG_ADD(&use_shrinking, "use_shrinking", "Shrinking shall be used.",
MS_NOT_AVAILABLE);
ParameterProperties());
SG_ADD((CSGObject**) &mkl, "mkl", "MKL object that svm optimizers need.",
MS_NOT_AVAILABLE);
ParameterProperties());
SG_ADD(&m_linear_term, "linear_term", "Linear term in qp.",
MS_NOT_AVAILABLE);
ParameterProperties());

callback=NULL;
mkl=NULL;
Expand Down
12 changes: 6 additions & 6 deletions src/shogun/classifier/svm/SVMOcas.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -345,17 +345,17 @@ void CSVMOcas::init()

primal_objective = 0.0;

SG_ADD(&C1, "C1", "Cost constant 1.", MS_AVAILABLE);
SG_ADD(&C2, "C2", "Cost constant 2.", MS_AVAILABLE);
SG_ADD(&C1, "C1", "Cost constant 1.", ParameterProperties::HYPER);
SG_ADD(&C2, "C2", "Cost constant 2.", ParameterProperties::HYPER);
SG_ADD(
&use_bias, "use_bias", "Indicates if bias is used.", MS_NOT_AVAILABLE);
SG_ADD(&epsilon, "epsilon", "Convergence precision.", MS_NOT_AVAILABLE);
&use_bias, "use_bias", "Indicates if bias is used.", ParameterProperties());
SG_ADD(&epsilon, "epsilon", "Convergence precision.", ParameterProperties());
SG_ADD(
&bufsize, "bufsize", "Maximum number of cutting planes.",
MS_NOT_AVAILABLE);
ParameterProperties());
SG_ADD(
(machine_int_t*)&method, "method", "SVMOcas solver type.",
MS_NOT_AVAILABLE);
ParameterProperties());
}

float64_t CSVMOcas::compute_primal_objective() const
Expand Down
2 changes: 1 addition & 1 deletion src/shogun/clustering/GMM.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -827,5 +827,5 @@ void CGMM::register_params()
//m_parameters->add((SGVector<CSGObject*>*) &m_components, "m_components", "Mixture components");
SG_ADD(
&m_coefficients, "m_coefficients", "Mixture coefficients.",
MS_NOT_AVAILABLE);
ParameterProperties());
}
8 changes: 4 additions & 4 deletions src/shogun/clustering/KMeansBase.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -359,10 +359,10 @@ void CKMeansBase::init()
dimensions=0;
fixed_centers=false;
use_kmeanspp=false;
SG_ADD(&max_iter, "max_iter", "Maximum number of iterations", MS_AVAILABLE);
SG_ADD(&k, "k", "k, the number of clusters", MS_AVAILABLE);
SG_ADD(&dimensions, "dimensions", "Dimensions of data", MS_NOT_AVAILABLE);
SG_ADD(&R, "radiuses", "Cluster radiuses", MS_NOT_AVAILABLE);
SG_ADD(&max_iter, "max_iter", "Maximum number of iterations", ParameterProperties::HYPER);
SG_ADD(&k, "k", "k, the number of clusters", ParameterProperties::HYPER);
SG_ADD(&dimensions, "dimensions", "Dimensions of data", ParameterProperties());
SG_ADD(&R, "radiuses", "Cluster radiuses", ParameterProperties());

watch_method("cluster_centers", &CKMeansBase::get_cluster_centers);
}
Expand Down
2 changes: 1 addition & 1 deletion src/shogun/clustering/KMeansMiniBatch.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -141,7 +141,7 @@ void CKMeansMiniBatch::init_mb_params()

SG_ADD(
&batch_size, "batch_size", "batch size for mini-batch KMeans",
MS_NOT_AVAILABLE);
ParameterProperties());
}

bool CKMeansMiniBatch::train_machine(CFeatures* data)
Expand Down
4 changes: 2 additions & 2 deletions src/shogun/converter/DiffusionMaps.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -25,8 +25,8 @@ CDiffusionMaps::CDiffusionMaps() :

void CDiffusionMaps::init()
{
SG_ADD(&m_t, "t", "number of steps", MS_AVAILABLE);
SG_ADD(&m_width, "width", "gaussian kernel width", MS_AVAILABLE);
SG_ADD(&m_t, "t", "number of steps", ParameterProperties::HYPER);
SG_ADD(&m_width, "width", "gaussian kernel width", ParameterProperties::HYPER);
}

CDiffusionMaps::~CDiffusionMaps()
Expand Down
6 changes: 3 additions & 3 deletions src/shogun/converter/EmbeddingConverter.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -69,11 +69,11 @@ CKernel* CEmbeddingConverter::get_kernel() const
void CEmbeddingConverter::init()
{
SG_ADD(&m_target_dim, "target_dim",
"target dimensionality of preprocessor", MS_AVAILABLE);
"target dimensionality of preprocessor", ParameterProperties::HYPER);
SG_ADD(
&m_distance, "distance", "distance to be used for embedding",
MS_AVAILABLE);
ParameterProperties::HYPER);
SG_ADD(
&m_kernel, "kernel", "kernel to be used for embedding", MS_AVAILABLE);
&m_kernel, "kernel", "kernel to be used for embedding", ParameterProperties::HYPER);
}
}
4 changes: 2 additions & 2 deletions src/shogun/converter/FactorAnalysis.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,8 @@ CFactorAnalysis::CFactorAnalysis() :

void CFactorAnalysis::init()
{
SG_ADD(&m_max_iteration, "max_iteration", "maximum number of iterations", MS_NOT_AVAILABLE);
SG_ADD(&m_epsilon, "epsilon", "convergence parameter", MS_NOT_AVAILABLE);
SG_ADD(&m_max_iteration, "max_iteration", "maximum number of iterations", ParameterProperties());
SG_ADD(&m_epsilon, "epsilon", "convergence parameter", ParameterProperties());
}

CFactorAnalysis::~CFactorAnalysis()
Expand Down
10 changes: 5 additions & 5 deletions src/shogun/converter/HashedDocConverter.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -58,14 +58,14 @@ void CHashedDocConverter::init(CTokenizer* tzer, int32_t hash_bits, bool normali

SG_REF(tokenizer);
SG_ADD(&num_bits, "num_bits", "Number of bits of the hash",
MS_NOT_AVAILABLE);
ParameterProperties());
SG_ADD(&ngrams, "ngrams", "Number of consecutive tokens",
MS_NOT_AVAILABLE);
ParameterProperties());
SG_ADD(&tokens_to_skip, "tokens_to_skip", "Number of tokens to skip",
MS_NOT_AVAILABLE);
ParameterProperties());
SG_ADD(&should_normalize, "should_normalize", "Whether to normalize vectors or not",
MS_NOT_AVAILABLE);
SG_ADD(&tokenizer, "tokenizer", "Tokenizer", MS_NOT_AVAILABLE);
ParameterProperties());
SG_ADD(&tokenizer, "tokenizer", "Tokenizer", ParameterProperties());
}

const char* CHashedDocConverter::get_name() const
Expand Down
Loading