-
-
Notifications
You must be signed in to change notification settings - Fork 1k
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
SG_ADD refactor #4417
SG_ADD refactor #4417
Changes from 2 commits
6de1d81
c610f3c
d304e15
a601c0a
bd9e09e
2458eee
9d6e506
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -4,7 +4,8 @@ | |
* Authors: Heiko Strathmann, Soeren Sonnenburg, Sergey Lisitsyn, | ||
* Giovanni De Toni, Jacob Walker, Thoralf Klein, Chiyuan Zhang, | ||
* Fernando Iglesias, Sanuj Sharma, Roman Votyakov, Yuyu Zhang, | ||
* Viktor Gal, Bjoern Esser, Evangelos Anagnostopoulos, Pan Deng | ||
* Viktor Gal, Bjoern Esser, Evangelos Anagnostopoulos, Pan Deng, | ||
* Gil Hoben | ||
*/ | ||
|
||
#ifndef __SGOBJECT_H__ | ||
|
@@ -59,50 +60,18 @@ template <class T> class SGStringList; | |
* Macros for registering parameters/model selection parameters | ||
******************************************************************************/ | ||
|
||
#ifdef _MSC_VER | ||
|
||
#define VA_NARGS(...) INTERNAL_EXPAND_ARGS_PRIVATE(INTERNAL_ARGS_AUGMENTER(__VA_ARGS__)) | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. While it would be good to get rid of this thing, I think we still need to keep it (see below on default properties) There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. OK, I will bring this back and add a SG_ADD3! |
||
#define INTERNAL_ARGS_AUGMENTER(...) unused, __VA_ARGS__ | ||
#define INTERNAL_EXPAND(x) x | ||
#define INTERNAL_EXPAND_ARGS_PRIVATE(...) INTERNAL_EXPAND(INTERNAL_GET_ARG_COUNT_PRIVATE(__VA_ARGS__, 5, 4, 3, 2, 1, 0)) | ||
#define INTERNAL_GET_ARG_COUNT_PRIVATE(_0_, _1_, _2_, _3_, _4_, _5_, count, ...) count | ||
|
||
#else | ||
|
||
#define VA_NARGS_IMPL(_1, _2, _3, _4, _5, N, ...) N | ||
#define VA_NARGS(...) VA_NARGS_IMPL(__VA_ARGS__, 5, 4, 3, 2, 1) | ||
|
||
#endif | ||
|
||
#define VARARG_IMPL2(base, count, ...) base##count(__VA_ARGS__) | ||
#define VARARG_IMPL(base, count, ...) VARARG_IMPL2(base, count, __VA_ARGS__) | ||
#define VARARG(base, ...) VARARG_IMPL(base, VA_NARGS(__VA_ARGS__), __VA_ARGS__) | ||
|
||
#define SG_ADD4(param, name, description, ms_available) \ | ||
#define SG_ADD(param, name, description, param_properties) \ | ||
{ \ | ||
AnyParameterProperties pprop = \ | ||
AnyParameterProperties(description, param_properties); \ | ||
this->m_parameters->add(param, name, description); \ | ||
this->watch_param( \ | ||
name, param, \ | ||
AnyParameterProperties( \ | ||
description, ms_available, GRADIENT_NOT_AVAILABLE)); \ | ||
if (ms_available) \ | ||
this->watch_param(name, param, pprop); \ | ||
if (pprop.get_model_selection()) \ | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Note for later: we can already start to get rid of the |
||
this->m_model_selection_parameters->add(param, name, description); \ | ||
} | ||
|
||
#define SG_ADD5(param, name, description, ms_available, gradient_available) \ | ||
{ \ | ||
this->m_parameters->add(param, name, description); \ | ||
this->watch_param( \ | ||
name, param, AnyParameterProperties( \ | ||
description, ms_available, gradient_available)); \ | ||
if (ms_available) \ | ||
this->m_model_selection_parameters->add(param, name, description); \ | ||
if (gradient_available) \ | ||
if (pprop.get_gradient()) \ | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. same for those gradient parameters |
||
this->m_gradient_parameters->add(param, name, description); \ | ||
} | ||
|
||
#define SG_ADD(...) VARARG(SG_ADD, __VA_ARGS__) | ||
|
||
/******************************************************************************* | ||
* End of macros for registering parameters/model selection parameters | ||
******************************************************************************/ | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -49,9 +49,9 @@ void CLDA::init() | |
|
||
SG_ADD( | ||
(machine_int_t*)&m_method, "m_method", | ||
"Method used for LDA calculation", MS_NOT_AVAILABLE); | ||
SG_ADD(&m_gamma, "m_gamma", "Regularization parameter", MS_AVAILABLE); | ||
SG_ADD(&m_bdc_svd, "m_bdc_svd", "Use BDC-SVD algorithm", MS_NOT_AVAILABLE); | ||
"Method used for LDA calculation", ParameterProperties()); | ||
SG_ADD(&m_gamma, "m_gamma", "Regularization parameter", ParameterProperties::HYPER); | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. this is quite nice now |
||
SG_ADD(&m_bdc_svd, "m_bdc_svd", "Use BDC-SVD algorithm", ParameterProperties()); | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Mmmh. This is not nice, it would be better if this default was done automatically. |
||
} | ||
|
||
CLDA::~CLDA() | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -22,17 +22,17 @@ CPluginEstimate::CPluginEstimate(float64_t pos_pseudo, float64_t neg_pseudo) | |
{ | ||
SG_ADD( | ||
&m_pos_pseudo, "pos_pseudo", "pseudo count for positive class", | ||
MS_NOT_AVAILABLE); | ||
ParameterProperties()); | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. again, would be good if default was taken if just nothing was provided |
||
SG_ADD( | ||
&m_neg_pseudo, "neg_pseudo", "pseudo count for negative class", | ||
MS_NOT_AVAILABLE); | ||
ParameterProperties()); | ||
SG_ADD( | ||
&pos_model, "pos_model", "LinearHMM modelling positive class.", | ||
MS_NOT_AVAILABLE); | ||
ParameterProperties()); | ||
SG_ADD( | ||
&neg_model, "neg_model", "LinearHMM modelling negative class.", | ||
MS_NOT_AVAILABLE); | ||
SG_ADD(&features, "features", "String Features.", MS_NOT_AVAILABLE); | ||
ParameterProperties()); | ||
SG_ADD(&features, "features", "String Features.", ParameterProperties()); | ||
} | ||
|
||
CPluginEstimate::~CPluginEstimate() | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -271,21 +271,21 @@ void CMKL::register_params() | |
rho = 0; | ||
lp_initialized = false; | ||
|
||
SG_ADD((CMachine**)&svm, "svm", "wrapper svm", MS_NOT_AVAILABLE); | ||
SG_ADD(&C_mkl, "C_mkl", "C mkl", MS_NOT_AVAILABLE); | ||
SG_ADD(&mkl_norm, "mkl_norm", "norm used in mkl", MS_NOT_AVAILABLE); | ||
SG_ADD(&ent_lambda, "ent_lambda", "elastic net sparsity trade-off parameter", MS_NOT_AVAILABLE); | ||
SG_ADD(&mkl_block_norm, "mkl_block_norm", "mkl sparse trade-off parameter", MS_NOT_AVAILABLE); | ||
SG_ADD((CMachine**)&svm, "svm", "wrapper svm", ParameterProperties()); | ||
SG_ADD(&C_mkl, "C_mkl", "C mkl", ParameterProperties()); | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. this is HYPER |
||
SG_ADD(&mkl_norm, "mkl_norm", "norm used in mkl", ParameterProperties()); | ||
SG_ADD(&ent_lambda, "ent_lambda", "elastic net sparsity trade-off parameter", ParameterProperties()); | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. hyper |
||
SG_ADD(&mkl_block_norm, "mkl_block_norm", "mkl sparse trade-off parameter", ParameterProperties()); | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. hyper |
||
|
||
m_parameters->add_vector(&beta_local, &beta_local_size, "beta_local", "subkernel weights on L1 term of elastic net mkl"); | ||
watch_param("beta_local", &beta_local, &beta_local_size); | ||
|
||
SG_ADD(&mkl_iterations, "mkl_iterations", "number of mkl steps", MS_NOT_AVAILABLE); | ||
SG_ADD(&mkl_epsilon, "mkl_epsilon", "mkl epsilon", MS_NOT_AVAILABLE); | ||
SG_ADD(&interleaved_optimization, "interleaved_optimization", "whether to use mkl wrapper or interleaved opt.", MS_NOT_AVAILABLE); | ||
SG_ADD(&w_gap, "w_gap", "gap between interactions", MS_NOT_AVAILABLE); | ||
SG_ADD(&rho, "rho", "objective after mkl iterations", MS_NOT_AVAILABLE); | ||
SG_ADD(&lp_initialized, "lp_initialized", "if lp is Initialized", MS_NOT_AVAILABLE); | ||
SG_ADD(&mkl_iterations, "mkl_iterations", "number of mkl steps", ParameterProperties()); | ||
SG_ADD(&mkl_epsilon, "mkl_epsilon", "mkl epsilon", ParameterProperties()); | ||
SG_ADD(&interleaved_optimization, "interleaved_optimization", "whether to use mkl wrapper or interleaved opt.", ParameterProperties()); | ||
SG_ADD(&w_gap, "w_gap", "gap between interactions", ParameterProperties()); | ||
SG_ADD(&rho, "rho", "objective after mkl iterations", ParameterProperties()); | ||
SG_ADD(&lp_initialized, "lp_initialized", "if lp is Initialized", ParameterProperties()); | ||
// Missing: self (3rd party specific, handled in clone()) | ||
} | ||
|
||
|
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Welcome to SGObject ;)
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Haha yes, I thought I would discretely add myself to the list :D