From 6de1d81bdc28417697254eec9deae3d1fd67c4c9 Mon Sep 17 00:00:00 2001 From: Gil Date: Fri, 16 Nov 2018 14:56:44 +0000 Subject: [PATCH 1/6] replaced variable macro with a single macro [ci skip] * with the new AnyParameterProperties we can clean up the macros a bit and have a single macro with four parameters * inside the macro we still have exactly the same process --- src/shogun/base/SGObject.h | 47 +++++++------------------------------- 1 file changed, 8 insertions(+), 39 deletions(-) diff --git a/src/shogun/base/SGObject.h b/src/shogun/base/SGObject.h index 7aefd5b82f8..305034411f8 100644 --- a/src/shogun/base/SGObject.h +++ b/src/shogun/base/SGObject.h @@ -4,7 +4,8 @@ * Authors: Heiko Strathmann, Soeren Sonnenburg, Sergey Lisitsyn, * Giovanni De Toni, Jacob Walker, Thoralf Klein, Chiyuan Zhang, * Fernando Iglesias, Sanuj Sharma, Roman Votyakov, Yuyu Zhang, - * Viktor Gal, Bjoern Esser, Evangelos Anagnostopoulos, Pan Deng + * Viktor Gal, Bjoern Esser, Evangelos Anagnostopoulos, Pan Deng, + * Gil Hoben */ #ifndef __SGOBJECT_H__ @@ -59,50 +60,18 @@ template class SGStringList; * Macros for registering parameters/model selection parameters ******************************************************************************/ -#ifdef _MSC_VER - -#define VA_NARGS(...) INTERNAL_EXPAND_ARGS_PRIVATE(INTERNAL_ARGS_AUGMENTER(__VA_ARGS__)) -#define INTERNAL_ARGS_AUGMENTER(...) unused, __VA_ARGS__ -#define INTERNAL_EXPAND(x) x -#define INTERNAL_EXPAND_ARGS_PRIVATE(...) INTERNAL_EXPAND(INTERNAL_GET_ARG_COUNT_PRIVATE(__VA_ARGS__, 5, 4, 3, 2, 1, 0)) -#define INTERNAL_GET_ARG_COUNT_PRIVATE(_0_, _1_, _2_, _3_, _4_, _5_, count, ...) count - -#else - -#define VA_NARGS_IMPL(_1, _2, _3, _4, _5, N, ...) N -#define VA_NARGS(...) VA_NARGS_IMPL(__VA_ARGS__, 5, 4, 3, 2, 1) - -#endif - -#define VARARG_IMPL2(base, count, ...) base##count(__VA_ARGS__) -#define VARARG_IMPL(base, count, ...) VARARG_IMPL2(base, count, __VA_ARGS__) -#define VARARG(base, ...) VARARG_IMPL(base, VA_NARGS(__VA_ARGS__), __VA_ARGS__) - -#define SG_ADD4(param, name, description, ms_available) \ +#define SG_ADD(param, name, description, param_properties) \ { \ + AnyParameterProperties pprop = \ + AnyParameterProperties(description, param_properties); \ this->m_parameters->add(param, name, description); \ - this->watch_param( \ - name, param, \ - AnyParameterProperties( \ - description, ms_available, GRADIENT_NOT_AVAILABLE)); \ - if (ms_available) \ + this->watch_param(name, param, pprop); \ + if (pprop.get_model_selection()) \ this->m_model_selection_parameters->add(param, name, description); \ - } - -#define SG_ADD5(param, name, description, ms_available, gradient_available) \ - { \ - this->m_parameters->add(param, name, description); \ - this->watch_param( \ - name, param, AnyParameterProperties( \ - description, ms_available, gradient_available)); \ - if (ms_available) \ - this->m_model_selection_parameters->add(param, name, description); \ - if (gradient_available) \ + if (pprop.get_gradient()) \ this->m_gradient_parameters->add(param, name, description); \ } -#define SG_ADD(...) 
VARARG(SG_ADD, __VA_ARGS__) - /******************************************************************************* * End of macros for registering parameters/model selection parameters ******************************************************************************/ From c610f3c79971f3f3f63dba264f443af5125e2c16 Mon Sep 17 00:00:00 2001 From: Gil Date: Fri, 16 Nov 2018 15:02:01 +0000 Subject: [PATCH 2/6] refactored all SG_ADD macros to use ParameterProperties [ci skip] --- src/shogun/classifier/AveragedPerceptron.cpp | 4 +- src/shogun/classifier/LDA.cpp | 6 +-- src/shogun/classifier/Perceptron.cpp | 4 +- src/shogun/classifier/PluginEstimate.cpp | 10 ++-- src/shogun/classifier/mkl/MKL.cpp | 22 ++++---- src/shogun/classifier/svm/LibLinear.cpp | 14 ++--- src/shogun/classifier/svm/LibSVM.cpp | 2 +- src/shogun/classifier/svm/OnlineLibLinear.cpp | 6 +-- src/shogun/classifier/svm/OnlineSVMSGD.cpp | 20 +++---- src/shogun/classifier/svm/SGDQN.cpp | 10 ++-- src/shogun/classifier/svm/SVM.cpp | 22 ++++---- src/shogun/classifier/svm/SVMOcas.cpp | 12 ++--- src/shogun/clustering/GMM.cpp | 2 +- src/shogun/clustering/KMeansBase.cpp | 8 +-- src/shogun/clustering/KMeansMiniBatch.cpp | 2 +- src/shogun/converter/DiffusionMaps.cpp | 4 +- src/shogun/converter/EmbeddingConverter.cpp | 6 +-- src/shogun/converter/FactorAnalysis.cpp | 4 +- src/shogun/converter/HashedDocConverter.cpp | 10 ++-- src/shogun/converter/Isomap.cpp | 2 +- src/shogun/converter/LaplacianEigenmaps.cpp | 4 +- .../converter/LocallyLinearEmbedding.cpp | 6 +-- src/shogun/converter/ManifoldSculpting.cpp | 6 +-- .../converter/MultidimensionalScaling.cpp | 6 +-- .../StochasticProximityEmbedding.cpp | 8 +-- ...DistributedStochasticNeighborEmbedding.cpp | 4 +- src/shogun/converter/ica/FFSep.cpp | 2 +- src/shogun/converter/ica/FastICA.cpp | 2 +- src/shogun/converter/ica/ICAConverter.cpp | 6 +-- src/shogun/converter/ica/Jade.cpp | 2 +- src/shogun/converter/ica/JediSep.cpp | 2 +- src/shogun/converter/ica/SOBI.cpp | 2 +- src/shogun/converter/ica/UWedgeSep.cpp | 2 +- .../distance/AttenuatedEuclideanDistance.cpp | 2 +- src/shogun/distance/CustomDistance.cpp | 2 +- .../distance/CustomMahalanobisDistance.cpp | 2 +- src/shogun/distance/Distance.cpp | 4 +- src/shogun/distance/EuclideanDistance.cpp | 6 +-- src/shogun/distance/HammingWordDistance.cpp | 2 +- src/shogun/distance/KernelDistance.cpp | 4 +- src/shogun/distance/MahalanobisDistance.cpp | 4 +- src/shogun/distance/MinkowskiMetric.cpp | 2 +- src/shogun/distributions/Distribution.cpp | 2 +- src/shogun/distributions/Gaussian.cpp | 10 ++-- src/shogun/distributions/KernelDensity.cpp | 10 ++-- src/shogun/distributions/MixtureModel.cpp | 8 +-- src/shogun/distributions/PositionalPWM.cpp | 10 ++-- .../classical/GaussianDistribution.cpp | 4 +- .../classical/ProbabilityDistribution.cpp | 2 +- src/shogun/ensemble/WeightedMajorityVote.cpp | 2 +- src/shogun/evaluation/CrossValidation.cpp | 2 +- src/shogun/evaluation/CrossValidation.h | 4 +- .../evaluation/CrossValidationStorage.cpp | 16 +++--- src/shogun/evaluation/GradientEvaluation.cpp | 2 +- src/shogun/evaluation/MachineEvaluation.cpp | 14 ++--- src/shogun/evaluation/SigmoidCalibration.cpp | 12 ++--- src/shogun/evaluation/SplittingStrategy.cpp | 8 +-- src/shogun/features/Alphabet.cpp | 6 +-- src/shogun/features/CombinedDotFeatures.cpp | 6 +-- src/shogun/features/CombinedFeatures.cpp | 4 +- src/shogun/features/DenseFeatures.cpp | 6 +-- .../features/DenseSubSamplesFeatures.cpp | 4 +- src/shogun/features/DotFeatures.cpp | 2 +- src/shogun/features/DummyFeatures.cpp | 2 +- 
src/shogun/features/FKFeatures.cpp | 2 +- src/shogun/features/FactorGraphFeatures.cpp | 2 +- src/shogun/features/Features.cpp | 8 +-- src/shogun/features/IndexFeatures.cpp | 2 +- src/shogun/features/LBPPyrDotFeatures.cpp | 8 +-- src/shogun/features/LatentFeatures.cpp | 2 +- src/shogun/features/MatrixFeatures.cpp | 6 +-- src/shogun/features/PolyFeatures.cpp | 10 ++-- .../features/RandomFourierDotFeatures.cpp | 6 +-- .../RandomKitchenSinksDotFeatures.cpp | 4 +- src/shogun/features/SparsePolyFeatures.cpp | 14 ++--- src/shogun/features/StringFeatures.cpp | 12 ++--- src/shogun/features/Subset.cpp | 2 +- src/shogun/features/SubsetStack.cpp | 4 +- src/shogun/features/TOPFeatures.cpp | 4 +- .../features/hashed/HashedDenseFeatures.cpp | 8 +-- .../features/hashed/HashedDocDotFeatures.cpp | 12 ++--- .../features/hashed/HashedSparseFeatures.cpp | 8 +-- .../StreamingHashedDenseFeatures.cpp | 6 +-- .../StreamingHashedDocDotFeatures.cpp | 6 +-- .../StreamingHashedSparseFeatures.cpp | 6 +-- .../generators/GaussianBlobsDataGenerator.cpp | 10 ++-- .../generators/MeanShiftDataGenerator.cpp | 6 +-- src/shogun/io/Serializable.h | 2 +- src/shogun/io/UAIFile.cpp | 22 ++++---- src/shogun/kernel/ANOVAKernel.cpp | 2 +- src/shogun/kernel/AUCKernel.cpp | 2 +- src/shogun/kernel/BesselKernel.cpp | 4 +- src/shogun/kernel/CauchyKernel.cpp | 4 +- src/shogun/kernel/Chi2Kernel.cpp | 2 +- src/shogun/kernel/CircularKernel.cpp | 4 +- src/shogun/kernel/CombinedKernel.cpp | 12 ++--- src/shogun/kernel/ConstKernel.cpp | 2 +- src/shogun/kernel/CustomKernel.cpp | 12 ++--- src/shogun/kernel/DiagKernel.cpp | 2 +- src/shogun/kernel/DistanceKernel.cpp | 4 +- src/shogun/kernel/ExponentialARDKernel.cpp | 12 ++--- src/shogun/kernel/ExponentialKernel.cpp | 4 +- src/shogun/kernel/GaussianARDKernel.cpp | 4 +- src/shogun/kernel/GaussianKernel.cpp | 2 +- src/shogun/kernel/GaussianShiftKernel.cpp | 4 +- src/shogun/kernel/GaussianShortRealKernel.cpp | 2 +- .../kernel/HistogramIntersectionKernel.cpp | 2 +- .../kernel/InverseMultiQuadricKernel.cpp | 4 +- src/shogun/kernel/Kernel.cpp | 22 ++++---- src/shogun/kernel/LogKernel.cpp | 4 +- src/shogun/kernel/MultiquadricKernel.cpp | 4 +- src/shogun/kernel/PeriodicKernel.cpp | 8 +-- src/shogun/kernel/PolyKernel.cpp | 4 +- src/shogun/kernel/PowerKernel.cpp | 4 +- src/shogun/kernel/ProductKernel.cpp | 4 +- src/shogun/kernel/RationalQuadraticKernel.cpp | 4 +- src/shogun/kernel/ShiftInvariantKernel.cpp | 4 +- src/shogun/kernel/SigmoidKernel.cpp | 4 +- src/shogun/kernel/SphericalKernel.cpp | 4 +- src/shogun/kernel/TStudentKernel.cpp | 4 +- src/shogun/kernel/TensorProductPairKernel.cpp | 2 +- src/shogun/kernel/WaveKernel.cpp | 4 +- src/shogun/kernel/WaveletKernel.cpp | 4 +- src/shogun/kernel/WeightedDegreeRBFKernel.cpp | 4 +- .../normalizer/AvgDiagKernelNormalizer.h | 2 +- .../kernel/normalizer/DiceKernelNormalizer.h | 2 +- .../normalizer/FirstElementKernelNormalizer.h | 2 +- .../kernel/normalizer/KernelNormalizer.h | 2 +- .../kernel/normalizer/RidgeKernelNormalizer.h | 4 +- .../normalizer/ScatterKernelNormalizer.h | 10 ++-- .../normalizer/SqrtDiagKernelNormalizer.h | 2 +- .../normalizer/VarianceKernelNormalizer.h | 4 +- .../kernel/string/CommWordStringKernel.cpp | 6 +-- .../kernel/string/FixedDegreeStringKernel.cpp | 2 +- .../string/GaussianMatchStringKernel.cpp | 2 +- .../string/HistogramWordStringKernel.cpp | 4 +- .../string/LocalAlignmentStringKernel.cpp | 6 +-- .../string/LocalityImprovedStringKernel.cpp | 6 +-- .../kernel/string/MatchWordStringKernel.cpp | 2 +- .../kernel/string/OligoStringKernel.cpp | 6 
+-- .../kernel/string/PolyMatchStringKernel.cpp | 6 +-- .../string/PolyMatchWordStringKernel.cpp | 4 +- .../string/RegulatoryModulesStringKernel.cpp | 16 +++--- src/shogun/kernel/string/SNPStringKernel.cpp | 6 +-- .../SimpleLocalityImprovedStringKernel.cpp | 8 +-- .../string/SpectrumMismatchRBFKernel.cpp | 14 ++--- .../kernel/string/SpectrumRBFKernel.cpp | 12 ++--- .../kernel/string/SubsequenceStringKernel.cpp | 4 +- .../WeightedDegreePositionStringKernel.cpp | 16 +++--- .../string/WeightedDegreeStringKernel.cpp | 14 ++--- src/shogun/labels/DenseLabels.cpp | 2 +- src/shogun/labels/Labels.cpp | 4 +- src/shogun/labels/LatentLabels.cpp | 4 +- src/shogun/labels/MultilabelLabels.cpp | 6 +-- src/shogun/labels/StructuredLabels.cpp | 2 +- src/shogun/latent/LatentModel.cpp | 8 +-- src/shogun/lib/DelimiterTokenizer.cpp | 4 +- src/shogun/lib/DynamicArray.h | 12 ++--- src/shogun/lib/DynamicObjectArray.h | 12 ++--- src/shogun/lib/List.h | 4 +- src/shogun/lib/NGramTokenizer.cpp | 4 +- src/shogun/lib/Tokenizer.cpp | 2 +- src/shogun/loss/HuberLoss.cpp | 2 +- src/shogun/machine/BaggingMachine.cpp | 14 ++--- src/shogun/machine/BaseMulticlassMachine.cpp | 2 +- src/shogun/machine/DistanceMachine.cpp | 2 +- src/shogun/machine/GaussianProcessMachine.cpp | 2 +- src/shogun/machine/IterativeMachine.h | 6 +-- src/shogun/machine/KernelMachine.cpp | 18 +++---- .../machine/KernelMulticlassMachine.cpp | 4 +- .../machine/KernelStructuredOutputMachine.cpp | 2 +- src/shogun/machine/LinearLatentMachine.cpp | 8 +-- src/shogun/machine/LinearMachine.cpp | 6 +-- src/shogun/machine/LinearMulticlassMachine.h | 4 +- .../machine/LinearStructuredOutputMachine.cpp | 2 +- src/shogun/machine/Machine.cpp | 10 ++-- src/shogun/machine/MulticlassMachine.cpp | 4 +- src/shogun/machine/OnlineLinearMachine.cpp | 6 +-- src/shogun/machine/RandomForest.cpp | 2 +- src/shogun/machine/StochasticGBMachine.cpp | 14 ++--- .../machine/StructuredOutputMachine.cpp | 8 +-- src/shogun/machine/gp/ConstMean.cpp | 2 +- .../gp/DualVariationalGaussianLikelihood.cpp | 6 +-- src/shogun/machine/gp/GaussianLikelihood.cpp | 2 +- src/shogun/machine/gp/Inference.cpp | 22 ++++---- .../machine/gp/KLCholeskyInferenceMethod.cpp | 4 +- .../gp/KLCovarianceInferenceMethod.cpp | 12 ++--- .../machine/gp/KLDiagonalInferenceMethod.cpp | 2 +- .../machine/gp/KLDualInferenceMethod.cpp | 14 ++--- src/shogun/machine/gp/KLInference.cpp | 18 +++---- .../machine/gp/KLLowerTriangularInference.cpp | 10 ++-- src/shogun/machine/gp/LaplaceInference.cpp | 8 +-- .../gp/LogitVGPiecewiseBoundLikelihood.cpp | 14 ++--- .../gp/MultiLaplaceInferenceMethod.cpp | 12 ++--- .../machine/gp/NumericalVGLikelihood.cpp | 10 ++-- src/shogun/machine/gp/SingleFITCInference.cpp | 12 ++--- .../gp/SingleFITCLaplaceInferenceMethod.cpp | 38 ++++++------- .../gp/SingleLaplaceInferenceMethod.cpp | 22 ++++---- .../machine/gp/SingleSparseInference.cpp | 16 +++--- src/shogun/machine/gp/SoftMaxLikelihood.cpp | 2 +- src/shogun/machine/gp/SparseInference.cpp | 10 ++-- src/shogun/machine/gp/StudentsTLikelihood.cpp | 4 +- .../machine/gp/StudentsTVGLikelihood.cpp | 4 +- .../machine/gp/VarDTCInferenceMethod.cpp | 18 +++---- .../gp/VariationalGaussianLikelihood.cpp | 6 +-- .../machine/gp/VariationalLikelihood.cpp | 4 +- .../linalg/eigsolver/EigenSolver.h | 10 ++-- .../linalg/eigsolver/LanczosEigenSolver.cpp | 6 +-- .../linalg/linop/LinearOperator.cpp | 2 +- .../ratapprox/logdet/LogDetEstimator.cpp | 4 +- .../opfunc/LogRationalApproximationCGM.cpp | 2 +- .../LogRationalApproximationIndividual.cpp | 2 +- 
.../ratapprox/opfunc/OperatorFunction.h | 2 +- .../opfunc/RationalApproximation.cpp | 12 ++--- .../ratapprox/tracesampler/ProbingSampler.cpp | 8 +-- .../ratapprox/tracesampler/TraceSampler.h | 4 +- src/shogun/metric/LMNN.cpp | 30 +++++------ .../modelselection/GradientModelSelection.cpp | 8 +-- src/shogun/modelselection/ModelSelection.cpp | 4 +- .../modelselection/ParameterCombination.cpp | 2 +- src/shogun/multiclass/GaussianNaiveBayes.cpp | 16 +++--- src/shogun/multiclass/KNN.cpp | 10 ++-- src/shogun/multiclass/MCLDA.cpp | 24 ++++----- src/shogun/multiclass/MulticlassLibLinear.cpp | 10 ++-- src/shogun/multiclass/MulticlassLibSVM.cpp | 2 +- src/shogun/multiclass/MulticlassOCAS.cpp | 10 ++-- .../multiclass/MulticlassOneVsOneStrategy.cpp | 2 +- src/shogun/multiclass/MulticlassSVM.cpp | 2 +- src/shogun/multiclass/MulticlassStrategy.cpp | 6 +-- src/shogun/multiclass/QDA.cpp | 16 +++--- src/shogun/multiclass/ScatterSVM.cpp | 6 +-- src/shogun/multiclass/ShareBoost.cpp | 4 +- .../ecoc/ECOCDiscriminantEncoder.cpp | 2 +- .../multiclass/ecoc/ECOCForestEncoder.cpp | 2 +- .../ecoc/ECOCRandomDenseEncoder.cpp | 6 +-- .../ecoc/ECOCRandomSparseEncoder.cpp | 10 ++-- src/shogun/multiclass/ecoc/ECOCStrategy.cpp | 4 +- .../BalancedConditionalProbabilityTree.cpp | 2 +- .../multiclass/tree/C45ClassifierTree.cpp | 10 ++-- src/shogun/multiclass/tree/CARTree.cpp | 26 ++++----- src/shogun/multiclass/tree/CHAIDTree.cpp | 20 +++---- src/shogun/multiclass/tree/NbodyTree.cpp | 12 ++--- src/shogun/multiclass/tree/RandomCARTree.cpp | 2 +- src/shogun/multiclass/tree/RelaxedTree.cpp | 10 ++-- src/shogun/multiclass/tree/TreeMachine.h | 2 +- src/shogun/multiclass/tree/TreeMachineNode.h | 4 +- src/shogun/neuralnets/Autoencoder.cpp | 6 +-- src/shogun/neuralnets/DeepAutoencoder.cpp | 28 +++++----- src/shogun/neuralnets/DeepBeliefNetwork.cpp | 54 +++++++++---------- .../neuralnets/NeuralConvolutionalLayer.cpp | 28 +++++----- src/shogun/neuralnets/NeuralInputLayer.cpp | 4 +- src/shogun/neuralnets/NeuralLayer.cpp | 28 +++++----- src/shogun/neuralnets/NeuralNetwork.cpp | 48 ++++++++--------- src/shogun/neuralnets/RBM.cpp | 40 +++++++------- src/shogun/optimization/AdaDeltaUpdater.cpp | 10 ++-- src/shogun/optimization/AdaGradUpdater.cpp | 6 +-- src/shogun/optimization/AdamUpdater.cpp | 16 +++--- .../optimization/AdaptMomentumCorrection.cpp | 12 ++--- src/shogun/optimization/ConstLearningRate.cpp | 2 +- src/shogun/optimization/DescendCorrection.cpp | 2 +- .../DescendUpdaterWithCorrection.cpp | 2 +- src/shogun/optimization/ElasticNetPenalty.cpp | 6 +-- .../optimization/FirstOrderMinimizer.cpp | 6 +-- .../FirstOrderStochasticMinimizer.cpp | 10 ++-- .../InverseScalingLearningRate.cpp | 8 +-- src/shogun/optimization/L1Penalty.cpp | 2 +- src/shogun/optimization/L1PenaltyForTG.cpp | 4 +- .../optimization/MomentumCorrection.cpp | 2 +- .../optimization/PNormMappingFunction.cpp | 2 +- src/shogun/optimization/RmsPropUpdater.cpp | 8 +-- src/shogun/optimization/SMDMinimizer.cpp | 2 +- src/shogun/optimization/SMIDASMinimizer.cpp | 2 +- src/shogun/optimization/SVRGMinimizer.cpp | 8 +-- .../optimization/lbfgs/LBFGSMinimizer.cpp | 34 ++++++------ .../preprocessor/DependenceMaximization.cpp | 4 +- src/shogun/preprocessor/FeatureSelection.cpp | 12 ++--- src/shogun/preprocessor/FisherLDA.cpp | 14 ++--- .../preprocessor/HomogeneousKernelMap.cpp | 20 +++---- src/shogun/preprocessor/KernelPCA.cpp | 8 +-- src/shogun/preprocessor/PCA.cpp | 20 +++---- src/shogun/preprocessor/PNorm.cpp | 2 +- src/shogun/preprocessor/PruneVarSubMean.cpp | 10 ++-- 
.../RandomFourierGaussPreproc.cpp | 24 ++++----- src/shogun/preprocessor/RescaleFeatures.cpp | 4 +- src/shogun/regression/KRRNystrom.cpp | 2 +- .../regression/KernelRidgeRegression.cpp | 2 +- .../regression/LeastAngleRegression.cpp | 8 +-- .../regression/LinearRidgeRegression.cpp | 4 +- .../regression/svr/LibLinearRegression.cpp | 10 ++-- src/shogun/regression/svr/LibSVR.cpp | 2 +- src/shogun/structure/CCSOSVM.cpp | 18 +++---- src/shogun/structure/DisjointSet.cpp | 8 +-- src/shogun/structure/FWSOSVM.cpp | 10 ++-- src/shogun/structure/Factor.cpp | 18 +++---- src/shogun/structure/FactorGraph.cpp | 12 ++--- src/shogun/structure/FactorGraphModel.cpp | 6 +-- src/shogun/structure/FactorType.cpp | 12 ++--- src/shogun/structure/HMSVMModel.cpp | 10 ++-- .../structure/HashedMultilabelModel.cpp | 10 ++-- .../structure/HierarchicalMultilabelModel.cpp | 8 +-- src/shogun/structure/MAPInference.cpp | 10 ++-- src/shogun/structure/MulticlassModel.cpp | 2 +- src/shogun/structure/MulticlassSOLabels.cpp | 6 +-- src/shogun/structure/MultilabelCLRModel.cpp | 2 +- src/shogun/structure/MultilabelModel.cpp | 6 +-- src/shogun/structure/MultilabelSOLabels.cpp | 4 +- src/shogun/structure/PrimalMosekSOSVM.cpp | 10 ++-- src/shogun/structure/SOSVMHelper.cpp | 14 ++--- src/shogun/structure/SequenceLabels.cpp | 2 +- src/shogun/structure/StateModel.cpp | 10 ++-- src/shogun/structure/StochasticSOSVM.cpp | 10 ++-- src/shogun/structure/StructuredModel.cpp | 4 +- .../DomainAdaptationMulticlassLibLinear.cpp | 6 +-- .../domain_adaptation/DomainAdaptationSVM.cpp | 6 +-- .../DomainAdaptationSVMLinear.cpp | 6 +-- .../transfer/multitask/LibLinearMTL.cpp | 10 ++-- .../multitask/MultitaskKernelPlifNormalizer.h | 4 +- src/shogun/transfer/multitask/Task.cpp | 6 +-- src/shogun/transformer/Transformer.cpp | 2 +- .../optimization/NLOPTMinimizer_unittest.cc | 4 +- .../StochasticMinimizers_unittest.cc | 6 +-- .../lbfgs/LBFGSMinimizer_unittest.cc | 4 +- 322 files changed, 1214 insertions(+), 1214 deletions(-) diff --git a/src/shogun/classifier/AveragedPerceptron.cpp b/src/shogun/classifier/AveragedPerceptron.cpp index 2e076b29a52..9da7e3e4d84 100644 --- a/src/shogun/classifier/AveragedPerceptron.cpp +++ b/src/shogun/classifier/AveragedPerceptron.cpp @@ -37,8 +37,8 @@ void CAveragedPerceptron::init() { max_iter = 1000; learn_rate = 0.1; - SG_ADD(&max_iter, "max_iter", "Maximum number of iterations.", MS_AVAILABLE); - SG_ADD(&learn_rate, "learn_rate", "Learning rate.", MS_AVAILABLE); + SG_ADD(&max_iter, "max_iter", "Maximum number of iterations.", ParameterProperties::HYPER); + SG_ADD(&learn_rate, "learn_rate", "Learning rate.", ParameterProperties::HYPER); } bool CAveragedPerceptron::train_machine(CFeatures* data) diff --git a/src/shogun/classifier/LDA.cpp b/src/shogun/classifier/LDA.cpp index 9476e0aa806..2c67075f2f5 100644 --- a/src/shogun/classifier/LDA.cpp +++ b/src/shogun/classifier/LDA.cpp @@ -49,9 +49,9 @@ void CLDA::init() SG_ADD( (machine_int_t*)&m_method, "m_method", - "Method used for LDA calculation", MS_NOT_AVAILABLE); - SG_ADD(&m_gamma, "m_gamma", "Regularization parameter", MS_AVAILABLE); - SG_ADD(&m_bdc_svd, "m_bdc_svd", "Use BDC-SVD algorithm", MS_NOT_AVAILABLE); + "Method used for LDA calculation", ParameterProperties()); + SG_ADD(&m_gamma, "m_gamma", "Regularization parameter", ParameterProperties::HYPER); + SG_ADD(&m_bdc_svd, "m_bdc_svd", "Use BDC-SVD algorithm", ParameterProperties()); } CLDA::~CLDA() diff --git a/src/shogun/classifier/Perceptron.cpp b/src/shogun/classifier/Perceptron.cpp index 083e126945e..dbc3fb7b07f 
100644 --- a/src/shogun/classifier/Perceptron.cpp +++ b/src/shogun/classifier/Perceptron.cpp @@ -24,8 +24,8 @@ CPerceptron::CPerceptron() : CIterativeMachine() m_initialize_hyperplane = true; SG_ADD( &m_initialize_hyperplane, "initialize_hyperplane", - "Whether to initialize hyperplane.", MS_AVAILABLE); - SG_ADD(&learn_rate, "learn_rate", "Learning rate.", MS_AVAILABLE); + "Whether to initialize hyperplane.", ParameterProperties::HYPER); + SG_ADD(&learn_rate, "learn_rate", "Learning rate.", ParameterProperties::HYPER); } CPerceptron::~CPerceptron() diff --git a/src/shogun/classifier/PluginEstimate.cpp b/src/shogun/classifier/PluginEstimate.cpp index 2d40f70ba6d..73220557ff9 100644 --- a/src/shogun/classifier/PluginEstimate.cpp +++ b/src/shogun/classifier/PluginEstimate.cpp @@ -22,17 +22,17 @@ CPluginEstimate::CPluginEstimate(float64_t pos_pseudo, float64_t neg_pseudo) { SG_ADD( &m_pos_pseudo, "pos_pseudo", "pseudo count for positive class", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD( &m_neg_pseudo, "neg_pseudo", "pseudo count for negative class", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD( &pos_model, "pos_model", "LinearHMM modelling positive class.", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD( &neg_model, "neg_model", "LinearHMM modelling negative class.", - MS_NOT_AVAILABLE); - SG_ADD(&features, "features", "String Features.", MS_NOT_AVAILABLE); + ParameterProperties()); + SG_ADD(&features, "features", "String Features.", ParameterProperties()); } CPluginEstimate::~CPluginEstimate() diff --git a/src/shogun/classifier/mkl/MKL.cpp b/src/shogun/classifier/mkl/MKL.cpp index 871e01794f3..db6718587a3 100644 --- a/src/shogun/classifier/mkl/MKL.cpp +++ b/src/shogun/classifier/mkl/MKL.cpp @@ -271,21 +271,21 @@ void CMKL::register_params() rho = 0; lp_initialized = false; - SG_ADD((CMachine**)&svm, "svm", "wrapper svm", MS_NOT_AVAILABLE); - SG_ADD(&C_mkl, "C_mkl", "C mkl", MS_NOT_AVAILABLE); - SG_ADD(&mkl_norm, "mkl_norm", "norm used in mkl", MS_NOT_AVAILABLE); - SG_ADD(&ent_lambda, "ent_lambda", "elastic net sparsity trade-off parameter", MS_NOT_AVAILABLE); - SG_ADD(&mkl_block_norm, "mkl_block_norm", "mkl sparse trade-off parameter", MS_NOT_AVAILABLE); + SG_ADD((CMachine**)&svm, "svm", "wrapper svm", ParameterProperties()); + SG_ADD(&C_mkl, "C_mkl", "C mkl", ParameterProperties()); + SG_ADD(&mkl_norm, "mkl_norm", "norm used in mkl", ParameterProperties()); + SG_ADD(&ent_lambda, "ent_lambda", "elastic net sparsity trade-off parameter", ParameterProperties()); + SG_ADD(&mkl_block_norm, "mkl_block_norm", "mkl sparse trade-off parameter", ParameterProperties()); m_parameters->add_vector(&beta_local, &beta_local_size, "beta_local", "subkernel weights on L1 term of elastic net mkl"); watch_param("beta_local", &beta_local, &beta_local_size); - SG_ADD(&mkl_iterations, "mkl_iterations", "number of mkl steps", MS_NOT_AVAILABLE); - SG_ADD(&mkl_epsilon, "mkl_epsilon", "mkl epsilon", MS_NOT_AVAILABLE); - SG_ADD(&interleaved_optimization, "interleaved_optimization", "whether to use mkl wrapper or interleaved opt.", MS_NOT_AVAILABLE); - SG_ADD(&w_gap, "w_gap", "gap between interactions", MS_NOT_AVAILABLE); - SG_ADD(&rho, "rho", "objective after mkl iterations", MS_NOT_AVAILABLE); - SG_ADD(&lp_initialized, "lp_initialized", "if lp is Initialized", MS_NOT_AVAILABLE); + SG_ADD(&mkl_iterations, "mkl_iterations", "number of mkl steps", ParameterProperties()); + SG_ADD(&mkl_epsilon, "mkl_epsilon", "mkl epsilon", ParameterProperties()); + SG_ADD(&interleaved_optimization, 
"interleaved_optimization", "whether to use mkl wrapper or interleaved opt.", ParameterProperties()); + SG_ADD(&w_gap, "w_gap", "gap between interactions", ParameterProperties()); + SG_ADD(&rho, "rho", "objective after mkl iterations", ParameterProperties()); + SG_ADD(&lp_initialized, "lp_initialized", "if lp is Initialized", ParameterProperties()); // Missing: self (3rd party specific, handled in clone()) } diff --git a/src/shogun/classifier/svm/LibLinear.cpp b/src/shogun/classifier/svm/LibLinear.cpp index ec01c7cf26e..3d97845117a 100644 --- a/src/shogun/classifier/svm/LibLinear.cpp +++ b/src/shogun/classifier/svm/LibLinear.cpp @@ -50,18 +50,18 @@ void CLibLinear::init() set_max_iterations(); set_epsilon(1e-5); - SG_ADD(&C1, "C1", "C Cost constant 1.", MS_AVAILABLE); - SG_ADD(&C2, "C2", "C Cost constant 2.", MS_AVAILABLE); + SG_ADD(&C1, "C1", "C Cost constant 1.", ParameterProperties::HYPER); + SG_ADD(&C2, "C2", "C Cost constant 2.", ParameterProperties::HYPER); SG_ADD( - &use_bias, "use_bias", "Indicates if bias is used.", MS_NOT_AVAILABLE); - SG_ADD(&epsilon, "epsilon", "Convergence precision.", MS_NOT_AVAILABLE); + &use_bias, "use_bias", "Indicates if bias is used.", ParameterProperties()); + SG_ADD(&epsilon, "epsilon", "Convergence precision.", ParameterProperties()); SG_ADD( &max_iterations, "max_iterations", "Max number of iterations.", - MS_NOT_AVAILABLE); - SG_ADD(&m_linear_term, "linear_term", "Linear Term", MS_NOT_AVAILABLE); + ParameterProperties()); + SG_ADD(&m_linear_term, "linear_term", "Linear Term", ParameterProperties()); SG_ADD( (machine_int_t*)&liblinear_solver_type, "liblinear_solver_type", - "Type of LibLinear solver.", MS_NOT_AVAILABLE); + "Type of LibLinear solver.", ParameterProperties()); } CLibLinear::~CLibLinear() diff --git a/src/shogun/classifier/svm/LibSVM.cpp b/src/shogun/classifier/svm/LibSVM.cpp index 5f6fc537b0d..2ae5e3a5b9a 100644 --- a/src/shogun/classifier/svm/LibSVM.cpp +++ b/src/shogun/classifier/svm/LibSVM.cpp @@ -36,7 +36,7 @@ CLibSVM::~CLibSVM() void CLibSVM::register_params() { - SG_ADD((machine_int_t*) &solver_type, "libsvm_solver_type", "LibSVM Solver type", MS_NOT_AVAILABLE); + SG_ADD((machine_int_t*) &solver_type, "libsvm_solver_type", "LibSVM Solver type", ParameterProperties()); } bool CLibSVM::train_machine(CFeatures* data) diff --git a/src/shogun/classifier/svm/OnlineLibLinear.cpp b/src/shogun/classifier/svm/OnlineLibLinear.cpp index 13089de7798..50cc95f9f40 100644 --- a/src/shogun/classifier/svm/OnlineLibLinear.cpp +++ b/src/shogun/classifier/svm/OnlineLibLinear.cpp @@ -63,10 +63,10 @@ void COnlineLibLinear::init() Cn=1; use_bias=false; - SG_ADD(&C1, "C1", "C Cost constant 1.", MS_AVAILABLE); - SG_ADD(&C2, "C2", "C Cost constant 2.", MS_AVAILABLE); + SG_ADD(&C1, "C1", "C Cost constant 1.", ParameterProperties::HYPER); + SG_ADD(&C2, "C2", "C Cost constant 2.", ParameterProperties::HYPER); SG_ADD( - &use_bias, "use_bias", "Indicates if bias is used.", MS_NOT_AVAILABLE); + &use_bias, "use_bias", "Indicates if bias is used.", ParameterProperties()); PG = 0; PGmax_old = CMath::INFTY; diff --git a/src/shogun/classifier/svm/OnlineSVMSGD.cpp b/src/shogun/classifier/svm/OnlineSVMSGD.cpp index 0fc1ebf1f60..77017e6ff76 100644 --- a/src/shogun/classifier/svm/OnlineSVMSGD.cpp +++ b/src/shogun/classifier/svm/OnlineSVMSGD.cpp @@ -206,17 +206,17 @@ void COnlineSVMSGD::init() loss=new CHingeLoss(); SG_REF(loss); - SG_ADD(&C1, "C1", "Cost constant 1.", MS_AVAILABLE); - SG_ADD(&C2, "C2", "Cost constant 2.", MS_AVAILABLE); - SG_ADD(&lambda, "lambda", 
"Regularization parameter.", MS_AVAILABLE); - SG_ADD(&wscale, "wscale", "W scale", MS_NOT_AVAILABLE); - SG_ADD(&bscale, "bscale", "b scale", MS_NOT_AVAILABLE); - SG_ADD(&epochs, "epochs", "epochs", MS_NOT_AVAILABLE); - SG_ADD(&skip, "skip", "skip", MS_NOT_AVAILABLE); - SG_ADD(&count, "count", "count", MS_NOT_AVAILABLE); + SG_ADD(&C1, "C1", "Cost constant 1.", ParameterProperties::HYPER); + SG_ADD(&C2, "C2", "Cost constant 2.", ParameterProperties::HYPER); + SG_ADD(&lambda, "lambda", "Regularization parameter.", ParameterProperties::HYPER); + SG_ADD(&wscale, "wscale", "W scale", ParameterProperties()); + SG_ADD(&bscale, "bscale", "b scale", ParameterProperties()); + SG_ADD(&epochs, "epochs", "epochs", ParameterProperties()); + SG_ADD(&skip, "skip", "skip", ParameterProperties()); + SG_ADD(&count, "count", "count", ParameterProperties()); SG_ADD( - &use_bias, "use_bias", "Indicates if bias is used.", MS_NOT_AVAILABLE); + &use_bias, "use_bias", "Indicates if bias is used.", ParameterProperties()); SG_ADD( &use_regularized_bias, "use_regularized_bias", - "Indicates if bias is regularized.", MS_NOT_AVAILABLE); + "Indicates if bias is regularized.", ParameterProperties()); } diff --git a/src/shogun/classifier/svm/SGDQN.cpp b/src/shogun/classifier/svm/SGDQN.cpp index d48f9d86291..9c0d208d51e 100644 --- a/src/shogun/classifier/svm/SGDQN.cpp +++ b/src/shogun/classifier/svm/SGDQN.cpp @@ -226,9 +226,9 @@ void CSGDQN::init() loss=new CHingeLoss(); SG_REF(loss); - SG_ADD(&C1, "C1", "Cost constant 1.", MS_AVAILABLE); - SG_ADD(&C2, "C2", "Cost constant 2.", MS_AVAILABLE); - SG_ADD(&epochs, "epochs", "epochs", MS_AVAILABLE); - SG_ADD(&skip, "skip", "skip", MS_NOT_AVAILABLE); - SG_ADD(&count, "count", "count", MS_NOT_AVAILABLE); + SG_ADD(&C1, "C1", "Cost constant 1.", ParameterProperties::HYPER); + SG_ADD(&C2, "C2", "Cost constant 2.", ParameterProperties::HYPER); + SG_ADD(&epochs, "epochs", "epochs", ParameterProperties::HYPER); + SG_ADD(&skip, "skip", "skip", ParameterProperties()); + SG_ADD(&count, "count", "count", ParameterProperties()); } diff --git a/src/shogun/classifier/svm/SVM.cpp b/src/shogun/classifier/svm/SVM.cpp index 4498640ec60..48446ba8239 100644 --- a/src/shogun/classifier/svm/SVM.cpp +++ b/src/shogun/classifier/svm/SVM.cpp @@ -40,21 +40,21 @@ CSVM::~CSVM() void CSVM::set_defaults(int32_t num_sv) { - SG_ADD(&C1, "C1", "", MS_AVAILABLE); - SG_ADD(&C2, "C2", "", MS_AVAILABLE); - SG_ADD(&svm_loaded, "svm_loaded", "SVM is loaded.", MS_NOT_AVAILABLE); - SG_ADD(&epsilon, "epsilon", "", MS_AVAILABLE); + SG_ADD(&C1, "C1", "", ParameterProperties::HYPER); + SG_ADD(&C2, "C2", "", ParameterProperties::HYPER); + SG_ADD(&svm_loaded, "svm_loaded", "SVM is loaded.", ParameterProperties()); + SG_ADD(&epsilon, "epsilon", "", ParameterProperties::HYPER); SG_ADD(&tube_epsilon, "tube_epsilon", - "Tube epsilon for support vector regression.", MS_AVAILABLE); - SG_ADD(&nu, "nu", "", MS_AVAILABLE); - SG_ADD(&objective, "objective", "", MS_NOT_AVAILABLE); - SG_ADD(&qpsize, "qpsize", "", MS_NOT_AVAILABLE); + "Tube epsilon for support vector regression.", ParameterProperties::HYPER); + SG_ADD(&nu, "nu", "", ParameterProperties::HYPER); + SG_ADD(&objective, "objective", "", ParameterProperties()); + SG_ADD(&qpsize, "qpsize", "", ParameterProperties()); SG_ADD(&use_shrinking, "use_shrinking", "Shrinking shall be used.", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD((CSGObject**) &mkl, "mkl", "MKL object that svm optimizers need.", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_linear_term, 
"linear_term", "Linear term in qp.", - MS_NOT_AVAILABLE); + ParameterProperties()); callback=NULL; mkl=NULL; diff --git a/src/shogun/classifier/svm/SVMOcas.cpp b/src/shogun/classifier/svm/SVMOcas.cpp index 69101da7ebc..beb62412a1e 100644 --- a/src/shogun/classifier/svm/SVMOcas.cpp +++ b/src/shogun/classifier/svm/SVMOcas.cpp @@ -345,17 +345,17 @@ void CSVMOcas::init() primal_objective = 0.0; - SG_ADD(&C1, "C1", "Cost constant 1.", MS_AVAILABLE); - SG_ADD(&C2, "C2", "Cost constant 2.", MS_AVAILABLE); + SG_ADD(&C1, "C1", "Cost constant 1.", ParameterProperties::HYPER); + SG_ADD(&C2, "C2", "Cost constant 2.", ParameterProperties::HYPER); SG_ADD( - &use_bias, "use_bias", "Indicates if bias is used.", MS_NOT_AVAILABLE); - SG_ADD(&epsilon, "epsilon", "Convergence precision.", MS_NOT_AVAILABLE); + &use_bias, "use_bias", "Indicates if bias is used.", ParameterProperties()); + SG_ADD(&epsilon, "epsilon", "Convergence precision.", ParameterProperties()); SG_ADD( &bufsize, "bufsize", "Maximum number of cutting planes.", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD( (machine_int_t*)&method, "method", "SVMOcas solver type.", - MS_NOT_AVAILABLE); + ParameterProperties()); } float64_t CSVMOcas::compute_primal_objective() const diff --git a/src/shogun/clustering/GMM.cpp b/src/shogun/clustering/GMM.cpp index 01fb85d791d..221d72f555a 100644 --- a/src/shogun/clustering/GMM.cpp +++ b/src/shogun/clustering/GMM.cpp @@ -827,5 +827,5 @@ void CGMM::register_params() //m_parameters->add((SGVector*) &m_components, "m_components", "Mixture components"); SG_ADD( &m_coefficients, "m_coefficients", "Mixture coefficients.", - MS_NOT_AVAILABLE); + ParameterProperties()); } diff --git a/src/shogun/clustering/KMeansBase.cpp b/src/shogun/clustering/KMeansBase.cpp index aa302438bf4..f97499c2616 100644 --- a/src/shogun/clustering/KMeansBase.cpp +++ b/src/shogun/clustering/KMeansBase.cpp @@ -359,10 +359,10 @@ void CKMeansBase::init() dimensions=0; fixed_centers=false; use_kmeanspp=false; - SG_ADD(&max_iter, "max_iter", "Maximum number of iterations", MS_AVAILABLE); - SG_ADD(&k, "k", "k, the number of clusters", MS_AVAILABLE); - SG_ADD(&dimensions, "dimensions", "Dimensions of data", MS_NOT_AVAILABLE); - SG_ADD(&R, "radiuses", "Cluster radiuses", MS_NOT_AVAILABLE); + SG_ADD(&max_iter, "max_iter", "Maximum number of iterations", ParameterProperties::HYPER); + SG_ADD(&k, "k", "k, the number of clusters", ParameterProperties::HYPER); + SG_ADD(&dimensions, "dimensions", "Dimensions of data", ParameterProperties()); + SG_ADD(&R, "radiuses", "Cluster radiuses", ParameterProperties()); watch_method("cluster_centers", &CKMeansBase::get_cluster_centers); } diff --git a/src/shogun/clustering/KMeansMiniBatch.cpp b/src/shogun/clustering/KMeansMiniBatch.cpp index 9d9ce41b206..5c17ac89d58 100644 --- a/src/shogun/clustering/KMeansMiniBatch.cpp +++ b/src/shogun/clustering/KMeansMiniBatch.cpp @@ -141,7 +141,7 @@ void CKMeansMiniBatch::init_mb_params() SG_ADD( &batch_size, "batch_size", "batch size for mini-batch KMeans", - MS_NOT_AVAILABLE); + ParameterProperties()); } bool CKMeansMiniBatch::train_machine(CFeatures* data) diff --git a/src/shogun/converter/DiffusionMaps.cpp b/src/shogun/converter/DiffusionMaps.cpp index 048ec1ab12a..ea36973b351 100644 --- a/src/shogun/converter/DiffusionMaps.cpp +++ b/src/shogun/converter/DiffusionMaps.cpp @@ -25,8 +25,8 @@ CDiffusionMaps::CDiffusionMaps() : void CDiffusionMaps::init() { - SG_ADD(&m_t, "t", "number of steps", MS_AVAILABLE); - SG_ADD(&m_width, "width", "gaussian kernel width", 
MS_AVAILABLE); + SG_ADD(&m_t, "t", "number of steps", ParameterProperties::HYPER); + SG_ADD(&m_width, "width", "gaussian kernel width", ParameterProperties::HYPER); } CDiffusionMaps::~CDiffusionMaps() diff --git a/src/shogun/converter/EmbeddingConverter.cpp b/src/shogun/converter/EmbeddingConverter.cpp index 4e99109eb31..6c0daab0dcd 100644 --- a/src/shogun/converter/EmbeddingConverter.cpp +++ b/src/shogun/converter/EmbeddingConverter.cpp @@ -69,11 +69,11 @@ CKernel* CEmbeddingConverter::get_kernel() const void CEmbeddingConverter::init() { SG_ADD(&m_target_dim, "target_dim", - "target dimensionality of preprocessor", MS_AVAILABLE); + "target dimensionality of preprocessor", ParameterProperties::HYPER); SG_ADD( &m_distance, "distance", "distance to be used for embedding", - MS_AVAILABLE); + ParameterProperties::HYPER); SG_ADD( - &m_kernel, "kernel", "kernel to be used for embedding", MS_AVAILABLE); + &m_kernel, "kernel", "kernel to be used for embedding", ParameterProperties::HYPER); } } diff --git a/src/shogun/converter/FactorAnalysis.cpp b/src/shogun/converter/FactorAnalysis.cpp index e2ed90b9160..95490fd8c60 100644 --- a/src/shogun/converter/FactorAnalysis.cpp +++ b/src/shogun/converter/FactorAnalysis.cpp @@ -21,8 +21,8 @@ CFactorAnalysis::CFactorAnalysis() : void CFactorAnalysis::init() { - SG_ADD(&m_max_iteration, "max_iteration", "maximum number of iterations", MS_NOT_AVAILABLE); - SG_ADD(&m_epsilon, "epsilon", "convergence parameter", MS_NOT_AVAILABLE); + SG_ADD(&m_max_iteration, "max_iteration", "maximum number of iterations", ParameterProperties()); + SG_ADD(&m_epsilon, "epsilon", "convergence parameter", ParameterProperties()); } CFactorAnalysis::~CFactorAnalysis() diff --git a/src/shogun/converter/HashedDocConverter.cpp b/src/shogun/converter/HashedDocConverter.cpp index 2757927cf8f..3e91a3c8ce1 100644 --- a/src/shogun/converter/HashedDocConverter.cpp +++ b/src/shogun/converter/HashedDocConverter.cpp @@ -58,14 +58,14 @@ void CHashedDocConverter::init(CTokenizer* tzer, int32_t hash_bits, bool normali SG_REF(tokenizer); SG_ADD(&num_bits, "num_bits", "Number of bits of the hash", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&ngrams, "ngrams", "Number of consecutive tokens", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&tokens_to_skip, "tokens_to_skip", "Number of tokens to skip", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&should_normalize, "should_normalize", "Whether to normalize vectors or not", - MS_NOT_AVAILABLE); - SG_ADD(&tokenizer, "tokenizer", "Tokenizer", MS_NOT_AVAILABLE); + ParameterProperties()); + SG_ADD(&tokenizer, "tokenizer", "Tokenizer", ParameterProperties()); } const char* CHashedDocConverter::get_name() const diff --git a/src/shogun/converter/Isomap.cpp b/src/shogun/converter/Isomap.cpp index 1ef2076e815..9a9755cc120 100644 --- a/src/shogun/converter/Isomap.cpp +++ b/src/shogun/converter/Isomap.cpp @@ -21,7 +21,7 @@ CIsomap::CIsomap() : CMultidimensionalScaling() void CIsomap::init() { - SG_ADD(&m_k, "k", "number of neighbors", MS_AVAILABLE); + SG_ADD(&m_k, "k", "number of neighbors", ParameterProperties::HYPER); } CIsomap::~CIsomap() diff --git a/src/shogun/converter/LaplacianEigenmaps.cpp b/src/shogun/converter/LaplacianEigenmaps.cpp index 679e5dd463f..44929e60813 100644 --- a/src/shogun/converter/LaplacianEigenmaps.cpp +++ b/src/shogun/converter/LaplacianEigenmaps.cpp @@ -23,8 +23,8 @@ CLaplacianEigenmaps::CLaplacianEigenmaps() : void CLaplacianEigenmaps::init() { - SG_ADD(&m_k, "k", "number of neighbors", MS_AVAILABLE); - 
SG_ADD(&m_tau, "tau", "heat distribution coefficient", MS_AVAILABLE); + SG_ADD(&m_k, "k", "number of neighbors", ParameterProperties::HYPER); + SG_ADD(&m_tau, "tau", "heat distribution coefficient", ParameterProperties::HYPER); } CLaplacianEigenmaps::~CLaplacianEigenmaps() diff --git a/src/shogun/converter/LocallyLinearEmbedding.cpp b/src/shogun/converter/LocallyLinearEmbedding.cpp index c0152b12e75..4aa734a9a5a 100644 --- a/src/shogun/converter/LocallyLinearEmbedding.cpp +++ b/src/shogun/converter/LocallyLinearEmbedding.cpp @@ -26,11 +26,11 @@ CLocallyLinearEmbedding::CLocallyLinearEmbedding() : void CLocallyLinearEmbedding::init() { - SG_ADD(&m_k, "k", "number of neighbors", MS_AVAILABLE); + SG_ADD(&m_k, "k", "number of neighbors", ParameterProperties::HYPER); SG_ADD(&m_nullspace_shift, "nullspace_shift", - "nullspace finding regularization shift",MS_NOT_AVAILABLE); + "nullspace finding regularization shift",ParameterProperties()); SG_ADD(&m_reconstruction_shift, "reconstruction_shift", - "shift used to regularize reconstruction step", MS_NOT_AVAILABLE); + "shift used to regularize reconstruction step", ParameterProperties()); } diff --git a/src/shogun/converter/ManifoldSculpting.cpp b/src/shogun/converter/ManifoldSculpting.cpp index 62cb88dd167..0a0cf7414a6 100644 --- a/src/shogun/converter/ManifoldSculpting.cpp +++ b/src/shogun/converter/ManifoldSculpting.cpp @@ -23,11 +23,11 @@ CManifoldSculpting::CManifoldSculpting() : void CManifoldSculpting::init() { - SG_ADD(&m_k, "k", "number of neighbors", MS_NOT_AVAILABLE); + SG_ADD(&m_k, "k", "number of neighbors", ParameterProperties()); SG_ADD(&m_squishing_rate, "quishing_rate", - "squishing rate",MS_NOT_AVAILABLE); + "squishing rate",ParameterProperties()); SG_ADD(&m_max_iteration, "max_iteration", - "maximum number of algorithm's iterations", MS_NOT_AVAILABLE); + "maximum number of algorithm's iterations", ParameterProperties()); } CManifoldSculpting::~CManifoldSculpting() diff --git a/src/shogun/converter/MultidimensionalScaling.cpp b/src/shogun/converter/MultidimensionalScaling.cpp index f7d9f94c673..e90ace6bd0a 100644 --- a/src/shogun/converter/MultidimensionalScaling.cpp +++ b/src/shogun/converter/MultidimensionalScaling.cpp @@ -30,11 +30,11 @@ CMultidimensionalScaling::CMultidimensionalScaling() : CEmbeddingConverter() void CMultidimensionalScaling::init() { SG_ADD(&m_eigenvalues, "eigenvalues", "eigenvalues of last embedding", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_landmark, "landmark", - "indicates if landmark approximation should be used", MS_NOT_AVAILABLE); + "indicates if landmark approximation should be used", ParameterProperties()); SG_ADD(&m_landmark_number, "landmark_number", - "the number of landmarks for approximation", MS_AVAILABLE); + "the number of landmarks for approximation", ParameterProperties::HYPER); } CMultidimensionalScaling::~CMultidimensionalScaling() diff --git a/src/shogun/converter/StochasticProximityEmbedding.cpp b/src/shogun/converter/StochasticProximityEmbedding.cpp index da2fc8d6ba0..40abb7da76a 100644 --- a/src/shogun/converter/StochasticProximityEmbedding.cpp +++ b/src/shogun/converter/StochasticProximityEmbedding.cpp @@ -27,13 +27,13 @@ CStochasticProximityEmbedding::CStochasticProximityEmbedding() : void CStochasticProximityEmbedding::init() { - SG_ADD(&m_k, "m_k", "Number of neighbors", MS_NOT_AVAILABLE); + SG_ADD(&m_k, "m_k", "Number of neighbors", ParameterProperties()); SG_ADD((machine_int_t*) &m_strategy, "m_strategy", "SPE strategy", - MS_NOT_AVAILABLE); + 
ParameterProperties()); SG_ADD(&m_tolerance, "m_tolerance", "Regularization parameter", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_max_iteration, "max_iteration", "maximum number of iterations", - MS_NOT_AVAILABLE); + ParameterProperties()); } CStochasticProximityEmbedding::~CStochasticProximityEmbedding() diff --git a/src/shogun/converter/TDistributedStochasticNeighborEmbedding.cpp b/src/shogun/converter/TDistributedStochasticNeighborEmbedding.cpp index 176ecaed283..3a171603263 100644 --- a/src/shogun/converter/TDistributedStochasticNeighborEmbedding.cpp +++ b/src/shogun/converter/TDistributedStochasticNeighborEmbedding.cpp @@ -21,8 +21,8 @@ CTDistributedStochasticNeighborEmbedding::CTDistributedStochasticNeighborEmbeddi void CTDistributedStochasticNeighborEmbedding::init() { - SG_ADD(&m_perplexity, "perplexity", "perplexity", MS_NOT_AVAILABLE); - SG_ADD(&m_theta, "theta", "learning rate", MS_NOT_AVAILABLE); + SG_ADD(&m_perplexity, "perplexity", "perplexity", ParameterProperties()); + SG_ADD(&m_theta, "theta", "learning rate", ParameterProperties()); } CTDistributedStochasticNeighborEmbedding::~CTDistributedStochasticNeighborEmbedding() diff --git a/src/shogun/converter/ica/FFSep.cpp b/src/shogun/converter/ica/FFSep.cpp index 20a5e0e2273..3b88d03f5f5 100644 --- a/src/shogun/converter/ica/FFSep.cpp +++ b/src/shogun/converter/ica/FFSep.cpp @@ -30,7 +30,7 @@ void CFFSep::init() m_covs = SGNDArray(); - SG_ADD(&m_tau, "tau", "tau vector", MS_AVAILABLE); + SG_ADD(&m_tau, "tau", "tau vector", ParameterProperties::HYPER); } CFFSep::~CFFSep() diff --git a/src/shogun/converter/ica/FastICA.cpp b/src/shogun/converter/ica/FastICA.cpp index 156b3ffb41e..14d5679e442 100644 --- a/src/shogun/converter/ica/FastICA.cpp +++ b/src/shogun/converter/ica/FastICA.cpp @@ -49,7 +49,7 @@ CFastICA::CFastICA() : CICAConverter() void CFastICA::init() { whiten = true; - SG_ADD(&whiten, "whiten", "flag indicating whether to whiten the data", MS_NOT_AVAILABLE); + SG_ADD(&whiten, "whiten", "flag indicating whether to whiten the data", ParameterProperties()); } CFastICA::~CFastICA() diff --git a/src/shogun/converter/ica/ICAConverter.cpp b/src/shogun/converter/ica/ICAConverter.cpp index cbf046b892c..093674ad948 100644 --- a/src/shogun/converter/ica/ICAConverter.cpp +++ b/src/shogun/converter/ica/ICAConverter.cpp @@ -23,9 +23,9 @@ void CICAConverter::init() max_iter = 200; tol = 1e-6; - SG_ADD(&m_mixing_matrix, "mixing_matrix", "the mixing matrix", MS_NOT_AVAILABLE); - SG_ADD(&max_iter, "max_iter", "maximum number of iterations", MS_NOT_AVAILABLE); - SG_ADD(&tol, "tol", "the convergence tolerance", MS_NOT_AVAILABLE); + SG_ADD(&m_mixing_matrix, "mixing_matrix", "the mixing matrix", ParameterProperties()); + SG_ADD(&max_iter, "max_iter", "maximum number of iterations", ParameterProperties()); + SG_ADD(&tol, "tol", "the convergence tolerance", ParameterProperties()); } CICAConverter::~CICAConverter() diff --git a/src/shogun/converter/ica/Jade.cpp b/src/shogun/converter/ica/Jade.cpp index 279642a8e89..29db09d32a8 100644 --- a/src/shogun/converter/ica/Jade.cpp +++ b/src/shogun/converter/ica/Jade.cpp @@ -28,7 +28,7 @@ CJade::CJade() : CICAConverter() void CJade::init() { m_cumulant_matrix = SGMatrix(); - SG_ADD(&m_cumulant_matrix, "cumulant_matrix", "m_cumulant_matrix", MS_NOT_AVAILABLE); + SG_ADD(&m_cumulant_matrix, "cumulant_matrix", "m_cumulant_matrix", ParameterProperties()); } CJade::~CJade() diff --git a/src/shogun/converter/ica/JediSep.cpp b/src/shogun/converter/ica/JediSep.cpp index d62b312167d..b133ad664d3 
100644 --- a/src/shogun/converter/ica/JediSep.cpp +++ b/src/shogun/converter/ica/JediSep.cpp @@ -30,7 +30,7 @@ void CJediSep::init() m_covs = SGNDArray(); - SG_ADD(&m_tau, "tau", "tau vector", MS_AVAILABLE); + SG_ADD(&m_tau, "tau", "tau vector", ParameterProperties::HYPER); } CJediSep::~CJediSep() diff --git a/src/shogun/converter/ica/SOBI.cpp b/src/shogun/converter/ica/SOBI.cpp index 65fd6d20a17..380c94665c8 100644 --- a/src/shogun/converter/ica/SOBI.cpp +++ b/src/shogun/converter/ica/SOBI.cpp @@ -30,7 +30,7 @@ void CSOBI::init() m_covs = SGNDArray(); - SG_ADD(&m_tau, "tau", "tau vector", MS_AVAILABLE); + SG_ADD(&m_tau, "tau", "tau vector", ParameterProperties::HYPER); } CSOBI::~CSOBI() diff --git a/src/shogun/converter/ica/UWedgeSep.cpp b/src/shogun/converter/ica/UWedgeSep.cpp index 1061961201d..f7166eae0e4 100644 --- a/src/shogun/converter/ica/UWedgeSep.cpp +++ b/src/shogun/converter/ica/UWedgeSep.cpp @@ -30,7 +30,7 @@ void CUWedgeSep::init() m_covs = SGNDArray(); - SG_ADD(&m_tau, "tau", "tau vector", MS_AVAILABLE); + SG_ADD(&m_tau, "tau", "tau vector", ParameterProperties::HYPER); } CUWedgeSep::~CUWedgeSep() diff --git a/src/shogun/distance/AttenuatedEuclideanDistance.cpp b/src/shogun/distance/AttenuatedEuclideanDistance.cpp index 2df7ecb801a..d10c7e81030 100644 --- a/src/shogun/distance/AttenuatedEuclideanDistance.cpp +++ b/src/shogun/distance/AttenuatedEuclideanDistance.cpp @@ -68,5 +68,5 @@ void CAttenuatedEuclideanDistance::init() SG_ADD( &disable_sqrt, "disable_sqrt", "If sqrt shall not be applied.", - MS_NOT_AVAILABLE); + ParameterProperties()); } diff --git a/src/shogun/distance/CustomDistance.cpp b/src/shogun/distance/CustomDistance.cpp index 7bd28fa5eed..5a0f0cfb5c4 100644 --- a/src/shogun/distance/CustomDistance.cpp +++ b/src/shogun/distance/CustomDistance.cpp @@ -128,7 +128,7 @@ void CCustomDistance::init() AnyParameterProperties("Distance Matrix")); SG_ADD( - &upper_diagonal, "upper_diagonal", "Upper diagonal", MS_NOT_AVAILABLE); + &upper_diagonal, "upper_diagonal", "Upper diagonal", ParameterProperties()); } void CCustomDistance::cleanup() diff --git a/src/shogun/distance/CustomMahalanobisDistance.cpp b/src/shogun/distance/CustomMahalanobisDistance.cpp index d327522484a..c3056df3a7c 100644 --- a/src/shogun/distance/CustomMahalanobisDistance.cpp +++ b/src/shogun/distance/CustomMahalanobisDistance.cpp @@ -26,7 +26,7 @@ CCustomMahalanobisDistance::CCustomMahalanobisDistance(CFeatures* l, CFeatures* void CCustomMahalanobisDistance::register_params() { - SG_ADD(&m_mahalanobis_matrix, "m_mahalanobis_matrix", "Mahalanobis matrix", MS_NOT_AVAILABLE) + SG_ADD(&m_mahalanobis_matrix, "m_mahalanobis_matrix", "Mahalanobis matrix", ParameterProperties()); } CCustomMahalanobisDistance::~CCustomMahalanobisDistance() diff --git a/src/shogun/distance/Distance.cpp b/src/shogun/distance/Distance.cpp index c1289f3655c..74eafab8f4c 100644 --- a/src/shogun/distance/Distance.cpp +++ b/src/shogun/distance/Distance.cpp @@ -259,8 +259,8 @@ void CDistance::init() num_lhs=0; num_rhs=0; - SG_ADD(&lhs, "lhs", "Left hand side features.", MS_NOT_AVAILABLE); - SG_ADD(&rhs, "rhs", "Right hand side features.", MS_NOT_AVAILABLE); + SG_ADD(&lhs, "lhs", "Left hand side features.", ParameterProperties()); + SG_ADD(&rhs, "rhs", "Right hand side features.", ParameterProperties()); } template diff --git a/src/shogun/distance/EuclideanDistance.cpp b/src/shogun/distance/EuclideanDistance.cpp index f7dd1a0542f..62c1481d084 100644 --- a/src/shogun/distance/EuclideanDistance.cpp +++ 
b/src/shogun/distance/EuclideanDistance.cpp @@ -133,9 +133,9 @@ void CEuclideanDistance::register_params() { disable_sqrt=false; reset_precompute(); - SG_ADD(&disable_sqrt, "disable_sqrt", "If sqrt shall not be applied.", MS_NOT_AVAILABLE); - SG_ADD(&m_rhs_squared_norms, "m_rhs_squared_norms", "Squared norms from features of right hand side", MS_NOT_AVAILABLE); - SG_ADD(&m_lhs_squared_norms, "m_lhs_squared_norms", "Squared norms from features of left hand side", MS_NOT_AVAILABLE); + SG_ADD(&disable_sqrt, "disable_sqrt", "If sqrt shall not be applied.", ParameterProperties()); + SG_ADD(&m_rhs_squared_norms, "m_rhs_squared_norms", "Squared norms from features of right hand side", ParameterProperties()); + SG_ADD(&m_lhs_squared_norms, "m_lhs_squared_norms", "Squared norms from features of left hand side", ParameterProperties()); } float64_t CEuclideanDistance::distance_upper_bounded(int32_t idx_a, int32_t idx_b, float64_t upper_bound) diff --git a/src/shogun/distance/HammingWordDistance.cpp b/src/shogun/distance/HammingWordDistance.cpp index 613a6b650ad..3a47c6b1046 100644 --- a/src/shogun/distance/HammingWordDistance.cpp +++ b/src/shogun/distance/HammingWordDistance.cpp @@ -171,5 +171,5 @@ void CHammingWordDistance::init() use_sign = false; SG_ADD( &use_sign, "use_sign", "If signum(counts) is used instead of counts.", - MS_NOT_AVAILABLE); + ParameterProperties()); } diff --git a/src/shogun/distance/KernelDistance.cpp b/src/shogun/distance/KernelDistance.cpp index 128e051c443..12890a8f066 100644 --- a/src/shogun/distance/KernelDistance.cpp +++ b/src/shogun/distance/KernelDistance.cpp @@ -69,6 +69,6 @@ void CKernelDistance::init() kernel = NULL; width = 0.0; - SG_ADD(&width, "width", "Width of RBF Kernel", MS_AVAILABLE); - SG_ADD(&kernel, "kernel", "Kernel.", MS_NOT_AVAILABLE); + SG_ADD(&width, "width", "Width of RBF Kernel", ParameterProperties::HYPER); + SG_ADD(&kernel, "kernel", "Kernel.", ParameterProperties()); } diff --git a/src/shogun/distance/MahalanobisDistance.cpp b/src/shogun/distance/MahalanobisDistance.cpp index f9344824ce3..05474af18d0 100644 --- a/src/shogun/distance/MahalanobisDistance.cpp +++ b/src/shogun/distance/MahalanobisDistance.cpp @@ -112,11 +112,11 @@ void CMahalanobisDistance::init() SG_ADD( &disable_sqrt, "disable_sqrt", "If sqrt shall not be applied.", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD( &use_mean, "use_mean", "If distance shall be computed between mean " "vector and vector from rhs or between lhs and " "rhs.", - MS_NOT_AVAILABLE); + ParameterProperties()); } diff --git a/src/shogun/distance/MinkowskiMetric.cpp b/src/shogun/distance/MinkowskiMetric.cpp index 204a765a3fe..3c436101ddb 100644 --- a/src/shogun/distance/MinkowskiMetric.cpp +++ b/src/shogun/distance/MinkowskiMetric.cpp @@ -84,5 +84,5 @@ float64_t CMinkowskiMetric::compute(int32_t idx_a, int32_t idx_b) void CMinkowskiMetric::init() { k = 2.0; - SG_ADD(&k, "k", "L_k norm.", MS_AVAILABLE); + SG_ADD(&k, "k", "L_k norm.", ParameterProperties::HYPER); } diff --git a/src/shogun/distributions/Distribution.cpp b/src/shogun/distributions/Distribution.cpp index 053f1d46920..57e5e4bbcbb 100644 --- a/src/shogun/distributions/Distribution.cpp +++ b/src/shogun/distributions/Distribution.cpp @@ -12,7 +12,7 @@ using namespace shogun; CDistribution::CDistribution() : CSGObject(), features(NULL), pseudo_count(1e-10) { - SG_ADD(&features, "features", "features to be used", MS_NOT_AVAILABLE); + SG_ADD(&features, "features", "features to be used", ParameterProperties()); } CDistribution::~CDistribution() 
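For reference, a minimal self-contained sketch of the dispatch performed by the consolidated four-argument SG_ADD from PATCH 1/6. The types below are simplified stand-ins, not Shogun's actual headers (only the HYPER and GRADIENT flags are modelled, and the bookkeeping lists are mocked); the constructor AnyParameterProperties(description, param_properties) and the queries get_model_selection()/get_gradient() are the calls the patched macro itself makes.

// Simplified stand-ins for the real Shogun types; only what the macro needs.
#include <cstdio>
#include <string>
#include <vector>

enum class ParameterProperties { NONE = 0, HYPER = 1, GRADIENT = 2 };

class AnyParameterProperties
{
public:
	AnyParameterProperties(const std::string& desc, ParameterProperties props)
	    : m_description(desc), m_properties(props)
	{
	}
	// The consolidated macro branches on these two queries instead of taking
	// separate ms_available/gradient_available arguments.
	bool get_model_selection() const
	{
		return m_properties == ParameterProperties::HYPER;
	}
	bool get_gradient() const
	{
		return m_properties == ParameterProperties::GRADIENT;
	}

private:
	std::string m_description;
	ParameterProperties m_properties;
};

// Stand-in for the legacy parameter lists kept by CSGObject.
struct ParameterList
{
	std::vector<std::string> names;
	template <class T>
	void add(T*, const char* name, const char*)
	{
		names.emplace_back(name);
	}
};

struct MockSGObject
{
	ParameterList params, ms_params, grad_params;
	ParameterList* m_parameters = &params;
	ParameterList* m_model_selection_parameters = &ms_params;
	ParameterList* m_gradient_parameters = &grad_params;

	template <class T>
	void watch_param(const char*, T*, const AnyParameterProperties&)
	{
		// would hand the parameter over to the new tag-based framework
	}

// Same shape as the patched SG_ADD: one entry point, with the flags carried
// by the ParameterProperties argument instead of the old 4/5-argument forms.
#define SG_ADD_SKETCH(param, name, description, param_properties)             \
	{                                                                          \
		AnyParameterProperties pprop =                                         \
		    AnyParameterProperties(description, param_properties);            \
		this->m_parameters->add(param, name, description);                    \
		this->watch_param(name, param, pprop);                                 \
		if (pprop.get_model_selection())                                       \
			this->m_model_selection_parameters->add(param, name, description);\
		if (pprop.get_gradient())                                              \
			this->m_gradient_parameters->add(param, name, description);       \
	}

	float C1 = 1.0f;
	bool use_bias = false;

	void register_params()
	{
		// old: SG_ADD(&C1, "C1", "Cost constant 1.", MS_AVAILABLE);
		SG_ADD_SKETCH(&C1, "C1", "Cost constant 1.", ParameterProperties::HYPER);
		// old: SG_ADD(&use_bias, "use_bias", "...", MS_NOT_AVAILABLE);
		SG_ADD_SKETCH(&use_bias, "use_bias", "Indicates if bias is used.",
		              ParameterProperties());
	}
};

int main()
{
	MockSGObject o;
	o.register_params();
	std::printf("registered=%zu model_selection=%zu gradient=%zu\n",
	            o.params.names.size(), o.ms_params.names.size(),
	            o.grad_params.names.size());
	return 0; // prints: registered=2 model_selection=1 gradient=0
}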
diff --git a/src/shogun/distributions/Gaussian.cpp b/src/shogun/distributions/Gaussian.cpp index 99b04b540d9..5a2acdeeaaf 100644 --- a/src/shogun/distributions/Gaussian.cpp +++ b/src/shogun/distributions/Gaussian.cpp @@ -337,11 +337,11 @@ SGMatrix CGaussian::get_cov() void CGaussian::register_params() { - SG_ADD(&m_u, "m_u", "Unitary matrix.",MS_NOT_AVAILABLE); - SG_ADD(&m_d, "m_d", "Diagonal.",MS_NOT_AVAILABLE); - SG_ADD(&m_mean, "m_mean", "Mean.",MS_NOT_AVAILABLE); - SG_ADD(&m_constant, "m_constant", "Constant part.",MS_NOT_AVAILABLE); - SG_ADD((machine_int_t*)&m_cov_type, "m_cov_type", "Covariance type.",MS_NOT_AVAILABLE); + SG_ADD(&m_u, "m_u", "Unitary matrix.",ParameterProperties()); + SG_ADD(&m_d, "m_d", "Diagonal.",ParameterProperties()); + SG_ADD(&m_mean, "m_mean", "Mean.",ParameterProperties()); + SG_ADD(&m_constant, "m_constant", "Constant part.",ParameterProperties()); + SG_ADD((machine_int_t*)&m_cov_type, "m_cov_type", "Covariance type.",ParameterProperties()); } void CGaussian::decompose_cov(SGMatrix cov) diff --git a/src/shogun/distributions/KernelDensity.cpp b/src/shogun/distributions/KernelDensity.cpp index f9bab0d8c1d..9bd3765d78e 100644 --- a/src/shogun/distributions/KernelDensity.cpp +++ b/src/shogun/distributions/KernelDensity.cpp @@ -159,9 +159,9 @@ void CKernelDensity::init() m_rtol=0; tree=NULL; - SG_ADD(&m_bandwidth,"m_bandwidth","bandwidth",MS_NOT_AVAILABLE); - SG_ADD(&m_leaf_size,"m_leaf_size","leaf size",MS_NOT_AVAILABLE); - SG_ADD(&m_atol,"m_atol","absolute tolerance",MS_NOT_AVAILABLE); - SG_ADD(&m_rtol,"m_rtol","relative tolerance",MS_NOT_AVAILABLE); - SG_ADD((CSGObject**) &tree,"tree","tree",MS_NOT_AVAILABLE); + SG_ADD(&m_bandwidth,"m_bandwidth","bandwidth",ParameterProperties()); + SG_ADD(&m_leaf_size,"m_leaf_size","leaf size",ParameterProperties()); + SG_ADD(&m_atol,"m_atol","absolute tolerance",ParameterProperties()); + SG_ADD(&m_rtol,"m_rtol","relative tolerance",ParameterProperties()); + SG_ADD((CSGObject**) &tree,"tree","tree",ParameterProperties()); } \ No newline at end of file diff --git a/src/shogun/distributions/MixtureModel.cpp b/src/shogun/distributions/MixtureModel.cpp index 6983926238b..c174c7d43e9 100644 --- a/src/shogun/distributions/MixtureModel.cpp +++ b/src/shogun/distributions/MixtureModel.cpp @@ -212,8 +212,8 @@ void CMixtureModel::init() m_conv_tol=1e-8; m_max_iters=1000; - SG_ADD((CSGObject**)&m_components,"m_components","components of mixture",MS_NOT_AVAILABLE); - SG_ADD(&m_weights,"m_weights","weights of components",MS_NOT_AVAILABLE); - SG_ADD(&m_conv_tol,"m_conv_tol","convergence tolerance",MS_NOT_AVAILABLE); - SG_ADD(&m_max_iters,"m_max_iters","max number of iterations",MS_NOT_AVAILABLE); + SG_ADD((CSGObject**)&m_components,"m_components","components of mixture",ParameterProperties()); + SG_ADD(&m_weights,"m_weights","weights of components",ParameterProperties()); + SG_ADD(&m_conv_tol,"m_conv_tol","convergence tolerance",ParameterProperties()); + SG_ADD(&m_max_iters,"m_max_iters","max number of iterations",ParameterProperties()); } diff --git a/src/shogun/distributions/PositionalPWM.cpp b/src/shogun/distributions/PositionalPWM.cpp index 6b57334b5a9..1bf4f40ee7f 100644 --- a/src/shogun/distributions/PositionalPWM.cpp +++ b/src/shogun/distributions/PositionalPWM.cpp @@ -124,11 +124,11 @@ void CPositionalPWM::compute_w(int32_t num_pos) void CPositionalPWM::register_params() { - SG_ADD(&m_poim, "poim", "POIM Scoring Matrix", MS_NOT_AVAILABLE); - SG_ADD(&m_w, "w", "Scoring Matrix", MS_NOT_AVAILABLE); - SG_ADD(&m_pwm, "pwm", "Positional 
Weight Matrix.", MS_NOT_AVAILABLE); - SG_ADD(&m_sigma, "sigma", "Standard Deviation.", MS_NOT_AVAILABLE); - SG_ADD(&m_mean, "mean", "Mean.", MS_NOT_AVAILABLE); + SG_ADD(&m_poim, "poim", "POIM Scoring Matrix", ParameterProperties()); + SG_ADD(&m_w, "w", "Scoring Matrix", ParameterProperties()); + SG_ADD(&m_pwm, "pwm", "Positional Weight Matrix.", ParameterProperties()); + SG_ADD(&m_sigma, "sigma", "Standard Deviation.", ParameterProperties()); + SG_ADD(&m_mean, "mean", "Mean.", ParameterProperties()); } void CPositionalPWM::compute_scoring(int32_t max_degree) diff --git a/src/shogun/distributions/classical/GaussianDistribution.cpp b/src/shogun/distributions/classical/GaussianDistribution.cpp index 2c4e49e488e..fc0388e46b8 100644 --- a/src/shogun/distributions/classical/GaussianDistribution.cpp +++ b/src/shogun/distributions/classical/GaussianDistribution.cpp @@ -164,7 +164,7 @@ SGVector CGaussianDistribution::log_pdf_multiple(SGMatrix void CGaussianDistribution::init() { - SG_ADD(&m_mean, "mean", "Mean of the Gaussian.", MS_NOT_AVAILABLE); + SG_ADD(&m_mean, "mean", "Mean of the Gaussian.", ParameterProperties()); SG_ADD(&m_L, "L", "Lower factor of covariance matrix, " - "depending on the factorization type.", MS_NOT_AVAILABLE); + "depending on the factorization type.", ParameterProperties()); } diff --git a/src/shogun/distributions/classical/ProbabilityDistribution.cpp b/src/shogun/distributions/classical/ProbabilityDistribution.cpp index 72607694dfc..d3e6d672d53 100644 --- a/src/shogun/distributions/classical/ProbabilityDistribution.cpp +++ b/src/shogun/distributions/classical/ProbabilityDistribution.cpp @@ -71,5 +71,5 @@ void CProbabilityDistribution::init() m_dimension=0; SG_ADD(&m_dimension, "dimension", "Dimension of distribution.", - MS_NOT_AVAILABLE); + ParameterProperties()); } diff --git a/src/shogun/ensemble/WeightedMajorityVote.cpp b/src/shogun/ensemble/WeightedMajorityVote.cpp index 623230ae800..ce26ec3d99d 100644 --- a/src/shogun/ensemble/WeightedMajorityVote.cpp +++ b/src/shogun/ensemble/WeightedMajorityVote.cpp @@ -103,5 +103,5 @@ void CWeightedMajorityVote::init() void CWeightedMajorityVote::register_parameters() { - SG_ADD(&m_weights, "weights", "Weights for the majority vote", MS_AVAILABLE); + SG_ADD(&m_weights, "weights", "Weights for the majority vote", ParameterProperties::HYPER); } diff --git a/src/shogun/evaluation/CrossValidation.cpp b/src/shogun/evaluation/CrossValidation.cpp index 65d03ef7e01..780cc1c68e8 100644 --- a/src/shogun/evaluation/CrossValidation.cpp +++ b/src/shogun/evaluation/CrossValidation.cpp @@ -51,7 +51,7 @@ void CCrossValidation::init() { m_num_runs = 1; - SG_ADD(&m_num_runs, "num_runs", "Number of repetitions", MS_NOT_AVAILABLE); + SG_ADD(&m_num_runs, "num_runs", "Number of repetitions", ParameterProperties()); } CEvaluationResult* CCrossValidation::evaluate_impl() diff --git a/src/shogun/evaluation/CrossValidation.h b/src/shogun/evaluation/CrossValidation.h index 3c15f306089..5dbf948308e 100644 --- a/src/shogun/evaluation/CrossValidation.h +++ b/src/shogun/evaluation/CrossValidation.h @@ -29,11 +29,11 @@ namespace shogun public: CCrossValidationResult() { - SG_ADD(&mean, "mean", "Mean of results", MS_NOT_AVAILABLE); + SG_ADD(&mean, "mean", "Mean of results", ParameterProperties()); SG_ADD( &std_dev, "std_dev", "Standard deviation of cross-validation folds", - MS_NOT_AVAILABLE); + ParameterProperties()); mean = 0; std_dev = 0; diff --git a/src/shogun/evaluation/CrossValidationStorage.cpp b/src/shogun/evaluation/CrossValidationStorage.cpp index 
b35ba034842..ae5f5e5923a 100644 --- a/src/shogun/evaluation/CrossValidationStorage.cpp +++ b/src/shogun/evaluation/CrossValidationStorage.cpp @@ -51,19 +51,19 @@ CrossValidationFoldStorage::CrossValidationFoldStorage() : CSGObject() SG_ADD( &m_current_run_index, "m_current_run_index", - "The current run index of this fold", MS_AVAILABLE) + "The current run index of this fold", ParameterProperties::HYPER); SG_ADD( &m_current_fold_index, "m_current_fold_index", "The current fold index", - MS_AVAILABLE) + ParameterProperties::HYPER); SG_ADD( (CSGObject**)&m_trained_machine, "m_trained_machine", - "The machine trained by this fold", MS_AVAILABLE) + "The machine trained by this fold", ParameterProperties::HYPER); SG_ADD( (CSGObject**)&m_test_result, "m_test_result", - "The test result of this fold", MS_AVAILABLE) + "The test result of this fold", ParameterProperties::HYPER); SG_ADD( (CSGObject**)&m_test_true_result, "m_test_true_result", - "The true test result for this fold", MS_AVAILABLE) + "The true test result for this fold", ParameterProperties::HYPER); } CrossValidationFoldStorage::~CrossValidationFoldStorage() @@ -197,13 +197,13 @@ CrossValidationStorage::CrossValidationStorage() : CSGObject() SG_ADD( &m_num_runs, "m_num_runs", "The total number of cross-validation runs", - MS_AVAILABLE) + ParameterProperties::HYPER); SG_ADD( &m_num_folds, "m_num_folds", - "The total number of cross-validation folds", MS_AVAILABLE) + "The total number of cross-validation folds", ParameterProperties::HYPER); SG_ADD( (CSGObject**)&m_expose_labels, "m_expose_labels", - "The labels used for this cross-validation", MS_AVAILABLE) + "The labels used for this cross-validation", ParameterProperties::HYPER); } CrossValidationStorage::~CrossValidationStorage() diff --git a/src/shogun/evaluation/GradientEvaluation.cpp b/src/shogun/evaluation/GradientEvaluation.cpp index 316d49e23be..3b071c711e4 100644 --- a/src/shogun/evaluation/GradientEvaluation.cpp +++ b/src/shogun/evaluation/GradientEvaluation.cpp @@ -30,7 +30,7 @@ void CGradientEvaluation::init() SG_ADD( &m_diff, "differentiable_function", "Differentiable " "function", - MS_AVAILABLE); + ParameterProperties::HYPER); } CGradientEvaluation::~CGradientEvaluation() diff --git a/src/shogun/evaluation/MachineEvaluation.cpp b/src/shogun/evaluation/MachineEvaluation.cpp index f3096d7f6d5..2eed61c6a81 100644 --- a/src/shogun/evaluation/MachineEvaluation.cpp +++ b/src/shogun/evaluation/MachineEvaluation.cpp @@ -83,21 +83,21 @@ void CMachineEvaluation::init() m_pause_computation_flag = false; SG_ADD((CSGObject**)&m_machine, "machine", "Used learning machine", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD((CSGObject**)&m_features, "features", "Used features", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD((CSGObject**)&m_labels, "labels", "Used labels", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD((CSGObject**)&m_splitting_strategy, "splitting_strategy", - "Used splitting strategy", MS_NOT_AVAILABLE); + "Used splitting strategy", ParameterProperties()); SG_ADD((CSGObject**)&m_evaluation_criterion, "evaluation_criterion", - "Used evaluation criterion", MS_NOT_AVAILABLE); + "Used evaluation criterion", ParameterProperties()); SG_ADD(&m_do_unlock, "do_unlock", "Whether machine should be unlocked after evaluation", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_autolock, "m_autolock", "Whether machine should automatically try to be locked before ", - MS_NOT_AVAILABLE); + ParameterProperties()); } diff --git 
a/src/shogun/evaluation/SigmoidCalibration.cpp b/src/shogun/evaluation/SigmoidCalibration.cpp index 03ed9aaffd5..f4f2cadce8e 100644 --- a/src/shogun/evaluation/SigmoidCalibration.cpp +++ b/src/shogun/evaluation/SigmoidCalibration.cpp @@ -29,21 +29,21 @@ void CSigmoidCalibration::init() SG_ADD( &m_sigmoid_as, "m_sigmoid_as", - "Vector of paramter A of sigmoid for each class.", MS_NOT_AVAILABLE); + "Vector of paramter A of sigmoid for each class.", ParameterProperties()); SG_ADD( &m_sigmoid_bs, "m_sigmoid_bs", - "Vector of paramter B of sigmoid for each class.", MS_NOT_AVAILABLE); + "Vector of paramter B of sigmoid for each class.", ParameterProperties()); SG_ADD( &m_maxiter, "m_maxiter", "Maximum number of iteration for search.", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD( &m_minstep, "m_minstep", "Minimum step taken in line search.", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD( &m_sigma, "m_sigma", "Positive parameter to ensure positive semi-definite Hessian.", - MS_NOT_AVAILABLE); - SG_ADD(&m_epsilon, "m_epsilon", "Stopping criteria.", MS_NOT_AVAILABLE); + ParameterProperties()); + SG_ADD(&m_epsilon, "m_epsilon", "Stopping criteria.", ParameterProperties()); } void CSigmoidCalibration::set_maxiter(index_t maxiter) diff --git a/src/shogun/evaluation/SplittingStrategy.cpp b/src/shogun/evaluation/SplittingStrategy.cpp index 9425c430382..35804729e35 100644 --- a/src/shogun/evaluation/SplittingStrategy.cpp +++ b/src/shogun/evaluation/SplittingStrategy.cpp @@ -56,16 +56,16 @@ void CSplittingStrategy::init() m_is_filled=false; m_num_subsets=0; - SG_ADD(&m_labels, "labels", "Labels for subsets", MS_NOT_AVAILABLE); + SG_ADD(&m_labels, "labels", "Labels for subsets", ParameterProperties()); SG_ADD( &m_subset_indices, "subset_indices", "Set of sets of subset indices", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD( &m_is_filled, "is_filled", "Whether ther are index sets", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD( &m_num_subsets, "num_subsets", "Number of index sets", - MS_NOT_AVAILABLE); + ParameterProperties()); } CSplittingStrategy::~CSplittingStrategy() diff --git a/src/shogun/features/Alphabet.cpp b/src/shogun/features/Alphabet.cpp index 7b2de669b66..7f7b6748c31 100644 --- a/src/shogun/features/Alphabet.cpp +++ b/src/shogun/features/Alphabet.cpp @@ -728,9 +728,9 @@ void CAlphabet::init() SG_ADD( (machine_int_t*)&alphabet, "alphabet", "Alphabet enum.", - MS_NOT_AVAILABLE); - SG_ADD(&num_symbols, "num_symbols", "Number of symbols.", MS_NOT_AVAILABLE); - SG_ADD(&num_bits, "num_bits", "Number of bits.", MS_NOT_AVAILABLE); + ParameterProperties()); + SG_ADD(&num_symbols, "num_symbols", "Number of symbols.", ParameterProperties()); + SG_ADD(&num_bits, "num_bits", "Number of bits.", ParameterProperties()); /* We don't need to serialize the mapping tables / they can be computed * after de-serializing. Lets not serialize the histogram for now. 
Doesn't diff --git a/src/shogun/features/CombinedDotFeatures.cpp b/src/shogun/features/CombinedDotFeatures.cpp index 10bc9546a1d..762355c9b09 100644 --- a/src/shogun/features/CombinedDotFeatures.cpp +++ b/src/shogun/features/CombinedDotFeatures.cpp @@ -343,10 +343,10 @@ void CCombinedDotFeatures::init() { SG_ADD( &num_dimensions, "num_dimensions", "Total number of dimensions.", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD( &num_vectors, "num_vectors", "Total number of vectors.", - MS_NOT_AVAILABLE); - SG_ADD(&feature_array, "feature_array", "Feature array.", MS_NOT_AVAILABLE); + ParameterProperties()); + SG_ADD(&feature_array, "feature_array", "Feature array.", ParameterProperties()); } diff --git a/src/shogun/features/CombinedFeatures.cpp b/src/shogun/features/CombinedFeatures.cpp index 0ef5d28e16f..c11b76738d6 100644 --- a/src/shogun/features/CombinedFeatures.cpp +++ b/src/shogun/features/CombinedFeatures.cpp @@ -166,8 +166,8 @@ int32_t CCombinedFeatures::get_num_feature_obj() const void CCombinedFeatures::init() { - SG_ADD(&num_vec, "num_vec", "Number of vectors.", MS_NOT_AVAILABLE); - SG_ADD(&feature_array, "array", "Feature array.", MS_NOT_AVAILABLE); + SG_ADD(&num_vec, "num_vec", "Number of vectors.", ParameterProperties()); + SG_ADD(&feature_array, "array", "Feature array.", ParameterProperties()); } CFeatures* CCombinedFeatures::create_merged_copy(CFeatures* other) const diff --git a/src/shogun/features/DenseFeatures.cpp b/src/shogun/features/DenseFeatures.cpp index 488ee8d0b52..f0fec9fe8ed 100644 --- a/src/shogun/features/DenseFeatures.cpp +++ b/src/shogun/features/DenseFeatures.cpp @@ -603,10 +603,10 @@ template void CDenseFeatures::init() set_generic(); /* not store number of vectors in subset */ - SG_ADD(&num_vectors, "num_vectors", "Number of vectors.", MS_NOT_AVAILABLE); - SG_ADD(&num_features, "num_features", "Number of features.", MS_NOT_AVAILABLE); + SG_ADD(&num_vectors, "num_vectors", "Number of vectors.", ParameterProperties()); + SG_ADD(&num_features, "num_features", "Number of features.", ParameterProperties()); SG_ADD(&feature_matrix, "feature_matrix", - "Matrix of feature vectors / 1 vector per column.", MS_NOT_AVAILABLE); + "Matrix of feature vectors / 1 vector per column.", ParameterProperties()); } #define GET_FEATURE_TYPE(f_type, sg_type) \ diff --git a/src/shogun/features/DenseSubSamplesFeatures.cpp b/src/shogun/features/DenseSubSamplesFeatures.cpp index b58d79d26c9..af026789c7e 100644 --- a/src/shogun/features/DenseSubSamplesFeatures.cpp +++ b/src/shogun/features/DenseSubSamplesFeatures.cpp @@ -68,8 +68,8 @@ template void CDenseSubSamplesFeatures::init() set_generic(); m_fea=NULL; m_idx=SGVector(); - SG_ADD(&m_idx, "idx", "idx", MS_NOT_AVAILABLE); - SG_ADD((CSGObject **)&m_fea, "fea", "fea", MS_NOT_AVAILABLE); + SG_ADD(&m_idx, "idx", "idx", ParameterProperties()); + SG_ADD((CSGObject **)&m_fea, "fea", "fea", ParameterProperties()); } template CFeatures* CDenseSubSamplesFeatures::duplicate() const diff --git a/src/shogun/features/DotFeatures.cpp b/src/shogun/features/DotFeatures.cpp index 2e4f70b0f76..6f45808fcbf 100644 --- a/src/shogun/features/DotFeatures.cpp +++ b/src/shogun/features/DotFeatures.cpp @@ -342,5 +342,5 @@ void CDotFeatures::init() set_property(FP_DOT); SG_ADD( &combined_weight, "combined_weight", - "Feature weighting in combined dot features.", MS_NOT_AVAILABLE); + "Feature weighting in combined dot features.", ParameterProperties()); } diff --git a/src/shogun/features/DummyFeatures.cpp b/src/shogun/features/DummyFeatures.cpp index 
6b0a701b5b1..2a0263acd2a 100644 --- a/src/shogun/features/DummyFeatures.cpp +++ b/src/shogun/features/DummyFeatures.cpp @@ -48,5 +48,5 @@ void CDummyFeatures::init() { SG_ADD( &num_vectors, "num_vectors", "Number of feature vectors.", - MS_NOT_AVAILABLE); + ParameterProperties()); } diff --git a/src/shogun/features/FKFeatures.cpp b/src/shogun/features/FKFeatures.cpp index 5a2cbfe3a61..61570fa91e1 100644 --- a/src/shogun/features/FKFeatures.cpp +++ b/src/shogun/features/FKFeatures.cpp @@ -255,5 +255,5 @@ void CFKFeatures::init() //TODO serialize HMMs //m_parameters->add((CSGObject**) &pos, "pos", "HMM for positive class."); //m_parameters->add((CSGObject**) &neg, "neg", "HMM for negative class."); - SG_ADD(&weight_a, "weight_a", "Class prior.", MS_NOT_AVAILABLE); + SG_ADD(&weight_a, "weight_a", "Class prior.", ParameterProperties()); } diff --git a/src/shogun/features/FactorGraphFeatures.cpp b/src/shogun/features/FactorGraphFeatures.cpp index e8f359bd8fb..072e07e6b3a 100644 --- a/src/shogun/features/FactorGraphFeatures.cpp +++ b/src/shogun/features/FactorGraphFeatures.cpp @@ -73,7 +73,7 @@ CFactorGraph* CFactorGraphFeatures::get_sample(index_t idx) void CFactorGraphFeatures::init() { SG_ADD((CSGObject**) &m_samples, "samples", "Array of examples", - MS_NOT_AVAILABLE); + ParameterProperties()); } CFactorGraphFeatures* CFactorGraphFeatures::obtain_from_generic(CFeatures* base_feats) diff --git a/src/shogun/features/Features.cpp b/src/shogun/features/Features.cpp index 610184932df..aa1ed5a2d5b 100644 --- a/src/shogun/features/Features.cpp +++ b/src/shogun/features/Features.cpp @@ -52,14 +52,14 @@ CFeatures::~CFeatures() void CFeatures::init() { - SG_ADD(&properties, "properties", "Feature properties", MS_NOT_AVAILABLE); - SG_ADD(&cache_size, "cache_size", "Size of cache in MB", MS_NOT_AVAILABLE); + SG_ADD(&properties, "properties", "Feature properties", ParameterProperties()); + SG_ADD(&cache_size, "cache_size", "Size of cache in MB", ParameterProperties()); SG_ADD((CSGObject**) &preproc, "preproc", "Array of preprocessors.", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD((CSGObject**)&m_subset_stack, "subset_stack", "Stack of subsets", - MS_NOT_AVAILABLE); + ParameterProperties()); m_subset_stack=new CSubsetStack(); SG_REF(m_subset_stack); diff --git a/src/shogun/features/IndexFeatures.cpp b/src/shogun/features/IndexFeatures.cpp index 3b5a13364ec..aef51a81d06 100644 --- a/src/shogun/features/IndexFeatures.cpp +++ b/src/shogun/features/IndexFeatures.cpp @@ -82,5 +82,5 @@ void CIndexFeatures::init() { num_vectors = 0; SG_ADD(&m_feature_index, "m_feature_index", - "Vector of feature index.", MS_NOT_AVAILABLE); + "Vector of feature index.", ParameterProperties()); } diff --git a/src/shogun/features/LBPPyrDotFeatures.cpp b/src/shogun/features/LBPPyrDotFeatures.cpp index 77e7217862c..3501105589a 100644 --- a/src/shogun/features/LBPPyrDotFeatures.cpp +++ b/src/shogun/features/LBPPyrDotFeatures.cpp @@ -34,10 +34,10 @@ void CLBPPyrDotFeatures::init(CDenseFeatures* image_set, int32_t image image_width = image_w; image_height = image_h; - SG_ADD((CSGObject**) &images, "images", "Set of images", MS_NOT_AVAILABLE); - SG_ADD(&image_width, "image_width", "The image width", MS_NOT_AVAILABLE); - SG_ADD(&image_height, "image_height", "The image height", MS_NOT_AVAILABLE); - SG_ADD(&vec_nDim, "vec_nDim", "The dimension of the pyr", MS_NOT_AVAILABLE); + SG_ADD((CSGObject**) &images, "images", "Set of images", ParameterProperties()); + SG_ADD(&image_width, "image_width", "The image width", 
ParameterProperties()); + SG_ADD(&image_height, "image_height", "The image height", ParameterProperties()); + SG_ADD(&vec_nDim, "vec_nDim", "The dimension of the pyr", ParameterProperties()); } CLBPPyrDotFeatures::~CLBPPyrDotFeatures() diff --git a/src/shogun/features/LatentFeatures.cpp b/src/shogun/features/LatentFeatures.cpp index 973d307bf76..b065db63a0d 100644 --- a/src/shogun/features/LatentFeatures.cpp +++ b/src/shogun/features/LatentFeatures.cpp @@ -77,7 +77,7 @@ CData* CLatentFeatures::get_sample(index_t idx) void CLatentFeatures::init() { SG_ADD((CSGObject**) &m_samples, "samples", "Array of examples", - MS_NOT_AVAILABLE); + ParameterProperties()); } CLatentFeatures* CLatentFeatures::obtain_from_generic(CFeatures* base_feats) diff --git a/src/shogun/features/MatrixFeatures.cpp b/src/shogun/features/MatrixFeatures.cpp index b35e5d28b2b..577d8ff1d06 100644 --- a/src/shogun/features/MatrixFeatures.cpp +++ b/src/shogun/features/MatrixFeatures.cpp @@ -144,11 +144,11 @@ template< class ST > void CMatrixFeatures< ST >::set_features( template< class ST > void CMatrixFeatures< ST >::init() { SG_ADD(&m_num_vectors, "m_num_vectors", "Number of feature vectors", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_num_features, "m_num_features", - "Number of features per vector (optional)", MS_NOT_AVAILABLE); + "Number of features per vector (optional)", ParameterProperties()); //TODO add SG_ADD for SGMatrixList - //SG_ADD(&m_features, "m_features", "Matrix features", MS_NOT_AVAILABLE); + //SG_ADD(&m_features, "m_features", "Matrix features", ParameterProperties()); m_num_vectors = 0; m_num_features = 0; diff --git a/src/shogun/features/PolyFeatures.cpp b/src/shogun/features/PolyFeatures.cpp index b60d3b03a9a..03c632959dd 100644 --- a/src/shogun/features/PolyFeatures.cpp +++ b/src/shogun/features/PolyFeatures.cpp @@ -378,17 +378,17 @@ void CPolyFeatures::register_parameters() { SG_ADD( (CSGObject**)&m_feat, "features", "Features in original space.", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD( - &m_degree, "degree", "Degree of the polynomial kernel.", MS_AVAILABLE); - SG_ADD(&m_normalize, "normalize", "Normalize?", MS_NOT_AVAILABLE); + &m_degree, "degree", "Degree of the polynomial kernel.", ParameterProperties::HYPER); + SG_ADD(&m_normalize, "normalize", "Normalize?", ParameterProperties()); SG_ADD( &m_input_dimensions, "input_dimensions", - "Dimensions of the input space.", MS_NOT_AVAILABLE); + "Dimensions of the input space.", ParameterProperties()); SG_ADD( &m_output_dimensions, "output_dimensions", "Dimensions of the feature space of the polynomial kernel.", - MS_NOT_AVAILABLE); + ParameterProperties()); multi_index_length=m_output_dimensions*m_degree; m_parameters->add_vector( diff --git a/src/shogun/features/RandomFourierDotFeatures.cpp b/src/shogun/features/RandomFourierDotFeatures.cpp index c0aa3c3235e..e91b1f59335 100644 --- a/src/shogun/features/RandomFourierDotFeatures.cpp +++ b/src/shogun/features/RandomFourierDotFeatures.cpp @@ -56,11 +56,11 @@ void CRandomFourierDotFeatures::init(KernelName kernel_name, SGVector constant = num_samples > 0 ? 
std::sqrt(2.0 / num_samples) : 1; SG_ADD( &kernel_params, "kernel_params", - "The parameters of the kernel to approximate", MS_NOT_AVAILABLE); + "The parameters of the kernel to approximate", ParameterProperties()); SG_ADD((machine_int_t* ) &kernel, "kernel", - "The kernel to approximate", MS_NOT_AVAILABLE); + "The kernel to approximate", ParameterProperties()); SG_ADD(&constant, "constant", "A constant needed", - MS_NOT_AVAILABLE); + ParameterProperties()); } CFeatures* CRandomFourierDotFeatures::duplicate() const diff --git a/src/shogun/features/RandomKitchenSinksDotFeatures.cpp b/src/shogun/features/RandomKitchenSinksDotFeatures.cpp index 9707f6ba7bb..3b5b2401a0f 100644 --- a/src/shogun/features/RandomKitchenSinksDotFeatures.cpp +++ b/src/shogun/features/RandomKitchenSinksDotFeatures.cpp @@ -74,10 +74,10 @@ void CRandomKitchenSinksDotFeatures::init(CDotFeatures* dataset, num_samples = K; SG_ADD((CSGObject** ) &feats, "feats", "Features to work on", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD( &random_coeff, "random_coeff", "Random function parameters", - MS_NOT_AVAILABLE); + ParameterProperties()); } int32_t CRandomKitchenSinksDotFeatures::get_dim_feature_space() const diff --git a/src/shogun/features/SparsePolyFeatures.cpp b/src/shogun/features/SparsePolyFeatures.cpp index 563fdf6cd4e..bda68e6d5b9 100644 --- a/src/shogun/features/SparsePolyFeatures.cpp +++ b/src/shogun/features/SparsePolyFeatures.cpp @@ -250,17 +250,17 @@ CFeatures* CSparsePolyFeatures::duplicate() const void CSparsePolyFeatures::init() { SG_ADD( - &m_feat, "features", "Features in original space.", MS_NOT_AVAILABLE); + &m_feat, "features", "Features in original space.", ParameterProperties()); SG_ADD( - &m_degree, "degree", "Degree of the polynomial kernel.", MS_AVAILABLE); - SG_ADD(&m_normalize, "normalize", "Normalize", MS_NOT_AVAILABLE); + &m_degree, "degree", "Degree of the polynomial kernel.", ParameterProperties::HYPER); + SG_ADD(&m_normalize, "normalize", "Normalize", ParameterProperties()); SG_ADD( &m_input_dimensions, "input_dimensions", - "Dimensions of the input space.", MS_NOT_AVAILABLE); + "Dimensions of the input space.", ParameterProperties()); SG_ADD( &m_output_dimensions, "output_dimensions", "Dimensions of the feature space of the polynomial kernel.", - MS_NOT_AVAILABLE); + ParameterProperties()); m_normalization_values_len = get_num_vectors(); m_parameters->add_vector(&m_normalization_values, &m_normalization_values_len, @@ -269,8 +269,8 @@ void CSparsePolyFeatures::init() "m_normalization_values", &m_normalization_values, &m_normalization_values_len); - SG_ADD(&mask, "mask", "Mask.", MS_NOT_AVAILABLE); + SG_ADD(&mask, "mask", "Mask.", ParameterProperties()); SG_ADD( &m_hash_bits, "m_hash_bits", "Number of bits in hash", - MS_NOT_AVAILABLE); + ParameterProperties()); } diff --git a/src/shogun/features/StringFeatures.cpp b/src/shogun/features/StringFeatures.cpp index 201ee32f62a..994f6c9cc47 100644 --- a/src/shogun/features/StringFeatures.cpp +++ b/src/shogun/features/StringFeatures.cpp @@ -1705,7 +1705,7 @@ template void CStringFeatures::init() num_symbols=0.0; original_num_symbols=0; - SG_ADD(&alphabet, "alphabet", "Alphabet used.", MS_NOT_AVAILABLE); + SG_ADD(&alphabet, "alphabet", "Alphabet used.", ParameterProperties()); m_parameters->add_vector(&features, &num_vectors, "features", "This contains the array of features."); @@ -1719,19 +1719,19 @@ template void CStringFeatures::init() SG_ADD( &max_string_length, "max_string_length", "Length of longest string.", - MS_NOT_AVAILABLE); + 
ParameterProperties()); SG_ADD( &num_symbols, "num_symbols", "Number of used symbols.", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD( &original_num_symbols, "original_num_symbols", - "Original number of used symbols.", MS_NOT_AVAILABLE); + "Original number of used symbols.", ParameterProperties()); SG_ADD( &order, "order", "Order used in higher order mapping.", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD( &preprocess_on_get, "preprocess_on_get", "Preprocess on-the-fly?", - MS_NOT_AVAILABLE); + ParameterProperties()); m_parameters->add_vector(&symbol_mask_table, &symbol_mask_table_len, "mask_table", "Symbol mask table - using in higher order mapping"); watch_param("mask_table", &symbol_mask_table, &symbol_mask_table_len); diff --git a/src/shogun/features/Subset.cpp b/src/shogun/features/Subset.cpp index a7e5d50f61f..999272d1268 100644 --- a/src/shogun/features/Subset.cpp +++ b/src/shogun/features/Subset.cpp @@ -28,5 +28,5 @@ CSubset::~CSubset() void CSubset::init() { SG_ADD(&m_subset_idx, "subset", "Vector of subset indices", - MS_NOT_AVAILABLE); + ParameterProperties()); } diff --git a/src/shogun/features/SubsetStack.cpp b/src/shogun/features/SubsetStack.cpp index e47b29309da..5ce4e88ae06 100644 --- a/src/shogun/features/SubsetStack.cpp +++ b/src/shogun/features/SubsetStack.cpp @@ -72,9 +72,9 @@ void CSubsetStack::remove_all_subsets() void CSubsetStack::init() { SG_ADD((CSGObject**)&m_active_subset, "active_subset", - "Currently active subset", MS_NOT_AVAILABLE); + "Currently active subset", ParameterProperties()); SG_ADD((CSGObject**)&m_active_subsets_stack, "active_subsets_stack", - "Stack of active subsets", MS_NOT_AVAILABLE); + "Stack of active subsets", ParameterProperties()); m_active_subset=NULL; m_active_subsets_stack=new CDynamicObjectArray(); diff --git a/src/shogun/features/TOPFeatures.cpp b/src/shogun/features/TOPFeatures.cpp index ed81a82b109..cad929a07f6 100644 --- a/src/shogun/features/TOPFeatures.cpp +++ b/src/shogun/features/TOPFeatures.cpp @@ -369,8 +369,8 @@ void CTOPFeatures::init() //m_parameters->add((CSGObject**) &neg, "neg", "HMM for negative class."); SG_ADD( &neglinear, "neglinear", "If negative HMM is a LinearHMM", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD( &poslinear, "poslinear", "If positive HMM is a LinearHMM", - MS_NOT_AVAILABLE); + ParameterProperties()); } diff --git a/src/shogun/features/hashed/HashedDenseFeatures.cpp b/src/shogun/features/hashed/HashedDenseFeatures.cpp index 02bb245a1d1..0d7fcd71c08 100644 --- a/src/shogun/features/hashed/HashedDenseFeatures.cpp +++ b/src/shogun/features/hashed/HashedDenseFeatures.cpp @@ -67,12 +67,12 @@ void CHashedDenseFeatures::init(CDenseFeatures* feats, int32_t d, bool u keep_linear_terms = keep_lin_terms; SG_ADD(&use_quadratic, "use_quadratic", "Whether to use quadratic features", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&keep_linear_terms, "keep_linear_terms", "Whether to keep the linear terms or not", - MS_NOT_AVAILABLE); - SG_ADD(&dim, "dim", "Dimension of new feature space", MS_NOT_AVAILABLE); + ParameterProperties()); + SG_ADD(&dim, "dim", "Dimension of new feature space", ParameterProperties()); SG_ADD((CSGObject** ) &dense_feats, "dense_feats", "Dense features to work on", - MS_NOT_AVAILABLE); + ParameterProperties()); set_generic(); } diff --git a/src/shogun/features/hashed/HashedDocDotFeatures.cpp b/src/shogun/features/hashed/HashedDocDotFeatures.cpp index 484bfa23f69..c77fdaff798 100644 --- a/src/shogun/features/hashed/HashedDocDotFeatures.cpp +++ 
b/src/shogun/features/hashed/HashedDocDotFeatures.cpp @@ -51,17 +51,17 @@ void CHashedDocDotFeatures::init(int32_t hash_bits, CStringFeatures* docs, ((CDelimiterTokenizer* )tokenizer)->init_for_whitespace(); } - SG_ADD(&num_bits, "num_bits", "Number of bits of hash", MS_NOT_AVAILABLE); + SG_ADD(&num_bits, "num_bits", "Number of bits of hash", ParameterProperties()); SG_ADD(&ngrams, "ngrams", "Number of tokens to combine for quadratic feature support", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&tokens_to_skip, "tokens_to_skip", "Number of tokens to skip when combining features", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD((CSGObject**) &doc_collection, "doc_collection", "Document collection", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD((CSGObject**) &tokenizer, "tokenizer", "Document tokenizer", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&should_normalize, "should_normalize", "Normalize or not the dot products", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_REF(doc_collection); SG_REF(tokenizer); diff --git a/src/shogun/features/hashed/HashedSparseFeatures.cpp b/src/shogun/features/hashed/HashedSparseFeatures.cpp index a3b91bebac8..47b5c216ca8 100644 --- a/src/shogun/features/hashed/HashedSparseFeatures.cpp +++ b/src/shogun/features/hashed/HashedSparseFeatures.cpp @@ -59,12 +59,12 @@ void CHashedSparseFeatures::init(CSparseFeatures* feats, int32_t d, bool SG_REF(sparse_feats); SG_ADD(&use_quadratic, "use_quadratic", "Whether to use quadratic features", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&keep_linear_terms, "keep_linear_terms", "Whether to keep the linear terms or not", - MS_NOT_AVAILABLE); - SG_ADD(&dim, "dim", "Dimension of new feature space", MS_NOT_AVAILABLE); + ParameterProperties()); + SG_ADD(&dim, "dim", "Dimension of new feature space", ParameterProperties()); SG_ADD((CSGObject** ) &sparse_feats, "sparse_feats", "Sparse features to work on", - MS_NOT_AVAILABLE); + ParameterProperties()); set_generic(); } diff --git a/src/shogun/features/streaming/StreamingHashedDenseFeatures.cpp b/src/shogun/features/streaming/StreamingHashedDenseFeatures.cpp index b225ea24b83..5ab08a45921 100644 --- a/src/shogun/features/streaming/StreamingHashedDenseFeatures.cpp +++ b/src/shogun/features/streaming/StreamingHashedDenseFeatures.cpp @@ -55,10 +55,10 @@ void CStreamingHashedDenseFeatures::init(CStreamingFile* file, bool is_label keep_linear_terms = keep_lin_terms; SG_ADD(&use_quadratic, "use_quadratic", "Whether to use quadratic features", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&keep_linear_terms, "keep_linear_terms", "Whether to keep the linear terms or not", - MS_NOT_AVAILABLE); - SG_ADD(&dim, "dim", "Size of target dimension", MS_NOT_AVAILABLE); + ParameterProperties()); + SG_ADD(&dim, "dim", "Size of target dimension", ParameterProperties()); has_labels = is_labelled; if (file) diff --git a/src/shogun/features/streaming/StreamingHashedDocDotFeatures.cpp b/src/shogun/features/streaming/StreamingHashedDocDotFeatures.cpp index 840bae2a8cf..08650c329b1 100644 --- a/src/shogun/features/streaming/StreamingHashedDocDotFeatures.cpp +++ b/src/shogun/features/streaming/StreamingHashedDocDotFeatures.cpp @@ -50,10 +50,10 @@ void CStreamingHashedDocDotFeatures::init(CStreamingFile* file, bool is_labelled else converter=NULL; - SG_ADD(&num_bits, "num_bits", "Number of bits for hash", MS_NOT_AVAILABLE); + SG_ADD(&num_bits, "num_bits", "Number of bits for hash", ParameterProperties()); SG_ADD((CSGObject** ) &tokenizer, "tokenizer", 
"The tokenizer used on the documents", - MS_NOT_AVAILABLE); - SG_ADD((CSGObject** ) &converter, "converter", "Converter", MS_NOT_AVAILABLE); + ParameterProperties()); + SG_ADD((CSGObject** ) &converter, "converter", "Converter", ParameterProperties()); has_labels = is_labelled; if (file) diff --git a/src/shogun/features/streaming/StreamingHashedSparseFeatures.cpp b/src/shogun/features/streaming/StreamingHashedSparseFeatures.cpp index c0c69785ce1..c077b294dfe 100644 --- a/src/shogun/features/streaming/StreamingHashedSparseFeatures.cpp +++ b/src/shogun/features/streaming/StreamingHashedSparseFeatures.cpp @@ -52,15 +52,15 @@ void CStreamingHashedSparseFeatures::init(CStreamingFile* file, bool is_labe int32_t size, int32_t d, bool use_quadr, bool keep_lin_terms) { dim = d; - SG_ADD(&dim, "dim", "Size of target dimension", MS_NOT_AVAILABLE); + SG_ADD(&dim, "dim", "Size of target dimension", ParameterProperties()); use_quadratic = use_quadr; keep_linear_terms = keep_lin_terms; SG_ADD(&use_quadratic, "use_quadratic", "Whether to use quadratic features", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&keep_linear_terms, "keep_linear_terms", "Whether to keep the linear terms or not", - MS_NOT_AVAILABLE); + ParameterProperties()); has_labels = is_labelled; if (file) diff --git a/src/shogun/features/streaming/generators/GaussianBlobsDataGenerator.cpp b/src/shogun/features/streaming/generators/GaussianBlobsDataGenerator.cpp index b7b44b5a937..e9fd13572d1 100644 --- a/src/shogun/features/streaming/generators/GaussianBlobsDataGenerator.cpp +++ b/src/shogun/features/streaming/generators/GaussianBlobsDataGenerator.cpp @@ -55,15 +55,15 @@ void CGaussianBlobsDataGenerator::set_blobs_model(index_t sqrt_num_blobs, void CGaussianBlobsDataGenerator::init() { SG_ADD(&m_sqrt_num_blobs, "sqrt_num_blobs", "Number of Blobs per row", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_distance, "distance", "Distance between blobs", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_stretch, "stretch", "Stretch of blobs", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_angle, "angle", "Angle of Blobs", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_cholesky, "cholesky", "Cholesky factor of covariance matrix", - MS_NOT_AVAILABLE); + ParameterProperties()); m_sqrt_num_blobs=1; m_distance=0; diff --git a/src/shogun/features/streaming/generators/MeanShiftDataGenerator.cpp b/src/shogun/features/streaming/generators/MeanShiftDataGenerator.cpp index 3c14c200e7a..657addbc757 100644 --- a/src/shogun/features/streaming/generators/MeanShiftDataGenerator.cpp +++ b/src/shogun/features/streaming/generators/MeanShiftDataGenerator.cpp @@ -41,11 +41,11 @@ void CMeanShiftDataGenerator::set_mean_shift_model(float64_t mean_shift, void CMeanShiftDataGenerator::init() { - SG_ADD(&m_dimension, "dimension", "Dimension of data", MS_NOT_AVAILABLE); + SG_ADD(&m_dimension, "dimension", "Dimension of data", ParameterProperties()); SG_ADD(&m_mean_shift, "mean_shift", "Mean shift in one dimension", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_dimension_shift, "m_dimension_shift", "Dimension of mean shift", - MS_NOT_AVAILABLE); + ParameterProperties()); m_dimension=0; m_mean_shift=0; diff --git a/src/shogun/io/Serializable.h b/src/shogun/io/Serializable.h index a1d393ea435..916a4cc6e9d 100644 --- a/src/shogun/io/Serializable.h +++ b/src/shogun/io/Serializable.h @@ -93,7 +93,7 @@ template class CSerializable: public CSGObject { set_generic::value_type>(); m_value = 0; - SG_ADD(&m_value, "value", 
"Serialized value", MS_NOT_AVAILABLE); + SG_ADD(&m_value, "value", "Serialized value", ParameterProperties()); } protected: diff --git a/src/shogun/io/UAIFile.cpp b/src/shogun/io/UAIFile.cpp index b89f02aa1b8..3dd6ac19a44 100644 --- a/src/shogun/io/UAIFile.cpp +++ b/src/shogun/io/UAIFile.cpp @@ -52,21 +52,21 @@ CUAIFile::~CUAIFile() void CUAIFile::init() { - SG_ADD((CSGObject**)&m_line_reader, "line_reader", "line reader used to read lines from file", MS_NOT_AVAILABLE); - SG_ADD((CSGObject**)&m_parser, "parser", "parser used to parse file", MS_NOT_AVAILABLE); - SG_ADD((CSGObject**)&m_line_tokenizer, "line_tokenizer", "line tokenizer used to parse file", MS_NOT_AVAILABLE); - SG_ADD((CSGObject**)&m_tokenizer, "tokenizer", "tokenizer used to parse file", MS_NOT_AVAILABLE); - SG_ADD(&m_delimiter, "delimiter", "delimiter used in get_vector function", MS_NOT_AVAILABLE); + SG_ADD((CSGObject**)&m_line_reader, "line_reader", "line reader used to read lines from file", ParameterProperties()); + SG_ADD((CSGObject**)&m_parser, "parser", "parser used to parse file", ParameterProperties()); + SG_ADD((CSGObject**)&m_line_tokenizer, "line_tokenizer", "line tokenizer used to parse file", ParameterProperties()); + SG_ADD((CSGObject**)&m_tokenizer, "tokenizer", "tokenizer used to parse file", ParameterProperties()); + SG_ADD(&m_delimiter, "delimiter", "delimiter used in get_vector function", ParameterProperties()); - SG_ADD(&m_num_vars, "num_vars", "number of variables", MS_NOT_AVAILABLE); - SG_ADD(&m_num_factors, "num_factors", "number of factors", MS_NOT_AVAILABLE); - SG_ADD(&m_net_type, "net_type", "network type (either BAYES or MARKOV)", MS_NOT_AVAILABLE); - SG_ADD(&m_vars_card, "vars_card", "cardinality of all the variables", MS_NOT_AVAILABLE); + SG_ADD(&m_num_vars, "num_vars", "number of variables", ParameterProperties()); + SG_ADD(&m_num_factors, "num_factors", "number of factors", ParameterProperties()); + SG_ADD(&m_net_type, "net_type", "network type (either BAYES or MARKOV)", ParameterProperties()); + SG_ADD(&m_vars_card, "vars_card", "cardinality of all the variables", ParameterProperties()); /** Can only be enable after this issue is https://github.com/shogun-toolbox/shogun/issues/1972 * resolved - * SG_ADD(m_factors_table, "m_factors_table", "table of factors", MS_NOT_AVAILABLE); - * SG_ADD(m_factors_scope, "m_factors_scope", "scope of factors", MS_NOT_AVAILABLE); + * SG_ADD(m_factors_table, "m_factors_table", "table of factors", ParameterProperties()); + * SG_ADD(m_factors_scope, "m_factors_scope", "scope of factors", ParameterProperties()); */ m_delimiter = ' '; diff --git a/src/shogun/kernel/ANOVAKernel.cpp b/src/shogun/kernel/ANOVAKernel.cpp index 2feeb1dbd8d..2114d605ce1 100644 --- a/src/shogun/kernel/ANOVAKernel.cpp +++ b/src/shogun/kernel/ANOVAKernel.cpp @@ -90,7 +90,7 @@ float64_t CANOVAKernel::compute_rec2(int32_t idx_a, int32_t idx_b) void CANOVAKernel::register_params() { - SG_ADD(&cardinality, "cardinality", "Kernel cardinality.", MS_AVAILABLE); + SG_ADD(&cardinality, "cardinality", "Kernel cardinality.", ParameterProperties::HYPER); } diff --git a/src/shogun/kernel/AUCKernel.cpp b/src/shogun/kernel/AUCKernel.cpp index 8b3dca88d5c..68c61de922d 100644 --- a/src/shogun/kernel/AUCKernel.cpp +++ b/src/shogun/kernel/AUCKernel.cpp @@ -17,7 +17,7 @@ void CAUCKernel::init() { SG_ADD((CSGObject**) &subkernel, "subkernel", "The subkernel.", - MS_AVAILABLE); + ParameterProperties::HYPER); } CAUCKernel::CAUCKernel() diff --git a/src/shogun/kernel/BesselKernel.cpp 
b/src/shogun/kernel/BesselKernel.cpp index ebdb8b161b1..f999d60253b 100644 --- a/src/shogun/kernel/BesselKernel.cpp +++ b/src/shogun/kernel/BesselKernel.cpp @@ -54,8 +54,8 @@ bool CBesselKernel::init(CFeatures* l, CFeatures* r) void CBesselKernel::init() { - SG_ADD(&order, "order", "Kernel order.", MS_AVAILABLE); - SG_ADD(&degree, "degree", "Kernel degree.", MS_AVAILABLE); + SG_ADD(&order, "order", "Kernel order.", ParameterProperties::HYPER); + SG_ADD(&degree, "degree", "Kernel degree.", ParameterProperties::HYPER); } float64_t CBesselKernel::compute(int32_t idx_a, int32_t idx_b) diff --git a/src/shogun/kernel/CauchyKernel.cpp b/src/shogun/kernel/CauchyKernel.cpp index ea32c904376..0924d39bf53 100644 --- a/src/shogun/kernel/CauchyKernel.cpp +++ b/src/shogun/kernel/CauchyKernel.cpp @@ -47,8 +47,8 @@ bool CCauchyKernel::init(CFeatures* l, CFeatures* r) void CCauchyKernel::init() { - SG_ADD(&m_sigma, "sigma", "Sigma kernel parameter.", MS_AVAILABLE); - SG_ADD(&m_distance, "distance", "Distance to be used.", MS_AVAILABLE); + SG_ADD(&m_sigma, "sigma", "Sigma kernel parameter.", ParameterProperties::HYPER); + SG_ADD(&m_distance, "distance", "Distance to be used.", ParameterProperties::HYPER); } float64_t CCauchyKernel::compute(int32_t idx_a, int32_t idx_b) diff --git a/src/shogun/kernel/Chi2Kernel.cpp b/src/shogun/kernel/Chi2Kernel.cpp index ba1b1595ac2..8889331cb68 100644 --- a/src/shogun/kernel/Chi2Kernel.cpp +++ b/src/shogun/kernel/Chi2Kernel.cpp @@ -15,7 +15,7 @@ using namespace shogun; void CChi2Kernel::init() { - SG_ADD(&width, "width", "Kernel width.", MS_AVAILABLE); + SG_ADD(&width, "width", "Kernel width.", ParameterProperties::HYPER); } CChi2Kernel::CChi2Kernel() diff --git a/src/shogun/kernel/CircularKernel.cpp b/src/shogun/kernel/CircularKernel.cpp index e40c764d3d9..69b5b77d267 100644 --- a/src/shogun/kernel/CircularKernel.cpp +++ b/src/shogun/kernel/CircularKernel.cpp @@ -58,8 +58,8 @@ void CCircularKernel::load_serializable_post() throw (ShogunException) void CCircularKernel::init() { SG_ADD((CSGObject**) &distance, "distance", "Distance to be used.", - MS_AVAILABLE); - SG_ADD(&sigma, "sigma", "Sigma kernel parameter.", MS_AVAILABLE); + ParameterProperties::HYPER); + SG_ADD(&sigma, "sigma", "Sigma kernel parameter.", ParameterProperties::HYPER); } float64_t CCircularKernel::compute(int32_t idx_a, int32_t idx_b) diff --git a/src/shogun/kernel/CombinedKernel.cpp b/src/shogun/kernel/CombinedKernel.cpp index fecb723eb83..1f1d88087ff 100644 --- a/src/shogun/kernel/CombinedKernel.cpp +++ b/src/shogun/kernel/CombinedKernel.cpp @@ -740,7 +740,7 @@ void CCombinedKernel::init() kernel_array=new CDynamicObjectArray(); SG_REF(kernel_array); - SG_ADD(&kernel_array, "kernel_array", "Array of kernels.", MS_AVAILABLE); + SG_ADD(&kernel_array, "kernel_array", "Array of kernels.", ParameterProperties::HYPER); m_parameters->add_vector(&sv_idx, &sv_count, "sv_idx", "Support vector index."); @@ -751,21 +751,21 @@ void CCombinedKernel::init() watch_param("sv_weight", &sv_weight, &sv_count); SG_ADD(&append_subkernel_weights, "append_subkernel_weights", - "If subkernel weights are appended.", MS_AVAILABLE); + "If subkernel weights are appended.", ParameterProperties::HYPER); SG_ADD(&initialized, "initialized", "Whether kernel is ready to be used.", - MS_NOT_AVAILABLE); + ParameterProperties()); enable_subkernel_weight_opt=false; subkernel_log_weights = SGVector(1); subkernel_log_weights[0] = 0; SG_ADD(&subkernel_log_weights, "subkernel_log_weights", - "subkernel weights", MS_AVAILABLE, GRADIENT_AVAILABLE); +
"subkernel weights", ParameterProperties::HYPER | ParameterProperties::GRADIENT); SG_ADD(&enable_subkernel_weight_opt, "enable_subkernel_weight_opt", - "enable subkernel weight opt", MS_NOT_AVAILABLE); + "enable subkernel weight opt", ParameterProperties()); weight_update = false; SG_ADD(&weight_update, "weight_update", - "weight update", MS_NOT_AVAILABLE); + "weight update", ParameterProperties()); } void CCombinedKernel::enable_subkernel_weight_learning() diff --git a/src/shogun/kernel/ConstKernel.cpp b/src/shogun/kernel/ConstKernel.cpp index 4ca693257ac..8c480ac996a 100644 --- a/src/shogun/kernel/ConstKernel.cpp +++ b/src/shogun/kernel/ConstKernel.cpp @@ -49,5 +49,5 @@ void CConstKernel::init() { const_value=1.0; SG_ADD(&const_value, "const_value", "Value for kernel elements.", - MS_AVAILABLE); + ParameterProperties::HYPER); } diff --git a/src/shogun/kernel/CustomKernel.cpp b/src/shogun/kernel/CustomKernel.cpp index c8e280ecc6f..a953b02d221 100644 --- a/src/shogun/kernel/CustomKernel.cpp +++ b/src/shogun/kernel/CustomKernel.cpp @@ -27,15 +27,15 @@ void CCustomKernel::init() m_free_km=true; SG_ADD((CSGObject**)&m_row_subset_stack, "row_subset_stack", - "Subset stack of rows", MS_NOT_AVAILABLE); + "Subset stack of rows", ParameterProperties()); SG_ADD((CSGObject**)&m_col_subset_stack, "col_subset_stack", - "Subset stack of columns", MS_NOT_AVAILABLE); + "Subset stack of columns", ParameterProperties()); SG_ADD(&m_free_km, "free_km", "Whether kernel matrix should be freed in " - "destructor", MS_NOT_AVAILABLE); + "destructor", ParameterProperties()); SG_ADD(&m_is_symmetric, "is_symmetric", "Whether kernel matrix is symmetric", - MS_NOT_AVAILABLE); - SG_ADD(&kmatrix, "kmatrix", "Kernel matrix.", MS_NOT_AVAILABLE); - SG_ADD(&upper_diagonal, "upper_diagonal", "Upper diagonal", MS_NOT_AVAILABLE); + ParameterProperties()); + SG_ADD(&kmatrix, "kmatrix", "Kernel matrix.", ParameterProperties()); + SG_ADD(&upper_diagonal, "upper_diagonal", "Upper diagonal", ParameterProperties()); } CCustomKernel::CCustomKernel() diff --git a/src/shogun/kernel/DiagKernel.cpp b/src/shogun/kernel/DiagKernel.cpp index 76be1c92c72..f5904eaa3d9 100644 --- a/src/shogun/kernel/DiagKernel.cpp +++ b/src/shogun/kernel/DiagKernel.cpp @@ -47,5 +47,5 @@ bool CDiagKernel::init(CFeatures* l, CFeatures* r) void CDiagKernel::init() { diag=1.0; - SG_ADD(&diag, "diag", "Value on kernel diagonal.", MS_AVAILABLE); + SG_ADD(&diag, "diag", "Value on kernel diagonal.", ParameterProperties::HYPER); } diff --git a/src/shogun/kernel/DistanceKernel.cpp b/src/shogun/kernel/DistanceKernel.cpp index 573b427b530..fc85ef8a34b 100644 --- a/src/shogun/kernel/DistanceKernel.cpp +++ b/src/shogun/kernel/DistanceKernel.cpp @@ -63,7 +63,7 @@ float64_t CDistanceKernel::compute(int32_t idx_a, int32_t idx_b) void CDistanceKernel::register_params() { - SG_ADD(&width, "width", "Kernel width.", MS_AVAILABLE); + SG_ADD(&width, "width", "Kernel width.", ParameterProperties::HYPER); SG_ADD((CSGObject**) &distance, "distance", "Distance to be used.", - MS_AVAILABLE); + ParameterProperties::HYPER); } diff --git a/src/shogun/kernel/ExponentialARDKernel.cpp b/src/shogun/kernel/ExponentialARDKernel.cpp index 36f70eb2975..1d7a29b639e 100644 --- a/src/shogun/kernel/ExponentialARDKernel.cpp +++ b/src/shogun/kernel/ExponentialARDKernel.cpp @@ -32,15 +32,15 @@ void CExponentialARDKernel::init() m_weights_cols=1.0; - SG_ADD(&m_log_weights, "log_weights", "Feature weights in log domain", MS_AVAILABLE, - GRADIENT_AVAILABLE); + SG_ADD(&m_log_weights, "log_weights", "Feature 
weights in log domain", ParameterProperties::HYPER | + ParameterProperties::GRADIENT); - SG_ADD(&m_weights_rows, "weights_rows", "Row of feature weights", MS_NOT_AVAILABLE); - SG_ADD(&m_weights_cols, "weights_cols", "Column of feature weights", MS_NOT_AVAILABLE); - SG_ADD((int *)(&m_ARD_type), "type", "ARD kernel type", MS_NOT_AVAILABLE); + SG_ADD(&m_weights_rows, "weights_rows", "Row of feature weights", ParameterProperties()); + SG_ADD(&m_weights_cols, "weights_cols", "Column of feature weights", ParameterProperties()); + SG_ADD((int *)(&m_ARD_type), "type", "ARD kernel type", ParameterProperties()); m_weights_raw=SGMatrix(); - SG_ADD(&m_weights_raw, "weights_raw", "Features weights in standard domain", MS_NOT_AVAILABLE); + SG_ADD(&m_weights_raw, "weights_raw", "Features weights in standard domain", ParameterProperties()); } diff --git a/src/shogun/kernel/ExponentialKernel.cpp b/src/shogun/kernel/ExponentialKernel.cpp index 90747f2c21f..fc20cb83705 100644 --- a/src/shogun/kernel/ExponentialKernel.cpp +++ b/src/shogun/kernel/ExponentialKernel.cpp @@ -62,7 +62,7 @@ void CExponentialKernel::load_serializable_post() throw (ShogunException) void CExponentialKernel::init() { - SG_ADD(&m_width, "width", "Kernel width.", MS_AVAILABLE); + SG_ADD(&m_width, "width", "Kernel width.", ParameterProperties::HYPER); SG_ADD((CSGObject**) &m_distance, "distance", "Distance to be used.", - MS_AVAILABLE); + ParameterProperties::HYPER); } diff --git a/src/shogun/kernel/GaussianARDKernel.cpp b/src/shogun/kernel/GaussianARDKernel.cpp index 0ade24842b1..07f42ba5b55 100644 --- a/src/shogun/kernel/GaussianARDKernel.cpp +++ b/src/shogun/kernel/GaussianARDKernel.cpp @@ -24,8 +24,8 @@ void CGaussianARDKernel::init() { m_sq_lhs=SGVector(); m_sq_rhs=SGVector(); - SG_ADD(&m_sq_lhs, "sq_lhs", "squared left-hand side", MS_NOT_AVAILABLE); - SG_ADD(&m_sq_rhs, "sq_rhs", "squared right-hand side", MS_NOT_AVAILABLE); + SG_ADD(&m_sq_lhs, "sq_lhs", "squared left-hand side", ParameterProperties()); + SG_ADD(&m_sq_rhs, "sq_rhs", "squared right-hand side", ParameterProperties()); } float64_t CGaussianARDKernel::distance(int32_t idx_a, int32_t idx_b) diff --git a/src/shogun/kernel/GaussianKernel.cpp b/src/shogun/kernel/GaussianKernel.cpp index 60eba54ae1a..643e36467ce 100644 --- a/src/shogun/kernel/GaussianKernel.cpp +++ b/src/shogun/kernel/GaussianKernel.cpp @@ -149,5 +149,5 @@ void CGaussianKernel::register_params() m_distance=dist; SG_REF(m_distance); - SG_ADD(&m_log_width, "log_width", "Kernel width in log domain", MS_AVAILABLE, GRADIENT_AVAILABLE); + SG_ADD(&m_log_width, "log_width", "Kernel width in log domain", ParameterProperties::HYPER | ParameterProperties::GRADIENT); } diff --git a/src/shogun/kernel/GaussianShiftKernel.cpp b/src/shogun/kernel/GaussianShiftKernel.cpp index ce5651b971f..81d064cb5bf 100644 --- a/src/shogun/kernel/GaussianShiftKernel.cpp +++ b/src/shogun/kernel/GaussianShiftKernel.cpp @@ -76,6 +76,6 @@ float64_t CGaussianShiftKernel::compute(int32_t idx_a, int32_t idx_b) void CGaussianShiftKernel::init() { - SG_ADD(&max_shift, "max_shift", "Maximum shift.", MS_AVAILABLE); - SG_ADD(&shift_step, "shift_step", "Shift stepsize.", MS_AVAILABLE); + SG_ADD(&max_shift, "max_shift", "Maximum shift.", ParameterProperties::HYPER); + SG_ADD(&shift_step, "shift_step", "Shift stepsize.", ParameterProperties::HYPER); } diff --git a/src/shogun/kernel/GaussianShortRealKernel.cpp b/src/shogun/kernel/GaussianShortRealKernel.cpp index e648f2124ee..e2be9441694 100644 --- a/src/shogun/kernel/GaussianShortRealKernel.cpp +++ 
b/src/shogun/kernel/GaussianShortRealKernel.cpp @@ -64,5 +64,5 @@ float64_t CGaussianShortRealKernel::compute(int32_t idx_a, int32_t idx_b) void CGaussianShortRealKernel::register_params() { - SG_ADD(&width, "width", "kernel width", MS_AVAILABLE); + SG_ADD(&width, "width", "kernel width", ParameterProperties::HYPER); } diff --git a/src/shogun/kernel/HistogramIntersectionKernel.cpp b/src/shogun/kernel/HistogramIntersectionKernel.cpp index 69f5c06f7b2..83fce9d35da 100644 --- a/src/shogun/kernel/HistogramIntersectionKernel.cpp +++ b/src/shogun/kernel/HistogramIntersectionKernel.cpp @@ -78,5 +78,5 @@ float64_t CHistogramIntersectionKernel::compute(int32_t idx_a, int32_t idx_b) void CHistogramIntersectionKernel::register_params() { - SG_ADD(&m_beta, "beta", "the beta parameter of the kernel", MS_AVAILABLE); + SG_ADD(&m_beta, "beta", "the beta parameter of the kernel", ParameterProperties::HYPER); } diff --git a/src/shogun/kernel/InverseMultiQuadricKernel.cpp b/src/shogun/kernel/InverseMultiQuadricKernel.cpp index 7e094f26966..ed341c90137 100644 --- a/src/shogun/kernel/InverseMultiQuadricKernel.cpp +++ b/src/shogun/kernel/InverseMultiQuadricKernel.cpp @@ -49,9 +49,9 @@ void CInverseMultiQuadricKernel::load_serializable_post() throw (ShogunException void CInverseMultiQuadricKernel::init() { - SG_ADD(&coef, "coef", "Kernel Coefficient.", MS_AVAILABLE); + SG_ADD(&coef, "coef", "Kernel Coefficient.", ParameterProperties::HYPER); SG_ADD((CSGObject**) &distance, "distance", "Distance to be used.", - MS_AVAILABLE); + ParameterProperties::HYPER); } float64_t CInverseMultiQuadricKernel::compute(int32_t idx_a, int32_t idx_b) diff --git a/src/shogun/kernel/Kernel.cpp b/src/shogun/kernel/Kernel.cpp index f2848f1c1cf..8c67fe58df9 100644 --- a/src/shogun/kernel/Kernel.cpp +++ b/src/shogun/kernel/Kernel.cpp @@ -919,28 +919,28 @@ void CKernel::save_serializable_post() throw (ShogunException) void CKernel::register_params() { SG_ADD(&cache_size, "cache_size", - "Cache size in MB.", MS_NOT_AVAILABLE); + "Cache size in MB.", ParameterProperties()); SG_ADD( &lhs, "lhs", "Feature vectors to occur on left hand side.", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD( &rhs, "rhs", "Feature vectors to occur on right hand side.", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&lhs_equals_rhs, "lhs_equals_rhs", - "If features on lhs are the same as on rhs.", MS_NOT_AVAILABLE); + "If features on lhs are the same as on rhs.", ParameterProperties()); SG_ADD(&num_lhs, "num_lhs", "Number of feature vectors on left hand side.", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&num_rhs, "num_rhs", "Number of feature vectors on right hand side.", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&combined_kernel_weight, "combined_kernel_weight", - "Combined kernel weight.", MS_AVAILABLE); + "Combined kernel weight.", ParameterProperties::HYPER); SG_ADD(&optimization_initialized, "optimization_initialized", - "Optimization is initialized.", MS_NOT_AVAILABLE); + "Optimization is initialized.", ParameterProperties()); SG_ADD((machine_int_t*) &opt_type, "opt_type", - "Optimization type.", MS_NOT_AVAILABLE); - SG_ADD(&properties, "properties", "Kernel properties.", MS_NOT_AVAILABLE); - SG_ADD(&normalizer, "normalizer", "Normalize the kernel.", MS_AVAILABLE); + "Optimization type.", ParameterProperties()); + SG_ADD(&properties, "properties", "Kernel properties.", ParameterProperties()); + SG_ADD(&normalizer, "normalizer", "Normalize the kernel.", ParameterProperties::HYPER); } diff --git 
a/src/shogun/kernel/LogKernel.cpp b/src/shogun/kernel/LogKernel.cpp index a26b415fef5..36b18fc6985 100644 --- a/src/shogun/kernel/LogKernel.cpp +++ b/src/shogun/kernel/LogKernel.cpp @@ -47,8 +47,8 @@ bool CLogKernel::init(CFeatures* l, CFeatures* r) void CLogKernel::init() { - SG_ADD(&m_degree, "degree", "Degree kernel parameter.", MS_AVAILABLE); - SG_ADD(&m_distance, "distance", "Distance to be used.", MS_AVAILABLE); + SG_ADD(&m_degree, "degree", "Degree kernel parameter.", ParameterProperties::HYPER); + SG_ADD(&m_distance, "distance", "Distance to be used.", ParameterProperties::HYPER); } float64_t CLogKernel::compute(int32_t idx_a, int32_t idx_b) diff --git a/src/shogun/kernel/MultiquadricKernel.cpp b/src/shogun/kernel/MultiquadricKernel.cpp index 053a880a365..8fd9b9ffa99 100644 --- a/src/shogun/kernel/MultiquadricKernel.cpp +++ b/src/shogun/kernel/MultiquadricKernel.cpp @@ -53,7 +53,7 @@ float64_t CMultiquadricKernel::compute(int32_t idx_a, int32_t idx_b) void CMultiquadricKernel::init() { - SG_ADD(&m_coef, "coef", "Kernel coefficient.", MS_AVAILABLE); + SG_ADD(&m_coef, "coef", "Kernel coefficient.", ParameterProperties::HYPER); SG_ADD((CSGObject**) &m_distance, "distance", "Distance to be used.", - MS_AVAILABLE); + ParameterProperties::HYPER); } diff --git a/src/shogun/kernel/PeriodicKernel.cpp b/src/shogun/kernel/PeriodicKernel.cpp index edfe8b30c93..49c6b669b0a 100644 --- a/src/shogun/kernel/PeriodicKernel.cpp +++ b/src/shogun/kernel/PeriodicKernel.cpp @@ -157,13 +157,13 @@ void CPeriodicKernel::init() set_period(1.0); SG_ADD(&m_length_scale, "length_scale", - "Kernel length scale", MS_AVAILABLE, GRADIENT_AVAILABLE); + "Kernel length scale", ParameterProperties::HYPER | ParameterProperties::GRADIENT); SG_ADD(&m_period, "period", - "Kernel period", MS_AVAILABLE, GRADIENT_AVAILABLE); + "Kernel period", ParameterProperties::HYPER | ParameterProperties::GRADIENT); SG_ADD(&m_sq_lhs, "sq_lhs", - "Vector of dot products of each left-hand-side vector with itself.", MS_NOT_AVAILABLE); + "Vector of dot products of each left-hand-side vector with itself.", ParameterProperties()); SG_ADD(&m_sq_rhs, "sq_rhs", - "Vector of dot products of each right-hand-side vector with itself.", MS_NOT_AVAILABLE); + "Vector of dot products of each right-hand-side vector with itself.", ParameterProperties()); } float64_t CPeriodicKernel::distance(int32_t idx_a, int32_t idx_b) diff --git a/src/shogun/kernel/PolyKernel.cpp b/src/shogun/kernel/PolyKernel.cpp index 160c160a56c..e64273cc6c2 100644 --- a/src/shogun/kernel/PolyKernel.cpp +++ b/src/shogun/kernel/PolyKernel.cpp @@ -70,8 +70,8 @@ void CPolyKernel::init() inhomogene = false; set_normalizer(new CSqrtDiagKernelNormalizer()); - SG_ADD(&degree, "degree", "Degree of polynomial kernel", MS_AVAILABLE); + SG_ADD(&degree, "degree", "Degree of polynomial kernel", ParameterProperties::HYPER); SG_ADD(&inhomogene, "inhomogene", "If kernel is inhomogeneous.", - MS_NOT_AVAILABLE); + ParameterProperties()); } diff --git a/src/shogun/kernel/PowerKernel.cpp b/src/shogun/kernel/PowerKernel.cpp index d2d49b53daa..ea8dd322a42 100644 --- a/src/shogun/kernel/PowerKernel.cpp +++ b/src/shogun/kernel/PowerKernel.cpp @@ -47,9 +47,9 @@ bool CPowerKernel::init(CFeatures* l, CFeatures* r) void CPowerKernel::init() { - SG_ADD(&m_degree, "degree", "Degree kernel parameter.", MS_AVAILABLE); + SG_ADD(&m_degree, "degree", "Degree kernel parameter.", ParameterProperties::HYPER); SG_ADD((CSGObject**) &distance, "distance", "Distance to be used.", - MS_AVAILABLE); + ParameterProperties::HYPER); }
float64_t CPowerKernel::compute(int32_t idx_a, int32_t idx_b) diff --git a/src/shogun/kernel/ProductKernel.cpp b/src/shogun/kernel/ProductKernel.cpp index f6b216e041f..49ec215986c 100644 --- a/src/shogun/kernel/ProductKernel.cpp +++ b/src/shogun/kernel/ProductKernel.cpp @@ -229,9 +229,9 @@ void CProductKernel::init() SG_REF(kernel_array); SG_ADD((CSGObject**) &kernel_array, "kernel_array", "Array of kernels", - MS_AVAILABLE); + ParameterProperties::HYPER); SG_ADD(&initialized, "initialized", "Whether kernel is ready to be used", - MS_NOT_AVAILABLE); + ParameterProperties()); } SGMatrix CProductKernel::get_parameter_gradient( diff --git a/src/shogun/kernel/RationalQuadraticKernel.cpp b/src/shogun/kernel/RationalQuadraticKernel.cpp index d0c07d6bb93..9517e16c488 100644 --- a/src/shogun/kernel/RationalQuadraticKernel.cpp +++ b/src/shogun/kernel/RationalQuadraticKernel.cpp @@ -54,8 +54,8 @@ float64_t CRationalQuadraticKernel::compute(int32_t idx_a, int32_t idx_b) void CRationalQuadraticKernel::init() { - SG_ADD(&m_coef, "coef", "Kernel coefficient.", MS_AVAILABLE); + SG_ADD(&m_coef, "coef", "Kernel coefficient.", ParameterProperties::HYPER); SG_ADD((CSGObject**) &m_distance, "distance", "Distance to be used.", - MS_AVAILABLE); + ParameterProperties::HYPER); } diff --git a/src/shogun/kernel/ShiftInvariantKernel.cpp b/src/shogun/kernel/ShiftInvariantKernel.cpp index 14a29238a34..3aefe5e2140 100644 --- a/src/shogun/kernel/ShiftInvariantKernel.cpp +++ b/src/shogun/kernel/ShiftInvariantKernel.cpp @@ -102,8 +102,8 @@ float64_t CShiftInvariantKernel::distance(int32_t a, int32_t b) const void CShiftInvariantKernel::register_params() { - SG_ADD((CSGObject**) &m_distance, "m_distance", "Distance to be used.", MS_NOT_AVAILABLE); - SG_ADD((CSGObject**) &m_precomputed_distance, "m_precomputed_distance", "Precomputed istance to be used.", MS_NOT_AVAILABLE); + SG_ADD((CSGObject**) &m_distance, "m_distance", "Distance to be used.", ParameterProperties()); + SG_ADD((CSGObject**) &m_precomputed_distance, "m_precomputed_distance", "Precomputed istance to be used.", ParameterProperties()); m_distance=NULL; m_precomputed_distance=NULL; diff --git a/src/shogun/kernel/SigmoidKernel.cpp b/src/shogun/kernel/SigmoidKernel.cpp index 2bf6bfa434f..4dbb9b2c5ca 100644 --- a/src/shogun/kernel/SigmoidKernel.cpp +++ b/src/shogun/kernel/SigmoidKernel.cpp @@ -55,6 +55,6 @@ void CSigmoidKernel::init() gamma=0.0; coef0=0.0; - SG_ADD(&gamma, "gamma", "Gamma.", MS_AVAILABLE); - SG_ADD(&coef0, "coef0", "Coefficient 0.", MS_AVAILABLE); + SG_ADD(&gamma, "gamma", "Gamma.", ParameterProperties::HYPER); + SG_ADD(&coef0, "coef0", "Coefficient 0.", ParameterProperties::HYPER); } diff --git a/src/shogun/kernel/SphericalKernel.cpp b/src/shogun/kernel/SphericalKernel.cpp index afc9c3dcb26..3f9cb32414c 100644 --- a/src/shogun/kernel/SphericalKernel.cpp +++ b/src/shogun/kernel/SphericalKernel.cpp @@ -52,8 +52,8 @@ bool CSphericalKernel::init(CFeatures* l, CFeatures* r) void CSphericalKernel::register_params() { SG_ADD((CSGObject**) &distance, "distance", "Distance to be used.", - MS_AVAILABLE); - SG_ADD(&sigma, "sigma", "Sigma kernel parameter.", MS_AVAILABLE); + ParameterProperties::HYPER); + SG_ADD(&sigma, "sigma", "Sigma kernel parameter.", ParameterProperties::HYPER); } float64_t CSphericalKernel::compute(int32_t idx_a, int32_t idx_b) diff --git a/src/shogun/kernel/TStudentKernel.cpp b/src/shogun/kernel/TStudentKernel.cpp index 9fae4b9bb39..2141a064969 100644 --- a/src/shogun/kernel/TStudentKernel.cpp +++ 
b/src/shogun/kernel/TStudentKernel.cpp @@ -11,9 +11,9 @@ using namespace shogun; void CTStudentKernel::init() { - SG_ADD(&degree, "degree", "Kernel degree.", MS_AVAILABLE); + SG_ADD(&degree, "degree", "Kernel degree.", ParameterProperties::HYPER); SG_ADD((CSGObject**) &distance, "distance", "Distance to be used.", - MS_AVAILABLE); + ParameterProperties::HYPER); } CTStudentKernel::CTStudentKernel(): CKernel(0), distance(NULL), degree(1.0) diff --git a/src/shogun/kernel/TensorProductPairKernel.cpp b/src/shogun/kernel/TensorProductPairKernel.cpp index a7d9d9df4c0..dfe9876ecfa 100644 --- a/src/shogun/kernel/TensorProductPairKernel.cpp +++ b/src/shogun/kernel/TensorProductPairKernel.cpp @@ -74,5 +74,5 @@ float64_t CTensorProductPairKernel::compute(int32_t idx_a, int32_t idx_b) void CTensorProductPairKernel::register_params() { - SG_ADD((CSGObject**)&subkernel, "subkernel", "the subkernel", MS_AVAILABLE); + SG_ADD((CSGObject**)&subkernel, "subkernel", "the subkernel", ParameterProperties::HYPER); } diff --git a/src/shogun/kernel/WaveKernel.cpp b/src/shogun/kernel/WaveKernel.cpp index 0c37eb4630e..2a1f99cf702 100644 --- a/src/shogun/kernel/WaveKernel.cpp +++ b/src/shogun/kernel/WaveKernel.cpp @@ -47,9 +47,9 @@ bool CWaveKernel::init(CFeatures* l, CFeatures* r) void CWaveKernel::init() { - SG_ADD(&m_theta, "theta", "Theta kernel parameter.", MS_AVAILABLE); + SG_ADD(&m_theta, "theta", "Theta kernel parameter.", ParameterProperties::HYPER); SG_ADD((CSGObject**) &m_distance, "distance", "Distance to be used.", - MS_AVAILABLE); + ParameterProperties::HYPER); } float64_t CWaveKernel::compute(int32_t idx_a, int32_t idx_b) diff --git a/src/shogun/kernel/WaveletKernel.cpp b/src/shogun/kernel/WaveletKernel.cpp index 45b8f8f21cb..cf0b47d126e 100644 --- a/src/shogun/kernel/WaveletKernel.cpp +++ b/src/shogun/kernel/WaveletKernel.cpp @@ -46,8 +46,8 @@ bool CWaveletKernel::init(CFeatures* l, CFeatures* r) void CWaveletKernel::init() { - SG_ADD(&Wdilation, "Wdilation", "Dilation coefficient", MS_AVAILABLE); - SG_ADD(&Wtranslation, "Wtranslaton", "Translation coefficient", MS_AVAILABLE); + SG_ADD(&Wdilation, "Wdilation", "Dilation coefficient", ParameterProperties::HYPER); + SG_ADD(&Wtranslation, "Wtranslaton", "Translation coefficient", ParameterProperties::HYPER); } float64_t CWaveletKernel::compute(int32_t idx_a, int32_t idx_b) diff --git a/src/shogun/kernel/WeightedDegreeRBFKernel.cpp b/src/shogun/kernel/WeightedDegreeRBFKernel.cpp index 818453a5c90..ae2e6474e13 100644 --- a/src/shogun/kernel/WeightedDegreeRBFKernel.cpp +++ b/src/shogun/kernel/WeightedDegreeRBFKernel.cpp @@ -110,6 +110,6 @@ float64_t CWeightedDegreeRBFKernel::compute(int32_t idx_a, int32_t idx_b) void CWeightedDegreeRBFKernel::register_params() { - SG_ADD(&width, "width", "Kernel width", MS_AVAILABLE); - SG_ADD(&degree, "degree", "Kernel degree", MS_AVAILABLE); + SG_ADD(&width, "width", "Kernel width", ParameterProperties::HYPER); + SG_ADD(&degree, "degree", "Kernel degree", ParameterProperties::HYPER); } diff --git a/src/shogun/kernel/normalizer/AvgDiagKernelNormalizer.h b/src/shogun/kernel/normalizer/AvgDiagKernelNormalizer.h index 912b516de61..fd5ad29e12c 100644 --- a/src/shogun/kernel/normalizer/AvgDiagKernelNormalizer.h +++ b/src/shogun/kernel/normalizer/AvgDiagKernelNormalizer.h @@ -39,7 +39,7 @@ class CAvgDiagKernelNormalizer : public CKernelNormalizer scale=c; SG_ADD(&scale, "scale", "Scale quotient by which kernel is scaled.", - MS_AVAILABLE); + ParameterProperties::HYPER); } /** default destructor */ diff --git
a/src/shogun/kernel/normalizer/DiceKernelNormalizer.h b/src/shogun/kernel/normalizer/DiceKernelNormalizer.h index 5ed9430153a..29a4b531d60 100644 --- a/src/shogun/kernel/normalizer/DiceKernelNormalizer.h +++ b/src/shogun/kernel/normalizer/DiceKernelNormalizer.h @@ -42,7 +42,7 @@ class CDiceKernelNormalizer : public CKernelNormalizer SG_ADD(&use_optimized_diagonal_computation, "use_optimized_diagonal_computation", - "flat if optimized diagonal computation is used", MS_NOT_AVAILABLE); + "flat if optimized diagonal computation is used", ParameterProperties()); } /** default destructor */ diff --git a/src/shogun/kernel/normalizer/FirstElementKernelNormalizer.h b/src/shogun/kernel/normalizer/FirstElementKernelNormalizer.h index a1b1944a37b..17804116077 100644 --- a/src/shogun/kernel/normalizer/FirstElementKernelNormalizer.h +++ b/src/shogun/kernel/normalizer/FirstElementKernelNormalizer.h @@ -31,7 +31,7 @@ class CFirstElementKernelNormalizer : public CKernelNormalizer CFirstElementKernelNormalizer() : CKernelNormalizer(), scale(1.0) { SG_ADD(&scale, "scale", "Scale quotient by which kernel is scaled.", - MS_AVAILABLE); + ParameterProperties::HYPER); } /** default destructor */ diff --git a/src/shogun/kernel/normalizer/KernelNormalizer.h b/src/shogun/kernel/normalizer/KernelNormalizer.h index cc937a748ec..274a6a14a94 100644 --- a/src/shogun/kernel/normalizer/KernelNormalizer.h +++ b/src/shogun/kernel/normalizer/KernelNormalizer.h @@ -90,7 +90,7 @@ class CKernelNormalizer : public CSGObject virtual void register_params() { SG_ADD((machine_int_t*) &m_type, "m_type", "Normalizer type.", - MS_NOT_AVAILABLE); + ParameterProperties()); } /** getter for normalizer type diff --git a/src/shogun/kernel/normalizer/RidgeKernelNormalizer.h b/src/shogun/kernel/normalizer/RidgeKernelNormalizer.h index 98da9dc071d..54c2bef215e 100644 --- a/src/shogun/kernel/normalizer/RidgeKernelNormalizer.h +++ b/src/shogun/kernel/normalizer/RidgeKernelNormalizer.h @@ -55,8 +55,8 @@ class CRidgeKernelNormalizer : public CKernelNormalizer : CKernelNormalizer() { SG_ADD(&scale, "scale", "Scale quotient by which kernel is scaled.", - MS_AVAILABLE); - SG_ADD(&ridge, "ridge", "Ridge added to diagonal.", MS_AVAILABLE); + ParameterProperties::HYPER); + SG_ADD(&ridge, "ridge", "Ridge added to diagonal.", ParameterProperties::HYPER); scale=c; ridge=r; diff --git a/src/shogun/kernel/normalizer/ScatterKernelNormalizer.h b/src/shogun/kernel/normalizer/ScatterKernelNormalizer.h index cc507405a4e..fd0ccb3b5aa 100644 --- a/src/shogun/kernel/normalizer/ScatterKernelNormalizer.h +++ b/src/shogun/kernel/normalizer/ScatterKernelNormalizer.h @@ -154,15 +154,15 @@ class CScatterKernelNormalizer: public CKernelNormalizer m_testing_class = -1; SG_ADD(&m_testing_class, "m_testing_class", - "Testing Class.", MS_NOT_AVAILABLE); + "Testing Class.", ParameterProperties()); SG_ADD(&m_const_diag, "m_const_diag", - "Factor to multiply to diagonal elements.", MS_AVAILABLE); + "Factor to multiply to diagonal elements.", ParameterProperties::HYPER); SG_ADD(&m_const_offdiag, "m_const_offdiag", - "Factor to multiply to off-diagonal elements.", MS_AVAILABLE); + "Factor to multiply to off-diagonal elements.", ParameterProperties::HYPER); - SG_ADD((CSGObject**) &m_labels, "m_labels", "Labels", MS_NOT_AVAILABLE); + SG_ADD((CSGObject**) &m_labels, "m_labels", "Labels", ParameterProperties()); SG_ADD((CSGObject**) &m_normalizer, "m_normalizer", "Kernel normalizer.", - MS_AVAILABLE); + ParameterProperties::HYPER); } protected: diff --git 
a/src/shogun/kernel/normalizer/SqrtDiagKernelNormalizer.h b/src/shogun/kernel/normalizer/SqrtDiagKernelNormalizer.h index 0c15b266b4a..ecd0908f214 100644 --- a/src/shogun/kernel/normalizer/SqrtDiagKernelNormalizer.h +++ b/src/shogun/kernel/normalizer/SqrtDiagKernelNormalizer.h @@ -46,7 +46,7 @@ class CSqrtDiagKernelNormalizer : public CKernelNormalizer SG_ADD(&use_optimized_diagonal_computation, "use_optimized_diagonal_computation", - "flat if optimized diagonal computation is used", MS_NOT_AVAILABLE); + "flat if optimized diagonal computation is used", ParameterProperties()); } /** default destructor */ diff --git a/src/shogun/kernel/normalizer/VarianceKernelNormalizer.h b/src/shogun/kernel/normalizer/VarianceKernelNormalizer.h index fc53e6bf02b..39e508ba9bd 100644 --- a/src/shogun/kernel/normalizer/VarianceKernelNormalizer.h +++ b/src/shogun/kernel/normalizer/VarianceKernelNormalizer.h @@ -30,9 +30,9 @@ class CVarianceKernelNormalizer : public CKernelNormalizer CVarianceKernelNormalizer() : CKernelNormalizer(), meandiff(1.0), sqrt_meandiff(1.0) { - SG_ADD(&meandiff, "meandiff", "Scaling constant.", MS_AVAILABLE); + SG_ADD(&meandiff, "meandiff", "Scaling constant.", ParameterProperties::HYPER); SG_ADD(&sqrt_meandiff, "sqrt_meandiff", - "Square root of scaling constant.", MS_AVAILABLE); + "Square root of scaling constant.", ParameterProperties::HYPER); } /** default destructor */ diff --git a/src/shogun/kernel/string/CommWordStringKernel.cpp b/src/shogun/kernel/string/CommWordStringKernel.cpp index ccb3b6e6137..54281fe8277 100644 --- a/src/shogun/kernel/string/CommWordStringKernel.cpp +++ b/src/shogun/kernel/string/CommWordStringKernel.cpp @@ -593,10 +593,10 @@ void CCommWordStringKernel::init() set_normalizer(new CSqrtDiagKernelNormalizer(use_dict_diagonal_optimization)); SG_ADD(&dictionary_weights, "dictionary_weights", - "Dictionary for applying kernel.", MS_NOT_AVAILABLE); + "Dictionary for applying kernel.", ParameterProperties()); SG_ADD(&use_sign, "use_sign", - "If signum(counts) is used instead of counts.", MS_AVAILABLE); + "If signum(counts) is used instead of counts.", ParameterProperties::HYPER); SG_ADD(&use_dict_diagonal_optimization, "use_dict_diagonal_optimization", "If K(x,x) is computed potentially " - "more efficiently.", MS_NOT_AVAILABLE); + "more efficiently.", ParameterProperties()); } diff --git a/src/shogun/kernel/string/FixedDegreeStringKernel.cpp b/src/shogun/kernel/string/FixedDegreeStringKernel.cpp index eab50a5fea8..c234fcd3a98 100644 --- a/src/shogun/kernel/string/FixedDegreeStringKernel.cpp +++ b/src/shogun/kernel/string/FixedDegreeStringKernel.cpp @@ -16,7 +16,7 @@ using namespace shogun; void CFixedDegreeStringKernel::init() { - SG_ADD(°ree, "degree", "The degree.", MS_AVAILABLE); + SG_ADD(°ree, "degree", "The degree.", ParameterProperties::HYPER); set_normalizer(new CSqrtDiagKernelNormalizer()); } diff --git a/src/shogun/kernel/string/GaussianMatchStringKernel.cpp b/src/shogun/kernel/string/GaussianMatchStringKernel.cpp index 093cf62e60f..a6a918c9992 100644 --- a/src/shogun/kernel/string/GaussianMatchStringKernel.cpp +++ b/src/shogun/kernel/string/GaussianMatchStringKernel.cpp @@ -77,5 +77,5 @@ float64_t CGaussianMatchStringKernel::compute(int32_t idx_a, int32_t idx_b) void CGaussianMatchStringKernel::register_params() { - SG_ADD(&width, "width", "kernel width", MS_AVAILABLE); + SG_ADD(&width, "width", "kernel width", ParameterProperties::HYPER); } diff --git a/src/shogun/kernel/string/HistogramWordStringKernel.cpp 
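Note on the kernel and normalizer hunks above: the replacement of the old model-selection flag is mechanical. Every parameter that was registered with MS_AVAILABLE now passes ParameterProperties::HYPER as the fourth SG_ADD argument, and every parameter that was registered with MS_NOT_AVAILABLE passes a default-constructed ParameterProperties() carrying no flags. A minimal sketch of how a registration function reads after this change follows; CExampleKernel and its members are hypothetical and serve only to illustrate the pattern, they are not part of the patch.

    // Hypothetical kernel illustrating the post-refactor registration pattern.
    void CExampleKernel::register_params()
    {
        // Tunable hyperparameter, previously registered with MS_AVAILABLE.
        SG_ADD(&m_width, "width", "Kernel width.", ParameterProperties::HYPER);

        // Internal bookkeeping state, previously MS_NOT_AVAILABLE; the
        // default-constructed properties object sets no flags.
        SG_ADD(&m_initialized, "initialized", "Whether the kernel is initialized.",
            ParameterProperties());
    }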
b/src/shogun/kernel/string/HistogramWordStringKernel.cpp index 3484b9445e4..509417c05fd 100644 --- a/src/shogun/kernel/string/HistogramWordStringKernel.cpp +++ b/src/shogun/kernel/string/HistogramWordStringKernel.cpp @@ -409,7 +409,7 @@ void CHistogramWordStringKernel::init() initialized=false; SG_ADD(&initialized, "initialized", "If kernel is initalized.", - MS_NOT_AVAILABLE); + ParameterProperties()); m_parameters->add_vector(&plo_lhs, &num_lhs, "plo_lhs"); watch_param("plo_lhs", &plo_lhs, &num_lhs); @@ -436,7 +436,7 @@ void CHistogramWordStringKernel::init() watch_param("variance", &variance, &num_params2); SG_ADD((CSGObject**) &estimate, "estimate", "Plugin Estimate.", - MS_NOT_AVAILABLE); + ParameterProperties()); } #ifdef DEBUG_HWSK_COMPUTATION diff --git a/src/shogun/kernel/string/LocalAlignmentStringKernel.cpp b/src/shogun/kernel/string/LocalAlignmentStringKernel.cpp index 026e85f5386..8cb8e186635 100644 --- a/src/shogun/kernel/string/LocalAlignmentStringKernel.cpp +++ b/src/shogun/kernel/string/LocalAlignmentStringKernel.cpp @@ -406,8 +406,8 @@ void CLocalAlignmentStringKernel::init() init_logsum(); SG_ADD(&initialized, "initialized", "If kernel is initalized.", - MS_NOT_AVAILABLE); - SG_ADD(&m_opening, "opening", "Opening gap opening penalty.", MS_AVAILABLE); + ParameterProperties()); + SG_ADD(&m_opening, "opening", "Opening gap opening penalty.", ParameterProperties::HYPER); SG_ADD(&m_extension, "extension", "Extension gap extension penalty.", - MS_AVAILABLE); + ParameterProperties::HYPER); } diff --git a/src/shogun/kernel/string/LocalityImprovedStringKernel.cpp b/src/shogun/kernel/string/LocalityImprovedStringKernel.cpp index 97eb3998b23..26cc821f99e 100644 --- a/src/shogun/kernel/string/LocalityImprovedStringKernel.cpp +++ b/src/shogun/kernel/string/LocalityImprovedStringKernel.cpp @@ -102,7 +102,7 @@ void CLocalityImprovedStringKernel::init() inner_degree = 0; outer_degree = 0; - SG_ADD(&length, "length", "Window Length.", MS_AVAILABLE); - SG_ADD(&inner_degree, "inner_degree", "Inner degree.", MS_AVAILABLE); - SG_ADD(&outer_degree, "outer_degree", "Outer degree.", MS_AVAILABLE); + SG_ADD(&length, "length", "Window Length.", ParameterProperties::HYPER); + SG_ADD(&inner_degree, "inner_degree", "Inner degree.", ParameterProperties::HYPER); + SG_ADD(&outer_degree, "outer_degree", "Outer degree.", ParameterProperties::HYPER); } diff --git a/src/shogun/kernel/string/MatchWordStringKernel.cpp b/src/shogun/kernel/string/MatchWordStringKernel.cpp index a6730d4138c..bf92ec94b5d 100644 --- a/src/shogun/kernel/string/MatchWordStringKernel.cpp +++ b/src/shogun/kernel/string/MatchWordStringKernel.cpp @@ -69,5 +69,5 @@ void CMatchWordStringKernel::init() { degree=0; set_normalizer(new CAvgDiagKernelNormalizer()); - SG_ADD(°ree, "degree", "Degree of poly kernel", MS_AVAILABLE); + SG_ADD(°ree, "degree", "Degree of poly kernel", ParameterProperties::HYPER); } diff --git a/src/shogun/kernel/string/OligoStringKernel.cpp b/src/shogun/kernel/string/OligoStringKernel.cpp index 614aa914565..b7b8124ed15 100644 --- a/src/shogun/kernel/string/OligoStringKernel.cpp +++ b/src/shogun/kernel/string/OligoStringKernel.cpp @@ -302,7 +302,7 @@ void COligoStringKernel::init() set_normalizer(new CSqrtDiagKernelNormalizer()); - SG_ADD(&k, "k", "K-mer length.", MS_AVAILABLE); - SG_ADD(&width, "width", "Width of Gaussian.", MS_AVAILABLE); - SG_ADD(&gauss_table, "gauss_table", "Gauss Cache Table.", MS_NOT_AVAILABLE); + SG_ADD(&k, "k", "K-mer length.", ParameterProperties::HYPER); + SG_ADD(&width, "width", "Width of 
Gaussian.", ParameterProperties::HYPER); + SG_ADD(&gauss_table, "gauss_table", "Gauss Cache Table.", ParameterProperties()); } diff --git a/src/shogun/kernel/string/PolyMatchStringKernel.cpp b/src/shogun/kernel/string/PolyMatchStringKernel.cpp index 3e29a144169..2f47491a524 100644 --- a/src/shogun/kernel/string/PolyMatchStringKernel.cpp +++ b/src/shogun/kernel/string/PolyMatchStringKernel.cpp @@ -87,9 +87,9 @@ void CPolyMatchStringKernel::init() rescaling=false; set_normalizer(new CSqrtDiagKernelNormalizer()); - SG_ADD(°ree, "degree", "Degree of poly-kernel.", MS_AVAILABLE); + SG_ADD(°ree, "degree", "Degree of poly-kernel.", ParameterProperties::HYPER); SG_ADD(&inhomogene, "inhomogene", "True for inhomogene poly-kernel.", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&rescaling, "rescaling", - "True to rescale kernel with string length.", MS_AVAILABLE); + "True to rescale kernel with string length.", ParameterProperties::HYPER); } diff --git a/src/shogun/kernel/string/PolyMatchWordStringKernel.cpp b/src/shogun/kernel/string/PolyMatchWordStringKernel.cpp index aae3de09b8e..dfa3d9efc81 100644 --- a/src/shogun/kernel/string/PolyMatchWordStringKernel.cpp +++ b/src/shogun/kernel/string/PolyMatchWordStringKernel.cpp @@ -90,7 +90,7 @@ void CPolyMatchWordStringKernel::init() inhomogene=false; set_normalizer(new CSqrtDiagKernelNormalizer()); - SG_ADD(°ree, "degree", "Degree of poly-kernel.", MS_AVAILABLE); + SG_ADD(°ree, "degree", "Degree of poly-kernel.", ParameterProperties::HYPER); SG_ADD(&inhomogene, "inhomogene", "True for inhomogene poly-kernel.", - MS_NOT_AVAILABLE); + ParameterProperties()); } diff --git a/src/shogun/kernel/string/RegulatoryModulesStringKernel.cpp b/src/shogun/kernel/string/RegulatoryModulesStringKernel.cpp index 7757c2bbc64..b16112c9995 100644 --- a/src/shogun/kernel/string/RegulatoryModulesStringKernel.cpp +++ b/src/shogun/kernel/string/RegulatoryModulesStringKernel.cpp @@ -49,18 +49,18 @@ void CRegulatoryModulesStringKernel::init() motif_positions_lhs=NULL; motif_positions_rhs=NULL; - SG_ADD(&width, "width", "the width of Gaussian kernel part", MS_AVAILABLE); + SG_ADD(&width, "width", "the width of Gaussian kernel part", ParameterProperties::HYPER); SG_ADD(°ree, "degree", "the degree of weighted degree kernel part", - MS_AVAILABLE); + ParameterProperties::HYPER); SG_ADD(&shift, "shift", - "the shift of weighted degree with shifts kernel part", MS_AVAILABLE); - SG_ADD(&window, "window", "the size of window around motifs", MS_AVAILABLE); + "the shift of weighted degree with shifts kernel part", ParameterProperties::HYPER); + SG_ADD(&window, "window", "the size of window around motifs", ParameterProperties::HYPER); SG_ADD((CSGObject**)&motif_positions_lhs, "motif_positions_lhs", - "the matrix of motif positions from sequences left-hand side", MS_NOT_AVAILABLE); + "the matrix of motif positions from sequences left-hand side", ParameterProperties()); SG_ADD((CSGObject**)&motif_positions_rhs, "motif_positions_rhs", - "the matrix of motif positions from sequences right-hand side", MS_NOT_AVAILABLE); - SG_ADD(&position_weights, "position_weights", "scaling weights in window", MS_NOT_AVAILABLE); - SG_ADD(&weights, "weights", "weights of WD kernel", MS_NOT_AVAILABLE); + "the matrix of motif positions from sequences right-hand side", ParameterProperties()); + SG_ADD(&position_weights, "position_weights", "scaling weights in window", ParameterProperties()); + SG_ADD(&weights, "weights", "weights of WD kernel", ParameterProperties()); } bool 
CRegulatoryModulesStringKernel::init(CFeatures* l, CFeatures* r) diff --git a/src/shogun/kernel/string/SNPStringKernel.cpp b/src/shogun/kernel/string/SNPStringKernel.cpp index fe85783de3a..8d40ad53764 100644 --- a/src/shogun/kernel/string/SNPStringKernel.cpp +++ b/src/shogun/kernel/string/SNPStringKernel.cpp @@ -177,10 +177,10 @@ float64_t CSNPStringKernel::compute(int32_t idx_a, int32_t idx_b) void CSNPStringKernel::register_params() { - SG_ADD(&m_degree, "m_degree", "the order of the kernel", MS_AVAILABLE); - SG_ADD(&m_win_len, "m_win_len", "the window length", MS_AVAILABLE); + SG_ADD(&m_degree, "m_degree", "the order of the kernel", ParameterProperties::HYPER); + SG_ADD(&m_win_len, "m_win_len", "the window length", ParameterProperties::HYPER); SG_ADD(&m_inhomogene, "m_inhomogene", - "the mark of whether it's an inhomogeneous poly kernel", MS_NOT_AVAILABLE); + "the mark of whether it's an inhomogeneous poly kernel", ParameterProperties()); m_parameters->add_vector(&m_str_min, &m_str_len, "m_str_min", "allele A"); watch_param("m_str_min", &m_str_min, &m_str_len); diff --git a/src/shogun/kernel/string/SimpleLocalityImprovedStringKernel.cpp b/src/shogun/kernel/string/SimpleLocalityImprovedStringKernel.cpp index 5ad46739ed1..775b03c5a0f 100644 --- a/src/shogun/kernel/string/SimpleLocalityImprovedStringKernel.cpp +++ b/src/shogun/kernel/string/SimpleLocalityImprovedStringKernel.cpp @@ -197,8 +197,8 @@ void CSimpleLocalityImprovedStringKernel::init() inner_degree = 3; outer_degree = 1; - SG_ADD(&length, "length", "Window Length.", MS_AVAILABLE); - SG_ADD(&inner_degree, "inner_degree", "Inner degree.", MS_AVAILABLE); - SG_ADD(&outer_degree, "outer_degree", "Outer degree.", MS_AVAILABLE); - SG_ADD(&pyramid_weights,"pyramid_weights", "Pyramid weights.", MS_AVAILABLE); + SG_ADD(&length, "length", "Window Length.", ParameterProperties::HYPER); + SG_ADD(&inner_degree, "inner_degree", "Inner degree.", ParameterProperties::HYPER); + SG_ADD(&outer_degree, "outer_degree", "Outer degree.", ParameterProperties::HYPER); + SG_ADD(&pyramid_weights,"pyramid_weights", "Pyramid weights.", ParameterProperties::HYPER); } diff --git a/src/shogun/kernel/string/SpectrumMismatchRBFKernel.cpp b/src/shogun/kernel/string/SpectrumMismatchRBFKernel.cpp index e649e2b39ef..cdeae06bfb1 100644 --- a/src/shogun/kernel/string/SpectrumMismatchRBFKernel.cpp +++ b/src/shogun/kernel/string/SpectrumMismatchRBFKernel.cpp @@ -297,24 +297,24 @@ bool CSpectrumMismatchRBFKernel::set_max_mismatch(int32_t max) void CSpectrumMismatchRBFKernel::register_params() { - SG_ADD(°ree, "degree", "degree of the kernel", MS_AVAILABLE); + SG_ADD(°ree, "degree", "degree of the kernel", ParameterProperties::HYPER); SG_ADD(&AA_matrix, "AA_matrix", "128*128 scalar product matrix", - MS_NOT_AVAILABLE); - SG_ADD(&width, "width", "width of Gaussian", MS_AVAILABLE); + ParameterProperties()); + SG_ADD(&width, "width", "width of Gaussian", ParameterProperties::HYPER); SG_ADD(&target_letter_0, "target_letter_0", "target letter 0", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&initialized, "initialized", "the mark of initialization status", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD((CSGObject** )&kernel_matrix, "kernel_matrix", "the kernel matrix with its length " "defined by the number of vectors of the string features", - MS_NOT_AVAILABLE); + ParameterProperties()); } void CSpectrumMismatchRBFKernel::register_alphabet() { SG_ADD((CSGObject** )&alphabet, "alphabet", "the alphabet used by kernel", - MS_NOT_AVAILABLE); + 
ParameterProperties()); } void CSpectrumMismatchRBFKernel::init() diff --git a/src/shogun/kernel/string/SpectrumRBFKernel.cpp b/src/shogun/kernel/string/SpectrumRBFKernel.cpp index efd97e0be90..15060820108 100644 --- a/src/shogun/kernel/string/SpectrumRBFKernel.cpp +++ b/src/shogun/kernel/string/SpectrumRBFKernel.cpp @@ -378,23 +378,23 @@ bool CSpectrumRBFKernel::set_AA_matrix( void CSpectrumRBFKernel::register_param() { - SG_ADD(°ree, "degree", "degree of the kernel", MS_AVAILABLE); - SG_ADD(&AA_matrix, "AA_matrix", "128*128 scalar product matrix", MS_NOT_AVAILABLE); - SG_ADD(&width, "width", "width of Gaussian", MS_AVAILABLE); + SG_ADD(°ree, "degree", "degree of the kernel", ParameterProperties::HYPER); + SG_ADD(&AA_matrix, "AA_matrix", "128*128 scalar product matrix", ParameterProperties()); + SG_ADD(&width, "width", "width of Gaussian", ParameterProperties::HYPER); SG_ADD(&nof_sequences, "nof_sequences", "length of the sequence", - MS_NOT_AVAILABLE); + ParameterProperties()); m_parameters->add_vector(&sequences, &nof_sequences, "sequences", "the sequences as a part of profile"); watch_param("sequences", &sequences, &nof_sequences); SG_ADD(&max_sequence_length, - "max_sequence_length", "max length of the sequence", MS_NOT_AVAILABLE); + "max_sequence_length", "max length of the sequence", ParameterProperties()); } void CSpectrumRBFKernel::register_alphabet() { SG_ADD((CSGObject**)&alphabet, "alphabet", "the alphabet used by kernel", - MS_NOT_AVAILABLE); + ParameterProperties()); } void CSpectrumRBFKernel::init() diff --git a/src/shogun/kernel/string/SubsequenceStringKernel.cpp b/src/shogun/kernel/string/SubsequenceStringKernel.cpp index 4d6925916c6..7ea9dc81fb3 100644 --- a/src/shogun/kernel/string/SubsequenceStringKernel.cpp +++ b/src/shogun/kernel/string/SubsequenceStringKernel.cpp @@ -131,6 +131,6 @@ float64_t CSubsequenceStringKernel::compute(int32_t idx_a, int32_t idx_b) void CSubsequenceStringKernel::register_params() { - SG_ADD(&m_maxlen, "m_maxlen", "maximum length of common subsequences", MS_AVAILABLE); - SG_ADD(&m_lambda, "m_lambda", "gap penalty", MS_AVAILABLE); + SG_ADD(&m_maxlen, "m_maxlen", "maximum length of common subsequences", ParameterProperties::HYPER); + SG_ADD(&m_lambda, "m_lambda", "gap penalty", ParameterProperties::HYPER); } diff --git a/src/shogun/kernel/string/WeightedDegreePositionStringKernel.cpp b/src/shogun/kernel/string/WeightedDegreePositionStringKernel.cpp index 3af8f216774..748a09fdebb 100644 --- a/src/shogun/kernel/string/WeightedDegreePositionStringKernel.cpp +++ b/src/shogun/kernel/string/WeightedDegreePositionStringKernel.cpp @@ -1949,18 +1949,18 @@ void CWeightedDegreePositionStringKernel::init() "Shift Vector."); watch_param("shift", &shift, &shift_len); - SG_ADD(&max_shift, "max_shift", "Maximal shift.", MS_AVAILABLE); - SG_ADD(&mkl_stepsize, "mkl_stepsize", "MKL step size.", MS_AVAILABLE); - SG_ADD(°ree, "degree", "Order of WD kernel.", MS_AVAILABLE); + SG_ADD(&max_shift, "max_shift", "Maximal shift.", ParameterProperties::HYPER); + SG_ADD(&mkl_stepsize, "mkl_stepsize", "MKL step size.", ParameterProperties::HYPER); + SG_ADD(°ree, "degree", "Order of WD kernel.", ParameterProperties::HYPER); SG_ADD(&max_mismatch, "max_mismatch", - "Number of allowed mismatches.", MS_AVAILABLE); + "Number of allowed mismatches.", ParameterProperties::HYPER); SG_ADD(&block_computation, "block_computation", - "If block computation shall be used.", MS_NOT_AVAILABLE); + "If block computation shall be used.", ParameterProperties()); SG_ADD((machine_int_t*) &type, 
"type", - "WeightedDegree kernel type.", MS_AVAILABLE); + "WeightedDegree kernel type.", ParameterProperties::HYPER); SG_ADD(&which_degree, "which_degree", "The selected degree. All degrees are used by default (for value -1).", - MS_AVAILABLE); + ParameterProperties::HYPER); SG_ADD((CSGObject**) &alphabet, "alphabet", - "Alphabet of Features.", MS_NOT_AVAILABLE); + "Alphabet of Features.", ParameterProperties()); } diff --git a/src/shogun/kernel/string/WeightedDegreeStringKernel.cpp b/src/shogun/kernel/string/WeightedDegreeStringKernel.cpp index 69f4397e83c..9f64662d540 100644 --- a/src/shogun/kernel/string/WeightedDegreeStringKernel.cpp +++ b/src/shogun/kernel/string/WeightedDegreeStringKernel.cpp @@ -1018,17 +1018,17 @@ void CWeightedDegreeStringKernel::init() "Weights per position."); watch_param("position_weights", &position_weights, &position_weights_len); - SG_ADD(&mkl_stepsize, "mkl_stepsize", "MKL step size.", MS_AVAILABLE); - SG_ADD(°ree, "degree", "Order of WD kernel.", MS_AVAILABLE); + SG_ADD(&mkl_stepsize, "mkl_stepsize", "MKL step size.", ParameterProperties::HYPER); + SG_ADD(°ree, "degree", "Order of WD kernel.", ParameterProperties::HYPER); SG_ADD(&max_mismatch, "max_mismatch", - "Number of allowed mismatches.", MS_AVAILABLE); + "Number of allowed mismatches.", ParameterProperties::HYPER); SG_ADD(&block_computation, "block_computation", - "If block computation shall be used.", MS_NOT_AVAILABLE); + "If block computation shall be used.", ParameterProperties()); SG_ADD((machine_int_t*) &type, "type", - "WeightedDegree kernel type.", MS_AVAILABLE); + "WeightedDegree kernel type.", ParameterProperties::HYPER); SG_ADD(&which_degree, "which_degree", "The selected degree. All degrees are used by default (for value -1).", - MS_AVAILABLE); + ParameterProperties::HYPER); SG_ADD((CSGObject**) &alphabet, "alphabet", - "Alphabet of Features.", MS_NOT_AVAILABLE); + "Alphabet of Features.", ParameterProperties()); } diff --git a/src/shogun/labels/DenseLabels.cpp b/src/shogun/labels/DenseLabels.cpp index 4a5c551c28c..3fe10ee9616 100644 --- a/src/shogun/labels/DenseLabels.cpp +++ b/src/shogun/labels/DenseLabels.cpp @@ -49,7 +49,7 @@ CDenseLabels::~CDenseLabels() void CDenseLabels::init() { - SG_ADD(&m_labels, "labels", "The labels.", MS_NOT_AVAILABLE); + SG_ADD(&m_labels, "labels", "The labels.", ParameterProperties()); } void CDenseLabels::set_to_one() diff --git a/src/shogun/labels/Labels.cpp b/src/shogun/labels/Labels.cpp index 07cde25c31c..e0b471b662b 100644 --- a/src/shogun/labels/Labels.cpp +++ b/src/shogun/labels/Labels.cpp @@ -41,10 +41,10 @@ CLabels::~CLabels() void CLabels::init() { SG_ADD((CSGObject **)&m_subset_stack, "subset_stack", - "Current subset stack", MS_NOT_AVAILABLE); + "Current subset stack", ParameterProperties()); SG_ADD( &m_current_values, "current_values", "current active value vector", - MS_NOT_AVAILABLE) + ParameterProperties()); m_subset_stack = new CSubsetStack(); SG_REF(m_subset_stack); } diff --git a/src/shogun/labels/LatentLabels.cpp b/src/shogun/labels/LatentLabels.cpp index 3898af884b0..69d1abed1cb 100644 --- a/src/shogun/labels/LatentLabels.cpp +++ b/src/shogun/labels/LatentLabels.cpp @@ -45,8 +45,8 @@ CLatentLabels::~CLatentLabels() void CLatentLabels::init() { - SG_ADD((CSGObject**) &m_latent_labels, "m_latent_labels", "The latent labels", MS_NOT_AVAILABLE); - SG_ADD((CSGObject**) &m_labels, "m_labels", "The labels", MS_NOT_AVAILABLE); + SG_ADD((CSGObject**) &m_latent_labels, "m_latent_labels", "The latent labels", ParameterProperties()); + 
SG_ADD((CSGObject**) &m_labels, "m_labels", "The labels", ParameterProperties()); m_latent_labels = NULL; m_labels = NULL; } diff --git a/src/shogun/labels/MultilabelLabels.cpp b/src/shogun/labels/MultilabelLabels.cpp index dc184ecdbd3..ebfd8efce07 100644 --- a/src/shogun/labels/MultilabelLabels.cpp +++ b/src/shogun/labels/MultilabelLabels.cpp @@ -69,9 +69,9 @@ CMultilabelLabels::init(int32_t num_labels, int32_t num_classes) // This one does consider the contained labels, so its simply BROKEN // Can be disabled as - SG_ADD(&m_num_labels, "m_num_labels", "number of labels", MS_NOT_AVAILABLE); - SG_ADD(&m_num_classes, "m_num_classes", "number of classes", MS_NOT_AVAILABLE); - // SG_ADD((CSGObject**) &m_labels, "m_labels", "The labels", MS_NOT_AVAILABLE); + SG_ADD(&m_num_labels, "m_num_labels", "number of labels", ParameterProperties()); + SG_ADD(&m_num_classes, "m_num_classes", "number of classes", ParameterProperties()); + // SG_ADD((CSGObject**) &m_labels, "m_labels", "The labels", ParameterProperties()); // Can only be enabled after this issue has been solved: diff --git a/src/shogun/labels/StructuredLabels.cpp b/src/shogun/labels/StructuredLabels.cpp index ece4f0ee1ae..4a83107f4c6 100644 --- a/src/shogun/labels/StructuredLabels.cpp +++ b/src/shogun/labels/StructuredLabels.cpp @@ -84,7 +84,7 @@ int32_t CStructuredLabels::get_num_labels() const void CStructuredLabels::init() { - SG_ADD((CSGObject**) &m_labels, "m_labels", "The labels", MS_NOT_AVAILABLE); + SG_ADD((CSGObject**) &m_labels, "m_labels", "The labels", ParameterProperties()); m_labels = NULL; m_sdt = SDT_UNKNOWN; diff --git a/src/shogun/latent/LatentModel.cpp b/src/shogun/latent/LatentModel.cpp index e91434cddc3..f191b4c13ff 100644 --- a/src/shogun/latent/LatentModel.cpp +++ b/src/shogun/latent/LatentModel.cpp @@ -82,15 +82,15 @@ void CLatentModel::argmax_h(const SGVector& w) void CLatentModel::register_parameters() { - SG_ADD(&m_features, "features", "Latent features", MS_NOT_AVAILABLE); - SG_ADD(&m_labels, "labels", "Latent labels", MS_NOT_AVAILABLE); + SG_ADD(&m_features, "features", "Latent features", ParameterProperties()); + SG_ADD(&m_labels, "labels", "Latent labels", ParameterProperties()); SG_ADD( &m_cached_psi, "cached_psi", "Cached PSI features after argmax_h", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD( &m_do_caching, "do_caching", "Indicate whether or not do PSI vector caching after argmax_h", - MS_NOT_AVAILABLE); + ParameterProperties()); } diff --git a/src/shogun/lib/DelimiterTokenizer.cpp b/src/shogun/lib/DelimiterTokenizer.cpp index 8fe3b7d03c8..970b4d3d321 100644 --- a/src/shogun/lib/DelimiterTokenizer.cpp +++ b/src/shogun/lib/DelimiterTokenizer.cpp @@ -32,9 +32,9 @@ CDelimiterTokenizer::CDelimiterTokenizer(const CDelimiterTokenizer& orig) void CDelimiterTokenizer::init() { SG_ADD(&last_idx, "last_idx", "Index of last token", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&skip_consecutive_delimiters, "skip_consecutive_delimiters", - "Whether to skip consecutive delimiters or not", MS_NOT_AVAILABLE); + "Whether to skip consecutive delimiters or not", ParameterProperties()); SGVector::fill_vector(delimiters, 256, 0); } diff --git a/src/shogun/lib/DynamicArray.h b/src/shogun/lib/DynamicArray.h index 31ec0df3baa..5a2ebdc980a 100644 --- a/src/shogun/lib/DynamicArray.h +++ b/src/shogun/lib/DynamicArray.h @@ -627,18 +627,18 @@ template class CDynamicArray :public CSGObject SG_ADD(&m_array.resize_granularity, "resize_granularity", - "shrink/grow step size.", MS_NOT_AVAILABLE); + "shrink/grow step 
size.", ParameterProperties()); SG_ADD(&m_array.use_sg_mallocs, "use_sg_malloc", "whether SG_MALLOC or malloc should be used", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_array.free_array, "free_array", "whether array must be freed", - MS_NOT_AVAILABLE); - SG_ADD(&dim1_size, "dim1_size", "Dimension 1", MS_NOT_AVAILABLE); - SG_ADD(&dim2_size, "dim2_size", "Dimension 2", MS_NOT_AVAILABLE); - SG_ADD(&dim3_size, "dim3_size", "Dimension 3", MS_NOT_AVAILABLE); + ParameterProperties()); + SG_ADD(&dim1_size, "dim1_size", "Dimension 1", ParameterProperties()); + SG_ADD(&dim2_size, "dim2_size", "Dimension 2", ParameterProperties()); + SG_ADD(&dim3_size, "dim3_size", "Dimension 3", ParameterProperties()); } protected: diff --git a/src/shogun/lib/DynamicObjectArray.h b/src/shogun/lib/DynamicObjectArray.h index de39407a518..47e48686100 100644 --- a/src/shogun/lib/DynamicObjectArray.h +++ b/src/shogun/lib/DynamicObjectArray.h @@ -473,18 +473,18 @@ class CDynamicObjectArray : public CSGObject SG_ADD(&m_array.resize_granularity, "resize_granularity", - "shrink/grow step size.", MS_NOT_AVAILABLE); + "shrink/grow step size.", ParameterProperties()); SG_ADD(&m_array.use_sg_mallocs, "use_sg_malloc", "whether SG_MALLOC or malloc should be used", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_array.free_array, "free_array", "whether array must be freed", - MS_NOT_AVAILABLE); - SG_ADD(&dim1_size, "dim1_size", "Dimension 1", MS_NOT_AVAILABLE); - SG_ADD(&dim2_size, "dim2_size", "Dimension 2", MS_NOT_AVAILABLE); - SG_ADD(&dim3_size, "dim3_size", "Dimension 3", MS_NOT_AVAILABLE); + ParameterProperties()); + SG_ADD(&dim1_size, "dim1_size", "Dimension 1", ParameterProperties()); + SG_ADD(&dim2_size, "dim2_size", "Dimension 2", ParameterProperties()); + SG_ADD(&dim3_size, "dim3_size", "Dimension 3", ParameterProperties()); } /** de-reference all elements of this array once */ diff --git a/src/shogun/lib/List.h b/src/shogun/lib/List.h index 4bcd1686686..854c0959f2d 100644 --- a/src/shogun/lib/List.h +++ b/src/shogun/lib/List.h @@ -53,8 +53,8 @@ class CListElement :public CSGObject private: void init() { - SG_ADD(&data, "data", "Data of this element.", MS_NOT_AVAILABLE); - SG_ADD(&next, "next", "Next element in list.", MS_NOT_AVAILABLE); + SG_ADD(&data, "data", "Data of this element.", ParameterProperties()); + SG_ADD(&next, "next", "Next element in list.", ParameterProperties()); } public: diff --git a/src/shogun/lib/NGramTokenizer.cpp b/src/shogun/lib/NGramTokenizer.cpp index 2903a1f57ae..675db75340d 100644 --- a/src/shogun/lib/NGramTokenizer.cpp +++ b/src/shogun/lib/NGramTokenizer.cpp @@ -29,9 +29,9 @@ CNGramTokenizer::CNGramTokenizer(const CNGramTokenizer& orig) void CNGramTokenizer::init() { SG_ADD(&n, "n", "Size of n-grams", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&last_idx, "last_idx", "Index of last token", - MS_NOT_AVAILABLE); + ParameterProperties()); } void CNGramTokenizer::set_text(SGVector txt) diff --git a/src/shogun/lib/Tokenizer.cpp b/src/shogun/lib/Tokenizer.cpp index dcc770b08c0..fcb66eac866 100644 --- a/src/shogun/lib/Tokenizer.cpp +++ b/src/shogun/lib/Tokenizer.cpp @@ -21,6 +21,6 @@ void CTokenizer::set_text(SGVector txt) void CTokenizer::init() { - SG_ADD(&text, "text", "The text", MS_NOT_AVAILABLE) + SG_ADD(&text, "text", "The text", ParameterProperties()); } } diff --git a/src/shogun/loss/HuberLoss.cpp b/src/shogun/loss/HuberLoss.cpp index 95d292a4503..8883fc75d03 100644 --- a/src/shogun/loss/HuberLoss.cpp +++ b/src/shogun/loss/HuberLoss.cpp @@ -95,5 +95,5 @@ 
void CHuberLoss::init() { m_delta=0; - SG_ADD(&m_delta,"m_delta","delta",MS_NOT_AVAILABLE); + SG_ADD(&m_delta,"m_delta","delta",ParameterProperties()); } diff --git a/src/shogun/machine/BaggingMachine.cpp b/src/shogun/machine/BaggingMachine.cpp index 5e4c501b326..a2b0ebc5258 100644 --- a/src/shogun/machine/BaggingMachine.cpp +++ b/src/shogun/machine/BaggingMachine.cpp @@ -256,18 +256,18 @@ void CBaggingMachine::register_parameters() { SG_ADD( &m_features, "features", "Train features for bagging", - MS_NOT_AVAILABLE); - SG_ADD(&m_num_bags, "num_bags", "Number of bags", MS_AVAILABLE); - SG_ADD(&m_bag_size, "bag_size", "Number of vectors per bag", MS_AVAILABLE); - SG_ADD(&m_bags, "bags", "Bags array", MS_NOT_AVAILABLE); + ParameterProperties()); + SG_ADD(&m_num_bags, "num_bags", "Number of bags", ParameterProperties::HYPER); + SG_ADD(&m_bag_size, "bag_size", "Number of vectors per bag", ParameterProperties::HYPER); + SG_ADD(&m_bags, "bags", "Bags array", ParameterProperties()); SG_ADD( &m_combination_rule, "combination_rule", - "Combination rule to use for aggregating", MS_AVAILABLE); + "Combination rule to use for aggregating", ParameterProperties::HYPER); SG_ADD(&m_all_oob_idx, "all_oob_idx", "Indices of all oob vectors", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD( &m_oob_indices, "oob_indices", "OOB indices for each machine", - MS_NOT_AVAILABLE); + ParameterProperties()); } void CBaggingMachine::set_num_bags(int32_t num_bags) diff --git a/src/shogun/machine/BaseMulticlassMachine.cpp b/src/shogun/machine/BaseMulticlassMachine.cpp index 7e1d8e91223..d775ffeee78 100644 --- a/src/shogun/machine/BaseMulticlassMachine.cpp +++ b/src/shogun/machine/BaseMulticlassMachine.cpp @@ -12,7 +12,7 @@ CBaseMulticlassMachine::CBaseMulticlassMachine() { m_machines = new CDynamicObjectArray(); - SG_ADD((CSGObject**)&m_machines, "machines", "Machines that jointly make up the multi-class machine.", MS_NOT_AVAILABLE); + SG_ADD((CSGObject**)&m_machines, "machines", "Machines that jointly make up the multi-class machine.", ParameterProperties()); } CBaseMulticlassMachine::~CBaseMulticlassMachine() diff --git a/src/shogun/machine/DistanceMachine.cpp b/src/shogun/machine/DistanceMachine.cpp index 8645416a041..265c4666997 100644 --- a/src/shogun/machine/DistanceMachine.cpp +++ b/src/shogun/machine/DistanceMachine.cpp @@ -35,7 +35,7 @@ void CDistanceMachine::init() set_store_model_features(true); distance=NULL; - SG_ADD(&distance, "distance", "Distance to use", MS_AVAILABLE); + SG_ADD(&distance, "distance", "Distance to use", ParameterProperties::HYPER); } void CDistanceMachine::distances_lhs(SGVector& result, index_t idx_a1, index_t idx_a2, index_t idx_b) diff --git a/src/shogun/machine/GaussianProcessMachine.cpp b/src/shogun/machine/GaussianProcessMachine.cpp index 80c61d89798..5900b7b5816 100644 --- a/src/shogun/machine/GaussianProcessMachine.cpp +++ b/src/shogun/machine/GaussianProcessMachine.cpp @@ -62,7 +62,7 @@ void CGaussianProcessMachine::init() m_method=NULL; SG_ADD((CSGObject**) &m_method, "inference_method", "Inference method", - MS_AVAILABLE); + ParameterProperties::HYPER); } CGaussianProcessMachine::~CGaussianProcessMachine() diff --git a/src/shogun/machine/IterativeMachine.h b/src/shogun/machine/IterativeMachine.h index 8964c25f4c1..bc716bfb7d8 100644 --- a/src/shogun/machine/IterativeMachine.h +++ b/src/shogun/machine/IterativeMachine.h @@ -35,13 +35,13 @@ namespace shogun SG_ADD( &m_current_iteration, "current_iteration", - "Current Iteration of training", MS_NOT_AVAILABLE); + "Current 
Iteration of training", ParameterProperties()); SG_ADD( &m_max_iterations, "max_iterations", - "Maximum number of Iterations", MS_AVAILABLE); + "Maximum number of Iterations", ParameterProperties::HYPER); SG_ADD( &m_complete, "complete", "Convergence status", - MS_NOT_AVAILABLE); + ParameterProperties()); } virtual ~CIterativeMachine() diff --git a/src/shogun/machine/KernelMachine.cpp b/src/shogun/machine/KernelMachine.cpp index 18ad7ab5a97..16b41512583 100644 --- a/src/shogun/machine/KernelMachine.cpp +++ b/src/shogun/machine/KernelMachine.cpp @@ -610,19 +610,19 @@ void CKernelMachine::init() use_linadd=true; use_bias=true; - SG_ADD(&kernel, "kernel", "", MS_AVAILABLE); + SG_ADD(&kernel, "kernel", "", ParameterProperties::HYPER); SG_ADD((CSGObject**) &m_custom_kernel, "custom_kernel", "Custom kernel for" - " data lock", MS_NOT_AVAILABLE); + " data lock", ParameterProperties()); SG_ADD((CSGObject**) &m_kernel_backup, "kernel_backup", - "Kernel backup for data lock", MS_NOT_AVAILABLE); + "Kernel backup for data lock", ParameterProperties()); SG_ADD(&use_batch_computation, "use_batch_computation", - "Batch computation is enabled.", MS_NOT_AVAILABLE); - SG_ADD(&use_linadd, "use_linadd", "Linadd is enabled.", MS_NOT_AVAILABLE); - SG_ADD(&use_bias, "use_bias", "Bias shall be used.", MS_NOT_AVAILABLE); - SG_ADD(&m_bias, "m_bias", "Bias term.", MS_NOT_AVAILABLE); + "Batch computation is enabled.", ParameterProperties()); + SG_ADD(&use_linadd, "use_linadd", "Linadd is enabled.", ParameterProperties()); + SG_ADD(&use_bias, "use_bias", "Bias shall be used.", ParameterProperties()); + SG_ADD(&m_bias, "m_bias", "Bias term.", ParameterProperties()); SG_ADD(&m_alpha, "m_alpha", "Array of coefficients alpha.", - MS_NOT_AVAILABLE); - SG_ADD(&m_svs, "m_svs", "Number of ``support vectors''.", MS_NOT_AVAILABLE); + ParameterProperties()); + SG_ADD(&m_svs, "m_svs", "Number of ``support vectors''.", ParameterProperties()); } bool CKernelMachine::supports_locking() const diff --git a/src/shogun/machine/KernelMulticlassMachine.cpp b/src/shogun/machine/KernelMulticlassMachine.cpp index 40975a729cc..f1848b94a38 100644 --- a/src/shogun/machine/KernelMulticlassMachine.cpp +++ b/src/shogun/machine/KernelMulticlassMachine.cpp @@ -81,7 +81,7 @@ void CKernelMulticlassMachine::store_model_features() CKernelMulticlassMachine::CKernelMulticlassMachine() : CMulticlassMachine(), m_kernel(NULL) { - SG_ADD((CSGObject**)&m_kernel,"kernel", "The kernel to be used", MS_AVAILABLE); + SG_ADD((CSGObject**)&m_kernel,"kernel", "The kernel to be used", ParameterProperties::HYPER); } /** standard constructor @@ -94,7 +94,7 @@ CKernelMulticlassMachine::CKernelMulticlassMachine(CMulticlassStrategy *strategy CMulticlassMachine(strategy,(CMachine*)machine,labs), m_kernel(NULL) { set_kernel(kernel); - SG_ADD((CSGObject**)&m_kernel,"kernel", "The kernel to be used", MS_AVAILABLE); + SG_ADD((CSGObject**)&m_kernel,"kernel", "The kernel to be used", ParameterProperties::HYPER); } /** destructor */ diff --git a/src/shogun/machine/KernelStructuredOutputMachine.cpp b/src/shogun/machine/KernelStructuredOutputMachine.cpp index dbb98c6a6e4..a307e908402 100644 --- a/src/shogun/machine/KernelStructuredOutputMachine.cpp +++ b/src/shogun/machine/KernelStructuredOutputMachine.cpp @@ -45,5 +45,5 @@ CKernel* CKernelStructuredOutputMachine::get_kernel() const void CKernelStructuredOutputMachine::register_parameters() { - SG_ADD((CSGObject**)&m_kernel, "m_kernel", "The kernel", MS_AVAILABLE); + SG_ADD((CSGObject**)&m_kernel, "m_kernel", "The kernel", 
ParameterProperties::HYPER); } diff --git a/src/shogun/machine/LinearLatentMachine.cpp b/src/shogun/machine/LinearLatentMachine.cpp index 0b4fe25dc61..1b7750410f1 100644 --- a/src/shogun/machine/LinearLatentMachine.cpp +++ b/src/shogun/machine/LinearLatentMachine.cpp @@ -118,9 +118,9 @@ void CLinearLatentMachine::init() m_max_iter = 400; m_model = NULL; - SG_ADD(&m_C, "C", "Cost constant.", MS_NOT_AVAILABLE); - SG_ADD(&m_epsilon, "epsilon", "Convergence precision.", MS_NOT_AVAILABLE); - SG_ADD(&m_max_iter, "max_iter", "Maximum iterations.", MS_NOT_AVAILABLE); - SG_ADD(&m_model, "latent_model", "Latent Model.", MS_NOT_AVAILABLE); + SG_ADD(&m_C, "C", "Cost constant.", ParameterProperties()); + SG_ADD(&m_epsilon, "epsilon", "Convergence precision.", ParameterProperties()); + SG_ADD(&m_max_iter, "max_iter", "Maximum iterations.", ParameterProperties()); + SG_ADD(&m_model, "latent_model", "Latent Model.", ParameterProperties()); } diff --git a/src/shogun/machine/LinearMachine.cpp b/src/shogun/machine/LinearMachine.cpp index ca8bd4415ac..6ae712d95ed 100644 --- a/src/shogun/machine/LinearMachine.cpp +++ b/src/shogun/machine/LinearMachine.cpp @@ -35,11 +35,11 @@ void CLinearMachine::init() bias = 0; features = NULL; - SG_ADD(&m_w, "w", "Parameter vector w.", MS_NOT_AVAILABLE); - SG_ADD(&bias, "bias", "Bias b.", MS_NOT_AVAILABLE); + SG_ADD(&m_w, "w", "Parameter vector w.", ParameterProperties()); + SG_ADD(&bias, "bias", "Bias b.", ParameterProperties()); SG_ADD( (CFeatures**)&features, "features", "Feature object.", - MS_NOT_AVAILABLE); + ParameterProperties()); } diff --git a/src/shogun/machine/LinearMulticlassMachine.h b/src/shogun/machine/LinearMulticlassMachine.h index 03e7c532b1a..8d3560137ba 100644 --- a/src/shogun/machine/LinearMulticlassMachine.h +++ b/src/shogun/machine/LinearMulticlassMachine.h @@ -31,7 +31,7 @@ class CLinearMulticlassMachine : public CMulticlassMachine CLinearMulticlassMachine() : CMulticlassMachine(), m_features(NULL) { SG_ADD((CSGObject**)&m_features, "m_features", "Feature object.", - MS_NOT_AVAILABLE); + ParameterProperties()); } /** standard constructor @@ -45,7 +45,7 @@ class CLinearMulticlassMachine : public CMulticlassMachine { set_features(features); SG_ADD((CSGObject**)&m_features, "m_features", "Feature object.", - MS_NOT_AVAILABLE); + ParameterProperties()); } /** destructor */ diff --git a/src/shogun/machine/LinearStructuredOutputMachine.cpp b/src/shogun/machine/LinearStructuredOutputMachine.cpp index 75e3e7e3694..e52644b73f1 100644 --- a/src/shogun/machine/LinearStructuredOutputMachine.cpp +++ b/src/shogun/machine/LinearStructuredOutputMachine.cpp @@ -68,7 +68,7 @@ CStructuredLabels* CLinearStructuredOutputMachine::apply_structured(CFeatures* d void CLinearStructuredOutputMachine::register_parameters() { - SG_ADD(&m_w, "m_w", "Weight vector", MS_NOT_AVAILABLE); + SG_ADD(&m_w, "m_w", "Weight vector", ParameterProperties()); } void CLinearStructuredOutputMachine::store_model_features() diff --git a/src/shogun/machine/Machine.cpp b/src/shogun/machine/Machine.cpp index cfcc05f4caf..ae0cef78e44 100644 --- a/src/shogun/machine/Machine.cpp +++ b/src/shogun/machine/Machine.cpp @@ -21,15 +21,15 @@ CMachine::CMachine() m_store_model_features=false; SG_ADD(&m_max_train_time, "max_train_time", - "Maximum training time.", MS_NOT_AVAILABLE); + "Maximum training time.", ParameterProperties()); SG_ADD((machine_int_t*) &m_solver_type, "solver_type", - "Type of solver.", MS_NOT_AVAILABLE); + "Type of solver.", ParameterProperties()); - SG_ADD(&m_labels, "labels", "Labels to 
be used.", MS_NOT_AVAILABLE); + SG_ADD(&m_labels, "labels", "Labels to be used.", ParameterProperties()); SG_ADD(&m_store_model_features, "store_model_features", - "Should feature data of model be stored after training?", MS_NOT_AVAILABLE); + "Should feature data of model be stored after training?", ParameterProperties()); SG_ADD(&m_data_locked, "data_locked", - "Indicates whether data is locked", MS_NOT_AVAILABLE); + "Indicates whether data is locked", ParameterProperties()); } CMachine::~CMachine() diff --git a/src/shogun/machine/MulticlassMachine.cpp b/src/shogun/machine/MulticlassMachine.cpp index 2899ae36d1d..47a53dd358f 100644 --- a/src/shogun/machine/MulticlassMachine.cpp +++ b/src/shogun/machine/MulticlassMachine.cpp @@ -50,8 +50,8 @@ void CMulticlassMachine::set_labels(CLabels* lab) void CMulticlassMachine::register_parameters() { - SG_ADD(&m_multiclass_strategy,"multiclass_strategy", "Multiclass strategy", MS_NOT_AVAILABLE); - SG_ADD(&m_machine, "machine", "The base machine", MS_NOT_AVAILABLE); + SG_ADD(&m_multiclass_strategy,"multiclass_strategy", "Multiclass strategy", ParameterProperties()); + SG_ADD(&m_machine, "machine", "The base machine", ParameterProperties()); } void CMulticlassMachine::init_strategy() diff --git a/src/shogun/machine/OnlineLinearMachine.cpp b/src/shogun/machine/OnlineLinearMachine.cpp index 45fa08eb284..67a12220c19 100644 --- a/src/shogun/machine/OnlineLinearMachine.cpp +++ b/src/shogun/machine/OnlineLinearMachine.cpp @@ -19,10 +19,10 @@ using namespace shogun; COnlineLinearMachine::COnlineLinearMachine() : CMachine(), bias(0), features(NULL) { - SG_ADD(&m_w, "m_w", "Parameter vector w.", MS_NOT_AVAILABLE); - SG_ADD(&bias, "bias", "Bias b.", MS_NOT_AVAILABLE); + SG_ADD(&m_w, "m_w", "Parameter vector w.", ParameterProperties()); + SG_ADD(&bias, "bias", "Bias b.", ParameterProperties()); SG_ADD((CSGObject**) &features, "features", - "Feature object.", MS_NOT_AVAILABLE); + "Feature object.", ParameterProperties()); } COnlineLinearMachine::~COnlineLinearMachine() diff --git a/src/shogun/machine/RandomForest.cpp b/src/shogun/machine/RandomForest.cpp index 3cbe9cae426..428355ffdea 100644 --- a/src/shogun/machine/RandomForest.cpp +++ b/src/shogun/machine/RandomForest.cpp @@ -185,5 +185,5 @@ void CRandomForest::init() m_machine=new CRandomCARTree(); m_weights=SGVector(); - SG_ADD(&m_weights,"m_weights","weights",MS_NOT_AVAILABLE) + SG_ADD(&m_weights,"m_weights","weights",ParameterProperties()); } diff --git a/src/shogun/machine/StochasticGBMachine.cpp b/src/shogun/machine/StochasticGBMachine.cpp index 0bac9dd0ba9..42856e41e41 100644 --- a/src/shogun/machine/StochasticGBMachine.cpp +++ b/src/shogun/machine/StochasticGBMachine.cpp @@ -401,11 +401,11 @@ void CStochasticGBMachine::init() m_gamma=new CDynamicArray(); SG_REF(m_gamma); - SG_ADD((CSGObject**)&m_machine,"m_machine","machine",MS_NOT_AVAILABLE); - SG_ADD((CSGObject**)&m_loss,"m_loss","loss function",MS_NOT_AVAILABLE); - SG_ADD(&m_num_iter,"m_num_iter","number of iterations",MS_NOT_AVAILABLE); - SG_ADD(&m_subset_frac,"m_subset_frac","subset fraction",MS_NOT_AVAILABLE); - SG_ADD(&m_learning_rate,"m_learning_rate","learning rate",MS_NOT_AVAILABLE); - SG_ADD((CSGObject**)&m_weak_learners,"m_weak_learners","array of weak learners",MS_NOT_AVAILABLE); - SG_ADD((CSGObject**)&m_gamma,"m_gamma","array of learner weights",MS_NOT_AVAILABLE); + SG_ADD((CSGObject**)&m_machine,"m_machine","machine",ParameterProperties()); + SG_ADD((CSGObject**)&m_loss,"m_loss","loss function",ParameterProperties()); + 
SG_ADD(&m_num_iter,"m_num_iter","number of iterations",ParameterProperties()); + SG_ADD(&m_subset_frac,"m_subset_frac","subset fraction",ParameterProperties()); + SG_ADD(&m_learning_rate,"m_learning_rate","learning rate",ParameterProperties()); + SG_ADD((CSGObject**)&m_weak_learners,"m_weak_learners","array of weak learners",ParameterProperties()); + SG_ADD((CSGObject**)&m_gamma,"m_gamma","array of learner weights",ParameterProperties()); } diff --git a/src/shogun/machine/StructuredOutputMachine.cpp b/src/shogun/machine/StructuredOutputMachine.cpp index acd8ed2fe53..7ac81a1f0c8 100644 --- a/src/shogun/machine/StructuredOutputMachine.cpp +++ b/src/shogun/machine/StructuredOutputMachine.cpp @@ -52,10 +52,10 @@ CStructuredModel* CStructuredOutputMachine::get_model() const void CStructuredOutputMachine::register_parameters() { - SG_ADD((CSGObject**)&m_model, "m_model", "Structured model", MS_NOT_AVAILABLE); - SG_ADD((CSGObject**)&m_surrogate_loss, "m_surrogate_loss", "Surrogate loss", MS_NOT_AVAILABLE); - SG_ADD(&m_verbose, "verbose", "Verbosity flag", MS_NOT_AVAILABLE); - SG_ADD((CSGObject**)&m_helper, "helper", "Training helper", MS_NOT_AVAILABLE); + SG_ADD((CSGObject**)&m_model, "m_model", "Structured model", ParameterProperties()); + SG_ADD((CSGObject**)&m_surrogate_loss, "m_surrogate_loss", "Surrogate loss", ParameterProperties()); + SG_ADD(&m_verbose, "verbose", "Verbosity flag", ParameterProperties()); + SG_ADD((CSGObject**)&m_helper, "helper", "Training helper", ParameterProperties()); m_verbose = false; m_helper = NULL; diff --git a/src/shogun/machine/gp/ConstMean.cpp b/src/shogun/machine/gp/ConstMean.cpp index 2dacb6c3aa0..87c12d11c7b 100644 --- a/src/shogun/machine/gp/ConstMean.cpp +++ b/src/shogun/machine/gp/ConstMean.cpp @@ -54,7 +54,7 @@ CConstMean::CConstMean(float64_t mean) void CConstMean::init() { m_mean=0.0; - SG_ADD(&m_mean, "mean", "const value of mean function", MS_AVAILABLE, GRADIENT_AVAILABLE); + SG_ADD(&m_mean, "mean", "const value of mean function", ParameterProperties::HYPER | ParameterProperties::GRADIENT); } SGVector CConstMean::get_mean_vector(const CFeatures* features) const diff --git a/src/shogun/machine/gp/DualVariationalGaussianLikelihood.cpp b/src/shogun/machine/gp/DualVariationalGaussianLikelihood.cpp index c084fa0d7a3..e20c5220b75 100644 --- a/src/shogun/machine/gp/DualVariationalGaussianLikelihood.cpp +++ b/src/shogun/machine/gp/DualVariationalGaussianLikelihood.cpp @@ -218,15 +218,15 @@ void CDualVariationalGaussianLikelihood::init() { SG_ADD(&m_lambda, "lambda", "Dual parameter for variational s2", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_is_valid, "is_valid", "Is the Dual parameter valid", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_strict_scale, "strict_scale", "The strict variable used in adjust_step_wrt_dual_parameter", - MS_NOT_AVAILABLE); + ParameterProperties()); m_is_valid=false; m_strict_scale=1e-5; diff --git a/src/shogun/machine/gp/GaussianLikelihood.cpp b/src/shogun/machine/gp/GaussianLikelihood.cpp index 64ba2c1b0ff..a5bf9244957 100644 --- a/src/shogun/machine/gp/GaussianLikelihood.cpp +++ b/src/shogun/machine/gp/GaussianLikelihood.cpp @@ -52,7 +52,7 @@ CGaussianLikelihood::CGaussianLikelihood(float64_t sigma) : CLikelihoodModel() void CGaussianLikelihood::init() { m_log_sigma=0.0; - SG_ADD(&m_log_sigma, "log_sigma", "Observation noise in log domain", MS_AVAILABLE, GRADIENT_AVAILABLE); + SG_ADD(&m_log_sigma, "log_sigma", "Observation noise in log domain", ParameterProperties::HYPER | 
ParameterProperties::GRADIENT); } CGaussianLikelihood::~CGaussianLikelihood() diff --git a/src/shogun/machine/gp/Inference.cpp b/src/shogun/machine/gp/Inference.cpp index df94569c67f..427b4eb3471 100644 --- a/src/shogun/machine/gp/Inference.cpp +++ b/src/shogun/machine/gp/Inference.cpp @@ -88,13 +88,13 @@ CInference::~CInference() void CInference::init() { - SG_ADD(&m_kernel, "kernel", "Kernel", MS_AVAILABLE); - SG_ADD(&m_log_scale, "log_scale", "Kernel log scale", MS_AVAILABLE, GRADIENT_AVAILABLE); - SG_ADD(&m_model, "likelihood_model", "Likelihood model", MS_AVAILABLE); - SG_ADD(&m_mean, "mean_function", "Mean function", MS_AVAILABLE); - SG_ADD(&m_labels, "labels", "Labels", MS_NOT_AVAILABLE); - SG_ADD(&m_features, "features", "Features", MS_NOT_AVAILABLE); - SG_ADD(&m_gradient_update, "gradient_update", "Whether gradients are updated", MS_NOT_AVAILABLE); + SG_ADD(&m_kernel, "kernel", "Kernel", ParameterProperties::HYPER); + SG_ADD(&m_log_scale, "log_scale", "Kernel log scale", ParameterProperties::HYPER | ParameterProperties::GRADIENT); + SG_ADD(&m_model, "likelihood_model", "Likelihood model", ParameterProperties::HYPER); + SG_ADD(&m_mean, "mean_function", "Mean function", ParameterProperties::HYPER); + SG_ADD(&m_labels, "labels", "Labels", ParameterProperties()); + SG_ADD(&m_features, "features", "Features", ParameterProperties()); + SG_ADD(&m_gradient_update, "gradient_update", "Whether gradients are updated", ParameterProperties()); m_kernel=NULL; @@ -106,10 +106,10 @@ void CInference::init() m_gradient_update=false; m_minimizer=NULL; - SG_ADD((CSGObject**)&m_minimizer, "Inference__m_minimizer", "minimizer in Inference", MS_NOT_AVAILABLE); - SG_ADD(&m_alpha, "alpha", "alpha vector used in process mean calculation", MS_NOT_AVAILABLE); - SG_ADD(&m_L, "L", "upper triangular factor of Cholesky decomposition", MS_NOT_AVAILABLE); - SG_ADD(&m_E, "E", "the matrix used for multi classification", MS_NOT_AVAILABLE); + SG_ADD((CSGObject**)&m_minimizer, "Inference__m_minimizer", "minimizer in Inference", ParameterProperties()); + SG_ADD(&m_alpha, "alpha", "alpha vector used in process mean calculation", ParameterProperties()); + SG_ADD(&m_L, "L", "upper triangular factor of Cholesky decomposition", ParameterProperties()); + SG_ADD(&m_E, "E", "the matrix used for multi classification", ParameterProperties()); } void CInference::register_minimizer(Minimizer* minimizer) diff --git a/src/shogun/machine/gp/KLCholeskyInferenceMethod.cpp b/src/shogun/machine/gp/KLCholeskyInferenceMethod.cpp index e5b64061701..c5f45721afd 100644 --- a/src/shogun/machine/gp/KLCholeskyInferenceMethod.cpp +++ b/src/shogun/machine/gp/KLCholeskyInferenceMethod.cpp @@ -67,10 +67,10 @@ void CKLCholeskyInferenceMethod::init() { SG_ADD(&m_C, "C", "The Cholesky represention of the variational co-variance matrix", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_InvK_C, "invK_C", " The K^{-1}C matrix", - MS_NOT_AVAILABLE); + ParameterProperties()); } CKLCholeskyInferenceMethod* CKLCholeskyInferenceMethod::obtain_from_generic( diff --git a/src/shogun/machine/gp/KLCovarianceInferenceMethod.cpp b/src/shogun/machine/gp/KLCovarianceInferenceMethod.cpp index b21f635211a..e369026b5d3 100644 --- a/src/shogun/machine/gp/KLCovarianceInferenceMethod.cpp +++ b/src/shogun/machine/gp/KLCovarianceInferenceMethod.cpp @@ -67,22 +67,22 @@ void CKLCovarianceInferenceMethod::init() { SG_ADD(&m_V, "V", "V is L'*V=diag(sW)*K", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_A, "A", "A is A=I-K*diag(sW)*inv(L)'*inv(L)*diag(sW)", - 
MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_W, "W", "noise matrix W", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_sW, "sW", "Square root of noise matrix W", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_dv, "dv", "the gradient of the variational expection wrt sigma2", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_df, "df", "the gradient of the variational expection wrt mu", - MS_NOT_AVAILABLE); + ParameterProperties()); } diff --git a/src/shogun/machine/gp/KLDiagonalInferenceMethod.cpp b/src/shogun/machine/gp/KLDiagonalInferenceMethod.cpp index 016e7936a6f..45d083d21bc 100644 --- a/src/shogun/machine/gp/KLDiagonalInferenceMethod.cpp +++ b/src/shogun/machine/gp/KLDiagonalInferenceMethod.cpp @@ -67,7 +67,7 @@ void CKLDiagonalInferenceMethod::init() { SG_ADD(&m_InvK, "invK", "The K^{-1} matrix", - MS_NOT_AVAILABLE); + ParameterProperties()); } CKLDiagonalInferenceMethod* CKLDiagonalInferenceMethod::obtain_from_generic( diff --git a/src/shogun/machine/gp/KLDualInferenceMethod.cpp b/src/shogun/machine/gp/KLDualInferenceMethod.cpp index 2944e05627d..54c07a83a67 100644 --- a/src/shogun/machine/gp/KLDualInferenceMethod.cpp +++ b/src/shogun/machine/gp/KLDualInferenceMethod.cpp @@ -106,9 +106,9 @@ friend class CKLDualInferenceMethodMinimizer; m_obj=NULL; m_derivatives = SGVector(); SG_ADD(&m_derivatives, "KLDualInferenceMethodCostFunction__m_derivatives", - "derivatives in KLDualInferenceMethodCostFunction", MS_NOT_AVAILABLE); + "derivatives in KLDualInferenceMethodCostFunction", ParameterProperties()); SG_ADD((CSGObject **)&m_obj, "KLDualInferenceMethodCostFunction__m_obj", - "obj in KLDualInferenceMethodCostFunction", MS_NOT_AVAILABLE); + "obj in KLDualInferenceMethodCostFunction", ParameterProperties()); } CKLDualInferenceMethod *m_obj; CDualVariationalGaussianLikelihood* get_dual_variational_likelihood() const @@ -279,19 +279,19 @@ void CKLDualInferenceMethod::init() { SG_ADD(&m_W, "W", "noise matrix W", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_sW, "sW", "Square root of noise matrix W", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_dv, "dv", "the gradient of the variational expection wrt sigma2", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_df, "df", "the gradient of the variational expection wrt mu", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_is_dual_valid, "is_dual_valid", "whether the lambda (m_W) is valid or not", - MS_NOT_AVAILABLE); + ParameterProperties()); m_is_dual_valid=false; register_minimizer(new CKLDualInferenceMethodMinimizer()); diff --git a/src/shogun/machine/gp/KLInference.cpp b/src/shogun/machine/gp/KLInference.cpp index 8faa391dd9b..8a384848192 100644 --- a/src/shogun/machine/gp/KLInference.cpp +++ b/src/shogun/machine/gp/KLInference.cpp @@ -99,9 +99,9 @@ class KLInferenceCostFunction: public FirstOrderCostFunction m_obj=NULL; m_derivatives = SGVector(); SG_ADD(&m_derivatives, "KLInferenceCostFunction__m_derivatives", - "derivatives in KLInferenceCostFunction", MS_NOT_AVAILABLE); + "derivatives in KLInferenceCostFunction", ParameterProperties()); SG_ADD((CSGObject **)&m_obj, "KLInferenceCostFunction__m_obj", - "obj in KLInferenceCostFunction", MS_NOT_AVAILABLE); + "obj in KLInferenceCostFunction", ParameterProperties()); } CKLInference *m_obj; }; @@ -144,25 +144,25 @@ void CKLInference::init() m_min_coeff_kernel=1e-5; SG_ADD(&m_noise_factor, "noise_factor", "The noise factor used for correcting Kernel matrix", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_exp_factor, 
"exp_factor", "The exponential factor used for increasing noise_factor", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_max_attempt, "max_attempt", "The max number of attempt to correct Kernel matrix", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_min_coeff_kernel, "min_coeff_kernel", "The minimum coeefficient of kernel matrix in LDLT factorization used to check whether the kernel matrix is positive definite or not", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_s2, "s2", "Variational parameter sigma2", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_mu, "mu", "Variational parameter mu and posterior mean", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_Sigma, "Sigma", "Posterior covariance matrix Sigma", - MS_NOT_AVAILABLE); + ParameterProperties()); register_minimizer(new CLBFGSMinimizer()); } diff --git a/src/shogun/machine/gp/KLLowerTriangularInference.cpp b/src/shogun/machine/gp/KLLowerTriangularInference.cpp index 7d69620e64e..5ab701620c1 100644 --- a/src/shogun/machine/gp/KLLowerTriangularInference.cpp +++ b/src/shogun/machine/gp/KLLowerTriangularInference.cpp @@ -66,20 +66,20 @@ void CKLLowerTriangularInference::init() { SG_ADD(&m_InvK_Sigma, "invk_Sigma", "K^{-1}Sigma'", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_mean_vec, "mean_vec", "The mean vector generated from mean function", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_log_det_Kernel, "log_det_kernel", "The Log-determinant of Kernel", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_Kernel_LsD, "L_sqrt_D", "The L*sqrt(D) matrix, where L and D are defined in LDLT factorization on Kernel*sq(m_scale)", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_Kernel_P, "Permutation_P", "The permutation sequence of P, where P are defined in LDLT factorization on Kernel*sq(m_scale)", - MS_NOT_AVAILABLE); + ParameterProperties()); m_log_det_Kernel=0; } diff --git a/src/shogun/machine/gp/LaplaceInference.cpp b/src/shogun/machine/gp/LaplaceInference.cpp index 14a2fcec333..bc3e80709b3 100644 --- a/src/shogun/machine/gp/LaplaceInference.cpp +++ b/src/shogun/machine/gp/LaplaceInference.cpp @@ -56,10 +56,10 @@ CLaplaceInference::CLaplaceInference(CKernel* kern, void CLaplaceInference::init() { - SG_ADD(&m_dlp, "dlp", "derivative of log likelihood with respect to function location", MS_NOT_AVAILABLE); - SG_ADD(&m_mu, "mu", "mean vector of the approximation to the posterior", MS_NOT_AVAILABLE); - SG_ADD(&m_Sigma, "Sigma", "covariance matrix of the approximation to the posterior", MS_NOT_AVAILABLE); - SG_ADD(&m_W, "W", "the noise matrix", MS_NOT_AVAILABLE); + SG_ADD(&m_dlp, "dlp", "derivative of log likelihood with respect to function location", ParameterProperties()); + SG_ADD(&m_mu, "mu", "mean vector of the approximation to the posterior", ParameterProperties()); + SG_ADD(&m_Sigma, "Sigma", "covariance matrix of the approximation to the posterior", ParameterProperties()); + SG_ADD(&m_W, "W", "the noise matrix", ParameterProperties()); } CLaplaceInference::~CLaplaceInference() diff --git a/src/shogun/machine/gp/LogitVGPiecewiseBoundLikelihood.cpp b/src/shogun/machine/gp/LogitVGPiecewiseBoundLikelihood.cpp index 69efeff2c04..57307e3908e 100644 --- a/src/shogun/machine/gp/LogitVGPiecewiseBoundLikelihood.cpp +++ b/src/shogun/machine/gp/LogitVGPiecewiseBoundLikelihood.cpp @@ -388,25 +388,25 @@ void CLogitVGPiecewiseBoundLikelihood::init() { SG_ADD(&m_bound, "bound", "Variational piecewise bound for logit likelihood", - MS_NOT_AVAILABLE); + 
ParameterProperties()); SG_ADD(&m_pl, "pdf_l", "The pdf given the lower range and parameters(mu and variance)", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_ph, "pdf_h", "The pdf given the higher range and parameters(mu and variance)", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_cdf_diff, "cdf_h_minus_cdf_l", "The CDF difference between the lower and higher range given the parameters(mu and variance)", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_l2_plus_s2, "l2_plus_sigma2", "The result of l^2 + sigma^2", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_h2_plus_s2, "h2_plus_sigma2", "The result of h^2 + sigma^2", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_weighted_pdf_diff, "weighted_pdf_diff", "The result of l*pdf(l_norm)-h*pdf(h_norm)", - MS_NOT_AVAILABLE); + ParameterProperties()); init_likelihood(); } diff --git a/src/shogun/machine/gp/MultiLaplaceInferenceMethod.cpp b/src/shogun/machine/gp/MultiLaplaceInferenceMethod.cpp index 016819ee1dd..ca052d95f10 100644 --- a/src/shogun/machine/gp/MultiLaplaceInferenceMethod.cpp +++ b/src/shogun/machine/gp/MultiLaplaceInferenceMethod.cpp @@ -122,13 +122,13 @@ void CMultiLaplaceInferenceMethod::init() m_opt_max=10; m_nlz=0; - SG_ADD(&m_nlz, "nlz", "negative log marginal likelihood ", MS_NOT_AVAILABLE); - SG_ADD(&m_U, "U", "the matrix used to compute gradient wrt hyperparameters", MS_NOT_AVAILABLE); + SG_ADD(&m_nlz, "nlz", "negative log marginal likelihood ", ParameterProperties()); + SG_ADD(&m_U, "U", "the matrix used to compute gradient wrt hyperparameters", ParameterProperties()); - SG_ADD(&m_tolerance, "tolerance", "amount of tolerance for Newton's iterations", MS_NOT_AVAILABLE); - SG_ADD(&m_iter, "iter", "max Newton's iterations", MS_NOT_AVAILABLE); - SG_ADD(&m_opt_tolerance, "opt_tolerance", "amount of tolerance for Brent's minimization method", MS_NOT_AVAILABLE); - SG_ADD(&m_opt_max, "opt_max", "max iterations for Brent's minimization method", MS_NOT_AVAILABLE); + SG_ADD(&m_tolerance, "tolerance", "amount of tolerance for Newton's iterations", ParameterProperties()); + SG_ADD(&m_iter, "iter", "max Newton's iterations", ParameterProperties()); + SG_ADD(&m_opt_tolerance, "opt_tolerance", "amount of tolerance for Brent's minimization method", ParameterProperties()); + SG_ADD(&m_opt_max, "opt_max", "max iterations for Brent's minimization method", ParameterProperties()); } CMultiLaplaceInferenceMethod::~CMultiLaplaceInferenceMethod() diff --git a/src/shogun/machine/gp/NumericalVGLikelihood.cpp b/src/shogun/machine/gp/NumericalVGLikelihood.cpp index 7cfbeec7bba..df8f4500f0b 100644 --- a/src/shogun/machine/gp/NumericalVGLikelihood.cpp +++ b/src/shogun/machine/gp/NumericalVGLikelihood.cpp @@ -67,23 +67,23 @@ void CNumericalVGLikelihood::init() { SG_ADD(&m_log_lam, "log_lam", "The result of used for computing variational expection\n", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_xgh, "xgh", "Gaussian-Hermite quadrature base points (abscissas)\n", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_wgh, "wgh", "Gaussian-Hermite quadrature weight factors\n", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_GHQ_N, "GHQ_N", "The number of Gaussian-Hermite quadrature point\n", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_is_init_GHQ, "is_init_GHQ", "Whether Gaussian-Hermite quadrature points are initialized or not\n", - MS_NOT_AVAILABLE); + ParameterProperties()); m_GHQ_N=20; m_is_init_GHQ=false; diff --git a/src/shogun/machine/gp/SingleFITCInference.cpp 
b/src/shogun/machine/gp/SingleFITCInference.cpp index ed8154f0627..d945ae7c8fc 100644 --- a/src/shogun/machine/gp/SingleFITCInference.cpp +++ b/src/shogun/machine/gp/SingleFITCInference.cpp @@ -53,12 +53,12 @@ CSingleFITCInference::CSingleFITCInference(CKernel* kern, CFeatures* feat, void CSingleFITCInference::init() { - SG_ADD(&m_al, "al", "alpha", MS_NOT_AVAILABLE); - SG_ADD(&m_t, "t", "noise", MS_NOT_AVAILABLE); - SG_ADD(&m_B, "B", "B", MS_NOT_AVAILABLE); - SG_ADD(&m_w, "w", "B*al", MS_NOT_AVAILABLE); - SG_ADD(&m_Rvdd, "Rvdd", "Rvdd", MS_NOT_AVAILABLE); - SG_ADD(&m_V, "V", "V", MS_NOT_AVAILABLE); + SG_ADD(&m_al, "al", "alpha", ParameterProperties()); + SG_ADD(&m_t, "t", "noise", ParameterProperties()); + SG_ADD(&m_B, "B", "B", ParameterProperties()); + SG_ADD(&m_w, "w", "B*al", ParameterProperties()); + SG_ADD(&m_Rvdd, "Rvdd", "Rvdd", ParameterProperties()); + SG_ADD(&m_V, "V", "V", ParameterProperties()); } CSingleFITCInference::~CSingleFITCInference() diff --git a/src/shogun/machine/gp/SingleFITCLaplaceInferenceMethod.cpp b/src/shogun/machine/gp/SingleFITCLaplaceInferenceMethod.cpp index b70069190db..1f0b698cd8b 100644 --- a/src/shogun/machine/gp/SingleFITCLaplaceInferenceMethod.cpp +++ b/src/shogun/machine/gp/SingleFITCLaplaceInferenceMethod.cpp @@ -146,9 +146,9 @@ class SingleFITCLaplaceInferenceMethodCostFunction: public FirstOrderCostFunctio m_obj=NULL; m_derivatives = SGVector(); SG_ADD(&m_derivatives, "SingleFITCLaplaceInferenceMethodCostFunction__m_derivatives", - "derivatives in SingleFITCLaplaceInferenceMethodCostFunction", MS_NOT_AVAILABLE); + "derivatives in SingleFITCLaplaceInferenceMethodCostFunction", ParameterProperties()); SG_ADD((CSGObject **)&m_obj, "SingleFITCLaplaceInferenceMethodCostFunction__m_obj", - "obj in SingleFITCLaplaceInferenceMethodCostFunction", MS_NOT_AVAILABLE); + "obj in SingleFITCLaplaceInferenceMethodCostFunction", ParameterProperties()); } SGVector m_derivatives; @@ -187,15 +187,15 @@ void CSingleFITCLaplaceNewtonOptimizer::init() m_opt_max=10; SG_ADD((CSGObject **)&m_obj, "CSingleFITCLaplaceNewtonOptimizer__m_obj", - "obj in CSingleFITCLaplaceNewtonOptimizer", MS_NOT_AVAILABLE); + "obj in CSingleFITCLaplaceNewtonOptimizer", ParameterProperties()); SG_ADD(&m_iter, "CSingleFITCLaplaceNewtonOptimizer__m_iter", - "iter in CSingleFITCLaplaceNewtonOptimizer", MS_NOT_AVAILABLE); + "iter in CSingleFITCLaplaceNewtonOptimizer", ParameterProperties()); SG_ADD(&m_tolerance, "CSingleFITCLaplaceNewtonOptimizer__m_tolerance", - "tolerance in CSingleFITCLaplaceNewtonOptimizer", MS_NOT_AVAILABLE); + "tolerance in CSingleFITCLaplaceNewtonOptimizer", ParameterProperties()); SG_ADD(&m_opt_tolerance, "CSingleFITCLaplaceNewtonOptimizer__m_opt_tolerance", - "opt_tolerance in CSingleFITCLaplaceNewtonOptimizer", MS_NOT_AVAILABLE); + "opt_tolerance in CSingleFITCLaplaceNewtonOptimizer", ParameterProperties()); SG_ADD(&m_opt_max, "CSingleFITCLaplaceNewtonOptimizer__m_opt_max", - "opt_max in CSingleFITCLaplaceNewtonOptimizer", MS_NOT_AVAILABLE); + "opt_max in CSingleFITCLaplaceNewtonOptimizer", ParameterProperties()); } float64_t CSingleFITCLaplaceNewtonOptimizer::minimize() @@ -320,18 +320,18 @@ void CSingleFITCLaplaceInferenceMethod::init() m_Psi=0; m_Wneg=false; - SG_ADD(&m_dlp, "dlp", "derivative of log likelihood with respect to function location", MS_NOT_AVAILABLE); - SG_ADD(&m_W, "W", "the noise matrix", MS_NOT_AVAILABLE); - - SG_ADD(&m_sW, "sW", "square root of W", MS_NOT_AVAILABLE); - SG_ADD(&m_d2lp, "d2lp", "second derivative of log likelihood with respect to 
function location", MS_NOT_AVAILABLE); - SG_ADD(&m_d3lp, "d3lp", "third derivative of log likelihood with respect to function location", MS_NOT_AVAILABLE); - SG_ADD(&m_chol_R0, "chol_R0", "Cholesky of inverse covariance of inducing features", MS_NOT_AVAILABLE); - SG_ADD(&m_dfhat, "dfhat", "derivative of negative log (approximated) marginal likelihood wrt f", MS_NOT_AVAILABLE); - SG_ADD(&m_g, "g", "variable g defined in infFITC_Laplace.m", MS_NOT_AVAILABLE); - SG_ADD(&m_dg, "dg", "variable d0 defined in infFITC_Laplace.m", MS_NOT_AVAILABLE); - SG_ADD(&m_Psi, "Psi", "the negative log likelihood without constant terms used in Newton's method", MS_NOT_AVAILABLE); - SG_ADD(&m_Wneg, "Wneg", "whether W contains negative elements", MS_NOT_AVAILABLE); + SG_ADD(&m_dlp, "dlp", "derivative of log likelihood with respect to function location", ParameterProperties()); + SG_ADD(&m_W, "W", "the noise matrix", ParameterProperties()); + + SG_ADD(&m_sW, "sW", "square root of W", ParameterProperties()); + SG_ADD(&m_d2lp, "d2lp", "second derivative of log likelihood with respect to function location", ParameterProperties()); + SG_ADD(&m_d3lp, "d3lp", "third derivative of log likelihood with respect to function location", ParameterProperties()); + SG_ADD(&m_chol_R0, "chol_R0", "Cholesky of inverse covariance of inducing features", ParameterProperties()); + SG_ADD(&m_dfhat, "dfhat", "derivative of negative log (approximated) marginal likelihood wrt f", ParameterProperties()); + SG_ADD(&m_g, "g", "variable g defined in infFITC_Laplace.m", ParameterProperties()); + SG_ADD(&m_dg, "dg", "variable d0 defined in infFITC_Laplace.m", ParameterProperties()); + SG_ADD(&m_Psi, "Psi", "the negative log likelihood without constant terms used in Newton's method", ParameterProperties()); + SG_ADD(&m_Wneg, "Wneg", "whether W contains negative elements", ParameterProperties()); register_minimizer(new CSingleFITCLaplaceNewtonOptimizer()); } diff --git a/src/shogun/machine/gp/SingleLaplaceInferenceMethod.cpp b/src/shogun/machine/gp/SingleLaplaceInferenceMethod.cpp index f577d7ab0df..e6b2f7e91b6 100644 --- a/src/shogun/machine/gp/SingleLaplaceInferenceMethod.cpp +++ b/src/shogun/machine/gp/SingleLaplaceInferenceMethod.cpp @@ -110,9 +110,9 @@ class SingleLaplaceInferenceMethodCostFunction: public FirstOrderCostFunction m_obj=NULL; m_derivatives = SGVector(); SG_ADD(&m_derivatives, "SingleLaplaceInferenceMethodCostFunction__m_derivatives", - "derivatives in SingleLaplaceInferenceMethodCostFunction", MS_NOT_AVAILABLE); + "derivatives in SingleLaplaceInferenceMethodCostFunction", ParameterProperties()); SG_ADD((CSGObject **)&m_obj, "SingleLaplaceInferenceMethodCostFunction__m_obj", - "obj in SingleLaplaceInferenceMethodCostFunction", MS_NOT_AVAILABLE); + "obj in SingleLaplaceInferenceMethodCostFunction", ParameterProperties()); } @@ -152,15 +152,15 @@ void CSingleLaplaceNewtonOptimizer::init() m_opt_max=10; SG_ADD((CSGObject **)&m_obj, "CSingleLaplaceNewtonOptimizer__m_obj", - "obj in CSingleLaplaceNewtonOptimizer", MS_NOT_AVAILABLE); + "obj in CSingleLaplaceNewtonOptimizer", ParameterProperties()); SG_ADD(&m_iter, "CSingleLaplaceNewtonOptimizer__m_iter", - "iter in CSingleLaplaceNewtonOptimizer", MS_NOT_AVAILABLE); + "iter in CSingleLaplaceNewtonOptimizer", ParameterProperties()); SG_ADD(&m_tolerance, "CSingleLaplaceNewtonOptimizer__m_tolerance", - "tolerance in CSingleLaplaceNewtonOptimizer", MS_NOT_AVAILABLE); + "tolerance in CSingleLaplaceNewtonOptimizer", ParameterProperties()); SG_ADD(&m_opt_tolerance, 
"CSingleLaplaceNewtonOptimizer__m_opt_tolerance", - "opt_tolerance in CSingleLaplaceNewtonOptimizer", MS_NOT_AVAILABLE); + "opt_tolerance in CSingleLaplaceNewtonOptimizer", ParameterProperties()); SG_ADD(&m_opt_max, "CSingleLaplaceNewtonOptimizer__m_opt_max", - "opt_max in CSingleLaplaceNewtonOptimizer", MS_NOT_AVAILABLE); + "opt_max in CSingleLaplaceNewtonOptimizer", ParameterProperties()); } float64_t CSingleLaplaceNewtonOptimizer::minimize() @@ -283,10 +283,10 @@ CSingleLaplaceInferenceMethod::CSingleLaplaceInferenceMethod(CKernel* kern, void CSingleLaplaceInferenceMethod::init() { m_Psi=0; - SG_ADD(&m_Psi, "Psi", "posterior log likelihood without constant terms", MS_NOT_AVAILABLE); - SG_ADD(&m_sW, "sW", "square root of W", MS_NOT_AVAILABLE); - SG_ADD(&m_d2lp, "d2lp", "second derivative of log likelihood with respect to function location", MS_NOT_AVAILABLE); - SG_ADD(&m_d3lp, "d3lp", "third derivative of log likelihood with respect to function location", MS_NOT_AVAILABLE); + SG_ADD(&m_Psi, "Psi", "posterior log likelihood without constant terms", ParameterProperties()); + SG_ADD(&m_sW, "sW", "square root of W", ParameterProperties()); + SG_ADD(&m_d2lp, "d2lp", "second derivative of log likelihood with respect to function location", ParameterProperties()); + SG_ADD(&m_d3lp, "d3lp", "third derivative of log likelihood with respect to function location", ParameterProperties()); register_minimizer(new CSingleLaplaceNewtonOptimizer()); } diff --git a/src/shogun/machine/gp/SingleSparseInference.cpp b/src/shogun/machine/gp/SingleSparseInference.cpp index 33e9115f66e..1640f87d057 100644 --- a/src/shogun/machine/gp/SingleSparseInference.cpp +++ b/src/shogun/machine/gp/SingleSparseInference.cpp @@ -114,7 +114,7 @@ class SingleSparseInferenceCostFunction: public FirstOrderBoundConstraintsCostFu //The existing implementation in CSGObject::get_parameter_incremental_hash() //can NOT deal with circular reference when parameter_hash_changed() is called //SG_ADD((CSGObject **)&m_obj, "CSigleSparseInference__m_obj", - //"m_obj in SingleSparseInferenceCostFunction", MS_NOT_AVAILABLE); + //"m_obj in SingleSparseInferenceCostFunction", ParameterProperties()); } }; #endif //DOXYGEN_SHOULD_SKIP_THIS @@ -137,22 +137,22 @@ void CSingleSparseInference::init() m_fully_sparse=false; m_inducing_minimizer=NULL; SG_ADD(&m_fully_sparse, "fully_Sparse", - "whether the kernel support sparse inference", MS_NOT_AVAILABLE); + "whether the kernel support sparse inference", ParameterProperties()); m_lock=new CLock(); SG_ADD(&m_upper_bound, "upper_bound", - "upper bound of inducing features", MS_NOT_AVAILABLE); + "upper bound of inducing features", ParameterProperties()); SG_ADD(&m_lower_bound, "lower_bound", - "lower bound of inducing features", MS_NOT_AVAILABLE); + "lower bound of inducing features", ParameterProperties()); SG_ADD(&m_max_ind_iterations, "max_ind_iterations", - "max number of iterations used in inducing features optimization", MS_NOT_AVAILABLE); + "max number of iterations used in inducing features optimization", ParameterProperties()); SG_ADD(&m_ind_tolerance, "ind_tolerance", - "tolearance used in inducing features optimization", MS_NOT_AVAILABLE); + "tolearance used in inducing features optimization", ParameterProperties()); SG_ADD(&m_opt_inducing_features, - "opt_inducing_features", "whether optimize inducing features", MS_NOT_AVAILABLE); + "opt_inducing_features", "whether optimize inducing features", ParameterProperties()); SG_ADD((CSGObject **)&m_inducing_minimizer, - "inducing_minimizer", "Minimizer 
used in optimize inducing features", MS_NOT_AVAILABLE); + "inducing_minimizer", "Minimizer used in optimize inducing features", ParameterProperties()); m_max_ind_iterations=50; m_ind_tolerance=1e-3; diff --git a/src/shogun/machine/gp/SoftMaxLikelihood.cpp b/src/shogun/machine/gp/SoftMaxLikelihood.cpp index d3ee1360bb9..dc0ba98b8b7 100644 --- a/src/shogun/machine/gp/SoftMaxLikelihood.cpp +++ b/src/shogun/machine/gp/SoftMaxLikelihood.cpp @@ -61,7 +61,7 @@ void CSoftMaxLikelihood::init() m_num_samples=10000; SG_ADD(&m_num_samples, "num_samples", "Number of samples to be generated", - MS_NOT_AVAILABLE); + ParameterProperties()); } SGVector CSoftMaxLikelihood::get_log_probability_f(const CLabels* lab, diff --git a/src/shogun/machine/gp/SparseInference.cpp b/src/shogun/machine/gp/SparseInference.cpp index 6b9c82f0cd6..d3e2860faf2 100644 --- a/src/shogun/machine/gp/SparseInference.cpp +++ b/src/shogun/machine/gp/SparseInference.cpp @@ -98,12 +98,12 @@ CSparseInference::CSparseInference(CKernel* kern, CFeatures* feat, void CSparseInference::init() { SG_ADD(&m_inducing_features, "inducing_features", "inducing features", - MS_AVAILABLE, GRADIENT_AVAILABLE); + ParameterProperties::HYPER | ParameterProperties::GRADIENT); SG_ADD(&m_log_ind_noise, "log_inducing_noise", "noise about inducing potins in log domain", - MS_AVAILABLE, GRADIENT_AVAILABLE); - SG_ADD(&m_mu, "mu", "mean vector of the approximation to the posterior", MS_NOT_AVAILABLE); - SG_ADD(&m_Sigma, "Sigma", "covariance matrix of the approximation to the posterior", MS_NOT_AVAILABLE); - SG_ADD(&m_ktrtr_diag, "ktrtr_diag", "diagonal elements of kernel matrix m_ktrtr", MS_NOT_AVAILABLE); + ParameterProperties::HYPER | ParameterProperties::GRADIENT); + SG_ADD(&m_mu, "mu", "mean vector of the approximation to the posterior", ParameterProperties()); + SG_ADD(&m_Sigma, "Sigma", "covariance matrix of the approximation to the posterior", ParameterProperties()); + SG_ADD(&m_ktrtr_diag, "ktrtr_diag", "diagonal elements of kernel matrix m_ktrtr", ParameterProperties()); m_log_ind_noise = std::log(1e-10); m_inducing_features=SGMatrix(); diff --git a/src/shogun/machine/gp/StudentsTLikelihood.cpp b/src/shogun/machine/gp/StudentsTLikelihood.cpp index b05cf731271..bc743400b56 100644 --- a/src/shogun/machine/gp/StudentsTLikelihood.cpp +++ b/src/shogun/machine/gp/StudentsTLikelihood.cpp @@ -282,8 +282,8 @@ void CStudentsTLikelihood::init() { m_log_sigma=0.0; m_log_df = std::log(2.0); - SG_ADD(&m_log_df, "log_df", "Degrees of freedom in log domain", MS_AVAILABLE, GRADIENT_AVAILABLE); - SG_ADD(&m_log_sigma, "log_sigma", "Scale parameter in log domain", MS_AVAILABLE, GRADIENT_AVAILABLE); + SG_ADD(&m_log_df, "log_df", "Degrees of freedom in log domain", ParameterProperties::HYPER | ParameterProperties::GRADIENT); + SG_ADD(&m_log_sigma, "log_sigma", "Scale parameter in log domain", ParameterProperties::HYPER | ParameterProperties::GRADIENT); } CStudentsTLikelihood::~CStudentsTLikelihood() diff --git a/src/shogun/machine/gp/StudentsTVGLikelihood.cpp b/src/shogun/machine/gp/StudentsTVGLikelihood.cpp index 15111719dca..b33fc859d71 100644 --- a/src/shogun/machine/gp/StudentsTVGLikelihood.cpp +++ b/src/shogun/machine/gp/StudentsTVGLikelihood.cpp @@ -79,8 +79,8 @@ void CStudentsTVGLikelihood::init_likelihood() void CStudentsTVGLikelihood::init() { init_likelihood(); - SG_ADD(&m_log_df, "log_df", "Degrees of freedom in log domain", MS_AVAILABLE, GRADIENT_AVAILABLE); - SG_ADD(&m_log_sigma, "log_sigma", "Scale parameter in log domain", MS_AVAILABLE, GRADIENT_AVAILABLE); + 
SG_ADD(&m_log_df, "log_df", "Degrees of freedom in log domain", ParameterProperties::HYPER | ParameterProperties::GRADIENT); + SG_ADD(&m_log_sigma, "log_sigma", "Scale parameter in log domain", ParameterProperties::HYPER | ParameterProperties::GRADIENT); } } /* namespace shogun */ diff --git a/src/shogun/machine/gp/VarDTCInferenceMethod.cpp b/src/shogun/machine/gp/VarDTCInferenceMethod.cpp index 1df3a22abbc..e639274c05f 100644 --- a/src/shogun/machine/gp/VarDTCInferenceMethod.cpp +++ b/src/shogun/machine/gp/VarDTCInferenceMethod.cpp @@ -69,15 +69,15 @@ void CVarDTCInferenceMethod::init() m_inv_La=SGMatrix(); m_Knm_inv_Lm=SGMatrix(); - SG_ADD(&m_yy, "yy", "yy", MS_NOT_AVAILABLE); - SG_ADD(&m_f3, "f3", "f3", MS_NOT_AVAILABLE); - SG_ADD(&m_sigma2, "sigma2", "sigma2", MS_NOT_AVAILABLE); - SG_ADD(&m_trk, "trk", "trk", MS_NOT_AVAILABLE); - SG_ADD(&m_Tmm, "Tmm", "Tmm", MS_NOT_AVAILABLE); - SG_ADD(&m_Tnm, "Tnm", "Tnm", MS_NOT_AVAILABLE); - SG_ADD(&m_inv_Lm, "inv_Lm", "inv_Lm", MS_NOT_AVAILABLE); - SG_ADD(&m_inv_La, "inv_La", "inv_La", MS_NOT_AVAILABLE); - SG_ADD(&m_Knm_inv_Lm, "Knm_Inv_Lm", "Knm_Inv_Lm", MS_NOT_AVAILABLE); + SG_ADD(&m_yy, "yy", "yy", ParameterProperties()); + SG_ADD(&m_f3, "f3", "f3", ParameterProperties()); + SG_ADD(&m_sigma2, "sigma2", "sigma2", ParameterProperties()); + SG_ADD(&m_trk, "trk", "trk", ParameterProperties()); + SG_ADD(&m_Tmm, "Tmm", "Tmm", ParameterProperties()); + SG_ADD(&m_Tnm, "Tnm", "Tnm", ParameterProperties()); + SG_ADD(&m_inv_Lm, "inv_Lm", "inv_Lm", ParameterProperties()); + SG_ADD(&m_inv_La, "inv_La", "inv_La", ParameterProperties()); + SG_ADD(&m_Knm_inv_Lm, "Knm_Inv_Lm", "Knm_Inv_Lm", ParameterProperties()); } CVarDTCInferenceMethod::~CVarDTCInferenceMethod() diff --git a/src/shogun/machine/gp/VariationalGaussianLikelihood.cpp b/src/shogun/machine/gp/VariationalGaussianLikelihood.cpp index 09aed57b802..fc794b27e55 100644 --- a/src/shogun/machine/gp/VariationalGaussianLikelihood.cpp +++ b/src/shogun/machine/gp/VariationalGaussianLikelihood.cpp @@ -45,15 +45,15 @@ void CVariationalGaussianLikelihood::init() { SG_ADD(&m_mu, "mu", "The mean of variational normal distribution\n", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_s2, "sigma2", "The variance of variational normal distribution\n", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_noise_factor, "noise_factor", "Correct the variance if variance is close to zero or negative\n", - MS_NOT_AVAILABLE); + ParameterProperties()); m_noise_factor=1e-6; } diff --git a/src/shogun/machine/gp/VariationalLikelihood.cpp b/src/shogun/machine/gp/VariationalLikelihood.cpp index c81b6f15325..6b74d3b9302 100644 --- a/src/shogun/machine/gp/VariationalLikelihood.cpp +++ b/src/shogun/machine/gp/VariationalLikelihood.cpp @@ -62,11 +62,11 @@ void CVariationalLikelihood::init() SG_ADD(&m_lab, "labels", "The label of the data\n", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD((CSGObject**)&m_likelihood, "likelihood", "The distribution used to model the data\n", - MS_NOT_AVAILABLE); + ParameterProperties()); } SGVector CVariationalLikelihood::get_predictive_means( diff --git a/src/shogun/mathematics/linalg/eigsolver/EigenSolver.h b/src/shogun/mathematics/linalg/eigsolver/EigenSolver.h index 05d2e0fe6d9..3a1d050fcb6 100644 --- a/src/shogun/mathematics/linalg/eigsolver/EigenSolver.h +++ b/src/shogun/mathematics/linalg/eigsolver/EigenSolver.h @@ -113,23 +113,23 @@ class CEigenSolver : public CSGObject SG_ADD(&m_min_eigenvalue, "min_eigenvalue", "Minimum eigenvalue of a real valued self-adjoint linear 
operator", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_max_eigenvalue, "max_eigenvalue", "Maximum eigenvalue of a real valued self-adjoint linear operator", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD((CSGObject**)&m_linear_operator, "linear_operator", "Self-adjoint linear operator", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_is_computed_min, "is_computed_min", "Flag denoting that the minimum eigenvalue has already been computed", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_max_eigenvalue, "is_computed_max", "Flag denoting that the maximum eigenvalue has already been computed", - MS_NOT_AVAILABLE); + ParameterProperties()); } }; diff --git a/src/shogun/mathematics/linalg/eigsolver/LanczosEigenSolver.cpp b/src/shogun/mathematics/linalg/eigsolver/LanczosEigenSolver.cpp index a73f89881e5..7c5ff9f1344 100644 --- a/src/shogun/mathematics/linalg/eigsolver/LanczosEigenSolver.cpp +++ b/src/shogun/mathematics/linalg/eigsolver/LanczosEigenSolver.cpp @@ -43,13 +43,13 @@ void CLanczosEigenSolver::init() m_absolute_tolerence=1E-6; SG_ADD(&m_max_iteration_limit, "max_iteration_limit", - "Maximum number of iteration for the solver", MS_NOT_AVAILABLE); + "Maximum number of iteration for the solver", ParameterProperties()); SG_ADD(&m_relative_tolerence, "relative_tolerence", - "Relative tolerence of solver", MS_NOT_AVAILABLE); + "Relative tolerence of solver", ParameterProperties()); SG_ADD(&m_absolute_tolerence, "absolute_tolerence", - "Absolute tolerence of solver", MS_NOT_AVAILABLE); + "Absolute tolerence of solver", ParameterProperties()); } CLanczosEigenSolver::~CLanczosEigenSolver() diff --git a/src/shogun/mathematics/linalg/linop/LinearOperator.cpp b/src/shogun/mathematics/linalg/linop/LinearOperator.cpp index 760df7b74a6..3e707d149eb 100644 --- a/src/shogun/mathematics/linalg/linop/LinearOperator.cpp +++ b/src/shogun/mathematics/linalg/linop/LinearOperator.cpp @@ -42,7 +42,7 @@ void CLinearOperator::init() SG_ADD(&m_dimension, "dimension", "Dimension of the vector on which linear operator can apply", - MS_NOT_AVAILABLE); + ParameterProperties()); } template class CLinearOperator; diff --git a/src/shogun/mathematics/linalg/ratapprox/logdet/LogDetEstimator.cpp b/src/shogun/mathematics/linalg/ratapprox/logdet/LogDetEstimator.cpp index 57e42b574b8..b3de2ab3f5a 100644 --- a/src/shogun/mathematics/linalg/ratapprox/logdet/LogDetEstimator.cpp +++ b/src/shogun/mathematics/linalg/ratapprox/logdet/LogDetEstimator.cpp @@ -79,10 +79,10 @@ void CLogDetEstimator::init() m_operator_log=NULL; SG_ADD((CSGObject**)&m_trace_sampler, "trace_sampler", - "Trace sampler for the log operator", MS_NOT_AVAILABLE); + "Trace sampler for the log operator", ParameterProperties()); SG_ADD((CSGObject**)&m_operator_log, "operator_log", - "The log operator function", MS_NOT_AVAILABLE); + "The log operator function", ParameterProperties()); } CLogDetEstimator::~CLogDetEstimator() diff --git a/src/shogun/mathematics/linalg/ratapprox/logdet/opfunc/LogRationalApproximationCGM.cpp b/src/shogun/mathematics/linalg/ratapprox/logdet/opfunc/LogRationalApproximationCGM.cpp index 673047d060c..e72592c10dd 100644 --- a/src/shogun/mathematics/linalg/ratapprox/logdet/opfunc/LogRationalApproximationCGM.cpp +++ b/src/shogun/mathematics/linalg/ratapprox/logdet/opfunc/LogRationalApproximationCGM.cpp @@ -41,7 +41,7 @@ void CLogRationalApproximationCGM::init() m_linear_solver=NULL; SG_ADD((CSGObject**)&m_linear_solver, "linear_solver", - "Linear solver for complex systems", MS_NOT_AVAILABLE); + "Linear 
solver for complex systems", ParameterProperties()); } CLogRationalApproximationCGM::~CLogRationalApproximationCGM() diff --git a/src/shogun/mathematics/linalg/ratapprox/logdet/opfunc/LogRationalApproximationIndividual.cpp b/src/shogun/mathematics/linalg/ratapprox/logdet/opfunc/LogRationalApproximationIndividual.cpp index 4786f3ac13c..b4a18bd49d1 100644 --- a/src/shogun/mathematics/linalg/ratapprox/logdet/opfunc/LogRationalApproximationIndividual.cpp +++ b/src/shogun/mathematics/linalg/ratapprox/logdet/opfunc/LogRationalApproximationIndividual.cpp @@ -44,7 +44,7 @@ void CLogRationalApproximationIndividual::init() m_linear_solver=NULL; SG_ADD((CSGObject**)&m_linear_solver, "linear_solver", - "Linear solver for complex systems", MS_NOT_AVAILABLE); + "Linear solver for complex systems", ParameterProperties()); } CLogRationalApproximationIndividual::~CLogRationalApproximationIndividual() diff --git a/src/shogun/mathematics/linalg/ratapprox/opfunc/OperatorFunction.h b/src/shogun/mathematics/linalg/ratapprox/opfunc/OperatorFunction.h index 6786157f32c..0cad7d2974e 100644 --- a/src/shogun/mathematics/linalg/ratapprox/opfunc/OperatorFunction.h +++ b/src/shogun/mathematics/linalg/ratapprox/opfunc/OperatorFunction.h @@ -100,7 +100,7 @@ template class COperatorFunction : public CSGObject m_linear_operator=NULL; SG_ADD((CSGObject**)&m_linear_operator, "linear_operator", - "Linear operator of this operator function", MS_NOT_AVAILABLE); + "Linear operator of this operator function", ParameterProperties()); } }; } diff --git a/src/shogun/mathematics/linalg/ratapprox/opfunc/RationalApproximation.cpp b/src/shogun/mathematics/linalg/ratapprox/opfunc/RationalApproximation.cpp index d1506307883..2005557bf19 100644 --- a/src/shogun/mathematics/linalg/ratapprox/opfunc/RationalApproximation.cpp +++ b/src/shogun/mathematics/linalg/ratapprox/opfunc/RationalApproximation.cpp @@ -57,23 +57,23 @@ void CRationalApproximation::init() m_desired_accuracy=0.0; SG_ADD((CSGObject**)&m_eigen_solver, "eigen_solver", - "Eigen solver for computing extremal eigenvalues", MS_NOT_AVAILABLE); + "Eigen solver for computing extremal eigenvalues", ParameterProperties()); SG_ADD(&m_shifts, "complex_shifts", "Complex shifts in the linear system", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_weights, "complex_weights", "Complex weights of the linear system", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_constant_multiplier, "constant_multiplier", "Constant multiplier in the rational approximation", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_num_shifts, "num_shifts", - "Number of shifts in the quadrature rule", MS_NOT_AVAILABLE); + "Number of shifts in the quadrature rule", ParameterProperties()); SG_ADD(&m_desired_accuracy, "desired_accuracy", - "Desired accuracy of the rational approximation", MS_NOT_AVAILABLE); + "Desired accuracy of the rational approximation", ParameterProperties()); } SGVector CRationalApproximation::get_shifts() const diff --git a/src/shogun/mathematics/linalg/ratapprox/tracesampler/ProbingSampler.cpp b/src/shogun/mathematics/linalg/ratapprox/tracesampler/ProbingSampler.cpp index d9908e02736..147fb4749eb 100644 --- a/src/shogun/mathematics/linalg/ratapprox/tracesampler/ProbingSampler.cpp +++ b/src/shogun/mathematics/linalg/ratapprox/tracesampler/ProbingSampler.cpp @@ -55,16 +55,16 @@ void CProbingSampler::init() m_is_precomputed=false; SG_ADD(&m_coloring_vector, "coloring_vector", "the coloring vector generated" - " from coloring", MS_NOT_AVAILABLE); + " from coloring", 
ParameterProperties()); SG_ADD(&m_power, "matrix_power", "power of the sparse-matrix for coloring", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_is_precomputed, "is_precomputed", - "flag that is true if already precomputed", MS_NOT_AVAILABLE); + "flag that is true if already precomputed", ParameterProperties()); SG_ADD((CSGObject**)&m_matrix_operator, "matrix_operator", - "the sparse-matrix linear opeator for coloring", MS_NOT_AVAILABLE); + "the sparse-matrix linear opeator for coloring", ParameterProperties()); } CProbingSampler::~CProbingSampler() diff --git a/src/shogun/mathematics/linalg/ratapprox/tracesampler/TraceSampler.h b/src/shogun/mathematics/linalg/ratapprox/tracesampler/TraceSampler.h index e5d25054ef9..3fd36d6c4a8 100644 --- a/src/shogun/mathematics/linalg/ratapprox/tracesampler/TraceSampler.h +++ b/src/shogun/mathematics/linalg/ratapprox/tracesampler/TraceSampler.h @@ -97,10 +97,10 @@ class CTraceSampler : public CSGObject m_dimension=0; SG_ADD(&m_num_samples, "num_samples", - "Number of samples this sampler can generate", MS_NOT_AVAILABLE); + "Number of samples this sampler can generate", ParameterProperties()); SG_ADD(&m_dimension, "sample_dimension", - "Dimension of samples this sampler can generate", MS_NOT_AVAILABLE); + "Dimension of samples this sampler can generate", ParameterProperties()); } }; diff --git a/src/shogun/metric/LMNN.cpp b/src/shogun/metric/LMNN.cpp index 29755ac9064..6eef5d1707e 100644 --- a/src/shogun/metric/LMNN.cpp +++ b/src/shogun/metric/LMNN.cpp @@ -258,28 +258,28 @@ CLMNNStatistics* CLMNN::get_statistics() const void CLMNN::init() { SG_ADD(&m_linear_transform, "linear_transform", - "Linear transform in matrix form", MS_NOT_AVAILABLE) + "Linear transform in matrix form", ParameterProperties()); SG_ADD((CSGObject**) &m_features, "features", "Training features", - MS_NOT_AVAILABLE) + ParameterProperties()); SG_ADD((CSGObject**) &m_labels, "labels", "Training labels", - MS_NOT_AVAILABLE) + ParameterProperties()); SG_ADD(&m_k, "k", "Number of target neighbours per example", - MS_NOT_AVAILABLE) + ParameterProperties()); SG_ADD(&m_regularization, "regularization", "Regularization", - MS_AVAILABLE) + ParameterProperties::HYPER); SG_ADD(&m_stepsize, "stepsize", "Step size in gradient descent", - MS_NOT_AVAILABLE) + ParameterProperties()); SG_ADD(&m_stepsize_threshold, "stepsize_threshold", "Step size threshold", - MS_NOT_AVAILABLE) + ParameterProperties()); SG_ADD(&m_maxiter, "maxiter", "Maximum number of iterations", - MS_NOT_AVAILABLE) + ParameterProperties()); SG_ADD(&m_correction, "correction", - "Iterations between exact impostors search", MS_NOT_AVAILABLE) + "Iterations between exact impostors search", ParameterProperties()); SG_ADD(&m_obj_threshold, "obj_threshold", "Objective threshold", - MS_NOT_AVAILABLE) - SG_ADD(&m_diagonal, "m_diagonal", "Diagonal transformation", MS_NOT_AVAILABLE); + ParameterProperties()); + SG_ADD(&m_diagonal, "m_diagonal", "Diagonal transformation", ParameterProperties()); SG_ADD((CSGObject**) &m_statistics, "statistics", "Training statistics", - MS_NOT_AVAILABLE); + ParameterProperties()); m_features = NULL; m_labels = NULL; @@ -331,9 +331,9 @@ void CLMNNStatistics::set(index_t iter, float64_t obj_iter, float64_t stepsize_i void CLMNNStatistics::init() { - SG_ADD(&obj, "obj", "Objective at each iteration", MS_NOT_AVAILABLE); - SG_ADD(&stepsize, "stepsize", "Step size at each iteration", MS_NOT_AVAILABLE); + SG_ADD(&obj, "obj", "Objective at each iteration", ParameterProperties()); + SG_ADD(&stepsize, 
"stepsize", "Step size at each iteration", ParameterProperties()); SG_ADD(&num_impostors, "num_impostors", "Number of impostors at each iteration", - MS_NOT_AVAILABLE); + ParameterProperties()); } diff --git a/src/shogun/modelselection/GradientModelSelection.cpp b/src/shogun/modelselection/GradientModelSelection.cpp index ac088f50115..0e1ede3fce3 100644 --- a/src/shogun/modelselection/GradientModelSelection.cpp +++ b/src/shogun/modelselection/GradientModelSelection.cpp @@ -83,16 +83,16 @@ class GradientModelSelectionCostFunction: public FirstOrderCostFunction { m_obj=NULL; SG_ADD((CSGObject **)&m_obj, "GradientModelSelectionCostFunction__m_obj", - "obj in GradientModelSelectionCostFunction", MS_NOT_AVAILABLE); + "obj in GradientModelSelectionCostFunction", ParameterProperties()); m_func_data = NULL; m_val = SGVector(); SG_ADD( &m_val, "GradientModelSelectionCostFunction__m_val", - "val in GradientModelSelectionCostFunction", MS_NOT_AVAILABLE); + "val in GradientModelSelectionCostFunction", ParameterProperties()); m_grad = SGVector(); SG_ADD( &m_grad, "GradientModelSelectionCostFunction__m_grad", - "grad in GradientModelSelectionCostFunction", MS_NOT_AVAILABLE); + "grad in GradientModelSelectionCostFunction", ParameterProperties()); } CGradientModelSelection *m_obj; @@ -278,7 +278,7 @@ void CGradientModelSelection::init() SG_REF(m_mode_minimizer); SG_ADD((CSGObject **)&m_mode_minimizer, - "mode_minimizer", "Minimizer used in mode selection", MS_NOT_AVAILABLE); + "mode_minimizer", "Minimizer used in mode selection", ParameterProperties()); } diff --git a/src/shogun/modelselection/ModelSelection.cpp b/src/shogun/modelselection/ModelSelection.cpp index f1514495a45..ff83805d959 100644 --- a/src/shogun/modelselection/ModelSelection.cpp +++ b/src/shogun/modelselection/ModelSelection.cpp @@ -35,10 +35,10 @@ void CModelSelection::init() m_machine_eval=NULL; SG_ADD((CSGObject**)&m_model_parameters, "model_parameters", - "Parameter tree for model selection", MS_NOT_AVAILABLE); + "Parameter tree for model selection", ParameterProperties()); SG_ADD((CSGObject**)&m_machine_eval, "machine_evaluation", - "Machine evaluation strategy", MS_NOT_AVAILABLE); + "Machine evaluation strategy", ParameterProperties()); } CModelSelection::~CModelSelection() diff --git a/src/shogun/modelselection/ParameterCombination.cpp b/src/shogun/modelselection/ParameterCombination.cpp index 1e16b1278ae..5bf92c048a8 100644 --- a/src/shogun/modelselection/ParameterCombination.cpp +++ b/src/shogun/modelselection/ParameterCombination.cpp @@ -122,7 +122,7 @@ void CParameterCombination::init() SG_REF(m_child_nodes); SG_ADD((CSGObject**)&m_child_nodes, "child_nodes", "Children of this node", - MS_NOT_AVAILABLE); + ParameterProperties()); } CParameterCombination::~CParameterCombination() diff --git a/src/shogun/multiclass/GaussianNaiveBayes.cpp b/src/shogun/multiclass/GaussianNaiveBayes.cpp index 405e493c42c..5a9cae53eef 100644 --- a/src/shogun/multiclass/GaussianNaiveBayes.cpp +++ b/src/shogun/multiclass/GaussianNaiveBayes.cpp @@ -223,19 +223,19 @@ float64_t CGaussianNaiveBayes::apply_one(int32_t idx) void CGaussianNaiveBayes::init() { - SG_ADD(&m_min_label, "m_min_label", "minimal label", MS_NOT_AVAILABLE); + SG_ADD(&m_min_label, "m_min_label", "minimal label", ParameterProperties()); SG_ADD(&m_num_classes, "m_num_classes", - "number of different classes (labels)", MS_NOT_AVAILABLE); + "number of different classes (labels)", ParameterProperties()); SG_ADD(&m_dim, "m_dim", - "dimensionality of feature space", MS_NOT_AVAILABLE); + 
"dimensionality of feature space", ParameterProperties()); SG_ADD(&m_means, "m_means", - "means for normal distributions of features", MS_NOT_AVAILABLE); + "means for normal distributions of features", ParameterProperties()); SG_ADD(&m_variances, "m_variances", - "variances for normal distributions of features", MS_NOT_AVAILABLE); + "variances for normal distributions of features", ParameterProperties()); SG_ADD(&m_label_prob, "m_label_prob", - "a priori probabilities of labels", MS_NOT_AVAILABLE); - SG_ADD(&m_rates, "m_rates", "label rates", MS_NOT_AVAILABLE); + "a priori probabilities of labels", ParameterProperties()); + SG_ADD(&m_rates, "m_rates", "label rates", ParameterProperties()); SG_ADD( (CFeatures**)&m_features, "features", "Training features", - MS_NOT_AVAILABLE); + ParameterProperties()); } diff --git a/src/shogun/multiclass/KNN.cpp b/src/shogun/multiclass/KNN.cpp index 2649950af1d..e253132caac 100644 --- a/src/shogun/multiclass/KNN.cpp +++ b/src/shogun/multiclass/KNN.cpp @@ -59,11 +59,11 @@ void CKNN::init() /* use the method classify_multiply_k to experiment with different values * of k */ - SG_ADD(&m_k, "k", "Parameter k", MS_NOT_AVAILABLE); - SG_ADD(&m_q, "q", "Parameter q", MS_AVAILABLE); - SG_ADD(&m_num_classes, "num_classes", "Number of classes", MS_NOT_AVAILABLE); - SG_ADD(&m_leaf_size, "leaf_size", "Leaf size for KDTree", MS_NOT_AVAILABLE); - SG_ADD((machine_int_t*) &m_knn_solver, "knn_solver", "Algorithm to solve knn", MS_NOT_AVAILABLE); + SG_ADD(&m_k, "k", "Parameter k", ParameterProperties()); + SG_ADD(&m_q, "q", "Parameter q", ParameterProperties::HYPER); + SG_ADD(&m_num_classes, "num_classes", "Number of classes", ParameterProperties()); + SG_ADD(&m_leaf_size, "leaf_size", "Leaf size for KDTree", ParameterProperties()); + SG_ADD((machine_int_t*) &m_knn_solver, "knn_solver", "Algorithm to solve knn", ParameterProperties()); } CKNN::~CKNN() diff --git a/src/shogun/multiclass/MCLDA.cpp b/src/shogun/multiclass/MCLDA.cpp index aa9ca103bb0..082c26b16e4 100644 --- a/src/shogun/multiclass/MCLDA.cpp +++ b/src/shogun/multiclass/MCLDA.cpp @@ -50,19 +50,19 @@ CMCLDA::~CMCLDA() void CMCLDA::init() { - SG_ADD(&m_tolerance, "m_tolerance", "Tolerance member.", MS_AVAILABLE); - SG_ADD(&m_store_cov, "m_store_cov", "Store covariance member", MS_NOT_AVAILABLE); - SG_ADD((CSGObject**) &m_features, "m_features", "Feature object.", MS_NOT_AVAILABLE); - SG_ADD(&m_means, "m_means", "Mean vectors list", MS_NOT_AVAILABLE); - SG_ADD(&m_cov, "m_cov", "covariance matrix", MS_NOT_AVAILABLE); - SG_ADD(&m_xbar, "m_xbar", "total mean", MS_NOT_AVAILABLE); - SG_ADD(&m_scalings, "m_scalings", "scalings", MS_NOT_AVAILABLE); - SG_ADD(&m_rank, "m_rank", "rank", MS_NOT_AVAILABLE); - SG_ADD(&m_dim, "m_dim", "dimension of feature space", MS_NOT_AVAILABLE); + SG_ADD(&m_tolerance, "m_tolerance", "Tolerance member.", ParameterProperties::HYPER); + SG_ADD(&m_store_cov, "m_store_cov", "Store covariance member", ParameterProperties()); + SG_ADD((CSGObject**) &m_features, "m_features", "Feature object.", ParameterProperties()); + SG_ADD(&m_means, "m_means", "Mean vectors list", ParameterProperties()); + SG_ADD(&m_cov, "m_cov", "covariance matrix", ParameterProperties()); + SG_ADD(&m_xbar, "m_xbar", "total mean", ParameterProperties()); + SG_ADD(&m_scalings, "m_scalings", "scalings", ParameterProperties()); + SG_ADD(&m_rank, "m_rank", "rank", ParameterProperties()); + SG_ADD(&m_dim, "m_dim", "dimension of feature space", ParameterProperties()); SG_ADD( - &m_num_classes, "m_num_classes", "number of classes", 
MS_NOT_AVAILABLE);
- SG_ADD(&m_coef, "m_coef", "weight vector", MS_NOT_AVAILABLE);
- SG_ADD(&m_intercept, "m_intercept", "intercept", MS_NOT_AVAILABLE);
+ &m_num_classes, "m_num_classes", "number of classes", ParameterProperties());
+ SG_ADD(&m_coef, "m_coef", "weight vector", ParameterProperties());
+ SG_ADD(&m_intercept, "m_intercept", "intercept", ParameterProperties());
 m_features = NULL;
 m_num_classes=0;
diff --git a/src/shogun/multiclass/MulticlassLibLinear.cpp b/src/shogun/multiclass/MulticlassLibLinear.cpp
index b16af6dbb58..3d5350b60a9 100644
--- a/src/shogun/multiclass/MulticlassLibLinear.cpp
+++ b/src/shogun/multiclass/MulticlassLibLinear.cpp
@@ -42,11 +42,11 @@ void CMulticlassLibLinear::init_defaults()
 void CMulticlassLibLinear::register_parameters()
 {
- SG_ADD(&m_C, "m_C", "regularization constant",MS_AVAILABLE);
- SG_ADD(&m_epsilon, "m_epsilon", "tolerance epsilon",MS_NOT_AVAILABLE);
- SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations",MS_NOT_AVAILABLE);
- SG_ADD(&m_use_bias, "m_use_bias", "indicates whether bias should be used",MS_NOT_AVAILABLE);
- SG_ADD(&m_save_train_state, "m_save_train_state", "indicates whether bias should be used",MS_NOT_AVAILABLE);
+ SG_ADD(&m_C, "m_C", "regularization constant",ParameterProperties::HYPER);
+ SG_ADD(&m_epsilon, "m_epsilon", "tolerance epsilon",ParameterProperties());
+ SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations",ParameterProperties());
+ SG_ADD(&m_use_bias, "m_use_bias", "indicates whether bias should be used",ParameterProperties());
+ SG_ADD(&m_save_train_state, "m_save_train_state", "indicates whether the training state should be saved",ParameterProperties());
 }
 CMulticlassLibLinear::~CMulticlassLibLinear()
diff --git a/src/shogun/multiclass/MulticlassLibSVM.cpp b/src/shogun/multiclass/MulticlassLibSVM.cpp
index 62877a31f02..d800de4087c 100644
--- a/src/shogun/multiclass/MulticlassLibSVM.cpp
+++ b/src/shogun/multiclass/MulticlassLibSVM.cpp
@@ -28,7 +28,7 @@ CMulticlassLibSVM::~CMulticlassLibSVM()
 void CMulticlassLibSVM::register_params()
 {
- SG_ADD((machine_int_t*) &solver_type, "libsvm_solver_type", "LibSVM solver type", MS_NOT_AVAILABLE);
+ SG_ADD((machine_int_t*) &solver_type, "libsvm_solver_type", "LibSVM solver type", ParameterProperties());
 }
 bool CMulticlassLibSVM::train_machine(CFeatures* data)
diff --git a/src/shogun/multiclass/MulticlassOCAS.cpp b/src/shogun/multiclass/MulticlassOCAS.cpp
index 43771da4118..844da1cc0f2 100644
--- a/src/shogun/multiclass/MulticlassOCAS.cpp
+++ b/src/shogun/multiclass/MulticlassOCAS.cpp
@@ -50,11 +50,11 @@ CMulticlassOCAS::CMulticlassOCAS(float64_t C, CDotFeatures* train_features, CLab
 void CMulticlassOCAS::register_parameters()
 {
- SG_ADD(&m_C, "m_C", "regularization constant", MS_AVAILABLE);
- SG_ADD(&m_epsilon, "m_epsilon", "solver relative tolerance", MS_NOT_AVAILABLE);
- SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations", MS_NOT_AVAILABLE);
- SG_ADD(&m_method, "m_method", "used solver method", MS_NOT_AVAILABLE);
- SG_ADD(&m_buf_size, "m_buf_size", "buffer size", MS_NOT_AVAILABLE);
+ SG_ADD(&m_C, "m_C", "regularization constant", ParameterProperties::HYPER);
+ SG_ADD(&m_epsilon, "m_epsilon", "solver relative tolerance", ParameterProperties());
+ SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations", ParameterProperties());
+ SG_ADD(&m_method, "m_method", "solver method used", ParameterProperties());
+ SG_ADD(&m_buf_size, "m_buf_size", "buffer size", ParameterProperties());
 }
 CMulticlassOCAS::~CMulticlassOCAS()
diff --git
a/src/shogun/multiclass/MulticlassOneVsOneStrategy.cpp b/src/shogun/multiclass/MulticlassOneVsOneStrategy.cpp index c09a55c96f4..93647435b3f 100644 --- a/src/shogun/multiclass/MulticlassOneVsOneStrategy.cpp +++ b/src/shogun/multiclass/MulticlassOneVsOneStrategy.cpp @@ -26,7 +26,7 @@ CMulticlassOneVsOneStrategy::CMulticlassOneVsOneStrategy(EProbHeuristicType prob void CMulticlassOneVsOneStrategy::register_parameters() { - //SG_ADD(&m_num_samples, "num_samples", "Number of samples in each training machine", MS_NOT_AVAILABLE); + //SG_ADD(&m_num_samples, "num_samples", "Number of samples in each training machine", ParameterProperties()); SG_WARNING("%s::CMulticlassOneVsOneStrategy(): register parameters!\n", get_name()); } diff --git a/src/shogun/multiclass/MulticlassSVM.cpp b/src/shogun/multiclass/MulticlassSVM.cpp index 50a23d12560..82a3105256f 100644 --- a/src/shogun/multiclass/MulticlassSVM.cpp +++ b/src/shogun/multiclass/MulticlassSVM.cpp @@ -37,7 +37,7 @@ CMulticlassSVM::~CMulticlassSVM() void CMulticlassSVM::init() { - SG_ADD(&m_C, "C", "C regularization constant",MS_AVAILABLE); + SG_ADD(&m_C, "C", "C regularization constant",ParameterProperties::HYPER); m_C=0; } diff --git a/src/shogun/multiclass/MulticlassStrategy.cpp b/src/shogun/multiclass/MulticlassStrategy.cpp index 065a2214f77..490a4f9ffac 100644 --- a/src/shogun/multiclass/MulticlassStrategy.cpp +++ b/src/shogun/multiclass/MulticlassStrategy.cpp @@ -34,9 +34,9 @@ void CMulticlassStrategy::init() m_prob_heuris=PROB_HEURIS_NONE; m_num_classes=0; - SG_ADD((CSGObject**)&m_rejection_strategy, "rejection_strategy", "Strategy of rejection", MS_NOT_AVAILABLE); - SG_ADD(&m_num_classes, "num_classes", "Number of classes", MS_NOT_AVAILABLE); - SG_ADD((machine_int_t*)&m_prob_heuris, "prob_heuris", "Probability estimation heuristics", MS_NOT_AVAILABLE); + SG_ADD((CSGObject**)&m_rejection_strategy, "rejection_strategy", "Strategy of rejection", ParameterProperties()); + SG_ADD(&m_num_classes, "num_classes", "Number of classes", ParameterProperties()); + SG_ADD((machine_int_t*)&m_prob_heuris, "prob_heuris", "Probability estimation heuristics", ParameterProperties()); } void CMulticlassStrategy::train_start(CMulticlassLabels *orig_labels, CBinaryLabels *train_labels) diff --git a/src/shogun/multiclass/QDA.cpp b/src/shogun/multiclass/QDA.cpp index b8efa56ec90..a47041921c1 100644 --- a/src/shogun/multiclass/QDA.cpp +++ b/src/shogun/multiclass/QDA.cpp @@ -83,15 +83,15 @@ void CQDA::init() { m_tolerance = 1e-4; m_store_covs = false; - SG_ADD(&m_tolerance, "m_tolerance", "Tolerance member.", MS_AVAILABLE); - SG_ADD(&m_store_covs, "m_store_covs", "Store covariances member", MS_NOT_AVAILABLE); - SG_ADD((CSGObject**) &m_features, "m_features", "Feature object.", MS_NOT_AVAILABLE); - SG_ADD(&m_means, "m_means", "Mean vectors list", MS_NOT_AVAILABLE); - SG_ADD(&m_slog, "m_slog", "Vector used in classification", MS_NOT_AVAILABLE); - SG_ADD(&m_dim, "m_dim", "dimension of feature space", MS_NOT_AVAILABLE); + SG_ADD(&m_tolerance, "m_tolerance", "Tolerance member.", ParameterProperties::HYPER); + SG_ADD(&m_store_covs, "m_store_covs", "Store covariances member", ParameterProperties()); + SG_ADD((CSGObject**) &m_features, "m_features", "Feature object.", ParameterProperties()); + SG_ADD(&m_means, "m_means", "Mean vectors list", ParameterProperties()); + SG_ADD(&m_slog, "m_slog", "Vector used in classification", ParameterProperties()); + SG_ADD(&m_dim, "m_dim", "dimension of feature space", ParameterProperties()); SG_ADD( - &m_num_classes, "m_num_classes", 
"number of classes", MS_NOT_AVAILABLE); - SG_ADD(&m_M, "m_M", "Matrices used in classification", MS_NOT_AVAILABLE); + &m_num_classes, "m_num_classes", "number of classes", ParameterProperties()); + SG_ADD(&m_M, "m_M", "Matrices used in classification", ParameterProperties()); m_features = NULL; } diff --git a/src/shogun/multiclass/ScatterSVM.cpp b/src/shogun/multiclass/ScatterSVM.cpp index f5696d716d5..7b718560764 100644 --- a/src/shogun/multiclass/ScatterSVM.cpp +++ b/src/shogun/multiclass/ScatterSVM.cpp @@ -44,7 +44,7 @@ CScatterSVM::~CScatterSVM() void CScatterSVM::register_params() { - SG_ADD((machine_int_t*) &scatter_type, "scatter_type", "Type of scatter SVM", MS_NOT_AVAILABLE); + SG_ADD((machine_int_t*) &scatter_type, "scatter_type", "Type of scatter SVM", ParameterProperties()); m_parameters->add_vector(&norm_wc, &norm_wc_len, "norm_wc", "Norm of w_c"); watch_param("norm_wc", &norm_wc, &norm_wc_len); @@ -52,8 +52,8 @@ void CScatterSVM::register_params() m_parameters->add_vector(&norm_wcw, &norm_wcw_len, "norm_wcw", "Norm of w_cw"); watch_param("norm_wcw", &norm_wcw, &norm_wcw_len); - SG_ADD(&rho, "rho", "Scatter SVM rho", MS_NOT_AVAILABLE); - SG_ADD(&m_num_classes, "m_num_classes", "Number of classes", MS_NOT_AVAILABLE); + SG_ADD(&rho, "rho", "Scatter SVM rho", ParameterProperties()); + SG_ADD(&m_num_classes, "m_num_classes", "Number of classes", ParameterProperties()); } bool CScatterSVM::train_machine(CFeatures* data) diff --git a/src/shogun/multiclass/ShareBoost.cpp b/src/shogun/multiclass/ShareBoost.cpp index 6c7bb2156af..8f8b194b9eb 100644 --- a/src/shogun/multiclass/ShareBoost.cpp +++ b/src/shogun/multiclass/ShareBoost.cpp @@ -31,8 +31,8 @@ CShareBoost::CShareBoost(CDenseFeatures *features, CMulticlassLabels void CShareBoost::init_sb_params() { - SG_ADD(&m_nonzero_feas, "nonzero_feas", "Number of non-zero features", MS_NOT_AVAILABLE); - SG_ADD(&m_activeset, "active_set", "Selected features", MS_NOT_AVAILABLE); + SG_ADD(&m_nonzero_feas, "nonzero_feas", "Number of non-zero features", ParameterProperties()); + SG_ADD(&m_activeset, "active_set", "Selected features", ParameterProperties()); } SGVector CShareBoost::get_activeset() diff --git a/src/shogun/multiclass/ecoc/ECOCDiscriminantEncoder.cpp b/src/shogun/multiclass/ecoc/ECOCDiscriminantEncoder.cpp index e716e8591a1..723f258e606 100644 --- a/src/shogun/multiclass/ecoc/ECOCDiscriminantEncoder.cpp +++ b/src/shogun/multiclass/ecoc/ECOCDiscriminantEncoder.cpp @@ -37,7 +37,7 @@ void CECOCDiscriminantEncoder::init() // parameters - SG_ADD(&m_iterations, "iterations", "number of iterations in SFFS", MS_NOT_AVAILABLE); + SG_ADD(&m_iterations, "iterations", "number of iterations in SFFS", ParameterProperties()); } void CECOCDiscriminantEncoder::set_features(CDenseFeatures *features) diff --git a/src/shogun/multiclass/ecoc/ECOCForestEncoder.cpp b/src/shogun/multiclass/ecoc/ECOCForestEncoder.cpp index f2841202f1f..60c65b4cc08 100644 --- a/src/shogun/multiclass/ecoc/ECOCForestEncoder.cpp +++ b/src/shogun/multiclass/ecoc/ECOCForestEncoder.cpp @@ -11,7 +11,7 @@ using namespace shogun; CECOCForestEncoder::CECOCForestEncoder() { m_num_trees = 3; - SG_ADD(&m_num_trees, "num_trees", "number of trees", MS_NOT_AVAILABLE); + SG_ADD(&m_num_trees, "num_trees", "number of trees", ParameterProperties()); } void CECOCForestEncoder::set_num_trees(int32_t num_trees) diff --git a/src/shogun/multiclass/ecoc/ECOCRandomDenseEncoder.cpp b/src/shogun/multiclass/ecoc/ECOCRandomDenseEncoder.cpp index be29aeacb08..d585ed21427 100644 --- 
a/src/shogun/multiclass/ecoc/ECOCRandomDenseEncoder.cpp +++ b/src/shogun/multiclass/ecoc/ECOCRandomDenseEncoder.cpp @@ -34,9 +34,9 @@ void CECOCRandomDenseEncoder::init() m_maxiter = 10000; m_codelen = 0; m_pposone = 0.5; - SG_ADD(&m_maxiter, "maxiter", "max number of iterations", MS_NOT_AVAILABLE); - SG_ADD(&m_codelen, "codelen", "code length", MS_NOT_AVAILABLE); - SG_ADD(&m_pposone, "pposone", "probability of +1", MS_NOT_AVAILABLE); + SG_ADD(&m_maxiter, "maxiter", "max number of iterations", ParameterProperties()); + SG_ADD(&m_codelen, "codelen", "code length", ParameterProperties()); + SG_ADD(&m_pposone, "pposone", "probability of +1", ParameterProperties()); } void CECOCRandomDenseEncoder::set_probability(float64_t pposone) diff --git a/src/shogun/multiclass/ecoc/ECOCRandomSparseEncoder.cpp b/src/shogun/multiclass/ecoc/ECOCRandomSparseEncoder.cpp index 479bdad3581..63c25b3494a 100644 --- a/src/shogun/multiclass/ecoc/ECOCRandomSparseEncoder.cpp +++ b/src/shogun/multiclass/ecoc/ECOCRandomSparseEncoder.cpp @@ -25,11 +25,11 @@ CECOCRandomSparseEncoder::CECOCRandomSparseEncoder(int32_t maxiter, int32_t code void CECOCRandomSparseEncoder::init() { - SG_ADD(&m_maxiter, "maxiter", "max number of iterations", MS_NOT_AVAILABLE); - SG_ADD(&m_codelen, "codelen", "code length", MS_NOT_AVAILABLE); - SG_ADD(&m_pzero, "pzero", "probability of 0", MS_NOT_AVAILABLE); - SG_ADD(&m_pposone, "pposone", "probability of +1", MS_NOT_AVAILABLE); - SG_ADD(&m_pnegone, "pnegone", "probability of -1", MS_NOT_AVAILABLE); + SG_ADD(&m_maxiter, "maxiter", "max number of iterations", ParameterProperties()); + SG_ADD(&m_codelen, "codelen", "code length", ParameterProperties()); + SG_ADD(&m_pzero, "pzero", "probability of 0", ParameterProperties()); + SG_ADD(&m_pposone, "pposone", "probability of +1", ParameterProperties()); + SG_ADD(&m_pnegone, "pnegone", "probability of -1", ParameterProperties()); } void CECOCRandomSparseEncoder::set_probability(float64_t pzero, float64_t pposone, float64_t pnegone) diff --git a/src/shogun/multiclass/ecoc/ECOCStrategy.cpp b/src/shogun/multiclass/ecoc/ECOCStrategy.cpp index d207f57b253..35aef514da3 100644 --- a/src/shogun/multiclass/ecoc/ECOCStrategy.cpp +++ b/src/shogun/multiclass/ecoc/ECOCStrategy.cpp @@ -30,8 +30,8 @@ void CECOCStrategy::init() m_encoder=NULL; m_decoder=NULL; - SG_ADD(&m_encoder, "encoder", "ECOC Encoder", MS_NOT_AVAILABLE); - SG_ADD(&m_decoder, "decoder", "ECOC Decoder", MS_NOT_AVAILABLE); + SG_ADD(&m_encoder, "encoder", "ECOC Encoder", ParameterProperties()); + SG_ADD(&m_decoder, "decoder", "ECOC Decoder", ParameterProperties()); } CECOCStrategy::~CECOCStrategy() diff --git a/src/shogun/multiclass/tree/BalancedConditionalProbabilityTree.cpp b/src/shogun/multiclass/tree/BalancedConditionalProbabilityTree.cpp index 2ba8c9a7725..71b2093b86b 100644 --- a/src/shogun/multiclass/tree/BalancedConditionalProbabilityTree.cpp +++ b/src/shogun/multiclass/tree/BalancedConditionalProbabilityTree.cpp @@ -11,7 +11,7 @@ using namespace shogun; CBalancedConditionalProbabilityTree::CBalancedConditionalProbabilityTree() :m_alpha(0.4) { - SG_ADD(&m_alpha, "m_alpha", "Trade-off parameter of tree balance", MS_NOT_AVAILABLE); + SG_ADD(&m_alpha, "m_alpha", "Trade-off parameter of tree balance", ParameterProperties()); } void CBalancedConditionalProbabilityTree::set_alpha(float64_t alpha) diff --git a/src/shogun/multiclass/tree/C45ClassifierTree.cpp b/src/shogun/multiclass/tree/C45ClassifierTree.cpp index 24ffb1b5feb..2fcd1cf73ce 100644 --- a/src/shogun/multiclass/tree/C45ClassifierTree.cpp 
+++ b/src/shogun/multiclass/tree/C45ClassifierTree.cpp @@ -795,10 +795,10 @@ void CC45ClassifierTree::init() m_types_set=false; m_weights_set=false; - SG_ADD(&m_nominal,"m_nominal", "feature types", MS_NOT_AVAILABLE); - SG_ADD(&m_weights,"m_weights", "weights", MS_NOT_AVAILABLE); - SG_ADD(&m_certainty,"m_certainty", "certainty", MS_NOT_AVAILABLE); - SG_ADD(&m_weights_set,"m_weights_set", "weights set", MS_NOT_AVAILABLE); - SG_ADD(&m_types_set,"m_types_set", "feature types set", MS_NOT_AVAILABLE); + SG_ADD(&m_nominal,"m_nominal", "feature types", ParameterProperties()); + SG_ADD(&m_weights,"m_weights", "weights", ParameterProperties()); + SG_ADD(&m_certainty,"m_certainty", "certainty", ParameterProperties()); + SG_ADD(&m_weights_set,"m_weights_set", "weights set", ParameterProperties()); + SG_ADD(&m_types_set,"m_types_set", "feature types set", ParameterProperties()); } diff --git a/src/shogun/multiclass/tree/CARTree.cpp b/src/shogun/multiclass/tree/CARTree.cpp index 0dcce74e6ea..df28fdf1c8d 100644 --- a/src/shogun/multiclass/tree/CARTree.cpp +++ b/src/shogun/multiclass/tree/CARTree.cpp @@ -1489,17 +1489,17 @@ void CCARTree::init() m_sorted_features=SGMatrix(); m_sorted_indices=SGMatrix(); - SG_ADD(&m_pre_sort, "pre_sort", "presort", MS_NOT_AVAILABLE); - SG_ADD(&m_sorted_features, "sorted_features", "sorted feats", MS_NOT_AVAILABLE); - SG_ADD(&m_sorted_indices, "sorted_indices", "sorted indices", MS_NOT_AVAILABLE); - SG_ADD(&m_nominal, "nominal", "feature types", MS_NOT_AVAILABLE); - SG_ADD(&m_weights, "weights", "weights", MS_NOT_AVAILABLE); - SG_ADD(&m_weights_set, "weights_set", "weights set", MS_NOT_AVAILABLE); - SG_ADD(&m_types_set, "types_set", "feature types set", MS_NOT_AVAILABLE); - SG_ADD(&m_apply_cv_pruning, "apply_cv_pruning", "apply cross validation pruning", MS_NOT_AVAILABLE); - SG_ADD(&m_folds, "folds", "number of subsets for cross validation", MS_NOT_AVAILABLE); - SG_ADD(&m_max_depth, "max_depth", "max allowed tree depth", MS_NOT_AVAILABLE) - SG_ADD(&m_min_node_size, "min_node_size", "min allowed node size", MS_NOT_AVAILABLE) - SG_ADD(&m_label_epsilon, "label_epsilon", "epsilon for labels", MS_NOT_AVAILABLE) - SG_ADD((machine_int_t*)&m_mode, "mode", "problem type (multiclass or regression)", MS_NOT_AVAILABLE) + SG_ADD(&m_pre_sort, "pre_sort", "presort", ParameterProperties()); + SG_ADD(&m_sorted_features, "sorted_features", "sorted feats", ParameterProperties()); + SG_ADD(&m_sorted_indices, "sorted_indices", "sorted indices", ParameterProperties()); + SG_ADD(&m_nominal, "nominal", "feature types", ParameterProperties()); + SG_ADD(&m_weights, "weights", "weights", ParameterProperties()); + SG_ADD(&m_weights_set, "weights_set", "weights set", ParameterProperties()); + SG_ADD(&m_types_set, "types_set", "feature types set", ParameterProperties()); + SG_ADD(&m_apply_cv_pruning, "apply_cv_pruning", "apply cross validation pruning", ParameterProperties()); + SG_ADD(&m_folds, "folds", "number of subsets for cross validation", ParameterProperties()); + SG_ADD(&m_max_depth, "max_depth", "max allowed tree depth", ParameterProperties()); + SG_ADD(&m_min_node_size, "min_node_size", "min allowed node size", ParameterProperties()); + SG_ADD(&m_label_epsilon, "label_epsilon", "epsilon for labels", ParameterProperties()); + SG_ADD((machine_int_t*)&m_mode, "mode", "problem type (multiclass or regression)", ParameterProperties()); } diff --git a/src/shogun/multiclass/tree/CHAIDTree.cpp b/src/shogun/multiclass/tree/CHAIDTree.cpp index 0883703f6c4..cefc5d96cd9 100644 --- 
a/src/shogun/multiclass/tree/CHAIDTree.cpp +++ b/src/shogun/multiclass/tree/CHAIDTree.cpp @@ -1400,14 +1400,14 @@ void CCHAIDTree::init() m_cont_breakpoints=SGMatrix(); m_num_breakpoints=0; - SG_ADD(&m_weights,"m_weights", "weights", MS_NOT_AVAILABLE); - SG_ADD(&m_weights_set,"m_weights_set", "weights set", MS_NOT_AVAILABLE); - SG_ADD(&m_feature_types,"m_feature_types", "feature types", MS_NOT_AVAILABLE); - SG_ADD(&m_dependent_vartype,"m_dependent_vartype", "dependent variable type", MS_NOT_AVAILABLE); - SG_ADD(&m_max_tree_depth,"m_max_tree_depth", "max tree depth", MS_NOT_AVAILABLE); - SG_ADD(&m_min_node_size,"m_min_node_size", "min node size", MS_NOT_AVAILABLE); - SG_ADD(&m_alpha_merge,"m_alpha_merge", "alpha-merge", MS_NOT_AVAILABLE); - SG_ADD(&m_alpha_split,"m_alpha_split", "alpha-split", MS_NOT_AVAILABLE); - SG_ADD(&m_cont_breakpoints,"m_cont_breakpoints", "breakpoints in continuous attributes", MS_NOT_AVAILABLE); - SG_ADD(&m_num_breakpoints,"m_num_breakpoints", "number of breakpoints", MS_NOT_AVAILABLE); + SG_ADD(&m_weights,"m_weights", "weights", ParameterProperties()); + SG_ADD(&m_weights_set,"m_weights_set", "weights set", ParameterProperties()); + SG_ADD(&m_feature_types,"m_feature_types", "feature types", ParameterProperties()); + SG_ADD(&m_dependent_vartype,"m_dependent_vartype", "dependent variable type", ParameterProperties()); + SG_ADD(&m_max_tree_depth,"m_max_tree_depth", "max tree depth", ParameterProperties()); + SG_ADD(&m_min_node_size,"m_min_node_size", "min node size", ParameterProperties()); + SG_ADD(&m_alpha_merge,"m_alpha_merge", "alpha-merge", ParameterProperties()); + SG_ADD(&m_alpha_split,"m_alpha_split", "alpha-split", ParameterProperties()); + SG_ADD(&m_cont_breakpoints,"m_cont_breakpoints", "breakpoints in continuous attributes", ParameterProperties()); + SG_ADD(&m_num_breakpoints,"m_num_breakpoints", "number of breakpoints", ParameterProperties()); } diff --git a/src/shogun/multiclass/tree/NbodyTree.cpp b/src/shogun/multiclass/tree/NbodyTree.cpp index f5d72422586..8bedd4361da 100644 --- a/src/shogun/multiclass/tree/NbodyTree.cpp +++ b/src/shogun/multiclass/tree/NbodyTree.cpp @@ -584,10 +584,10 @@ void CNbodyTree::init() m_knn_dists=SGMatrix(); m_knn_indices=SGMatrix(); - SG_ADD(&m_data,"m_data","data matrix",MS_NOT_AVAILABLE); - SG_ADD(&m_leaf_size,"m_leaf_size","leaf size",MS_NOT_AVAILABLE); - SG_ADD(&m_vec_id,"m_vec_id","id of vectors",MS_NOT_AVAILABLE); - SG_ADD(&m_knn_done,"knn_done","knn done or not",MS_NOT_AVAILABLE); - SG_ADD(&m_knn_dists,"m_knn_dists","knn distances",MS_NOT_AVAILABLE); - SG_ADD(&m_knn_indices,"knn_indices","knn indices",MS_NOT_AVAILABLE); + SG_ADD(&m_data,"m_data","data matrix",ParameterProperties()); + SG_ADD(&m_leaf_size,"m_leaf_size","leaf size",ParameterProperties()); + SG_ADD(&m_vec_id,"m_vec_id","id of vectors",ParameterProperties()); + SG_ADD(&m_knn_done,"knn_done","knn done or not",ParameterProperties()); + SG_ADD(&m_knn_dists,"m_knn_dists","knn distances",ParameterProperties()); + SG_ADD(&m_knn_indices,"knn_indices","knn indices",ParameterProperties()); } \ No newline at end of file diff --git a/src/shogun/multiclass/tree/RandomCARTree.cpp b/src/shogun/multiclass/tree/RandomCARTree.cpp index fdbd2df16ac..0c5b3473e4a 100644 --- a/src/shogun/multiclass/tree/RandomCARTree.cpp +++ b/src/shogun/multiclass/tree/RandomCARTree.cpp @@ -72,5 +72,5 @@ void CRandomCARTree::init() { m_randsubset_size=0; - SG_ADD(&m_randsubset_size,"m_randsubset_size", "random features subset size", MS_NOT_AVAILABLE); + 
SG_ADD(&m_randsubset_size,"m_randsubset_size", "random features subset size", ParameterProperties()); } diff --git a/src/shogun/multiclass/tree/RelaxedTree.cpp b/src/shogun/multiclass/tree/RelaxedTree.cpp index 50a36e7d686..b6140c54d67 100644 --- a/src/shogun/multiclass/tree/RelaxedTree.cpp +++ b/src/shogun/multiclass/tree/RelaxedTree.cpp @@ -22,11 +22,11 @@ CRelaxedTree::CRelaxedTree() :m_max_num_iter(3), m_A(0.5), m_B(5), m_svm_C(1), m_svm_epsilon(0.001), m_kernel(NULL), m_feats(NULL), m_machine_for_confusion_matrix(NULL), m_num_classes(0) { - SG_ADD(&m_max_num_iter, "m_max_num_iter", "max number of iterations in alternating optimization", MS_NOT_AVAILABLE); - SG_ADD(&m_svm_C, "m_svm_C", "C for svm", MS_AVAILABLE); - SG_ADD(&m_A, "m_A", "parameter A", MS_AVAILABLE); - SG_ADD(&m_B, "m_B", "parameter B", MS_AVAILABLE); - SG_ADD(&m_svm_epsilon, "m_svm_epsilon", "epsilon for svm", MS_AVAILABLE); + SG_ADD(&m_max_num_iter, "m_max_num_iter", "max number of iterations in alternating optimization", ParameterProperties()); + SG_ADD(&m_svm_C, "m_svm_C", "C for svm", ParameterProperties::HYPER); + SG_ADD(&m_A, "m_A", "parameter A", ParameterProperties::HYPER); + SG_ADD(&m_B, "m_B", "parameter B", ParameterProperties::HYPER); + SG_ADD(&m_svm_epsilon, "m_svm_epsilon", "epsilon for svm", ParameterProperties::HYPER); } CRelaxedTree::~CRelaxedTree() diff --git a/src/shogun/multiclass/tree/TreeMachine.h b/src/shogun/multiclass/tree/TreeMachine.h index 3cdaed9407d..84689f41036 100644 --- a/src/shogun/multiclass/tree/TreeMachine.h +++ b/src/shogun/multiclass/tree/TreeMachine.h @@ -58,7 +58,7 @@ template class CTreeMachine : public CBaseMulticlassMachine CTreeMachine() : CBaseMulticlassMachine() { m_root=NULL; - SG_ADD((CSGObject**)&m_root,"m_root", "tree structure", MS_NOT_AVAILABLE); + SG_ADD((CSGObject**)&m_root,"m_root", "tree structure", ParameterProperties()); } /** destructor */ diff --git a/src/shogun/multiclass/tree/TreeMachineNode.h b/src/shogun/multiclass/tree/TreeMachineNode.h index 0f3673f8165..6231da77a10 100644 --- a/src/shogun/multiclass/tree/TreeMachineNode.h +++ b/src/shogun/multiclass/tree/TreeMachineNode.h @@ -185,8 +185,8 @@ class CTreeMachineNode m_machine=-1; m_children=new CDynamicObjectArray(); SG_REF(m_children); - SG_ADD((CSGObject**)&m_parent,"m_parent", "Parent node", MS_NOT_AVAILABLE); - SG_ADD(&m_machine,"m_machine", "Index of associated machine", MS_NOT_AVAILABLE); + SG_ADD((CSGObject**)&m_parent,"m_parent", "Parent node", ParameterProperties()); + SG_ADD(&m_machine,"m_machine", "Index of associated machine", ParameterProperties()); } public: diff --git a/src/shogun/neuralnets/Autoencoder.cpp b/src/shogun/neuralnets/Autoencoder.cpp index fd489246e7e..d03762acf3a 100644 --- a/src/shogun/neuralnets/Autoencoder.cpp +++ b/src/shogun/neuralnets/Autoencoder.cpp @@ -176,9 +176,9 @@ void CAutoencoder::init() m_contraction_coefficient = 0.0; SG_ADD((machine_int_t*)&m_noise_type, "noise_type", - "Noise Type", MS_NOT_AVAILABLE); + "Noise Type", ParameterProperties()); SG_ADD(&m_noise_parameter, "noise_parameter", - "Noise Parameter", MS_NOT_AVAILABLE); + "Noise Parameter", ParameterProperties()); SG_ADD(&m_contraction_coefficient, "contraction_coefficient", - "Contraction Coefficient", MS_NOT_AVAILABLE); + "Contraction Coefficient", ParameterProperties()); } diff --git a/src/shogun/neuralnets/DeepAutoencoder.cpp b/src/shogun/neuralnets/DeepAutoencoder.cpp index 096adfc5a04..604f9603c30 100644 --- a/src/shogun/neuralnets/DeepAutoencoder.cpp +++ b/src/shogun/neuralnets/DeepAutoencoder.cpp 
@@ -270,31 +270,31 @@ void CDeepAutoencoder::init() pt_gd_error_damping_coeff.set_const(-1); SG_ADD(&pt_noise_type, "pt_noise_type", - "Pre-training Noise Type", MS_NOT_AVAILABLE); + "Pre-training Noise Type", ParameterProperties()); SG_ADD(&pt_noise_parameter, "pt_noise_parameter", - "Pre-training Noise Parameter", MS_NOT_AVAILABLE); + "Pre-training Noise Parameter", ParameterProperties()); SG_ADD(&pt_contraction_coefficient, "pt_contraction_coefficient", - "Pre-training Contraction Coefficient", MS_NOT_AVAILABLE); + "Pre-training Contraction Coefficient", ParameterProperties()); SG_ADD(&pt_optimization_method, "pt_optimization_method", - "Pre-training Optimization Method", MS_NOT_AVAILABLE); + "Pre-training Optimization Method", ParameterProperties()); SG_ADD(&pt_gd_mini_batch_size, "pt_gd_mini_batch_size", - "Pre-training Gradient Descent Mini-batch size", MS_NOT_AVAILABLE); + "Pre-training Gradient Descent Mini-batch size", ParameterProperties()); SG_ADD(&pt_max_num_epochs, "pt_max_num_epochs", - "Pre-training Max number of Epochs", MS_NOT_AVAILABLE); + "Pre-training Max number of Epochs", ParameterProperties()); SG_ADD(&pt_gd_learning_rate, "pt_gd_learning_rate", - "Pre-training Gradient descent learning rate", MS_NOT_AVAILABLE); + "Pre-training Gradient descent learning rate", ParameterProperties()); SG_ADD(&pt_gd_learning_rate_decay, "pt_gd_learning_rate_decay", - "Pre-training Gradient descent learning rate decay", MS_NOT_AVAILABLE); + "Pre-training Gradient descent learning rate decay", ParameterProperties()); SG_ADD(&pt_gd_momentum, "pt_gd_momentum", - "Pre-training Gradient Descent Momentum", MS_NOT_AVAILABLE); + "Pre-training Gradient Descent Momentum", ParameterProperties()); SG_ADD(&pt_gd_error_damping_coeff, "pt_gd_error_damping_coeff", - "Pre-training Gradient Descent Error Damping Coeff", MS_NOT_AVAILABLE); + "Pre-training Gradient Descent Error Damping Coeff", ParameterProperties()); SG_ADD(&pt_epsilon, "pt_epsilon", - "Pre-training Epsilon", MS_NOT_AVAILABLE); + "Pre-training Epsilon", ParameterProperties()); SG_ADD(&pt_l2_coefficient, "pt_l2_coefficient", - "Pre-training L2 regularization coeff", MS_NOT_AVAILABLE); + "Pre-training L2 regularization coeff", ParameterProperties()); SG_ADD(&pt_l1_coefficient, "pt_l1_coefficient", - "Pre-training L1 regularization coeff", MS_NOT_AVAILABLE); + "Pre-training L1 regularization coeff", ParameterProperties()); - SG_ADD(&m_sigma, "m_sigma", "Initialization Sigma", MS_NOT_AVAILABLE); + SG_ADD(&m_sigma, "m_sigma", "Initialization Sigma", ParameterProperties()); } diff --git a/src/shogun/neuralnets/DeepBeliefNetwork.cpp b/src/shogun/neuralnets/DeepBeliefNetwork.cpp index 273e4a6d5b1..5d8b95d1caf 100644 --- a/src/shogun/neuralnets/DeepBeliefNetwork.cpp +++ b/src/shogun/neuralnets/DeepBeliefNetwork.cpp @@ -584,60 +584,60 @@ void CDeepBeliefNetwork::init() m_sigma = 0.01; SG_ADD((machine_int_t*)&m_visible_units_type, "visible_units_type", - "Type of the visible units", MS_NOT_AVAILABLE); + "Type of the visible units", ParameterProperties()); SG_ADD(&m_num_layers, "num_layers", - "Number of layers", MS_NOT_AVAILABLE); + "Number of layers", ParameterProperties()); SG_ADD((CSGObject**)&m_layer_sizes, "layer_sizes", - "Size of each hidden layer", MS_NOT_AVAILABLE); + "Size of each hidden layer", ParameterProperties()); SG_ADD(&m_params, "params", - "Parameters of the network", MS_NOT_AVAILABLE); + "Parameters of the network", ParameterProperties()); SG_ADD(&m_num_params, "num_params", - "Number of parameters", MS_NOT_AVAILABLE); + "Number 
of parameters", ParameterProperties()); SG_ADD(&m_bias_index_offsets, "bias_index_offsets", - "Index offsets of the biases", MS_NOT_AVAILABLE); + "Index offsets of the biases", ParameterProperties()); SG_ADD(&m_weights_index_offsets, "weights_index_offsets", - "Index offsets of the weights", MS_NOT_AVAILABLE); + "Index offsets of the weights", ParameterProperties()); SG_ADD(&pt_cd_num_steps, "pt_cd_num_steps", - "Pre-training Number of CD Steps", MS_NOT_AVAILABLE); + "Pre-training Number of CD Steps", ParameterProperties()); SG_ADD(&pt_cd_persistent, "pt_cd_persistent", - "Pre-training Persistent CD", MS_NOT_AVAILABLE); + "Pre-training Persistent CD", ParameterProperties()); SG_ADD(&pt_cd_sample_visible, "pt_cd_sample_visible", - "Pre-training Number of CD Sample Visible", MS_NOT_AVAILABLE); + "Pre-training Number of CD Sample Visible", ParameterProperties()); SG_ADD(&pt_l2_coefficient, "pt_l2_coefficient", - "Pre-training L2 regularization coeff", MS_NOT_AVAILABLE); + "Pre-training L2 regularization coeff", ParameterProperties()); SG_ADD(&pt_l1_coefficient, "pt_l1_coefficient", - "Pre-training L1 regularization coeff", MS_NOT_AVAILABLE); + "Pre-training L1 regularization coeff", ParameterProperties()); SG_ADD(&pt_monitoring_interval, "pt_monitoring_interval", - "Pre-training Monitoring Interval", MS_NOT_AVAILABLE); + "Pre-training Monitoring Interval", ParameterProperties()); SG_ADD(&pt_monitoring_method, "pt_monitoring_method", - "Pre-training Monitoring Method", MS_NOT_AVAILABLE); + "Pre-training Monitoring Method", ParameterProperties()); SG_ADD(&pt_cd_num_steps, "pt_gd_mini_batch_size", - "Pre-training Gradient Descent Mini-batch size", MS_NOT_AVAILABLE); + "Pre-training Gradient Descent Mini-batch size", ParameterProperties()); SG_ADD(&pt_max_num_epochs, "pt_max_num_epochs", - "Pre-training Max number of Epochs", MS_NOT_AVAILABLE); + "Pre-training Max number of Epochs", ParameterProperties()); SG_ADD(&pt_gd_learning_rate, "pt_gd_learning_rate", - "Pre-training Gradient descent learning rate", MS_NOT_AVAILABLE); + "Pre-training Gradient descent learning rate", ParameterProperties()); SG_ADD(&pt_gd_learning_rate_decay, "pt_gd_learning_rate_decay", - "Pre-training Gradient descent learning rate decay", MS_NOT_AVAILABLE); + "Pre-training Gradient descent learning rate decay", ParameterProperties()); SG_ADD(&pt_gd_momentum, "pt_gd_momentum", - "Pre-training Gradient Descent Momentum", MS_NOT_AVAILABLE); + "Pre-training Gradient Descent Momentum", ParameterProperties()); - SG_ADD(&cd_num_steps, "cd_num_steps", "Number of CD Steps", MS_NOT_AVAILABLE); + SG_ADD(&cd_num_steps, "cd_num_steps", "Number of CD Steps", ParameterProperties()); SG_ADD(&monitoring_interval, "monitoring_interval", - "Monitoring Interval", MS_NOT_AVAILABLE); + "Monitoring Interval", ParameterProperties()); SG_ADD(&gd_mini_batch_size, "gd_mini_batch_size", - "Gradient Descent Mini-batch size", MS_NOT_AVAILABLE); + "Gradient Descent Mini-batch size", ParameterProperties()); SG_ADD(&max_num_epochs, "max_num_epochs", - "Max number of Epochs", MS_NOT_AVAILABLE); + "Max number of Epochs", ParameterProperties()); SG_ADD(&gd_learning_rate, "gd_learning_rate", - "Gradient descent learning rate", MS_NOT_AVAILABLE); + "Gradient descent learning rate", ParameterProperties()); SG_ADD(&gd_learning_rate_decay, "gd_learning_rate_decay", - "Gradient descent learning rate decay", MS_NOT_AVAILABLE); + "Gradient descent learning rate decay", ParameterProperties()); SG_ADD(&gd_momentum, "gd_momentum", - "Gradient Descent Momentum", 
MS_NOT_AVAILABLE); + "Gradient Descent Momentum", ParameterProperties()); - SG_ADD(&m_sigma, "m_sigma", "Initialization Sigma", MS_NOT_AVAILABLE); + SG_ADD(&m_sigma, "m_sigma", "Initialization Sigma", ParameterProperties()); } diff --git a/src/shogun/neuralnets/NeuralConvolutionalLayer.cpp b/src/shogun/neuralnets/NeuralConvolutionalLayer.cpp index 6bca596fbe3..e49a7c5124f 100644 --- a/src/shogun/neuralnets/NeuralConvolutionalLayer.cpp +++ b/src/shogun/neuralnets/NeuralConvolutionalLayer.cpp @@ -283,27 +283,27 @@ void CNeuralConvolutionalLayer::init() m_initialization_mode = NORMAL; m_activation_function = CMAF_IDENTITY; - SG_ADD(&m_num_maps, "num_maps", "Number of maps", MS_NOT_AVAILABLE); - SG_ADD(&m_input_width, "input_width", "Input Width", MS_NOT_AVAILABLE); - SG_ADD(&m_input_height, "input_height", "Input Height", MS_NOT_AVAILABLE); + SG_ADD(&m_num_maps, "num_maps", "Number of maps", ParameterProperties()); + SG_ADD(&m_input_width, "input_width", "Input Width", ParameterProperties()); + SG_ADD(&m_input_height, "input_height", "Input Height", ParameterProperties()); SG_ADD(&m_input_num_channels, "input_num_channels", "Input's number of channels", - MS_NOT_AVAILABLE); - SG_ADD(&m_radius_x, "radius_x", "X Radius", MS_NOT_AVAILABLE); - SG_ADD(&m_radius_y, "radius_y", "Y Radius", MS_NOT_AVAILABLE); - SG_ADD(&m_pooling_width, "pooling_width", "Pooling Width", MS_NOT_AVAILABLE); - SG_ADD(&m_pooling_height, "pooling_height", "Pooling Height", MS_NOT_AVAILABLE); - SG_ADD(&m_stride_x, "stride_x", "X Stride", MS_NOT_AVAILABLE); - SG_ADD(&m_stride_y, "stride_y", "Y Stride", MS_NOT_AVAILABLE); + ParameterProperties()); + SG_ADD(&m_radius_x, "radius_x", "X Radius", ParameterProperties()); + SG_ADD(&m_radius_y, "radius_y", "Y Radius", ParameterProperties()); + SG_ADD(&m_pooling_width, "pooling_width", "Pooling Width", ParameterProperties()); + SG_ADD(&m_pooling_height, "pooling_height", "Pooling Height", ParameterProperties()); + SG_ADD(&m_stride_x, "stride_x", "X Stride", ParameterProperties()); + SG_ADD(&m_stride_y, "stride_y", "Y Stride", ParameterProperties()); SG_ADD((machine_int_t*) &m_initialization_mode, "initialization_mode", "Initialization Mode", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD((machine_int_t*) &m_activation_function, "activation_function", - "Activation Function", MS_NOT_AVAILABLE); + "Activation Function", ParameterProperties()); SG_ADD(&m_convolution_output, "convolution_output", - "Convolution Output", MS_NOT_AVAILABLE); + "Convolution Output", ParameterProperties()); SG_ADD(&m_convolution_output_gradients, "convolution_output_gradients", - "Convolution Output Gradients", MS_NOT_AVAILABLE); + "Convolution Output Gradients", ParameterProperties()); } diff --git a/src/shogun/neuralnets/NeuralInputLayer.cpp b/src/shogun/neuralnets/NeuralInputLayer.cpp index b9191ac7090..3b1f6925a33 100644 --- a/src/shogun/neuralnets/NeuralInputLayer.cpp +++ b/src/shogun/neuralnets/NeuralInputLayer.cpp @@ -82,7 +82,7 @@ void CNeuralInputLayer::init() m_start_index = 0; gaussian_noise = 0; SG_ADD(&m_start_index, "start_index", - "Start Index", MS_NOT_AVAILABLE); + "Start Index", ParameterProperties()); SG_ADD(&gaussian_noise, "gaussian_noise", - "Gaussian Noise Standard Deviation", MS_NOT_AVAILABLE); + "Gaussian Noise Standard Deviation", ParameterProperties()); } diff --git a/src/shogun/neuralnets/NeuralLayer.cpp b/src/shogun/neuralnets/NeuralLayer.cpp index 70a40a0becc..96aeeccb59e 100644 --- a/src/shogun/neuralnets/NeuralLayer.cpp +++ b/src/shogun/neuralnets/NeuralLayer.cpp @@ 
-121,32 +121,32 @@ void CNeuralLayer::init() autoencoder_position = NLAP_NONE; SG_ADD(&m_num_neurons, "num_neurons", - "Number of Neurons", MS_NOT_AVAILABLE); + "Number of Neurons", ParameterProperties()); SG_ADD(&m_width, "width", - "Width", MS_NOT_AVAILABLE); + "Width", ParameterProperties()); SG_ADD(&m_height, "height", - "Height", MS_NOT_AVAILABLE); + "Height", ParameterProperties()); SG_ADD(&m_input_indices, "input_indices", - "Input Indices", MS_NOT_AVAILABLE); + "Input Indices", ParameterProperties()); SG_ADD(&m_input_sizes, "input_sizes", - "Input Sizes", MS_NOT_AVAILABLE); + "Input Sizes", ParameterProperties()); SG_ADD(&dropout_prop, "dropout_prop", - "Dropout Probabilty", MS_NOT_AVAILABLE); + "Dropout Probabilty", ParameterProperties()); SG_ADD(&contraction_coefficient, "contraction_coefficient", - "Contraction Coefficient", MS_NOT_AVAILABLE); + "Contraction Coefficient", ParameterProperties()); SG_ADD(&is_training, "is_training", - "is_training", MS_NOT_AVAILABLE); + "is_training", ParameterProperties()); SG_ADD(&m_batch_size, "batch_size", - "Batch Size", MS_NOT_AVAILABLE); + "Batch Size", ParameterProperties()); SG_ADD(&m_activations, "activations", - "Activations", MS_NOT_AVAILABLE); + "Activations", ParameterProperties()); SG_ADD(&m_activation_gradients, "activation_gradients", - "Activation Gradients", MS_NOT_AVAILABLE); + "Activation Gradients", ParameterProperties()); SG_ADD(&m_local_gradients, "local_gradients", - "Local Gradients", MS_NOT_AVAILABLE); + "Local Gradients", ParameterProperties()); SG_ADD(&m_dropout_mask, "dropout_mask", - "Dropout mask", MS_NOT_AVAILABLE); + "Dropout mask", ParameterProperties()); SG_ADD((machine_int_t*)&autoencoder_position, "autoencoder_position", - "Autoencoder Position", MS_NOT_AVAILABLE); + "Autoencoder Position", ParameterProperties()); } diff --git a/src/shogun/neuralnets/NeuralNetwork.cpp b/src/shogun/neuralnets/NeuralNetwork.cpp index a5ca73c70af..5ceb97d7d5e 100644 --- a/src/shogun/neuralnets/NeuralNetwork.cpp +++ b/src/shogun/neuralnets/NeuralNetwork.cpp @@ -790,52 +790,52 @@ void CNeuralNetwork::init() SG_REF(m_layers); SG_ADD((machine_int_t*)&m_optimization_method, "optimization_method", - "Optimization Method", MS_NOT_AVAILABLE); + "Optimization Method", ParameterProperties()); SG_ADD(&m_gd_mini_batch_size, "gd_mini_batch_size", - "Gradient Descent Mini-batch size", MS_NOT_AVAILABLE); + "Gradient Descent Mini-batch size", ParameterProperties()); SG_ADD(&m_max_num_epochs, "max_num_epochs", - "Max number of Epochs", MS_NOT_AVAILABLE); + "Max number of Epochs", ParameterProperties()); SG_ADD(&m_gd_learning_rate, "gd_learning_rate", - "Gradient descent learning rate", MS_NOT_AVAILABLE); + "Gradient descent learning rate", ParameterProperties()); SG_ADD(&m_gd_learning_rate_decay, "gd_learning_rate_decay", - "Gradient descent learning rate decay", MS_NOT_AVAILABLE); + "Gradient descent learning rate decay", ParameterProperties()); SG_ADD(&m_gd_momentum, "gd_momentum", - "Gradient Descent Momentum", MS_NOT_AVAILABLE); + "Gradient Descent Momentum", ParameterProperties()); SG_ADD(&m_gd_error_damping_coeff, "gd_error_damping_coeff", - "Gradient Descent Error Damping Coeff", MS_NOT_AVAILABLE); + "Gradient Descent Error Damping Coeff", ParameterProperties()); SG_ADD(&m_epsilon, "epsilon", - "Epsilon", MS_NOT_AVAILABLE); + "Epsilon", ParameterProperties()); SG_ADD(&m_num_inputs, "num_inputs", - "Number of Inputs", MS_NOT_AVAILABLE); + "Number of Inputs", ParameterProperties()); SG_ADD(&m_num_layers, "num_layers", - "Number of Layers", 
MS_NOT_AVAILABLE); + "Number of Layers", ParameterProperties()); SG_ADD(&m_adj_matrix, "adj_matrix", - "Adjacency Matrix", MS_NOT_AVAILABLE); + "Adjacency Matrix", ParameterProperties()); SG_ADD(&m_l2_coefficient, "l2_coefficient", - "L2 regularization coeff", MS_NOT_AVAILABLE); + "L2 regularization coeff", ParameterProperties()); SG_ADD(&m_l1_coefficient, "l1_coefficient", - "L1 regularization coeff", MS_NOT_AVAILABLE); + "L1 regularization coeff", ParameterProperties()); SG_ADD(&m_dropout_hidden, "dropout_hidden", - "Hidden neuron dropout probability", MS_NOT_AVAILABLE); + "Hidden neuron dropout probability", ParameterProperties()); SG_ADD(&m_dropout_input, "dropout_input", - "Input neuron dropout probability", MS_NOT_AVAILABLE); + "Input neuron dropout probability", ParameterProperties()); SG_ADD(&m_max_norm, "max_norm", - "Max Norm", MS_NOT_AVAILABLE); + "Max Norm", ParameterProperties()); SG_ADD(&m_total_num_parameters, "total_num_parameters", - "Total number of parameters", MS_NOT_AVAILABLE); + "Total number of parameters", ParameterProperties()); SG_ADD(&m_index_offsets, "index_offsets", - "Index Offsets", MS_NOT_AVAILABLE); + "Index Offsets", ParameterProperties()); SG_ADD(&m_params, "params", - "Parameters", MS_NOT_AVAILABLE); + "Parameters", ParameterProperties()); SG_ADD(&m_param_regularizable, "param_regularizable", - "Parameter Regularizable", MS_NOT_AVAILABLE); + "Parameter Regularizable", ParameterProperties()); SG_ADD( &m_layers, "layers", "DynamicObjectArray of NeuralNetwork objects", - MS_NOT_AVAILABLE); - SG_ADD(&m_auto_quick_initialize, "auto_quick_initialize", "auto_quick_initialize", MS_NOT_AVAILABLE); + ParameterProperties()); + SG_ADD(&m_auto_quick_initialize, "auto_quick_initialize", "auto_quick_initialize", ParameterProperties()); SG_ADD(&m_is_training, "is_training", - "is_training", MS_NOT_AVAILABLE); + "is_training", ParameterProperties()); SG_ADD( &m_sigma, "sigma", "sigma", - MS_NOT_AVAILABLE); + ParameterProperties()); } diff --git a/src/shogun/neuralnets/RBM.cpp b/src/shogun/neuralnets/RBM.cpp index f0a8db89b4e..0be99bc6a1e 100644 --- a/src/shogun/neuralnets/RBM.cpp +++ b/src/shogun/neuralnets/RBM.cpp @@ -623,45 +623,45 @@ void CRBM::init() m_num_params = 0; m_batch_size = 0; - SG_ADD(&cd_num_steps, "cd_num_steps", "Number of CD Steps", MS_NOT_AVAILABLE); - SG_ADD(&cd_persistent, "cd_persistent", "Whether to use PCD", MS_NOT_AVAILABLE); + SG_ADD(&cd_num_steps, "cd_num_steps", "Number of CD Steps", ParameterProperties()); + SG_ADD(&cd_persistent, "cd_persistent", "Whether to use PCD", ParameterProperties()); SG_ADD(&cd_sample_visible, "sample_visible", - "Whether to sample the visible units during (P)CD", MS_NOT_AVAILABLE); + "Whether to sample the visible units during (P)CD", ParameterProperties()); SG_ADD(&l2_coefficient, "l2_coefficient", - "L2 regularization coeff", MS_NOT_AVAILABLE); + "L2 regularization coeff", ParameterProperties()); SG_ADD(&l1_coefficient, "l1_coefficient", - "L1 regularization coeff", MS_NOT_AVAILABLE); + "L1 regularization coeff", ParameterProperties()); SG_ADD((machine_int_t*)&monitoring_method, "monitoring_method", - "Monitoring Method", MS_NOT_AVAILABLE); + "Monitoring Method", ParameterProperties()); SG_ADD(&monitoring_interval, "monitoring_interval", - "Monitoring Interval", MS_NOT_AVAILABLE); + "Monitoring Interval", ParameterProperties()); SG_ADD(&gd_mini_batch_size, "gd_mini_batch_size", - "Gradient Descent Mini-batch size", MS_NOT_AVAILABLE); + "Gradient Descent Mini-batch size", ParameterProperties()); 
SG_ADD(&max_num_epochs, "max_num_epochs", - "Max number of Epochs", MS_NOT_AVAILABLE); + "Max number of Epochs", ParameterProperties()); SG_ADD(&gd_learning_rate, "gd_learning_rate", - "Gradient descent learning rate", MS_NOT_AVAILABLE); + "Gradient descent learning rate", ParameterProperties()); SG_ADD(&gd_learning_rate_decay, "gd_learning_rate_decay", - "Gradient descent learning rate decay", MS_NOT_AVAILABLE); + "Gradient descent learning rate decay", ParameterProperties()); SG_ADD(&gd_momentum, "gd_momentum", - "Gradient Descent Momentum", MS_NOT_AVAILABLE); + "Gradient Descent Momentum", ParameterProperties()); SG_ADD(&m_num_hidden, "num_hidden", - "Number of Hidden Units", MS_NOT_AVAILABLE); + "Number of Hidden Units", ParameterProperties()); SG_ADD(&m_num_visible, "num_visible", - "Number of Visible Units", MS_NOT_AVAILABLE); + "Number of Visible Units", ParameterProperties()); SG_ADD(&m_num_visible_groups, "num_visible_groups", - "Number of Visible Unit Groups", MS_NOT_AVAILABLE); + "Number of Visible Unit Groups", ParameterProperties()); SG_ADD((CSGObject**)&m_visible_group_sizes, "visible_group_sizes", - "Sizes of Visible Unit Groups", MS_NOT_AVAILABLE); + "Sizes of Visible Unit Groups", ParameterProperties()); SG_ADD((CSGObject**)&m_visible_group_types, "visible_group_types", - "Types of Visible Unit Groups", MS_NOT_AVAILABLE); + "Types of Visible Unit Groups", ParameterProperties()); SG_ADD((CSGObject**)&m_visible_state_offsets, "visible_group_index_offsets", - "State Index offsets of Visible Unit Groups", MS_NOT_AVAILABLE); + "State Index offsets of Visible Unit Groups", ParameterProperties()); SG_ADD(&m_num_params, "num_params", - "Number of Parameters", MS_NOT_AVAILABLE); - SG_ADD(&m_params, "params", "Parameters", MS_NOT_AVAILABLE); + "Number of Parameters", ParameterProperties()); + SG_ADD(&m_params, "params", "Parameters", ParameterProperties()); } diff --git a/src/shogun/optimization/AdaDeltaUpdater.cpp b/src/shogun/optimization/AdaDeltaUpdater.cpp index 521f22eec55..2fdf5ce6c03 100644 --- a/src/shogun/optimization/AdaDeltaUpdater.cpp +++ b/src/shogun/optimization/AdaDeltaUpdater.cpp @@ -86,15 +86,15 @@ void AdaDeltaUpdater::init() m_gradient_delta_accuracy=SGVector(); SG_ADD(&m_gradient_accuracy, "AdaDeltaUpdater__m_gradient_accuracy", - "gradient_accuracy in AdaDeltaUpdater", MS_NOT_AVAILABLE); + "gradient_accuracy in AdaDeltaUpdater", ParameterProperties()); SG_ADD(&m_gradient_delta_accuracy, "AdaDeltaUpdater__m_gradient_delta_accuracy", - "gradient_delta_accuracy in AdaDeltaUpdater", MS_NOT_AVAILABLE); + "gradient_delta_accuracy in AdaDeltaUpdater", ParameterProperties()); SG_ADD(&m_epsilon, "AdaDeltaUpdater__m_epsilon", - "epsilon in AdaDeltaUpdater", MS_NOT_AVAILABLE); + "epsilon in AdaDeltaUpdater", ParameterProperties()); SG_ADD(&m_decay_factor, "AdaDeltaUpdater__m_decay_factor", - "decay_factor in AdaDeltaUpdater", MS_NOT_AVAILABLE); + "decay_factor in AdaDeltaUpdater", ParameterProperties()); SG_ADD(&m_build_in_learning_rate, "AdaDeltaUpdater__m_build_in_learning_rate", - "m_build_in_learning_rate in AdaDeltaUpdater", MS_NOT_AVAILABLE); + "m_build_in_learning_rate in AdaDeltaUpdater", ParameterProperties()); } float64_t AdaDeltaUpdater::get_negative_descend_direction(float64_t variable, diff --git a/src/shogun/optimization/AdaGradUpdater.cpp b/src/shogun/optimization/AdaGradUpdater.cpp index 46c8213955b..adaf0e4e0cc 100644 --- a/src/shogun/optimization/AdaGradUpdater.cpp +++ b/src/shogun/optimization/AdaGradUpdater.cpp @@ -71,11 +71,11 @@ void 
AdaGradUpdater::init() m_gradient_accuracy=SGVector(); SG_ADD(&m_epsilon, "AdaGradUpdater__m_epsilon", - "epsilon in AdaGradUpdater", MS_NOT_AVAILABLE); + "epsilon in AdaGradUpdater", ParameterProperties()); SG_ADD(&m_build_in_learning_rate, "AdaGradUpdater__m_build_in_learning_rate", - "m_build_in_learning_rate in AdaGradUpdater", MS_NOT_AVAILABLE); + "m_build_in_learning_rate in AdaGradUpdater", ParameterProperties()); SG_ADD(&m_gradient_accuracy, "AdaGradUpdater__m_gradient_accuracy", - "gradient_accuracy in AdaGradUpdater", MS_NOT_AVAILABLE); + "gradient_accuracy in AdaGradUpdater", ParameterProperties()); } float64_t AdaGradUpdater::get_negative_descend_direction(float64_t variable, diff --git a/src/shogun/optimization/AdamUpdater.cpp b/src/shogun/optimization/AdamUpdater.cpp index f0d78e0afba..2003cfa9501 100644 --- a/src/shogun/optimization/AdamUpdater.cpp +++ b/src/shogun/optimization/AdamUpdater.cpp @@ -97,21 +97,21 @@ void AdamUpdater::init() m_gradient_second_moment=SGVector(); SG_ADD(&m_decay_factor_first_moment, "AdamUpdater__m_decay_factor_first_moment", - "decay_factor_first_moment in AdamUpdater", MS_NOT_AVAILABLE); + "decay_factor_first_moment in AdamUpdater", ParameterProperties()); SG_ADD(&m_decay_factor_second_moment, "AdamUpdater__m_decay_factor_second_moment", - "decay_factor_second_moment in AdamUpdater", MS_NOT_AVAILABLE); + "decay_factor_second_moment in AdamUpdater", ParameterProperties()); SG_ADD(&m_gradient_first_moment, "AdamUpdater__m_gradient_first_moment", - "m_gradient_first_moment in AdamUpdater", MS_NOT_AVAILABLE); + "m_gradient_first_moment in AdamUpdater", ParameterProperties()); SG_ADD(&m_gradient_second_moment, "AdamUpdater__m_gradient_second_moment", - "m_gradient_second_moment in AdamUpdater", MS_NOT_AVAILABLE); + "m_gradient_second_moment in AdamUpdater", ParameterProperties()); SG_ADD(&m_epsilon, "AdamUpdater__m_epsilon", - "epsilon in AdamUpdater", MS_NOT_AVAILABLE); + "epsilon in AdamUpdater", ParameterProperties()); SG_ADD(&m_log_scale_pre_iteration, "AdamUpdater__m_log_scale_pre_iteration", - "log_scale_pre_iteration in AdamUpdater", MS_NOT_AVAILABLE); + "log_scale_pre_iteration in AdamUpdater", ParameterProperties()); SG_ADD(&m_log_learning_rate, "AdamUpdater__m_log_learning_rate", - "m_log_learning_rate in AdamUpdater", MS_NOT_AVAILABLE); + "m_log_learning_rate in AdamUpdater", ParameterProperties()); SG_ADD(&m_iteration_counter, "AdamUpdater__m_iteration_counter", - "m_iteration_counter in AdamUpdater", MS_NOT_AVAILABLE); + "m_iteration_counter in AdamUpdater", ParameterProperties()); } float64_t AdamUpdater::get_negative_descend_direction(float64_t variable, diff --git a/src/shogun/optimization/AdaptMomentumCorrection.cpp b/src/shogun/optimization/AdaptMomentumCorrection.cpp index 8d0095d611a..f756035d657 100644 --- a/src/shogun/optimization/AdaptMomentumCorrection.cpp +++ b/src/shogun/optimization/AdaptMomentumCorrection.cpp @@ -128,15 +128,15 @@ void AdaptMomentumCorrection::init() m_init_descend_rate=1.0; SG_ADD(&m_adapt_rate, "AdaptMomentumCorrection__m_adapt_rate", - "m_adapt_rate in AdaptMomentumCorrection", MS_NOT_AVAILABLE); + "m_adapt_rate in AdaptMomentumCorrection", ParameterProperties()); SG_ADD(&m_rate_min, "AdaptMomentumCorrection__m_rate_min", - "m_rate_min in AdaptMomentumCorrection", MS_NOT_AVAILABLE); + "m_rate_min in AdaptMomentumCorrection", ParameterProperties()); SG_ADD(&m_rate_max, "AdaptMomentumCorrection__m_rate_max", - "m_rate_max in AdaptMomentumCorrection", MS_NOT_AVAILABLE); + "m_rate_max in 
AdaptMomentumCorrection", ParameterProperties()); SG_ADD(&m_init_descend_rate, "AdaptMomentumCorrection__m_init_descend_rate", - "m_init_descend_rate in AdaptMomentumCorrection", MS_NOT_AVAILABLE); + "m_init_descend_rate in AdaptMomentumCorrection", ParameterProperties()); SG_ADD(&m_descend_rate, "AdaptMomentumCorrection__m_descend_rate", - "m_descend_rate in AdaptMomentumCorrection", MS_NOT_AVAILABLE); + "m_descend_rate in AdaptMomentumCorrection", ParameterProperties()); SG_ADD((CSGObject **)&m_momentum_correction, "AdaptMomentumCorrection__m_momentum_correction", - "m_momentum_correction in AdaptMomentumCorrection", MS_NOT_AVAILABLE); + "m_momentum_correction in AdaptMomentumCorrection", ParameterProperties()); } diff --git a/src/shogun/optimization/ConstLearningRate.cpp b/src/shogun/optimization/ConstLearningRate.cpp index 9f0b6fab470..3cf46f1143e 100644 --- a/src/shogun/optimization/ConstLearningRate.cpp +++ b/src/shogun/optimization/ConstLearningRate.cpp @@ -50,5 +50,5 @@ void ConstLearningRate::init() { m_const_learning_rate=0.0; SG_ADD(&m_const_learning_rate, "ConstLearningRate__m_const_learning_rate", - "learning rate in ConstLearningRate", MS_NOT_AVAILABLE); + "learning rate in ConstLearningRate", ParameterProperties()); } diff --git a/src/shogun/optimization/DescendCorrection.cpp b/src/shogun/optimization/DescendCorrection.cpp index 418818323e7..4408b841040 100644 --- a/src/shogun/optimization/DescendCorrection.cpp +++ b/src/shogun/optimization/DescendCorrection.cpp @@ -43,5 +43,5 @@ void DescendCorrection::init() { m_weight=0.0; SG_ADD(&m_weight, "DescendCorrection__m_weight", - "weight used in descend correction", MS_NOT_AVAILABLE); + "weight used in descend correction", ParameterProperties()); } diff --git a/src/shogun/optimization/DescendUpdaterWithCorrection.cpp b/src/shogun/optimization/DescendUpdaterWithCorrection.cpp index 8e9ca839177..286c6c02ed2 100644 --- a/src/shogun/optimization/DescendUpdaterWithCorrection.cpp +++ b/src/shogun/optimization/DescendUpdaterWithCorrection.cpp @@ -90,5 +90,5 @@ void DescendUpdaterWithCorrection::init() { m_correction=NULL; SG_ADD((CSGObject **)&m_correction, "DescendUpdaterWithCorrection__m_correction", - "correction in DescendUpdaterWithCorrection", MS_NOT_AVAILABLE); + "correction in DescendUpdaterWithCorrection", ParameterProperties()); } diff --git a/src/shogun/optimization/ElasticNetPenalty.cpp b/src/shogun/optimization/ElasticNetPenalty.cpp index 9bab61c614c..6ee3fa17676 100644 --- a/src/shogun/optimization/ElasticNetPenalty.cpp +++ b/src/shogun/optimization/ElasticNetPenalty.cpp @@ -85,9 +85,9 @@ void ElasticNetPenalty::init() m_l1_penalty=new L1Penalty(); m_l2_penalty=new L2Penalty(); SG_ADD(&m_l1_ratio, "ElasticNetPenalty__m_l1_ratio", - "l1_ratio in ElasticNetPenalty", MS_NOT_AVAILABLE); + "l1_ratio in ElasticNetPenalty", ParameterProperties()); SG_ADD((CSGObject **) &m_l1_penalty, "ElasticNetPenalty__m_l1_penalty", - "l1_penalty in ElasticNetPenalty", MS_NOT_AVAILABLE); + "l1_penalty in ElasticNetPenalty", ParameterProperties()); SG_ADD((CSGObject **) &m_l2_penalty, "ElasticNetPenalty__m_l2_penalty", - "l2_penalty in ElasticNetPenalty", MS_NOT_AVAILABLE); + "l2_penalty in ElasticNetPenalty", ParameterProperties()); } diff --git a/src/shogun/optimization/FirstOrderMinimizer.cpp b/src/shogun/optimization/FirstOrderMinimizer.cpp index 8551a229e45..4b404061467 100644 --- a/src/shogun/optimization/FirstOrderMinimizer.cpp +++ b/src/shogun/optimization/FirstOrderMinimizer.cpp @@ -98,9 +98,9 @@ void FirstOrderMinimizer::init() 
m_penalty_type=NULL; m_penalty_weight=0; SG_ADD(&m_penalty_weight, "FirstOrderMinimizer__m_penalty_weight", - "penalty_weight in FirstOrderMinimizer", MS_NOT_AVAILABLE); + "penalty_weight in FirstOrderMinimizer", ParameterProperties()); SG_ADD((CSGObject **)&m_penalty_type, "FirstOrderMinimizer__m_penalty_type", - "penalty_type in FirstOrderMinimizer", MS_NOT_AVAILABLE); + "penalty_type in FirstOrderMinimizer", ParameterProperties()); SG_ADD((CSGObject **)&m_fun, "FirstOrderMinimizer__m_fun", - "penalty_fun in FirstOrderMinimizer", MS_NOT_AVAILABLE); + "penalty_fun in FirstOrderMinimizer", ParameterProperties()); } diff --git a/src/shogun/optimization/FirstOrderStochasticMinimizer.cpp b/src/shogun/optimization/FirstOrderStochasticMinimizer.cpp index c0362ac6be5..37a8def9394 100644 --- a/src/shogun/optimization/FirstOrderStochasticMinimizer.cpp +++ b/src/shogun/optimization/FirstOrderStochasticMinimizer.cpp @@ -101,13 +101,13 @@ void FirstOrderStochasticMinimizer::init() m_iter_counter=0; SG_ADD((CSGObject **)&m_learning_rate, "FirstOrderMinimizer__m_learning_rate", - "learning_rate in FirstOrderStochasticMinimizer", MS_NOT_AVAILABLE); + "learning_rate in FirstOrderStochasticMinimizer", ParameterProperties()); SG_ADD((CSGObject **)&m_gradient_updater, "FirstOrderMinimizer__m_gradient_updater", - "gradient_updater in FirstOrderStochasticMinimizer", MS_NOT_AVAILABLE); + "gradient_updater in FirstOrderStochasticMinimizer", ParameterProperties()); SG_ADD(&m_num_passes, "FirstOrderMinimizer__m_num_passes", - "num_passes in FirstOrderStochasticMinimizer", MS_NOT_AVAILABLE); + "num_passes in FirstOrderStochasticMinimizer", ParameterProperties()); SG_ADD(&m_cur_passes, "FirstOrderMinimizer__m_cur_passes", - "cur_passes in FirstOrderStochasticMinimizer", MS_NOT_AVAILABLE); + "cur_passes in FirstOrderStochasticMinimizer", ParameterProperties()); SG_ADD(&m_iter_counter, "FirstOrderMinimizer__m_iter_counter", - "m_iter_counter in FirstOrderStochasticMinimizer", MS_NOT_AVAILABLE); + "m_iter_counter in FirstOrderStochasticMinimizer", ParameterProperties()); } diff --git a/src/shogun/optimization/InverseScalingLearningRate.cpp b/src/shogun/optimization/InverseScalingLearningRate.cpp index aa937501726..d8105e1cdef 100644 --- a/src/shogun/optimization/InverseScalingLearningRate.cpp +++ b/src/shogun/optimization/InverseScalingLearningRate.cpp @@ -72,11 +72,11 @@ void InverseScalingLearningRate::init() m_intercept=0.0; m_slope=1.0; SG_ADD(&m_slope, "InverseScalingLearningRate__m_slope", - "slope in InverseScalingLearningRate", MS_NOT_AVAILABLE); + "slope in InverseScalingLearningRate", ParameterProperties()); SG_ADD(&m_exponent, "InverseScalingLearningRate__m_exponent", - "exponent in InverseScalingLearningRate", MS_NOT_AVAILABLE); + "exponent in InverseScalingLearningRate", ParameterProperties()); SG_ADD(&m_intercept, "InverseScalingLearningRate__m_intercept", - "intercept in InverseScalingLearningRate", MS_NOT_AVAILABLE); + "intercept in InverseScalingLearningRate", ParameterProperties()); SG_ADD(&m_initial_learning_rate, "InverseScalingLearningRate__m_initial_learning_rate", - "initial_learning_rate in InverseScalingLearningRate", MS_NOT_AVAILABLE); + "initial_learning_rate in InverseScalingLearningRate", ParameterProperties()); } diff --git a/src/shogun/optimization/L1Penalty.cpp b/src/shogun/optimization/L1Penalty.cpp index 09abb19a535..cd5c4677f30 100644 --- a/src/shogun/optimization/L1Penalty.cpp +++ b/src/shogun/optimization/L1Penalty.cpp @@ -74,5 +74,5 @@ void L1Penalty::init() { 
m_rounding_epsilon=1e-8; SG_ADD(&m_rounding_epsilon, "L1Penalty__m_rounding_epsilon", - "rounding_epsilon in L1Penalty", MS_NOT_AVAILABLE); + "rounding_epsilon in L1Penalty", ParameterProperties()); } diff --git a/src/shogun/optimization/L1PenaltyForTG.cpp b/src/shogun/optimization/L1PenaltyForTG.cpp index 1dc5c4c9cf1..42d8aebc966 100644 --- a/src/shogun/optimization/L1PenaltyForTG.cpp +++ b/src/shogun/optimization/L1PenaltyForTG.cpp @@ -62,7 +62,7 @@ void L1PenaltyForTG::init() m_u=0; m_q=SGVector(); SG_ADD(&m_u, "L1PenaltyForTG__m_u", - "u in L1PenaltyForTG", MS_NOT_AVAILABLE); + "u in L1PenaltyForTG", ParameterProperties()); SG_ADD(&m_q, "L1PenaltyForTG__m_q", - "q in L1PenaltyForTG", MS_NOT_AVAILABLE); + "q in L1PenaltyForTG", ParameterProperties()); } diff --git a/src/shogun/optimization/MomentumCorrection.cpp b/src/shogun/optimization/MomentumCorrection.cpp index af78cf511fc..3558670625c 100644 --- a/src/shogun/optimization/MomentumCorrection.cpp +++ b/src/shogun/optimization/MomentumCorrection.cpp @@ -51,5 +51,5 @@ void MomentumCorrection::init() { m_previous_descend_direction=SGVector(); SG_ADD(&m_previous_descend_direction, "MomentumCorrection__m_previous_descend_direction", - "previous_descend_direction in MomentumCorrection", MS_NOT_AVAILABLE); + "previous_descend_direction in MomentumCorrection", ParameterProperties()); } diff --git a/src/shogun/optimization/PNormMappingFunction.cpp b/src/shogun/optimization/PNormMappingFunction.cpp index 8062f784715..94524b89e39 100644 --- a/src/shogun/optimization/PNormMappingFunction.cpp +++ b/src/shogun/optimization/PNormMappingFunction.cpp @@ -80,5 +80,5 @@ void PNormMappingFunction::init() { m_p=2.0; SG_ADD(&m_p, "PNormMappingFunction__m_p", - "p in PNormMappingFunction", MS_NOT_AVAILABLE); + "p in PNormMappingFunction", ParameterProperties()); } diff --git a/src/shogun/optimization/RmsPropUpdater.cpp b/src/shogun/optimization/RmsPropUpdater.cpp index 823165fc72d..c57025f7704 100644 --- a/src/shogun/optimization/RmsPropUpdater.cpp +++ b/src/shogun/optimization/RmsPropUpdater.cpp @@ -81,13 +81,13 @@ void RmsPropUpdater::init() m_gradient_accuracy=SGVector(); SG_ADD(&m_decay_factor, "RmsPropUpdater__m_decay_factor", - "decay_factor in RmsPropUpdater", MS_NOT_AVAILABLE); + "decay_factor in RmsPropUpdater", ParameterProperties()); SG_ADD(&m_epsilon, "RmsPropUpdater__m_epsilon", - "epsilon in RmsPropUpdater", MS_NOT_AVAILABLE); + "epsilon in RmsPropUpdater", ParameterProperties()); SG_ADD(&m_build_in_learning_rate, "RmsPropUpdater__m_build_in_learning_rate", - "build_in_learning_rate in RmsPropUpdater", MS_NOT_AVAILABLE); + "build_in_learning_rate in RmsPropUpdater", ParameterProperties()); SG_ADD(&m_gradient_accuracy, "RmsPropUpdater__m_gradient_accuracy", - "gradient_accuracy in RmsPropUpdater", MS_NOT_AVAILABLE); + "gradient_accuracy in RmsPropUpdater", ParameterProperties()); } float64_t RmsPropUpdater::get_negative_descend_direction(float64_t variable, diff --git a/src/shogun/optimization/SMDMinimizer.cpp b/src/shogun/optimization/SMDMinimizer.cpp index 126aaedeede..a8581ede7a3 100644 --- a/src/shogun/optimization/SMDMinimizer.cpp +++ b/src/shogun/optimization/SMDMinimizer.cpp @@ -80,7 +80,7 @@ void SMDMinimizer::init() { m_mapping_fun=NULL; SG_ADD((CSGObject **)&m_mapping_fun, "SMDMinimizer__m_mapping_fun", - "m_mapping_fun in SMDMinimizer", MS_NOT_AVAILABLE); + "m_mapping_fun in SMDMinimizer", ParameterProperties()); } void SMDMinimizer::set_mapping_function(MappingFunction* mapping_fun) diff --git 
a/src/shogun/optimization/SMIDASMinimizer.cpp b/src/shogun/optimization/SMIDASMinimizer.cpp index 3f8affcaac8..1168a9ee0d1 100644 --- a/src/shogun/optimization/SMIDASMinimizer.cpp +++ b/src/shogun/optimization/SMIDASMinimizer.cpp @@ -92,7 +92,7 @@ void SMIDASMinimizer::init() { m_dual_variable=SGVector(); SG_ADD(&m_dual_variable, "SMIDASMinimizer__m_dual_variable", - "dual_variable in SMIDASMinimizer", MS_NOT_AVAILABLE); + "dual_variable in SMIDASMinimizer", ParameterProperties()); } void SMIDASMinimizer::init_minimization() diff --git a/src/shogun/optimization/SVRGMinimizer.cpp b/src/shogun/optimization/SVRGMinimizer.cpp index dbaf6d8d781..12bcc42aeaa 100644 --- a/src/shogun/optimization/SVRGMinimizer.cpp +++ b/src/shogun/optimization/SVRGMinimizer.cpp @@ -57,13 +57,13 @@ void SVRGMinimizer::init() m_previous_variable=SGVector(); SG_ADD(&m_num_sgd_passes, "SVRGMinimizer__m_num_sgd_passes", - "num_sgd_passes in SVRGMinimizer", MS_NOT_AVAILABLE); + "num_sgd_passes in SVRGMinimizer", ParameterProperties()); SG_ADD(&m_svrg_interval, "SVRGMinimizer__m_svrg_interval", - "svrg_interval in SVRGMinimizer", MS_NOT_AVAILABLE); + "svrg_interval in SVRGMinimizer", ParameterProperties()); SG_ADD(&m_average_gradient, "SVRGMinimizer__m_average_gradient", - "average_gradient in SVRGMinimizer", MS_NOT_AVAILABLE); + "average_gradient in SVRGMinimizer", ParameterProperties()); SG_ADD(&m_previous_variable, "SVRGMinimizer__m_previous_variable", - "previous_variable in SVRGMinimizer", MS_NOT_AVAILABLE); + "previous_variable in SVRGMinimizer", ParameterProperties()); } void SVRGMinimizer::init_minimization() diff --git a/src/shogun/optimization/lbfgs/LBFGSMinimizer.cpp b/src/shogun/optimization/lbfgs/LBFGSMinimizer.cpp index 82947ce6a61..2d618e5c6b5 100644 --- a/src/shogun/optimization/lbfgs/LBFGSMinimizer.cpp +++ b/src/shogun/optimization/lbfgs/LBFGSMinimizer.cpp @@ -62,39 +62,39 @@ void CLBFGSMinimizer::init() m_min_step=1e-6; m_xtol=1e-6; SG_ADD(&m_linesearch_id, "CLBFGSMinimizer__m_linesearch_id", - "linesearch_id in CLBFGSMinimizer", MS_NOT_AVAILABLE); + "linesearch_id in CLBFGSMinimizer", ParameterProperties()); SG_ADD(&m_m, "CLBFGSMinimizer__m_m", - "m in CLBFGSMinimizer", MS_NOT_AVAILABLE); + "m in CLBFGSMinimizer", ParameterProperties()); SG_ADD(&m_max_linesearch, "CLBFGSMinimizer__m_max_linesearch", - "max_linesearch in CLBFGSMinimizer", MS_NOT_AVAILABLE); + "max_linesearch in CLBFGSMinimizer", ParameterProperties()); SG_ADD(&m_max_iterations, "CLBFGSMinimizer__m_max_iterations", - "max_iterations in CLBFGSMinimizer", MS_NOT_AVAILABLE); + "max_iterations in CLBFGSMinimizer", ParameterProperties()); SG_ADD(&m_delta, "CLBFGSMinimizer__m_delta", - "delta in CLBFGSMinimizer", MS_NOT_AVAILABLE); + "delta in CLBFGSMinimizer", ParameterProperties()); SG_ADD(&m_past, "CLBFGSMinimizer__m_past", - "past in CLBFGSMinimizer", MS_NOT_AVAILABLE); + "past in CLBFGSMinimizer", ParameterProperties()); SG_ADD(&m_epsilon, "CLBFGSMinimizer__m_epsilon", - "epsilon in CLBFGSMinimizer", MS_NOT_AVAILABLE); + "epsilon in CLBFGSMinimizer", ParameterProperties()); SG_ADD(&m_min_step, "CLBFGSMinimizer__m_min_step", - "min_step in CLBFGSMinimizer", MS_NOT_AVAILABLE); + "min_step in CLBFGSMinimizer", ParameterProperties()); SG_ADD(&m_max_step, "CLBFGSMinimizer__m_max_step", - "max_step in CLBFGSMinimizer", MS_NOT_AVAILABLE); + "max_step in CLBFGSMinimizer", ParameterProperties()); SG_ADD(&m_ftol, "CLBFGSMinimizer__m_ftol", - "ftol in CLBFGSMinimizer", MS_NOT_AVAILABLE); + "ftol in CLBFGSMinimizer", ParameterProperties()); 
SG_ADD(&m_wolfe, "CLBFGSMinimizer__m_wolfe", - "wolfe in CLBFGSMinimizer", MS_NOT_AVAILABLE); + "wolfe in CLBFGSMinimizer", ParameterProperties()); SG_ADD(&m_gtol, "CLBFGSMinimizer__m_gtol", - "gtol in CLBFGSMinimizer", MS_NOT_AVAILABLE); + "gtol in CLBFGSMinimizer", ParameterProperties()); SG_ADD(&m_xtol, "CLBFGSMinimizer__m_xtol", - "xtol in CLBFGSMinimizer", MS_NOT_AVAILABLE); + "xtol in CLBFGSMinimizer", ParameterProperties()); SG_ADD(&m_orthantwise_c, "CLBFGSMinimizer__m_orthantwise_c", - "orthantwise_c in CLBFGSMinimizer", MS_NOT_AVAILABLE); + "orthantwise_c in CLBFGSMinimizer", ParameterProperties()); SG_ADD(&m_orthantwise_start, "CLBFGSMinimizer__m_orthantwise_start", - "orthantwise_start in CLBFGSMinimizer", MS_NOT_AVAILABLE); + "orthantwise_start in CLBFGSMinimizer", ParameterProperties()); SG_ADD(&m_orthantwise_end, "CLBFGSMinimizer__m_orthantwise_end", - "orthantwise_end in CLBFGSMinimizer", MS_NOT_AVAILABLE); + "orthantwise_end in CLBFGSMinimizer", ParameterProperties()); SG_ADD(&m_target_variable, "CLBFGSMinimizer__m_target_variable", - "m_target_variable in CLBFGSMinimizer", MS_NOT_AVAILABLE); + "m_target_variable in CLBFGSMinimizer", ParameterProperties()); } void CLBFGSMinimizer::set_lbfgs_parameters( diff --git a/src/shogun/preprocessor/DependenceMaximization.cpp b/src/shogun/preprocessor/DependenceMaximization.cpp index 75844ca464b..e51bf65c13e 100644 --- a/src/shogun/preprocessor/DependenceMaximization.cpp +++ b/src/shogun/preprocessor/DependenceMaximization.cpp @@ -46,9 +46,9 @@ CDependenceMaximization::CDependenceMaximization() void CDependenceMaximization::init() { SG_ADD((CSGObject**)&m_estimator, "estimator", - "the estimator for computing measures", MS_NOT_AVAILABLE); + "the estimator for computing measures", ParameterProperties()); SG_ADD((CSGObject**)&m_labels_feats, "labels_feats", - "the features based on labels", MS_NOT_AVAILABLE); + "the features based on labels", ParameterProperties()); m_estimator=NULL; m_labels_feats=NULL; diff --git a/src/shogun/preprocessor/FeatureSelection.cpp b/src/shogun/preprocessor/FeatureSelection.cpp index 0b41bf49564..79236eac856 100644 --- a/src/shogun/preprocessor/FeatureSelection.cpp +++ b/src/shogun/preprocessor/FeatureSelection.cpp @@ -49,17 +49,17 @@ template void CFeatureSelection::initialize_parameters() { SG_ADD(&m_target_dim, "target_dim", "target dimension", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD((machine_int_t*)&m_algorithm, "algorithm", - "the feature selectiona algorithm", MS_NOT_AVAILABLE); + "the feature selectiona algorithm", ParameterProperties()); SG_ADD((machine_int_t*)&m_policy, "policy", "feature removal policy", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_num_remove, "num_remove", "number or percentage of features to " - "be removed", MS_NOT_AVAILABLE); + "be removed", ParameterProperties()); SG_ADD((CSGObject**)&m_labels, "labels", - "the class labels for the features", MS_NOT_AVAILABLE); + "the class labels for the features", ParameterProperties()); SG_ADD((CSGObject**)&m_subset, "subset", - "indices of selected features", MS_NOT_AVAILABLE); + "indices of selected features", ParameterProperties()); m_target_dim=0; m_algorithm=BACKWARD_ELIMINATION; diff --git a/src/shogun/preprocessor/FisherLDA.cpp b/src/shogun/preprocessor/FisherLDA.cpp index de23e95da7e..16edf8cb40b 100644 --- a/src/shogun/preprocessor/FisherLDA.cpp +++ b/src/shogun/preprocessor/FisherLDA.cpp @@ -70,21 +70,21 @@ void CFisherLDA::initialize_parameters() m_bdc_svd = true; SG_ADD( &m_method, "FLDA_method", 
"method for performing FLDA", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD( &m_num_dim, "final_dimensions", "dimensions to be retained", - MS_NOT_AVAILABLE); - SG_ADD(&m_gamma, "m_gamma", "Regularization parameter", MS_NOT_AVAILABLE); - SG_ADD(&m_bdc_svd, "m_bdc_svd", "Use BDC-SVD algorithm", MS_NOT_AVAILABLE); + ParameterProperties()); + SG_ADD(&m_gamma, "m_gamma", "Regularization parameter", ParameterProperties()); + SG_ADD(&m_bdc_svd, "m_bdc_svd", "Use BDC-SVD algorithm", ParameterProperties()); SG_ADD( &m_transformation_matrix, "transformation_matrix", "Transformation" " matrix (Eigenvectors of covariance matrix).", - MS_NOT_AVAILABLE); - SG_ADD(&m_mean_vector, "mean_vector", "Mean Vector.", MS_NOT_AVAILABLE); + ParameterProperties()); + SG_ADD(&m_mean_vector, "mean_vector", "Mean Vector.", ParameterProperties()); SG_ADD( &m_eigenvalues_vector, "eigenvalues_vector", "Vector with Eigenvalues.", - MS_NOT_AVAILABLE); + ParameterProperties()); } CFisherLDA::~CFisherLDA() diff --git a/src/shogun/preprocessor/HomogeneousKernelMap.cpp b/src/shogun/preprocessor/HomogeneousKernelMap.cpp index c77d0038572..f800e9cc19c 100644 --- a/src/shogun/preprocessor/HomogeneousKernelMap.cpp +++ b/src/shogun/preprocessor/HomogeneousKernelMap.cpp @@ -346,14 +346,14 @@ SGVector CHomogeneousKernelMap::apply_to_vector(const SGVectoradd_vector(&randomcoeff_additive,&cur_dim_feature_space,"randomcoeff_additive"); watch_param( @@ -112,16 +112,16 @@ CRandomFourierGaussPreproc::CRandomFourierGaussPreproc( if(m_parameters) { SG_ADD(&dim_input_space, "dim_input_space", - "Dimensionality of the input space.", MS_NOT_AVAILABLE); + "Dimensionality of the input space.", ParameterProperties()); SG_ADD(&cur_dim_input_space, "cur_dim_input_space", - "Dimensionality of the input space.", MS_NOT_AVAILABLE); + "Dimensionality of the input space.", ParameterProperties()); SG_ADD(&dim_feature_space, "dim_feature_space", - "Dimensionality of the feature space.", MS_NOT_AVAILABLE); + "Dimensionality of the feature space.", ParameterProperties()); SG_ADD(&cur_dim_feature_space, "cur_dim_feature_space", - "Dimensionality of the feature space.", MS_NOT_AVAILABLE); + "Dimensionality of the feature space.", ParameterProperties()); - SG_ADD(&kernelwidth, "kernelwidth", "Kernel width.", MS_AVAILABLE); - SG_ADD(&cur_kernelwidth, "cur_kernelwidth", "Kernel width.", MS_AVAILABLE); + SG_ADD(&kernelwidth, "kernelwidth", "Kernel width.", ParameterProperties::HYPER); + SG_ADD(&cur_kernelwidth, "cur_kernelwidth", "Kernel width.", ParameterProperties::HYPER); m_parameters->add_vector(&randomcoeff_additive,&cur_dim_feature_space,"randomcoeff_additive"); watch_param( diff --git a/src/shogun/preprocessor/RescaleFeatures.cpp b/src/shogun/preprocessor/RescaleFeatures.cpp index 73592f7e66a..f862c2b9c99 100644 --- a/src/shogun/preprocessor/RescaleFeatures.cpp +++ b/src/shogun/preprocessor/RescaleFeatures.cpp @@ -104,6 +104,6 @@ SGVector CRescaleFeatures::apply_to_feature_vector(SGVector CKRRNystrom::subsample_indices() diff --git a/src/shogun/regression/KernelRidgeRegression.cpp b/src/shogun/regression/KernelRidgeRegression.cpp index 03c5cdf998b..61957fcdabb 100644 --- a/src/shogun/regression/KernelRidgeRegression.cpp +++ b/src/shogun/regression/KernelRidgeRegression.cpp @@ -35,7 +35,7 @@ void CKernelRidgeRegression::init() { set_tau(1e-6); set_epsilon(0.0001); - SG_ADD(&m_tau, "tau", "Regularization parameter", MS_AVAILABLE); + SG_ADD(&m_tau, "tau", "Regularization parameter", ParameterProperties::HYPER); } bool 
CKernelRidgeRegression::solve_krr_system() diff --git a/src/shogun/regression/LeastAngleRegression.cpp b/src/shogun/regression/LeastAngleRegression.cpp index fef8b462eb6..eca957deb14 100644 --- a/src/shogun/regression/LeastAngleRegression.cpp +++ b/src/shogun/regression/LeastAngleRegression.cpp @@ -43,10 +43,10 @@ void CLeastAngleRegression::init() m_max_nonz = 0; m_max_l1_norm = 0; m_epsilon = CMath::MACHINE_EPSILON; - SG_ADD(&m_epsilon, "epsilon", "Epsilon for early stopping", MS_AVAILABLE); - SG_ADD(&m_max_nonz, "max_nonz", "Max number of non-zero variables", MS_AVAILABLE); - SG_ADD(&m_max_l1_norm, "max_l1_norm", "Max l1-norm of estimator", MS_AVAILABLE); - SG_ADD(&m_lasso, "lasso", "Max l1-norm of estimator", MS_AVAILABLE); + SG_ADD(&m_epsilon, "epsilon", "Epsilon for early stopping", ParameterProperties::HYPER); + SG_ADD(&m_max_nonz, "max_nonz", "Max number of non-zero variables", ParameterProperties::HYPER); + SG_ADD(&m_max_l1_norm, "max_l1_norm", "Max l1-norm of estimator", ParameterProperties::HYPER); + SG_ADD(&m_lasso, "lasso", "Max l1-norm of estimator", ParameterProperties::HYPER); } CLeastAngleRegression::~CLeastAngleRegression() diff --git a/src/shogun/regression/LinearRidgeRegression.cpp b/src/shogun/regression/LinearRidgeRegression.cpp index 94718632b8b..f72548f06fa 100644 --- a/src/shogun/regression/LinearRidgeRegression.cpp +++ b/src/shogun/regression/LinearRidgeRegression.cpp @@ -35,10 +35,10 @@ void CLinearRidgeRegression::init() set_tau(1e-6); m_use_bias = true; - SG_ADD(&m_tau, "tau", "Regularization parameter", MS_AVAILABLE); + SG_ADD(&m_tau, "tau", "Regularization parameter", ParameterProperties::HYPER); SG_ADD( &m_use_bias, "use_bias", "Whether or not to fit an offset term", - MS_NOT_AVAILABLE); + ParameterProperties()); } template diff --git a/src/shogun/regression/svr/LibLinearRegression.cpp b/src/shogun/regression/svr/LibLinearRegression.cpp index cc9d2015d5d..14472d13a26 100644 --- a/src/shogun/regression/svr/LibLinearRegression.cpp +++ b/src/shogun/regression/svr/LibLinearRegression.cpp @@ -43,11 +43,11 @@ void CLibLinearRegression::init_defaults() void CLibLinearRegression::register_parameters() { - SG_ADD(&m_C, "m_C", "regularization constant",MS_AVAILABLE); - SG_ADD(&m_epsilon, "m_epsilon", "tolerance epsilon",MS_NOT_AVAILABLE); - SG_ADD(&m_epsilon, "m_tube_epsilon", "svr tube epsilon",MS_AVAILABLE); - SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations",MS_NOT_AVAILABLE); - SG_ADD(&m_use_bias, "m_use_bias", "indicates whether bias should be used",MS_NOT_AVAILABLE); + SG_ADD(&m_C, "m_C", "regularization constant",ParameterProperties::HYPER); + SG_ADD(&m_epsilon, "m_epsilon", "tolerance epsilon",ParameterProperties()); + SG_ADD(&m_epsilon, "m_tube_epsilon", "svr tube epsilon",ParameterProperties::HYPER); + SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations",ParameterProperties()); + SG_ADD(&m_use_bias, "m_use_bias", "indicates whether bias should be used",ParameterProperties()); } CLibLinearRegression::~CLibLinearRegression() diff --git a/src/shogun/regression/svr/LibSVR.cpp b/src/shogun/regression/svr/LibSVR.cpp index dac07057409..8baf5498c84 100644 --- a/src/shogun/regression/svr/LibSVR.cpp +++ b/src/shogun/regression/svr/LibSVR.cpp @@ -53,7 +53,7 @@ CLibSVR::~CLibSVR() void CLibSVR::register_params() { - SG_ADD((machine_int_t*) &solver_type, "libsvr_solver_type", "LibSVR Solver type", MS_NOT_AVAILABLE); + SG_ADD((machine_int_t*) &solver_type, "libsvr_solver_type", "LibSVR Solver type", ParameterProperties()); } EMachineType 
CLibSVR::get_classifier_type() diff --git a/src/shogun/structure/CCSOSVM.cpp b/src/shogun/structure/CCSOSVM.cpp index 7ca5bb7a5a3..36144d88733 100644 --- a/src/shogun/structure/CCSOSVM.cpp +++ b/src/shogun/structure/CCSOSVM.cpp @@ -705,15 +705,15 @@ void CCCSOSVM::init() SG_ERROR("Error while initializing mosek env: %d\n", r) #endif - SG_ADD(&m_C, "m_C", "C", MS_NOT_AVAILABLE); - SG_ADD(&m_eps, "m_eps", "Epsilon", MS_NOT_AVAILABLE); - SG_ADD(&m_alpha_thrld, "m_alpha_thrld", "Alpha threshold", MS_NOT_AVAILABLE); - SG_ADD(&m_cleanup_check, "m_cleanup_check", "Cleanup after given number of iterations", MS_NOT_AVAILABLE); - SG_ADD(&m_idle_iter, "m_idle_iter", "Maximum number of idle iteration", MS_NOT_AVAILABLE); - SG_ADD(&m_max_iter, "m_max_iter", "Maximum number of iterations", MS_NOT_AVAILABLE); - SG_ADD(&m_max_rho, "m_max_rho", "Max rho", MS_NOT_AVAILABLE); - SG_ADD(&m_primal_obj, "m_primal_obj", "Primal objective value", MS_NOT_AVAILABLE); - SG_ADD((machine_int_t*) &m_qp_type, "m_qp_type", "QP Solver Type", MS_NOT_AVAILABLE); + SG_ADD(&m_C, "m_C", "C", ParameterProperties()); + SG_ADD(&m_eps, "m_eps", "Epsilon", ParameterProperties()); + SG_ADD(&m_alpha_thrld, "m_alpha_thrld", "Alpha threshold", ParameterProperties()); + SG_ADD(&m_cleanup_check, "m_cleanup_check", "Cleanup after given number of iterations", ParameterProperties()); + SG_ADD(&m_idle_iter, "m_idle_iter", "Maximum number of idle iteration", ParameterProperties()); + SG_ADD(&m_max_iter, "m_max_iter", "Maximum number of iterations", ParameterProperties()); + SG_ADD(&m_max_rho, "m_max_rho", "Max rho", ParameterProperties()); + SG_ADD(&m_primal_obj, "m_primal_obj", "Primal objective value", ParameterProperties()); + SG_ADD((machine_int_t*) &m_qp_type, "m_qp_type", "QP Solver Type", ParameterProperties()); } EMachineType CCCSOSVM::get_classifier_type() diff --git a/src/shogun/structure/DisjointSet.cpp b/src/shogun/structure/DisjointSet.cpp index 1b693af4e2d..9dcbb4f95d8 100644 --- a/src/shogun/structure/DisjointSet.cpp +++ b/src/shogun/structure/DisjointSet.cpp @@ -28,10 +28,10 @@ CDisjointSet::CDisjointSet(int32_t num_elements) void CDisjointSet::init() { - SG_ADD(&m_num_elements, "num_elements", "Number of elements", MS_NOT_AVAILABLE); - SG_ADD(&m_parent, "parent", "Parent pointers", MS_NOT_AVAILABLE); - SG_ADD(&m_rank, "rank", "Rank of each element", MS_NOT_AVAILABLE); - SG_ADD(&m_is_connected, "is_connected", "Whether disjoint sets have been linked", MS_NOT_AVAILABLE); + SG_ADD(&m_num_elements, "num_elements", "Number of elements", ParameterProperties()); + SG_ADD(&m_parent, "parent", "Parent pointers", ParameterProperties()); + SG_ADD(&m_rank, "rank", "Rank of each element", ParameterProperties()); + SG_ADD(&m_is_connected, "is_connected", "Whether disjoint sets have been linked", ParameterProperties()); m_is_connected = false; m_num_elements = -1; diff --git a/src/shogun/structure/FWSOSVM.cpp b/src/shogun/structure/FWSOSVM.cpp index 4831b34ee0c..0eb01e2bcb3 100644 --- a/src/shogun/structure/FWSOSVM.cpp +++ b/src/shogun/structure/FWSOSVM.cpp @@ -38,11 +38,11 @@ CFWSOSVM::CFWSOSVM( void CFWSOSVM::init() { - SG_ADD(&m_lambda, "lambda", "Regularization constant", MS_NOT_AVAILABLE); - SG_ADD(&m_num_iter, "num_iter", "Number of iterations", MS_NOT_AVAILABLE); - SG_ADD(&m_do_line_search, "do_line_search", "Do line search", MS_NOT_AVAILABLE); - SG_ADD(&m_gap_threshold, "gap_threshold", "Gap threshold", MS_NOT_AVAILABLE); - SG_ADD(&m_ell, "ell", "Average loss", MS_NOT_AVAILABLE); + SG_ADD(&m_lambda, "lambda", "Regularization 
constant", ParameterProperties()); + SG_ADD(&m_num_iter, "num_iter", "Number of iterations", ParameterProperties()); + SG_ADD(&m_do_line_search, "do_line_search", "Do line search", ParameterProperties()); + SG_ADD(&m_gap_threshold, "gap_threshold", "Gap threshold", ParameterProperties()); + SG_ADD(&m_ell, "ell", "Average loss", ParameterProperties()); m_lambda = 1.0; m_num_iter = 50; diff --git a/src/shogun/structure/Factor.cpp b/src/shogun/structure/Factor.cpp index 80b04fafa14..84903c436be 100644 --- a/src/shogun/structure/Factor.cpp +++ b/src/shogun/structure/Factor.cpp @@ -238,13 +238,13 @@ void CFactor::compute_gradients( void CFactor::init() { - SG_ADD((CSGObject**)&m_factor_type, "type_name", "Factor type name", MS_NOT_AVAILABLE); - SG_ADD(&m_var_index, "var_index", "Factor variable index", MS_NOT_AVAILABLE); - SG_ADD(&m_energies, "energies", "Factor energies", MS_NOT_AVAILABLE); - SG_ADD((CSGObject**)&m_data_source, "data_source", "Factor data source", MS_NOT_AVAILABLE); - SG_ADD(&m_data, "data", "Factor data", MS_NOT_AVAILABLE); - SG_ADD(&m_data_sparse, "data_sparse", "Sparse factor data", MS_NOT_AVAILABLE); - SG_ADD(&m_is_data_dep, "is_data_dep", "Factor is data dependent or not", MS_NOT_AVAILABLE); + SG_ADD((CSGObject**)&m_factor_type, "type_name", "Factor type name", ParameterProperties()); + SG_ADD(&m_var_index, "var_index", "Factor variable index", ParameterProperties()); + SG_ADD(&m_energies, "energies", "Factor energies", ParameterProperties()); + SG_ADD((CSGObject**)&m_data_source, "data_source", "Factor data source", ParameterProperties()); + SG_ADD(&m_data, "data", "Factor data", ParameterProperties()); + SG_ADD(&m_data_sparse, "data_sparse", "Sparse factor data", ParameterProperties()); + SG_ADD(&m_is_data_dep, "is_data_dep", "Factor is data dependent or not", ParameterProperties()); m_factor_type=NULL; m_data_source=NULL; @@ -302,7 +302,7 @@ void CFactorDataSource::set_data_sparse(SGSparseVectorEntry* sparse, void CFactorDataSource::init() { - SG_ADD(&m_dense, "dense", "Shared data", MS_NOT_AVAILABLE); - SG_ADD(&m_sparse, "sparse", "Shared sparse data", MS_NOT_AVAILABLE); + SG_ADD(&m_dense, "dense", "Shared data", ParameterProperties()); + SG_ADD(&m_sparse, "sparse", "Shared sparse data", ParameterProperties()); } diff --git a/src/shogun/structure/FactorGraph.cpp b/src/shogun/structure/FactorGraph.cpp index bfc77753f03..d6387ab5d8d 100644 --- a/src/shogun/structure/FactorGraph.cpp +++ b/src/shogun/structure/FactorGraph.cpp @@ -57,12 +57,12 @@ CFactorGraph::~CFactorGraph() void CFactorGraph::register_parameters() { - SG_ADD(&m_cardinalities, "cardinalities", "Cardinalities", MS_NOT_AVAILABLE); - SG_ADD((CSGObject**)&m_factors, "factors", "Factors", MS_NOT_AVAILABLE); - SG_ADD((CSGObject**)&m_datasources, "datasources", "Factor data sources", MS_NOT_AVAILABLE); - SG_ADD((CSGObject**)&m_dset, "dset", "Disjoint set", MS_NOT_AVAILABLE); - SG_ADD(&m_has_cycle, "has_cycle", "Whether has circle in graph", MS_NOT_AVAILABLE); - SG_ADD(&m_num_edges, "num_edges", "Number of edges", MS_NOT_AVAILABLE); + SG_ADD(&m_cardinalities, "cardinalities", "Cardinalities", ParameterProperties()); + SG_ADD((CSGObject**)&m_factors, "factors", "Factors", ParameterProperties()); + SG_ADD((CSGObject**)&m_datasources, "datasources", "Factor data sources", ParameterProperties()); + SG_ADD((CSGObject**)&m_dset, "dset", "Disjoint set", ParameterProperties()); + SG_ADD(&m_has_cycle, "has_cycle", "Whether has circle in graph", ParameterProperties()); + SG_ADD(&m_num_edges, "num_edges", "Number of edges", 
ParameterProperties()); } void CFactorGraph::init() diff --git a/src/shogun/structure/FactorGraphModel.cpp b/src/shogun/structure/FactorGraphModel.cpp index 9a453d744ab..dd10e7bf86b 100644 --- a/src/shogun/structure/FactorGraphModel.cpp +++ b/src/shogun/structure/FactorGraphModel.cpp @@ -37,9 +37,9 @@ CFactorGraphModel::~CFactorGraphModel() void CFactorGraphModel::init() { - SG_ADD((CSGObject**)&m_factor_types, "factor_types", "Array of factor types", MS_NOT_AVAILABLE); - SG_ADD(&m_w_cache, "w_cache", "Cache of global parameters", MS_NOT_AVAILABLE); - SG_ADD(&m_w_map, "w_map", "Parameter mapping", MS_NOT_AVAILABLE); + SG_ADD((CSGObject**)&m_factor_types, "factor_types", "Array of factor types", ParameterProperties()); + SG_ADD(&m_w_cache, "w_cache", "Cache of global parameters", ParameterProperties()); + SG_ADD(&m_w_map, "w_map", "Parameter mapping", ParameterProperties()); m_inf_type = TREE_MAX_PROD; m_factor_types = new CDynamicObjectArray(); diff --git a/src/shogun/structure/FactorType.cpp b/src/shogun/structure/FactorType.cpp index d3e4cc70e50..cd241a2bcac 100644 --- a/src/shogun/structure/FactorType.cpp +++ b/src/shogun/structure/FactorType.cpp @@ -48,12 +48,12 @@ CFactorType::~CFactorType() void CFactorType::init() { - SG_ADD(&m_type_id, "type_id", "Factor type name", MS_NOT_AVAILABLE); - SG_ADD(&m_cards, "cards", "Cardinalities", MS_NOT_AVAILABLE); - SG_ADD(&m_cumprod_cards, "cumprod_cards", "Cumulative product of cardinalities", MS_NOT_AVAILABLE); - SG_ADD(&m_num_assignments, "num_assignments", "Number of variable configurations", MS_NOT_AVAILABLE); - SG_ADD(&m_w, "w", "Factor parameters", MS_NOT_AVAILABLE); - SG_ADD(&m_data_size, "data_size", "Size of data vector", MS_NOT_AVAILABLE); + SG_ADD(&m_type_id, "type_id", "Factor type name", ParameterProperties()); + SG_ADD(&m_cards, "cards", "Cardinalities", ParameterProperties()); + SG_ADD(&m_cumprod_cards, "cumprod_cards", "Cumulative product of cardinalities", ParameterProperties()); + SG_ADD(&m_num_assignments, "num_assignments", "Number of variable configurations", ParameterProperties()); + SG_ADD(&m_w, "w", "Factor parameters", ParameterProperties()); + SG_ADD(&m_data_size, "data_size", "Size of data vector", ParameterProperties()); m_type_id = 0; m_data_size = 0; diff --git a/src/shogun/structure/HMSVMModel.cpp b/src/shogun/structure/HMSVMModel.cpp index 351813b7ecf..76a793d266f 100644 --- a/src/shogun/structure/HMSVMModel.cpp +++ b/src/shogun/structure/HMSVMModel.cpp @@ -484,14 +484,14 @@ bool CHMSVMModel::check_training_setup() const void CHMSVMModel::init() { - SG_ADD((CSGObject**) &m_state_model, "m_state_model", "The state model", MS_NOT_AVAILABLE); + SG_ADD((CSGObject**) &m_state_model, "m_state_model", "The state model", ParameterProperties()); SG_ADD(&m_transmission_weights, "m_transmission_weights", - "Transmission weights used in Viterbi", MS_NOT_AVAILABLE); + "Transmission weights used in Viterbi", ParameterProperties()); SG_ADD(&m_emission_weights, "m_emission_weights", - "Emission weights used in Viterbi", MS_NOT_AVAILABLE); + "Emission weights used in Viterbi", ParameterProperties()); SG_ADD(&m_num_plif_nodes, "m_num_plif_nodes", "The number of points per PLiF", - MS_NOT_AVAILABLE); // FIXME It would actually make sense to do MS for this parameter - SG_ADD(&m_use_plifs, "m_use_plifs", "Whether to use plifs", MS_NOT_AVAILABLE); + ParameterProperties()); // FIXME It would actually make sense to do MS for this parameter + SG_ADD(&m_use_plifs, "m_use_plifs", "Whether to use plifs", ParameterProperties()); m_num_obs = 0; 
m_num_aux = 0; diff --git a/src/shogun/structure/HashedMultilabelModel.cpp b/src/shogun/structure/HashedMultilabelModel.cpp index c3cace68267..84e8cc46c40 100644 --- a/src/shogun/structure/HashedMultilabelModel.cpp +++ b/src/shogun/structure/HashedMultilabelModel.cpp @@ -40,14 +40,14 @@ CStructuredLabels * CHashedMultilabelModel::structured_labels_factory( void CHashedMultilabelModel::init(int32_t dim) { SG_ADD(&m_false_positive, "false_positive", "Misclassification cost for false positive", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_false_negative, "false_negative", "Misclassification cost for false negative", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_num_classes, "num_classes", "Number of (binary) class assignment per label", - MS_NOT_AVAILABLE); - SG_ADD(&m_dim, "dim", "New joint feature space dimension", MS_NOT_AVAILABLE); + ParameterProperties()); + SG_ADD(&m_dim, "dim", "New joint feature space dimension", ParameterProperties()); SG_ADD(&m_seeds, "seeds", "Vector of seeds used for hashing", - MS_NOT_AVAILABLE); + ParameterProperties()); m_false_positive = 1; m_false_negative = 1; diff --git a/src/shogun/structure/HierarchicalMultilabelModel.cpp b/src/shogun/structure/HierarchicalMultilabelModel.cpp index e9731774b08..973420d0a18 100644 --- a/src/shogun/structure/HierarchicalMultilabelModel.cpp +++ b/src/shogun/structure/HierarchicalMultilabelModel.cpp @@ -43,12 +43,12 @@ void CHierarchicalMultilabelModel::init(SGVector taxonomy, bool leaf_nodes_mandatory) { SG_ADD(&m_num_classes, "num_classes", "Number of (binary) class assignment per label", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_taxonomy, "taxonomy", "Taxonomy of the hierarchy of the labels", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_leaf_nodes_mandatory, "leaf_nodes_mandatory", "Whether internal nodes belong" - "to output class or not", MS_NOT_AVAILABLE); - SG_ADD(&m_root, "root", "Node-id of the ROOT element", MS_NOT_AVAILABLE); + "to output class or not", ParameterProperties()); + SG_ADD(&m_root, "root", "Node-id of the ROOT element", ParameterProperties()); m_leaf_nodes_mandatory = leaf_nodes_mandatory; m_num_classes = 0; diff --git a/src/shogun/structure/MAPInference.cpp b/src/shogun/structure/MAPInference.cpp index f9f1687ca74..5854d2d4c2a 100644 --- a/src/shogun/structure/MAPInference.cpp +++ b/src/shogun/structure/MAPInference.cpp @@ -69,10 +69,10 @@ CMAPInference::~CMAPInference() void CMAPInference::init() { - SG_ADD((CSGObject**)&m_fg, "fg", "factor graph", MS_NOT_AVAILABLE); - SG_ADD((CSGObject**)&m_outputs, "outputs", "Structured outputs", MS_NOT_AVAILABLE); - SG_ADD((CSGObject**)&m_infer_impl, "infer_impl", "Inference implementation", MS_NOT_AVAILABLE); - SG_ADD(&m_energy, "energy", "Minimized energy", MS_NOT_AVAILABLE); + SG_ADD((CSGObject**)&m_fg, "fg", "factor graph", ParameterProperties()); + SG_ADD((CSGObject**)&m_outputs, "outputs", "Structured outputs", ParameterProperties()); + SG_ADD((CSGObject**)&m_infer_impl, "infer_impl", "Inference implementation", ParameterProperties()); + SG_ADD(&m_energy, "energy", "Minimized energy", ParameterProperties()); m_outputs = NULL; m_infer_impl = NULL; @@ -126,7 +126,7 @@ CMAPInferImpl::~CMAPInferImpl() void CMAPInferImpl::register_parameters() { SG_ADD((CSGObject**)&m_fg, "fg", - "Factor graph pointer", MS_NOT_AVAILABLE); + "Factor graph pointer", ParameterProperties()); m_fg = NULL; } diff --git a/src/shogun/structure/MulticlassModel.cpp b/src/shogun/structure/MulticlassModel.cpp index c12c611d314..8e9497dcaef 
100644 --- a/src/shogun/structure/MulticlassModel.cpp +++ b/src/shogun/structure/MulticlassModel.cpp @@ -164,7 +164,7 @@ void CMulticlassModel::init_primal_opt( void CMulticlassModel::init() { SG_ADD(&m_num_classes, "m_num_classes", "The number of classes", - MS_NOT_AVAILABLE); + ParameterProperties()); m_num_classes = 0; } diff --git a/src/shogun/structure/MulticlassSOLabels.cpp b/src/shogun/structure/MulticlassSOLabels.cpp index 373a6100745..40ac1527235 100644 --- a/src/shogun/structure/MulticlassSOLabels.cpp +++ b/src/shogun/structure/MulticlassSOLabels.cpp @@ -98,12 +98,12 @@ int32_t CMulticlassSOLabels::get_num_labels() const void CMulticlassSOLabels::init() { SG_ADD(&m_num_classes, "m_num_classes", "The number of classes", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_num_labels_set, "m_num_labels_set", "The number of assigned labels", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD( &m_labels_vector, "labels_vector", "The labels vector", - MS_NOT_AVAILABLE); + ParameterProperties()); m_num_classes = 0; m_num_labels_set = 0; diff --git a/src/shogun/structure/MultilabelCLRModel.cpp b/src/shogun/structure/MultilabelCLRModel.cpp index 1d744b4b26c..910f58c58c3 100644 --- a/src/shogun/structure/MultilabelCLRModel.cpp +++ b/src/shogun/structure/MultilabelCLRModel.cpp @@ -39,7 +39,7 @@ CMultilabelCLRModel::~CMultilabelCLRModel() void CMultilabelCLRModel::init() { SG_ADD(&m_num_classes, "num_classes", "Number of (binary) class assignment per label", - MS_NOT_AVAILABLE); + ParameterProperties()); m_num_classes = 0; } diff --git a/src/shogun/structure/MultilabelModel.cpp b/src/shogun/structure/MultilabelModel.cpp index 160e0a6b05c..b150035b200 100644 --- a/src/shogun/structure/MultilabelModel.cpp +++ b/src/shogun/structure/MultilabelModel.cpp @@ -37,11 +37,11 @@ CStructuredLabels * CMultilabelModel::structured_labels_factory(int32_t num_labe void CMultilabelModel::init() { SG_ADD(&m_false_positive, "false_positive", "Misclassification cost for false positive", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_false_negative, "false_negative", "Misclassification cost for false negative", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_num_classes, "num_classes", "Number of (binary) class assignment per label", - MS_NOT_AVAILABLE); + ParameterProperties()); m_false_positive = 1; m_false_negative = 1; m_num_classes = 0; diff --git a/src/shogun/structure/MultilabelSOLabels.cpp b/src/shogun/structure/MultilabelSOLabels.cpp index 86b64e7bf3e..a6c7da37527 100644 --- a/src/shogun/structure/MultilabelSOLabels.cpp +++ b/src/shogun/structure/MultilabelSOLabels.cpp @@ -42,9 +42,9 @@ CMultilabelSOLabels::CMultilabelSOLabels(CMultilabelLabels * multilabel_labels) void CMultilabelSOLabels::init() { SG_ADD((CSGObject **)&m_multilabel_labels, "multilabel_labels", "multilabel labels object", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_last_set_label, "last_set_label", "index of the last label added using add_label() method", - MS_NOT_AVAILABLE); + ParameterProperties()); m_last_set_label = 0; } diff --git a/src/shogun/structure/PrimalMosekSOSVM.cpp b/src/shogun/structure/PrimalMosekSOSVM.cpp index 84eaeed3634..00e9665b594 100644 --- a/src/shogun/structure/PrimalMosekSOSVM.cpp +++ b/src/shogun/structure/PrimalMosekSOSVM.cpp @@ -34,12 +34,12 @@ CPrimalMosekSOSVM::CPrimalMosekSOSVM( void CPrimalMosekSOSVM::init() { - SG_ADD(&m_slacks, "slacks", "Slacks vector", MS_NOT_AVAILABLE); + SG_ADD(&m_slacks, "slacks", "Slacks vector", ParameterProperties()); //FIXME model 
selection available for SO machines - SG_ADD(&m_regularization, "regularization", "Regularization constant", MS_NOT_AVAILABLE); - SG_ADD(&m_epsilon, "epsilon", "Violation tolerance", MS_NOT_AVAILABLE); - SG_ADD(&m_lb, "lb", "Lower bounds", MS_NOT_AVAILABLE); - SG_ADD(&m_ub, "ub", "Upper bounds", MS_NOT_AVAILABLE); + SG_ADD(&m_regularization, "regularization", "Regularization constant", ParameterProperties()); + SG_ADD(&m_epsilon, "epsilon", "Violation tolerance", ParameterProperties()); + SG_ADD(&m_lb, "lb", "Lower bounds", ParameterProperties()); + SG_ADD(&m_ub, "ub", "Upper bounds", ParameterProperties()); m_regularization = 1.0; m_epsilon = 0.0; diff --git a/src/shogun/structure/SOSVMHelper.cpp b/src/shogun/structure/SOSVMHelper.cpp index 824eced1ae2..b7ab7ff3555 100644 --- a/src/shogun/structure/SOSVMHelper.cpp +++ b/src/shogun/structure/SOSVMHelper.cpp @@ -29,13 +29,13 @@ CSOSVMHelper::~CSOSVMHelper() void CSOSVMHelper::init() { - SG_ADD(&m_primal, "primal", "History of primal values", MS_NOT_AVAILABLE); - SG_ADD(&m_dual, "dual", "History of dual values", MS_NOT_AVAILABLE); - SG_ADD(&m_duality_gap, "duality_gap", "History of duality gaps", MS_NOT_AVAILABLE); - SG_ADD(&m_eff_pass, "eff_pass", "Effective passes", MS_NOT_AVAILABLE); - SG_ADD(&m_train_error, "train_error", "History of training errors", MS_NOT_AVAILABLE); - SG_ADD(&m_tracker, "tracker", "Tracker of training progress", MS_NOT_AVAILABLE); - SG_ADD(&m_bufsize, "bufsize", "Buffer size", MS_NOT_AVAILABLE); + SG_ADD(&m_primal, "primal", "History of primal values", ParameterProperties()); + SG_ADD(&m_dual, "dual", "History of dual values", ParameterProperties()); + SG_ADD(&m_duality_gap, "duality_gap", "History of duality gaps", ParameterProperties()); + SG_ADD(&m_eff_pass, "eff_pass", "Effective passes", ParameterProperties()); + SG_ADD(&m_train_error, "train_error", "History of training errors", ParameterProperties()); + SG_ADD(&m_tracker, "tracker", "Tracker of training progress", ParameterProperties()); + SG_ADD(&m_bufsize, "bufsize", "Buffer size", ParameterProperties()); m_tracker = 0; m_bufsize = 1000; diff --git a/src/shogun/structure/SequenceLabels.cpp b/src/shogun/structure/SequenceLabels.cpp index 006f3527975..af8c7f21418 100644 --- a/src/shogun/structure/SequenceLabels.cpp +++ b/src/shogun/structure/SequenceLabels.cpp @@ -46,5 +46,5 @@ void CSequenceLabels::add_vector_label(SGVector< int32_t > label) void CSequenceLabels::init() { - SG_ADD(&m_num_states, "m_num_states", "Number of states", MS_NOT_AVAILABLE); + SG_ADD(&m_num_states, "m_num_states", "Number of states", ParameterProperties()); } diff --git a/src/shogun/structure/StateModel.cpp b/src/shogun/structure/StateModel.cpp index fb9f200df2c..9b4f1c96992 100644 --- a/src/shogun/structure/StateModel.cpp +++ b/src/shogun/structure/StateModel.cpp @@ -29,13 +29,13 @@ int32_t CStateModel::get_num_transmission_params() const void CStateModel::init() { - SG_ADD(&m_num_states, "m_num_states", "The number of states", MS_NOT_AVAILABLE); + SG_ADD(&m_num_states, "m_num_states", "The number of states", ParameterProperties()); SG_ADD(&m_num_transmission_params, "m_num_tranmission_params", - "The number of tranmission parameters", MS_NOT_AVAILABLE); + "The number of tranmission parameters", ParameterProperties()); SG_ADD(&m_state_loss_mat, "m_state_loss_mat", "The state loss matrix", - MS_NOT_AVAILABLE); - SG_ADD(&m_p, "m_p", "The distribution of start states", MS_NOT_AVAILABLE); - SG_ADD(&m_q, "m_q", "The distribution of stop states", MS_NOT_AVAILABLE); + 
ParameterProperties()); + SG_ADD(&m_p, "m_p", "The distribution of start states", ParameterProperties()); + SG_ADD(&m_q, "m_q", "The distribution of stop states", ParameterProperties()); m_num_states = 0; m_num_transmission_params = 0; diff --git a/src/shogun/structure/StochasticSOSVM.cpp b/src/shogun/structure/StochasticSOSVM.cpp index 50eb93d05bc..dd4669689d9 100644 --- a/src/shogun/structure/StochasticSOSVM.cpp +++ b/src/shogun/structure/StochasticSOSVM.cpp @@ -39,11 +39,11 @@ CStochasticSOSVM::CStochasticSOSVM( void CStochasticSOSVM::init() { - SG_ADD(&m_lambda, "lambda", "Regularization constant", MS_NOT_AVAILABLE); - SG_ADD(&m_num_iter, "num_iter", "Number of iterations", MS_NOT_AVAILABLE); - SG_ADD(&m_do_weighted_averaging, "do_weighted_averaging", "Do weighted averaging", MS_NOT_AVAILABLE); - SG_ADD(&m_debug_multiplier, "debug_multiplier", "Debug multiplier", MS_NOT_AVAILABLE); - SG_ADD(&m_rand_seed, "rand_seed", "Random seed", MS_NOT_AVAILABLE); + SG_ADD(&m_lambda, "lambda", "Regularization constant", ParameterProperties()); + SG_ADD(&m_num_iter, "num_iter", "Number of iterations", ParameterProperties()); + SG_ADD(&m_do_weighted_averaging, "do_weighted_averaging", "Do weighted averaging", ParameterProperties()); + SG_ADD(&m_debug_multiplier, "debug_multiplier", "Debug multiplier", ParameterProperties()); + SG_ADD(&m_rand_seed, "rand_seed", "Random seed", ParameterProperties()); m_lambda = 1.0; m_num_iter = 50; diff --git a/src/shogun/structure/StructuredModel.cpp b/src/shogun/structure/StructuredModel.cpp index 3c9a48d6fbb..c6008f8c4e3 100644 --- a/src/shogun/structure/StructuredModel.cpp +++ b/src/shogun/structure/StructuredModel.cpp @@ -163,9 +163,9 @@ float64_t CStructuredModel::delta_loss(CStructuredData* y1, CStructuredData* y2) void CStructuredModel::init() { SG_ADD((CSGObject**) &m_labels, "m_labels", "Structured labels", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD((CSGObject**) &m_features, "m_features", "Feature vectors", - MS_NOT_AVAILABLE); + ParameterProperties()); m_features = NULL; m_labels = NULL; diff --git a/src/shogun/transfer/domain_adaptation/DomainAdaptationMulticlassLibLinear.cpp b/src/shogun/transfer/domain_adaptation/DomainAdaptationMulticlassLibLinear.cpp index 4bed492ff87..21c2b899281 100644 --- a/src/shogun/transfer/domain_adaptation/DomainAdaptationMulticlassLibLinear.cpp +++ b/src/shogun/transfer/domain_adaptation/DomainAdaptationMulticlassLibLinear.cpp @@ -73,11 +73,11 @@ void CDomainAdaptationMulticlassLibLinear::set_source_machine( void CDomainAdaptationMulticlassLibLinear::register_parameters() { SG_ADD((CSGObject**)&m_source_machine, "source_machine", "source domain machine", - MS_NOT_AVAILABLE); + ParameterProperties()); SG_ADD(&m_train_factor, "train_factor", "factor of target domain regularization", - MS_AVAILABLE); + ParameterProperties::HYPER); SG_ADD(&m_source_bias, "source_bias", "bias to source domain", - MS_AVAILABLE); + ParameterProperties::HYPER); } CDomainAdaptationMulticlassLibLinear::~CDomainAdaptationMulticlassLibLinear() diff --git a/src/shogun/transfer/domain_adaptation/DomainAdaptationSVM.cpp b/src/shogun/transfer/domain_adaptation/DomainAdaptationSVM.cpp index 97ef0a4c916..34a938fc876 100644 --- a/src/shogun/transfer/domain_adaptation/DomainAdaptationSVM.cpp +++ b/src/shogun/transfer/domain_adaptation/DomainAdaptationSVM.cpp @@ -180,10 +180,10 @@ void CDomainAdaptationSVM::init() train_factor = 1.0; SG_ADD((CSGObject**) &presvm, "presvm", "SVM to regularize against.", - MS_NOT_AVAILABLE); - SG_ADD(&B, "B", 
"regularization parameter B.", MS_AVAILABLE); + ParameterProperties()); + SG_ADD(&B, "B", "regularization parameter B.", ParameterProperties::HYPER); SG_ADD(&train_factor, "train_factor", - "flag to switch off regularization in training.", MS_AVAILABLE); + "flag to switch off regularization in training.", ParameterProperties::HYPER); } #endif //USE_SVMLIGHT diff --git a/src/shogun/transfer/domain_adaptation/DomainAdaptationSVMLinear.cpp b/src/shogun/transfer/domain_adaptation/DomainAdaptationSVMLinear.cpp index c315a22fd5a..013b3847738 100644 --- a/src/shogun/transfer/domain_adaptation/DomainAdaptationSVMLinear.cpp +++ b/src/shogun/transfer/domain_adaptation/DomainAdaptationSVMLinear.cpp @@ -63,9 +63,9 @@ void CDomainAdaptationSVMLinear::init(CLinearMachine* pre_svm, float64_t B_param is_presvm_sane(); // serialization code - SG_ADD(&presvm, "presvm", "SVM to regularize against", MS_AVAILABLE); - SG_ADD(&B, "B", "Regularization strenth B.", MS_AVAILABLE); - SG_ADD(&train_factor, "train_factor", "train_factor", MS_AVAILABLE); + SG_ADD(&presvm, "presvm", "SVM to regularize against", ParameterProperties::HYPER); + SG_ADD(&B, "B", "Regularization strenth B.", ParameterProperties::HYPER); + SG_ADD(&train_factor, "train_factor", "train_factor", ParameterProperties::HYPER); } diff --git a/src/shogun/transfer/multitask/LibLinearMTL.cpp b/src/shogun/transfer/multitask/LibLinearMTL.cpp index 251b9e97f01..ef2db2fb76a 100644 --- a/src/shogun/transfer/multitask/LibLinearMTL.cpp +++ b/src/shogun/transfer/multitask/LibLinearMTL.cpp @@ -51,13 +51,13 @@ void CLibLinearMTL::init() set_max_iterations(); epsilon=1e-5; - SG_ADD(&C1, "C1", "C Cost constant 1.", MS_AVAILABLE); - SG_ADD(&C2, "C2", "C Cost constant 2.", MS_AVAILABLE); + SG_ADD(&C1, "C1", "C Cost constant 1.", ParameterProperties::HYPER); + SG_ADD(&C2, "C2", "C Cost constant 2.", ParameterProperties::HYPER); SG_ADD(&use_bias, "use_bias", "Indicates if bias is used.", - MS_NOT_AVAILABLE); - SG_ADD(&epsilon, "epsilon", "Convergence precision.", MS_NOT_AVAILABLE); + ParameterProperties()); + SG_ADD(&epsilon, "epsilon", "Convergence precision.", ParameterProperties()); SG_ADD(&max_iterations, "max_iterations", "Max number of iterations.", - MS_NOT_AVAILABLE); + ParameterProperties()); } diff --git a/src/shogun/transfer/multitask/MultitaskKernelPlifNormalizer.h b/src/shogun/transfer/multitask/MultitaskKernelPlifNormalizer.h index a222f1b6446..8c4a52b49e7 100644 --- a/src/shogun/transfer/multitask/MultitaskKernelPlifNormalizer.h +++ b/src/shogun/transfer/multitask/MultitaskKernelPlifNormalizer.h @@ -326,8 +326,8 @@ class CMultitaskKernelPlifNormalizer: public CMultitaskKernelMklNormalizer */ virtual void register_params() { - SG_ADD(&num_tasks, "num_tasks", "the number of tasks", MS_NOT_AVAILABLE); - SG_ADD(&num_betas, "num_betas", "the number of weights", MS_NOT_AVAILABLE); + SG_ADD(&num_tasks, "num_tasks", "the number of tasks", ParameterProperties()); + SG_ADD(&num_betas, "num_betas", "the number of weights", ParameterProperties()); m_parameters->add_vector((SGString**)&distance_matrix, &num_tasksqr, "distance_matrix", "distance between tasks"); m_parameters->add_vector((SGString**)&similarity_matrix, &num_tasksqr, "similarity_matrix", "similarity between tasks"); diff --git a/src/shogun/transfer/multitask/Task.cpp b/src/shogun/transfer/multitask/Task.cpp index 5c7c51f130d..d79cabcffbf 100644 --- a/src/shogun/transfer/multitask/Task.cpp +++ b/src/shogun/transfer/multitask/Task.cpp @@ -44,9 +44,9 @@ void CTask::init() m_subtasks = new CList(true); 
SG_REF(m_subtasks); - SG_ADD((CSGObject**)&m_subtasks,"subtasks","subtasks of given task", MS_NOT_AVAILABLE); - SG_ADD(&m_indices,"indices","indices of task", MS_NOT_AVAILABLE); - SG_ADD(&m_weight,"weight","weight of task", MS_NOT_AVAILABLE); + SG_ADD((CSGObject**)&m_subtasks,"subtasks","subtasks of given task", ParameterProperties()); + SG_ADD(&m_indices,"indices","indices of task", ParameterProperties()); + SG_ADD(&m_weight,"weight","weight of task", ParameterProperties()); } CTask::~CTask() diff --git a/src/shogun/transformer/Transformer.cpp b/src/shogun/transformer/Transformer.cpp index 51f9a17129c..8210eb1b8de 100644 --- a/src/shogun/transformer/Transformer.cpp +++ b/src/shogun/transformer/Transformer.cpp @@ -11,7 +11,7 @@ namespace shogun SG_ADD( &m_fitted, "is_fitted", "Whether the transformer has been fitted.", - MS_NOT_AVAILABLE); + ParameterProperties()); } void CTransformer::assert_fitted() const diff --git a/tests/unit/optimization/NLOPTMinimizer_unittest.cc b/tests/unit/optimization/NLOPTMinimizer_unittest.cc index 89f8c256701..64cf423473f 100644 --- a/tests/unit/optimization/NLOPTMinimizer_unittest.cc +++ b/tests/unit/optimization/NLOPTMinimizer_unittest.cc @@ -51,10 +51,10 @@ void CPiecewiseQuadraticObject2::init() m_truth_x=SGVector(); SG_ADD(&m_init_x, "init_x", "init_x", - MS_AVAILABLE, GRADIENT_AVAILABLE); + ParameterProperties::HYPER | ParameterProperties::GRADIENT); SG_ADD(&m_truth_x, "truth_x", "truth_x", - MS_NOT_AVAILABLE); + ParameterProperties()); } void CPiecewiseQuadraticObject2::set_init_x(SGVector init_x) diff --git a/tests/unit/optimization/StochasticMinimizers_unittest.cc b/tests/unit/optimization/StochasticMinimizers_unittest.cc index 65845bd9b5b..636356d34fb 100644 --- a/tests/unit/optimization/StochasticMinimizers_unittest.cc +++ b/tests/unit/optimization/StochasticMinimizers_unittest.cc @@ -68,9 +68,9 @@ void CRegressionExample::init() m_w=SGVector(); m_y=SGVector(); m_x=SGMatrix(); - SG_ADD(&m_w, "r_w", "r_w",MS_AVAILABLE, GRADIENT_AVAILABLE); - SG_ADD(&m_x, "r_x", "r_x",MS_NOT_AVAILABLE); - SG_ADD(&m_y, "r_y", "r_y",MS_NOT_AVAILABLE); + SG_ADD(&m_w, "r_w", "r_w",ParameterProperties::HYPER | ParameterProperties::GRADIENT); + SG_ADD(&m_x, "r_x", "r_x",ParameterProperties()); + SG_ADD(&m_y, "r_y", "r_y",ParameterProperties()); } float64_t CRegressionExample::get_cost() diff --git a/tests/unit/optimization/lbfgs/LBFGSMinimizer_unittest.cc b/tests/unit/optimization/lbfgs/LBFGSMinimizer_unittest.cc index cffb105414d..0cb48848349 100644 --- a/tests/unit/optimization/lbfgs/LBFGSMinimizer_unittest.cc +++ b/tests/unit/optimization/lbfgs/LBFGSMinimizer_unittest.cc @@ -123,10 +123,10 @@ void CPiecewiseQuadraticObject::init() m_truth_x=SGVector(); SG_ADD(&m_init_x, "init_x", "init_x", - MS_AVAILABLE, GRADIENT_AVAILABLE); + ParameterProperties::HYPER | ParameterProperties::GRADIENT); SG_ADD(&m_truth_x, "truth_x", "truth_x", - MS_NOT_AVAILABLE); + ParameterProperties()); } void CPiecewiseQuadraticObject::set_init_x(SGVector init_x) From d304e15304ca48a5951527d1eef366809d1124a2 Mon Sep 17 00:00:00 2001 From: Gil Date: Fri, 16 Nov 2018 16:33:12 +0000 Subject: [PATCH 3/6] added SG_ADD3 for default parameters [ci skip] also refactored the SG_ADD calls respectively --- src/shogun/base/SGObject.h | 34 ++++++++++-- src/shogun/classifier/LDA.cpp | 4 +- src/shogun/classifier/PluginEstimate.cpp | 14 ++--- src/shogun/classifier/mkl/MKL.cpp | 22 ++++---- src/shogun/classifier/svm/LibLinear.cpp | 11 ++-- src/shogun/classifier/svm/LibSVM.cpp | 2 +- 
src/shogun/classifier/svm/OnlineLibLinear.cpp | 2 +- src/shogun/classifier/svm/OnlineSVMSGD.cpp | 14 ++--- src/shogun/classifier/svm/SGDQN.cpp | 4 +- src/shogun/classifier/svm/SVM.cpp | 15 +++--- src/shogun/classifier/svm/SVMOcas.cpp | 10 ++-- src/shogun/clustering/GMM.cpp | 3 +- src/shogun/clustering/KMeansBase.cpp | 4 +- src/shogun/clustering/KMeansMiniBatch.cpp | 3 +- src/shogun/converter/FactorAnalysis.cpp | 4 +- src/shogun/converter/HashedDocConverter.cpp | 14 ++--- .../converter/LocallyLinearEmbedding.cpp | 2 +- src/shogun/converter/ManifoldSculpting.cpp | 4 +- .../converter/MultidimensionalScaling.cpp | 5 +- .../StochasticProximityEmbedding.cpp | 11 ++-- ...DistributedStochasticNeighborEmbedding.cpp | 4 +- src/shogun/converter/ica/FastICA.cpp | 2 +- src/shogun/converter/ica/ICAConverter.cpp | 6 +-- src/shogun/converter/ica/Jade.cpp | 2 +- .../distance/AttenuatedEuclideanDistance.cpp | 3 +- src/shogun/distance/CustomDistance.cpp | 2 +- .../distance/CustomMahalanobisDistance.cpp | 2 +- src/shogun/distance/Distance.cpp | 4 +- src/shogun/distance/EuclideanDistance.cpp | 6 +-- src/shogun/distance/HammingWordDistance.cpp | 3 +- src/shogun/distance/KernelDistance.cpp | 2 +- src/shogun/distance/MahalanobisDistance.cpp | 6 +-- src/shogun/distributions/Distribution.cpp | 2 +- src/shogun/distributions/PositionalPWM.cpp | 10 ++-- .../classical/GaussianDistribution.cpp | 4 +- .../classical/ProbabilityDistribution.cpp | 3 +- src/shogun/evaluation/CrossValidation.cpp | 2 +- src/shogun/evaluation/CrossValidation.h | 5 +- src/shogun/evaluation/MachineEvaluation.cpp | 19 +++---- src/shogun/evaluation/SigmoidCalibration.cpp | 15 +++--- src/shogun/evaluation/SplittingStrategy.cpp | 11 ++-- src/shogun/features/Alphabet.cpp | 7 ++- src/shogun/features/CombinedDotFeatures.cpp | 8 ++- src/shogun/features/CombinedFeatures.cpp | 4 +- src/shogun/features/DenseFeatures.cpp | 6 +-- .../features/DenseSubSamplesFeatures.cpp | 4 +- src/shogun/features/DotFeatures.cpp | 2 +- src/shogun/features/DummyFeatures.cpp | 3 +- src/shogun/features/FKFeatures.cpp | 2 +- src/shogun/features/FactorGraphFeatures.cpp | 3 +- src/shogun/features/Features.cpp | 10 ++-- src/shogun/features/IndexFeatures.cpp | 2 +- src/shogun/features/LBPPyrDotFeatures.cpp | 8 +-- src/shogun/features/LatentFeatures.cpp | 3 +- src/shogun/features/MatrixFeatures.cpp | 7 ++- src/shogun/features/PolyFeatures.cpp | 10 ++-- .../features/RandomFourierDotFeatures.cpp | 7 ++- .../RandomKitchenSinksDotFeatures.cpp | 6 +-- src/shogun/features/SparsePolyFeatures.cpp | 14 +++-- src/shogun/features/StringFeatures.cpp | 16 +++--- src/shogun/features/Subset.cpp | 3 +- src/shogun/features/SubsetStack.cpp | 4 +- src/shogun/features/TOPFeatures.cpp | 6 +-- .../features/hashed/HashedDenseFeatures.cpp | 11 ++-- .../features/hashed/HashedDocDotFeatures.cpp | 17 +++--- .../features/hashed/HashedSparseFeatures.cpp | 11 ++-- .../StreamingHashedDenseFeatures.cpp | 8 ++- .../StreamingHashedDocDotFeatures.cpp | 7 ++- .../StreamingHashedSparseFeatures.cpp | 8 ++- .../generators/GaussianBlobsDataGenerator.cpp | 15 ++---- .../generators/MeanShiftDataGenerator.cpp | 8 ++- src/shogun/io/Serializable.h | 2 +- src/shogun/io/UAIFile.cpp | 22 ++++---- src/shogun/kernel/CombinedKernel.cpp | 7 ++- src/shogun/kernel/CustomKernel.cpp | 13 +++-- src/shogun/kernel/ExponentialARDKernel.cpp | 8 +-- src/shogun/kernel/GaussianARDKernel.cpp | 4 +- src/shogun/kernel/Kernel.cpp | 22 ++++---- src/shogun/kernel/PeriodicKernel.cpp | 4 +- src/shogun/kernel/PolyKernel.cpp | 3 +- 
src/shogun/kernel/ProductKernel.cpp | 3 +- src/shogun/kernel/ShiftInvariantKernel.cpp | 4 +- .../kernel/normalizer/DiceKernelNormalizer.h | 2 +- .../kernel/normalizer/KernelNormalizer.h | 3 +- .../normalizer/ScatterKernelNormalizer.h | 4 +- .../normalizer/SqrtDiagKernelNormalizer.h | 2 +- .../kernel/string/CommWordStringKernel.cpp | 4 +- .../string/HistogramWordStringKernel.cpp | 6 +-- .../string/LocalAlignmentStringKernel.cpp | 3 +- .../kernel/string/OligoStringKernel.cpp | 2 +- .../kernel/string/PolyMatchStringKernel.cpp | 3 +- .../string/PolyMatchWordStringKernel.cpp | 3 +- .../string/RegulatoryModulesStringKernel.cpp | 8 +-- src/shogun/kernel/string/SNPStringKernel.cpp | 2 +- .../string/SpectrumMismatchRBFKernel.cpp | 15 ++---- .../kernel/string/SpectrumRBFKernel.cpp | 10 ++-- .../WeightedDegreePositionStringKernel.cpp | 4 +- .../string/WeightedDegreeStringKernel.cpp | 4 +- src/shogun/labels/DenseLabels.cpp | 2 +- src/shogun/labels/Labels.cpp | 5 +- src/shogun/labels/LatentLabels.cpp | 4 +- src/shogun/labels/MultilabelLabels.cpp | 6 +-- src/shogun/labels/StructuredLabels.cpp | 2 +- src/shogun/latent/LatentModel.cpp | 10 ++-- src/shogun/lib/DelimiterTokenizer.cpp | 5 +- src/shogun/lib/DynamicArray.h | 14 +++-- src/shogun/lib/DynamicObjectArray.h | 14 +++-- src/shogun/lib/List.h | 4 +- src/shogun/lib/NGramTokenizer.cpp | 6 +-- src/shogun/lib/Tokenizer.cpp | 2 +- src/shogun/machine/BaggingMachine.cpp | 11 ++-- src/shogun/machine/BaseMulticlassMachine.cpp | 2 +- src/shogun/machine/IterativeMachine.h | 5 +- src/shogun/machine/KernelMachine.cpp | 17 +++--- src/shogun/machine/LinearLatentMachine.cpp | 8 +-- src/shogun/machine/LinearMachine.cpp | 7 ++- src/shogun/machine/LinearMulticlassMachine.h | 6 +-- .../machine/LinearStructuredOutputMachine.cpp | 2 +- src/shogun/machine/Machine.cpp | 10 ++-- src/shogun/machine/MulticlassMachine.cpp | 4 +- src/shogun/machine/OnlineLinearMachine.cpp | 6 +-- .../machine/StructuredOutputMachine.cpp | 8 +-- .../gp/DualVariationalGaussianLikelihood.cpp | 9 ++-- src/shogun/machine/gp/Inference.cpp | 14 ++--- .../machine/gp/KLCholeskyInferenceMethod.cpp | 6 +-- .../gp/KLCovarianceInferenceMethod.cpp | 18 +++---- .../machine/gp/KLDiagonalInferenceMethod.cpp | 3 +- .../machine/gp/KLDualInferenceMethod.cpp | 19 +++---- src/shogun/machine/gp/KLInference.cpp | 25 ++++----- .../machine/gp/KLLowerTriangularInference.cpp | 15 ++---- src/shogun/machine/gp/LaplaceInference.cpp | 8 +-- .../gp/LogitVGPiecewiseBoundLikelihood.cpp | 21 +++----- .../gp/MultiLaplaceInferenceMethod.cpp | 12 ++--- .../machine/gp/NumericalVGLikelihood.cpp | 15 ++---- src/shogun/machine/gp/SingleFITCInference.cpp | 12 ++--- .../gp/SingleFITCLaplaceInferenceMethod.cpp | 38 ++++++------- .../gp/SingleLaplaceInferenceMethod.cpp | 22 ++++---- .../machine/gp/SingleSparseInference.cpp | 16 +++--- src/shogun/machine/gp/SoftMaxLikelihood.cpp | 3 +- src/shogun/machine/gp/SparseInference.cpp | 6 +-- .../machine/gp/VarDTCInferenceMethod.cpp | 18 +++---- .../gp/VariationalGaussianLikelihood.cpp | 9 ++-- .../machine/gp/VariationalLikelihood.cpp | 6 +-- .../linalg/eigsolver/EigenSolver.h | 15 ++---- .../linalg/eigsolver/LanczosEigenSolver.cpp | 6 +-- .../linalg/linop/LinearOperator.cpp | 3 +- .../ratapprox/logdet/LogDetEstimator.cpp | 4 +- .../opfunc/LogRationalApproximationCGM.cpp | 2 +- .../LogRationalApproximationIndividual.cpp | 2 +- .../ratapprox/opfunc/OperatorFunction.h | 2 +- .../opfunc/RationalApproximation.cpp | 15 +++--- .../ratapprox/tracesampler/ProbingSampler.cpp | 9 ++-- 
.../ratapprox/tracesampler/TraceSampler.h | 4 +- src/shogun/metric/LMNN.cpp | 37 +++++-------- .../modelselection/GradientModelSelection.cpp | 8 +-- src/shogun/modelselection/ModelSelection.cpp | 4 +- .../modelselection/ParameterCombination.cpp | 3 +- src/shogun/multiclass/GaussianNaiveBayes.cpp | 17 +++--- src/shogun/multiclass/KNN.cpp | 8 +-- src/shogun/multiclass/MCLDA.cpp | 22 ++++---- src/shogun/multiclass/MulticlassLibSVM.cpp | 2 +- src/shogun/multiclass/MulticlassOCAS.cpp | 8 +-- .../multiclass/MulticlassOneVsOneStrategy.cpp | 2 +- src/shogun/multiclass/MulticlassStrategy.cpp | 6 +-- src/shogun/multiclass/QDA.cpp | 14 ++--- src/shogun/multiclass/ScatterSVM.cpp | 6 +-- src/shogun/multiclass/ShareBoost.cpp | 4 +- .../ecoc/ECOCDiscriminantEncoder.cpp | 2 +- .../multiclass/ecoc/ECOCForestEncoder.cpp | 2 +- .../ecoc/ECOCRandomDenseEncoder.cpp | 6 +-- .../ecoc/ECOCRandomSparseEncoder.cpp | 10 ++-- src/shogun/multiclass/ecoc/ECOCStrategy.cpp | 4 +- .../BalancedConditionalProbabilityTree.cpp | 2 +- .../multiclass/tree/C45ClassifierTree.cpp | 10 ++-- src/shogun/multiclass/tree/CARTree.cpp | 26 ++++----- src/shogun/multiclass/tree/CHAIDTree.cpp | 20 +++---- src/shogun/multiclass/tree/RandomCARTree.cpp | 2 +- src/shogun/multiclass/tree/RelaxedTree.cpp | 2 +- src/shogun/multiclass/tree/TreeMachine.h | 2 +- src/shogun/multiclass/tree/TreeMachineNode.h | 4 +- src/shogun/neuralnets/Autoencoder.cpp | 6 +-- src/shogun/neuralnets/DeepAutoencoder.cpp | 28 +++++----- src/shogun/neuralnets/DeepBeliefNetwork.cpp | 54 +++++++++---------- .../neuralnets/NeuralConvolutionalLayer.cpp | 32 ++++++----- src/shogun/neuralnets/NeuralInputLayer.cpp | 4 +- src/shogun/neuralnets/NeuralLayer.cpp | 28 +++++----- src/shogun/neuralnets/NeuralNetwork.cpp | 50 +++++++++-------- src/shogun/neuralnets/RBM.cpp | 40 +++++++------- src/shogun/optimization/AdaDeltaUpdater.cpp | 10 ++-- src/shogun/optimization/AdaGradUpdater.cpp | 6 +-- src/shogun/optimization/AdamUpdater.cpp | 16 +++--- .../optimization/AdaptMomentumCorrection.cpp | 12 ++--- src/shogun/optimization/ConstLearningRate.cpp | 2 +- src/shogun/optimization/DescendCorrection.cpp | 2 +- .../DescendUpdaterWithCorrection.cpp | 2 +- src/shogun/optimization/ElasticNetPenalty.cpp | 6 +-- .../optimization/FirstOrderMinimizer.cpp | 6 +-- .../FirstOrderStochasticMinimizer.cpp | 10 ++-- .../InverseScalingLearningRate.cpp | 8 +-- src/shogun/optimization/L1Penalty.cpp | 2 +- src/shogun/optimization/L1PenaltyForTG.cpp | 4 +- .../optimization/MomentumCorrection.cpp | 2 +- .../optimization/PNormMappingFunction.cpp | 2 +- src/shogun/optimization/RmsPropUpdater.cpp | 8 +-- src/shogun/optimization/SMDMinimizer.cpp | 2 +- src/shogun/optimization/SMIDASMinimizer.cpp | 2 +- src/shogun/optimization/SVRGMinimizer.cpp | 8 +-- .../optimization/lbfgs/LBFGSMinimizer.cpp | 34 ++++++------ .../preprocessor/DependenceMaximization.cpp | 4 +- src/shogun/preprocessor/FeatureSelection.cpp | 14 +++-- src/shogun/preprocessor/FisherLDA.cpp | 18 +++---- src/shogun/preprocessor/KernelPCA.cpp | 4 +- src/shogun/preprocessor/PCA.cpp | 13 +++-- src/shogun/preprocessor/PruneVarSubMean.cpp | 8 +-- .../RandomFourierGaussPreproc.cpp | 16 +++--- src/shogun/preprocessor/RescaleFeatures.cpp | 4 +- .../regression/LinearRidgeRegression.cpp | 3 +- src/shogun/regression/svr/LibSVR.cpp | 2 +- src/shogun/structure/CCSOSVM.cpp | 18 +++---- src/shogun/structure/DisjointSet.cpp | 8 +-- src/shogun/structure/FWSOSVM.cpp | 10 ++-- src/shogun/structure/Factor.cpp | 18 +++---- src/shogun/structure/FactorGraph.cpp | 12 ++--- 
src/shogun/structure/FactorGraphModel.cpp | 6 +-- src/shogun/structure/FactorType.cpp | 12 ++--- src/shogun/structure/HMSVMModel.cpp | 11 ++-- .../structure/HashedMultilabelModel.cpp | 14 ++--- .../structure/HierarchicalMultilabelModel.cpp | 10 ++-- src/shogun/structure/MAPInference.cpp | 10 ++-- src/shogun/structure/MulticlassModel.cpp | 3 +- src/shogun/structure/MulticlassSOLabels.cpp | 9 ++-- src/shogun/structure/MultilabelCLRModel.cpp | 3 +- src/shogun/structure/MultilabelModel.cpp | 9 ++-- src/shogun/structure/MultilabelSOLabels.cpp | 6 +-- src/shogun/structure/PrimalMosekSOSVM.cpp | 10 ++-- src/shogun/structure/SOSVMHelper.cpp | 14 ++--- src/shogun/structure/SequenceLabels.cpp | 2 +- src/shogun/structure/StateModel.cpp | 11 ++-- src/shogun/structure/StochasticSOSVM.cpp | 10 ++-- src/shogun/structure/StructuredModel.cpp | 6 +-- .../DomainAdaptationMulticlassLibLinear.cpp | 3 +- .../domain_adaptation/DomainAdaptationSVM.cpp | 3 +- .../transfer/multitask/LibLinearMTL.cpp | 8 ++- .../multitask/MultitaskKernelPlifNormalizer.h | 4 +- src/shogun/transfer/multitask/Task.cpp | 6 +-- src/shogun/transformer/Transformer.cpp | 3 +- .../optimization/NLOPTMinimizer_unittest.cc | 3 +- .../lbfgs/LBFGSMinimizer_unittest.cc | 3 +- 248 files changed, 974 insertions(+), 1175 deletions(-) diff --git a/src/shogun/base/SGObject.h b/src/shogun/base/SGObject.h index 305034411f8..7c9cb97ad89 100644 --- a/src/shogun/base/SGObject.h +++ b/src/shogun/base/SGObject.h @@ -57,10 +57,36 @@ template class SGStringList; #define SG_UNREF_NO_NULL(x) { if (x) { (x)->unref(); } } /******************************************************************************* - * Macros for registering parameters/model selection parameters + * Macros for registering parameter properties ******************************************************************************/ -#define SG_ADD(param, name, description, param_properties) \ +#ifdef _MSC_VER + +#define VA_NARGS(...) INTERNAL_EXPAND_ARGS_PRIVATE(INTERNAL_ARGS_AUGMENTER(__VA_ARGS__)) +#define INTERNAL_ARGS_AUGMENTER(...) unused, __VA_ARGS__ +#define INTERNAL_EXPAND(x) x +#define INTERNAL_EXPAND_ARGS_PRIVATE(...) INTERNAL_EXPAND(INTERNAL_GET_ARG_COUNT_PRIVATE(__VA_ARGS__, 4, 3, 2, 1, 0)) +#define INTERNAL_GET_ARG_COUNT_PRIVATE(_0_, _1_, _2_, _3_, _4_, count, ...) count + +#else + +#define VA_NARGS_IMPL(_1, _2, _3, _4, N, ...) N +#define VA_NARGS(...) VA_NARGS_IMPL(__VA_ARGS__, 4, 3, 2, 1) + +#endif + +#define VARARG_IMPL2(base, count, ...) base##count(__VA_ARGS__) +#define VARARG_IMPL(base, count, ...) VARARG_IMPL2(base, count, __VA_ARGS__) +#define VARARG(base, ...) VARARG_IMPL(base, VA_NARGS(__VA_ARGS__), __VA_ARGS__) + +#define SG_ADD3(param, name, description) \ + { \ + this->m_parameters->add(param, name, description); \ + this->watch_param( \ + name, param, AnyParameterProperties()); \ + } + +#define SG_ADD4(param, name, description, param_properties) \ { \ AnyParameterProperties pprop = \ AnyParameterProperties(description, param_properties); \ @@ -72,8 +98,10 @@ template class SGStringList; this->m_gradient_parameters->add(param, name, description); \ } +#define SG_ADD(...) VARARG(SG_ADD, __VA_ARGS__) + /******************************************************************************* - * End of macros for registering parameters/model selection parameters + * End of macros for registering parameter properties ******************************************************************************/ /** @brief Class SGObject is the base class of all shogun objects. 
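For reference, after this change SG_ADD accepts either three arguments (default properties) or four (explicit property flags), dispatched through the VARARG counter above. A minimal usage sketch follows; CMyMachine, m_tau and m_max_iter are invented names for illustration only and are not part of this patch series:

    class CMyMachine : public CSGObject
    {
    public:
        CMyMachine() : CSGObject()
        {
            // three-argument form: registered with default AnyParameterProperties
            SG_ADD(&m_max_iter, "max_iter", "Maximum number of iterations");

            // four-argument form: flag the parameter as a model-selection
            // hyperparameter that also provides a gradient
            SG_ADD(&m_tau, "tau", "Regularization strength",
                ParameterProperties::HYPER | ParameterProperties::GRADIENT);
        }

        virtual const char* get_name() const { return "MyMachine"; }

    private:
        float64_t m_tau;
        int32_t m_max_iter;
    };

Both forms expand to the same watch_param() registration; the four-argument variant additionally adds the parameter to the model-selection and gradient parameter lists when the corresponding flags are set.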
diff --git a/src/shogun/classifier/LDA.cpp b/src/shogun/classifier/LDA.cpp index 2c67075f2f5..17ae1bc1e2e 100644 --- a/src/shogun/classifier/LDA.cpp +++ b/src/shogun/classifier/LDA.cpp @@ -49,9 +49,9 @@ void CLDA::init() SG_ADD( (machine_int_t*)&m_method, "m_method", - "Method used for LDA calculation", ParameterProperties()); + "Method used for LDA calculation"); SG_ADD(&m_gamma, "m_gamma", "Regularization parameter", ParameterProperties::HYPER); - SG_ADD(&m_bdc_svd, "m_bdc_svd", "Use BDC-SVD algorithm", ParameterProperties()); + SG_ADD(&m_bdc_svd, "m_bdc_svd", "Use BDC-SVD algorithm"); } CLDA::~CLDA() diff --git a/src/shogun/classifier/PluginEstimate.cpp b/src/shogun/classifier/PluginEstimate.cpp index 73220557ff9..706b18b6a2e 100644 --- a/src/shogun/classifier/PluginEstimate.cpp +++ b/src/shogun/classifier/PluginEstimate.cpp @@ -21,18 +21,14 @@ CPluginEstimate::CPluginEstimate(float64_t pos_pseudo, float64_t neg_pseudo) pos_model(NULL), neg_model(NULL), features(NULL) { SG_ADD( - &m_pos_pseudo, "pos_pseudo", "pseudo count for positive class", - ParameterProperties()); + &m_pos_pseudo, "pos_pseudo", "pseudo count for positive class"); SG_ADD( - &m_neg_pseudo, "neg_pseudo", "pseudo count for negative class", - ParameterProperties()); + &m_neg_pseudo, "neg_pseudo", "pseudo count for negative class"); SG_ADD( - &pos_model, "pos_model", "LinearHMM modelling positive class.", - ParameterProperties()); + &pos_model, "pos_model", "LinearHMM modelling positive class."); SG_ADD( - &neg_model, "neg_model", "LinearHMM modelling negative class.", - ParameterProperties()); - SG_ADD(&features, "features", "String Features.", ParameterProperties()); + &neg_model, "neg_model", "LinearHMM modelling negative class."); + SG_ADD(&features, "features", "String Features."); } CPluginEstimate::~CPluginEstimate() diff --git a/src/shogun/classifier/mkl/MKL.cpp b/src/shogun/classifier/mkl/MKL.cpp index db6718587a3..70ac705fcdd 100644 --- a/src/shogun/classifier/mkl/MKL.cpp +++ b/src/shogun/classifier/mkl/MKL.cpp @@ -271,21 +271,21 @@ void CMKL::register_params() rho = 0; lp_initialized = false; - SG_ADD((CMachine**)&svm, "svm", "wrapper svm", ParameterProperties()); - SG_ADD(&C_mkl, "C_mkl", "C mkl", ParameterProperties()); - SG_ADD(&mkl_norm, "mkl_norm", "norm used in mkl", ParameterProperties()); - SG_ADD(&ent_lambda, "ent_lambda", "elastic net sparsity trade-off parameter", ParameterProperties()); - SG_ADD(&mkl_block_norm, "mkl_block_norm", "mkl sparse trade-off parameter", ParameterProperties()); + SG_ADD((CMachine**)&svm, "svm", "wrapper svm"); + SG_ADD(&C_mkl, "C_mkl", "C mkl"); + SG_ADD(&mkl_norm, "mkl_norm", "norm used in mkl"); + SG_ADD(&ent_lambda, "ent_lambda", "elastic net sparsity trade-off parameter"); + SG_ADD(&mkl_block_norm, "mkl_block_norm", "mkl sparse trade-off parameter"); m_parameters->add_vector(&beta_local, &beta_local_size, "beta_local", "subkernel weights on L1 term of elastic net mkl"); watch_param("beta_local", &beta_local, &beta_local_size); - SG_ADD(&mkl_iterations, "mkl_iterations", "number of mkl steps", ParameterProperties()); - SG_ADD(&mkl_epsilon, "mkl_epsilon", "mkl epsilon", ParameterProperties()); - SG_ADD(&interleaved_optimization, "interleaved_optimization", "whether to use mkl wrapper or interleaved opt.", ParameterProperties()); - SG_ADD(&w_gap, "w_gap", "gap between interactions", ParameterProperties()); - SG_ADD(&rho, "rho", "objective after mkl iterations", ParameterProperties()); - SG_ADD(&lp_initialized, "lp_initialized", "if lp is Initialized", 
ParameterProperties()); + SG_ADD(&mkl_iterations, "mkl_iterations", "number of mkl steps"); + SG_ADD(&mkl_epsilon, "mkl_epsilon", "mkl epsilon"); + SG_ADD(&interleaved_optimization, "interleaved_optimization", "whether to use mkl wrapper or interleaved opt."); + SG_ADD(&w_gap, "w_gap", "gap between interactions"); + SG_ADD(&rho, "rho", "objective after mkl iterations"); + SG_ADD(&lp_initialized, "lp_initialized", "if lp is Initialized"); // Missing: self (3rd party specific, handled in clone()) } diff --git a/src/shogun/classifier/svm/LibLinear.cpp b/src/shogun/classifier/svm/LibLinear.cpp index 3d97845117a..343967a49b6 100644 --- a/src/shogun/classifier/svm/LibLinear.cpp +++ b/src/shogun/classifier/svm/LibLinear.cpp @@ -53,15 +53,14 @@ void CLibLinear::init() SG_ADD(&C1, "C1", "C Cost constant 1.", ParameterProperties::HYPER); SG_ADD(&C2, "C2", "C Cost constant 2.", ParameterProperties::HYPER); SG_ADD( - &use_bias, "use_bias", "Indicates if bias is used.", ParameterProperties()); - SG_ADD(&epsilon, "epsilon", "Convergence precision.", ParameterProperties()); + &use_bias, "use_bias", "Indicates if bias is used."); + SG_ADD(&epsilon, "epsilon", "Convergence precision."); SG_ADD( - &max_iterations, "max_iterations", "Max number of iterations.", - ParameterProperties()); - SG_ADD(&m_linear_term, "linear_term", "Linear Term", ParameterProperties()); + &max_iterations, "max_iterations", "Max number of iterations."); + SG_ADD(&m_linear_term, "linear_term", "Linear Term"); SG_ADD( (machine_int_t*)&liblinear_solver_type, "liblinear_solver_type", - "Type of LibLinear solver.", ParameterProperties()); + "Type of LibLinear solver."); } CLibLinear::~CLibLinear() diff --git a/src/shogun/classifier/svm/LibSVM.cpp b/src/shogun/classifier/svm/LibSVM.cpp index 2ae5e3a5b9a..46967c09bbf 100644 --- a/src/shogun/classifier/svm/LibSVM.cpp +++ b/src/shogun/classifier/svm/LibSVM.cpp @@ -36,7 +36,7 @@ CLibSVM::~CLibSVM() void CLibSVM::register_params() { - SG_ADD((machine_int_t*) &solver_type, "libsvm_solver_type", "LibSVM Solver type", ParameterProperties()); + SG_ADD((machine_int_t*) &solver_type, "libsvm_solver_type", "LibSVM Solver type"); } bool CLibSVM::train_machine(CFeatures* data) diff --git a/src/shogun/classifier/svm/OnlineLibLinear.cpp b/src/shogun/classifier/svm/OnlineLibLinear.cpp index 50cc95f9f40..5fa6af63ffa 100644 --- a/src/shogun/classifier/svm/OnlineLibLinear.cpp +++ b/src/shogun/classifier/svm/OnlineLibLinear.cpp @@ -66,7 +66,7 @@ void COnlineLibLinear::init() SG_ADD(&C1, "C1", "C Cost constant 1.", ParameterProperties::HYPER); SG_ADD(&C2, "C2", "C Cost constant 2.", ParameterProperties::HYPER); SG_ADD( - &use_bias, "use_bias", "Indicates if bias is used.", ParameterProperties()); + &use_bias, "use_bias", "Indicates if bias is used."); PG = 0; PGmax_old = CMath::INFTY; diff --git a/src/shogun/classifier/svm/OnlineSVMSGD.cpp b/src/shogun/classifier/svm/OnlineSVMSGD.cpp index 77017e6ff76..62fa3735a89 100644 --- a/src/shogun/classifier/svm/OnlineSVMSGD.cpp +++ b/src/shogun/classifier/svm/OnlineSVMSGD.cpp @@ -209,14 +209,14 @@ void COnlineSVMSGD::init() SG_ADD(&C1, "C1", "Cost constant 1.", ParameterProperties::HYPER); SG_ADD(&C2, "C2", "Cost constant 2.", ParameterProperties::HYPER); SG_ADD(&lambda, "lambda", "Regularization parameter.", ParameterProperties::HYPER); - SG_ADD(&wscale, "wscale", "W scale", ParameterProperties()); - SG_ADD(&bscale, "bscale", "b scale", ParameterProperties()); - SG_ADD(&epochs, "epochs", "epochs", ParameterProperties()); - SG_ADD(&skip, "skip", "skip", 
ParameterProperties()); - SG_ADD(&count, "count", "count", ParameterProperties()); + SG_ADD(&wscale, "wscale", "W scale"); + SG_ADD(&bscale, "bscale", "b scale"); + SG_ADD(&epochs, "epochs", "epochs"); + SG_ADD(&skip, "skip", "skip"); + SG_ADD(&count, "count", "count"); SG_ADD( - &use_bias, "use_bias", "Indicates if bias is used.", ParameterProperties()); + &use_bias, "use_bias", "Indicates if bias is used."); SG_ADD( &use_regularized_bias, "use_regularized_bias", - "Indicates if bias is regularized.", ParameterProperties()); + "Indicates if bias is regularized."); } diff --git a/src/shogun/classifier/svm/SGDQN.cpp b/src/shogun/classifier/svm/SGDQN.cpp index 9c0d208d51e..7ab2ee14916 100644 --- a/src/shogun/classifier/svm/SGDQN.cpp +++ b/src/shogun/classifier/svm/SGDQN.cpp @@ -229,6 +229,6 @@ void CSGDQN::init() SG_ADD(&C1, "C1", "Cost constant 1.", ParameterProperties::HYPER); SG_ADD(&C2, "C2", "Cost constant 2.", ParameterProperties::HYPER); SG_ADD(&epochs, "epochs", "epochs", ParameterProperties::HYPER); - SG_ADD(&skip, "skip", "skip", ParameterProperties()); - SG_ADD(&count, "count", "count", ParameterProperties()); + SG_ADD(&skip, "skip", "skip"); + SG_ADD(&count, "count", "count"); } diff --git a/src/shogun/classifier/svm/SVM.cpp b/src/shogun/classifier/svm/SVM.cpp index 48446ba8239..33c98f1a336 100644 --- a/src/shogun/classifier/svm/SVM.cpp +++ b/src/shogun/classifier/svm/SVM.cpp @@ -42,19 +42,16 @@ void CSVM::set_defaults(int32_t num_sv) { SG_ADD(&C1, "C1", "", ParameterProperties::HYPER); SG_ADD(&C2, "C2", "", ParameterProperties::HYPER); - SG_ADD(&svm_loaded, "svm_loaded", "SVM is loaded.", ParameterProperties()); + SG_ADD(&svm_loaded, "svm_loaded", "SVM is loaded."); SG_ADD(&epsilon, "epsilon", "", ParameterProperties::HYPER); SG_ADD(&tube_epsilon, "tube_epsilon", "Tube epsilon for support vector regression.", ParameterProperties::HYPER); SG_ADD(&nu, "nu", "", ParameterProperties::HYPER); - SG_ADD(&objective, "objective", "", ParameterProperties()); - SG_ADD(&qpsize, "qpsize", "", ParameterProperties()); - SG_ADD(&use_shrinking, "use_shrinking", "Shrinking shall be used.", - ParameterProperties()); - SG_ADD((CSGObject**) &mkl, "mkl", "MKL object that svm optimizers need.", - ParameterProperties()); - SG_ADD(&m_linear_term, "linear_term", "Linear term in qp.", - ParameterProperties()); + SG_ADD(&objective, "objective", ""); + SG_ADD(&qpsize, "qpsize", ""); + SG_ADD(&use_shrinking, "use_shrinking", "Shrinking shall be used."); + SG_ADD((CSGObject**) &mkl, "mkl", "MKL object that svm optimizers need."); + SG_ADD(&m_linear_term, "linear_term", "Linear term in qp."); callback=NULL; mkl=NULL; diff --git a/src/shogun/classifier/svm/SVMOcas.cpp b/src/shogun/classifier/svm/SVMOcas.cpp index beb62412a1e..5278d51ec73 100644 --- a/src/shogun/classifier/svm/SVMOcas.cpp +++ b/src/shogun/classifier/svm/SVMOcas.cpp @@ -348,14 +348,12 @@ void CSVMOcas::init() SG_ADD(&C1, "C1", "Cost constant 1.", ParameterProperties::HYPER); SG_ADD(&C2, "C2", "Cost constant 2.", ParameterProperties::HYPER); SG_ADD( - &use_bias, "use_bias", "Indicates if bias is used.", ParameterProperties()); - SG_ADD(&epsilon, "epsilon", "Convergence precision.", ParameterProperties()); + &use_bias, "use_bias", "Indicates if bias is used."); + SG_ADD(&epsilon, "epsilon", "Convergence precision."); SG_ADD( - &bufsize, "bufsize", "Maximum number of cutting planes.", - ParameterProperties()); + &bufsize, "bufsize", "Maximum number of cutting planes."); SG_ADD( - (machine_int_t*)&method, "method", "SVMOcas solver type.", - 
ParameterProperties()); + (machine_int_t*)&method, "method", "SVMOcas solver type."); } float64_t CSVMOcas::compute_primal_objective() const diff --git a/src/shogun/clustering/GMM.cpp b/src/shogun/clustering/GMM.cpp index 221d72f555a..10e98497fe3 100644 --- a/src/shogun/clustering/GMM.cpp +++ b/src/shogun/clustering/GMM.cpp @@ -826,6 +826,5 @@ void CGMM::register_params() //TODO serialization broken //m_parameters->add((SGVector*) &m_components, "m_components", "Mixture components"); SG_ADD( - &m_coefficients, "m_coefficients", "Mixture coefficients.", - ParameterProperties()); + &m_coefficients, "m_coefficients", "Mixture coefficients."); } diff --git a/src/shogun/clustering/KMeansBase.cpp b/src/shogun/clustering/KMeansBase.cpp index f97499c2616..42deb639fbb 100644 --- a/src/shogun/clustering/KMeansBase.cpp +++ b/src/shogun/clustering/KMeansBase.cpp @@ -361,8 +361,8 @@ void CKMeansBase::init() use_kmeanspp=false; SG_ADD(&max_iter, "max_iter", "Maximum number of iterations", ParameterProperties::HYPER); SG_ADD(&k, "k", "k, the number of clusters", ParameterProperties::HYPER); - SG_ADD(&dimensions, "dimensions", "Dimensions of data", ParameterProperties()); - SG_ADD(&R, "radiuses", "Cluster radiuses", ParameterProperties()); + SG_ADD(&dimensions, "dimensions", "Dimensions of data"); + SG_ADD(&R, "radiuses", "Cluster radiuses"); watch_method("cluster_centers", &CKMeansBase::get_cluster_centers); } diff --git a/src/shogun/clustering/KMeansMiniBatch.cpp b/src/shogun/clustering/KMeansMiniBatch.cpp index 5c17ac89d58..3f7be8efc98 100644 --- a/src/shogun/clustering/KMeansMiniBatch.cpp +++ b/src/shogun/clustering/KMeansMiniBatch.cpp @@ -140,8 +140,7 @@ void CKMeansMiniBatch::init_mb_params() batch_size=-1; SG_ADD( - &batch_size, "batch_size", "batch size for mini-batch KMeans", - ParameterProperties()); + &batch_size, "batch_size", "batch size for mini-batch KMeans"); } bool CKMeansMiniBatch::train_machine(CFeatures* data) diff --git a/src/shogun/converter/FactorAnalysis.cpp b/src/shogun/converter/FactorAnalysis.cpp index 95490fd8c60..5d9973cf2e5 100644 --- a/src/shogun/converter/FactorAnalysis.cpp +++ b/src/shogun/converter/FactorAnalysis.cpp @@ -21,8 +21,8 @@ CFactorAnalysis::CFactorAnalysis() : void CFactorAnalysis::init() { - SG_ADD(&m_max_iteration, "max_iteration", "maximum number of iterations", ParameterProperties()); - SG_ADD(&m_epsilon, "epsilon", "convergence parameter", ParameterProperties()); + SG_ADD(&m_max_iteration, "max_iteration", "maximum number of iterations"); + SG_ADD(&m_epsilon, "epsilon", "convergence parameter"); } CFactorAnalysis::~CFactorAnalysis() diff --git a/src/shogun/converter/HashedDocConverter.cpp b/src/shogun/converter/HashedDocConverter.cpp index 3e91a3c8ce1..1a5a1c1e7df 100644 --- a/src/shogun/converter/HashedDocConverter.cpp +++ b/src/shogun/converter/HashedDocConverter.cpp @@ -57,15 +57,11 @@ void CHashedDocConverter::init(CTokenizer* tzer, int32_t hash_bits, bool normali tokenizer = tzer; SG_REF(tokenizer); - SG_ADD(&num_bits, "num_bits", "Number of bits of the hash", - ParameterProperties()); - SG_ADD(&ngrams, "ngrams", "Number of consecutive tokens", - ParameterProperties()); - SG_ADD(&tokens_to_skip, "tokens_to_skip", "Number of tokens to skip", - ParameterProperties()); - SG_ADD(&should_normalize, "should_normalize", "Whether to normalize vectors or not", - ParameterProperties()); - SG_ADD(&tokenizer, "tokenizer", "Tokenizer", ParameterProperties()); + SG_ADD(&num_bits, "num_bits", "Number of bits of the hash"); + SG_ADD(&ngrams, "ngrams", "Number of 
consecutive tokens"); + SG_ADD(&tokens_to_skip, "tokens_to_skip", "Number of tokens to skip"); + SG_ADD(&should_normalize, "should_normalize", "Whether to normalize vectors or not"); + SG_ADD(&tokenizer, "tokenizer", "Tokenizer"); } const char* CHashedDocConverter::get_name() const diff --git a/src/shogun/converter/LocallyLinearEmbedding.cpp b/src/shogun/converter/LocallyLinearEmbedding.cpp index 4aa734a9a5a..34edd88242e 100644 --- a/src/shogun/converter/LocallyLinearEmbedding.cpp +++ b/src/shogun/converter/LocallyLinearEmbedding.cpp @@ -30,7 +30,7 @@ void CLocallyLinearEmbedding::init() SG_ADD(&m_nullspace_shift, "nullspace_shift", "nullspace finding regularization shift",ParameterProperties()); SG_ADD(&m_reconstruction_shift, "reconstruction_shift", - "shift used to regularize reconstruction step", ParameterProperties()); + "shift used to regularize reconstruction step"); } diff --git a/src/shogun/converter/ManifoldSculpting.cpp b/src/shogun/converter/ManifoldSculpting.cpp index 0a0cf7414a6..6d43780e7f8 100644 --- a/src/shogun/converter/ManifoldSculpting.cpp +++ b/src/shogun/converter/ManifoldSculpting.cpp @@ -23,11 +23,11 @@ CManifoldSculpting::CManifoldSculpting() : void CManifoldSculpting::init() { - SG_ADD(&m_k, "k", "number of neighbors", ParameterProperties()); + SG_ADD(&m_k, "k", "number of neighbors"); SG_ADD(&m_squishing_rate, "quishing_rate", "squishing rate",ParameterProperties()); SG_ADD(&m_max_iteration, "max_iteration", - "maximum number of algorithm's iterations", ParameterProperties()); + "maximum number of algorithm's iterations"); } CManifoldSculpting::~CManifoldSculpting() diff --git a/src/shogun/converter/MultidimensionalScaling.cpp b/src/shogun/converter/MultidimensionalScaling.cpp index e90ace6bd0a..e14f4aedae8 100644 --- a/src/shogun/converter/MultidimensionalScaling.cpp +++ b/src/shogun/converter/MultidimensionalScaling.cpp @@ -29,10 +29,9 @@ CMultidimensionalScaling::CMultidimensionalScaling() : CEmbeddingConverter() void CMultidimensionalScaling::init() { - SG_ADD(&m_eigenvalues, "eigenvalues", "eigenvalues of last embedding", - ParameterProperties()); + SG_ADD(&m_eigenvalues, "eigenvalues", "eigenvalues of last embedding"); SG_ADD(&m_landmark, "landmark", - "indicates if landmark approximation should be used", ParameterProperties()); + "indicates if landmark approximation should be used"); SG_ADD(&m_landmark_number, "landmark_number", "the number of landmarks for approximation", ParameterProperties::HYPER); } diff --git a/src/shogun/converter/StochasticProximityEmbedding.cpp b/src/shogun/converter/StochasticProximityEmbedding.cpp index 40abb7da76a..d06a0cd9549 100644 --- a/src/shogun/converter/StochasticProximityEmbedding.cpp +++ b/src/shogun/converter/StochasticProximityEmbedding.cpp @@ -27,13 +27,10 @@ CStochasticProximityEmbedding::CStochasticProximityEmbedding() : void CStochasticProximityEmbedding::init() { - SG_ADD(&m_k, "m_k", "Number of neighbors", ParameterProperties()); - SG_ADD((machine_int_t*) &m_strategy, "m_strategy", "SPE strategy", - ParameterProperties()); - SG_ADD(&m_tolerance, "m_tolerance", "Regularization parameter", - ParameterProperties()); - SG_ADD(&m_max_iteration, "max_iteration", "maximum number of iterations", - ParameterProperties()); + SG_ADD(&m_k, "m_k", "Number of neighbors"); + SG_ADD((machine_int_t*) &m_strategy, "m_strategy", "SPE strategy"); + SG_ADD(&m_tolerance, "m_tolerance", "Regularization parameter"); + SG_ADD(&m_max_iteration, "max_iteration", "maximum number of iterations"); } 
CStochasticProximityEmbedding::~CStochasticProximityEmbedding() diff --git a/src/shogun/converter/TDistributedStochasticNeighborEmbedding.cpp b/src/shogun/converter/TDistributedStochasticNeighborEmbedding.cpp index 3a171603263..d2aed8c9f9b 100644 --- a/src/shogun/converter/TDistributedStochasticNeighborEmbedding.cpp +++ b/src/shogun/converter/TDistributedStochasticNeighborEmbedding.cpp @@ -21,8 +21,8 @@ CTDistributedStochasticNeighborEmbedding::CTDistributedStochasticNeighborEmbeddi void CTDistributedStochasticNeighborEmbedding::init() { - SG_ADD(&m_perplexity, "perplexity", "perplexity", ParameterProperties()); - SG_ADD(&m_theta, "theta", "learning rate", ParameterProperties()); + SG_ADD(&m_perplexity, "perplexity", "perplexity"); + SG_ADD(&m_theta, "theta", "learning rate"); } CTDistributedStochasticNeighborEmbedding::~CTDistributedStochasticNeighborEmbedding() diff --git a/src/shogun/converter/ica/FastICA.cpp b/src/shogun/converter/ica/FastICA.cpp index 14d5679e442..93e0e44cd9a 100644 --- a/src/shogun/converter/ica/FastICA.cpp +++ b/src/shogun/converter/ica/FastICA.cpp @@ -49,7 +49,7 @@ CFastICA::CFastICA() : CICAConverter() void CFastICA::init() { whiten = true; - SG_ADD(&whiten, "whiten", "flag indicating whether to whiten the data", ParameterProperties()); + SG_ADD(&whiten, "whiten", "flag indicating whether to whiten the data"); } CFastICA::~CFastICA() diff --git a/src/shogun/converter/ica/ICAConverter.cpp b/src/shogun/converter/ica/ICAConverter.cpp index 093674ad948..ece72676708 100644 --- a/src/shogun/converter/ica/ICAConverter.cpp +++ b/src/shogun/converter/ica/ICAConverter.cpp @@ -23,9 +23,9 @@ void CICAConverter::init() max_iter = 200; tol = 1e-6; - SG_ADD(&m_mixing_matrix, "mixing_matrix", "the mixing matrix", ParameterProperties()); - SG_ADD(&max_iter, "max_iter", "maximum number of iterations", ParameterProperties()); - SG_ADD(&tol, "tol", "the convergence tolerance", ParameterProperties()); + SG_ADD(&m_mixing_matrix, "mixing_matrix", "the mixing matrix"); + SG_ADD(&max_iter, "max_iter", "maximum number of iterations"); + SG_ADD(&tol, "tol", "the convergence tolerance"); } CICAConverter::~CICAConverter() diff --git a/src/shogun/converter/ica/Jade.cpp b/src/shogun/converter/ica/Jade.cpp index 29db09d32a8..381b41b163f 100644 --- a/src/shogun/converter/ica/Jade.cpp +++ b/src/shogun/converter/ica/Jade.cpp @@ -28,7 +28,7 @@ CJade::CJade() : CICAConverter() void CJade::init() { m_cumulant_matrix = SGMatrix(); - SG_ADD(&m_cumulant_matrix, "cumulant_matrix", "m_cumulant_matrix", ParameterProperties()); + SG_ADD(&m_cumulant_matrix, "cumulant_matrix", "m_cumulant_matrix"); } CJade::~CJade() diff --git a/src/shogun/distance/AttenuatedEuclideanDistance.cpp b/src/shogun/distance/AttenuatedEuclideanDistance.cpp index d10c7e81030..391c5989702 100644 --- a/src/shogun/distance/AttenuatedEuclideanDistance.cpp +++ b/src/shogun/distance/AttenuatedEuclideanDistance.cpp @@ -67,6 +67,5 @@ void CAttenuatedEuclideanDistance::init() disable_sqrt=false; SG_ADD( - &disable_sqrt, "disable_sqrt", "If sqrt shall not be applied.", - ParameterProperties()); + &disable_sqrt, "disable_sqrt", "If sqrt shall not be applied."); } diff --git a/src/shogun/distance/CustomDistance.cpp b/src/shogun/distance/CustomDistance.cpp index 5a0f0cfb5c4..1bccd5a0e03 100644 --- a/src/shogun/distance/CustomDistance.cpp +++ b/src/shogun/distance/CustomDistance.cpp @@ -128,7 +128,7 @@ void CCustomDistance::init() AnyParameterProperties("Distance Matrix")); SG_ADD( - &upper_diagonal, "upper_diagonal", "Upper diagonal", 
ParameterProperties()); + &upper_diagonal, "upper_diagonal", "Upper diagonal"); } void CCustomDistance::cleanup() diff --git a/src/shogun/distance/CustomMahalanobisDistance.cpp b/src/shogun/distance/CustomMahalanobisDistance.cpp index c3056df3a7c..eb58cfaffb0 100644 --- a/src/shogun/distance/CustomMahalanobisDistance.cpp +++ b/src/shogun/distance/CustomMahalanobisDistance.cpp @@ -26,7 +26,7 @@ CCustomMahalanobisDistance::CCustomMahalanobisDistance(CFeatures* l, CFeatures* void CCustomMahalanobisDistance::register_params() { - SG_ADD(&m_mahalanobis_matrix, "m_mahalanobis_matrix", "Mahalanobis matrix", ParameterProperties()); + SG_ADD(&m_mahalanobis_matrix, "m_mahalanobis_matrix", "Mahalanobis matrix"); } CCustomMahalanobisDistance::~CCustomMahalanobisDistance() diff --git a/src/shogun/distance/Distance.cpp b/src/shogun/distance/Distance.cpp index 74eafab8f4c..fa9961e2c45 100644 --- a/src/shogun/distance/Distance.cpp +++ b/src/shogun/distance/Distance.cpp @@ -259,8 +259,8 @@ void CDistance::init() num_lhs=0; num_rhs=0; - SG_ADD(&lhs, "lhs", "Left hand side features.", ParameterProperties()); - SG_ADD(&rhs, "rhs", "Right hand side features.", ParameterProperties()); + SG_ADD(&lhs, "lhs", "Left hand side features."); + SG_ADD(&rhs, "rhs", "Right hand side features."); } template diff --git a/src/shogun/distance/EuclideanDistance.cpp b/src/shogun/distance/EuclideanDistance.cpp index 62c1481d084..66d1def35e8 100644 --- a/src/shogun/distance/EuclideanDistance.cpp +++ b/src/shogun/distance/EuclideanDistance.cpp @@ -133,9 +133,9 @@ void CEuclideanDistance::register_params() { disable_sqrt=false; reset_precompute(); - SG_ADD(&disable_sqrt, "disable_sqrt", "If sqrt shall not be applied.", ParameterProperties()); - SG_ADD(&m_rhs_squared_norms, "m_rhs_squared_norms", "Squared norms from features of right hand side", ParameterProperties()); - SG_ADD(&m_lhs_squared_norms, "m_lhs_squared_norms", "Squared norms from features of left hand side", ParameterProperties()); + SG_ADD(&disable_sqrt, "disable_sqrt", "If sqrt shall not be applied."); + SG_ADD(&m_rhs_squared_norms, "m_rhs_squared_norms", "Squared norms from features of right hand side"); + SG_ADD(&m_lhs_squared_norms, "m_lhs_squared_norms", "Squared norms from features of left hand side"); } float64_t CEuclideanDistance::distance_upper_bounded(int32_t idx_a, int32_t idx_b, float64_t upper_bound) diff --git a/src/shogun/distance/HammingWordDistance.cpp b/src/shogun/distance/HammingWordDistance.cpp index 3a47c6b1046..39cec8a5eb5 100644 --- a/src/shogun/distance/HammingWordDistance.cpp +++ b/src/shogun/distance/HammingWordDistance.cpp @@ -170,6 +170,5 @@ void CHammingWordDistance::init() { use_sign = false; SG_ADD( - &use_sign, "use_sign", "If signum(counts) is used instead of counts.", - ParameterProperties()); + &use_sign, "use_sign", "If signum(counts) is used instead of counts."); } diff --git a/src/shogun/distance/KernelDistance.cpp b/src/shogun/distance/KernelDistance.cpp index 12890a8f066..db838c79388 100644 --- a/src/shogun/distance/KernelDistance.cpp +++ b/src/shogun/distance/KernelDistance.cpp @@ -70,5 +70,5 @@ void CKernelDistance::init() width = 0.0; SG_ADD(&width, "width", "Width of RBF Kernel", ParameterProperties::HYPER); - SG_ADD(&kernel, "kernel", "Kernel.", ParameterProperties()); + SG_ADD(&kernel, "kernel", "Kernel."); } diff --git a/src/shogun/distance/MahalanobisDistance.cpp b/src/shogun/distance/MahalanobisDistance.cpp index 05474af18d0..c635b3f26db 100644 --- a/src/shogun/distance/MahalanobisDistance.cpp +++ 
b/src/shogun/distance/MahalanobisDistance.cpp @@ -111,12 +111,10 @@ void CMahalanobisDistance::init() use_mean=false; SG_ADD( - &disable_sqrt, "disable_sqrt", "If sqrt shall not be applied.", - ParameterProperties()); + &disable_sqrt, "disable_sqrt", "If sqrt shall not be applied."); SG_ADD( &use_mean, "use_mean", "If distance shall be computed between mean " "vector and vector from rhs or between lhs and " - "rhs.", - ParameterProperties()); + "rhs."); } diff --git a/src/shogun/distributions/Distribution.cpp b/src/shogun/distributions/Distribution.cpp index 57e5e4bbcbb..ffc72939151 100644 --- a/src/shogun/distributions/Distribution.cpp +++ b/src/shogun/distributions/Distribution.cpp @@ -12,7 +12,7 @@ using namespace shogun; CDistribution::CDistribution() : CSGObject(), features(NULL), pseudo_count(1e-10) { - SG_ADD(&features, "features", "features to be used", ParameterProperties()); + SG_ADD(&features, "features", "features to be used"); } CDistribution::~CDistribution() diff --git a/src/shogun/distributions/PositionalPWM.cpp b/src/shogun/distributions/PositionalPWM.cpp index 1bf4f40ee7f..839b0e639c4 100644 --- a/src/shogun/distributions/PositionalPWM.cpp +++ b/src/shogun/distributions/PositionalPWM.cpp @@ -124,11 +124,11 @@ void CPositionalPWM::compute_w(int32_t num_pos) void CPositionalPWM::register_params() { - SG_ADD(&m_poim, "poim", "POIM Scoring Matrix", ParameterProperties()); - SG_ADD(&m_w, "w", "Scoring Matrix", ParameterProperties()); - SG_ADD(&m_pwm, "pwm", "Positional Weight Matrix.", ParameterProperties()); - SG_ADD(&m_sigma, "sigma", "Standard Deviation.", ParameterProperties()); - SG_ADD(&m_mean, "mean", "Mean.", ParameterProperties()); + SG_ADD(&m_poim, "poim", "POIM Scoring Matrix"); + SG_ADD(&m_w, "w", "Scoring Matrix"); + SG_ADD(&m_pwm, "pwm", "Positional Weight Matrix."); + SG_ADD(&m_sigma, "sigma", "Standard Deviation."); + SG_ADD(&m_mean, "mean", "Mean."); } void CPositionalPWM::compute_scoring(int32_t max_degree) diff --git a/src/shogun/distributions/classical/GaussianDistribution.cpp b/src/shogun/distributions/classical/GaussianDistribution.cpp index fc0388e46b8..e8fc7de19c7 100644 --- a/src/shogun/distributions/classical/GaussianDistribution.cpp +++ b/src/shogun/distributions/classical/GaussianDistribution.cpp @@ -164,7 +164,7 @@ SGVector CGaussianDistribution::log_pdf_multiple(SGMatrix void CGaussianDistribution::init() { - SG_ADD(&m_mean, "mean", "Mean of the Gaussian.", ParameterProperties()); + SG_ADD(&m_mean, "mean", "Mean of the Gaussian."); SG_ADD(&m_L, "L", "Lower factor of covariance matrix, " - "depending on the factorization type.", ParameterProperties()); + "depending on the factorization type."); } diff --git a/src/shogun/distributions/classical/ProbabilityDistribution.cpp b/src/shogun/distributions/classical/ProbabilityDistribution.cpp index d3e6d672d53..7d62be37cbd 100644 --- a/src/shogun/distributions/classical/ProbabilityDistribution.cpp +++ b/src/shogun/distributions/classical/ProbabilityDistribution.cpp @@ -70,6 +70,5 @@ void CProbabilityDistribution::init() { m_dimension=0; - SG_ADD(&m_dimension, "dimension", "Dimension of distribution.", - ParameterProperties()); + SG_ADD(&m_dimension, "dimension", "Dimension of distribution."); } diff --git a/src/shogun/evaluation/CrossValidation.cpp b/src/shogun/evaluation/CrossValidation.cpp index 780cc1c68e8..2a0e143a637 100644 --- a/src/shogun/evaluation/CrossValidation.cpp +++ b/src/shogun/evaluation/CrossValidation.cpp @@ -51,7 +51,7 @@ void CCrossValidation::init() { m_num_runs = 1; - 
SG_ADD(&m_num_runs, "num_runs", "Number of repetitions", ParameterProperties()); + SG_ADD(&m_num_runs, "num_runs", "Number of repetitions"); } CEvaluationResult* CCrossValidation::evaluate_impl() diff --git a/src/shogun/evaluation/CrossValidation.h b/src/shogun/evaluation/CrossValidation.h index 5dbf948308e..e3953bbd32a 100644 --- a/src/shogun/evaluation/CrossValidation.h +++ b/src/shogun/evaluation/CrossValidation.h @@ -29,11 +29,10 @@ namespace shogun public: CCrossValidationResult() { - SG_ADD(&mean, "mean", "Mean of results", ParameterProperties()); + SG_ADD(&mean, "mean", "Mean of results"); SG_ADD( &std_dev, "std_dev", - "Standard deviation of cross-validation folds", - ParameterProperties()); + "Standard deviation of cross-validation folds"); mean = 0; std_dev = 0; diff --git a/src/shogun/evaluation/MachineEvaluation.cpp b/src/shogun/evaluation/MachineEvaluation.cpp index 2eed61c6a81..c76e4534e79 100644 --- a/src/shogun/evaluation/MachineEvaluation.cpp +++ b/src/shogun/evaluation/MachineEvaluation.cpp @@ -82,22 +82,17 @@ void CMachineEvaluation::init() m_cancel_computation = false; m_pause_computation_flag = false; - SG_ADD((CSGObject**)&m_machine, "machine", "Used learning machine", - ParameterProperties()); - SG_ADD((CSGObject**)&m_features, "features", "Used features", - ParameterProperties()); - SG_ADD((CSGObject**)&m_labels, "labels", "Used labels", - ParameterProperties()); + SG_ADD((CSGObject**)&m_machine, "machine", "Used learning machine"); + SG_ADD((CSGObject**)&m_features, "features", "Used features"); + SG_ADD((CSGObject**)&m_labels, "labels", "Used labels"); SG_ADD((CSGObject**)&m_splitting_strategy, "splitting_strategy", - "Used splitting strategy", ParameterProperties()); + "Used splitting strategy"); SG_ADD((CSGObject**)&m_evaluation_criterion, "evaluation_criterion", - "Used evaluation criterion", ParameterProperties()); + "Used evaluation criterion"); SG_ADD(&m_do_unlock, "do_unlock", - "Whether machine should be unlocked after evaluation", - ParameterProperties()); + "Whether machine should be unlocked after evaluation"); SG_ADD(&m_autolock, "m_autolock", - "Whether machine should automatically try to be locked before ", - ParameterProperties()); + "Whether machine should automatically try to be locked before "); } diff --git a/src/shogun/evaluation/SigmoidCalibration.cpp b/src/shogun/evaluation/SigmoidCalibration.cpp index f4f2cadce8e..e0da4abca09 100644 --- a/src/shogun/evaluation/SigmoidCalibration.cpp +++ b/src/shogun/evaluation/SigmoidCalibration.cpp @@ -29,21 +29,18 @@ void CSigmoidCalibration::init() SG_ADD( &m_sigmoid_as, "m_sigmoid_as", - "Vector of paramter A of sigmoid for each class.", ParameterProperties()); + "Vector of paramter A of sigmoid for each class."); SG_ADD( &m_sigmoid_bs, "m_sigmoid_bs", - "Vector of paramter B of sigmoid for each class.", ParameterProperties()); + "Vector of paramter B of sigmoid for each class."); SG_ADD( - &m_maxiter, "m_maxiter", "Maximum number of iteration for search.", - ParameterProperties()); + &m_maxiter, "m_maxiter", "Maximum number of iteration for search."); SG_ADD( - &m_minstep, "m_minstep", "Minimum step taken in line search.", - ParameterProperties()); + &m_minstep, "m_minstep", "Minimum step taken in line search."); SG_ADD( &m_sigma, "m_sigma", - "Positive parameter to ensure positive semi-definite Hessian.", - ParameterProperties()); - SG_ADD(&m_epsilon, "m_epsilon", "Stopping criteria.", ParameterProperties()); + "Positive parameter to ensure positive semi-definite Hessian."); + SG_ADD(&m_epsilon, 
"m_epsilon", "Stopping criteria."); } void CSigmoidCalibration::set_maxiter(index_t maxiter) diff --git a/src/shogun/evaluation/SplittingStrategy.cpp b/src/shogun/evaluation/SplittingStrategy.cpp index 35804729e35..304e4caec8d 100644 --- a/src/shogun/evaluation/SplittingStrategy.cpp +++ b/src/shogun/evaluation/SplittingStrategy.cpp @@ -56,16 +56,13 @@ void CSplittingStrategy::init() m_is_filled=false; m_num_subsets=0; - SG_ADD(&m_labels, "labels", "Labels for subsets", ParameterProperties()); + SG_ADD(&m_labels, "labels", "Labels for subsets"); SG_ADD( - &m_subset_indices, "subset_indices", "Set of sets of subset indices", - ParameterProperties()); + &m_subset_indices, "subset_indices", "Set of sets of subset indices"); SG_ADD( - &m_is_filled, "is_filled", "Whether ther are index sets", - ParameterProperties()); + &m_is_filled, "is_filled", "Whether ther are index sets"); SG_ADD( - &m_num_subsets, "num_subsets", "Number of index sets", - ParameterProperties()); + &m_num_subsets, "num_subsets", "Number of index sets"); } CSplittingStrategy::~CSplittingStrategy() diff --git a/src/shogun/features/Alphabet.cpp b/src/shogun/features/Alphabet.cpp index 7f7b6748c31..958c6365f4b 100644 --- a/src/shogun/features/Alphabet.cpp +++ b/src/shogun/features/Alphabet.cpp @@ -727,10 +727,9 @@ void CAlphabet::init() memset(histogram, 0, sizeof (histogram)); SG_ADD( - (machine_int_t*)&alphabet, "alphabet", "Alphabet enum.", - ParameterProperties()); - SG_ADD(&num_symbols, "num_symbols", "Number of symbols.", ParameterProperties()); - SG_ADD(&num_bits, "num_bits", "Number of bits.", ParameterProperties()); + (machine_int_t*)&alphabet, "alphabet", "Alphabet enum."); + SG_ADD(&num_symbols, "num_symbols", "Number of symbols."); + SG_ADD(&num_bits, "num_bits", "Number of bits."); /* We don't need to serialize the mapping tables / they can be computed * after de-serializing. Lets not serialize the histogram for now. 
Doesn't diff --git a/src/shogun/features/CombinedDotFeatures.cpp b/src/shogun/features/CombinedDotFeatures.cpp index 762355c9b09..3cb486585c2 100644 --- a/src/shogun/features/CombinedDotFeatures.cpp +++ b/src/shogun/features/CombinedDotFeatures.cpp @@ -342,11 +342,9 @@ void CCombinedDotFeatures::set_subfeature_weights(SGVector weights) void CCombinedDotFeatures::init() { SG_ADD( - &num_dimensions, "num_dimensions", "Total number of dimensions.", - ParameterProperties()); + &num_dimensions, "num_dimensions", "Total number of dimensions."); SG_ADD( - &num_vectors, "num_vectors", "Total number of vectors.", - ParameterProperties()); - SG_ADD(&feature_array, "feature_array", "Feature array.", ParameterProperties()); + &num_vectors, "num_vectors", "Total number of vectors."); + SG_ADD(&feature_array, "feature_array", "Feature array."); } diff --git a/src/shogun/features/CombinedFeatures.cpp b/src/shogun/features/CombinedFeatures.cpp index c11b76738d6..59d79b948f8 100644 --- a/src/shogun/features/CombinedFeatures.cpp +++ b/src/shogun/features/CombinedFeatures.cpp @@ -166,8 +166,8 @@ int32_t CCombinedFeatures::get_num_feature_obj() const void CCombinedFeatures::init() { - SG_ADD(&num_vec, "num_vec", "Number of vectors.", ParameterProperties()); - SG_ADD(&feature_array, "array", "Feature array.", ParameterProperties()); + SG_ADD(&num_vec, "num_vec", "Number of vectors."); + SG_ADD(&feature_array, "array", "Feature array."); } CFeatures* CCombinedFeatures::create_merged_copy(CFeatures* other) const diff --git a/src/shogun/features/DenseFeatures.cpp b/src/shogun/features/DenseFeatures.cpp index f0fec9fe8ed..550f4292f3a 100644 --- a/src/shogun/features/DenseFeatures.cpp +++ b/src/shogun/features/DenseFeatures.cpp @@ -603,10 +603,10 @@ template void CDenseFeatures::init() set_generic(); /* not store number of vectors in subset */ - SG_ADD(&num_vectors, "num_vectors", "Number of vectors.", ParameterProperties()); - SG_ADD(&num_features, "num_features", "Number of features.", ParameterProperties()); + SG_ADD(&num_vectors, "num_vectors", "Number of vectors."); + SG_ADD(&num_features, "num_features", "Number of features."); SG_ADD(&feature_matrix, "feature_matrix", - "Matrix of feature vectors / 1 vector per column.", ParameterProperties()); + "Matrix of feature vectors / 1 vector per column."); } #define GET_FEATURE_TYPE(f_type, sg_type) \ diff --git a/src/shogun/features/DenseSubSamplesFeatures.cpp b/src/shogun/features/DenseSubSamplesFeatures.cpp index af026789c7e..8d706e3d304 100644 --- a/src/shogun/features/DenseSubSamplesFeatures.cpp +++ b/src/shogun/features/DenseSubSamplesFeatures.cpp @@ -68,8 +68,8 @@ template void CDenseSubSamplesFeatures::init() set_generic(); m_fea=NULL; m_idx=SGVector(); - SG_ADD(&m_idx, "idx", "idx", ParameterProperties()); - SG_ADD((CSGObject **)&m_fea, "fea", "fea", ParameterProperties()); + SG_ADD(&m_idx, "idx", "idx"); + SG_ADD((CSGObject **)&m_fea, "fea", "fea"); } template CFeatures* CDenseSubSamplesFeatures::duplicate() const diff --git a/src/shogun/features/DotFeatures.cpp b/src/shogun/features/DotFeatures.cpp index 6f45808fcbf..2c6dd47ffa2 100644 --- a/src/shogun/features/DotFeatures.cpp +++ b/src/shogun/features/DotFeatures.cpp @@ -342,5 +342,5 @@ void CDotFeatures::init() set_property(FP_DOT); SG_ADD( &combined_weight, "combined_weight", - "Feature weighting in combined dot features.", ParameterProperties()); + "Feature weighting in combined dot features."); } diff --git a/src/shogun/features/DummyFeatures.cpp b/src/shogun/features/DummyFeatures.cpp index 
2a0263acd2a..cbcb467b71b 100644 --- a/src/shogun/features/DummyFeatures.cpp +++ b/src/shogun/features/DummyFeatures.cpp @@ -47,6 +47,5 @@ EFeatureClass CDummyFeatures::get_feature_class() const void CDummyFeatures::init() { SG_ADD( - &num_vectors, "num_vectors", "Number of feature vectors.", - ParameterProperties()); + &num_vectors, "num_vectors", "Number of feature vectors."); } diff --git a/src/shogun/features/FKFeatures.cpp b/src/shogun/features/FKFeatures.cpp index 61570fa91e1..4e336e6ec4a 100644 --- a/src/shogun/features/FKFeatures.cpp +++ b/src/shogun/features/FKFeatures.cpp @@ -255,5 +255,5 @@ void CFKFeatures::init() //TODO serialize HMMs //m_parameters->add((CSGObject**) &pos, "pos", "HMM for positive class."); //m_parameters->add((CSGObject**) &neg, "neg", "HMM for negative class."); - SG_ADD(&weight_a, "weight_a", "Class prior.", ParameterProperties()); + SG_ADD(&weight_a, "weight_a", "Class prior."); } diff --git a/src/shogun/features/FactorGraphFeatures.cpp b/src/shogun/features/FactorGraphFeatures.cpp index 072e07e6b3a..08381b65318 100644 --- a/src/shogun/features/FactorGraphFeatures.cpp +++ b/src/shogun/features/FactorGraphFeatures.cpp @@ -72,8 +72,7 @@ CFactorGraph* CFactorGraphFeatures::get_sample(index_t idx) void CFactorGraphFeatures::init() { - SG_ADD((CSGObject**) &m_samples, "samples", "Array of examples", - ParameterProperties()); + SG_ADD((CSGObject**) &m_samples, "samples", "Array of examples"); } CFactorGraphFeatures* CFactorGraphFeatures::obtain_from_generic(CFeatures* base_feats) diff --git a/src/shogun/features/Features.cpp b/src/shogun/features/Features.cpp index aa1ed5a2d5b..9d2a57bccc6 100644 --- a/src/shogun/features/Features.cpp +++ b/src/shogun/features/Features.cpp @@ -52,14 +52,12 @@ CFeatures::~CFeatures() void CFeatures::init() { - SG_ADD(&properties, "properties", "Feature properties", ParameterProperties()); - SG_ADD(&cache_size, "cache_size", "Size of cache in MB", ParameterProperties()); + SG_ADD(&properties, "properties", "Feature properties"); + SG_ADD(&cache_size, "cache_size", "Size of cache in MB"); - SG_ADD((CSGObject**) &preproc, "preproc", "Array of preprocessors.", - ParameterProperties()); + SG_ADD((CSGObject**) &preproc, "preproc", "Array of preprocessors."); - SG_ADD((CSGObject**)&m_subset_stack, "subset_stack", "Stack of subsets", - ParameterProperties()); + SG_ADD((CSGObject**)&m_subset_stack, "subset_stack", "Stack of subsets"); m_subset_stack=new CSubsetStack(); SG_REF(m_subset_stack); diff --git a/src/shogun/features/IndexFeatures.cpp b/src/shogun/features/IndexFeatures.cpp index aef51a81d06..4f0da59271d 100644 --- a/src/shogun/features/IndexFeatures.cpp +++ b/src/shogun/features/IndexFeatures.cpp @@ -82,5 +82,5 @@ void CIndexFeatures::init() { num_vectors = 0; SG_ADD(&m_feature_index, "m_feature_index", - "Vector of feature index.", ParameterProperties()); + "Vector of feature index."); } diff --git a/src/shogun/features/LBPPyrDotFeatures.cpp b/src/shogun/features/LBPPyrDotFeatures.cpp index 3501105589a..6d5c6c68fe0 100644 --- a/src/shogun/features/LBPPyrDotFeatures.cpp +++ b/src/shogun/features/LBPPyrDotFeatures.cpp @@ -34,10 +34,10 @@ void CLBPPyrDotFeatures::init(CDenseFeatures* image_set, int32_t image image_width = image_w; image_height = image_h; - SG_ADD((CSGObject**) &images, "images", "Set of images", ParameterProperties()); - SG_ADD(&image_width, "image_width", "The image width", ParameterProperties()); - SG_ADD(&image_height, "image_height", "The image height", ParameterProperties()); - SG_ADD(&vec_nDim, "vec_nDim", 
"The dimension of the pyr", ParameterProperties()); + SG_ADD((CSGObject**) &images, "images", "Set of images"); + SG_ADD(&image_width, "image_width", "The image width"); + SG_ADD(&image_height, "image_height", "The image height"); + SG_ADD(&vec_nDim, "vec_nDim", "The dimension of the pyr"); } CLBPPyrDotFeatures::~CLBPPyrDotFeatures() diff --git a/src/shogun/features/LatentFeatures.cpp b/src/shogun/features/LatentFeatures.cpp index b065db63a0d..223b034af95 100644 --- a/src/shogun/features/LatentFeatures.cpp +++ b/src/shogun/features/LatentFeatures.cpp @@ -76,8 +76,7 @@ CData* CLatentFeatures::get_sample(index_t idx) void CLatentFeatures::init() { - SG_ADD((CSGObject**) &m_samples, "samples", "Array of examples", - ParameterProperties()); + SG_ADD((CSGObject**) &m_samples, "samples", "Array of examples"); } CLatentFeatures* CLatentFeatures::obtain_from_generic(CFeatures* base_feats) diff --git a/src/shogun/features/MatrixFeatures.cpp b/src/shogun/features/MatrixFeatures.cpp index 577d8ff1d06..149ce2ab5bd 100644 --- a/src/shogun/features/MatrixFeatures.cpp +++ b/src/shogun/features/MatrixFeatures.cpp @@ -143,12 +143,11 @@ template< class ST > void CMatrixFeatures< ST >::set_features( template< class ST > void CMatrixFeatures< ST >::init() { - SG_ADD(&m_num_vectors, "m_num_vectors", "Number of feature vectors", - ParameterProperties()); + SG_ADD(&m_num_vectors, "m_num_vectors", "Number of feature vectors"); SG_ADD(&m_num_features, "m_num_features", - "Number of features per vector (optional)", ParameterProperties()); + "Number of features per vector (optional)"); //TODO add SG_ADD for SGMatrixList - //SG_ADD(&m_features, "m_features", "Matrix features", ParameterProperties()); + //SG_ADD(&m_features, "m_features", "Matrix features"); m_num_vectors = 0; m_num_features = 0; diff --git a/src/shogun/features/PolyFeatures.cpp b/src/shogun/features/PolyFeatures.cpp index 03c632959dd..561ae9dce1d 100644 --- a/src/shogun/features/PolyFeatures.cpp +++ b/src/shogun/features/PolyFeatures.cpp @@ -377,18 +377,16 @@ CFeatures* CPolyFeatures::duplicate() const void CPolyFeatures::register_parameters() { SG_ADD( - (CSGObject**)&m_feat, "features", "Features in original space.", - ParameterProperties()); + (CSGObject**)&m_feat, "features", "Features in original space."); SG_ADD( &m_degree, "degree", "Degree of the polynomial kernel.", ParameterProperties::HYPER); - SG_ADD(&m_normalize, "normalize", "Normalize?", ParameterProperties()); + SG_ADD(&m_normalize, "normalize", "Normalize?"); SG_ADD( &m_input_dimensions, "input_dimensions", - "Dimensions of the input space.", ParameterProperties()); + "Dimensions of the input space."); SG_ADD( &m_output_dimensions, "output_dimensions", - "Dimensions of the feature space of the polynomial kernel.", - ParameterProperties()); + "Dimensions of the feature space of the polynomial kernel."); multi_index_length=m_output_dimensions*m_degree; m_parameters->add_vector( diff --git a/src/shogun/features/RandomFourierDotFeatures.cpp b/src/shogun/features/RandomFourierDotFeatures.cpp index e91b1f59335..f703273c22c 100644 --- a/src/shogun/features/RandomFourierDotFeatures.cpp +++ b/src/shogun/features/RandomFourierDotFeatures.cpp @@ -56,11 +56,10 @@ void CRandomFourierDotFeatures::init(KernelName kernel_name, SGVector constant = num_samples > 0 ? 
std::sqrt(2.0 / num_samples) : 1; SG_ADD( &kernel_params, "kernel_params", - "The parameters of the kernel to approximate", ParameterProperties()); + "The parameters of the kernel to approximate"); SG_ADD((machine_int_t* ) &kernel, "kernel", - "The kernel to approximate", ParameterProperties()); - SG_ADD(&constant, "constant", "A constant needed", - ParameterProperties()); + "The kernel to approximate"); + SG_ADD(&constant, "constant", "A constant needed"); } CFeatures* CRandomFourierDotFeatures::duplicate() const diff --git a/src/shogun/features/RandomKitchenSinksDotFeatures.cpp b/src/shogun/features/RandomKitchenSinksDotFeatures.cpp index 3b5b2401a0f..9770feb2e69 100644 --- a/src/shogun/features/RandomKitchenSinksDotFeatures.cpp +++ b/src/shogun/features/RandomKitchenSinksDotFeatures.cpp @@ -73,11 +73,9 @@ void CRandomKitchenSinksDotFeatures::init(CDotFeatures* dataset, num_samples = K; - SG_ADD((CSGObject** ) &feats, "feats", "Features to work on", - ParameterProperties()); + SG_ADD((CSGObject** ) &feats, "feats", "Features to work on"); SG_ADD( - &random_coeff, "random_coeff", "Random function parameters", - ParameterProperties()); + &random_coeff, "random_coeff", "Random function parameters"); } int32_t CRandomKitchenSinksDotFeatures::get_dim_feature_space() const diff --git a/src/shogun/features/SparsePolyFeatures.cpp b/src/shogun/features/SparsePolyFeatures.cpp index bda68e6d5b9..1187286effe 100644 --- a/src/shogun/features/SparsePolyFeatures.cpp +++ b/src/shogun/features/SparsePolyFeatures.cpp @@ -250,17 +250,16 @@ CFeatures* CSparsePolyFeatures::duplicate() const void CSparsePolyFeatures::init() { SG_ADD( - &m_feat, "features", "Features in original space.", ParameterProperties()); + &m_feat, "features", "Features in original space."); SG_ADD( &m_degree, "degree", "Degree of the polynomial kernel.", ParameterProperties::HYPER); - SG_ADD(&m_normalize, "normalize", "Normalize", ParameterProperties()); + SG_ADD(&m_normalize, "normalize", "Normalize"); SG_ADD( &m_input_dimensions, "input_dimensions", - "Dimensions of the input space.", ParameterProperties()); + "Dimensions of the input space."); SG_ADD( &m_output_dimensions, "output_dimensions", - "Dimensions of the feature space of the polynomial kernel.", - ParameterProperties()); + "Dimensions of the feature space of the polynomial kernel."); m_normalization_values_len = get_num_vectors(); m_parameters->add_vector(&m_normalization_values, &m_normalization_values_len, @@ -269,8 +268,7 @@ void CSparsePolyFeatures::init() "m_normalization_values", &m_normalization_values, &m_normalization_values_len); - SG_ADD(&mask, "mask", "Mask.", ParameterProperties()); + SG_ADD(&mask, "mask", "Mask."); SG_ADD( - &m_hash_bits, "m_hash_bits", "Number of bits in hash", - ParameterProperties()); + &m_hash_bits, "m_hash_bits", "Number of bits in hash"); } diff --git a/src/shogun/features/StringFeatures.cpp b/src/shogun/features/StringFeatures.cpp index 994f6c9cc47..579a8517f38 100644 --- a/src/shogun/features/StringFeatures.cpp +++ b/src/shogun/features/StringFeatures.cpp @@ -1705,7 +1705,7 @@ template void CStringFeatures::init() num_symbols=0.0; original_num_symbols=0; - SG_ADD(&alphabet, "alphabet", "Alphabet used.", ParameterProperties()); + SG_ADD(&alphabet, "alphabet", "Alphabet used."); m_parameters->add_vector(&features, &num_vectors, "features", "This contains the array of features."); @@ -1718,20 +1718,16 @@ template void CStringFeatures::init() watch_param("single_string", &single_string, &length_of_single_string); SG_ADD( - 
&max_string_length, "max_string_length", "Length of longest string.", - ParameterProperties()); + &max_string_length, "max_string_length", "Length of longest string."); SG_ADD( - &num_symbols, "num_symbols", "Number of used symbols.", - ParameterProperties()); + &num_symbols, "num_symbols", "Number of used symbols."); SG_ADD( &original_num_symbols, "original_num_symbols", - "Original number of used symbols.", ParameterProperties()); + "Original number of used symbols."); SG_ADD( - &order, "order", "Order used in higher order mapping.", - ParameterProperties()); + &order, "order", "Order used in higher order mapping."); SG_ADD( - &preprocess_on_get, "preprocess_on_get", "Preprocess on-the-fly?", - ParameterProperties()); + &preprocess_on_get, "preprocess_on_get", "Preprocess on-the-fly?"); m_parameters->add_vector(&symbol_mask_table, &symbol_mask_table_len, "mask_table", "Symbol mask table - using in higher order mapping"); watch_param("mask_table", &symbol_mask_table, &symbol_mask_table_len); diff --git a/src/shogun/features/Subset.cpp b/src/shogun/features/Subset.cpp index 999272d1268..bacbaad739c 100644 --- a/src/shogun/features/Subset.cpp +++ b/src/shogun/features/Subset.cpp @@ -27,6 +27,5 @@ CSubset::~CSubset() void CSubset::init() { - SG_ADD(&m_subset_idx, "subset", "Vector of subset indices", - ParameterProperties()); + SG_ADD(&m_subset_idx, "subset", "Vector of subset indices"); } diff --git a/src/shogun/features/SubsetStack.cpp b/src/shogun/features/SubsetStack.cpp index 5ce4e88ae06..fe900f0d679 100644 --- a/src/shogun/features/SubsetStack.cpp +++ b/src/shogun/features/SubsetStack.cpp @@ -72,9 +72,9 @@ void CSubsetStack::remove_all_subsets() void CSubsetStack::init() { SG_ADD((CSGObject**)&m_active_subset, "active_subset", - "Currently active subset", ParameterProperties()); + "Currently active subset"); SG_ADD((CSGObject**)&m_active_subsets_stack, "active_subsets_stack", - "Stack of active subsets", ParameterProperties()); + "Stack of active subsets"); m_active_subset=NULL; m_active_subsets_stack=new CDynamicObjectArray(); diff --git a/src/shogun/features/TOPFeatures.cpp b/src/shogun/features/TOPFeatures.cpp index cad929a07f6..ac940e33769 100644 --- a/src/shogun/features/TOPFeatures.cpp +++ b/src/shogun/features/TOPFeatures.cpp @@ -368,9 +368,7 @@ void CTOPFeatures::init() //m_parameters->add((CSGObject**) &pos, "pos", "HMM for positive class."); //m_parameters->add((CSGObject**) &neg, "neg", "HMM for negative class."); SG_ADD( - &neglinear, "neglinear", "If negative HMM is a LinearHMM", - ParameterProperties()); + &neglinear, "neglinear", "If negative HMM is a LinearHMM"); SG_ADD( - &poslinear, "poslinear", "If positive HMM is a LinearHMM", - ParameterProperties()); + &poslinear, "poslinear", "If positive HMM is a LinearHMM"); } diff --git a/src/shogun/features/hashed/HashedDenseFeatures.cpp b/src/shogun/features/hashed/HashedDenseFeatures.cpp index 0d7fcd71c08..1c1cb46323c 100644 --- a/src/shogun/features/hashed/HashedDenseFeatures.cpp +++ b/src/shogun/features/hashed/HashedDenseFeatures.cpp @@ -66,13 +66,10 @@ void CHashedDenseFeatures::init(CDenseFeatures* feats, int32_t d, bool u use_quadratic = use_quadr; keep_linear_terms = keep_lin_terms; - SG_ADD(&use_quadratic, "use_quadratic", "Whether to use quadratic features", - ParameterProperties()); - SG_ADD(&keep_linear_terms, "keep_linear_terms", "Whether to keep the linear terms or not", - ParameterProperties()); - SG_ADD(&dim, "dim", "Dimension of new feature space", ParameterProperties()); - SG_ADD((CSGObject** ) 
&dense_feats, "dense_feats", "Dense features to work on", - ParameterProperties()); + SG_ADD(&use_quadratic, "use_quadratic", "Whether to use quadratic features"); + SG_ADD(&keep_linear_terms, "keep_linear_terms", "Whether to keep the linear terms or not"); + SG_ADD(&dim, "dim", "Dimension of new feature space"); + SG_ADD((CSGObject** ) &dense_feats, "dense_feats", "Dense features to work on"); set_generic(); } diff --git a/src/shogun/features/hashed/HashedDocDotFeatures.cpp b/src/shogun/features/hashed/HashedDocDotFeatures.cpp index c77fdaff798..876fd55fd2d 100644 --- a/src/shogun/features/hashed/HashedDocDotFeatures.cpp +++ b/src/shogun/features/hashed/HashedDocDotFeatures.cpp @@ -51,17 +51,12 @@ void CHashedDocDotFeatures::init(int32_t hash_bits, CStringFeatures* docs, ((CDelimiterTokenizer* )tokenizer)->init_for_whitespace(); } - SG_ADD(&num_bits, "num_bits", "Number of bits of hash", ParameterProperties()); - SG_ADD(&ngrams, "ngrams", "Number of tokens to combine for quadratic feature support", - ParameterProperties()); - SG_ADD(&tokens_to_skip, "tokens_to_skip", "Number of tokens to skip when combining features", - ParameterProperties()); - SG_ADD((CSGObject**) &doc_collection, "doc_collection", "Document collection", - ParameterProperties()); - SG_ADD((CSGObject**) &tokenizer, "tokenizer", "Document tokenizer", - ParameterProperties()); - SG_ADD(&should_normalize, "should_normalize", "Normalize or not the dot products", - ParameterProperties()); + SG_ADD(&num_bits, "num_bits", "Number of bits of hash"); + SG_ADD(&ngrams, "ngrams", "Number of tokens to combine for quadratic feature support"); + SG_ADD(&tokens_to_skip, "tokens_to_skip", "Number of tokens to skip when combining features"); + SG_ADD((CSGObject**) &doc_collection, "doc_collection", "Document collection"); + SG_ADD((CSGObject**) &tokenizer, "tokenizer", "Document tokenizer"); + SG_ADD(&should_normalize, "should_normalize", "Normalize or not the dot products"); SG_REF(doc_collection); SG_REF(tokenizer); diff --git a/src/shogun/features/hashed/HashedSparseFeatures.cpp b/src/shogun/features/hashed/HashedSparseFeatures.cpp index 47b5c216ca8..6a772c436f9 100644 --- a/src/shogun/features/hashed/HashedSparseFeatures.cpp +++ b/src/shogun/features/hashed/HashedSparseFeatures.cpp @@ -58,13 +58,10 @@ void CHashedSparseFeatures::init(CSparseFeatures* feats, int32_t d, bool sparse_feats = feats; SG_REF(sparse_feats); - SG_ADD(&use_quadratic, "use_quadratic", "Whether to use quadratic features", - ParameterProperties()); - SG_ADD(&keep_linear_terms, "keep_linear_terms", "Whether to keep the linear terms or not", - ParameterProperties()); - SG_ADD(&dim, "dim", "Dimension of new feature space", ParameterProperties()); - SG_ADD((CSGObject** ) &sparse_feats, "sparse_feats", "Sparse features to work on", - ParameterProperties()); + SG_ADD(&use_quadratic, "use_quadratic", "Whether to use quadratic features"); + SG_ADD(&keep_linear_terms, "keep_linear_terms", "Whether to keep the linear terms or not"); + SG_ADD(&dim, "dim", "Dimension of new feature space"); + SG_ADD((CSGObject** ) &sparse_feats, "sparse_feats", "Sparse features to work on"); set_generic(); } diff --git a/src/shogun/features/streaming/StreamingHashedDenseFeatures.cpp b/src/shogun/features/streaming/StreamingHashedDenseFeatures.cpp index 5ab08a45921..f8a9bcec61c 100644 --- a/src/shogun/features/streaming/StreamingHashedDenseFeatures.cpp +++ b/src/shogun/features/streaming/StreamingHashedDenseFeatures.cpp @@ -54,11 +54,9 @@ void 
CStreamingHashedDenseFeatures::init(CStreamingFile* file, bool is_label use_quadratic = use_quadr; keep_linear_terms = keep_lin_terms; - SG_ADD(&use_quadratic, "use_quadratic", "Whether to use quadratic features", - ParameterProperties()); - SG_ADD(&keep_linear_terms, "keep_linear_terms", "Whether to keep the linear terms or not", - ParameterProperties()); - SG_ADD(&dim, "dim", "Size of target dimension", ParameterProperties()); + SG_ADD(&use_quadratic, "use_quadratic", "Whether to use quadratic features"); + SG_ADD(&keep_linear_terms, "keep_linear_terms", "Whether to keep the linear terms or not"); + SG_ADD(&dim, "dim", "Size of target dimension"); has_labels = is_labelled; if (file) diff --git a/src/shogun/features/streaming/StreamingHashedDocDotFeatures.cpp b/src/shogun/features/streaming/StreamingHashedDocDotFeatures.cpp index 08650c329b1..e7f5e706e97 100644 --- a/src/shogun/features/streaming/StreamingHashedDocDotFeatures.cpp +++ b/src/shogun/features/streaming/StreamingHashedDocDotFeatures.cpp @@ -50,10 +50,9 @@ void CStreamingHashedDocDotFeatures::init(CStreamingFile* file, bool is_labelled else converter=NULL; - SG_ADD(&num_bits, "num_bits", "Number of bits for hash", ParameterProperties()); - SG_ADD((CSGObject** ) &tokenizer, "tokenizer", "The tokenizer used on the documents", - ParameterProperties()); - SG_ADD((CSGObject** ) &converter, "converter", "Converter", ParameterProperties()); + SG_ADD(&num_bits, "num_bits", "Number of bits for hash"); + SG_ADD((CSGObject** ) &tokenizer, "tokenizer", "The tokenizer used on the documents"); + SG_ADD((CSGObject** ) &converter, "converter", "Converter"); has_labels = is_labelled; if (file) diff --git a/src/shogun/features/streaming/StreamingHashedSparseFeatures.cpp b/src/shogun/features/streaming/StreamingHashedSparseFeatures.cpp index c077b294dfe..af419db792b 100644 --- a/src/shogun/features/streaming/StreamingHashedSparseFeatures.cpp +++ b/src/shogun/features/streaming/StreamingHashedSparseFeatures.cpp @@ -52,15 +52,13 @@ void CStreamingHashedSparseFeatures::init(CStreamingFile* file, bool is_labe int32_t size, int32_t d, bool use_quadr, bool keep_lin_terms) { dim = d; - SG_ADD(&dim, "dim", "Size of target dimension", ParameterProperties()); + SG_ADD(&dim, "dim", "Size of target dimension"); use_quadratic = use_quadr; keep_linear_terms = keep_lin_terms; - SG_ADD(&use_quadratic, "use_quadratic", "Whether to use quadratic features", - ParameterProperties()); - SG_ADD(&keep_linear_terms, "keep_linear_terms", "Whether to keep the linear terms or not", - ParameterProperties()); + SG_ADD(&use_quadratic, "use_quadratic", "Whether to use quadratic features"); + SG_ADD(&keep_linear_terms, "keep_linear_terms", "Whether to keep the linear terms or not"); has_labels = is_labelled; if (file) diff --git a/src/shogun/features/streaming/generators/GaussianBlobsDataGenerator.cpp b/src/shogun/features/streaming/generators/GaussianBlobsDataGenerator.cpp index e9fd13572d1..ff722167e10 100644 --- a/src/shogun/features/streaming/generators/GaussianBlobsDataGenerator.cpp +++ b/src/shogun/features/streaming/generators/GaussianBlobsDataGenerator.cpp @@ -54,16 +54,11 @@ void CGaussianBlobsDataGenerator::set_blobs_model(index_t sqrt_num_blobs, void CGaussianBlobsDataGenerator::init() { - SG_ADD(&m_sqrt_num_blobs, "sqrt_num_blobs", "Number of Blobs per row", - ParameterProperties()); - SG_ADD(&m_distance, "distance", "Distance between blobs", - ParameterProperties()); - SG_ADD(&m_stretch, "stretch", "Stretch of blobs", - ParameterProperties()); - SG_ADD(&m_angle, 
"angle", "Angle of Blobs", - ParameterProperties()); - SG_ADD(&m_cholesky, "cholesky", "Cholesky factor of covariance matrix", - ParameterProperties()); + SG_ADD(&m_sqrt_num_blobs, "sqrt_num_blobs", "Number of Blobs per row"); + SG_ADD(&m_distance, "distance", "Distance between blobs"); + SG_ADD(&m_stretch, "stretch", "Stretch of blobs"); + SG_ADD(&m_angle, "angle", "Angle of Blobs"); + SG_ADD(&m_cholesky, "cholesky", "Cholesky factor of covariance matrix"); m_sqrt_num_blobs=1; m_distance=0; diff --git a/src/shogun/features/streaming/generators/MeanShiftDataGenerator.cpp b/src/shogun/features/streaming/generators/MeanShiftDataGenerator.cpp index 657addbc757..4c6262e0372 100644 --- a/src/shogun/features/streaming/generators/MeanShiftDataGenerator.cpp +++ b/src/shogun/features/streaming/generators/MeanShiftDataGenerator.cpp @@ -41,11 +41,9 @@ void CMeanShiftDataGenerator::set_mean_shift_model(float64_t mean_shift, void CMeanShiftDataGenerator::init() { - SG_ADD(&m_dimension, "dimension", "Dimension of data", ParameterProperties()); - SG_ADD(&m_mean_shift, "mean_shift", "Mean shift in one dimension", - ParameterProperties()); - SG_ADD(&m_dimension_shift, "m_dimension_shift", "Dimension of mean shift", - ParameterProperties()); + SG_ADD(&m_dimension, "dimension", "Dimension of data"); + SG_ADD(&m_mean_shift, "mean_shift", "Mean shift in one dimension"); + SG_ADD(&m_dimension_shift, "m_dimension_shift", "Dimension of mean shift"); m_dimension=0; m_mean_shift=0; diff --git a/src/shogun/io/Serializable.h b/src/shogun/io/Serializable.h index 916a4cc6e9d..210cdded11a 100644 --- a/src/shogun/io/Serializable.h +++ b/src/shogun/io/Serializable.h @@ -93,7 +93,7 @@ template class CSerializable: public CSGObject { set_generic::value_type>(); m_value = 0; - SG_ADD(&m_value, "value", "Serialized value", ParameterProperties()); + SG_ADD(&m_value, "value", "Serialized value"); } protected: diff --git a/src/shogun/io/UAIFile.cpp b/src/shogun/io/UAIFile.cpp index 3dd6ac19a44..f6735608069 100644 --- a/src/shogun/io/UAIFile.cpp +++ b/src/shogun/io/UAIFile.cpp @@ -52,21 +52,21 @@ CUAIFile::~CUAIFile() void CUAIFile::init() { - SG_ADD((CSGObject**)&m_line_reader, "line_reader", "line reader used to read lines from file", ParameterProperties()); - SG_ADD((CSGObject**)&m_parser, "parser", "parser used to parse file", ParameterProperties()); - SG_ADD((CSGObject**)&m_line_tokenizer, "line_tokenizer", "line tokenizer used to parse file", ParameterProperties()); - SG_ADD((CSGObject**)&m_tokenizer, "tokenizer", "tokenizer used to parse file", ParameterProperties()); - SG_ADD(&m_delimiter, "delimiter", "delimiter used in get_vector function", ParameterProperties()); + SG_ADD((CSGObject**)&m_line_reader, "line_reader", "line reader used to read lines from file"); + SG_ADD((CSGObject**)&m_parser, "parser", "parser used to parse file"); + SG_ADD((CSGObject**)&m_line_tokenizer, "line_tokenizer", "line tokenizer used to parse file"); + SG_ADD((CSGObject**)&m_tokenizer, "tokenizer", "tokenizer used to parse file"); + SG_ADD(&m_delimiter, "delimiter", "delimiter used in get_vector function"); - SG_ADD(&m_num_vars, "num_vars", "number of variables", ParameterProperties()); - SG_ADD(&m_num_factors, "num_factors", "number of factors", ParameterProperties()); - SG_ADD(&m_net_type, "net_type", "network type (either BAYES or MARKOV)", ParameterProperties()); - SG_ADD(&m_vars_card, "vars_card", "cardinality of all the variables", ParameterProperties()); + SG_ADD(&m_num_vars, "num_vars", "number of variables"); + SG_ADD(&m_num_factors, 
"num_factors", "number of factors"); + SG_ADD(&m_net_type, "net_type", "network type (either BAYES or MARKOV)"); + SG_ADD(&m_vars_card, "vars_card", "cardinality of all the variables"); /** Can only be enable after this issue is https://github.com/shogun-toolbox/shogun/issues/1972 * resolved - * SG_ADD(m_factors_table, "m_factors_table", "table of factors", ParameterProperties()); - * SG_ADD(m_factors_scope, "m_factors_scope", "scope of factors", ParameterProperties()); + * SG_ADD(m_factors_table, "m_factors_table", "table of factors"); + * SG_ADD(m_factors_scope, "m_factors_scope", "scope of factors"); */ m_delimiter = ' '; diff --git a/src/shogun/kernel/CombinedKernel.cpp b/src/shogun/kernel/CombinedKernel.cpp index 1f1d88087ff..e84cfdc87ec 100644 --- a/src/shogun/kernel/CombinedKernel.cpp +++ b/src/shogun/kernel/CombinedKernel.cpp @@ -752,8 +752,7 @@ void CCombinedKernel::init() SG_ADD(&append_subkernel_weights, "append_subkernel_weights", "If subkernel weights are appended.", ParameterProperties::HYPER); - SG_ADD(&initialized, "initialized", "Whether kernel is ready to be used.", - ParameterProperties()); + SG_ADD(&initialized, "initialized", "Whether kernel is ready to be used."); enable_subkernel_weight_opt=false; subkernel_log_weights = SGVector(1); @@ -761,11 +760,11 @@ void CCombinedKernel::init() SG_ADD(&subkernel_log_weights, "subkernel_log_weights", "subkernel weights", ParameterProperties::HYPER | ParameterProperties::GRADIENT); SG_ADD(&enable_subkernel_weight_opt, "enable_subkernel_weight_opt", - "enable subkernel weight opt", ParameterProperties()); + "enable subkernel weight opt"); weight_update = false; SG_ADD(&weight_update, "weight_update", - "weight update", ParameterProperties()); + "weight update"); } void CCombinedKernel::enable_subkernel_weight_learning() diff --git a/src/shogun/kernel/CustomKernel.cpp b/src/shogun/kernel/CustomKernel.cpp index a953b02d221..02512376b85 100644 --- a/src/shogun/kernel/CustomKernel.cpp +++ b/src/shogun/kernel/CustomKernel.cpp @@ -27,15 +27,14 @@ void CCustomKernel::init() m_free_km=true; SG_ADD((CSGObject**)&m_row_subset_stack, "row_subset_stack", - "Subset stack of rows", ParameterProperties()); + "Subset stack of rows"); SG_ADD((CSGObject**)&m_col_subset_stack, "col_subset_stack", - "Subset stack of columns", ParameterProperties()); + "Subset stack of columns"); SG_ADD(&m_free_km, "free_km", "Whether kernel matrix should be freed in " - "destructor", ParameterProperties()); - SG_ADD(&m_is_symmetric, "is_symmetric", "Whether kernel matrix is symmetric", - ParameterProperties()); - SG_ADD(&kmatrix, "kmatrix", "Kernel matrix.", ParameterProperties()); - SG_ADD(&upper_diagonal, "upper_diagonal", "Upper diagonal", ParameterProperties()); + "destructor"); + SG_ADD(&m_is_symmetric, "is_symmetric", "Whether kernel matrix is symmetric"); + SG_ADD(&kmatrix, "kmatrix", "Kernel matrix."); + SG_ADD(&upper_diagonal, "upper_diagonal", "Upper diagonal"); } CCustomKernel::CCustomKernel() diff --git a/src/shogun/kernel/ExponentialARDKernel.cpp b/src/shogun/kernel/ExponentialARDKernel.cpp index 1d7a29b639e..ab43e358d61 100644 --- a/src/shogun/kernel/ExponentialARDKernel.cpp +++ b/src/shogun/kernel/ExponentialARDKernel.cpp @@ -35,12 +35,12 @@ void CExponentialARDKernel::init() SG_ADD(&m_log_weights, "log_weights", "Feature weights in log domain", ParameterProperties::HYPER | ParameterProperties::GRADIENT); - SG_ADD(&m_weights_rows, "weights_rows", "Row of feature weights", ParameterProperties()); - SG_ADD(&m_weights_cols, "weights_cols", "Column of 
feature weights", ParameterProperties()); - SG_ADD((int *)(&m_ARD_type), "type", "ARD kernel type", ParameterProperties()); + SG_ADD(&m_weights_rows, "weights_rows", "Row of feature weights"); + SG_ADD(&m_weights_cols, "weights_cols", "Column of feature weights"); + SG_ADD((int *)(&m_ARD_type), "type", "ARD kernel type"); m_weights_raw=SGMatrix(); - SG_ADD(&m_weights_raw, "weights_raw", "Features weights in standard domain", ParameterProperties()); + SG_ADD(&m_weights_raw, "weights_raw", "Features weights in standard domain"); } diff --git a/src/shogun/kernel/GaussianARDKernel.cpp b/src/shogun/kernel/GaussianARDKernel.cpp index 07f42ba5b55..587ef11cf6b 100644 --- a/src/shogun/kernel/GaussianARDKernel.cpp +++ b/src/shogun/kernel/GaussianARDKernel.cpp @@ -24,8 +24,8 @@ void CGaussianARDKernel::init() { m_sq_lhs=SGVector(); m_sq_rhs=SGVector(); - SG_ADD(&m_sq_lhs, "sq_lhs", "squared left-hand side", ParameterProperties()); - SG_ADD(&m_sq_rhs, "sq_rhs", "squared right-hand side", ParameterProperties()); + SG_ADD(&m_sq_lhs, "sq_lhs", "squared left-hand side"); + SG_ADD(&m_sq_rhs, "sq_rhs", "squared right-hand side"); } float64_t CGaussianARDKernel::distance(int32_t idx_a, int32_t idx_b) diff --git a/src/shogun/kernel/Kernel.cpp b/src/shogun/kernel/Kernel.cpp index 8c67fe58df9..51204c3327b 100644 --- a/src/shogun/kernel/Kernel.cpp +++ b/src/shogun/kernel/Kernel.cpp @@ -919,27 +919,23 @@ void CKernel::save_serializable_post() throw (ShogunException) void CKernel::register_params() { SG_ADD(&cache_size, "cache_size", - "Cache size in MB.", ParameterProperties()); + "Cache size in MB."); SG_ADD( - &lhs, "lhs", "Feature vectors to occur on left hand side.", - ParameterProperties()); + &lhs, "lhs", "Feature vectors to occur on left hand side."); SG_ADD( - &rhs, "rhs", "Feature vectors to occur on right hand side.", - ParameterProperties()); + &rhs, "rhs", "Feature vectors to occur on right hand side."); SG_ADD(&lhs_equals_rhs, "lhs_equals_rhs", - "If features on lhs are the same as on rhs.", ParameterProperties()); - SG_ADD(&num_lhs, "num_lhs", "Number of feature vectors on left hand side.", - ParameterProperties()); - SG_ADD(&num_rhs, "num_rhs", "Number of feature vectors on right hand side.", - ParameterProperties()); + "If features on lhs are the same as on rhs."); + SG_ADD(&num_lhs, "num_lhs", "Number of feature vectors on left hand side."); + SG_ADD(&num_rhs, "num_rhs", "Number of feature vectors on right hand side."); SG_ADD(&combined_kernel_weight, "combined_kernel_weight", "Combined kernel weight.", ParameterProperties::HYPER); SG_ADD(&optimization_initialized, "optimization_initialized", - "Optimization is initialized.", ParameterProperties()); + "Optimization is initialized."); SG_ADD((machine_int_t*) &opt_type, "opt_type", - "Optimization type.", ParameterProperties()); - SG_ADD(&properties, "properties", "Kernel properties.", ParameterProperties()); + "Optimization type."); + SG_ADD(&properties, "properties", "Kernel properties."); SG_ADD(&normalizer, "normalizer", "Normalize the kernel.", ParameterProperties::HYPER); } diff --git a/src/shogun/kernel/PeriodicKernel.cpp b/src/shogun/kernel/PeriodicKernel.cpp index 49c6b669b0a..dabe2561927 100644 --- a/src/shogun/kernel/PeriodicKernel.cpp +++ b/src/shogun/kernel/PeriodicKernel.cpp @@ -161,9 +161,9 @@ void CPeriodicKernel::init() SG_ADD(&m_period, "period", "Kernel period", ParameterProperties::HYPER | ParameterProperties::GRADIENT); SG_ADD(&m_sq_lhs, "sq_lhs", - "Vector of dot products of each left-hand-side vector with itself.", 
ParameterProperties()); + "Vector of dot products of each left-hand-side vector with itself."); SG_ADD(&m_sq_rhs, "sq_rhs", - "Vector of dot products of each right-hand-side vector with itself.", ParameterProperties()); + "Vector of dot products of each right-hand-side vector with itself."); } float64_t CPeriodicKernel::distance(int32_t idx_a, int32_t idx_b) diff --git a/src/shogun/kernel/PolyKernel.cpp b/src/shogun/kernel/PolyKernel.cpp index e64273cc6c2..6c30b022681 100644 --- a/src/shogun/kernel/PolyKernel.cpp +++ b/src/shogun/kernel/PolyKernel.cpp @@ -71,7 +71,6 @@ void CPolyKernel::init() set_normalizer(new CSqrtDiagKernelNormalizer()); SG_ADD(°ree, "degree", "Degree of polynomial kernel", ParameterProperties::HYPER); - SG_ADD(&inhomogene, "inhomogene", "If kernel is inhomogeneous.", - ParameterProperties()); + SG_ADD(&inhomogene, "inhomogene", "If kernel is inhomogeneous."); } diff --git a/src/shogun/kernel/ProductKernel.cpp b/src/shogun/kernel/ProductKernel.cpp index 49ec215986c..173f8977c6f 100644 --- a/src/shogun/kernel/ProductKernel.cpp +++ b/src/shogun/kernel/ProductKernel.cpp @@ -230,8 +230,7 @@ void CProductKernel::init() SG_ADD((CSGObject**) &kernel_array, "kernel_array", "Array of kernels", ParameterProperties::HYPER); - SG_ADD(&initialized, "initialized", "Whether kernel is ready to be used", - ParameterProperties()); + SG_ADD(&initialized, "initialized", "Whether kernel is ready to be used"); } SGMatrix CProductKernel::get_parameter_gradient( diff --git a/src/shogun/kernel/ShiftInvariantKernel.cpp b/src/shogun/kernel/ShiftInvariantKernel.cpp index 3aefe5e2140..04bc157cb5e 100644 --- a/src/shogun/kernel/ShiftInvariantKernel.cpp +++ b/src/shogun/kernel/ShiftInvariantKernel.cpp @@ -102,8 +102,8 @@ float64_t CShiftInvariantKernel::distance(int32_t a, int32_t b) const void CShiftInvariantKernel::register_params() { - SG_ADD((CSGObject**) &m_distance, "m_distance", "Distance to be used.", ParameterProperties()); - SG_ADD((CSGObject**) &m_precomputed_distance, "m_precomputed_distance", "Precomputed istance to be used.", ParameterProperties()); + SG_ADD((CSGObject**) &m_distance, "m_distance", "Distance to be used."); + SG_ADD((CSGObject**) &m_precomputed_distance, "m_precomputed_distance", "Precomputed istance to be used."); m_distance=NULL; m_precomputed_distance=NULL; diff --git a/src/shogun/kernel/normalizer/DiceKernelNormalizer.h b/src/shogun/kernel/normalizer/DiceKernelNormalizer.h index 29a4b531d60..09b0cd3a22f 100644 --- a/src/shogun/kernel/normalizer/DiceKernelNormalizer.h +++ b/src/shogun/kernel/normalizer/DiceKernelNormalizer.h @@ -42,7 +42,7 @@ class CDiceKernelNormalizer : public CKernelNormalizer SG_ADD(&use_optimized_diagonal_computation, "use_optimized_diagonal_computation", - "flat if optimized diagonal computation is used", ParameterProperties()); + "flat if optimized diagonal computation is used"); } /** default destructor */ diff --git a/src/shogun/kernel/normalizer/KernelNormalizer.h b/src/shogun/kernel/normalizer/KernelNormalizer.h index 274a6a14a94..5acb918c86f 100644 --- a/src/shogun/kernel/normalizer/KernelNormalizer.h +++ b/src/shogun/kernel/normalizer/KernelNormalizer.h @@ -89,8 +89,7 @@ class CKernelNormalizer : public CSGObject */ virtual void register_params() { - SG_ADD((machine_int_t*) &m_type, "m_type", "Normalizer type.", - ParameterProperties()); + SG_ADD((machine_int_t*) &m_type, "m_type", "Normalizer type."); } /** getter for normalizer type diff --git a/src/shogun/kernel/normalizer/ScatterKernelNormalizer.h 
b/src/shogun/kernel/normalizer/ScatterKernelNormalizer.h index fd0ccb3b5aa..4be2a1aa460 100644 --- a/src/shogun/kernel/normalizer/ScatterKernelNormalizer.h +++ b/src/shogun/kernel/normalizer/ScatterKernelNormalizer.h @@ -154,13 +154,13 @@ class CScatterKernelNormalizer: public CKernelNormalizer m_testing_class = -1; SG_ADD(&m_testing_class, "m_testing_class", - "Testing Class.", ParameterProperties()); + "Testing Class."); SG_ADD(&m_const_diag, "m_const_diag", "Factor to multiply to diagonal elements.", ParameterProperties::HYPER); SG_ADD(&m_const_offdiag, "m_const_offdiag", "Factor to multiply to off-diagonal elements.", ParameterProperties::HYPER); - SG_ADD((CSGObject**) &m_labels, "m_labels", "Labels", ParameterProperties()); + SG_ADD((CSGObject**) &m_labels, "m_labels", "Labels"); SG_ADD((CSGObject**) &m_normalizer, "m_normalizer", "Kernel normalizer.", ParameterProperties::HYPER); } diff --git a/src/shogun/kernel/normalizer/SqrtDiagKernelNormalizer.h b/src/shogun/kernel/normalizer/SqrtDiagKernelNormalizer.h index ecd0908f214..e32049570cd 100644 --- a/src/shogun/kernel/normalizer/SqrtDiagKernelNormalizer.h +++ b/src/shogun/kernel/normalizer/SqrtDiagKernelNormalizer.h @@ -46,7 +46,7 @@ class CSqrtDiagKernelNormalizer : public CKernelNormalizer SG_ADD(&use_optimized_diagonal_computation, "use_optimized_diagonal_computation", - "flat if optimized diagonal computation is used", ParameterProperties()); + "flat if optimized diagonal computation is used"); } /** default destructor */ diff --git a/src/shogun/kernel/string/CommWordStringKernel.cpp b/src/shogun/kernel/string/CommWordStringKernel.cpp index 54281fe8277..1b99cc7eb4c 100644 --- a/src/shogun/kernel/string/CommWordStringKernel.cpp +++ b/src/shogun/kernel/string/CommWordStringKernel.cpp @@ -593,10 +593,10 @@ void CCommWordStringKernel::init() set_normalizer(new CSqrtDiagKernelNormalizer(use_dict_diagonal_optimization)); SG_ADD(&dictionary_weights, "dictionary_weights", - "Dictionary for applying kernel.", ParameterProperties()); + "Dictionary for applying kernel."); SG_ADD(&use_sign, "use_sign", "If signum(counts) is used instead of counts.", ParameterProperties::HYPER); SG_ADD(&use_dict_diagonal_optimization, "use_dict_diagonal_optimization", "If K(x,x) is computed potentially " - "more efficiently.", ParameterProperties()); + "more efficiently."); } diff --git a/src/shogun/kernel/string/HistogramWordStringKernel.cpp b/src/shogun/kernel/string/HistogramWordStringKernel.cpp index 509417c05fd..0fe20376935 100644 --- a/src/shogun/kernel/string/HistogramWordStringKernel.cpp +++ b/src/shogun/kernel/string/HistogramWordStringKernel.cpp @@ -408,8 +408,7 @@ void CHistogramWordStringKernel::init() sum_m2_s2=0; initialized=false; - SG_ADD(&initialized, "initialized", "If kernel is initalized.", - ParameterProperties()); + SG_ADD(&initialized, "initialized", "If kernel is initalized."); m_parameters->add_vector(&plo_lhs, &num_lhs, "plo_lhs"); watch_param("plo_lhs", &plo_lhs, &num_lhs); @@ -435,8 +434,7 @@ void CHistogramWordStringKernel::init() m_parameters->add_vector(&variance, &num_params2, "variance"); watch_param("variance", &variance, &num_params2); - SG_ADD((CSGObject**) &estimate, "estimate", "Plugin Estimate.", - ParameterProperties()); + SG_ADD((CSGObject**) &estimate, "estimate", "Plugin Estimate."); } #ifdef DEBUG_HWSK_COMPUTATION diff --git a/src/shogun/kernel/string/LocalAlignmentStringKernel.cpp b/src/shogun/kernel/string/LocalAlignmentStringKernel.cpp index 8cb8e186635..28f457dfd85 100644 --- 
a/src/shogun/kernel/string/LocalAlignmentStringKernel.cpp +++ b/src/shogun/kernel/string/LocalAlignmentStringKernel.cpp @@ -405,8 +405,7 @@ void CLocalAlignmentStringKernel::init() scaled_blosum=SG_CALLOC(int32_t, sizeof(blosum)); init_logsum(); - SG_ADD(&initialized, "initialized", "If kernel is initalized.", - ParameterProperties()); + SG_ADD(&initialized, "initialized", "If kernel is initalized."); SG_ADD(&m_opening, "opening", "Opening gap opening penalty.", ParameterProperties::HYPER); SG_ADD(&m_extension, "extension", "Extension gap extension penalty.", ParameterProperties::HYPER); diff --git a/src/shogun/kernel/string/OligoStringKernel.cpp b/src/shogun/kernel/string/OligoStringKernel.cpp index b7b8124ed15..42f2a997d48 100644 --- a/src/shogun/kernel/string/OligoStringKernel.cpp +++ b/src/shogun/kernel/string/OligoStringKernel.cpp @@ -304,5 +304,5 @@ void COligoStringKernel::init() SG_ADD(&k, "k", "K-mer length.", ParameterProperties::HYPER); SG_ADD(&width, "width", "Width of Gaussian.", ParameterProperties::HYPER); - SG_ADD(&gauss_table, "gauss_table", "Gauss Cache Table.", ParameterProperties()); + SG_ADD(&gauss_table, "gauss_table", "Gauss Cache Table."); } diff --git a/src/shogun/kernel/string/PolyMatchStringKernel.cpp b/src/shogun/kernel/string/PolyMatchStringKernel.cpp index 2f47491a524..846a84b098f 100644 --- a/src/shogun/kernel/string/PolyMatchStringKernel.cpp +++ b/src/shogun/kernel/string/PolyMatchStringKernel.cpp @@ -88,8 +88,7 @@ void CPolyMatchStringKernel::init() set_normalizer(new CSqrtDiagKernelNormalizer()); SG_ADD(°ree, "degree", "Degree of poly-kernel.", ParameterProperties::HYPER); - SG_ADD(&inhomogene, "inhomogene", "True for inhomogene poly-kernel.", - ParameterProperties()); + SG_ADD(&inhomogene, "inhomogene", "True for inhomogene poly-kernel."); SG_ADD(&rescaling, "rescaling", "True to rescale kernel with string length.", ParameterProperties::HYPER); } diff --git a/src/shogun/kernel/string/PolyMatchWordStringKernel.cpp b/src/shogun/kernel/string/PolyMatchWordStringKernel.cpp index dfa3d9efc81..61e7c489c5e 100644 --- a/src/shogun/kernel/string/PolyMatchWordStringKernel.cpp +++ b/src/shogun/kernel/string/PolyMatchWordStringKernel.cpp @@ -91,6 +91,5 @@ void CPolyMatchWordStringKernel::init() set_normalizer(new CSqrtDiagKernelNormalizer()); SG_ADD(°ree, "degree", "Degree of poly-kernel.", ParameterProperties::HYPER); - SG_ADD(&inhomogene, "inhomogene", "True for inhomogene poly-kernel.", - ParameterProperties()); + SG_ADD(&inhomogene, "inhomogene", "True for inhomogene poly-kernel."); } diff --git a/src/shogun/kernel/string/RegulatoryModulesStringKernel.cpp b/src/shogun/kernel/string/RegulatoryModulesStringKernel.cpp index b16112c9995..ee51f4fa6ab 100644 --- a/src/shogun/kernel/string/RegulatoryModulesStringKernel.cpp +++ b/src/shogun/kernel/string/RegulatoryModulesStringKernel.cpp @@ -56,11 +56,11 @@ void CRegulatoryModulesStringKernel::init() "the shift of weighted degree with shifts kernel part", ParameterProperties::HYPER); SG_ADD(&window, "window", "the size of window around motifs", ParameterProperties::HYPER); SG_ADD((CSGObject**)&motif_positions_lhs, "motif_positions_lhs", - "the matrix of motif positions from sequences left-hand side", ParameterProperties()); + "the matrix of motif positions from sequences left-hand side"); SG_ADD((CSGObject**)&motif_positions_rhs, "motif_positions_rhs", - "the matrix of motif positions from sequences right-hand side", ParameterProperties()); - SG_ADD(&position_weights, "position_weights", "scaling weights in window", 
ParameterProperties()); - SG_ADD(&weights, "weights", "weights of WD kernel", ParameterProperties()); + "the matrix of motif positions from sequences right-hand side"); + SG_ADD(&position_weights, "position_weights", "scaling weights in window"); + SG_ADD(&weights, "weights", "weights of WD kernel"); } bool CRegulatoryModulesStringKernel::init(CFeatures* l, CFeatures* r) diff --git a/src/shogun/kernel/string/SNPStringKernel.cpp b/src/shogun/kernel/string/SNPStringKernel.cpp index 8d40ad53764..d6267e07e73 100644 --- a/src/shogun/kernel/string/SNPStringKernel.cpp +++ b/src/shogun/kernel/string/SNPStringKernel.cpp @@ -180,7 +180,7 @@ void CSNPStringKernel::register_params() SG_ADD(&m_degree, "m_degree", "the order of the kernel", ParameterProperties::HYPER); SG_ADD(&m_win_len, "m_win_len", "the window length", ParameterProperties::HYPER); SG_ADD(&m_inhomogene, "m_inhomogene", - "the mark of whether it's an inhomogeneous poly kernel", ParameterProperties()); + "the mark of whether it's an inhomogeneous poly kernel"); m_parameters->add_vector(&m_str_min, &m_str_len, "m_str_min", "allele A"); watch_param("m_str_min", &m_str_min, &m_str_len); diff --git a/src/shogun/kernel/string/SpectrumMismatchRBFKernel.cpp b/src/shogun/kernel/string/SpectrumMismatchRBFKernel.cpp index cdeae06bfb1..cbc51fd51dd 100644 --- a/src/shogun/kernel/string/SpectrumMismatchRBFKernel.cpp +++ b/src/shogun/kernel/string/SpectrumMismatchRBFKernel.cpp @@ -298,23 +298,18 @@ bool CSpectrumMismatchRBFKernel::set_max_mismatch(int32_t max) void CSpectrumMismatchRBFKernel::register_params() { SG_ADD(°ree, "degree", "degree of the kernel", ParameterProperties::HYPER); - SG_ADD(&AA_matrix, "AA_matrix", "128*128 scalar product matrix", - ParameterProperties()); + SG_ADD(&AA_matrix, "AA_matrix", "128*128 scalar product matrix"); SG_ADD(&width, "width", "width of Gaussian", ParameterProperties::HYPER); - SG_ADD(&target_letter_0, "target_letter_0", "target letter 0", - ParameterProperties()); - SG_ADD(&initialized, "initialized", "the mark of initialization status", - ParameterProperties()); + SG_ADD(&target_letter_0, "target_letter_0", "target letter 0"); + SG_ADD(&initialized, "initialized", "the mark of initialization status"); SG_ADD((CSGObject** )&kernel_matrix, "kernel_matrix", "the kernel matrix with its length " - "defined by the number of vectors of the string features", - ParameterProperties()); + "defined by the number of vectors of the string features"); } void CSpectrumMismatchRBFKernel::register_alphabet() { - SG_ADD((CSGObject** )&alphabet, "alphabet", "the alphabet used by kernel", - ParameterProperties()); + SG_ADD((CSGObject** )&alphabet, "alphabet", "the alphabet used by kernel"); } void CSpectrumMismatchRBFKernel::init() diff --git a/src/shogun/kernel/string/SpectrumRBFKernel.cpp b/src/shogun/kernel/string/SpectrumRBFKernel.cpp index 15060820108..537ab625b43 100644 --- a/src/shogun/kernel/string/SpectrumRBFKernel.cpp +++ b/src/shogun/kernel/string/SpectrumRBFKernel.cpp @@ -379,22 +379,20 @@ bool CSpectrumRBFKernel::set_AA_matrix( void CSpectrumRBFKernel::register_param() { SG_ADD(°ree, "degree", "degree of the kernel", ParameterProperties::HYPER); - SG_ADD(&AA_matrix, "AA_matrix", "128*128 scalar product matrix", ParameterProperties()); + SG_ADD(&AA_matrix, "AA_matrix", "128*128 scalar product matrix"); SG_ADD(&width, "width", "width of Gaussian", ParameterProperties::HYPER); - SG_ADD(&nof_sequences, "nof_sequences", "length of the sequence", - ParameterProperties()); + SG_ADD(&nof_sequences, "nof_sequences", "length 
of the sequence"); m_parameters->add_vector(&sequences, &nof_sequences, "sequences", "the sequences as a part of profile"); watch_param("sequences", &sequences, &nof_sequences); SG_ADD(&max_sequence_length, - "max_sequence_length", "max length of the sequence", ParameterProperties()); + "max_sequence_length", "max length of the sequence"); } void CSpectrumRBFKernel::register_alphabet() { - SG_ADD((CSGObject**)&alphabet, "alphabet", "the alphabet used by kernel", - ParameterProperties()); + SG_ADD((CSGObject**)&alphabet, "alphabet", "the alphabet used by kernel"); } void CSpectrumRBFKernel::init() diff --git a/src/shogun/kernel/string/WeightedDegreePositionStringKernel.cpp b/src/shogun/kernel/string/WeightedDegreePositionStringKernel.cpp index 748a09fdebb..d84619d48b5 100644 --- a/src/shogun/kernel/string/WeightedDegreePositionStringKernel.cpp +++ b/src/shogun/kernel/string/WeightedDegreePositionStringKernel.cpp @@ -1955,12 +1955,12 @@ void CWeightedDegreePositionStringKernel::init() SG_ADD(&max_mismatch, "max_mismatch", "Number of allowed mismatches.", ParameterProperties::HYPER); SG_ADD(&block_computation, "block_computation", - "If block computation shall be used.", ParameterProperties()); + "If block computation shall be used."); SG_ADD((machine_int_t*) &type, "type", "WeightedDegree kernel type.", ParameterProperties::HYPER); SG_ADD(&which_degree, "which_degree", "The selected degree. All degrees are used by default (for value -1).", ParameterProperties::HYPER); SG_ADD((CSGObject**) &alphabet, "alphabet", - "Alphabet of Features.", ParameterProperties()); + "Alphabet of Features."); } diff --git a/src/shogun/kernel/string/WeightedDegreeStringKernel.cpp b/src/shogun/kernel/string/WeightedDegreeStringKernel.cpp index 9f64662d540..c5034865c9a 100644 --- a/src/shogun/kernel/string/WeightedDegreeStringKernel.cpp +++ b/src/shogun/kernel/string/WeightedDegreeStringKernel.cpp @@ -1023,12 +1023,12 @@ void CWeightedDegreeStringKernel::init() SG_ADD(&max_mismatch, "max_mismatch", "Number of allowed mismatches.", ParameterProperties::HYPER); SG_ADD(&block_computation, "block_computation", - "If block computation shall be used.", ParameterProperties()); + "If block computation shall be used."); SG_ADD((machine_int_t*) &type, "type", "WeightedDegree kernel type.", ParameterProperties::HYPER); SG_ADD(&which_degree, "which_degree", "The selected degree. 
All degrees are used by default (for value -1).", ParameterProperties::HYPER); SG_ADD((CSGObject**) &alphabet, "alphabet", - "Alphabet of Features.", ParameterProperties()); + "Alphabet of Features."); } diff --git a/src/shogun/labels/DenseLabels.cpp b/src/shogun/labels/DenseLabels.cpp index 3fe10ee9616..c3c2eda35b0 100644 --- a/src/shogun/labels/DenseLabels.cpp +++ b/src/shogun/labels/DenseLabels.cpp @@ -49,7 +49,7 @@ CDenseLabels::~CDenseLabels() void CDenseLabels::init() { - SG_ADD(&m_labels, "labels", "The labels.", ParameterProperties()); + SG_ADD(&m_labels, "labels", "The labels."); } void CDenseLabels::set_to_one() diff --git a/src/shogun/labels/Labels.cpp b/src/shogun/labels/Labels.cpp index e0b471b662b..c86aa6ed40b 100644 --- a/src/shogun/labels/Labels.cpp +++ b/src/shogun/labels/Labels.cpp @@ -41,10 +41,9 @@ CLabels::~CLabels() void CLabels::init() { SG_ADD((CSGObject **)&m_subset_stack, "subset_stack", - "Current subset stack", ParameterProperties()); + "Current subset stack"); SG_ADD( - &m_current_values, "current_values", "current active value vector", - ParameterProperties()); + &m_current_values, "current_values", "current active value vector"); m_subset_stack = new CSubsetStack(); SG_REF(m_subset_stack); } diff --git a/src/shogun/labels/LatentLabels.cpp b/src/shogun/labels/LatentLabels.cpp index 69d1abed1cb..d132018d6d7 100644 --- a/src/shogun/labels/LatentLabels.cpp +++ b/src/shogun/labels/LatentLabels.cpp @@ -45,8 +45,8 @@ CLatentLabels::~CLatentLabels() void CLatentLabels::init() { - SG_ADD((CSGObject**) &m_latent_labels, "m_latent_labels", "The latent labels", ParameterProperties()); - SG_ADD((CSGObject**) &m_labels, "m_labels", "The labels", ParameterProperties()); + SG_ADD((CSGObject**) &m_latent_labels, "m_latent_labels", "The latent labels"); + SG_ADD((CSGObject**) &m_labels, "m_labels", "The labels"); m_latent_labels = NULL; m_labels = NULL; } diff --git a/src/shogun/labels/MultilabelLabels.cpp b/src/shogun/labels/MultilabelLabels.cpp index ebfd8efce07..beab3515a3e 100644 --- a/src/shogun/labels/MultilabelLabels.cpp +++ b/src/shogun/labels/MultilabelLabels.cpp @@ -69,9 +69,9 @@ CMultilabelLabels::init(int32_t num_labels, int32_t num_classes) // This one does consider the contained labels, so its simply BROKEN // Can be disabled as - SG_ADD(&m_num_labels, "m_num_labels", "number of labels", ParameterProperties()); - SG_ADD(&m_num_classes, "m_num_classes", "number of classes", ParameterProperties()); - // SG_ADD((CSGObject**) &m_labels, "m_labels", "The labels", ParameterProperties()); + SG_ADD(&m_num_labels, "m_num_labels", "number of labels"); + SG_ADD(&m_num_classes, "m_num_classes", "number of classes"); + // SG_ADD((CSGObject**) &m_labels, "m_labels", "The labels"); // Can only be enabled after this issue has been solved: diff --git a/src/shogun/labels/StructuredLabels.cpp b/src/shogun/labels/StructuredLabels.cpp index 4a83107f4c6..369e9c270a5 100644 --- a/src/shogun/labels/StructuredLabels.cpp +++ b/src/shogun/labels/StructuredLabels.cpp @@ -84,7 +84,7 @@ int32_t CStructuredLabels::get_num_labels() const void CStructuredLabels::init() { - SG_ADD((CSGObject**) &m_labels, "m_labels", "The labels", ParameterProperties()); + SG_ADD((CSGObject**) &m_labels, "m_labels", "The labels"); m_labels = NULL; m_sdt = SDT_UNKNOWN; diff --git a/src/shogun/latent/LatentModel.cpp b/src/shogun/latent/LatentModel.cpp index f191b4c13ff..b07410b26ea 100644 --- a/src/shogun/latent/LatentModel.cpp +++ b/src/shogun/latent/LatentModel.cpp @@ -82,15 +82,13 @@ void 
CLatentModel::argmax_h(const SGVector& w) void CLatentModel::register_parameters() { - SG_ADD(&m_features, "features", "Latent features", ParameterProperties()); - SG_ADD(&m_labels, "labels", "Latent labels", ParameterProperties()); + SG_ADD(&m_features, "features", "Latent features"); + SG_ADD(&m_labels, "labels", "Latent labels"); SG_ADD( - &m_cached_psi, "cached_psi", "Cached PSI features after argmax_h", - ParameterProperties()); + &m_cached_psi, "cached_psi", "Cached PSI features after argmax_h"); SG_ADD( &m_do_caching, "do_caching", - "Indicate whether or not do PSI vector caching after argmax_h", - ParameterProperties()); + "Indicate whether or not do PSI vector caching after argmax_h"); } diff --git a/src/shogun/lib/DelimiterTokenizer.cpp b/src/shogun/lib/DelimiterTokenizer.cpp index 970b4d3d321..d381faea62a 100644 --- a/src/shogun/lib/DelimiterTokenizer.cpp +++ b/src/shogun/lib/DelimiterTokenizer.cpp @@ -31,10 +31,9 @@ CDelimiterTokenizer::CDelimiterTokenizer(const CDelimiterTokenizer& orig) void CDelimiterTokenizer::init() { - SG_ADD(&last_idx, "last_idx", "Index of last token", - ParameterProperties()); + SG_ADD(&last_idx, "last_idx", "Index of last token"); SG_ADD(&skip_consecutive_delimiters, "skip_consecutive_delimiters", - "Whether to skip consecutive delimiters or not", ParameterProperties()); + "Whether to skip consecutive delimiters or not"); SGVector::fill_vector(delimiters, 256, 0); } diff --git a/src/shogun/lib/DynamicArray.h b/src/shogun/lib/DynamicArray.h index 5a2ebdc980a..2a27efcddd0 100644 --- a/src/shogun/lib/DynamicArray.h +++ b/src/shogun/lib/DynamicArray.h @@ -627,18 +627,16 @@ template class CDynamicArray :public CSGObject SG_ADD(&m_array.resize_granularity, "resize_granularity", - "shrink/grow step size.", ParameterProperties()); + "shrink/grow step size."); SG_ADD(&m_array.use_sg_mallocs, "use_sg_malloc", - "whether SG_MALLOC or malloc should be used", - ParameterProperties()); + "whether SG_MALLOC or malloc should be used"); SG_ADD(&m_array.free_array, "free_array", - "whether array must be freed", - ParameterProperties()); - SG_ADD(&dim1_size, "dim1_size", "Dimension 1", ParameterProperties()); - SG_ADD(&dim2_size, "dim2_size", "Dimension 2", ParameterProperties()); - SG_ADD(&dim3_size, "dim3_size", "Dimension 3", ParameterProperties()); + "whether array must be freed"); + SG_ADD(&dim1_size, "dim1_size", "Dimension 1"); + SG_ADD(&dim2_size, "dim2_size", "Dimension 2"); + SG_ADD(&dim3_size, "dim3_size", "Dimension 3"); } protected: diff --git a/src/shogun/lib/DynamicObjectArray.h b/src/shogun/lib/DynamicObjectArray.h index 47e48686100..1480f79450e 100644 --- a/src/shogun/lib/DynamicObjectArray.h +++ b/src/shogun/lib/DynamicObjectArray.h @@ -473,18 +473,16 @@ class CDynamicObjectArray : public CSGObject SG_ADD(&m_array.resize_granularity, "resize_granularity", - "shrink/grow step size.", ParameterProperties()); + "shrink/grow step size."); SG_ADD(&m_array.use_sg_mallocs, "use_sg_malloc", - "whether SG_MALLOC or malloc should be used", - ParameterProperties()); + "whether SG_MALLOC or malloc should be used"); SG_ADD(&m_array.free_array, "free_array", - "whether array must be freed", - ParameterProperties()); - SG_ADD(&dim1_size, "dim1_size", "Dimension 1", ParameterProperties()); - SG_ADD(&dim2_size, "dim2_size", "Dimension 2", ParameterProperties()); - SG_ADD(&dim3_size, "dim3_size", "Dimension 3", ParameterProperties()); + "whether array must be freed"); + SG_ADD(&dim1_size, "dim1_size", "Dimension 1"); + SG_ADD(&dim2_size, "dim2_size", "Dimension 2"); + 
SG_ADD(&dim3_size, "dim3_size", "Dimension 3"); } /** de-reference all elements of this array once */ diff --git a/src/shogun/lib/List.h b/src/shogun/lib/List.h index 854c0959f2d..ea0f12cdd97 100644 --- a/src/shogun/lib/List.h +++ b/src/shogun/lib/List.h @@ -53,8 +53,8 @@ class CListElement :public CSGObject private: void init() { - SG_ADD(&data, "data", "Data of this element.", ParameterProperties()); - SG_ADD(&next, "next", "Next element in list.", ParameterProperties()); + SG_ADD(&data, "data", "Data of this element."); + SG_ADD(&next, "next", "Next element in list."); } public: diff --git a/src/shogun/lib/NGramTokenizer.cpp b/src/shogun/lib/NGramTokenizer.cpp index 675db75340d..fd74540fbcf 100644 --- a/src/shogun/lib/NGramTokenizer.cpp +++ b/src/shogun/lib/NGramTokenizer.cpp @@ -28,10 +28,8 @@ CNGramTokenizer::CNGramTokenizer(const CNGramTokenizer& orig) void CNGramTokenizer::init() { - SG_ADD(&n, "n", "Size of n-grams", - ParameterProperties()); - SG_ADD(&last_idx, "last_idx", "Index of last token", - ParameterProperties()); + SG_ADD(&n, "n", "Size of n-grams"); + SG_ADD(&last_idx, "last_idx", "Index of last token"); } void CNGramTokenizer::set_text(SGVector txt) diff --git a/src/shogun/lib/Tokenizer.cpp b/src/shogun/lib/Tokenizer.cpp index fcb66eac866..9d80006cb46 100644 --- a/src/shogun/lib/Tokenizer.cpp +++ b/src/shogun/lib/Tokenizer.cpp @@ -21,6 +21,6 @@ void CTokenizer::set_text(SGVector txt) void CTokenizer::init() { - SG_ADD(&text, "text", "The text", ParameterProperties()); + SG_ADD(&text, "text", "The text"); } } diff --git a/src/shogun/machine/BaggingMachine.cpp b/src/shogun/machine/BaggingMachine.cpp index a2b0ebc5258..49a86745b08 100644 --- a/src/shogun/machine/BaggingMachine.cpp +++ b/src/shogun/machine/BaggingMachine.cpp @@ -255,19 +255,16 @@ void CBaggingMachine::set_machine_parameters(CMachine* m, SGVector idx) void CBaggingMachine::register_parameters() { SG_ADD( - &m_features, "features", "Train features for bagging", - ParameterProperties()); + &m_features, "features", "Train features for bagging"); SG_ADD(&m_num_bags, "num_bags", "Number of bags", ParameterProperties::HYPER); SG_ADD(&m_bag_size, "bag_size", "Number of vectors per bag", ParameterProperties::HYPER); - SG_ADD(&m_bags, "bags", "Bags array", ParameterProperties()); + SG_ADD(&m_bags, "bags", "Bags array"); SG_ADD( &m_combination_rule, "combination_rule", "Combination rule to use for aggregating", ParameterProperties::HYPER); - SG_ADD(&m_all_oob_idx, "all_oob_idx", "Indices of all oob vectors", - ParameterProperties()); + SG_ADD(&m_all_oob_idx, "all_oob_idx", "Indices of all oob vectors"); SG_ADD( - &m_oob_indices, "oob_indices", "OOB indices for each machine", - ParameterProperties()); + &m_oob_indices, "oob_indices", "OOB indices for each machine"); } void CBaggingMachine::set_num_bags(int32_t num_bags) diff --git a/src/shogun/machine/BaseMulticlassMachine.cpp b/src/shogun/machine/BaseMulticlassMachine.cpp index d775ffeee78..cc6f4bcb1b2 100644 --- a/src/shogun/machine/BaseMulticlassMachine.cpp +++ b/src/shogun/machine/BaseMulticlassMachine.cpp @@ -12,7 +12,7 @@ CBaseMulticlassMachine::CBaseMulticlassMachine() { m_machines = new CDynamicObjectArray(); - SG_ADD((CSGObject**)&m_machines, "machines", "Machines that jointly make up the multi-class machine.", ParameterProperties()); + SG_ADD((CSGObject**)&m_machines, "machines", "Machines that jointly make up the multi-class machine."); } CBaseMulticlassMachine::~CBaseMulticlassMachine() diff --git a/src/shogun/machine/IterativeMachine.h 
b/src/shogun/machine/IterativeMachine.h index bc716bfb7d8..4f03414c267 100644 --- a/src/shogun/machine/IterativeMachine.h +++ b/src/shogun/machine/IterativeMachine.h @@ -35,13 +35,12 @@ namespace shogun SG_ADD( &m_current_iteration, "current_iteration", - "Current Iteration of training", ParameterProperties()); + "Current Iteration of training"); SG_ADD( &m_max_iterations, "max_iterations", "Maximum number of Iterations", ParameterProperties::HYPER); SG_ADD( - &m_complete, "complete", "Convergence status", - ParameterProperties()); + &m_complete, "complete", "Convergence status"); } virtual ~CIterativeMachine() diff --git a/src/shogun/machine/KernelMachine.cpp b/src/shogun/machine/KernelMachine.cpp index 16b41512583..0a628fd6077 100644 --- a/src/shogun/machine/KernelMachine.cpp +++ b/src/shogun/machine/KernelMachine.cpp @@ -612,17 +612,16 @@ void CKernelMachine::init() SG_ADD(&kernel, "kernel", "", ParameterProperties::HYPER); SG_ADD((CSGObject**) &m_custom_kernel, "custom_kernel", "Custom kernel for" - " data lock", ParameterProperties()); + " data lock"); SG_ADD((CSGObject**) &m_kernel_backup, "kernel_backup", - "Kernel backup for data lock", ParameterProperties()); + "Kernel backup for data lock"); SG_ADD(&use_batch_computation, "use_batch_computation", - "Batch computation is enabled.", ParameterProperties()); - SG_ADD(&use_linadd, "use_linadd", "Linadd is enabled.", ParameterProperties()); - SG_ADD(&use_bias, "use_bias", "Bias shall be used.", ParameterProperties()); - SG_ADD(&m_bias, "m_bias", "Bias term.", ParameterProperties()); - SG_ADD(&m_alpha, "m_alpha", "Array of coefficients alpha.", - ParameterProperties()); - SG_ADD(&m_svs, "m_svs", "Number of ``support vectors''.", ParameterProperties()); + "Batch computation is enabled."); + SG_ADD(&use_linadd, "use_linadd", "Linadd is enabled."); + SG_ADD(&use_bias, "use_bias", "Bias shall be used."); + SG_ADD(&m_bias, "m_bias", "Bias term."); + SG_ADD(&m_alpha, "m_alpha", "Array of coefficients alpha."); + SG_ADD(&m_svs, "m_svs", "Number of ``support vectors''."); } bool CKernelMachine::supports_locking() const diff --git a/src/shogun/machine/LinearLatentMachine.cpp b/src/shogun/machine/LinearLatentMachine.cpp index 1b7750410f1..ed61cbdce0a 100644 --- a/src/shogun/machine/LinearLatentMachine.cpp +++ b/src/shogun/machine/LinearLatentMachine.cpp @@ -118,9 +118,9 @@ void CLinearLatentMachine::init() m_max_iter = 400; m_model = NULL; - SG_ADD(&m_C, "C", "Cost constant.", ParameterProperties()); - SG_ADD(&m_epsilon, "epsilon", "Convergence precision.", ParameterProperties()); - SG_ADD(&m_max_iter, "max_iter", "Maximum iterations.", ParameterProperties()); - SG_ADD(&m_model, "latent_model", "Latent Model.", ParameterProperties()); + SG_ADD(&m_C, "C", "Cost constant."); + SG_ADD(&m_epsilon, "epsilon", "Convergence precision."); + SG_ADD(&m_max_iter, "max_iter", "Maximum iterations."); + SG_ADD(&m_model, "latent_model", "Latent Model."); } diff --git a/src/shogun/machine/LinearMachine.cpp b/src/shogun/machine/LinearMachine.cpp index 6ae712d95ed..6df88303300 100644 --- a/src/shogun/machine/LinearMachine.cpp +++ b/src/shogun/machine/LinearMachine.cpp @@ -35,11 +35,10 @@ void CLinearMachine::init() bias = 0; features = NULL; - SG_ADD(&m_w, "w", "Parameter vector w.", ParameterProperties()); - SG_ADD(&bias, "bias", "Bias b.", ParameterProperties()); + SG_ADD(&m_w, "w", "Parameter vector w."); + SG_ADD(&bias, "bias", "Bias b."); SG_ADD( - (CFeatures**)&features, "features", "Feature object.", - ParameterProperties()); + (CFeatures**)&features, 
"features", "Feature object."); } diff --git a/src/shogun/machine/LinearMulticlassMachine.h b/src/shogun/machine/LinearMulticlassMachine.h index 8d3560137ba..99a5683cb20 100644 --- a/src/shogun/machine/LinearMulticlassMachine.h +++ b/src/shogun/machine/LinearMulticlassMachine.h @@ -30,8 +30,7 @@ class CLinearMulticlassMachine : public CMulticlassMachine /** default constructor */ CLinearMulticlassMachine() : CMulticlassMachine(), m_features(NULL) { - SG_ADD((CSGObject**)&m_features, "m_features", "Feature object.", - ParameterProperties()); + SG_ADD((CSGObject**)&m_features, "m_features", "Feature object."); } /** standard constructor @@ -44,8 +43,7 @@ class CLinearMulticlassMachine : public CMulticlassMachine CMulticlassMachine(strategy,(CMachine*)machine,labs), m_features(NULL) { set_features(features); - SG_ADD((CSGObject**)&m_features, "m_features", "Feature object.", - ParameterProperties()); + SG_ADD((CSGObject**)&m_features, "m_features", "Feature object."); } /** destructor */ diff --git a/src/shogun/machine/LinearStructuredOutputMachine.cpp b/src/shogun/machine/LinearStructuredOutputMachine.cpp index e52644b73f1..67664cd197c 100644 --- a/src/shogun/machine/LinearStructuredOutputMachine.cpp +++ b/src/shogun/machine/LinearStructuredOutputMachine.cpp @@ -68,7 +68,7 @@ CStructuredLabels* CLinearStructuredOutputMachine::apply_structured(CFeatures* d void CLinearStructuredOutputMachine::register_parameters() { - SG_ADD(&m_w, "m_w", "Weight vector", ParameterProperties()); + SG_ADD(&m_w, "m_w", "Weight vector"); } void CLinearStructuredOutputMachine::store_model_features() diff --git a/src/shogun/machine/Machine.cpp b/src/shogun/machine/Machine.cpp index ae0cef78e44..2eb668741bd 100644 --- a/src/shogun/machine/Machine.cpp +++ b/src/shogun/machine/Machine.cpp @@ -21,15 +21,15 @@ CMachine::CMachine() m_store_model_features=false; SG_ADD(&m_max_train_time, "max_train_time", - "Maximum training time.", ParameterProperties()); + "Maximum training time."); SG_ADD((machine_int_t*) &m_solver_type, "solver_type", - "Type of solver.", ParameterProperties()); + "Type of solver."); - SG_ADD(&m_labels, "labels", "Labels to be used.", ParameterProperties()); + SG_ADD(&m_labels, "labels", "Labels to be used."); SG_ADD(&m_store_model_features, "store_model_features", - "Should feature data of model be stored after training?", ParameterProperties()); + "Should feature data of model be stored after training?"); SG_ADD(&m_data_locked, "data_locked", - "Indicates whether data is locked", ParameterProperties()); + "Indicates whether data is locked"); } CMachine::~CMachine() diff --git a/src/shogun/machine/MulticlassMachine.cpp b/src/shogun/machine/MulticlassMachine.cpp index 47a53dd358f..593db217ee6 100644 --- a/src/shogun/machine/MulticlassMachine.cpp +++ b/src/shogun/machine/MulticlassMachine.cpp @@ -50,8 +50,8 @@ void CMulticlassMachine::set_labels(CLabels* lab) void CMulticlassMachine::register_parameters() { - SG_ADD(&m_multiclass_strategy,"multiclass_strategy", "Multiclass strategy", ParameterProperties()); - SG_ADD(&m_machine, "machine", "The base machine", ParameterProperties()); + SG_ADD(&m_multiclass_strategy,"multiclass_strategy", "Multiclass strategy"); + SG_ADD(&m_machine, "machine", "The base machine"); } void CMulticlassMachine::init_strategy() diff --git a/src/shogun/machine/OnlineLinearMachine.cpp b/src/shogun/machine/OnlineLinearMachine.cpp index 67a12220c19..b64eb927221 100644 --- a/src/shogun/machine/OnlineLinearMachine.cpp +++ b/src/shogun/machine/OnlineLinearMachine.cpp @@ -19,10 +19,10 
@@ using namespace shogun; COnlineLinearMachine::COnlineLinearMachine() : CMachine(), bias(0), features(NULL) { - SG_ADD(&m_w, "m_w", "Parameter vector w.", ParameterProperties()); - SG_ADD(&bias, "bias", "Bias b.", ParameterProperties()); + SG_ADD(&m_w, "m_w", "Parameter vector w."); + SG_ADD(&bias, "bias", "Bias b."); SG_ADD((CSGObject**) &features, "features", - "Feature object.", ParameterProperties()); + "Feature object."); } COnlineLinearMachine::~COnlineLinearMachine() diff --git a/src/shogun/machine/StructuredOutputMachine.cpp b/src/shogun/machine/StructuredOutputMachine.cpp index 7ac81a1f0c8..6a5ebd37e1b 100644 --- a/src/shogun/machine/StructuredOutputMachine.cpp +++ b/src/shogun/machine/StructuredOutputMachine.cpp @@ -52,10 +52,10 @@ CStructuredModel* CStructuredOutputMachine::get_model() const void CStructuredOutputMachine::register_parameters() { - SG_ADD((CSGObject**)&m_model, "m_model", "Structured model", ParameterProperties()); - SG_ADD((CSGObject**)&m_surrogate_loss, "m_surrogate_loss", "Surrogate loss", ParameterProperties()); - SG_ADD(&m_verbose, "verbose", "Verbosity flag", ParameterProperties()); - SG_ADD((CSGObject**)&m_helper, "helper", "Training helper", ParameterProperties()); + SG_ADD((CSGObject**)&m_model, "m_model", "Structured model"); + SG_ADD((CSGObject**)&m_surrogate_loss, "m_surrogate_loss", "Surrogate loss"); + SG_ADD(&m_verbose, "verbose", "Verbosity flag"); + SG_ADD((CSGObject**)&m_helper, "helper", "Training helper"); m_verbose = false; m_helper = NULL; diff --git a/src/shogun/machine/gp/DualVariationalGaussianLikelihood.cpp b/src/shogun/machine/gp/DualVariationalGaussianLikelihood.cpp index e20c5220b75..0c47ca03efa 100644 --- a/src/shogun/machine/gp/DualVariationalGaussianLikelihood.cpp +++ b/src/shogun/machine/gp/DualVariationalGaussianLikelihood.cpp @@ -217,16 +217,13 @@ void CDualVariationalGaussianLikelihood::precompute() void CDualVariationalGaussianLikelihood::init() { SG_ADD(&m_lambda, "lambda", - "Dual parameter for variational s2", - ParameterProperties()); + "Dual parameter for variational s2"); SG_ADD(&m_is_valid, "is_valid", - "Is the Dual parameter valid", - ParameterProperties()); + "Is the Dual parameter valid"); SG_ADD(&m_strict_scale, "strict_scale", - "The strict variable used in adjust_step_wrt_dual_parameter", - ParameterProperties()); + "The strict variable used in adjust_step_wrt_dual_parameter"); m_is_valid=false; m_strict_scale=1e-5; diff --git a/src/shogun/machine/gp/Inference.cpp b/src/shogun/machine/gp/Inference.cpp index 427b4eb3471..68ee9c0c764 100644 --- a/src/shogun/machine/gp/Inference.cpp +++ b/src/shogun/machine/gp/Inference.cpp @@ -92,9 +92,9 @@ void CInference::init() SG_ADD(&m_log_scale, "log_scale", "Kernel log scale", ParameterProperties::HYPER | ParameterProperties::GRADIENT); SG_ADD(&m_model, "likelihood_model", "Likelihood model", ParameterProperties::HYPER); SG_ADD(&m_mean, "mean_function", "Mean function", ParameterProperties::HYPER); - SG_ADD(&m_labels, "labels", "Labels", ParameterProperties()); - SG_ADD(&m_features, "features", "Features", ParameterProperties()); - SG_ADD(&m_gradient_update, "gradient_update", "Whether gradients are updated", ParameterProperties()); + SG_ADD(&m_labels, "labels", "Labels"); + SG_ADD(&m_features, "features", "Features"); + SG_ADD(&m_gradient_update, "gradient_update", "Whether gradients are updated"); m_kernel=NULL; @@ -106,10 +106,10 @@ void CInference::init() m_gradient_update=false; m_minimizer=NULL; - SG_ADD((CSGObject**)&m_minimizer, "Inference__m_minimizer", "minimizer 
in Inference", ParameterProperties()); - SG_ADD(&m_alpha, "alpha", "alpha vector used in process mean calculation", ParameterProperties()); - SG_ADD(&m_L, "L", "upper triangular factor of Cholesky decomposition", ParameterProperties()); - SG_ADD(&m_E, "E", "the matrix used for multi classification", ParameterProperties()); + SG_ADD((CSGObject**)&m_minimizer, "Inference__m_minimizer", "minimizer in Inference"); + SG_ADD(&m_alpha, "alpha", "alpha vector used in process mean calculation"); + SG_ADD(&m_L, "L", "upper triangular factor of Cholesky decomposition"); + SG_ADD(&m_E, "E", "the matrix used for multi classification"); } void CInference::register_minimizer(Minimizer* minimizer) diff --git a/src/shogun/machine/gp/KLCholeskyInferenceMethod.cpp b/src/shogun/machine/gp/KLCholeskyInferenceMethod.cpp index c5f45721afd..2239b9512c1 100644 --- a/src/shogun/machine/gp/KLCholeskyInferenceMethod.cpp +++ b/src/shogun/machine/gp/KLCholeskyInferenceMethod.cpp @@ -66,11 +66,9 @@ CKLCholeskyInferenceMethod::CKLCholeskyInferenceMethod(CKernel* kern, void CKLCholeskyInferenceMethod::init() { SG_ADD(&m_C, "C", - "The Cholesky represention of the variational co-variance matrix", - ParameterProperties()); + "The Cholesky represention of the variational co-variance matrix"); SG_ADD(&m_InvK_C, "invK_C", - " The K^{-1}C matrix", - ParameterProperties()); + " The K^{-1}C matrix"); } CKLCholeskyInferenceMethod* CKLCholeskyInferenceMethod::obtain_from_generic( diff --git a/src/shogun/machine/gp/KLCovarianceInferenceMethod.cpp b/src/shogun/machine/gp/KLCovarianceInferenceMethod.cpp index e369026b5d3..09c073ce747 100644 --- a/src/shogun/machine/gp/KLCovarianceInferenceMethod.cpp +++ b/src/shogun/machine/gp/KLCovarianceInferenceMethod.cpp @@ -66,23 +66,17 @@ CKLCovarianceInferenceMethod::CKLCovarianceInferenceMethod(CKernel* kern, void CKLCovarianceInferenceMethod::init() { SG_ADD(&m_V, "V", - "V is L'*V=diag(sW)*K", - ParameterProperties()); + "V is L'*V=diag(sW)*K"); SG_ADD(&m_A, "A", - "A is A=I-K*diag(sW)*inv(L)'*inv(L)*diag(sW)", - ParameterProperties()); + "A is A=I-K*diag(sW)*inv(L)'*inv(L)*diag(sW)"); SG_ADD(&m_W, "W", - "noise matrix W", - ParameterProperties()); + "noise matrix W"); SG_ADD(&m_sW, "sW", - "Square root of noise matrix W", - ParameterProperties()); + "Square root of noise matrix W"); SG_ADD(&m_dv, "dv", - "the gradient of the variational expection wrt sigma2", - ParameterProperties()); + "the gradient of the variational expection wrt sigma2"); SG_ADD(&m_df, "df", - "the gradient of the variational expection wrt mu", - ParameterProperties()); + "the gradient of the variational expection wrt mu"); } diff --git a/src/shogun/machine/gp/KLDiagonalInferenceMethod.cpp b/src/shogun/machine/gp/KLDiagonalInferenceMethod.cpp index 45d083d21bc..d00b845cce8 100644 --- a/src/shogun/machine/gp/KLDiagonalInferenceMethod.cpp +++ b/src/shogun/machine/gp/KLDiagonalInferenceMethod.cpp @@ -66,8 +66,7 @@ CKLDiagonalInferenceMethod::CKLDiagonalInferenceMethod(CKernel* kern, void CKLDiagonalInferenceMethod::init() { SG_ADD(&m_InvK, "invK", - "The K^{-1} matrix", - ParameterProperties()); + "The K^{-1} matrix"); } CKLDiagonalInferenceMethod* CKLDiagonalInferenceMethod::obtain_from_generic( diff --git a/src/shogun/machine/gp/KLDualInferenceMethod.cpp b/src/shogun/machine/gp/KLDualInferenceMethod.cpp index 54c07a83a67..8f64f0ba701 100644 --- a/src/shogun/machine/gp/KLDualInferenceMethod.cpp +++ b/src/shogun/machine/gp/KLDualInferenceMethod.cpp @@ -106,9 +106,9 @@ friend class CKLDualInferenceMethodMinimizer; 
m_obj=NULL; m_derivatives = SGVector(); SG_ADD(&m_derivatives, "KLDualInferenceMethodCostFunction__m_derivatives", - "derivatives in KLDualInferenceMethodCostFunction", ParameterProperties()); + "derivatives in KLDualInferenceMethodCostFunction"); SG_ADD((CSGObject **)&m_obj, "KLDualInferenceMethodCostFunction__m_obj", - "obj in KLDualInferenceMethodCostFunction", ParameterProperties()); + "obj in KLDualInferenceMethodCostFunction"); } CKLDualInferenceMethod *m_obj; CDualVariationalGaussianLikelihood* get_dual_variational_likelihood() const @@ -278,20 +278,15 @@ void CKLDualInferenceMethod::register_minimizer(Minimizer* minimizer) void CKLDualInferenceMethod::init() { SG_ADD(&m_W, "W", - "noise matrix W", - ParameterProperties()); + "noise matrix W"); SG_ADD(&m_sW, "sW", - "Square root of noise matrix W", - ParameterProperties()); + "Square root of noise matrix W"); SG_ADD(&m_dv, "dv", - "the gradient of the variational expection wrt sigma2", - ParameterProperties()); + "the gradient of the variational expection wrt sigma2"); SG_ADD(&m_df, "df", - "the gradient of the variational expection wrt mu", - ParameterProperties()); + "the gradient of the variational expection wrt mu"); SG_ADD(&m_is_dual_valid, "is_dual_valid", - "whether the lambda (m_W) is valid or not", - ParameterProperties()); + "whether the lambda (m_W) is valid or not"); m_is_dual_valid=false; register_minimizer(new CKLDualInferenceMethodMinimizer()); diff --git a/src/shogun/machine/gp/KLInference.cpp b/src/shogun/machine/gp/KLInference.cpp index 8a384848192..941b6877ecf 100644 --- a/src/shogun/machine/gp/KLInference.cpp +++ b/src/shogun/machine/gp/KLInference.cpp @@ -99,9 +99,9 @@ class KLInferenceCostFunction: public FirstOrderCostFunction m_obj=NULL; m_derivatives = SGVector(); SG_ADD(&m_derivatives, "KLInferenceCostFunction__m_derivatives", - "derivatives in KLInferenceCostFunction", ParameterProperties()); + "derivatives in KLInferenceCostFunction"); SG_ADD((CSGObject **)&m_obj, "KLInferenceCostFunction__m_obj", - "obj in KLInferenceCostFunction", ParameterProperties()); + "obj in KLInferenceCostFunction"); } CKLInference *m_obj; }; @@ -143,26 +143,19 @@ void CKLInference::init() m_exp_factor=2; m_min_coeff_kernel=1e-5; SG_ADD(&m_noise_factor, "noise_factor", - "The noise factor used for correcting Kernel matrix", - ParameterProperties()); + "The noise factor used for correcting Kernel matrix"); SG_ADD(&m_exp_factor, "exp_factor", - "The exponential factor used for increasing noise_factor", - ParameterProperties()); + "The exponential factor used for increasing noise_factor"); SG_ADD(&m_max_attempt, "max_attempt", - "The max number of attempt to correct Kernel matrix", - ParameterProperties()); + "The max number of attempt to correct Kernel matrix"); SG_ADD(&m_min_coeff_kernel, "min_coeff_kernel", - "The minimum coeefficient of kernel matrix in LDLT factorization used to check whether the kernel matrix is positive definite or not", - ParameterProperties()); + "The minimum coeefficient of kernel matrix in LDLT factorization used to check whether the kernel matrix is positive definite or not"); SG_ADD(&m_s2, "s2", - "Variational parameter sigma2", - ParameterProperties()); + "Variational parameter sigma2"); SG_ADD(&m_mu, "mu", - "Variational parameter mu and posterior mean", - ParameterProperties()); + "Variational parameter mu and posterior mean"); SG_ADD(&m_Sigma, "Sigma", - "Posterior covariance matrix Sigma", - ParameterProperties()); + "Posterior covariance matrix Sigma"); register_minimizer(new CLBFGSMinimizer()); } 
diff --git a/src/shogun/machine/gp/KLLowerTriangularInference.cpp b/src/shogun/machine/gp/KLLowerTriangularInference.cpp index 5ab701620c1..722d26d475f 100644 --- a/src/shogun/machine/gp/KLLowerTriangularInference.cpp +++ b/src/shogun/machine/gp/KLLowerTriangularInference.cpp @@ -65,21 +65,16 @@ CKLLowerTriangularInference::CKLLowerTriangularInference(CKernel* kern, void CKLLowerTriangularInference::init() { SG_ADD(&m_InvK_Sigma, "invk_Sigma", - "K^{-1}Sigma'", - ParameterProperties()); + "K^{-1}Sigma'"); SG_ADD(&m_mean_vec, "mean_vec", - "The mean vector generated from mean function", - ParameterProperties()); + "The mean vector generated from mean function"); SG_ADD(&m_log_det_Kernel, "log_det_kernel", - "The Log-determinant of Kernel", - ParameterProperties()); + "The Log-determinant of Kernel"); SG_ADD(&m_Kernel_LsD, "L_sqrt_D", - "The L*sqrt(D) matrix, where L and D are defined in LDLT factorization on Kernel*sq(m_scale)", - ParameterProperties()); + "The L*sqrt(D) matrix, where L and D are defined in LDLT factorization on Kernel*sq(m_scale)"); SG_ADD(&m_Kernel_P, "Permutation_P", - "The permutation sequence of P, where P are defined in LDLT factorization on Kernel*sq(m_scale)", - ParameterProperties()); + "The permutation sequence of P, where P are defined in LDLT factorization on Kernel*sq(m_scale)"); m_log_det_Kernel=0; } diff --git a/src/shogun/machine/gp/LaplaceInference.cpp b/src/shogun/machine/gp/LaplaceInference.cpp index bc3e80709b3..6c7ae23097c 100644 --- a/src/shogun/machine/gp/LaplaceInference.cpp +++ b/src/shogun/machine/gp/LaplaceInference.cpp @@ -56,10 +56,10 @@ CLaplaceInference::CLaplaceInference(CKernel* kern, void CLaplaceInference::init() { - SG_ADD(&m_dlp, "dlp", "derivative of log likelihood with respect to function location", ParameterProperties()); - SG_ADD(&m_mu, "mu", "mean vector of the approximation to the posterior", ParameterProperties()); - SG_ADD(&m_Sigma, "Sigma", "covariance matrix of the approximation to the posterior", ParameterProperties()); - SG_ADD(&m_W, "W", "the noise matrix", ParameterProperties()); + SG_ADD(&m_dlp, "dlp", "derivative of log likelihood with respect to function location"); + SG_ADD(&m_mu, "mu", "mean vector of the approximation to the posterior"); + SG_ADD(&m_Sigma, "Sigma", "covariance matrix of the approximation to the posterior"); + SG_ADD(&m_W, "W", "the noise matrix"); } CLaplaceInference::~CLaplaceInference() diff --git a/src/shogun/machine/gp/LogitVGPiecewiseBoundLikelihood.cpp b/src/shogun/machine/gp/LogitVGPiecewiseBoundLikelihood.cpp index 57307e3908e..abfbccb5c05 100644 --- a/src/shogun/machine/gp/LogitVGPiecewiseBoundLikelihood.cpp +++ b/src/shogun/machine/gp/LogitVGPiecewiseBoundLikelihood.cpp @@ -387,26 +387,19 @@ void CLogitVGPiecewiseBoundLikelihood::init_likelihood() void CLogitVGPiecewiseBoundLikelihood::init() { SG_ADD(&m_bound, "bound", - "Variational piecewise bound for logit likelihood", - ParameterProperties()); + "Variational piecewise bound for logit likelihood"); SG_ADD(&m_pl, "pdf_l", - "The pdf given the lower range and parameters(mu and variance)", - ParameterProperties()); + "The pdf given the lower range and parameters(mu and variance)"); SG_ADD(&m_ph, "pdf_h", - "The pdf given the higher range and parameters(mu and variance)", - ParameterProperties()); + "The pdf given the higher range and parameters(mu and variance)"); SG_ADD(&m_cdf_diff, "cdf_h_minus_cdf_l", - "The CDF difference between the lower and higher range given the parameters(mu and variance)", - ParameterProperties()); + "The CDF 
difference between the lower and higher range given the parameters(mu and variance)"); SG_ADD(&m_l2_plus_s2, "l2_plus_sigma2", - "The result of l^2 + sigma^2", - ParameterProperties()); + "The result of l^2 + sigma^2"); SG_ADD(&m_h2_plus_s2, "h2_plus_sigma2", - "The result of h^2 + sigma^2", - ParameterProperties()); + "The result of h^2 + sigma^2"); SG_ADD(&m_weighted_pdf_diff, "weighted_pdf_diff", - "The result of l*pdf(l_norm)-h*pdf(h_norm)", - ParameterProperties()); + "The result of l*pdf(l_norm)-h*pdf(h_norm)"); init_likelihood(); } diff --git a/src/shogun/machine/gp/MultiLaplaceInferenceMethod.cpp b/src/shogun/machine/gp/MultiLaplaceInferenceMethod.cpp index ca052d95f10..4cdff6b934b 100644 --- a/src/shogun/machine/gp/MultiLaplaceInferenceMethod.cpp +++ b/src/shogun/machine/gp/MultiLaplaceInferenceMethod.cpp @@ -122,13 +122,13 @@ void CMultiLaplaceInferenceMethod::init() m_opt_max=10; m_nlz=0; - SG_ADD(&m_nlz, "nlz", "negative log marginal likelihood ", ParameterProperties()); - SG_ADD(&m_U, "U", "the matrix used to compute gradient wrt hyperparameters", ParameterProperties()); + SG_ADD(&m_nlz, "nlz", "negative log marginal likelihood "); + SG_ADD(&m_U, "U", "the matrix used to compute gradient wrt hyperparameters"); - SG_ADD(&m_tolerance, "tolerance", "amount of tolerance for Newton's iterations", ParameterProperties()); - SG_ADD(&m_iter, "iter", "max Newton's iterations", ParameterProperties()); - SG_ADD(&m_opt_tolerance, "opt_tolerance", "amount of tolerance for Brent's minimization method", ParameterProperties()); - SG_ADD(&m_opt_max, "opt_max", "max iterations for Brent's minimization method", ParameterProperties()); + SG_ADD(&m_tolerance, "tolerance", "amount of tolerance for Newton's iterations"); + SG_ADD(&m_iter, "iter", "max Newton's iterations"); + SG_ADD(&m_opt_tolerance, "opt_tolerance", "amount of tolerance for Brent's minimization method"); + SG_ADD(&m_opt_max, "opt_max", "max iterations for Brent's minimization method"); } CMultiLaplaceInferenceMethod::~CMultiLaplaceInferenceMethod() diff --git a/src/shogun/machine/gp/NumericalVGLikelihood.cpp b/src/shogun/machine/gp/NumericalVGLikelihood.cpp index df8f4500f0b..aabb0f29d4f 100644 --- a/src/shogun/machine/gp/NumericalVGLikelihood.cpp +++ b/src/shogun/machine/gp/NumericalVGLikelihood.cpp @@ -66,24 +66,19 @@ CNumericalVGLikelihood::~CNumericalVGLikelihood() void CNumericalVGLikelihood::init() { SG_ADD(&m_log_lam, "log_lam", - "The result of used for computing variational expection\n", - ParameterProperties()); + "The result of used for computing variational expection\n"); SG_ADD(&m_xgh, "xgh", - "Gaussian-Hermite quadrature base points (abscissas)\n", - ParameterProperties()); + "Gaussian-Hermite quadrature base points (abscissas)\n"); SG_ADD(&m_wgh, "wgh", - "Gaussian-Hermite quadrature weight factors\n", - ParameterProperties()); + "Gaussian-Hermite quadrature weight factors\n"); SG_ADD(&m_GHQ_N, "GHQ_N", - "The number of Gaussian-Hermite quadrature point\n", - ParameterProperties()); + "The number of Gaussian-Hermite quadrature point\n"); SG_ADD(&m_is_init_GHQ, "is_init_GHQ", - "Whether Gaussian-Hermite quadrature points are initialized or not\n", - ParameterProperties()); + "Whether Gaussian-Hermite quadrature points are initialized or not\n"); m_GHQ_N=20; m_is_init_GHQ=false; diff --git a/src/shogun/machine/gp/SingleFITCInference.cpp b/src/shogun/machine/gp/SingleFITCInference.cpp index d945ae7c8fc..5547acfd4d0 100644 --- a/src/shogun/machine/gp/SingleFITCInference.cpp +++ 
b/src/shogun/machine/gp/SingleFITCInference.cpp @@ -53,12 +53,12 @@ CSingleFITCInference::CSingleFITCInference(CKernel* kern, CFeatures* feat, void CSingleFITCInference::init() { - SG_ADD(&m_al, "al", "alpha", ParameterProperties()); - SG_ADD(&m_t, "t", "noise", ParameterProperties()); - SG_ADD(&m_B, "B", "B", ParameterProperties()); - SG_ADD(&m_w, "w", "B*al", ParameterProperties()); - SG_ADD(&m_Rvdd, "Rvdd", "Rvdd", ParameterProperties()); - SG_ADD(&m_V, "V", "V", ParameterProperties()); + SG_ADD(&m_al, "al", "alpha"); + SG_ADD(&m_t, "t", "noise"); + SG_ADD(&m_B, "B", "B"); + SG_ADD(&m_w, "w", "B*al"); + SG_ADD(&m_Rvdd, "Rvdd", "Rvdd"); + SG_ADD(&m_V, "V", "V"); } CSingleFITCInference::~CSingleFITCInference() diff --git a/src/shogun/machine/gp/SingleFITCLaplaceInferenceMethod.cpp b/src/shogun/machine/gp/SingleFITCLaplaceInferenceMethod.cpp index 1f0b698cd8b..5d70a873864 100644 --- a/src/shogun/machine/gp/SingleFITCLaplaceInferenceMethod.cpp +++ b/src/shogun/machine/gp/SingleFITCLaplaceInferenceMethod.cpp @@ -146,9 +146,9 @@ class SingleFITCLaplaceInferenceMethodCostFunction: public FirstOrderCostFunctio m_obj=NULL; m_derivatives = SGVector(); SG_ADD(&m_derivatives, "SingleFITCLaplaceInferenceMethodCostFunction__m_derivatives", - "derivatives in SingleFITCLaplaceInferenceMethodCostFunction", ParameterProperties()); + "derivatives in SingleFITCLaplaceInferenceMethodCostFunction"); SG_ADD((CSGObject **)&m_obj, "SingleFITCLaplaceInferenceMethodCostFunction__m_obj", - "obj in SingleFITCLaplaceInferenceMethodCostFunction", ParameterProperties()); + "obj in SingleFITCLaplaceInferenceMethodCostFunction"); } SGVector m_derivatives; @@ -187,15 +187,15 @@ void CSingleFITCLaplaceNewtonOptimizer::init() m_opt_max=10; SG_ADD((CSGObject **)&m_obj, "CSingleFITCLaplaceNewtonOptimizer__m_obj", - "obj in CSingleFITCLaplaceNewtonOptimizer", ParameterProperties()); + "obj in CSingleFITCLaplaceNewtonOptimizer"); SG_ADD(&m_iter, "CSingleFITCLaplaceNewtonOptimizer__m_iter", - "iter in CSingleFITCLaplaceNewtonOptimizer", ParameterProperties()); + "iter in CSingleFITCLaplaceNewtonOptimizer"); SG_ADD(&m_tolerance, "CSingleFITCLaplaceNewtonOptimizer__m_tolerance", - "tolerance in CSingleFITCLaplaceNewtonOptimizer", ParameterProperties()); + "tolerance in CSingleFITCLaplaceNewtonOptimizer"); SG_ADD(&m_opt_tolerance, "CSingleFITCLaplaceNewtonOptimizer__m_opt_tolerance", - "opt_tolerance in CSingleFITCLaplaceNewtonOptimizer", ParameterProperties()); + "opt_tolerance in CSingleFITCLaplaceNewtonOptimizer"); SG_ADD(&m_opt_max, "CSingleFITCLaplaceNewtonOptimizer__m_opt_max", - "opt_max in CSingleFITCLaplaceNewtonOptimizer", ParameterProperties()); + "opt_max in CSingleFITCLaplaceNewtonOptimizer"); } float64_t CSingleFITCLaplaceNewtonOptimizer::minimize() @@ -320,18 +320,18 @@ void CSingleFITCLaplaceInferenceMethod::init() m_Psi=0; m_Wneg=false; - SG_ADD(&m_dlp, "dlp", "derivative of log likelihood with respect to function location", ParameterProperties()); - SG_ADD(&m_W, "W", "the noise matrix", ParameterProperties()); - - SG_ADD(&m_sW, "sW", "square root of W", ParameterProperties()); - SG_ADD(&m_d2lp, "d2lp", "second derivative of log likelihood with respect to function location", ParameterProperties()); - SG_ADD(&m_d3lp, "d3lp", "third derivative of log likelihood with respect to function location", ParameterProperties()); - SG_ADD(&m_chol_R0, "chol_R0", "Cholesky of inverse covariance of inducing features", ParameterProperties()); - SG_ADD(&m_dfhat, "dfhat", "derivative of negative log (approximated) marginal 
likelihood wrt f", ParameterProperties()); - SG_ADD(&m_g, "g", "variable g defined in infFITC_Laplace.m", ParameterProperties()); - SG_ADD(&m_dg, "dg", "variable d0 defined in infFITC_Laplace.m", ParameterProperties()); - SG_ADD(&m_Psi, "Psi", "the negative log likelihood without constant terms used in Newton's method", ParameterProperties()); - SG_ADD(&m_Wneg, "Wneg", "whether W contains negative elements", ParameterProperties()); + SG_ADD(&m_dlp, "dlp", "derivative of log likelihood with respect to function location"); + SG_ADD(&m_W, "W", "the noise matrix"); + + SG_ADD(&m_sW, "sW", "square root of W"); + SG_ADD(&m_d2lp, "d2lp", "second derivative of log likelihood with respect to function location"); + SG_ADD(&m_d3lp, "d3lp", "third derivative of log likelihood with respect to function location"); + SG_ADD(&m_chol_R0, "chol_R0", "Cholesky of inverse covariance of inducing features"); + SG_ADD(&m_dfhat, "dfhat", "derivative of negative log (approximated) marginal likelihood wrt f"); + SG_ADD(&m_g, "g", "variable g defined in infFITC_Laplace.m"); + SG_ADD(&m_dg, "dg", "variable d0 defined in infFITC_Laplace.m"); + SG_ADD(&m_Psi, "Psi", "the negative log likelihood without constant terms used in Newton's method"); + SG_ADD(&m_Wneg, "Wneg", "whether W contains negative elements"); register_minimizer(new CSingleFITCLaplaceNewtonOptimizer()); } diff --git a/src/shogun/machine/gp/SingleLaplaceInferenceMethod.cpp b/src/shogun/machine/gp/SingleLaplaceInferenceMethod.cpp index e6b2f7e91b6..1d35bc8f2d4 100644 --- a/src/shogun/machine/gp/SingleLaplaceInferenceMethod.cpp +++ b/src/shogun/machine/gp/SingleLaplaceInferenceMethod.cpp @@ -110,9 +110,9 @@ class SingleLaplaceInferenceMethodCostFunction: public FirstOrderCostFunction m_obj=NULL; m_derivatives = SGVector(); SG_ADD(&m_derivatives, "SingleLaplaceInferenceMethodCostFunction__m_derivatives", - "derivatives in SingleLaplaceInferenceMethodCostFunction", ParameterProperties()); + "derivatives in SingleLaplaceInferenceMethodCostFunction"); SG_ADD((CSGObject **)&m_obj, "SingleLaplaceInferenceMethodCostFunction__m_obj", - "obj in SingleLaplaceInferenceMethodCostFunction", ParameterProperties()); + "obj in SingleLaplaceInferenceMethodCostFunction"); } @@ -152,15 +152,15 @@ void CSingleLaplaceNewtonOptimizer::init() m_opt_max=10; SG_ADD((CSGObject **)&m_obj, "CSingleLaplaceNewtonOptimizer__m_obj", - "obj in CSingleLaplaceNewtonOptimizer", ParameterProperties()); + "obj in CSingleLaplaceNewtonOptimizer"); SG_ADD(&m_iter, "CSingleLaplaceNewtonOptimizer__m_iter", - "iter in CSingleLaplaceNewtonOptimizer", ParameterProperties()); + "iter in CSingleLaplaceNewtonOptimizer"); SG_ADD(&m_tolerance, "CSingleLaplaceNewtonOptimizer__m_tolerance", - "tolerance in CSingleLaplaceNewtonOptimizer", ParameterProperties()); + "tolerance in CSingleLaplaceNewtonOptimizer"); SG_ADD(&m_opt_tolerance, "CSingleLaplaceNewtonOptimizer__m_opt_tolerance", - "opt_tolerance in CSingleLaplaceNewtonOptimizer", ParameterProperties()); + "opt_tolerance in CSingleLaplaceNewtonOptimizer"); SG_ADD(&m_opt_max, "CSingleLaplaceNewtonOptimizer__m_opt_max", - "opt_max in CSingleLaplaceNewtonOptimizer", ParameterProperties()); + "opt_max in CSingleLaplaceNewtonOptimizer"); } float64_t CSingleLaplaceNewtonOptimizer::minimize() @@ -283,10 +283,10 @@ CSingleLaplaceInferenceMethod::CSingleLaplaceInferenceMethod(CKernel* kern, void CSingleLaplaceInferenceMethod::init() { m_Psi=0; - SG_ADD(&m_Psi, "Psi", "posterior log likelihood without constant terms", ParameterProperties()); - SG_ADD(&m_sW, "sW", 
"square root of W", ParameterProperties()); - SG_ADD(&m_d2lp, "d2lp", "second derivative of log likelihood with respect to function location", ParameterProperties()); - SG_ADD(&m_d3lp, "d3lp", "third derivative of log likelihood with respect to function location", ParameterProperties()); + SG_ADD(&m_Psi, "Psi", "posterior log likelihood without constant terms"); + SG_ADD(&m_sW, "sW", "square root of W"); + SG_ADD(&m_d2lp, "d2lp", "second derivative of log likelihood with respect to function location"); + SG_ADD(&m_d3lp, "d3lp", "third derivative of log likelihood with respect to function location"); register_minimizer(new CSingleLaplaceNewtonOptimizer()); } diff --git a/src/shogun/machine/gp/SingleSparseInference.cpp b/src/shogun/machine/gp/SingleSparseInference.cpp index 1640f87d057..7d9d791756d 100644 --- a/src/shogun/machine/gp/SingleSparseInference.cpp +++ b/src/shogun/machine/gp/SingleSparseInference.cpp @@ -114,7 +114,7 @@ class SingleSparseInferenceCostFunction: public FirstOrderBoundConstraintsCostFu //The existing implementation in CSGObject::get_parameter_incremental_hash() //can NOT deal with circular reference when parameter_hash_changed() is called //SG_ADD((CSGObject **)&m_obj, "CSigleSparseInference__m_obj", - //"m_obj in SingleSparseInferenceCostFunction", ParameterProperties()); + //"m_obj in SingleSparseInferenceCostFunction"); } }; #endif //DOXYGEN_SHOULD_SKIP_THIS @@ -137,22 +137,22 @@ void CSingleSparseInference::init() m_fully_sparse=false; m_inducing_minimizer=NULL; SG_ADD(&m_fully_sparse, "fully_Sparse", - "whether the kernel support sparse inference", ParameterProperties()); + "whether the kernel support sparse inference"); m_lock=new CLock(); SG_ADD(&m_upper_bound, "upper_bound", - "upper bound of inducing features", ParameterProperties()); + "upper bound of inducing features"); SG_ADD(&m_lower_bound, "lower_bound", - "lower bound of inducing features", ParameterProperties()); + "lower bound of inducing features"); SG_ADD(&m_max_ind_iterations, "max_ind_iterations", - "max number of iterations used in inducing features optimization", ParameterProperties()); + "max number of iterations used in inducing features optimization"); SG_ADD(&m_ind_tolerance, "ind_tolerance", - "tolearance used in inducing features optimization", ParameterProperties()); + "tolearance used in inducing features optimization"); SG_ADD(&m_opt_inducing_features, - "opt_inducing_features", "whether optimize inducing features", ParameterProperties()); + "opt_inducing_features", "whether optimize inducing features"); SG_ADD((CSGObject **)&m_inducing_minimizer, - "inducing_minimizer", "Minimizer used in optimize inducing features", ParameterProperties()); + "inducing_minimizer", "Minimizer used in optimize inducing features"); m_max_ind_iterations=50; m_ind_tolerance=1e-3; diff --git a/src/shogun/machine/gp/SoftMaxLikelihood.cpp b/src/shogun/machine/gp/SoftMaxLikelihood.cpp index dc0ba98b8b7..03f7172636a 100644 --- a/src/shogun/machine/gp/SoftMaxLikelihood.cpp +++ b/src/shogun/machine/gp/SoftMaxLikelihood.cpp @@ -60,8 +60,7 @@ void CSoftMaxLikelihood::init() { m_num_samples=10000; SG_ADD(&m_num_samples, "num_samples", - "Number of samples to be generated", - ParameterProperties()); + "Number of samples to be generated"); } SGVector CSoftMaxLikelihood::get_log_probability_f(const CLabels* lab, diff --git a/src/shogun/machine/gp/SparseInference.cpp b/src/shogun/machine/gp/SparseInference.cpp index d3e2860faf2..03c6474a8d0 100644 --- a/src/shogun/machine/gp/SparseInference.cpp +++ 
b/src/shogun/machine/gp/SparseInference.cpp @@ -101,9 +101,9 @@ void CSparseInference::init() ParameterProperties::HYPER | ParameterProperties::GRADIENT); SG_ADD(&m_log_ind_noise, "log_inducing_noise", "noise about inducing potins in log domain", ParameterProperties::HYPER | ParameterProperties::GRADIENT); - SG_ADD(&m_mu, "mu", "mean vector of the approximation to the posterior", ParameterProperties()); - SG_ADD(&m_Sigma, "Sigma", "covariance matrix of the approximation to the posterior", ParameterProperties()); - SG_ADD(&m_ktrtr_diag, "ktrtr_diag", "diagonal elements of kernel matrix m_ktrtr", ParameterProperties()); + SG_ADD(&m_mu, "mu", "mean vector of the approximation to the posterior"); + SG_ADD(&m_Sigma, "Sigma", "covariance matrix of the approximation to the posterior"); + SG_ADD(&m_ktrtr_diag, "ktrtr_diag", "diagonal elements of kernel matrix m_ktrtr"); m_log_ind_noise = std::log(1e-10); m_inducing_features=SGMatrix(); diff --git a/src/shogun/machine/gp/VarDTCInferenceMethod.cpp b/src/shogun/machine/gp/VarDTCInferenceMethod.cpp index e639274c05f..55b69b653a4 100644 --- a/src/shogun/machine/gp/VarDTCInferenceMethod.cpp +++ b/src/shogun/machine/gp/VarDTCInferenceMethod.cpp @@ -69,15 +69,15 @@ void CVarDTCInferenceMethod::init() m_inv_La=SGMatrix(); m_Knm_inv_Lm=SGMatrix(); - SG_ADD(&m_yy, "yy", "yy", ParameterProperties()); - SG_ADD(&m_f3, "f3", "f3", ParameterProperties()); - SG_ADD(&m_sigma2, "sigma2", "sigma2", ParameterProperties()); - SG_ADD(&m_trk, "trk", "trk", ParameterProperties()); - SG_ADD(&m_Tmm, "Tmm", "Tmm", ParameterProperties()); - SG_ADD(&m_Tnm, "Tnm", "Tnm", ParameterProperties()); - SG_ADD(&m_inv_Lm, "inv_Lm", "inv_Lm", ParameterProperties()); - SG_ADD(&m_inv_La, "inv_La", "inv_La", ParameterProperties()); - SG_ADD(&m_Knm_inv_Lm, "Knm_Inv_Lm", "Knm_Inv_Lm", ParameterProperties()); + SG_ADD(&m_yy, "yy", "yy"); + SG_ADD(&m_f3, "f3", "f3"); + SG_ADD(&m_sigma2, "sigma2", "sigma2"); + SG_ADD(&m_trk, "trk", "trk"); + SG_ADD(&m_Tmm, "Tmm", "Tmm"); + SG_ADD(&m_Tnm, "Tnm", "Tnm"); + SG_ADD(&m_inv_Lm, "inv_Lm", "inv_Lm"); + SG_ADD(&m_inv_La, "inv_La", "inv_La"); + SG_ADD(&m_Knm_inv_Lm, "Knm_Inv_Lm", "Knm_Inv_Lm"); } CVarDTCInferenceMethod::~CVarDTCInferenceMethod() diff --git a/src/shogun/machine/gp/VariationalGaussianLikelihood.cpp b/src/shogun/machine/gp/VariationalGaussianLikelihood.cpp index fc794b27e55..2d47bf64177 100644 --- a/src/shogun/machine/gp/VariationalGaussianLikelihood.cpp +++ b/src/shogun/machine/gp/VariationalGaussianLikelihood.cpp @@ -44,16 +44,13 @@ CVariationalGaussianLikelihood::CVariationalGaussianLikelihood() void CVariationalGaussianLikelihood::init() { SG_ADD(&m_mu, "mu", - "The mean of variational normal distribution\n", - ParameterProperties()); + "The mean of variational normal distribution\n"); SG_ADD(&m_s2, "sigma2", - "The variance of variational normal distribution\n", - ParameterProperties()); + "The variance of variational normal distribution\n"); SG_ADD(&m_noise_factor, "noise_factor", - "Correct the variance if variance is close to zero or negative\n", - ParameterProperties()); + "Correct the variance if variance is close to zero or negative\n"); m_noise_factor=1e-6; } diff --git a/src/shogun/machine/gp/VariationalLikelihood.cpp b/src/shogun/machine/gp/VariationalLikelihood.cpp index 6b74d3b9302..2fdd955ad95 100644 --- a/src/shogun/machine/gp/VariationalLikelihood.cpp +++ b/src/shogun/machine/gp/VariationalLikelihood.cpp @@ -61,12 +61,10 @@ void CVariationalLikelihood::init() SG_REF(m_likelihood); SG_ADD(&m_lab, "labels", - "The label of 
the data\n", - ParameterProperties()); + "The label of the data\n"); SG_ADD((CSGObject**)&m_likelihood, "likelihood", - "The distribution used to model the data\n", - ParameterProperties()); + "The distribution used to model the data\n"); } SGVector CVariationalLikelihood::get_predictive_means( diff --git a/src/shogun/mathematics/linalg/eigsolver/EigenSolver.h b/src/shogun/mathematics/linalg/eigsolver/EigenSolver.h index 3a1d050fcb6..c46ec085888 100644 --- a/src/shogun/mathematics/linalg/eigsolver/EigenSolver.h +++ b/src/shogun/mathematics/linalg/eigsolver/EigenSolver.h @@ -112,24 +112,19 @@ class CEigenSolver : public CSGObject m_is_computed_max=false; SG_ADD(&m_min_eigenvalue, "min_eigenvalue", - "Minimum eigenvalue of a real valued self-adjoint linear operator", - ParameterProperties()); + "Minimum eigenvalue of a real valued self-adjoint linear operator"); SG_ADD(&m_max_eigenvalue, "max_eigenvalue", - "Maximum eigenvalue of a real valued self-adjoint linear operator", - ParameterProperties()); + "Maximum eigenvalue of a real valued self-adjoint linear operator"); SG_ADD((CSGObject**)&m_linear_operator, "linear_operator", - "Self-adjoint linear operator", - ParameterProperties()); + "Self-adjoint linear operator"); SG_ADD(&m_is_computed_min, "is_computed_min", - "Flag denoting that the minimum eigenvalue has already been computed", - ParameterProperties()); + "Flag denoting that the minimum eigenvalue has already been computed"); SG_ADD(&m_max_eigenvalue, "is_computed_max", - "Flag denoting that the maximum eigenvalue has already been computed", - ParameterProperties()); + "Flag denoting that the maximum eigenvalue has already been computed"); } }; diff --git a/src/shogun/mathematics/linalg/eigsolver/LanczosEigenSolver.cpp b/src/shogun/mathematics/linalg/eigsolver/LanczosEigenSolver.cpp index 7c5ff9f1344..aedce2e19ad 100644 --- a/src/shogun/mathematics/linalg/eigsolver/LanczosEigenSolver.cpp +++ b/src/shogun/mathematics/linalg/eigsolver/LanczosEigenSolver.cpp @@ -43,13 +43,13 @@ void CLanczosEigenSolver::init() m_absolute_tolerence=1E-6; SG_ADD(&m_max_iteration_limit, "max_iteration_limit", - "Maximum number of iteration for the solver", ParameterProperties()); + "Maximum number of iteration for the solver"); SG_ADD(&m_relative_tolerence, "relative_tolerence", - "Relative tolerence of solver", ParameterProperties()); + "Relative tolerence of solver"); SG_ADD(&m_absolute_tolerence, "absolute_tolerence", - "Absolute tolerence of solver", ParameterProperties()); + "Absolute tolerence of solver"); } CLanczosEigenSolver::~CLanczosEigenSolver() diff --git a/src/shogun/mathematics/linalg/linop/LinearOperator.cpp b/src/shogun/mathematics/linalg/linop/LinearOperator.cpp index 3e707d149eb..57c676af7e9 100644 --- a/src/shogun/mathematics/linalg/linop/LinearOperator.cpp +++ b/src/shogun/mathematics/linalg/linop/LinearOperator.cpp @@ -41,8 +41,7 @@ void CLinearOperator::init() m_dimension=0; SG_ADD(&m_dimension, "dimension", - "Dimension of the vector on which linear operator can apply", - ParameterProperties()); + "Dimension of the vector on which linear operator can apply"); } template class CLinearOperator; diff --git a/src/shogun/mathematics/linalg/ratapprox/logdet/LogDetEstimator.cpp b/src/shogun/mathematics/linalg/ratapprox/logdet/LogDetEstimator.cpp index b3de2ab3f5a..9c3d72c8193 100644 --- a/src/shogun/mathematics/linalg/ratapprox/logdet/LogDetEstimator.cpp +++ b/src/shogun/mathematics/linalg/ratapprox/logdet/LogDetEstimator.cpp @@ -79,10 +79,10 @@ void CLogDetEstimator::init() 
m_operator_log=NULL; SG_ADD((CSGObject**)&m_trace_sampler, "trace_sampler", - "Trace sampler for the log operator", ParameterProperties()); + "Trace sampler for the log operator"); SG_ADD((CSGObject**)&m_operator_log, "operator_log", - "The log operator function", ParameterProperties()); + "The log operator function"); } CLogDetEstimator::~CLogDetEstimator() diff --git a/src/shogun/mathematics/linalg/ratapprox/logdet/opfunc/LogRationalApproximationCGM.cpp b/src/shogun/mathematics/linalg/ratapprox/logdet/opfunc/LogRationalApproximationCGM.cpp index e72592c10dd..a5b6ffe3935 100644 --- a/src/shogun/mathematics/linalg/ratapprox/logdet/opfunc/LogRationalApproximationCGM.cpp +++ b/src/shogun/mathematics/linalg/ratapprox/logdet/opfunc/LogRationalApproximationCGM.cpp @@ -41,7 +41,7 @@ void CLogRationalApproximationCGM::init() m_linear_solver=NULL; SG_ADD((CSGObject**)&m_linear_solver, "linear_solver", - "Linear solver for complex systems", ParameterProperties()); + "Linear solver for complex systems"); } CLogRationalApproximationCGM::~CLogRationalApproximationCGM() diff --git a/src/shogun/mathematics/linalg/ratapprox/logdet/opfunc/LogRationalApproximationIndividual.cpp b/src/shogun/mathematics/linalg/ratapprox/logdet/opfunc/LogRationalApproximationIndividual.cpp index b4a18bd49d1..9d1e5565079 100644 --- a/src/shogun/mathematics/linalg/ratapprox/logdet/opfunc/LogRationalApproximationIndividual.cpp +++ b/src/shogun/mathematics/linalg/ratapprox/logdet/opfunc/LogRationalApproximationIndividual.cpp @@ -44,7 +44,7 @@ void CLogRationalApproximationIndividual::init() m_linear_solver=NULL; SG_ADD((CSGObject**)&m_linear_solver, "linear_solver", - "Linear solver for complex systems", ParameterProperties()); + "Linear solver for complex systems"); } CLogRationalApproximationIndividual::~CLogRationalApproximationIndividual() diff --git a/src/shogun/mathematics/linalg/ratapprox/opfunc/OperatorFunction.h b/src/shogun/mathematics/linalg/ratapprox/opfunc/OperatorFunction.h index 0cad7d2974e..caa6db0dba8 100644 --- a/src/shogun/mathematics/linalg/ratapprox/opfunc/OperatorFunction.h +++ b/src/shogun/mathematics/linalg/ratapprox/opfunc/OperatorFunction.h @@ -100,7 +100,7 @@ template class COperatorFunction : public CSGObject m_linear_operator=NULL; SG_ADD((CSGObject**)&m_linear_operator, "linear_operator", - "Linear operator of this operator function", ParameterProperties()); + "Linear operator of this operator function"); } }; } diff --git a/src/shogun/mathematics/linalg/ratapprox/opfunc/RationalApproximation.cpp b/src/shogun/mathematics/linalg/ratapprox/opfunc/RationalApproximation.cpp index 2005557bf19..bd8e7d06e0f 100644 --- a/src/shogun/mathematics/linalg/ratapprox/opfunc/RationalApproximation.cpp +++ b/src/shogun/mathematics/linalg/ratapprox/opfunc/RationalApproximation.cpp @@ -57,23 +57,20 @@ void CRationalApproximation::init() m_desired_accuracy=0.0; SG_ADD((CSGObject**)&m_eigen_solver, "eigen_solver", - "Eigen solver for computing extremal eigenvalues", ParameterProperties()); + "Eigen solver for computing extremal eigenvalues"); - SG_ADD(&m_shifts, "complex_shifts", "Complex shifts in the linear system", - ParameterProperties()); + SG_ADD(&m_shifts, "complex_shifts", "Complex shifts in the linear system"); - SG_ADD(&m_weights, "complex_weights", "Complex weights of the linear system", - ParameterProperties()); + SG_ADD(&m_weights, "complex_weights", "Complex weights of the linear system"); SG_ADD(&m_constant_multiplier, "constant_multiplier", - "Constant multiplier in the rational approximation", - 
ParameterProperties()); + "Constant multiplier in the rational approximation"); SG_ADD(&m_num_shifts, "num_shifts", - "Number of shifts in the quadrature rule", ParameterProperties()); + "Number of shifts in the quadrature rule"); SG_ADD(&m_desired_accuracy, "desired_accuracy", - "Desired accuracy of the rational approximation", ParameterProperties()); + "Desired accuracy of the rational approximation"); } SGVector CRationalApproximation::get_shifts() const diff --git a/src/shogun/mathematics/linalg/ratapprox/tracesampler/ProbingSampler.cpp b/src/shogun/mathematics/linalg/ratapprox/tracesampler/ProbingSampler.cpp index 147fb4749eb..e077eae9d05 100644 --- a/src/shogun/mathematics/linalg/ratapprox/tracesampler/ProbingSampler.cpp +++ b/src/shogun/mathematics/linalg/ratapprox/tracesampler/ProbingSampler.cpp @@ -55,16 +55,15 @@ void CProbingSampler::init() m_is_precomputed=false; SG_ADD(&m_coloring_vector, "coloring_vector", "the coloring vector generated" - " from coloring", ParameterProperties()); + " from coloring"); - SG_ADD(&m_power, "matrix_power", "power of the sparse-matrix for coloring", - ParameterProperties()); + SG_ADD(&m_power, "matrix_power", "power of the sparse-matrix for coloring"); SG_ADD(&m_is_precomputed, "is_precomputed", - "flag that is true if already precomputed", ParameterProperties()); + "flag that is true if already precomputed"); SG_ADD((CSGObject**)&m_matrix_operator, "matrix_operator", - "the sparse-matrix linear opeator for coloring", ParameterProperties()); + "the sparse-matrix linear opeator for coloring"); } CProbingSampler::~CProbingSampler() diff --git a/src/shogun/mathematics/linalg/ratapprox/tracesampler/TraceSampler.h b/src/shogun/mathematics/linalg/ratapprox/tracesampler/TraceSampler.h index 3fd36d6c4a8..80e7b031024 100644 --- a/src/shogun/mathematics/linalg/ratapprox/tracesampler/TraceSampler.h +++ b/src/shogun/mathematics/linalg/ratapprox/tracesampler/TraceSampler.h @@ -97,10 +97,10 @@ class CTraceSampler : public CSGObject m_dimension=0; SG_ADD(&m_num_samples, "num_samples", - "Number of samples this sampler can generate", ParameterProperties()); + "Number of samples this sampler can generate"); SG_ADD(&m_dimension, "sample_dimension", - "Dimension of samples this sampler can generate", ParameterProperties()); + "Dimension of samples this sampler can generate"); } }; diff --git a/src/shogun/metric/LMNN.cpp b/src/shogun/metric/LMNN.cpp index 6eef5d1707e..7c183fb1d59 100644 --- a/src/shogun/metric/LMNN.cpp +++ b/src/shogun/metric/LMNN.cpp @@ -258,28 +258,20 @@ CLMNNStatistics* CLMNN::get_statistics() const void CLMNN::init() { SG_ADD(&m_linear_transform, "linear_transform", - "Linear transform in matrix form", ParameterProperties()); - SG_ADD((CSGObject**) &m_features, "features", "Training features", - ParameterProperties()); - SG_ADD((CSGObject**) &m_labels, "labels", "Training labels", - ParameterProperties()); - SG_ADD(&m_k, "k", "Number of target neighbours per example", - ParameterProperties()); + "Linear transform in matrix form"); + SG_ADD((CSGObject**) &m_features, "features", "Training features"); + SG_ADD((CSGObject**) &m_labels, "labels", "Training labels"); + SG_ADD(&m_k, "k", "Number of target neighbours per example"); SG_ADD(&m_regularization, "regularization", "Regularization", ParameterProperties::HYPER); - SG_ADD(&m_stepsize, "stepsize", "Step size in gradient descent", - ParameterProperties()); - SG_ADD(&m_stepsize_threshold, "stepsize_threshold", "Step size threshold", - ParameterProperties()); - SG_ADD(&m_maxiter, "maxiter", "Maximum 
number of iterations", - ParameterProperties()); + SG_ADD(&m_stepsize, "stepsize", "Step size in gradient descent"); + SG_ADD(&m_stepsize_threshold, "stepsize_threshold", "Step size threshold"); + SG_ADD(&m_maxiter, "maxiter", "Maximum number of iterations"); SG_ADD(&m_correction, "correction", - "Iterations between exact impostors search", ParameterProperties()); - SG_ADD(&m_obj_threshold, "obj_threshold", "Objective threshold", - ParameterProperties()); - SG_ADD(&m_diagonal, "m_diagonal", "Diagonal transformation", ParameterProperties()); - SG_ADD((CSGObject**) &m_statistics, "statistics", "Training statistics", - ParameterProperties()); + "Iterations between exact impostors search"); + SG_ADD(&m_obj_threshold, "obj_threshold", "Objective threshold"); + SG_ADD(&m_diagonal, "m_diagonal", "Diagonal transformation"); + SG_ADD((CSGObject**) &m_statistics, "statistics", "Training statistics"); m_features = NULL; m_labels = NULL; @@ -331,9 +323,8 @@ void CLMNNStatistics::set(index_t iter, float64_t obj_iter, float64_t stepsize_i void CLMNNStatistics::init() { - SG_ADD(&obj, "obj", "Objective at each iteration", ParameterProperties()); - SG_ADD(&stepsize, "stepsize", "Step size at each iteration", ParameterProperties()); - SG_ADD(&num_impostors, "num_impostors", "Number of impostors at each iteration", - ParameterProperties()); + SG_ADD(&obj, "obj", "Objective at each iteration"); + SG_ADD(&stepsize, "stepsize", "Step size at each iteration"); + SG_ADD(&num_impostors, "num_impostors", "Number of impostors at each iteration"); } diff --git a/src/shogun/modelselection/GradientModelSelection.cpp b/src/shogun/modelselection/GradientModelSelection.cpp index 0e1ede3fce3..7f7b87eeae3 100644 --- a/src/shogun/modelselection/GradientModelSelection.cpp +++ b/src/shogun/modelselection/GradientModelSelection.cpp @@ -83,16 +83,16 @@ class GradientModelSelectionCostFunction: public FirstOrderCostFunction { m_obj=NULL; SG_ADD((CSGObject **)&m_obj, "GradientModelSelectionCostFunction__m_obj", - "obj in GradientModelSelectionCostFunction", ParameterProperties()); + "obj in GradientModelSelectionCostFunction"); m_func_data = NULL; m_val = SGVector(); SG_ADD( &m_val, "GradientModelSelectionCostFunction__m_val", - "val in GradientModelSelectionCostFunction", ParameterProperties()); + "val in GradientModelSelectionCostFunction"); m_grad = SGVector(); SG_ADD( &m_grad, "GradientModelSelectionCostFunction__m_grad", - "grad in GradientModelSelectionCostFunction", ParameterProperties()); + "grad in GradientModelSelectionCostFunction"); } CGradientModelSelection *m_obj; @@ -278,7 +278,7 @@ void CGradientModelSelection::init() SG_REF(m_mode_minimizer); SG_ADD((CSGObject **)&m_mode_minimizer, - "mode_minimizer", "Minimizer used in mode selection", ParameterProperties()); + "mode_minimizer", "Minimizer used in mode selection"); } diff --git a/src/shogun/modelselection/ModelSelection.cpp b/src/shogun/modelselection/ModelSelection.cpp index ff83805d959..bd4129a4493 100644 --- a/src/shogun/modelselection/ModelSelection.cpp +++ b/src/shogun/modelselection/ModelSelection.cpp @@ -35,10 +35,10 @@ void CModelSelection::init() m_machine_eval=NULL; SG_ADD((CSGObject**)&m_model_parameters, "model_parameters", - "Parameter tree for model selection", ParameterProperties()); + "Parameter tree for model selection"); SG_ADD((CSGObject**)&m_machine_eval, "machine_evaluation", - "Machine evaluation strategy", ParameterProperties()); + "Machine evaluation strategy"); } CModelSelection::~CModelSelection() diff --git 
a/src/shogun/modelselection/ParameterCombination.cpp b/src/shogun/modelselection/ParameterCombination.cpp index 5bf92c048a8..43527d8b0e4 100644 --- a/src/shogun/modelselection/ParameterCombination.cpp +++ b/src/shogun/modelselection/ParameterCombination.cpp @@ -121,8 +121,7 @@ void CParameterCombination::init() m_child_nodes=new CDynamicObjectArray(); SG_REF(m_child_nodes); - SG_ADD((CSGObject**)&m_child_nodes, "child_nodes", "Children of this node", - ParameterProperties()); + SG_ADD((CSGObject**)&m_child_nodes, "child_nodes", "Children of this node"); } CParameterCombination::~CParameterCombination() diff --git a/src/shogun/multiclass/GaussianNaiveBayes.cpp b/src/shogun/multiclass/GaussianNaiveBayes.cpp index 5a9cae53eef..e7a31a3c058 100644 --- a/src/shogun/multiclass/GaussianNaiveBayes.cpp +++ b/src/shogun/multiclass/GaussianNaiveBayes.cpp @@ -223,19 +223,18 @@ float64_t CGaussianNaiveBayes::apply_one(int32_t idx) void CGaussianNaiveBayes::init() { - SG_ADD(&m_min_label, "m_min_label", "minimal label", ParameterProperties()); + SG_ADD(&m_min_label, "m_min_label", "minimal label"); SG_ADD(&m_num_classes, "m_num_classes", - "number of different classes (labels)", ParameterProperties()); + "number of different classes (labels)"); SG_ADD(&m_dim, "m_dim", - "dimensionality of feature space", ParameterProperties()); + "dimensionality of feature space"); SG_ADD(&m_means, "m_means", - "means for normal distributions of features", ParameterProperties()); + "means for normal distributions of features"); SG_ADD(&m_variances, "m_variances", - "variances for normal distributions of features", ParameterProperties()); + "variances for normal distributions of features"); SG_ADD(&m_label_prob, "m_label_prob", - "a priori probabilities of labels", ParameterProperties()); - SG_ADD(&m_rates, "m_rates", "label rates", ParameterProperties()); + "a priori probabilities of labels"); + SG_ADD(&m_rates, "m_rates", "label rates"); SG_ADD( - (CFeatures**)&m_features, "features", "Training features", - ParameterProperties()); + (CFeatures**)&m_features, "features", "Training features"); } diff --git a/src/shogun/multiclass/KNN.cpp b/src/shogun/multiclass/KNN.cpp index e253132caac..ff24b94c98f 100644 --- a/src/shogun/multiclass/KNN.cpp +++ b/src/shogun/multiclass/KNN.cpp @@ -59,11 +59,11 @@ void CKNN::init() /* use the method classify_multiply_k to experiment with different values * of k */ - SG_ADD(&m_k, "k", "Parameter k", ParameterProperties()); + SG_ADD(&m_k, "k", "Parameter k"); SG_ADD(&m_q, "q", "Parameter q", ParameterProperties::HYPER); - SG_ADD(&m_num_classes, "num_classes", "Number of classes", ParameterProperties()); - SG_ADD(&m_leaf_size, "leaf_size", "Leaf size for KDTree", ParameterProperties()); - SG_ADD((machine_int_t*) &m_knn_solver, "knn_solver", "Algorithm to solve knn", ParameterProperties()); + SG_ADD(&m_num_classes, "num_classes", "Number of classes"); + SG_ADD(&m_leaf_size, "leaf_size", "Leaf size for KDTree"); + SG_ADD((machine_int_t*) &m_knn_solver, "knn_solver", "Algorithm to solve knn"); } CKNN::~CKNN() diff --git a/src/shogun/multiclass/MCLDA.cpp b/src/shogun/multiclass/MCLDA.cpp index 082c26b16e4..ee51947a0a8 100644 --- a/src/shogun/multiclass/MCLDA.cpp +++ b/src/shogun/multiclass/MCLDA.cpp @@ -51,18 +51,18 @@ CMCLDA::~CMCLDA() void CMCLDA::init() { SG_ADD(&m_tolerance, "m_tolerance", "Tolerance member.", ParameterProperties::HYPER); - SG_ADD(&m_store_cov, "m_store_cov", "Store covariance member", ParameterProperties()); - SG_ADD((CSGObject**) &m_features, "m_features", "Feature 
object.", ParameterProperties()); - SG_ADD(&m_means, "m_means", "Mean vectors list", ParameterProperties()); - SG_ADD(&m_cov, "m_cov", "covariance matrix", ParameterProperties()); - SG_ADD(&m_xbar, "m_xbar", "total mean", ParameterProperties()); - SG_ADD(&m_scalings, "m_scalings", "scalings", ParameterProperties()); - SG_ADD(&m_rank, "m_rank", "rank", ParameterProperties()); - SG_ADD(&m_dim, "m_dim", "dimension of feature space", ParameterProperties()); + SG_ADD(&m_store_cov, "m_store_cov", "Store covariance member"); + SG_ADD((CSGObject**) &m_features, "m_features", "Feature object."); + SG_ADD(&m_means, "m_means", "Mean vectors list"); + SG_ADD(&m_cov, "m_cov", "covariance matrix"); + SG_ADD(&m_xbar, "m_xbar", "total mean"); + SG_ADD(&m_scalings, "m_scalings", "scalings"); + SG_ADD(&m_rank, "m_rank", "rank"); + SG_ADD(&m_dim, "m_dim", "dimension of feature space"); SG_ADD( - &m_num_classes, "m_num_classes", "number of classes", ParameterProperties()); - SG_ADD(&m_coef, "m_coef", "weight vector", ParameterProperties()); - SG_ADD(&m_intercept, "m_intercept", "intercept", ParameterProperties()); + &m_num_classes, "m_num_classes", "number of classes"); + SG_ADD(&m_coef, "m_coef", "weight vector"); + SG_ADD(&m_intercept, "m_intercept", "intercept"); m_features = NULL; m_num_classes=0; diff --git a/src/shogun/multiclass/MulticlassLibSVM.cpp b/src/shogun/multiclass/MulticlassLibSVM.cpp index d800de4087c..97291c50d64 100644 --- a/src/shogun/multiclass/MulticlassLibSVM.cpp +++ b/src/shogun/multiclass/MulticlassLibSVM.cpp @@ -28,7 +28,7 @@ CMulticlassLibSVM::~CMulticlassLibSVM() void CMulticlassLibSVM::register_params() { - SG_ADD((machine_int_t*) &solver_type, "libsvm_solver_type", "LibSVM solver type", ParameterProperties()); + SG_ADD((machine_int_t*) &solver_type, "libsvm_solver_type", "LibSVM solver type"); } bool CMulticlassLibSVM::train_machine(CFeatures* data) diff --git a/src/shogun/multiclass/MulticlassOCAS.cpp b/src/shogun/multiclass/MulticlassOCAS.cpp index 844da1cc0f2..7ac4b5b5ad6 100644 --- a/src/shogun/multiclass/MulticlassOCAS.cpp +++ b/src/shogun/multiclass/MulticlassOCAS.cpp @@ -51,10 +51,10 @@ CMulticlassOCAS::CMulticlassOCAS(float64_t C, CDotFeatures* train_features, CLab void CMulticlassOCAS::register_parameters() { SG_ADD(&m_C, "m_C", "regularization constant", ParameterProperties::HYPER); - SG_ADD(&m_epsilon, "m_epsilon", "solver relative tolerance", ParameterProperties()); - SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations", ParameterProperties()); - SG_ADD(&m_method, "m_method", "used solver method", ParameterProperties()); - SG_ADD(&m_buf_size, "m_buf_size", "buffer size", ParameterProperties()); + SG_ADD(&m_epsilon, "m_epsilon", "solver relative tolerance"); + SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations"); + SG_ADD(&m_method, "m_method", "used solver method"); + SG_ADD(&m_buf_size, "m_buf_size", "buffer size"); } CMulticlassOCAS::~CMulticlassOCAS() diff --git a/src/shogun/multiclass/MulticlassOneVsOneStrategy.cpp b/src/shogun/multiclass/MulticlassOneVsOneStrategy.cpp index 93647435b3f..b426bc671d1 100644 --- a/src/shogun/multiclass/MulticlassOneVsOneStrategy.cpp +++ b/src/shogun/multiclass/MulticlassOneVsOneStrategy.cpp @@ -26,7 +26,7 @@ CMulticlassOneVsOneStrategy::CMulticlassOneVsOneStrategy(EProbHeuristicType prob void CMulticlassOneVsOneStrategy::register_parameters() { - //SG_ADD(&m_num_samples, "num_samples", "Number of samples in each training machine", ParameterProperties()); + //SG_ADD(&m_num_samples, "num_samples", "Number of samples 
in each training machine"); SG_WARNING("%s::CMulticlassOneVsOneStrategy(): register parameters!\n", get_name()); } diff --git a/src/shogun/multiclass/MulticlassStrategy.cpp b/src/shogun/multiclass/MulticlassStrategy.cpp index 490a4f9ffac..86ec7a73fb7 100644 --- a/src/shogun/multiclass/MulticlassStrategy.cpp +++ b/src/shogun/multiclass/MulticlassStrategy.cpp @@ -34,9 +34,9 @@ void CMulticlassStrategy::init() m_prob_heuris=PROB_HEURIS_NONE; m_num_classes=0; - SG_ADD((CSGObject**)&m_rejection_strategy, "rejection_strategy", "Strategy of rejection", ParameterProperties()); - SG_ADD(&m_num_classes, "num_classes", "Number of classes", ParameterProperties()); - SG_ADD((machine_int_t*)&m_prob_heuris, "prob_heuris", "Probability estimation heuristics", ParameterProperties()); + SG_ADD((CSGObject**)&m_rejection_strategy, "rejection_strategy", "Strategy of rejection"); + SG_ADD(&m_num_classes, "num_classes", "Number of classes"); + SG_ADD((machine_int_t*)&m_prob_heuris, "prob_heuris", "Probability estimation heuristics"); } void CMulticlassStrategy::train_start(CMulticlassLabels *orig_labels, CBinaryLabels *train_labels) diff --git a/src/shogun/multiclass/QDA.cpp b/src/shogun/multiclass/QDA.cpp index a47041921c1..0c6d3a9617b 100644 --- a/src/shogun/multiclass/QDA.cpp +++ b/src/shogun/multiclass/QDA.cpp @@ -84,14 +84,14 @@ void CQDA::init() m_tolerance = 1e-4; m_store_covs = false; SG_ADD(&m_tolerance, "m_tolerance", "Tolerance member.", ParameterProperties::HYPER); - SG_ADD(&m_store_covs, "m_store_covs", "Store covariances member", ParameterProperties()); - SG_ADD((CSGObject**) &m_features, "m_features", "Feature object.", ParameterProperties()); - SG_ADD(&m_means, "m_means", "Mean vectors list", ParameterProperties()); - SG_ADD(&m_slog, "m_slog", "Vector used in classification", ParameterProperties()); - SG_ADD(&m_dim, "m_dim", "dimension of feature space", ParameterProperties()); + SG_ADD(&m_store_covs, "m_store_covs", "Store covariances member"); + SG_ADD((CSGObject**) &m_features, "m_features", "Feature object."); + SG_ADD(&m_means, "m_means", "Mean vectors list"); + SG_ADD(&m_slog, "m_slog", "Vector used in classification"); + SG_ADD(&m_dim, "m_dim", "dimension of feature space"); SG_ADD( - &m_num_classes, "m_num_classes", "number of classes", ParameterProperties()); - SG_ADD(&m_M, "m_M", "Matrices used in classification", ParameterProperties()); + &m_num_classes, "m_num_classes", "number of classes"); + SG_ADD(&m_M, "m_M", "Matrices used in classification"); m_features = NULL; } diff --git a/src/shogun/multiclass/ScatterSVM.cpp b/src/shogun/multiclass/ScatterSVM.cpp index 7b718560764..b1d02eaf20f 100644 --- a/src/shogun/multiclass/ScatterSVM.cpp +++ b/src/shogun/multiclass/ScatterSVM.cpp @@ -44,7 +44,7 @@ CScatterSVM::~CScatterSVM() void CScatterSVM::register_params() { - SG_ADD((machine_int_t*) &scatter_type, "scatter_type", "Type of scatter SVM", ParameterProperties()); + SG_ADD((machine_int_t*) &scatter_type, "scatter_type", "Type of scatter SVM"); m_parameters->add_vector(&norm_wc, &norm_wc_len, "norm_wc", "Norm of w_c"); watch_param("norm_wc", &norm_wc, &norm_wc_len); @@ -52,8 +52,8 @@ void CScatterSVM::register_params() m_parameters->add_vector(&norm_wcw, &norm_wcw_len, "norm_wcw", "Norm of w_cw"); watch_param("norm_wcw", &norm_wcw, &norm_wcw_len); - SG_ADD(&rho, "rho", "Scatter SVM rho", ParameterProperties()); - SG_ADD(&m_num_classes, "m_num_classes", "Number of classes", ParameterProperties()); + SG_ADD(&rho, "rho", "Scatter SVM rho"); + SG_ADD(&m_num_classes, "m_num_classes", "Number 
of classes"); } bool CScatterSVM::train_machine(CFeatures* data) diff --git a/src/shogun/multiclass/ShareBoost.cpp b/src/shogun/multiclass/ShareBoost.cpp index 8f8b194b9eb..a3271f00986 100644 --- a/src/shogun/multiclass/ShareBoost.cpp +++ b/src/shogun/multiclass/ShareBoost.cpp @@ -31,8 +31,8 @@ CShareBoost::CShareBoost(CDenseFeatures *features, CMulticlassLabels void CShareBoost::init_sb_params() { - SG_ADD(&m_nonzero_feas, "nonzero_feas", "Number of non-zero features", ParameterProperties()); - SG_ADD(&m_activeset, "active_set", "Selected features", ParameterProperties()); + SG_ADD(&m_nonzero_feas, "nonzero_feas", "Number of non-zero features"); + SG_ADD(&m_activeset, "active_set", "Selected features"); } SGVector CShareBoost::get_activeset() diff --git a/src/shogun/multiclass/ecoc/ECOCDiscriminantEncoder.cpp b/src/shogun/multiclass/ecoc/ECOCDiscriminantEncoder.cpp index 723f258e606..8e9638649ff 100644 --- a/src/shogun/multiclass/ecoc/ECOCDiscriminantEncoder.cpp +++ b/src/shogun/multiclass/ecoc/ECOCDiscriminantEncoder.cpp @@ -37,7 +37,7 @@ void CECOCDiscriminantEncoder::init() // parameters - SG_ADD(&m_iterations, "iterations", "number of iterations in SFFS", ParameterProperties()); + SG_ADD(&m_iterations, "iterations", "number of iterations in SFFS"); } void CECOCDiscriminantEncoder::set_features(CDenseFeatures *features) diff --git a/src/shogun/multiclass/ecoc/ECOCForestEncoder.cpp b/src/shogun/multiclass/ecoc/ECOCForestEncoder.cpp index 60c65b4cc08..1476314224f 100644 --- a/src/shogun/multiclass/ecoc/ECOCForestEncoder.cpp +++ b/src/shogun/multiclass/ecoc/ECOCForestEncoder.cpp @@ -11,7 +11,7 @@ using namespace shogun; CECOCForestEncoder::CECOCForestEncoder() { m_num_trees = 3; - SG_ADD(&m_num_trees, "num_trees", "number of trees", ParameterProperties()); + SG_ADD(&m_num_trees, "num_trees", "number of trees"); } void CECOCForestEncoder::set_num_trees(int32_t num_trees) diff --git a/src/shogun/multiclass/ecoc/ECOCRandomDenseEncoder.cpp b/src/shogun/multiclass/ecoc/ECOCRandomDenseEncoder.cpp index d585ed21427..8b9f6a4fc9a 100644 --- a/src/shogun/multiclass/ecoc/ECOCRandomDenseEncoder.cpp +++ b/src/shogun/multiclass/ecoc/ECOCRandomDenseEncoder.cpp @@ -34,9 +34,9 @@ void CECOCRandomDenseEncoder::init() m_maxiter = 10000; m_codelen = 0; m_pposone = 0.5; - SG_ADD(&m_maxiter, "maxiter", "max number of iterations", ParameterProperties()); - SG_ADD(&m_codelen, "codelen", "code length", ParameterProperties()); - SG_ADD(&m_pposone, "pposone", "probability of +1", ParameterProperties()); + SG_ADD(&m_maxiter, "maxiter", "max number of iterations"); + SG_ADD(&m_codelen, "codelen", "code length"); + SG_ADD(&m_pposone, "pposone", "probability of +1"); } void CECOCRandomDenseEncoder::set_probability(float64_t pposone) diff --git a/src/shogun/multiclass/ecoc/ECOCRandomSparseEncoder.cpp b/src/shogun/multiclass/ecoc/ECOCRandomSparseEncoder.cpp index 63c25b3494a..482b9b305b4 100644 --- a/src/shogun/multiclass/ecoc/ECOCRandomSparseEncoder.cpp +++ b/src/shogun/multiclass/ecoc/ECOCRandomSparseEncoder.cpp @@ -25,11 +25,11 @@ CECOCRandomSparseEncoder::CECOCRandomSparseEncoder(int32_t maxiter, int32_t code void CECOCRandomSparseEncoder::init() { - SG_ADD(&m_maxiter, "maxiter", "max number of iterations", ParameterProperties()); - SG_ADD(&m_codelen, "codelen", "code length", ParameterProperties()); - SG_ADD(&m_pzero, "pzero", "probability of 0", ParameterProperties()); - SG_ADD(&m_pposone, "pposone", "probability of +1", ParameterProperties()); - SG_ADD(&m_pnegone, "pnegone", "probability of -1", 
ParameterProperties()); + SG_ADD(&m_maxiter, "maxiter", "max number of iterations"); + SG_ADD(&m_codelen, "codelen", "code length"); + SG_ADD(&m_pzero, "pzero", "probability of 0"); + SG_ADD(&m_pposone, "pposone", "probability of +1"); + SG_ADD(&m_pnegone, "pnegone", "probability of -1"); } void CECOCRandomSparseEncoder::set_probability(float64_t pzero, float64_t pposone, float64_t pnegone) diff --git a/src/shogun/multiclass/ecoc/ECOCStrategy.cpp b/src/shogun/multiclass/ecoc/ECOCStrategy.cpp index 35aef514da3..b3463229bbb 100644 --- a/src/shogun/multiclass/ecoc/ECOCStrategy.cpp +++ b/src/shogun/multiclass/ecoc/ECOCStrategy.cpp @@ -30,8 +30,8 @@ void CECOCStrategy::init() m_encoder=NULL; m_decoder=NULL; - SG_ADD(&m_encoder, "encoder", "ECOC Encoder", ParameterProperties()); - SG_ADD(&m_decoder, "decoder", "ECOC Decoder", ParameterProperties()); + SG_ADD(&m_encoder, "encoder", "ECOC Encoder"); + SG_ADD(&m_decoder, "decoder", "ECOC Decoder"); } CECOCStrategy::~CECOCStrategy() diff --git a/src/shogun/multiclass/tree/BalancedConditionalProbabilityTree.cpp b/src/shogun/multiclass/tree/BalancedConditionalProbabilityTree.cpp index 71b2093b86b..d722abdf86a 100644 --- a/src/shogun/multiclass/tree/BalancedConditionalProbabilityTree.cpp +++ b/src/shogun/multiclass/tree/BalancedConditionalProbabilityTree.cpp @@ -11,7 +11,7 @@ using namespace shogun; CBalancedConditionalProbabilityTree::CBalancedConditionalProbabilityTree() :m_alpha(0.4) { - SG_ADD(&m_alpha, "m_alpha", "Trade-off parameter of tree balance", ParameterProperties()); + SG_ADD(&m_alpha, "m_alpha", "Trade-off parameter of tree balance"); } void CBalancedConditionalProbabilityTree::set_alpha(float64_t alpha) diff --git a/src/shogun/multiclass/tree/C45ClassifierTree.cpp b/src/shogun/multiclass/tree/C45ClassifierTree.cpp index 2fcd1cf73ce..faf3a93b8f0 100644 --- a/src/shogun/multiclass/tree/C45ClassifierTree.cpp +++ b/src/shogun/multiclass/tree/C45ClassifierTree.cpp @@ -795,10 +795,10 @@ void CC45ClassifierTree::init() m_types_set=false; m_weights_set=false; - SG_ADD(&m_nominal,"m_nominal", "feature types", ParameterProperties()); - SG_ADD(&m_weights,"m_weights", "weights", ParameterProperties()); - SG_ADD(&m_certainty,"m_certainty", "certainty", ParameterProperties()); - SG_ADD(&m_weights_set,"m_weights_set", "weights set", ParameterProperties()); - SG_ADD(&m_types_set,"m_types_set", "feature types set", ParameterProperties()); + SG_ADD(&m_nominal,"m_nominal", "feature types"); + SG_ADD(&m_weights,"m_weights", "weights"); + SG_ADD(&m_certainty,"m_certainty", "certainty"); + SG_ADD(&m_weights_set,"m_weights_set", "weights set"); + SG_ADD(&m_types_set,"m_types_set", "feature types set"); } diff --git a/src/shogun/multiclass/tree/CARTree.cpp b/src/shogun/multiclass/tree/CARTree.cpp index df28fdf1c8d..ed02d5596f5 100644 --- a/src/shogun/multiclass/tree/CARTree.cpp +++ b/src/shogun/multiclass/tree/CARTree.cpp @@ -1489,17 +1489,17 @@ void CCARTree::init() m_sorted_features=SGMatrix(); m_sorted_indices=SGMatrix(); - SG_ADD(&m_pre_sort, "pre_sort", "presort", ParameterProperties()); - SG_ADD(&m_sorted_features, "sorted_features", "sorted feats", ParameterProperties()); - SG_ADD(&m_sorted_indices, "sorted_indices", "sorted indices", ParameterProperties()); - SG_ADD(&m_nominal, "nominal", "feature types", ParameterProperties()); - SG_ADD(&m_weights, "weights", "weights", ParameterProperties()); - SG_ADD(&m_weights_set, "weights_set", "weights set", ParameterProperties()); - SG_ADD(&m_types_set, "types_set", "feature types set", ParameterProperties()); - 
SG_ADD(&m_apply_cv_pruning, "apply_cv_pruning", "apply cross validation pruning", ParameterProperties()); - SG_ADD(&m_folds, "folds", "number of subsets for cross validation", ParameterProperties()); - SG_ADD(&m_max_depth, "max_depth", "max allowed tree depth", ParameterProperties()); - SG_ADD(&m_min_node_size, "min_node_size", "min allowed node size", ParameterProperties()); - SG_ADD(&m_label_epsilon, "label_epsilon", "epsilon for labels", ParameterProperties()); - SG_ADD((machine_int_t*)&m_mode, "mode", "problem type (multiclass or regression)", ParameterProperties()); + SG_ADD(&m_pre_sort, "pre_sort", "presort"); + SG_ADD(&m_sorted_features, "sorted_features", "sorted feats"); + SG_ADD(&m_sorted_indices, "sorted_indices", "sorted indices"); + SG_ADD(&m_nominal, "nominal", "feature types"); + SG_ADD(&m_weights, "weights", "weights"); + SG_ADD(&m_weights_set, "weights_set", "weights set"); + SG_ADD(&m_types_set, "types_set", "feature types set"); + SG_ADD(&m_apply_cv_pruning, "apply_cv_pruning", "apply cross validation pruning"); + SG_ADD(&m_folds, "folds", "number of subsets for cross validation"); + SG_ADD(&m_max_depth, "max_depth", "max allowed tree depth"); + SG_ADD(&m_min_node_size, "min_node_size", "min allowed node size"); + SG_ADD(&m_label_epsilon, "label_epsilon", "epsilon for labels"); + SG_ADD((machine_int_t*)&m_mode, "mode", "problem type (multiclass or regression)"); } diff --git a/src/shogun/multiclass/tree/CHAIDTree.cpp b/src/shogun/multiclass/tree/CHAIDTree.cpp index cefc5d96cd9..525387e7ccd 100644 --- a/src/shogun/multiclass/tree/CHAIDTree.cpp +++ b/src/shogun/multiclass/tree/CHAIDTree.cpp @@ -1400,14 +1400,14 @@ void CCHAIDTree::init() m_cont_breakpoints=SGMatrix(); m_num_breakpoints=0; - SG_ADD(&m_weights,"m_weights", "weights", ParameterProperties()); - SG_ADD(&m_weights_set,"m_weights_set", "weights set", ParameterProperties()); - SG_ADD(&m_feature_types,"m_feature_types", "feature types", ParameterProperties()); - SG_ADD(&m_dependent_vartype,"m_dependent_vartype", "dependent variable type", ParameterProperties()); - SG_ADD(&m_max_tree_depth,"m_max_tree_depth", "max tree depth", ParameterProperties()); - SG_ADD(&m_min_node_size,"m_min_node_size", "min node size", ParameterProperties()); - SG_ADD(&m_alpha_merge,"m_alpha_merge", "alpha-merge", ParameterProperties()); - SG_ADD(&m_alpha_split,"m_alpha_split", "alpha-split", ParameterProperties()); - SG_ADD(&m_cont_breakpoints,"m_cont_breakpoints", "breakpoints in continuous attributes", ParameterProperties()); - SG_ADD(&m_num_breakpoints,"m_num_breakpoints", "number of breakpoints", ParameterProperties()); + SG_ADD(&m_weights,"m_weights", "weights"); + SG_ADD(&m_weights_set,"m_weights_set", "weights set"); + SG_ADD(&m_feature_types,"m_feature_types", "feature types"); + SG_ADD(&m_dependent_vartype,"m_dependent_vartype", "dependent variable type"); + SG_ADD(&m_max_tree_depth,"m_max_tree_depth", "max tree depth"); + SG_ADD(&m_min_node_size,"m_min_node_size", "min node size"); + SG_ADD(&m_alpha_merge,"m_alpha_merge", "alpha-merge"); + SG_ADD(&m_alpha_split,"m_alpha_split", "alpha-split"); + SG_ADD(&m_cont_breakpoints,"m_cont_breakpoints", "breakpoints in continuous attributes"); + SG_ADD(&m_num_breakpoints,"m_num_breakpoints", "number of breakpoints"); } diff --git a/src/shogun/multiclass/tree/RandomCARTree.cpp b/src/shogun/multiclass/tree/RandomCARTree.cpp index 0c5b3473e4a..33064479df4 100644 --- a/src/shogun/multiclass/tree/RandomCARTree.cpp +++ b/src/shogun/multiclass/tree/RandomCARTree.cpp @@ -72,5 +72,5 @@ void 
CRandomCARTree::init() { m_randsubset_size=0; - SG_ADD(&m_randsubset_size,"m_randsubset_size", "random features subset size", ParameterProperties()); + SG_ADD(&m_randsubset_size,"m_randsubset_size", "random features subset size"); } diff --git a/src/shogun/multiclass/tree/RelaxedTree.cpp b/src/shogun/multiclass/tree/RelaxedTree.cpp index b6140c54d67..846737259c6 100644 --- a/src/shogun/multiclass/tree/RelaxedTree.cpp +++ b/src/shogun/multiclass/tree/RelaxedTree.cpp @@ -22,7 +22,7 @@ CRelaxedTree::CRelaxedTree() :m_max_num_iter(3), m_A(0.5), m_B(5), m_svm_C(1), m_svm_epsilon(0.001), m_kernel(NULL), m_feats(NULL), m_machine_for_confusion_matrix(NULL), m_num_classes(0) { - SG_ADD(&m_max_num_iter, "m_max_num_iter", "max number of iterations in alternating optimization", ParameterProperties()); + SG_ADD(&m_max_num_iter, "m_max_num_iter", "max number of iterations in alternating optimization"); SG_ADD(&m_svm_C, "m_svm_C", "C for svm", ParameterProperties::HYPER); SG_ADD(&m_A, "m_A", "parameter A", ParameterProperties::HYPER); SG_ADD(&m_B, "m_B", "parameter B", ParameterProperties::HYPER); diff --git a/src/shogun/multiclass/tree/TreeMachine.h b/src/shogun/multiclass/tree/TreeMachine.h index 84689f41036..2b09c9263c5 100644 --- a/src/shogun/multiclass/tree/TreeMachine.h +++ b/src/shogun/multiclass/tree/TreeMachine.h @@ -58,7 +58,7 @@ template class CTreeMachine : public CBaseMulticlassMachine CTreeMachine() : CBaseMulticlassMachine() { m_root=NULL; - SG_ADD((CSGObject**)&m_root,"m_root", "tree structure", ParameterProperties()); + SG_ADD((CSGObject**)&m_root,"m_root", "tree structure"); } /** destructor */ diff --git a/src/shogun/multiclass/tree/TreeMachineNode.h b/src/shogun/multiclass/tree/TreeMachineNode.h index 6231da77a10..6b18d51d568 100644 --- a/src/shogun/multiclass/tree/TreeMachineNode.h +++ b/src/shogun/multiclass/tree/TreeMachineNode.h @@ -185,8 +185,8 @@ class CTreeMachineNode m_machine=-1; m_children=new CDynamicObjectArray(); SG_REF(m_children); - SG_ADD((CSGObject**)&m_parent,"m_parent", "Parent node", ParameterProperties()); - SG_ADD(&m_machine,"m_machine", "Index of associated machine", ParameterProperties()); + SG_ADD((CSGObject**)&m_parent,"m_parent", "Parent node"); + SG_ADD(&m_machine,"m_machine", "Index of associated machine"); } public: diff --git a/src/shogun/neuralnets/Autoencoder.cpp b/src/shogun/neuralnets/Autoencoder.cpp index d03762acf3a..5eb2c710253 100644 --- a/src/shogun/neuralnets/Autoencoder.cpp +++ b/src/shogun/neuralnets/Autoencoder.cpp @@ -176,9 +176,9 @@ void CAutoencoder::init() m_contraction_coefficient = 0.0; SG_ADD((machine_int_t*)&m_noise_type, "noise_type", - "Noise Type", ParameterProperties()); + "Noise Type"); SG_ADD(&m_noise_parameter, "noise_parameter", - "Noise Parameter", ParameterProperties()); + "Noise Parameter"); SG_ADD(&m_contraction_coefficient, "contraction_coefficient", - "Contraction Coefficient", ParameterProperties()); + "Contraction Coefficient"); } diff --git a/src/shogun/neuralnets/DeepAutoencoder.cpp b/src/shogun/neuralnets/DeepAutoencoder.cpp index 604f9603c30..95e2bac89c3 100644 --- a/src/shogun/neuralnets/DeepAutoencoder.cpp +++ b/src/shogun/neuralnets/DeepAutoencoder.cpp @@ -270,31 +270,31 @@ void CDeepAutoencoder::init() pt_gd_error_damping_coeff.set_const(-1); SG_ADD(&pt_noise_type, "pt_noise_type", - "Pre-training Noise Type", ParameterProperties()); + "Pre-training Noise Type"); SG_ADD(&pt_noise_parameter, "pt_noise_parameter", - "Pre-training Noise Parameter", ParameterProperties()); + "Pre-training Noise Parameter"); 
SG_ADD(&pt_contraction_coefficient, "pt_contraction_coefficient", - "Pre-training Contraction Coefficient", ParameterProperties()); + "Pre-training Contraction Coefficient"); SG_ADD(&pt_optimization_method, "pt_optimization_method", - "Pre-training Optimization Method", ParameterProperties()); + "Pre-training Optimization Method"); SG_ADD(&pt_gd_mini_batch_size, "pt_gd_mini_batch_size", - "Pre-training Gradient Descent Mini-batch size", ParameterProperties()); + "Pre-training Gradient Descent Mini-batch size"); SG_ADD(&pt_max_num_epochs, "pt_max_num_epochs", - "Pre-training Max number of Epochs", ParameterProperties()); + "Pre-training Max number of Epochs"); SG_ADD(&pt_gd_learning_rate, "pt_gd_learning_rate", - "Pre-training Gradient descent learning rate", ParameterProperties()); + "Pre-training Gradient descent learning rate"); SG_ADD(&pt_gd_learning_rate_decay, "pt_gd_learning_rate_decay", - "Pre-training Gradient descent learning rate decay", ParameterProperties()); + "Pre-training Gradient descent learning rate decay"); SG_ADD(&pt_gd_momentum, "pt_gd_momentum", - "Pre-training Gradient Descent Momentum", ParameterProperties()); + "Pre-training Gradient Descent Momentum"); SG_ADD(&pt_gd_error_damping_coeff, "pt_gd_error_damping_coeff", - "Pre-training Gradient Descent Error Damping Coeff", ParameterProperties()); + "Pre-training Gradient Descent Error Damping Coeff"); SG_ADD(&pt_epsilon, "pt_epsilon", - "Pre-training Epsilon", ParameterProperties()); + "Pre-training Epsilon"); SG_ADD(&pt_l2_coefficient, "pt_l2_coefficient", - "Pre-training L2 regularization coeff", ParameterProperties()); + "Pre-training L2 regularization coeff"); SG_ADD(&pt_l1_coefficient, "pt_l1_coefficient", - "Pre-training L1 regularization coeff", ParameterProperties()); + "Pre-training L1 regularization coeff"); - SG_ADD(&m_sigma, "m_sigma", "Initialization Sigma", ParameterProperties()); + SG_ADD(&m_sigma, "m_sigma", "Initialization Sigma"); } diff --git a/src/shogun/neuralnets/DeepBeliefNetwork.cpp b/src/shogun/neuralnets/DeepBeliefNetwork.cpp index 5d8b95d1caf..006e4d7e00b 100644 --- a/src/shogun/neuralnets/DeepBeliefNetwork.cpp +++ b/src/shogun/neuralnets/DeepBeliefNetwork.cpp @@ -584,60 +584,60 @@ void CDeepBeliefNetwork::init() m_sigma = 0.01; SG_ADD((machine_int_t*)&m_visible_units_type, "visible_units_type", - "Type of the visible units", ParameterProperties()); + "Type of the visible units"); SG_ADD(&m_num_layers, "num_layers", - "Number of layers", ParameterProperties()); + "Number of layers"); SG_ADD((CSGObject**)&m_layer_sizes, "layer_sizes", - "Size of each hidden layer", ParameterProperties()); + "Size of each hidden layer"); SG_ADD(&m_params, "params", - "Parameters of the network", ParameterProperties()); + "Parameters of the network"); SG_ADD(&m_num_params, "num_params", - "Number of parameters", ParameterProperties()); + "Number of parameters"); SG_ADD(&m_bias_index_offsets, "bias_index_offsets", - "Index offsets of the biases", ParameterProperties()); + "Index offsets of the biases"); SG_ADD(&m_weights_index_offsets, "weights_index_offsets", - "Index offsets of the weights", ParameterProperties()); + "Index offsets of the weights"); SG_ADD(&pt_cd_num_steps, "pt_cd_num_steps", - "Pre-training Number of CD Steps", ParameterProperties()); + "Pre-training Number of CD Steps"); SG_ADD(&pt_cd_persistent, "pt_cd_persistent", - "Pre-training Persistent CD", ParameterProperties()); + "Pre-training Persistent CD"); SG_ADD(&pt_cd_sample_visible, "pt_cd_sample_visible", - "Pre-training Number of CD Sample 
Visible", ParameterProperties()); + "Pre-training Number of CD Sample Visible"); SG_ADD(&pt_l2_coefficient, "pt_l2_coefficient", - "Pre-training L2 regularization coeff", ParameterProperties()); + "Pre-training L2 regularization coeff"); SG_ADD(&pt_l1_coefficient, "pt_l1_coefficient", - "Pre-training L1 regularization coeff", ParameterProperties()); + "Pre-training L1 regularization coeff"); SG_ADD(&pt_monitoring_interval, "pt_monitoring_interval", - "Pre-training Monitoring Interval", ParameterProperties()); + "Pre-training Monitoring Interval"); SG_ADD(&pt_monitoring_method, "pt_monitoring_method", - "Pre-training Monitoring Method", ParameterProperties()); + "Pre-training Monitoring Method"); SG_ADD(&pt_cd_num_steps, "pt_gd_mini_batch_size", - "Pre-training Gradient Descent Mini-batch size", ParameterProperties()); + "Pre-training Gradient Descent Mini-batch size"); SG_ADD(&pt_max_num_epochs, "pt_max_num_epochs", - "Pre-training Max number of Epochs", ParameterProperties()); + "Pre-training Max number of Epochs"); SG_ADD(&pt_gd_learning_rate, "pt_gd_learning_rate", - "Pre-training Gradient descent learning rate", ParameterProperties()); + "Pre-training Gradient descent learning rate"); SG_ADD(&pt_gd_learning_rate_decay, "pt_gd_learning_rate_decay", - "Pre-training Gradient descent learning rate decay", ParameterProperties()); + "Pre-training Gradient descent learning rate decay"); SG_ADD(&pt_gd_momentum, "pt_gd_momentum", - "Pre-training Gradient Descent Momentum", ParameterProperties()); + "Pre-training Gradient Descent Momentum"); - SG_ADD(&cd_num_steps, "cd_num_steps", "Number of CD Steps", ParameterProperties()); + SG_ADD(&cd_num_steps, "cd_num_steps", "Number of CD Steps"); SG_ADD(&monitoring_interval, "monitoring_interval", - "Monitoring Interval", ParameterProperties()); + "Monitoring Interval"); SG_ADD(&gd_mini_batch_size, "gd_mini_batch_size", - "Gradient Descent Mini-batch size", ParameterProperties()); + "Gradient Descent Mini-batch size"); SG_ADD(&max_num_epochs, "max_num_epochs", - "Max number of Epochs", ParameterProperties()); + "Max number of Epochs"); SG_ADD(&gd_learning_rate, "gd_learning_rate", - "Gradient descent learning rate", ParameterProperties()); + "Gradient descent learning rate"); SG_ADD(&gd_learning_rate_decay, "gd_learning_rate_decay", - "Gradient descent learning rate decay", ParameterProperties()); + "Gradient descent learning rate decay"); SG_ADD(&gd_momentum, "gd_momentum", - "Gradient Descent Momentum", ParameterProperties()); + "Gradient Descent Momentum"); - SG_ADD(&m_sigma, "m_sigma", "Initialization Sigma", ParameterProperties()); + SG_ADD(&m_sigma, "m_sigma", "Initialization Sigma"); } diff --git a/src/shogun/neuralnets/NeuralConvolutionalLayer.cpp b/src/shogun/neuralnets/NeuralConvolutionalLayer.cpp index e49a7c5124f..659b0906263 100644 --- a/src/shogun/neuralnets/NeuralConvolutionalLayer.cpp +++ b/src/shogun/neuralnets/NeuralConvolutionalLayer.cpp @@ -283,27 +283,25 @@ void CNeuralConvolutionalLayer::init() m_initialization_mode = NORMAL; m_activation_function = CMAF_IDENTITY; - SG_ADD(&m_num_maps, "num_maps", "Number of maps", ParameterProperties()); - SG_ADD(&m_input_width, "input_width", "Input Width", ParameterProperties()); - SG_ADD(&m_input_height, "input_height", "Input Height", ParameterProperties()); - SG_ADD(&m_input_num_channels, "input_num_channels", "Input's number of channels", - ParameterProperties()); - SG_ADD(&m_radius_x, "radius_x", "X Radius", ParameterProperties()); - SG_ADD(&m_radius_y, "radius_y", "Y Radius", 
ParameterProperties()); - SG_ADD(&m_pooling_width, "pooling_width", "Pooling Width", ParameterProperties()); - SG_ADD(&m_pooling_height, "pooling_height", "Pooling Height", ParameterProperties()); - SG_ADD(&m_stride_x, "stride_x", "X Stride", ParameterProperties()); - SG_ADD(&m_stride_y, "stride_y", "Y Stride", ParameterProperties()); - - SG_ADD((machine_int_t*) &m_initialization_mode, "initialization_mode", "Initialization Mode", - ParameterProperties()); + SG_ADD(&m_num_maps, "num_maps", "Number of maps"); + SG_ADD(&m_input_width, "input_width", "Input Width"); + SG_ADD(&m_input_height, "input_height", "Input Height"); + SG_ADD(&m_input_num_channels, "input_num_channels", "Input's number of channels"); + SG_ADD(&m_radius_x, "radius_x", "X Radius"); + SG_ADD(&m_radius_y, "radius_y", "Y Radius"); + SG_ADD(&m_pooling_width, "pooling_width", "Pooling Width"); + SG_ADD(&m_pooling_height, "pooling_height", "Pooling Height"); + SG_ADD(&m_stride_x, "stride_x", "X Stride"); + SG_ADD(&m_stride_y, "stride_y", "Y Stride"); + + SG_ADD((machine_int_t*) &m_initialization_mode, "initialization_mode", "Initialization Mode"); SG_ADD((machine_int_t*) &m_activation_function, "activation_function", - "Activation Function", ParameterProperties()); + "Activation Function"); SG_ADD(&m_convolution_output, "convolution_output", - "Convolution Output", ParameterProperties()); + "Convolution Output"); SG_ADD(&m_convolution_output_gradients, "convolution_output_gradients", - "Convolution Output Gradients", ParameterProperties()); + "Convolution Output Gradients"); } diff --git a/src/shogun/neuralnets/NeuralInputLayer.cpp b/src/shogun/neuralnets/NeuralInputLayer.cpp index 3b1f6925a33..cd9737a6232 100644 --- a/src/shogun/neuralnets/NeuralInputLayer.cpp +++ b/src/shogun/neuralnets/NeuralInputLayer.cpp @@ -82,7 +82,7 @@ void CNeuralInputLayer::init() m_start_index = 0; gaussian_noise = 0; SG_ADD(&m_start_index, "start_index", - "Start Index", ParameterProperties()); + "Start Index"); SG_ADD(&gaussian_noise, "gaussian_noise", - "Gaussian Noise Standard Deviation", ParameterProperties()); + "Gaussian Noise Standard Deviation"); } diff --git a/src/shogun/neuralnets/NeuralLayer.cpp b/src/shogun/neuralnets/NeuralLayer.cpp index 96aeeccb59e..bd673dd0dd1 100644 --- a/src/shogun/neuralnets/NeuralLayer.cpp +++ b/src/shogun/neuralnets/NeuralLayer.cpp @@ -121,32 +121,32 @@ void CNeuralLayer::init() autoencoder_position = NLAP_NONE; SG_ADD(&m_num_neurons, "num_neurons", - "Number of Neurons", ParameterProperties()); + "Number of Neurons"); SG_ADD(&m_width, "width", - "Width", ParameterProperties()); + "Width"); SG_ADD(&m_height, "height", - "Height", ParameterProperties()); + "Height"); SG_ADD(&m_input_indices, "input_indices", - "Input Indices", ParameterProperties()); + "Input Indices"); SG_ADD(&m_input_sizes, "input_sizes", - "Input Sizes", ParameterProperties()); + "Input Sizes"); SG_ADD(&dropout_prop, "dropout_prop", - "Dropout Probabilty", ParameterProperties()); + "Dropout Probabilty"); SG_ADD(&contraction_coefficient, "contraction_coefficient", - "Contraction Coefficient", ParameterProperties()); + "Contraction Coefficient"); SG_ADD(&is_training, "is_training", - "is_training", ParameterProperties()); + "is_training"); SG_ADD(&m_batch_size, "batch_size", - "Batch Size", ParameterProperties()); + "Batch Size"); SG_ADD(&m_activations, "activations", - "Activations", ParameterProperties()); + "Activations"); SG_ADD(&m_activation_gradients, "activation_gradients", - "Activation Gradients", ParameterProperties()); + "Activation 
Gradients"); SG_ADD(&m_local_gradients, "local_gradients", - "Local Gradients", ParameterProperties()); + "Local Gradients"); SG_ADD(&m_dropout_mask, "dropout_mask", - "Dropout mask", ParameterProperties()); + "Dropout mask"); SG_ADD((machine_int_t*)&autoencoder_position, "autoencoder_position", - "Autoencoder Position", ParameterProperties()); + "Autoencoder Position"); } diff --git a/src/shogun/neuralnets/NeuralNetwork.cpp b/src/shogun/neuralnets/NeuralNetwork.cpp index 5ceb97d7d5e..150efe6644a 100644 --- a/src/shogun/neuralnets/NeuralNetwork.cpp +++ b/src/shogun/neuralnets/NeuralNetwork.cpp @@ -790,52 +790,50 @@ void CNeuralNetwork::init() SG_REF(m_layers); SG_ADD((machine_int_t*)&m_optimization_method, "optimization_method", - "Optimization Method", ParameterProperties()); + "Optimization Method"); SG_ADD(&m_gd_mini_batch_size, "gd_mini_batch_size", - "Gradient Descent Mini-batch size", ParameterProperties()); + "Gradient Descent Mini-batch size"); SG_ADD(&m_max_num_epochs, "max_num_epochs", - "Max number of Epochs", ParameterProperties()); + "Max number of Epochs"); SG_ADD(&m_gd_learning_rate, "gd_learning_rate", - "Gradient descent learning rate", ParameterProperties()); + "Gradient descent learning rate"); SG_ADD(&m_gd_learning_rate_decay, "gd_learning_rate_decay", - "Gradient descent learning rate decay", ParameterProperties()); + "Gradient descent learning rate decay"); SG_ADD(&m_gd_momentum, "gd_momentum", - "Gradient Descent Momentum", ParameterProperties()); + "Gradient Descent Momentum"); SG_ADD(&m_gd_error_damping_coeff, "gd_error_damping_coeff", - "Gradient Descent Error Damping Coeff", ParameterProperties()); + "Gradient Descent Error Damping Coeff"); SG_ADD(&m_epsilon, "epsilon", - "Epsilon", ParameterProperties()); + "Epsilon"); SG_ADD(&m_num_inputs, "num_inputs", - "Number of Inputs", ParameterProperties()); + "Number of Inputs"); SG_ADD(&m_num_layers, "num_layers", - "Number of Layers", ParameterProperties()); + "Number of Layers"); SG_ADD(&m_adj_matrix, "adj_matrix", - "Adjacency Matrix", ParameterProperties()); + "Adjacency Matrix"); SG_ADD(&m_l2_coefficient, "l2_coefficient", - "L2 regularization coeff", ParameterProperties()); + "L2 regularization coeff"); SG_ADD(&m_l1_coefficient, "l1_coefficient", - "L1 regularization coeff", ParameterProperties()); + "L1 regularization coeff"); SG_ADD(&m_dropout_hidden, "dropout_hidden", - "Hidden neuron dropout probability", ParameterProperties()); + "Hidden neuron dropout probability"); SG_ADD(&m_dropout_input, "dropout_input", - "Input neuron dropout probability", ParameterProperties()); + "Input neuron dropout probability"); SG_ADD(&m_max_norm, "max_norm", - "Max Norm", ParameterProperties()); + "Max Norm"); SG_ADD(&m_total_num_parameters, "total_num_parameters", - "Total number of parameters", ParameterProperties()); + "Total number of parameters"); SG_ADD(&m_index_offsets, "index_offsets", - "Index Offsets", ParameterProperties()); + "Index Offsets"); SG_ADD(&m_params, "params", - "Parameters", ParameterProperties()); + "Parameters"); SG_ADD(&m_param_regularizable, "param_regularizable", - "Parameter Regularizable", ParameterProperties()); + "Parameter Regularizable"); SG_ADD( - &m_layers, "layers", "DynamicObjectArray of NeuralNetwork objects", - ParameterProperties()); - SG_ADD(&m_auto_quick_initialize, "auto_quick_initialize", "auto_quick_initialize", ParameterProperties()); + &m_layers, "layers", "DynamicObjectArray of NeuralNetwork objects"); + SG_ADD(&m_auto_quick_initialize, "auto_quick_initialize", 
"auto_quick_initialize"); SG_ADD(&m_is_training, "is_training", - "is_training", ParameterProperties()); + "is_training"); SG_ADD( - &m_sigma, "sigma", "sigma", - ParameterProperties()); + &m_sigma, "sigma", "sigma"); } diff --git a/src/shogun/neuralnets/RBM.cpp b/src/shogun/neuralnets/RBM.cpp index 0be99bc6a1e..734b2ef746f 100644 --- a/src/shogun/neuralnets/RBM.cpp +++ b/src/shogun/neuralnets/RBM.cpp @@ -623,45 +623,45 @@ void CRBM::init() m_num_params = 0; m_batch_size = 0; - SG_ADD(&cd_num_steps, "cd_num_steps", "Number of CD Steps", ParameterProperties()); - SG_ADD(&cd_persistent, "cd_persistent", "Whether to use PCD", ParameterProperties()); + SG_ADD(&cd_num_steps, "cd_num_steps", "Number of CD Steps"); + SG_ADD(&cd_persistent, "cd_persistent", "Whether to use PCD"); SG_ADD(&cd_sample_visible, "sample_visible", - "Whether to sample the visible units during (P)CD", ParameterProperties()); + "Whether to sample the visible units during (P)CD"); SG_ADD(&l2_coefficient, "l2_coefficient", - "L2 regularization coeff", ParameterProperties()); + "L2 regularization coeff"); SG_ADD(&l1_coefficient, "l1_coefficient", - "L1 regularization coeff", ParameterProperties()); + "L1 regularization coeff"); SG_ADD((machine_int_t*)&monitoring_method, "monitoring_method", - "Monitoring Method", ParameterProperties()); + "Monitoring Method"); SG_ADD(&monitoring_interval, "monitoring_interval", - "Monitoring Interval", ParameterProperties()); + "Monitoring Interval"); SG_ADD(&gd_mini_batch_size, "gd_mini_batch_size", - "Gradient Descent Mini-batch size", ParameterProperties()); + "Gradient Descent Mini-batch size"); SG_ADD(&max_num_epochs, "max_num_epochs", - "Max number of Epochs", ParameterProperties()); + "Max number of Epochs"); SG_ADD(&gd_learning_rate, "gd_learning_rate", - "Gradient descent learning rate", ParameterProperties()); + "Gradient descent learning rate"); SG_ADD(&gd_learning_rate_decay, "gd_learning_rate_decay", - "Gradient descent learning rate decay", ParameterProperties()); + "Gradient descent learning rate decay"); SG_ADD(&gd_momentum, "gd_momentum", - "Gradient Descent Momentum", ParameterProperties()); + "Gradient Descent Momentum"); SG_ADD(&m_num_hidden, "num_hidden", - "Number of Hidden Units", ParameterProperties()); + "Number of Hidden Units"); SG_ADD(&m_num_visible, "num_visible", - "Number of Visible Units", ParameterProperties()); + "Number of Visible Units"); SG_ADD(&m_num_visible_groups, "num_visible_groups", - "Number of Visible Unit Groups", ParameterProperties()); + "Number of Visible Unit Groups"); SG_ADD((CSGObject**)&m_visible_group_sizes, "visible_group_sizes", - "Sizes of Visible Unit Groups", ParameterProperties()); + "Sizes of Visible Unit Groups"); SG_ADD((CSGObject**)&m_visible_group_types, "visible_group_types", - "Types of Visible Unit Groups", ParameterProperties()); + "Types of Visible Unit Groups"); SG_ADD((CSGObject**)&m_visible_state_offsets, "visible_group_index_offsets", - "State Index offsets of Visible Unit Groups", ParameterProperties()); + "State Index offsets of Visible Unit Groups"); SG_ADD(&m_num_params, "num_params", - "Number of Parameters", ParameterProperties()); - SG_ADD(&m_params, "params", "Parameters", ParameterProperties()); + "Number of Parameters"); + SG_ADD(&m_params, "params", "Parameters"); } diff --git a/src/shogun/optimization/AdaDeltaUpdater.cpp b/src/shogun/optimization/AdaDeltaUpdater.cpp index 2fdf5ce6c03..616a0fd1831 100644 --- a/src/shogun/optimization/AdaDeltaUpdater.cpp +++ b/src/shogun/optimization/AdaDeltaUpdater.cpp @@ 
-86,15 +86,15 @@ void AdaDeltaUpdater::init() m_gradient_delta_accuracy=SGVector(); SG_ADD(&m_gradient_accuracy, "AdaDeltaUpdater__m_gradient_accuracy", - "gradient_accuracy in AdaDeltaUpdater", ParameterProperties()); + "gradient_accuracy in AdaDeltaUpdater"); SG_ADD(&m_gradient_delta_accuracy, "AdaDeltaUpdater__m_gradient_delta_accuracy", - "gradient_delta_accuracy in AdaDeltaUpdater", ParameterProperties()); + "gradient_delta_accuracy in AdaDeltaUpdater"); SG_ADD(&m_epsilon, "AdaDeltaUpdater__m_epsilon", - "epsilon in AdaDeltaUpdater", ParameterProperties()); + "epsilon in AdaDeltaUpdater"); SG_ADD(&m_decay_factor, "AdaDeltaUpdater__m_decay_factor", - "decay_factor in AdaDeltaUpdater", ParameterProperties()); + "decay_factor in AdaDeltaUpdater"); SG_ADD(&m_build_in_learning_rate, "AdaDeltaUpdater__m_build_in_learning_rate", - "m_build_in_learning_rate in AdaDeltaUpdater", ParameterProperties()); + "m_build_in_learning_rate in AdaDeltaUpdater"); } float64_t AdaDeltaUpdater::get_negative_descend_direction(float64_t variable, diff --git a/src/shogun/optimization/AdaGradUpdater.cpp b/src/shogun/optimization/AdaGradUpdater.cpp index adaf0e4e0cc..f1005b64d04 100644 --- a/src/shogun/optimization/AdaGradUpdater.cpp +++ b/src/shogun/optimization/AdaGradUpdater.cpp @@ -71,11 +71,11 @@ void AdaGradUpdater::init() m_gradient_accuracy=SGVector(); SG_ADD(&m_epsilon, "AdaGradUpdater__m_epsilon", - "epsilon in AdaGradUpdater", ParameterProperties()); + "epsilon in AdaGradUpdater"); SG_ADD(&m_build_in_learning_rate, "AdaGradUpdater__m_build_in_learning_rate", - "m_build_in_learning_rate in AdaGradUpdater", ParameterProperties()); + "m_build_in_learning_rate in AdaGradUpdater"); SG_ADD(&m_gradient_accuracy, "AdaGradUpdater__m_gradient_accuracy", - "gradient_accuracy in AdaGradUpdater", ParameterProperties()); + "gradient_accuracy in AdaGradUpdater"); } float64_t AdaGradUpdater::get_negative_descend_direction(float64_t variable, diff --git a/src/shogun/optimization/AdamUpdater.cpp b/src/shogun/optimization/AdamUpdater.cpp index 2003cfa9501..e7a4e8340fa 100644 --- a/src/shogun/optimization/AdamUpdater.cpp +++ b/src/shogun/optimization/AdamUpdater.cpp @@ -97,21 +97,21 @@ void AdamUpdater::init() m_gradient_second_moment=SGVector(); SG_ADD(&m_decay_factor_first_moment, "AdamUpdater__m_decay_factor_first_moment", - "decay_factor_first_moment in AdamUpdater", ParameterProperties()); + "decay_factor_first_moment in AdamUpdater"); SG_ADD(&m_decay_factor_second_moment, "AdamUpdater__m_decay_factor_second_moment", - "decay_factor_second_moment in AdamUpdater", ParameterProperties()); + "decay_factor_second_moment in AdamUpdater"); SG_ADD(&m_gradient_first_moment, "AdamUpdater__m_gradient_first_moment", - "m_gradient_first_moment in AdamUpdater", ParameterProperties()); + "m_gradient_first_moment in AdamUpdater"); SG_ADD(&m_gradient_second_moment, "AdamUpdater__m_gradient_second_moment", - "m_gradient_second_moment in AdamUpdater", ParameterProperties()); + "m_gradient_second_moment in AdamUpdater"); SG_ADD(&m_epsilon, "AdamUpdater__m_epsilon", - "epsilon in AdamUpdater", ParameterProperties()); + "epsilon in AdamUpdater"); SG_ADD(&m_log_scale_pre_iteration, "AdamUpdater__m_log_scale_pre_iteration", - "log_scale_pre_iteration in AdamUpdater", ParameterProperties()); + "log_scale_pre_iteration in AdamUpdater"); SG_ADD(&m_log_learning_rate, "AdamUpdater__m_log_learning_rate", - "m_log_learning_rate in AdamUpdater", ParameterProperties()); + "m_log_learning_rate in AdamUpdater"); SG_ADD(&m_iteration_counter, 
"AdamUpdater__m_iteration_counter", - "m_iteration_counter in AdamUpdater", ParameterProperties()); + "m_iteration_counter in AdamUpdater"); } float64_t AdamUpdater::get_negative_descend_direction(float64_t variable, diff --git a/src/shogun/optimization/AdaptMomentumCorrection.cpp b/src/shogun/optimization/AdaptMomentumCorrection.cpp index f756035d657..d3e87dddfc9 100644 --- a/src/shogun/optimization/AdaptMomentumCorrection.cpp +++ b/src/shogun/optimization/AdaptMomentumCorrection.cpp @@ -128,15 +128,15 @@ void AdaptMomentumCorrection::init() m_init_descend_rate=1.0; SG_ADD(&m_adapt_rate, "AdaptMomentumCorrection__m_adapt_rate", - "m_adapt_rate in AdaptMomentumCorrection", ParameterProperties()); + "m_adapt_rate in AdaptMomentumCorrection"); SG_ADD(&m_rate_min, "AdaptMomentumCorrection__m_rate_min", - "m_rate_min in AdaptMomentumCorrection", ParameterProperties()); + "m_rate_min in AdaptMomentumCorrection"); SG_ADD(&m_rate_max, "AdaptMomentumCorrection__m_rate_max", - "m_rate_max in AdaptMomentumCorrection", ParameterProperties()); + "m_rate_max in AdaptMomentumCorrection"); SG_ADD(&m_init_descend_rate, "AdaptMomentumCorrection__m_init_descend_rate", - "m_init_descend_rate in AdaptMomentumCorrection", ParameterProperties()); + "m_init_descend_rate in AdaptMomentumCorrection"); SG_ADD(&m_descend_rate, "AdaptMomentumCorrection__m_descend_rate", - "m_descend_rate in AdaptMomentumCorrection", ParameterProperties()); + "m_descend_rate in AdaptMomentumCorrection"); SG_ADD((CSGObject **)&m_momentum_correction, "AdaptMomentumCorrection__m_momentum_correction", - "m_momentum_correction in AdaptMomentumCorrection", ParameterProperties()); + "m_momentum_correction in AdaptMomentumCorrection"); } diff --git a/src/shogun/optimization/ConstLearningRate.cpp b/src/shogun/optimization/ConstLearningRate.cpp index 3cf46f1143e..f56d866e36e 100644 --- a/src/shogun/optimization/ConstLearningRate.cpp +++ b/src/shogun/optimization/ConstLearningRate.cpp @@ -50,5 +50,5 @@ void ConstLearningRate::init() { m_const_learning_rate=0.0; SG_ADD(&m_const_learning_rate, "ConstLearningRate__m_const_learning_rate", - "learning rate in ConstLearningRate", ParameterProperties()); + "learning rate in ConstLearningRate"); } diff --git a/src/shogun/optimization/DescendCorrection.cpp b/src/shogun/optimization/DescendCorrection.cpp index 4408b841040..c9c5cb962ed 100644 --- a/src/shogun/optimization/DescendCorrection.cpp +++ b/src/shogun/optimization/DescendCorrection.cpp @@ -43,5 +43,5 @@ void DescendCorrection::init() { m_weight=0.0; SG_ADD(&m_weight, "DescendCorrection__m_weight", - "weight used in descend correction", ParameterProperties()); + "weight used in descend correction"); } diff --git a/src/shogun/optimization/DescendUpdaterWithCorrection.cpp b/src/shogun/optimization/DescendUpdaterWithCorrection.cpp index 286c6c02ed2..77a8d4865f0 100644 --- a/src/shogun/optimization/DescendUpdaterWithCorrection.cpp +++ b/src/shogun/optimization/DescendUpdaterWithCorrection.cpp @@ -90,5 +90,5 @@ void DescendUpdaterWithCorrection::init() { m_correction=NULL; SG_ADD((CSGObject **)&m_correction, "DescendUpdaterWithCorrection__m_correction", - "correction in DescendUpdaterWithCorrection", ParameterProperties()); + "correction in DescendUpdaterWithCorrection"); } diff --git a/src/shogun/optimization/ElasticNetPenalty.cpp b/src/shogun/optimization/ElasticNetPenalty.cpp index 6ee3fa17676..8d648a2261c 100644 --- a/src/shogun/optimization/ElasticNetPenalty.cpp +++ b/src/shogun/optimization/ElasticNetPenalty.cpp @@ -85,9 +85,9 @@ void 
ElasticNetPenalty::init() m_l1_penalty=new L1Penalty(); m_l2_penalty=new L2Penalty(); SG_ADD(&m_l1_ratio, "ElasticNetPenalty__m_l1_ratio", - "l1_ratio in ElasticNetPenalty", ParameterProperties()); + "l1_ratio in ElasticNetPenalty"); SG_ADD((CSGObject **) &m_l1_penalty, "ElasticNetPenalty__m_l1_penalty", - "l1_penalty in ElasticNetPenalty", ParameterProperties()); + "l1_penalty in ElasticNetPenalty"); SG_ADD((CSGObject **) &m_l2_penalty, "ElasticNetPenalty__m_l2_penalty", - "l2_penalty in ElasticNetPenalty", ParameterProperties()); + "l2_penalty in ElasticNetPenalty"); } diff --git a/src/shogun/optimization/FirstOrderMinimizer.cpp b/src/shogun/optimization/FirstOrderMinimizer.cpp index 4b404061467..bf5be601ca7 100644 --- a/src/shogun/optimization/FirstOrderMinimizer.cpp +++ b/src/shogun/optimization/FirstOrderMinimizer.cpp @@ -98,9 +98,9 @@ void FirstOrderMinimizer::init() m_penalty_type=NULL; m_penalty_weight=0; SG_ADD(&m_penalty_weight, "FirstOrderMinimizer__m_penalty_weight", - "penalty_weight in FirstOrderMinimizer", ParameterProperties()); + "penalty_weight in FirstOrderMinimizer"); SG_ADD((CSGObject **)&m_penalty_type, "FirstOrderMinimizer__m_penalty_type", - "penalty_type in FirstOrderMinimizer", ParameterProperties()); + "penalty_type in FirstOrderMinimizer"); SG_ADD((CSGObject **)&m_fun, "FirstOrderMinimizer__m_fun", - "penalty_fun in FirstOrderMinimizer", ParameterProperties()); + "penalty_fun in FirstOrderMinimizer"); } diff --git a/src/shogun/optimization/FirstOrderStochasticMinimizer.cpp b/src/shogun/optimization/FirstOrderStochasticMinimizer.cpp index 37a8def9394..bc85832ae97 100644 --- a/src/shogun/optimization/FirstOrderStochasticMinimizer.cpp +++ b/src/shogun/optimization/FirstOrderStochasticMinimizer.cpp @@ -101,13 +101,13 @@ void FirstOrderStochasticMinimizer::init() m_iter_counter=0; SG_ADD((CSGObject **)&m_learning_rate, "FirstOrderMinimizer__m_learning_rate", - "learning_rate in FirstOrderStochasticMinimizer", ParameterProperties()); + "learning_rate in FirstOrderStochasticMinimizer"); SG_ADD((CSGObject **)&m_gradient_updater, "FirstOrderMinimizer__m_gradient_updater", - "gradient_updater in FirstOrderStochasticMinimizer", ParameterProperties()); + "gradient_updater in FirstOrderStochasticMinimizer"); SG_ADD(&m_num_passes, "FirstOrderMinimizer__m_num_passes", - "num_passes in FirstOrderStochasticMinimizer", ParameterProperties()); + "num_passes in FirstOrderStochasticMinimizer"); SG_ADD(&m_cur_passes, "FirstOrderMinimizer__m_cur_passes", - "cur_passes in FirstOrderStochasticMinimizer", ParameterProperties()); + "cur_passes in FirstOrderStochasticMinimizer"); SG_ADD(&m_iter_counter, "FirstOrderMinimizer__m_iter_counter", - "m_iter_counter in FirstOrderStochasticMinimizer", ParameterProperties()); + "m_iter_counter in FirstOrderStochasticMinimizer"); } diff --git a/src/shogun/optimization/InverseScalingLearningRate.cpp b/src/shogun/optimization/InverseScalingLearningRate.cpp index d8105e1cdef..6ee2a747f6b 100644 --- a/src/shogun/optimization/InverseScalingLearningRate.cpp +++ b/src/shogun/optimization/InverseScalingLearningRate.cpp @@ -72,11 +72,11 @@ void InverseScalingLearningRate::init() m_intercept=0.0; m_slope=1.0; SG_ADD(&m_slope, "InverseScalingLearningRate__m_slope", - "slope in InverseScalingLearningRate", ParameterProperties()); + "slope in InverseScalingLearningRate"); SG_ADD(&m_exponent, "InverseScalingLearningRate__m_exponent", - "exponent in InverseScalingLearningRate", ParameterProperties()); + "exponent in InverseScalingLearningRate"); 
SG_ADD(&m_intercept, "InverseScalingLearningRate__m_intercept", - "intercept in InverseScalingLearningRate", ParameterProperties()); + "intercept in InverseScalingLearningRate"); SG_ADD(&m_initial_learning_rate, "InverseScalingLearningRate__m_initial_learning_rate", - "initial_learning_rate in InverseScalingLearningRate", ParameterProperties()); + "initial_learning_rate in InverseScalingLearningRate"); } diff --git a/src/shogun/optimization/L1Penalty.cpp b/src/shogun/optimization/L1Penalty.cpp index cd5c4677f30..b93a3a93a97 100644 --- a/src/shogun/optimization/L1Penalty.cpp +++ b/src/shogun/optimization/L1Penalty.cpp @@ -74,5 +74,5 @@ void L1Penalty::init() { m_rounding_epsilon=1e-8; SG_ADD(&m_rounding_epsilon, "L1Penalty__m_rounding_epsilon", - "rounding_epsilon in L1Penalty", ParameterProperties()); + "rounding_epsilon in L1Penalty"); } diff --git a/src/shogun/optimization/L1PenaltyForTG.cpp b/src/shogun/optimization/L1PenaltyForTG.cpp index 42d8aebc966..071d0931e44 100644 --- a/src/shogun/optimization/L1PenaltyForTG.cpp +++ b/src/shogun/optimization/L1PenaltyForTG.cpp @@ -62,7 +62,7 @@ void L1PenaltyForTG::init() m_u=0; m_q=SGVector(); SG_ADD(&m_u, "L1PenaltyForTG__m_u", - "u in L1PenaltyForTG", ParameterProperties()); + "u in L1PenaltyForTG"); SG_ADD(&m_q, "L1PenaltyForTG__m_q", - "q in L1PenaltyForTG", ParameterProperties()); + "q in L1PenaltyForTG"); } diff --git a/src/shogun/optimization/MomentumCorrection.cpp b/src/shogun/optimization/MomentumCorrection.cpp index 3558670625c..9b1ff5d1711 100644 --- a/src/shogun/optimization/MomentumCorrection.cpp +++ b/src/shogun/optimization/MomentumCorrection.cpp @@ -51,5 +51,5 @@ void MomentumCorrection::init() { m_previous_descend_direction=SGVector(); SG_ADD(&m_previous_descend_direction, "MomentumCorrection__m_previous_descend_direction", - "previous_descend_direction in MomentumCorrection", ParameterProperties()); + "previous_descend_direction in MomentumCorrection"); } diff --git a/src/shogun/optimization/PNormMappingFunction.cpp b/src/shogun/optimization/PNormMappingFunction.cpp index 94524b89e39..741c81eece7 100644 --- a/src/shogun/optimization/PNormMappingFunction.cpp +++ b/src/shogun/optimization/PNormMappingFunction.cpp @@ -80,5 +80,5 @@ void PNormMappingFunction::init() { m_p=2.0; SG_ADD(&m_p, "PNormMappingFunction__m_p", - "p in PNormMappingFunction", ParameterProperties()); + "p in PNormMappingFunction"); } diff --git a/src/shogun/optimization/RmsPropUpdater.cpp b/src/shogun/optimization/RmsPropUpdater.cpp index c57025f7704..2b417abafc5 100644 --- a/src/shogun/optimization/RmsPropUpdater.cpp +++ b/src/shogun/optimization/RmsPropUpdater.cpp @@ -81,13 +81,13 @@ void RmsPropUpdater::init() m_gradient_accuracy=SGVector(); SG_ADD(&m_decay_factor, "RmsPropUpdater__m_decay_factor", - "decay_factor in RmsPropUpdater", ParameterProperties()); + "decay_factor in RmsPropUpdater"); SG_ADD(&m_epsilon, "RmsPropUpdater__m_epsilon", - "epsilon in RmsPropUpdater", ParameterProperties()); + "epsilon in RmsPropUpdater"); SG_ADD(&m_build_in_learning_rate, "RmsPropUpdater__m_build_in_learning_rate", - "build_in_learning_rate in RmsPropUpdater", ParameterProperties()); + "build_in_learning_rate in RmsPropUpdater"); SG_ADD(&m_gradient_accuracy, "RmsPropUpdater__m_gradient_accuracy", - "gradient_accuracy in RmsPropUpdater", ParameterProperties()); + "gradient_accuracy in RmsPropUpdater"); } float64_t RmsPropUpdater::get_negative_descend_direction(float64_t variable, diff --git a/src/shogun/optimization/SMDMinimizer.cpp 
b/src/shogun/optimization/SMDMinimizer.cpp index a8581ede7a3..87647b7616b 100644 --- a/src/shogun/optimization/SMDMinimizer.cpp +++ b/src/shogun/optimization/SMDMinimizer.cpp @@ -80,7 +80,7 @@ void SMDMinimizer::init() { m_mapping_fun=NULL; SG_ADD((CSGObject **)&m_mapping_fun, "SMDMinimizer__m_mapping_fun", - "m_mapping_fun in SMDMinimizer", ParameterProperties()); + "m_mapping_fun in SMDMinimizer"); } void SMDMinimizer::set_mapping_function(MappingFunction* mapping_fun) diff --git a/src/shogun/optimization/SMIDASMinimizer.cpp b/src/shogun/optimization/SMIDASMinimizer.cpp index 1168a9ee0d1..afa2f78d215 100644 --- a/src/shogun/optimization/SMIDASMinimizer.cpp +++ b/src/shogun/optimization/SMIDASMinimizer.cpp @@ -92,7 +92,7 @@ void SMIDASMinimizer::init() { m_dual_variable=SGVector(); SG_ADD(&m_dual_variable, "SMIDASMinimizer__m_dual_variable", - "dual_variable in SMIDASMinimizer", ParameterProperties()); + "dual_variable in SMIDASMinimizer"); } void SMIDASMinimizer::init_minimization() diff --git a/src/shogun/optimization/SVRGMinimizer.cpp b/src/shogun/optimization/SVRGMinimizer.cpp index 12bcc42aeaa..03b6aa040b2 100644 --- a/src/shogun/optimization/SVRGMinimizer.cpp +++ b/src/shogun/optimization/SVRGMinimizer.cpp @@ -57,13 +57,13 @@ void SVRGMinimizer::init() m_previous_variable=SGVector(); SG_ADD(&m_num_sgd_passes, "SVRGMinimizer__m_num_sgd_passes", - "num_sgd_passes in SVRGMinimizer", ParameterProperties()); + "num_sgd_passes in SVRGMinimizer"); SG_ADD(&m_svrg_interval, "SVRGMinimizer__m_svrg_interval", - "svrg_interval in SVRGMinimizer", ParameterProperties()); + "svrg_interval in SVRGMinimizer"); SG_ADD(&m_average_gradient, "SVRGMinimizer__m_average_gradient", - "average_gradient in SVRGMinimizer", ParameterProperties()); + "average_gradient in SVRGMinimizer"); SG_ADD(&m_previous_variable, "SVRGMinimizer__m_previous_variable", - "previous_variable in SVRGMinimizer", ParameterProperties()); + "previous_variable in SVRGMinimizer"); } void SVRGMinimizer::init_minimization() diff --git a/src/shogun/optimization/lbfgs/LBFGSMinimizer.cpp b/src/shogun/optimization/lbfgs/LBFGSMinimizer.cpp index 2d618e5c6b5..a87a62cafa2 100644 --- a/src/shogun/optimization/lbfgs/LBFGSMinimizer.cpp +++ b/src/shogun/optimization/lbfgs/LBFGSMinimizer.cpp @@ -62,39 +62,39 @@ void CLBFGSMinimizer::init() m_min_step=1e-6; m_xtol=1e-6; SG_ADD(&m_linesearch_id, "CLBFGSMinimizer__m_linesearch_id", - "linesearch_id in CLBFGSMinimizer", ParameterProperties()); + "linesearch_id in CLBFGSMinimizer"); SG_ADD(&m_m, "CLBFGSMinimizer__m_m", - "m in CLBFGSMinimizer", ParameterProperties()); + "m in CLBFGSMinimizer"); SG_ADD(&m_max_linesearch, "CLBFGSMinimizer__m_max_linesearch", - "max_linesearch in CLBFGSMinimizer", ParameterProperties()); + "max_linesearch in CLBFGSMinimizer"); SG_ADD(&m_max_iterations, "CLBFGSMinimizer__m_max_iterations", - "max_iterations in CLBFGSMinimizer", ParameterProperties()); + "max_iterations in CLBFGSMinimizer"); SG_ADD(&m_delta, "CLBFGSMinimizer__m_delta", - "delta in CLBFGSMinimizer", ParameterProperties()); + "delta in CLBFGSMinimizer"); SG_ADD(&m_past, "CLBFGSMinimizer__m_past", - "past in CLBFGSMinimizer", ParameterProperties()); + "past in CLBFGSMinimizer"); SG_ADD(&m_epsilon, "CLBFGSMinimizer__m_epsilon", - "epsilon in CLBFGSMinimizer", ParameterProperties()); + "epsilon in CLBFGSMinimizer"); SG_ADD(&m_min_step, "CLBFGSMinimizer__m_min_step", - "min_step in CLBFGSMinimizer", ParameterProperties()); + "min_step in CLBFGSMinimizer"); SG_ADD(&m_max_step, "CLBFGSMinimizer__m_max_step", - 
"max_step in CLBFGSMinimizer", ParameterProperties()); + "max_step in CLBFGSMinimizer"); SG_ADD(&m_ftol, "CLBFGSMinimizer__m_ftol", - "ftol in CLBFGSMinimizer", ParameterProperties()); + "ftol in CLBFGSMinimizer"); SG_ADD(&m_wolfe, "CLBFGSMinimizer__m_wolfe", - "wolfe in CLBFGSMinimizer", ParameterProperties()); + "wolfe in CLBFGSMinimizer"); SG_ADD(&m_gtol, "CLBFGSMinimizer__m_gtol", - "gtol in CLBFGSMinimizer", ParameterProperties()); + "gtol in CLBFGSMinimizer"); SG_ADD(&m_xtol, "CLBFGSMinimizer__m_xtol", - "xtol in CLBFGSMinimizer", ParameterProperties()); + "xtol in CLBFGSMinimizer"); SG_ADD(&m_orthantwise_c, "CLBFGSMinimizer__m_orthantwise_c", - "orthantwise_c in CLBFGSMinimizer", ParameterProperties()); + "orthantwise_c in CLBFGSMinimizer"); SG_ADD(&m_orthantwise_start, "CLBFGSMinimizer__m_orthantwise_start", - "orthantwise_start in CLBFGSMinimizer", ParameterProperties()); + "orthantwise_start in CLBFGSMinimizer"); SG_ADD(&m_orthantwise_end, "CLBFGSMinimizer__m_orthantwise_end", - "orthantwise_end in CLBFGSMinimizer", ParameterProperties()); + "orthantwise_end in CLBFGSMinimizer"); SG_ADD(&m_target_variable, "CLBFGSMinimizer__m_target_variable", - "m_target_variable in CLBFGSMinimizer", ParameterProperties()); + "m_target_variable in CLBFGSMinimizer"); } void CLBFGSMinimizer::set_lbfgs_parameters( diff --git a/src/shogun/preprocessor/DependenceMaximization.cpp b/src/shogun/preprocessor/DependenceMaximization.cpp index e51bf65c13e..e3ebef68e35 100644 --- a/src/shogun/preprocessor/DependenceMaximization.cpp +++ b/src/shogun/preprocessor/DependenceMaximization.cpp @@ -46,9 +46,9 @@ CDependenceMaximization::CDependenceMaximization() void CDependenceMaximization::init() { SG_ADD((CSGObject**)&m_estimator, "estimator", - "the estimator for computing measures", ParameterProperties()); + "the estimator for computing measures"); SG_ADD((CSGObject**)&m_labels_feats, "labels_feats", - "the features based on labels", ParameterProperties()); + "the features based on labels"); m_estimator=NULL; m_labels_feats=NULL; diff --git a/src/shogun/preprocessor/FeatureSelection.cpp b/src/shogun/preprocessor/FeatureSelection.cpp index 79236eac856..2536b7392b7 100644 --- a/src/shogun/preprocessor/FeatureSelection.cpp +++ b/src/shogun/preprocessor/FeatureSelection.cpp @@ -48,18 +48,16 @@ CFeatureSelection::CFeatureSelection() : CPreprocessor() template void CFeatureSelection::initialize_parameters() { - SG_ADD(&m_target_dim, "target_dim", "target dimension", - ParameterProperties()); + SG_ADD(&m_target_dim, "target_dim", "target dimension"); SG_ADD((machine_int_t*)&m_algorithm, "algorithm", - "the feature selectiona algorithm", ParameterProperties()); - SG_ADD((machine_int_t*)&m_policy, "policy", "feature removal policy", - ParameterProperties()); + "the feature selectiona algorithm"); + SG_ADD((machine_int_t*)&m_policy, "policy", "feature removal policy"); SG_ADD(&m_num_remove, "num_remove", "number or percentage of features to " - "be removed", ParameterProperties()); + "be removed"); SG_ADD((CSGObject**)&m_labels, "labels", - "the class labels for the features", ParameterProperties()); + "the class labels for the features"); SG_ADD((CSGObject**)&m_subset, "subset", - "indices of selected features", ParameterProperties()); + "indices of selected features"); m_target_dim=0; m_algorithm=BACKWARD_ELIMINATION; diff --git a/src/shogun/preprocessor/FisherLDA.cpp b/src/shogun/preprocessor/FisherLDA.cpp index 16edf8cb40b..deb4285a240 100644 --- a/src/shogun/preprocessor/FisherLDA.cpp +++ 
b/src/shogun/preprocessor/FisherLDA.cpp @@ -69,22 +69,18 @@ void CFisherLDA::initialize_parameters() m_gamma = 0; m_bdc_svd = true; SG_ADD( - &m_method, "FLDA_method", "method for performing FLDA", - ParameterProperties()); + &m_method, "FLDA_method", "method for performing FLDA"); SG_ADD( - &m_num_dim, "final_dimensions", "dimensions to be retained", - ParameterProperties()); - SG_ADD(&m_gamma, "m_gamma", "Regularization parameter", ParameterProperties()); - SG_ADD(&m_bdc_svd, "m_bdc_svd", "Use BDC-SVD algorithm", ParameterProperties()); + &m_num_dim, "final_dimensions", "dimensions to be retained"); + SG_ADD(&m_gamma, "m_gamma", "Regularization parameter"); + SG_ADD(&m_bdc_svd, "m_bdc_svd", "Use BDC-SVD algorithm"); SG_ADD( &m_transformation_matrix, "transformation_matrix", "Transformation" - " matrix (Eigenvectors of covariance matrix).", - ParameterProperties()); - SG_ADD(&m_mean_vector, "mean_vector", "Mean Vector.", ParameterProperties()); + " matrix (Eigenvectors of covariance matrix)."); + SG_ADD(&m_mean_vector, "mean_vector", "Mean Vector."); SG_ADD( - &m_eigenvalues_vector, "eigenvalues_vector", "Vector with Eigenvalues.", - ParameterProperties()); + &m_eigenvalues_vector, "eigenvalues_vector", "Vector with Eigenvalues."); } CFisherLDA::~CFisherLDA() diff --git a/src/shogun/preprocessor/KernelPCA.cpp b/src/shogun/preprocessor/KernelPCA.cpp index ed21b36731f..1a34cfe2b33 100644 --- a/src/shogun/preprocessor/KernelPCA.cpp +++ b/src/shogun/preprocessor/KernelPCA.cpp @@ -42,9 +42,9 @@ void CKernelPCA::init() m_kernel = NULL; SG_ADD(&m_transformation_matrix, "transformation_matrix", - "matrix used to transform data", ParameterProperties()); + "matrix used to transform data"); SG_ADD(&m_bias_vector, "bias_vector", - "bias vector used to transform data", ParameterProperties()); + "bias vector used to transform data"); SG_ADD( &m_target_dim, "target_dim", "target dimensionality of preprocessor", ParameterProperties::HYPER); diff --git a/src/shogun/preprocessor/PCA.cpp b/src/shogun/preprocessor/PCA.cpp index 5309d27b411..a6168db477c 100644 --- a/src/shogun/preprocessor/PCA.cpp +++ b/src/shogun/preprocessor/PCA.cpp @@ -53,21 +53,20 @@ void CPCA::init() m_target_dim = 1; SG_ADD(&m_transformation_matrix, "transformation_matrix", - "Transformation matrix (Eigenvectors of covariance matrix).", - ParameterProperties()); - SG_ADD(&m_mean_vector, "mean_vector", "Mean Vector.", ParameterProperties()); + "Transformation matrix (Eigenvectors of covariance matrix)."); + SG_ADD(&m_mean_vector, "mean_vector", "Mean Vector."); SG_ADD(&m_eigenvalues_vector, "eigenvalues_vector", - "Vector with Eigenvalues.", ParameterProperties()); + "Vector with Eigenvalues."); SG_ADD(&m_whitening, "whitening", "Whether data shall be whitened.", ParameterProperties::HYPER); SG_ADD((machine_int_t*) &m_mode, "mode", "PCA Mode.", ParameterProperties::HYPER); SG_ADD(&m_thresh, "m_thresh", "Cutoff threshold.", ParameterProperties::HYPER); SG_ADD((machine_int_t*) &m_mem_mode, "m_mem_mode", - "Memory mode (in-place or reallocation).", ParameterProperties()); + "Memory mode (in-place or reallocation)."); SG_ADD((machine_int_t*) &m_method, "m_method", - "Method used for PCA calculation", ParameterProperties()); + "Method used for PCA calculation"); SG_ADD(&m_eigenvalue_zero_tolerance, "eigenvalue_zero_tolerance", "zero tolerance" - " for determining zero eigenvalues during whitening to avoid numerical issues", ParameterProperties()); + " for determining zero eigenvalues during whitening to avoid numerical issues"); SG_ADD( 
&m_target_dim, "target_dim", "target dimensionality of preprocessor", ParameterProperties::HYPER); diff --git a/src/shogun/preprocessor/PruneVarSubMean.cpp b/src/shogun/preprocessor/PruneVarSubMean.cpp index d66af603624..f2ad377e0f9 100644 --- a/src/shogun/preprocessor/PruneVarSubMean.cpp +++ b/src/shogun/preprocessor/PruneVarSubMean.cpp @@ -159,8 +159,8 @@ void CPruneVarSubMean::init() void CPruneVarSubMean::register_parameters() { SG_ADD(&m_divide_by_std, "divide_by_std", "Divide by standard deviation", ParameterProperties::HYPER); - SG_ADD(&m_num_idx, "num_idx", "Number of elements in idx_vec", ParameterProperties()); - SG_ADD(&m_std, "std_vec", "Standard dev vector", ParameterProperties()); - SG_ADD(&m_mean, "mean_vec", "Mean vector", ParameterProperties()); - SG_ADD(&m_idx, "idx_vec", "Index vector", ParameterProperties()); + SG_ADD(&m_num_idx, "num_idx", "Number of elements in idx_vec"); + SG_ADD(&m_std, "std_vec", "Standard dev vector"); + SG_ADD(&m_mean, "mean_vec", "Mean vector"); + SG_ADD(&m_idx, "idx_vec", "Index vector"); } diff --git a/src/shogun/preprocessor/RandomFourierGaussPreproc.cpp b/src/shogun/preprocessor/RandomFourierGaussPreproc.cpp index 53de40cc819..8808a7521df 100644 --- a/src/shogun/preprocessor/RandomFourierGaussPreproc.cpp +++ b/src/shogun/preprocessor/RandomFourierGaussPreproc.cpp @@ -76,13 +76,13 @@ CRandomFourierGaussPreproc::CRandomFourierGaussPreproc() : if(m_parameters) { SG_ADD(&dim_input_space, "dim_input_space", - "Dimensionality of the input space.", ParameterProperties()); + "Dimensionality of the input space."); SG_ADD(&cur_dim_input_space, "cur_dim_input_space", - "Dimensionality of the input space.", ParameterProperties()); + "Dimensionality of the input space."); SG_ADD(&dim_feature_space, "dim_feature_space", - "Dimensionality of the feature space.", ParameterProperties()); + "Dimensionality of the feature space."); SG_ADD(&cur_dim_feature_space, "cur_dim_feature_space", - "Dimensionality of the feature space.", ParameterProperties()); + "Dimensionality of the feature space."); SG_ADD(&kernelwidth, "kernelwidth", "Kernel width.", ParameterProperties::HYPER); SG_ADD(&cur_kernelwidth, "cur_kernelwidth", "Kernel width.", ParameterProperties::HYPER); @@ -112,13 +112,13 @@ CRandomFourierGaussPreproc::CRandomFourierGaussPreproc( if(m_parameters) { SG_ADD(&dim_input_space, "dim_input_space", - "Dimensionality of the input space.", ParameterProperties()); + "Dimensionality of the input space."); SG_ADD(&cur_dim_input_space, "cur_dim_input_space", - "Dimensionality of the input space.", ParameterProperties()); + "Dimensionality of the input space."); SG_ADD(&dim_feature_space, "dim_feature_space", - "Dimensionality of the feature space.", ParameterProperties()); + "Dimensionality of the feature space."); SG_ADD(&cur_dim_feature_space, "cur_dim_feature_space", - "Dimensionality of the feature space.", ParameterProperties()); + "Dimensionality of the feature space."); SG_ADD(&kernelwidth, "kernelwidth", "Kernel width.", ParameterProperties::HYPER); SG_ADD(&cur_kernelwidth, "cur_kernelwidth", "Kernel width.", ParameterProperties::HYPER); diff --git a/src/shogun/preprocessor/RescaleFeatures.cpp b/src/shogun/preprocessor/RescaleFeatures.cpp index f862c2b9c99..f371c0cac9f 100644 --- a/src/shogun/preprocessor/RescaleFeatures.cpp +++ b/src/shogun/preprocessor/RescaleFeatures.cpp @@ -104,6 +104,6 @@ SGVector CRescaleFeatures::apply_to_feature_vector(SGVector diff --git a/src/shogun/regression/svr/LibSVR.cpp b/src/shogun/regression/svr/LibSVR.cpp index 
8baf5498c84..00fa7606e99 100644 --- a/src/shogun/regression/svr/LibSVR.cpp +++ b/src/shogun/regression/svr/LibSVR.cpp @@ -53,7 +53,7 @@ CLibSVR::~CLibSVR() void CLibSVR::register_params() { - SG_ADD((machine_int_t*) &solver_type, "libsvr_solver_type", "LibSVR Solver type", ParameterProperties()); + SG_ADD((machine_int_t*) &solver_type, "libsvr_solver_type", "LibSVR Solver type"); } EMachineType CLibSVR::get_classifier_type() diff --git a/src/shogun/structure/CCSOSVM.cpp b/src/shogun/structure/CCSOSVM.cpp index 36144d88733..3d34f5f6884 100644 --- a/src/shogun/structure/CCSOSVM.cpp +++ b/src/shogun/structure/CCSOSVM.cpp @@ -705,15 +705,15 @@ void CCCSOSVM::init() SG_ERROR("Error while initializing mosek env: %d\n", r) #endif - SG_ADD(&m_C, "m_C", "C", ParameterProperties()); - SG_ADD(&m_eps, "m_eps", "Epsilon", ParameterProperties()); - SG_ADD(&m_alpha_thrld, "m_alpha_thrld", "Alpha threshold", ParameterProperties()); - SG_ADD(&m_cleanup_check, "m_cleanup_check", "Cleanup after given number of iterations", ParameterProperties()); - SG_ADD(&m_idle_iter, "m_idle_iter", "Maximum number of idle iteration", ParameterProperties()); - SG_ADD(&m_max_iter, "m_max_iter", "Maximum number of iterations", ParameterProperties()); - SG_ADD(&m_max_rho, "m_max_rho", "Max rho", ParameterProperties()); - SG_ADD(&m_primal_obj, "m_primal_obj", "Primal objective value", ParameterProperties()); - SG_ADD((machine_int_t*) &m_qp_type, "m_qp_type", "QP Solver Type", ParameterProperties()); + SG_ADD(&m_C, "m_C", "C"); + SG_ADD(&m_eps, "m_eps", "Epsilon"); + SG_ADD(&m_alpha_thrld, "m_alpha_thrld", "Alpha threshold"); + SG_ADD(&m_cleanup_check, "m_cleanup_check", "Cleanup after given number of iterations"); + SG_ADD(&m_idle_iter, "m_idle_iter", "Maximum number of idle iteration"); + SG_ADD(&m_max_iter, "m_max_iter", "Maximum number of iterations"); + SG_ADD(&m_max_rho, "m_max_rho", "Max rho"); + SG_ADD(&m_primal_obj, "m_primal_obj", "Primal objective value"); + SG_ADD((machine_int_t*) &m_qp_type, "m_qp_type", "QP Solver Type"); } EMachineType CCCSOSVM::get_classifier_type() diff --git a/src/shogun/structure/DisjointSet.cpp b/src/shogun/structure/DisjointSet.cpp index 9dcbb4f95d8..95dfa0e05a9 100644 --- a/src/shogun/structure/DisjointSet.cpp +++ b/src/shogun/structure/DisjointSet.cpp @@ -28,10 +28,10 @@ CDisjointSet::CDisjointSet(int32_t num_elements) void CDisjointSet::init() { - SG_ADD(&m_num_elements, "num_elements", "Number of elements", ParameterProperties()); - SG_ADD(&m_parent, "parent", "Parent pointers", ParameterProperties()); - SG_ADD(&m_rank, "rank", "Rank of each element", ParameterProperties()); - SG_ADD(&m_is_connected, "is_connected", "Whether disjoint sets have been linked", ParameterProperties()); + SG_ADD(&m_num_elements, "num_elements", "Number of elements"); + SG_ADD(&m_parent, "parent", "Parent pointers"); + SG_ADD(&m_rank, "rank", "Rank of each element"); + SG_ADD(&m_is_connected, "is_connected", "Whether disjoint sets have been linked"); m_is_connected = false; m_num_elements = -1; diff --git a/src/shogun/structure/FWSOSVM.cpp b/src/shogun/structure/FWSOSVM.cpp index 0eb01e2bcb3..b1cc64f4fde 100644 --- a/src/shogun/structure/FWSOSVM.cpp +++ b/src/shogun/structure/FWSOSVM.cpp @@ -38,11 +38,11 @@ CFWSOSVM::CFWSOSVM( void CFWSOSVM::init() { - SG_ADD(&m_lambda, "lambda", "Regularization constant", ParameterProperties()); - SG_ADD(&m_num_iter, "num_iter", "Number of iterations", ParameterProperties()); - SG_ADD(&m_do_line_search, "do_line_search", "Do line search", ParameterProperties()); - 
SG_ADD(&m_gap_threshold, "gap_threshold", "Gap threshold", ParameterProperties()); - SG_ADD(&m_ell, "ell", "Average loss", ParameterProperties()); + SG_ADD(&m_lambda, "lambda", "Regularization constant"); + SG_ADD(&m_num_iter, "num_iter", "Number of iterations"); + SG_ADD(&m_do_line_search, "do_line_search", "Do line search"); + SG_ADD(&m_gap_threshold, "gap_threshold", "Gap threshold"); + SG_ADD(&m_ell, "ell", "Average loss"); m_lambda = 1.0; m_num_iter = 50; diff --git a/src/shogun/structure/Factor.cpp b/src/shogun/structure/Factor.cpp index 84903c436be..06a0a045516 100644 --- a/src/shogun/structure/Factor.cpp +++ b/src/shogun/structure/Factor.cpp @@ -238,13 +238,13 @@ void CFactor::compute_gradients( void CFactor::init() { - SG_ADD((CSGObject**)&m_factor_type, "type_name", "Factor type name", ParameterProperties()); - SG_ADD(&m_var_index, "var_index", "Factor variable index", ParameterProperties()); - SG_ADD(&m_energies, "energies", "Factor energies", ParameterProperties()); - SG_ADD((CSGObject**)&m_data_source, "data_source", "Factor data source", ParameterProperties()); - SG_ADD(&m_data, "data", "Factor data", ParameterProperties()); - SG_ADD(&m_data_sparse, "data_sparse", "Sparse factor data", ParameterProperties()); - SG_ADD(&m_is_data_dep, "is_data_dep", "Factor is data dependent or not", ParameterProperties()); + SG_ADD((CSGObject**)&m_factor_type, "type_name", "Factor type name"); + SG_ADD(&m_var_index, "var_index", "Factor variable index"); + SG_ADD(&m_energies, "energies", "Factor energies"); + SG_ADD((CSGObject**)&m_data_source, "data_source", "Factor data source"); + SG_ADD(&m_data, "data", "Factor data"); + SG_ADD(&m_data_sparse, "data_sparse", "Sparse factor data"); + SG_ADD(&m_is_data_dep, "is_data_dep", "Factor is data dependent or not"); m_factor_type=NULL; m_data_source=NULL; @@ -302,7 +302,7 @@ void CFactorDataSource::set_data_sparse(SGSparseVectorEntry* sparse, void CFactorDataSource::init() { - SG_ADD(&m_dense, "dense", "Shared data", ParameterProperties()); - SG_ADD(&m_sparse, "sparse", "Shared sparse data", ParameterProperties()); + SG_ADD(&m_dense, "dense", "Shared data"); + SG_ADD(&m_sparse, "sparse", "Shared sparse data"); } diff --git a/src/shogun/structure/FactorGraph.cpp b/src/shogun/structure/FactorGraph.cpp index d6387ab5d8d..42ad5666b14 100644 --- a/src/shogun/structure/FactorGraph.cpp +++ b/src/shogun/structure/FactorGraph.cpp @@ -57,12 +57,12 @@ CFactorGraph::~CFactorGraph() void CFactorGraph::register_parameters() { - SG_ADD(&m_cardinalities, "cardinalities", "Cardinalities", ParameterProperties()); - SG_ADD((CSGObject**)&m_factors, "factors", "Factors", ParameterProperties()); - SG_ADD((CSGObject**)&m_datasources, "datasources", "Factor data sources", ParameterProperties()); - SG_ADD((CSGObject**)&m_dset, "dset", "Disjoint set", ParameterProperties()); - SG_ADD(&m_has_cycle, "has_cycle", "Whether has circle in graph", ParameterProperties()); - SG_ADD(&m_num_edges, "num_edges", "Number of edges", ParameterProperties()); + SG_ADD(&m_cardinalities, "cardinalities", "Cardinalities"); + SG_ADD((CSGObject**)&m_factors, "factors", "Factors"); + SG_ADD((CSGObject**)&m_datasources, "datasources", "Factor data sources"); + SG_ADD((CSGObject**)&m_dset, "dset", "Disjoint set"); + SG_ADD(&m_has_cycle, "has_cycle", "Whether has circle in graph"); + SG_ADD(&m_num_edges, "num_edges", "Number of edges"); } void CFactorGraph::init() diff --git a/src/shogun/structure/FactorGraphModel.cpp b/src/shogun/structure/FactorGraphModel.cpp index dd10e7bf86b..4a2c0b1f772 100644 
--- a/src/shogun/structure/FactorGraphModel.cpp +++ b/src/shogun/structure/FactorGraphModel.cpp @@ -37,9 +37,9 @@ CFactorGraphModel::~CFactorGraphModel() void CFactorGraphModel::init() { - SG_ADD((CSGObject**)&m_factor_types, "factor_types", "Array of factor types", ParameterProperties()); - SG_ADD(&m_w_cache, "w_cache", "Cache of global parameters", ParameterProperties()); - SG_ADD(&m_w_map, "w_map", "Parameter mapping", ParameterProperties()); + SG_ADD((CSGObject**)&m_factor_types, "factor_types", "Array of factor types"); + SG_ADD(&m_w_cache, "w_cache", "Cache of global parameters"); + SG_ADD(&m_w_map, "w_map", "Parameter mapping"); m_inf_type = TREE_MAX_PROD; m_factor_types = new CDynamicObjectArray(); diff --git a/src/shogun/structure/FactorType.cpp b/src/shogun/structure/FactorType.cpp index cd241a2bcac..27cccf6bc5e 100644 --- a/src/shogun/structure/FactorType.cpp +++ b/src/shogun/structure/FactorType.cpp @@ -48,12 +48,12 @@ CFactorType::~CFactorType() void CFactorType::init() { - SG_ADD(&m_type_id, "type_id", "Factor type name", ParameterProperties()); - SG_ADD(&m_cards, "cards", "Cardinalities", ParameterProperties()); - SG_ADD(&m_cumprod_cards, "cumprod_cards", "Cumulative product of cardinalities", ParameterProperties()); - SG_ADD(&m_num_assignments, "num_assignments", "Number of variable configurations", ParameterProperties()); - SG_ADD(&m_w, "w", "Factor parameters", ParameterProperties()); - SG_ADD(&m_data_size, "data_size", "Size of data vector", ParameterProperties()); + SG_ADD(&m_type_id, "type_id", "Factor type name"); + SG_ADD(&m_cards, "cards", "Cardinalities"); + SG_ADD(&m_cumprod_cards, "cumprod_cards", "Cumulative product of cardinalities"); + SG_ADD(&m_num_assignments, "num_assignments", "Number of variable configurations"); + SG_ADD(&m_w, "w", "Factor parameters"); + SG_ADD(&m_data_size, "data_size", "Size of data vector"); m_type_id = 0; m_data_size = 0; diff --git a/src/shogun/structure/HMSVMModel.cpp b/src/shogun/structure/HMSVMModel.cpp index 76a793d266f..1d4a6001b61 100644 --- a/src/shogun/structure/HMSVMModel.cpp +++ b/src/shogun/structure/HMSVMModel.cpp @@ -484,14 +484,13 @@ bool CHMSVMModel::check_training_setup() const void CHMSVMModel::init() { - SG_ADD((CSGObject**) &m_state_model, "m_state_model", "The state model", ParameterProperties()); + SG_ADD((CSGObject**) &m_state_model, "m_state_model", "The state model"); SG_ADD(&m_transmission_weights, "m_transmission_weights", - "Transmission weights used in Viterbi", ParameterProperties()); + "Transmission weights used in Viterbi"); SG_ADD(&m_emission_weights, "m_emission_weights", - "Emission weights used in Viterbi", ParameterProperties()); - SG_ADD(&m_num_plif_nodes, "m_num_plif_nodes", "The number of points per PLiF", - ParameterProperties()); // FIXME It would actually make sense to do MS for this parameter - SG_ADD(&m_use_plifs, "m_use_plifs", "Whether to use plifs", ParameterProperties()); + "Emission weights used in Viterbi"); + SG_ADD(&m_num_plif_nodes, "m_num_plif_nodes", "The number of points per PLiF"); // FIXME It would actually make sense to do MS for this parameter + SG_ADD(&m_use_plifs, "m_use_plifs", "Whether to use plifs"); m_num_obs = 0; m_num_aux = 0; diff --git a/src/shogun/structure/HashedMultilabelModel.cpp b/src/shogun/structure/HashedMultilabelModel.cpp index 84e8cc46c40..6e1fc76213e 100644 --- a/src/shogun/structure/HashedMultilabelModel.cpp +++ b/src/shogun/structure/HashedMultilabelModel.cpp @@ -39,15 +39,11 @@ CStructuredLabels * CHashedMultilabelModel::structured_labels_factory( 
void CHashedMultilabelModel::init(int32_t dim) { - SG_ADD(&m_false_positive, "false_positive", "Misclassification cost for false positive", - ParameterProperties()); - SG_ADD(&m_false_negative, "false_negative", "Misclassification cost for false negative", - ParameterProperties()); - SG_ADD(&m_num_classes, "num_classes", "Number of (binary) class assignment per label", - ParameterProperties()); - SG_ADD(&m_dim, "dim", "New joint feature space dimension", ParameterProperties()); - SG_ADD(&m_seeds, "seeds", "Vector of seeds used for hashing", - ParameterProperties()); + SG_ADD(&m_false_positive, "false_positive", "Misclassification cost for false positive"); + SG_ADD(&m_false_negative, "false_negative", "Misclassification cost for false negative"); + SG_ADD(&m_num_classes, "num_classes", "Number of (binary) class assignment per label"); + SG_ADD(&m_dim, "dim", "New joint feature space dimension"); + SG_ADD(&m_seeds, "seeds", "Vector of seeds used for hashing"); m_false_positive = 1; m_false_negative = 1; diff --git a/src/shogun/structure/HierarchicalMultilabelModel.cpp b/src/shogun/structure/HierarchicalMultilabelModel.cpp index 973420d0a18..d111db157a3 100644 --- a/src/shogun/structure/HierarchicalMultilabelModel.cpp +++ b/src/shogun/structure/HierarchicalMultilabelModel.cpp @@ -42,13 +42,11 @@ CHierarchicalMultilabelModel::~CHierarchicalMultilabelModel() void CHierarchicalMultilabelModel::init(SGVector taxonomy, bool leaf_nodes_mandatory) { - SG_ADD(&m_num_classes, "num_classes", "Number of (binary) class assignment per label", - ParameterProperties()); - SG_ADD(&m_taxonomy, "taxonomy", "Taxonomy of the hierarchy of the labels", - ParameterProperties()); + SG_ADD(&m_num_classes, "num_classes", "Number of (binary) class assignment per label"); + SG_ADD(&m_taxonomy, "taxonomy", "Taxonomy of the hierarchy of the labels"); SG_ADD(&m_leaf_nodes_mandatory, "leaf_nodes_mandatory", "Whether internal nodes belong" - "to output class or not", ParameterProperties()); - SG_ADD(&m_root, "root", "Node-id of the ROOT element", ParameterProperties()); + "to output class or not"); + SG_ADD(&m_root, "root", "Node-id of the ROOT element"); m_leaf_nodes_mandatory = leaf_nodes_mandatory; m_num_classes = 0; diff --git a/src/shogun/structure/MAPInference.cpp b/src/shogun/structure/MAPInference.cpp index 5854d2d4c2a..dd352477315 100644 --- a/src/shogun/structure/MAPInference.cpp +++ b/src/shogun/structure/MAPInference.cpp @@ -69,10 +69,10 @@ CMAPInference::~CMAPInference() void CMAPInference::init() { - SG_ADD((CSGObject**)&m_fg, "fg", "factor graph", ParameterProperties()); - SG_ADD((CSGObject**)&m_outputs, "outputs", "Structured outputs", ParameterProperties()); - SG_ADD((CSGObject**)&m_infer_impl, "infer_impl", "Inference implementation", ParameterProperties()); - SG_ADD(&m_energy, "energy", "Minimized energy", ParameterProperties()); + SG_ADD((CSGObject**)&m_fg, "fg", "factor graph"); + SG_ADD((CSGObject**)&m_outputs, "outputs", "Structured outputs"); + SG_ADD((CSGObject**)&m_infer_impl, "infer_impl", "Inference implementation"); + SG_ADD(&m_energy, "energy", "Minimized energy"); m_outputs = NULL; m_infer_impl = NULL; @@ -126,7 +126,7 @@ CMAPInferImpl::~CMAPInferImpl() void CMAPInferImpl::register_parameters() { SG_ADD((CSGObject**)&m_fg, "fg", - "Factor graph pointer", ParameterProperties()); + "Factor graph pointer"); m_fg = NULL; } diff --git a/src/shogun/structure/MulticlassModel.cpp b/src/shogun/structure/MulticlassModel.cpp index 8e9497dcaef..b96b3e0bba1 100644 --- 
a/src/shogun/structure/MulticlassModel.cpp +++ b/src/shogun/structure/MulticlassModel.cpp @@ -163,8 +163,7 @@ void CMulticlassModel::init_primal_opt( void CMulticlassModel::init() { - SG_ADD(&m_num_classes, "m_num_classes", "The number of classes", - ParameterProperties()); + SG_ADD(&m_num_classes, "m_num_classes", "The number of classes"); m_num_classes = 0; } diff --git a/src/shogun/structure/MulticlassSOLabels.cpp b/src/shogun/structure/MulticlassSOLabels.cpp index 40ac1527235..606daba7112 100644 --- a/src/shogun/structure/MulticlassSOLabels.cpp +++ b/src/shogun/structure/MulticlassSOLabels.cpp @@ -97,13 +97,10 @@ int32_t CMulticlassSOLabels::get_num_labels() const void CMulticlassSOLabels::init() { - SG_ADD(&m_num_classes, "m_num_classes", "The number of classes", - ParameterProperties()); - SG_ADD(&m_num_labels_set, "m_num_labels_set", "The number of assigned labels", - ParameterProperties()); + SG_ADD(&m_num_classes, "m_num_classes", "The number of classes"); + SG_ADD(&m_num_labels_set, "m_num_labels_set", "The number of assigned labels"); SG_ADD( - &m_labels_vector, "labels_vector", "The labels vector", - ParameterProperties()); + &m_labels_vector, "labels_vector", "The labels vector"); m_num_classes = 0; m_num_labels_set = 0; diff --git a/src/shogun/structure/MultilabelCLRModel.cpp b/src/shogun/structure/MultilabelCLRModel.cpp index 910f58c58c3..cdab7a405b3 100644 --- a/src/shogun/structure/MultilabelCLRModel.cpp +++ b/src/shogun/structure/MultilabelCLRModel.cpp @@ -38,8 +38,7 @@ CMultilabelCLRModel::~CMultilabelCLRModel() void CMultilabelCLRModel::init() { - SG_ADD(&m_num_classes, "num_classes", "Number of (binary) class assignment per label", - ParameterProperties()); + SG_ADD(&m_num_classes, "num_classes", "Number of (binary) class assignment per label"); m_num_classes = 0; } diff --git a/src/shogun/structure/MultilabelModel.cpp b/src/shogun/structure/MultilabelModel.cpp index b150035b200..bb66135a49b 100644 --- a/src/shogun/structure/MultilabelModel.cpp +++ b/src/shogun/structure/MultilabelModel.cpp @@ -36,12 +36,9 @@ CStructuredLabels * CMultilabelModel::structured_labels_factory(int32_t num_labe void CMultilabelModel::init() { - SG_ADD(&m_false_positive, "false_positive", "Misclassification cost for false positive", - ParameterProperties()); - SG_ADD(&m_false_negative, "false_negative", "Misclassification cost for false negative", - ParameterProperties()); - SG_ADD(&m_num_classes, "num_classes", "Number of (binary) class assignment per label", - ParameterProperties()); + SG_ADD(&m_false_positive, "false_positive", "Misclassification cost for false positive"); + SG_ADD(&m_false_negative, "false_negative", "Misclassification cost for false negative"); + SG_ADD(&m_num_classes, "num_classes", "Number of (binary) class assignment per label"); m_false_positive = 1; m_false_negative = 1; m_num_classes = 0; diff --git a/src/shogun/structure/MultilabelSOLabels.cpp b/src/shogun/structure/MultilabelSOLabels.cpp index a6c7da37527..c969b4b44fc 100644 --- a/src/shogun/structure/MultilabelSOLabels.cpp +++ b/src/shogun/structure/MultilabelSOLabels.cpp @@ -41,10 +41,8 @@ CMultilabelSOLabels::CMultilabelSOLabels(CMultilabelLabels * multilabel_labels) void CMultilabelSOLabels::init() { - SG_ADD((CSGObject **)&m_multilabel_labels, "multilabel_labels", "multilabel labels object", - ParameterProperties()); - SG_ADD(&m_last_set_label, "last_set_label", "index of the last label added using add_label() method", - ParameterProperties()); + SG_ADD((CSGObject **)&m_multilabel_labels, 
"multilabel_labels", "multilabel labels object"); + SG_ADD(&m_last_set_label, "last_set_label", "index of the last label added using add_label() method"); m_last_set_label = 0; } diff --git a/src/shogun/structure/PrimalMosekSOSVM.cpp b/src/shogun/structure/PrimalMosekSOSVM.cpp index 00e9665b594..55e2feefe25 100644 --- a/src/shogun/structure/PrimalMosekSOSVM.cpp +++ b/src/shogun/structure/PrimalMosekSOSVM.cpp @@ -34,12 +34,12 @@ CPrimalMosekSOSVM::CPrimalMosekSOSVM( void CPrimalMosekSOSVM::init() { - SG_ADD(&m_slacks, "slacks", "Slacks vector", ParameterProperties()); + SG_ADD(&m_slacks, "slacks", "Slacks vector"); //FIXME model selection available for SO machines - SG_ADD(&m_regularization, "regularization", "Regularization constant", ParameterProperties()); - SG_ADD(&m_epsilon, "epsilon", "Violation tolerance", ParameterProperties()); - SG_ADD(&m_lb, "lb", "Lower bounds", ParameterProperties()); - SG_ADD(&m_ub, "ub", "Upper bounds", ParameterProperties()); + SG_ADD(&m_regularization, "regularization", "Regularization constant"); + SG_ADD(&m_epsilon, "epsilon", "Violation tolerance"); + SG_ADD(&m_lb, "lb", "Lower bounds"); + SG_ADD(&m_ub, "ub", "Upper bounds"); m_regularization = 1.0; m_epsilon = 0.0; diff --git a/src/shogun/structure/SOSVMHelper.cpp b/src/shogun/structure/SOSVMHelper.cpp index b7ab7ff3555..06df2890709 100644 --- a/src/shogun/structure/SOSVMHelper.cpp +++ b/src/shogun/structure/SOSVMHelper.cpp @@ -29,13 +29,13 @@ CSOSVMHelper::~CSOSVMHelper() void CSOSVMHelper::init() { - SG_ADD(&m_primal, "primal", "History of primal values", ParameterProperties()); - SG_ADD(&m_dual, "dual", "History of dual values", ParameterProperties()); - SG_ADD(&m_duality_gap, "duality_gap", "History of duality gaps", ParameterProperties()); - SG_ADD(&m_eff_pass, "eff_pass", "Effective passes", ParameterProperties()); - SG_ADD(&m_train_error, "train_error", "History of training errors", ParameterProperties()); - SG_ADD(&m_tracker, "tracker", "Tracker of training progress", ParameterProperties()); - SG_ADD(&m_bufsize, "bufsize", "Buffer size", ParameterProperties()); + SG_ADD(&m_primal, "primal", "History of primal values"); + SG_ADD(&m_dual, "dual", "History of dual values"); + SG_ADD(&m_duality_gap, "duality_gap", "History of duality gaps"); + SG_ADD(&m_eff_pass, "eff_pass", "Effective passes"); + SG_ADD(&m_train_error, "train_error", "History of training errors"); + SG_ADD(&m_tracker, "tracker", "Tracker of training progress"); + SG_ADD(&m_bufsize, "bufsize", "Buffer size"); m_tracker = 0; m_bufsize = 1000; diff --git a/src/shogun/structure/SequenceLabels.cpp b/src/shogun/structure/SequenceLabels.cpp index af8c7f21418..aed35df1675 100644 --- a/src/shogun/structure/SequenceLabels.cpp +++ b/src/shogun/structure/SequenceLabels.cpp @@ -46,5 +46,5 @@ void CSequenceLabels::add_vector_label(SGVector< int32_t > label) void CSequenceLabels::init() { - SG_ADD(&m_num_states, "m_num_states", "Number of states", ParameterProperties()); + SG_ADD(&m_num_states, "m_num_states", "Number of states"); } diff --git a/src/shogun/structure/StateModel.cpp b/src/shogun/structure/StateModel.cpp index 9b4f1c96992..9e838a91d3a 100644 --- a/src/shogun/structure/StateModel.cpp +++ b/src/shogun/structure/StateModel.cpp @@ -29,13 +29,12 @@ int32_t CStateModel::get_num_transmission_params() const void CStateModel::init() { - SG_ADD(&m_num_states, "m_num_states", "The number of states", ParameterProperties()); + SG_ADD(&m_num_states, "m_num_states", "The number of states"); SG_ADD(&m_num_transmission_params, 
"m_num_tranmission_params", - "The number of tranmission parameters", ParameterProperties()); - SG_ADD(&m_state_loss_mat, "m_state_loss_mat", "The state loss matrix", - ParameterProperties()); - SG_ADD(&m_p, "m_p", "The distribution of start states", ParameterProperties()); - SG_ADD(&m_q, "m_q", "The distribution of stop states", ParameterProperties()); + "The number of tranmission parameters"); + SG_ADD(&m_state_loss_mat, "m_state_loss_mat", "The state loss matrix"); + SG_ADD(&m_p, "m_p", "The distribution of start states"); + SG_ADD(&m_q, "m_q", "The distribution of stop states"); m_num_states = 0; m_num_transmission_params = 0; diff --git a/src/shogun/structure/StochasticSOSVM.cpp b/src/shogun/structure/StochasticSOSVM.cpp index dd4669689d9..18e2886c17f 100644 --- a/src/shogun/structure/StochasticSOSVM.cpp +++ b/src/shogun/structure/StochasticSOSVM.cpp @@ -39,11 +39,11 @@ CStochasticSOSVM::CStochasticSOSVM( void CStochasticSOSVM::init() { - SG_ADD(&m_lambda, "lambda", "Regularization constant", ParameterProperties()); - SG_ADD(&m_num_iter, "num_iter", "Number of iterations", ParameterProperties()); - SG_ADD(&m_do_weighted_averaging, "do_weighted_averaging", "Do weighted averaging", ParameterProperties()); - SG_ADD(&m_debug_multiplier, "debug_multiplier", "Debug multiplier", ParameterProperties()); - SG_ADD(&m_rand_seed, "rand_seed", "Random seed", ParameterProperties()); + SG_ADD(&m_lambda, "lambda", "Regularization constant"); + SG_ADD(&m_num_iter, "num_iter", "Number of iterations"); + SG_ADD(&m_do_weighted_averaging, "do_weighted_averaging", "Do weighted averaging"); + SG_ADD(&m_debug_multiplier, "debug_multiplier", "Debug multiplier"); + SG_ADD(&m_rand_seed, "rand_seed", "Random seed"); m_lambda = 1.0; m_num_iter = 50; diff --git a/src/shogun/structure/StructuredModel.cpp b/src/shogun/structure/StructuredModel.cpp index c6008f8c4e3..de12b2f05ce 100644 --- a/src/shogun/structure/StructuredModel.cpp +++ b/src/shogun/structure/StructuredModel.cpp @@ -162,10 +162,8 @@ float64_t CStructuredModel::delta_loss(CStructuredData* y1, CStructuredData* y2) void CStructuredModel::init() { - SG_ADD((CSGObject**) &m_labels, "m_labels", "Structured labels", - ParameterProperties()); - SG_ADD((CSGObject**) &m_features, "m_features", "Feature vectors", - ParameterProperties()); + SG_ADD((CSGObject**) &m_labels, "m_labels", "Structured labels"); + SG_ADD((CSGObject**) &m_features, "m_features", "Feature vectors"); m_features = NULL; m_labels = NULL; diff --git a/src/shogun/transfer/domain_adaptation/DomainAdaptationMulticlassLibLinear.cpp b/src/shogun/transfer/domain_adaptation/DomainAdaptationMulticlassLibLinear.cpp index 21c2b899281..dcdfcfda52d 100644 --- a/src/shogun/transfer/domain_adaptation/DomainAdaptationMulticlassLibLinear.cpp +++ b/src/shogun/transfer/domain_adaptation/DomainAdaptationMulticlassLibLinear.cpp @@ -72,8 +72,7 @@ void CDomainAdaptationMulticlassLibLinear::set_source_machine( void CDomainAdaptationMulticlassLibLinear::register_parameters() { - SG_ADD((CSGObject**)&m_source_machine, "source_machine", "source domain machine", - ParameterProperties()); + SG_ADD((CSGObject**)&m_source_machine, "source_machine", "source domain machine"); SG_ADD(&m_train_factor, "train_factor", "factor of target domain regularization", ParameterProperties::HYPER); SG_ADD(&m_source_bias, "source_bias", "bias to source domain", diff --git a/src/shogun/transfer/domain_adaptation/DomainAdaptationSVM.cpp b/src/shogun/transfer/domain_adaptation/DomainAdaptationSVM.cpp index 34a938fc876..8711d1873e2 100644 
--- a/src/shogun/transfer/domain_adaptation/DomainAdaptationSVM.cpp +++ b/src/shogun/transfer/domain_adaptation/DomainAdaptationSVM.cpp @@ -179,8 +179,7 @@ void CDomainAdaptationSVM::init() B = 0; train_factor = 1.0; - SG_ADD((CSGObject**) &presvm, "presvm", "SVM to regularize against.", - ParameterProperties()); + SG_ADD((CSGObject**) &presvm, "presvm", "SVM to regularize against."); SG_ADD(&B, "B", "regularization parameter B.", ParameterProperties::HYPER); SG_ADD(&train_factor, "train_factor", "flag to switch off regularization in training.", ParameterProperties::HYPER); diff --git a/src/shogun/transfer/multitask/LibLinearMTL.cpp b/src/shogun/transfer/multitask/LibLinearMTL.cpp index ef2db2fb76a..eb317cb5cc6 100644 --- a/src/shogun/transfer/multitask/LibLinearMTL.cpp +++ b/src/shogun/transfer/multitask/LibLinearMTL.cpp @@ -53,11 +53,9 @@ void CLibLinearMTL::init() SG_ADD(&C1, "C1", "C Cost constant 1.", ParameterProperties::HYPER); SG_ADD(&C2, "C2", "C Cost constant 2.", ParameterProperties::HYPER); - SG_ADD(&use_bias, "use_bias", "Indicates if bias is used.", - ParameterProperties()); - SG_ADD(&epsilon, "epsilon", "Convergence precision.", ParameterProperties()); - SG_ADD(&max_iterations, "max_iterations", "Max number of iterations.", - ParameterProperties()); + SG_ADD(&use_bias, "use_bias", "Indicates if bias is used."); + SG_ADD(&epsilon, "epsilon", "Convergence precision."); + SG_ADD(&max_iterations, "max_iterations", "Max number of iterations."); } diff --git a/src/shogun/transfer/multitask/MultitaskKernelPlifNormalizer.h b/src/shogun/transfer/multitask/MultitaskKernelPlifNormalizer.h index 8c4a52b49e7..6db20ef59c8 100644 --- a/src/shogun/transfer/multitask/MultitaskKernelPlifNormalizer.h +++ b/src/shogun/transfer/multitask/MultitaskKernelPlifNormalizer.h @@ -326,8 +326,8 @@ class CMultitaskKernelPlifNormalizer: public CMultitaskKernelMklNormalizer */ virtual void register_params() { - SG_ADD(&num_tasks, "num_tasks", "the number of tasks", ParameterProperties()); - SG_ADD(&num_betas, "num_betas", "the number of weights", ParameterProperties()); + SG_ADD(&num_tasks, "num_tasks", "the number of tasks"); + SG_ADD(&num_betas, "num_betas", "the number of weights"); m_parameters->add_vector((SGString**)&distance_matrix, &num_tasksqr, "distance_matrix", "distance between tasks"); m_parameters->add_vector((SGString**)&similarity_matrix, &num_tasksqr, "similarity_matrix", "similarity between tasks"); diff --git a/src/shogun/transfer/multitask/Task.cpp b/src/shogun/transfer/multitask/Task.cpp index d79cabcffbf..e3accf69e61 100644 --- a/src/shogun/transfer/multitask/Task.cpp +++ b/src/shogun/transfer/multitask/Task.cpp @@ -44,9 +44,9 @@ void CTask::init() m_subtasks = new CList(true); SG_REF(m_subtasks); - SG_ADD((CSGObject**)&m_subtasks,"subtasks","subtasks of given task", ParameterProperties()); - SG_ADD(&m_indices,"indices","indices of task", ParameterProperties()); - SG_ADD(&m_weight,"weight","weight of task", ParameterProperties()); + SG_ADD((CSGObject**)&m_subtasks,"subtasks","subtasks of given task"); + SG_ADD(&m_indices,"indices","indices of task"); + SG_ADD(&m_weight,"weight","weight of task"); } CTask::~CTask() diff --git a/src/shogun/transformer/Transformer.cpp b/src/shogun/transformer/Transformer.cpp index 8210eb1b8de..265d544b57b 100644 --- a/src/shogun/transformer/Transformer.cpp +++ b/src/shogun/transformer/Transformer.cpp @@ -10,8 +10,7 @@ namespace shogun m_fitted = false; SG_ADD( - &m_fitted, "is_fitted", "Whether the transformer has been fitted.", - ParameterProperties()); 
+ &m_fitted, "is_fitted", "Whether the transformer has been fitted."); } void CTransformer::assert_fitted() const diff --git a/tests/unit/optimization/NLOPTMinimizer_unittest.cc b/tests/unit/optimization/NLOPTMinimizer_unittest.cc index 64cf423473f..8f0f6d602c7 100644 --- a/tests/unit/optimization/NLOPTMinimizer_unittest.cc +++ b/tests/unit/optimization/NLOPTMinimizer_unittest.cc @@ -53,8 +53,7 @@ void CPiecewiseQuadraticObject2::init() SG_ADD(&m_init_x, "init_x", "init_x", ParameterProperties::HYPER | ParameterProperties::GRADIENT); - SG_ADD(&m_truth_x, "truth_x", "truth_x", - ParameterProperties()); + SG_ADD(&m_truth_x, "truth_x", "truth_x"); } void CPiecewiseQuadraticObject2::set_init_x(SGVector init_x) diff --git a/tests/unit/optimization/lbfgs/LBFGSMinimizer_unittest.cc b/tests/unit/optimization/lbfgs/LBFGSMinimizer_unittest.cc index 0cb48848349..9f4abfd9e85 100644 --- a/tests/unit/optimization/lbfgs/LBFGSMinimizer_unittest.cc +++ b/tests/unit/optimization/lbfgs/LBFGSMinimizer_unittest.cc @@ -125,8 +125,7 @@ void CPiecewiseQuadraticObject::init() SG_ADD(&m_init_x, "init_x", "init_x", ParameterProperties::HYPER | ParameterProperties::GRADIENT); - SG_ADD(&m_truth_x, "truth_x", "truth_x", - ParameterProperties()); + SG_ADD(&m_truth_x, "truth_x", "truth_x"); } void CPiecewiseQuadraticObject::set_init_x(SGVector init_x) From a601c0a5efe65d3a84602730791a2caa571380bf Mon Sep 17 00:00:00 2001 From: Gil Date: Fri, 16 Nov 2018 16:54:09 +0000 Subject: [PATCH 4/6] started adding param properties [ci skip] --- src/shogun/classifier/mkl/MKL.cpp | 8 +++++--- src/shogun/machine/LinearMachine.cpp | 4 ++-- src/shogun/machine/LinearStructuredOutputMachine.cpp | 2 +- src/shogun/machine/OnlineLinearMachine.cpp | 4 ++-- 4 files changed, 10 insertions(+), 8 deletions(-) diff --git a/src/shogun/classifier/mkl/MKL.cpp b/src/shogun/classifier/mkl/MKL.cpp index 70ac705fcdd..5a48506b322 100644 --- a/src/shogun/classifier/mkl/MKL.cpp +++ b/src/shogun/classifier/mkl/MKL.cpp @@ -272,10 +272,12 @@ void CMKL::register_params() lp_initialized = false; SG_ADD((CMachine**)&svm, "svm", "wrapper svm"); - SG_ADD(&C_mkl, "C_mkl", "C mkl"); + SG_ADD(&C_mkl, "C_mkl", "C mkl", ParameterProperties::HYPER); SG_ADD(&mkl_norm, "mkl_norm", "norm used in mkl"); - SG_ADD(&ent_lambda, "ent_lambda", "elastic net sparsity trade-off parameter"); - SG_ADD(&mkl_block_norm, "mkl_block_norm", "mkl sparse trade-off parameter"); + SG_ADD(&ent_lambda, "ent_lambda", "elastic net sparsity trade-off parameter", + ParameterProperties::HYPER); + SG_ADD(&mkl_block_norm, "mkl_block_norm", "mkl sparse trade-off parameter", + ParameterProperties::HYPER); m_parameters->add_vector(&beta_local, &beta_local_size, "beta_local", "subkernel weights on L1 term of elastic net mkl"); watch_param("beta_local", &beta_local, &beta_local_size); diff --git a/src/shogun/machine/LinearMachine.cpp b/src/shogun/machine/LinearMachine.cpp index 6df88303300..ed03a2eca9c 100644 --- a/src/shogun/machine/LinearMachine.cpp +++ b/src/shogun/machine/LinearMachine.cpp @@ -35,8 +35,8 @@ void CLinearMachine::init() bias = 0; features = NULL; - SG_ADD(&m_w, "w", "Parameter vector w."); - SG_ADD(&bias, "bias", "Bias b."); + SG_ADD(&m_w, "w", "Parameter vector w.", ParameterProperties::MODEL); + SG_ADD(&bias, "bias", "Bias b.", ParameterProperties::MODEL); SG_ADD( (CFeatures**)&features, "features", "Feature object."); } diff --git a/src/shogun/machine/LinearStructuredOutputMachine.cpp b/src/shogun/machine/LinearStructuredOutputMachine.cpp index 67664cd197c..57a2cd8a182 100644 --- 
a/src/shogun/machine/LinearStructuredOutputMachine.cpp +++ b/src/shogun/machine/LinearStructuredOutputMachine.cpp @@ -68,7 +68,7 @@ CStructuredLabels* CLinearStructuredOutputMachine::apply_structured(CFeatures* d void CLinearStructuredOutputMachine::register_parameters() { - SG_ADD(&m_w, "m_w", "Weight vector"); + SG_ADD(&m_w, "m_w", "Weight vector", ParameterProperties::MODEL); } void CLinearStructuredOutputMachine::store_model_features() diff --git a/src/shogun/machine/OnlineLinearMachine.cpp b/src/shogun/machine/OnlineLinearMachine.cpp index b64eb927221..5e0373aeb07 100644 --- a/src/shogun/machine/OnlineLinearMachine.cpp +++ b/src/shogun/machine/OnlineLinearMachine.cpp @@ -19,8 +19,8 @@ using namespace shogun; COnlineLinearMachine::COnlineLinearMachine() : CMachine(), bias(0), features(NULL) { - SG_ADD(&m_w, "m_w", "Parameter vector w."); - SG_ADD(&bias, "bias", "Bias b."); + SG_ADD(&m_w, "m_w", "Parameter vector w.", ParameterProperties::MODEL); + SG_ADD(&bias, "bias", "Bias b.", ParameterProperties::MODEL); SG_ADD((CSGObject**) &features, "features", "Feature object."); } From bd9e09e76898be9487c6518f158570bbd31cad83 Mon Sep 17 00:00:00 2001 From: Gil Date: Fri, 16 Nov 2018 22:18:51 +0000 Subject: [PATCH 5/6] refactored SG_ADD3 calls --- src/shogun/converter/LocallyLinearEmbedding.cpp | 2 +- src/shogun/converter/ManifoldSculpting.cpp | 2 +- src/shogun/distributions/Gaussian.cpp | 10 +++++----- src/shogun/distributions/KernelDensity.cpp | 10 +++++----- src/shogun/distributions/MixtureModel.cpp | 8 ++++---- src/shogun/loss/HuberLoss.cpp | 2 +- src/shogun/machine/RandomForest.cpp | 2 +- src/shogun/machine/StochasticGBMachine.cpp | 14 +++++++------- src/shogun/multiclass/MulticlassLibLinear.cpp | 8 ++++---- src/shogun/multiclass/tree/NbodyTree.cpp | 12 ++++++------ src/shogun/preprocessor/HomogeneousKernelMap.cpp | 12 ++++++------ src/shogun/regression/svr/LibLinearRegression.cpp | 6 +++--- .../optimization/StochasticMinimizers_unittest.cc | 4 ++-- 13 files changed, 46 insertions(+), 46 deletions(-) diff --git a/src/shogun/converter/LocallyLinearEmbedding.cpp b/src/shogun/converter/LocallyLinearEmbedding.cpp index 34edd88242e..3e481cbe94b 100644 --- a/src/shogun/converter/LocallyLinearEmbedding.cpp +++ b/src/shogun/converter/LocallyLinearEmbedding.cpp @@ -28,7 +28,7 @@ void CLocallyLinearEmbedding::init() { SG_ADD(&m_k, "k", "number of neighbors", ParameterProperties::HYPER); SG_ADD(&m_nullspace_shift, "nullspace_shift", - "nullspace finding regularization shift",ParameterProperties()); + "nullspace finding regularization shift"); SG_ADD(&m_reconstruction_shift, "reconstruction_shift", "shift used to regularize reconstruction step"); } diff --git a/src/shogun/converter/ManifoldSculpting.cpp b/src/shogun/converter/ManifoldSculpting.cpp index 6d43780e7f8..2d9ce009ee8 100644 --- a/src/shogun/converter/ManifoldSculpting.cpp +++ b/src/shogun/converter/ManifoldSculpting.cpp @@ -25,7 +25,7 @@ void CManifoldSculpting::init() { SG_ADD(&m_k, "k", "number of neighbors"); SG_ADD(&m_squishing_rate, "quishing_rate", - "squishing rate",ParameterProperties()); + "squishing rate"); SG_ADD(&m_max_iteration, "max_iteration", "maximum number of algorithm's iterations"); } diff --git a/src/shogun/distributions/Gaussian.cpp b/src/shogun/distributions/Gaussian.cpp index 5a2acdeeaaf..e059bd052ac 100644 --- a/src/shogun/distributions/Gaussian.cpp +++ b/src/shogun/distributions/Gaussian.cpp @@ -337,11 +337,11 @@ SGMatrix CGaussian::get_cov() void CGaussian::register_params() { - SG_ADD(&m_u, "m_u", "Unitary 
matrix.",ParameterProperties()); - SG_ADD(&m_d, "m_d", "Diagonal.",ParameterProperties()); - SG_ADD(&m_mean, "m_mean", "Mean.",ParameterProperties()); - SG_ADD(&m_constant, "m_constant", "Constant part.",ParameterProperties()); - SG_ADD((machine_int_t*)&m_cov_type, "m_cov_type", "Covariance type.",ParameterProperties()); + SG_ADD(&m_u, "m_u", "Unitary matrix."); + SG_ADD(&m_d, "m_d", "Diagonal."); + SG_ADD(&m_mean, "m_mean", "Mean."); + SG_ADD(&m_constant, "m_constant", "Constant part."); + SG_ADD((machine_int_t*)&m_cov_type, "m_cov_type", "Covariance type."); } void CGaussian::decompose_cov(SGMatrix cov) diff --git a/src/shogun/distributions/KernelDensity.cpp b/src/shogun/distributions/KernelDensity.cpp index 9bd3765d78e..f75dd2951c8 100644 --- a/src/shogun/distributions/KernelDensity.cpp +++ b/src/shogun/distributions/KernelDensity.cpp @@ -159,9 +159,9 @@ void CKernelDensity::init() m_rtol=0; tree=NULL; - SG_ADD(&m_bandwidth,"m_bandwidth","bandwidth",ParameterProperties()); - SG_ADD(&m_leaf_size,"m_leaf_size","leaf size",ParameterProperties()); - SG_ADD(&m_atol,"m_atol","absolute tolerance",ParameterProperties()); - SG_ADD(&m_rtol,"m_rtol","relative tolerance",ParameterProperties()); - SG_ADD((CSGObject**) &tree,"tree","tree",ParameterProperties()); + SG_ADD(&m_bandwidth,"m_bandwidth","bandwidth"); + SG_ADD(&m_leaf_size,"m_leaf_size","leaf size"); + SG_ADD(&m_atol,"m_atol","absolute tolerance"); + SG_ADD(&m_rtol,"m_rtol","relative tolerance"); + SG_ADD((CSGObject**) &tree,"tree","tree"); } \ No newline at end of file diff --git a/src/shogun/distributions/MixtureModel.cpp b/src/shogun/distributions/MixtureModel.cpp index c174c7d43e9..6f8a96f09c2 100644 --- a/src/shogun/distributions/MixtureModel.cpp +++ b/src/shogun/distributions/MixtureModel.cpp @@ -212,8 +212,8 @@ void CMixtureModel::init() m_conv_tol=1e-8; m_max_iters=1000; - SG_ADD((CSGObject**)&m_components,"m_components","components of mixture",ParameterProperties()); - SG_ADD(&m_weights,"m_weights","weights of components",ParameterProperties()); - SG_ADD(&m_conv_tol,"m_conv_tol","convergence tolerance",ParameterProperties()); - SG_ADD(&m_max_iters,"m_max_iters","max number of iterations",ParameterProperties()); + SG_ADD((CSGObject**)&m_components,"m_components","components of mixture"); + SG_ADD(&m_weights,"m_weights","weights of components"); + SG_ADD(&m_conv_tol,"m_conv_tol","convergence tolerance"); + SG_ADD(&m_max_iters,"m_max_iters","max number of iterations"); } diff --git a/src/shogun/loss/HuberLoss.cpp b/src/shogun/loss/HuberLoss.cpp index 8883fc75d03..41e302ae7ba 100644 --- a/src/shogun/loss/HuberLoss.cpp +++ b/src/shogun/loss/HuberLoss.cpp @@ -95,5 +95,5 @@ void CHuberLoss::init() { m_delta=0; - SG_ADD(&m_delta,"m_delta","delta",ParameterProperties()); + SG_ADD(&m_delta,"m_delta","delta"); } diff --git a/src/shogun/machine/RandomForest.cpp b/src/shogun/machine/RandomForest.cpp index 428355ffdea..bf92ca56283 100644 --- a/src/shogun/machine/RandomForest.cpp +++ b/src/shogun/machine/RandomForest.cpp @@ -185,5 +185,5 @@ void CRandomForest::init() m_machine=new CRandomCARTree(); m_weights=SGVector(); - SG_ADD(&m_weights,"m_weights","weights",ParameterProperties()); + SG_ADD(&m_weights,"m_weights","weights"); } diff --git a/src/shogun/machine/StochasticGBMachine.cpp b/src/shogun/machine/StochasticGBMachine.cpp index 42856e41e41..ef981910400 100644 --- a/src/shogun/machine/StochasticGBMachine.cpp +++ b/src/shogun/machine/StochasticGBMachine.cpp @@ -401,11 +401,11 @@ void CStochasticGBMachine::init() m_gamma=new CDynamicArray(); 
SG_REF(m_gamma); - SG_ADD((CSGObject**)&m_machine,"m_machine","machine",ParameterProperties()); - SG_ADD((CSGObject**)&m_loss,"m_loss","loss function",ParameterProperties()); - SG_ADD(&m_num_iter,"m_num_iter","number of iterations",ParameterProperties()); - SG_ADD(&m_subset_frac,"m_subset_frac","subset fraction",ParameterProperties()); - SG_ADD(&m_learning_rate,"m_learning_rate","learning rate",ParameterProperties()); - SG_ADD((CSGObject**)&m_weak_learners,"m_weak_learners","array of weak learners",ParameterProperties()); - SG_ADD((CSGObject**)&m_gamma,"m_gamma","array of learner weights",ParameterProperties()); + SG_ADD((CSGObject**)&m_machine,"m_machine","machine"); + SG_ADD((CSGObject**)&m_loss,"m_loss","loss function"); + SG_ADD(&m_num_iter,"m_num_iter","number of iterations"); + SG_ADD(&m_subset_frac,"m_subset_frac","subset fraction"); + SG_ADD(&m_learning_rate,"m_learning_rate","learning rate"); + SG_ADD((CSGObject**)&m_weak_learners,"m_weak_learners","array of weak learners"); + SG_ADD((CSGObject**)&m_gamma,"m_gamma","array of learner weights"); } diff --git a/src/shogun/multiclass/MulticlassLibLinear.cpp b/src/shogun/multiclass/MulticlassLibLinear.cpp index 3d5350b60a9..93875b48697 100644 --- a/src/shogun/multiclass/MulticlassLibLinear.cpp +++ b/src/shogun/multiclass/MulticlassLibLinear.cpp @@ -43,10 +43,10 @@ void CMulticlassLibLinear::init_defaults() void CMulticlassLibLinear::register_parameters() { SG_ADD(&m_C, "m_C", "regularization constant",ParameterProperties::HYPER); - SG_ADD(&m_epsilon, "m_epsilon", "tolerance epsilon",ParameterProperties()); - SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations",ParameterProperties()); - SG_ADD(&m_use_bias, "m_use_bias", "indicates whether bias should be used",ParameterProperties()); - SG_ADD(&m_save_train_state, "m_save_train_state", "indicates whether bias should be used",ParameterProperties()); + SG_ADD(&m_epsilon, "m_epsilon", "tolerance epsilon"); + SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations"); + SG_ADD(&m_use_bias, "m_use_bias", "indicates whether bias should be used"); + SG_ADD(&m_save_train_state, "m_save_train_state", "indicates whether bias should be used"); } CMulticlassLibLinear::~CMulticlassLibLinear() diff --git a/src/shogun/multiclass/tree/NbodyTree.cpp b/src/shogun/multiclass/tree/NbodyTree.cpp index 8bedd4361da..8bfb33107b0 100644 --- a/src/shogun/multiclass/tree/NbodyTree.cpp +++ b/src/shogun/multiclass/tree/NbodyTree.cpp @@ -584,10 +584,10 @@ void CNbodyTree::init() m_knn_dists=SGMatrix(); m_knn_indices=SGMatrix(); - SG_ADD(&m_data,"m_data","data matrix",ParameterProperties()); - SG_ADD(&m_leaf_size,"m_leaf_size","leaf size",ParameterProperties()); - SG_ADD(&m_vec_id,"m_vec_id","id of vectors",ParameterProperties()); - SG_ADD(&m_knn_done,"knn_done","knn done or not",ParameterProperties()); - SG_ADD(&m_knn_dists,"m_knn_dists","knn distances",ParameterProperties()); - SG_ADD(&m_knn_indices,"knn_indices","knn indices",ParameterProperties()); + SG_ADD(&m_data,"m_data","data matrix"); + SG_ADD(&m_leaf_size,"m_leaf_size","leaf size"); + SG_ADD(&m_vec_id,"m_vec_id","id of vectors"); + SG_ADD(&m_knn_done,"knn_done","knn done or not"); + SG_ADD(&m_knn_dists,"m_knn_dists","knn distances"); + SG_ADD(&m_knn_indices,"knn_indices","knn indices"); } \ No newline at end of file diff --git a/src/shogun/preprocessor/HomogeneousKernelMap.cpp b/src/shogun/preprocessor/HomogeneousKernelMap.cpp index f800e9cc19c..e8846836f04 100644 --- a/src/shogun/preprocessor/HomogeneousKernelMap.cpp +++ 
From 2458eeefe2698756dc1ebc9ad202a3da9e123f69 Mon Sep 17 00:00:00 2001
From: Gil
Date: Fri, 16 Nov 2018 22:18:51 +0000
Subject: [PATCH 6/6] refactored SG_ADD3 calls [ci skip]
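
* drop the explicit default-constructed ParameterProperties() from the
  remaining three-argument SG_ADD call sites, so they use the plain
  three-argument form of the macro
* as a minimal sketch of the change (the lines below mirror the
  LibLinearRegression.cpp hunk in this patch; nothing new is introduced):

      // before: default properties constructed explicitly
      SG_ADD(&m_epsilon, "m_epsilon", "tolerance epsilon",ParameterProperties());
      // after: the three-argument call implies the default properties
      SG_ADD(&m_epsilon, "m_epsilon", "tolerance epsilon");
      // non-default properties are still passed as the fourth argument
      SG_ADD(&m_C, "m_C", "regularization constant",ParameterProperties::HYPER);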
---
 src/shogun/converter/LocallyLinearEmbedding.cpp    |  2 +-
 src/shogun/converter/ManifoldSculpting.cpp         |  2 +-
 src/shogun/distributions/Gaussian.cpp              | 10 +++++-----
 src/shogun/distributions/KernelDensity.cpp         | 10 +++++-----
 src/shogun/distributions/MixtureModel.cpp          |  8 ++++----
 src/shogun/loss/HuberLoss.cpp                      |  2 +-
 src/shogun/machine/RandomForest.cpp                |  2 +-
 src/shogun/machine/StochasticGBMachine.cpp         | 14 +++++++-------
 src/shogun/multiclass/MulticlassLibLinear.cpp      |  8 ++++----
 src/shogun/multiclass/tree/NbodyTree.cpp           | 12 ++++++------
 src/shogun/preprocessor/HomogeneousKernelMap.cpp   | 12 ++++++------
 src/shogun/regression/svr/LibLinearRegression.cpp  |  6 +++---
 .../optimization/StochasticMinimizers_unittest.cc  |  4 ++--
 13 files changed, 46 insertions(+), 46 deletions(-)

diff --git a/src/shogun/converter/LocallyLinearEmbedding.cpp b/src/shogun/converter/LocallyLinearEmbedding.cpp
index 34edd88242e..3e481cbe94b 100644
--- a/src/shogun/converter/LocallyLinearEmbedding.cpp
+++ b/src/shogun/converter/LocallyLinearEmbedding.cpp
@@ -28,7 +28,7 @@ void CLocallyLinearEmbedding::init()
 {
 	SG_ADD(&m_k, "k", "number of neighbors", ParameterProperties::HYPER);
 	SG_ADD(&m_nullspace_shift, "nullspace_shift",
-		"nullspace finding regularization shift",ParameterProperties());
+		"nullspace finding regularization shift");
 	SG_ADD(&m_reconstruction_shift, "reconstruction_shift",
 		"shift used to regularize reconstruction step");
 }
diff --git a/src/shogun/converter/ManifoldSculpting.cpp b/src/shogun/converter/ManifoldSculpting.cpp
index 6d43780e7f8..2d9ce009ee8 100644
--- a/src/shogun/converter/ManifoldSculpting.cpp
+++ b/src/shogun/converter/ManifoldSculpting.cpp
@@ -25,7 +25,7 @@ void CManifoldSculpting::init()
 {
 	SG_ADD(&m_k, "k", "number of neighbors");
 	SG_ADD(&m_squishing_rate, "quishing_rate",
-		"squishing rate",ParameterProperties());
+		"squishing rate");
 	SG_ADD(&m_max_iteration, "max_iteration",
 		"maximum number of algorithm's iterations");
 }
diff --git a/src/shogun/distributions/Gaussian.cpp b/src/shogun/distributions/Gaussian.cpp
index 5a2acdeeaaf..e059bd052ac 100644
--- a/src/shogun/distributions/Gaussian.cpp
+++ b/src/shogun/distributions/Gaussian.cpp
@@ -337,11 +337,11 @@ SGMatrix CGaussian::get_cov()
 
 void CGaussian::register_params()
 {
-	SG_ADD(&m_u, "m_u", "Unitary matrix.",ParameterProperties());
-	SG_ADD(&m_d, "m_d", "Diagonal.",ParameterProperties());
-	SG_ADD(&m_mean, "m_mean", "Mean.",ParameterProperties());
-	SG_ADD(&m_constant, "m_constant", "Constant part.",ParameterProperties());
-	SG_ADD((machine_int_t*)&m_cov_type, "m_cov_type", "Covariance type.",ParameterProperties());
+	SG_ADD(&m_u, "m_u", "Unitary matrix.");
+	SG_ADD(&m_d, "m_d", "Diagonal.");
+	SG_ADD(&m_mean, "m_mean", "Mean.");
+	SG_ADD(&m_constant, "m_constant", "Constant part.");
+	SG_ADD((machine_int_t*)&m_cov_type, "m_cov_type", "Covariance type.");
 }
 
 void CGaussian::decompose_cov(SGMatrix cov)
diff --git a/src/shogun/distributions/KernelDensity.cpp b/src/shogun/distributions/KernelDensity.cpp
index 9bd3765d78e..f75dd2951c8 100644
--- a/src/shogun/distributions/KernelDensity.cpp
+++ b/src/shogun/distributions/KernelDensity.cpp
@@ -159,9 +159,9 @@ void CKernelDensity::init()
 	m_rtol=0;
 	tree=NULL;
 
-	SG_ADD(&m_bandwidth,"m_bandwidth","bandwidth",ParameterProperties());
-	SG_ADD(&m_leaf_size,"m_leaf_size","leaf size",ParameterProperties());
-	SG_ADD(&m_atol,"m_atol","absolute tolerance",ParameterProperties());
-	SG_ADD(&m_rtol,"m_rtol","relative tolerance",ParameterProperties());
-	SG_ADD((CSGObject**) &tree,"tree","tree",ParameterProperties());
+	SG_ADD(&m_bandwidth,"m_bandwidth","bandwidth");
+	SG_ADD(&m_leaf_size,"m_leaf_size","leaf size");
+	SG_ADD(&m_atol,"m_atol","absolute tolerance");
+	SG_ADD(&m_rtol,"m_rtol","relative tolerance");
+	SG_ADD((CSGObject**) &tree,"tree","tree");
 }
\ No newline at end of file
diff --git a/src/shogun/distributions/MixtureModel.cpp b/src/shogun/distributions/MixtureModel.cpp
index c174c7d43e9..6f8a96f09c2 100644
--- a/src/shogun/distributions/MixtureModel.cpp
+++ b/src/shogun/distributions/MixtureModel.cpp
@@ -212,8 +212,8 @@ void CMixtureModel::init()
 	m_conv_tol=1e-8;
 	m_max_iters=1000;
 
-	SG_ADD((CSGObject**)&m_components,"m_components","components of mixture",ParameterProperties());
-	SG_ADD(&m_weights,"m_weights","weights of components",ParameterProperties());
-	SG_ADD(&m_conv_tol,"m_conv_tol","convergence tolerance",ParameterProperties());
-	SG_ADD(&m_max_iters,"m_max_iters","max number of iterations",ParameterProperties());
+	SG_ADD((CSGObject**)&m_components,"m_components","components of mixture");
+	SG_ADD(&m_weights,"m_weights","weights of components");
+	SG_ADD(&m_conv_tol,"m_conv_tol","convergence tolerance");
+	SG_ADD(&m_max_iters,"m_max_iters","max number of iterations");
 }
diff --git a/src/shogun/loss/HuberLoss.cpp b/src/shogun/loss/HuberLoss.cpp
index 8883fc75d03..41e302ae7ba 100644
--- a/src/shogun/loss/HuberLoss.cpp
+++ b/src/shogun/loss/HuberLoss.cpp
@@ -95,5 +95,5 @@ void CHuberLoss::init()
 {
 	m_delta=0;
 
-	SG_ADD(&m_delta,"m_delta","delta",ParameterProperties());
+	SG_ADD(&m_delta,"m_delta","delta");
 }
diff --git a/src/shogun/machine/RandomForest.cpp b/src/shogun/machine/RandomForest.cpp
index 428355ffdea..bf92ca56283 100644
--- a/src/shogun/machine/RandomForest.cpp
+++ b/src/shogun/machine/RandomForest.cpp
@@ -185,5 +185,5 @@ void CRandomForest::init()
 	m_machine=new CRandomCARTree();
 	m_weights=SGVector();
 
-	SG_ADD(&m_weights,"m_weights","weights",ParameterProperties());
+	SG_ADD(&m_weights,"m_weights","weights");
 }
diff --git a/src/shogun/machine/StochasticGBMachine.cpp b/src/shogun/machine/StochasticGBMachine.cpp
index 42856e41e41..ef981910400 100644
--- a/src/shogun/machine/StochasticGBMachine.cpp
+++ b/src/shogun/machine/StochasticGBMachine.cpp
@@ -401,11 +401,11 @@ void CStochasticGBMachine::init()
 	m_gamma=new CDynamicArray();
 	SG_REF(m_gamma);
 
-	SG_ADD((CSGObject**)&m_machine,"m_machine","machine",ParameterProperties());
-	SG_ADD((CSGObject**)&m_loss,"m_loss","loss function",ParameterProperties());
-	SG_ADD(&m_num_iter,"m_num_iter","number of iterations",ParameterProperties());
-	SG_ADD(&m_subset_frac,"m_subset_frac","subset fraction",ParameterProperties());
-	SG_ADD(&m_learning_rate,"m_learning_rate","learning rate",ParameterProperties());
-	SG_ADD((CSGObject**)&m_weak_learners,"m_weak_learners","array of weak learners",ParameterProperties());
-	SG_ADD((CSGObject**)&m_gamma,"m_gamma","array of learner weights",ParameterProperties());
+	SG_ADD((CSGObject**)&m_machine,"m_machine","machine");
+	SG_ADD((CSGObject**)&m_loss,"m_loss","loss function");
+	SG_ADD(&m_num_iter,"m_num_iter","number of iterations");
+	SG_ADD(&m_subset_frac,"m_subset_frac","subset fraction");
+	SG_ADD(&m_learning_rate,"m_learning_rate","learning rate");
+	SG_ADD((CSGObject**)&m_weak_learners,"m_weak_learners","array of weak learners");
+	SG_ADD((CSGObject**)&m_gamma,"m_gamma","array of learner weights");
 }
SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations",ParameterProperties()); - SG_ADD(&m_use_bias, "m_use_bias", "indicates whether bias should be used",ParameterProperties()); - SG_ADD(&m_save_train_state, "m_save_train_state", "indicates whether bias should be used",ParameterProperties()); + SG_ADD(&m_epsilon, "m_epsilon", "tolerance epsilon"); + SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations"); + SG_ADD(&m_use_bias, "m_use_bias", "indicates whether bias should be used"); + SG_ADD(&m_save_train_state, "m_save_train_state", "indicates whether bias should be used"); } CMulticlassLibLinear::~CMulticlassLibLinear() diff --git a/src/shogun/multiclass/tree/NbodyTree.cpp b/src/shogun/multiclass/tree/NbodyTree.cpp index 8bedd4361da..8bfb33107b0 100644 --- a/src/shogun/multiclass/tree/NbodyTree.cpp +++ b/src/shogun/multiclass/tree/NbodyTree.cpp @@ -584,10 +584,10 @@ void CNbodyTree::init() m_knn_dists=SGMatrix(); m_knn_indices=SGMatrix(); - SG_ADD(&m_data,"m_data","data matrix",ParameterProperties()); - SG_ADD(&m_leaf_size,"m_leaf_size","leaf size",ParameterProperties()); - SG_ADD(&m_vec_id,"m_vec_id","id of vectors",ParameterProperties()); - SG_ADD(&m_knn_done,"knn_done","knn done or not",ParameterProperties()); - SG_ADD(&m_knn_dists,"m_knn_dists","knn distances",ParameterProperties()); - SG_ADD(&m_knn_indices,"knn_indices","knn indices",ParameterProperties()); + SG_ADD(&m_data,"m_data","data matrix"); + SG_ADD(&m_leaf_size,"m_leaf_size","leaf size"); + SG_ADD(&m_vec_id,"m_vec_id","id of vectors"); + SG_ADD(&m_knn_done,"knn_done","knn done or not"); + SG_ADD(&m_knn_dists,"m_knn_dists","knn distances"); + SG_ADD(&m_knn_indices,"knn_indices","knn indices"); } \ No newline at end of file diff --git a/src/shogun/preprocessor/HomogeneousKernelMap.cpp b/src/shogun/preprocessor/HomogeneousKernelMap.cpp index f800e9cc19c..e8846836f04 100644 --- a/src/shogun/preprocessor/HomogeneousKernelMap.cpp +++ b/src/shogun/preprocessor/HomogeneousKernelMap.cpp @@ -349,11 +349,11 @@ void CHomogeneousKernelMap::register_params() SG_ADD((machine_int_t*) &m_kernel, "kernel", "Kernel type to use.",ParameterProperties::HYPER); SG_ADD((machine_int_t*) &m_window, "window", "Window type to use.",ParameterProperties::HYPER); SG_ADD(&m_gamma, "gamma", "Homogeneity order.",ParameterProperties::HYPER); - SG_ADD(&m_period, "period", "Approximation order",ParameterProperties()); - SG_ADD(&m_numSubdivisions, "num_subdivisions", "The number of sublevels",ParameterProperties()); - SG_ADD(&m_subdivision, "subdivision", "subdivision.",ParameterProperties()); + SG_ADD(&m_period, "period", "Approximation order"); + SG_ADD(&m_numSubdivisions, "num_subdivisions", "The number of sublevels"); + SG_ADD(&m_subdivision, "subdivision", "subdivision."); SG_ADD(&m_order, "order", "The order",ParameterProperties::HYPER); - SG_ADD(&m_minExponent, "min_exponent", "Minimum exponent",ParameterProperties()); - SG_ADD(&m_maxExponent, "max_exponent", "Maximum exponent",ParameterProperties()); - SG_ADD(&m_table, "table", "Lookup-table",ParameterProperties()); + SG_ADD(&m_minExponent, "min_exponent", "Minimum exponent"); + SG_ADD(&m_maxExponent, "max_exponent", "Maximum exponent"); + SG_ADD(&m_table, "table", "Lookup-table"); } diff --git a/src/shogun/regression/svr/LibLinearRegression.cpp b/src/shogun/regression/svr/LibLinearRegression.cpp index 14472d13a26..0afb38d37bc 100644 --- a/src/shogun/regression/svr/LibLinearRegression.cpp +++ b/src/shogun/regression/svr/LibLinearRegression.cpp @@ -44,10 +44,10 @@ void 
diff --git a/src/shogun/regression/svr/LibLinearRegression.cpp b/src/shogun/regression/svr/LibLinearRegression.cpp
index 14472d13a26..0afb38d37bc 100644
--- a/src/shogun/regression/svr/LibLinearRegression.cpp
+++ b/src/shogun/regression/svr/LibLinearRegression.cpp
@@ -44,10 +44,10 @@ void CLibLinearRegression::init_defaults()
 void CLibLinearRegression::register_parameters()
 {
 	SG_ADD(&m_C, "m_C", "regularization constant",ParameterProperties::HYPER);
-	SG_ADD(&m_epsilon, "m_epsilon", "tolerance epsilon",ParameterProperties());
+	SG_ADD(&m_epsilon, "m_epsilon", "tolerance epsilon");
 	SG_ADD(&m_epsilon, "m_tube_epsilon", "svr tube epsilon",ParameterProperties::HYPER);
-	SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations",ParameterProperties());
-	SG_ADD(&m_use_bias, "m_use_bias", "indicates whether bias should be used",ParameterProperties());
+	SG_ADD(&m_max_iter, "m_max_iter", "max number of iterations");
+	SG_ADD(&m_use_bias, "m_use_bias", "indicates whether bias should be used");
 }
 
 CLibLinearRegression::~CLibLinearRegression()
diff --git a/tests/unit/optimization/StochasticMinimizers_unittest.cc b/tests/unit/optimization/StochasticMinimizers_unittest.cc
index 636356d34fb..ed2c9030fb3 100644
--- a/tests/unit/optimization/StochasticMinimizers_unittest.cc
+++ b/tests/unit/optimization/StochasticMinimizers_unittest.cc
@@ -69,8 +69,8 @@ void CRegressionExample::init()
 	m_y=SGVector();
 	m_x=SGMatrix();
 	SG_ADD(&m_w, "r_w", "r_w",ParameterProperties::HYPER | ParameterProperties::GRADIENT);
-	SG_ADD(&m_x, "r_x", "r_x",ParameterProperties());
-	SG_ADD(&m_y, "r_y", "r_y",ParameterProperties());
+	SG_ADD(&m_x, "r_x", "r_x");
+	SG_ADD(&m_y, "r_y", "r_y");
 }
 
 float64_t CRegressionExample::get_cost()