[opengm] 177/386: No boundaries on the parameter, start at origin.

Ghislain Vaillant ghisvail-guest at moszumanska.debian.org
Wed Aug 31 08:37:35 UTC 2016


This is an automated email from the git hooks/post-receive script.

ghisvail-guest pushed a commit to branch debian/master
in repository opengm.

commit 5eef7949b433d7ec655f258d4ba59f45e738d8b7
Author: Janez Ales <janez.ales at iwr.uni-heidelberg.de>
Date:   Fri Dec 19 15:35:50 2014 +0100

    No boundaries on the parameter, start at origin.
---
 .../learning/maximum-likelihood-learning.hxx       | 24 ++++++++++------
 .../learning/test_maximum_likelihood_learner.cxx   | 32 ++--------------------
 2 files changed, 17 insertions(+), 39 deletions(-)

diff --git a/include/opengm/learning/maximum-likelihood-learning.hxx b/include/opengm/learning/maximum-likelihood-learning.hxx
index 81aa089..537599c 100644
--- a/include/opengm/learning/maximum-likelihood-learning.hxx
+++ b/include/opengm/learning/maximum-likelihood-learning.hxx
@@ -115,14 +115,16 @@ void MaximumLikelihoodLearner<DATASET, LOSS>::learn(typename INF::Parameter& wei
     std::vector<ValueType> gradient(dataset_.getNumberOfWeights(),0);
     std::vector<ValueType> Delta(dataset_.getNumberOfWeights(),0);
     for(IndexType p=0; p<dataset_.getNumberOfWeights(); ++p)
-        point[p] = ValueType((weight_.weightUpperbound_[p]-weight_.weightLowerbound_[p])/2);
+        point[p] = ValueType((0));
+
+    //weight_.weightUpperbound_[p]-weight_.weightLowerbound_[p])/2);
         //point[p] = ValueType(weight_.weightUpperbound_[p]);
         //point[p] = ValueType(weight_.weightLowerbound_[p]);
 
     // test only
-    point[0]=0.5;
-    point[1]=0.7;
-    point[2]=0.9;
+    //point[0]=0.5;
+    //point[1]=0.7;
+    //point[2]=0.9;
 
     LOSS lossFunction;
     bool search=true;
@@ -237,7 +239,7 @@ void MaximumLikelihoodLearner<DATASET, LOSS>::learn(typename INF::Parameter& wei
             for(IndexType m=0; m<dataset_.getNumberOfModels(); ++m){
                 const GMType &model = dataset_.getModel(m);
                 const std::vector<typename INF::LabelType>& gt =  dataset_.getGT(m);
-                ValueType f_x; // f^{d}_{C;k} ( x^d_C ) J. Kappes p. 64
+                ValueType f_p;
 
                 for(IndexType f=0; f<dataset_.getModel(m).numberOfFactors();++f){
                     const FactorType &factor = dataset_.getModel(m)[f];
@@ -251,10 +253,14 @@ void MaximumLikelihoodLearner<DATASET, LOSS>::learn(typename INF::Parameter& wei
                     }
                     WeightGradientFunctor weightGradientFunctor(p, labelVector.begin());
                     factor.callFunctor(weightGradientFunctor);
-                    f_x =weightGradientFunctor.result_;
-                    // ( ground truth - marginals ) * factorWeightGradient
-                    sum[p] += (b[m][f] - piW[m][f]) * f_x;
-                    // ( ground truth - marginals ) * factor
+                    f_p =weightGradientFunctor.result_;
+
+                    // gradient
+                    // ( marginals - ground_truth ) * factor_gradient_p
+                    sum[p] += (b[m][f] - piW[m][f]) * f_p;
+
+                    // likelihood function
+                    // marginals - ground_truth * factor
                     optFun += b[m][f] - piW[m][f] * factor(labelVector.begin());
                 }
             }
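
Aside on the hunk above: the learner now starts the weight vector at the origin rather than at the midpoint of the (removed) bounds, and for each weight p it accumulates (b[m][f] - piW[m][f]) * f_p over all factors. Assuming b holds the ground-truth factor values and piW the model marginals, as the pre-existing comment suggested, a minimal stand-alone sketch of that accumulation pattern (illustrative names, not the OpenGM API) is:

#include <cstddef>
#include <vector>

// Illustrative only, not the OpenGM API. Accumulates, per weight p,
//   sum_f (b_f - pi_f) * d f_f / d w_p
// where b_f is the ground-truth value of factor f, pi_f its marginal under
// the current weights, and featureGradient[f][p] the factor's derivative
// with respect to weight p. All names here are hypothetical.
std::vector<double> likelihoodGradient(
    const std::vector<double>& groundTruth,                    // b[f]
    const std::vector<double>& marginals,                      // pi[f]
    const std::vector<std::vector<double> >& featureGradient)  // d f[f] / d w[p]
{
    const std::size_t numWeights =
        featureGradient.empty() ? 0 : featureGradient.front().size();
    std::vector<double> gradient(numWeights, 0.0);
    for (std::size_t f = 0; f < groundTruth.size(); ++f) {
        const double residual = groundTruth[f] - marginals[f];
        for (std::size_t p = 0; p < numWeights; ++p)
            gradient[p] += residual * featureGradient[f][p];
    }
    return gradient;
}
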
diff --git a/src/unittest/learning/test_maximum_likelihood_learner.cxx b/src/unittest/learning/test_maximum_likelihood_learner.cxx
index 4f383e8..4d1fda2 100644
--- a/src/unittest/learning/test_maximum_likelihood_learner.cxx
+++ b/src/unittest/learning/test_maximum_likelihood_learner.cxx
@@ -48,51 +48,30 @@ int main() {
    {
       DS0 dataset;
       std::cout << "Dataset includes " << dataset.getNumberOfModels() << " instances and has " << dataset.getNumberOfWeights() << " parameters."<<std::endl;
-
-
       opengm::learning::MaximumLikelihoodLearner<DS0,LOSS>::Weight weight;
-      weight.weightUpperbound_.resize(1,1);
-      weight.weightLowerbound_.resize(1,0);
-      weight.testingPoints_.resize(1,10);
       opengm::learning::MaximumLikelihoodLearner<DS0,LOSS> learner(dataset,weight);
-
-
       INF::Parameter infWeight;
       learner.learn<INF>(infWeight);
 
    }
-
+*/
 
    {
       DS1 dataset;
       std::cout << "Dataset includes " << dataset.getNumberOfModels() << " instances and has " << dataset.getNumberOfWeights() << " parameters."<<std::endl;
-      
-      
       opengm::learning::MaximumLikelihoodLearner<DS1,LOSS>::Weight weight;
-      weight.weightUpperbound_.resize(1,1);
-      weight.weightLowerbound_.resize(1,0);
-      weight.testingPoints_.resize(1,10);
       opengm::learning::MaximumLikelihoodLearner<DS1,LOSS> learner(dataset,weight);
-      
-      
       INF::Parameter infWeight;
       learner.learn<INF>(infWeight);
       
    }
-*/
+/*
 
    {
       DS2 dataset;
       std::cout << "Dataset includes " << dataset.getNumberOfModels() << " instances and has " << dataset.getNumberOfWeights() << " parameters."<<std::endl;
-      
-      
       opengm::learning::MaximumLikelihoodLearner<DS2,LOSS>::Weight weight;
-      weight.weightUpperbound_.resize(3,1);
-      weight.weightLowerbound_.resize(3,0);
-      weight.testingPoints_.resize(3,10);
       opengm::learning::MaximumLikelihoodLearner<DS2,LOSS> learner(dataset,weight);
-      
-      
       INF::Parameter infWeight;
       learner.learn<INF>(infWeight);
    }
@@ -101,15 +80,8 @@ int main() {
    {
       DSSimple dataset;
       std::cout << "Dataset includes " << dataset.getNumberOfModels() << " instances and has " << dataset.getNumberOfWeights() << " parameters."<<std::endl;
-
-
       opengm::learning::MaximumLikelihoodLearner<DSSimple,LOSS>::Weight weight;
-      weight.weightUpperbound_.resize(2,1);
-      weight.weightLowerbound_.resize(2,0);
-      weight.testingPoints_.resize(2,10);
       opengm::learning::MaximumLikelihoodLearner<DSSimple,LOSS> learner(dataset,weight);
-
-
       INF::Parameter infWeight;
       learner.learn<INF>(infWeight);
    }

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/opengm.git


