[opengm] 12/386: prototypical implementation for loss and learning; before adding unit tests some interfaces need to be specified

Ghislain Vaillant ghisvail-guest at moszumanska.debian.org
Wed Aug 31 08:34:59 UTC 2016


This is an automated email from the git hooks/post-receive script.

ghisvail-guest pushed a commit to branch debian/master
in repository opengm.

commit f14f9c753cd506fe0a5e192258781c7f721e95a5
Author: joergkappes <kappes at math.uni-heidelberg.de>
Date:   Mon May 19 09:08:22 2014 +0200

    prototypical implementation for loss and learning; before adding unit tests some interfaces need to be specified
---
 include/opengm/learning/loss/hammingloss.hxx | 65 ++++++++++++++++------------
 include/opengm/learning/randomlearner.hxx    | 57 ++++++++++++++++++++++++
 2 files changed, 95 insertions(+), 27 deletions(-)

diff --git a/include/opengm/learning/loss/hammingloss.hxx b/include/opengm/learning/loss/hammingloss.hxx
index 5d8eb72..ec13eda 100644
--- a/include/opengm/learning/loss/hammingloss.hxx
+++ b/include/opengm/learning/loss/hammingloss.hxx
@@ -1,35 +1,46 @@
#pragma once
#ifndef OPENGM_HAMMING_LOSS_HXX
#define OPENGM_HAMMING_LOSS_HXX

#include "opengm/functions/explicit_function.hxx"

namespace opengm {
   namespace learning {

      /// Hamming loss: the number of variables whose label differs from
      /// the ground-truth label.
      class HammingLoss{
      public:

         /// Compute the Hamming loss between a labeling [labelBegin,labelEnd)
         /// and a ground truth starting at GTBegin.
         /// \return number of positions where the two sequences disagree.
         template<class IT1, class IT2>
         double loss(IT1 labelBegin, IT1 labelEnd, IT2 GTBegin, IT2 GTEnd) const;

         /// Augment gm with one unary loss factor per variable: the factor is
         /// 1 for every label except the ground-truth label, which gets 0.
         /// GTBegin must iterate over one ground-truth label per variable.
         template<class GM, class IT>
         void addLoss(GM& gm, IT GTBegin) const;
      private:
      };

      template<class IT1, class IT2>
      double HammingLoss::loss(IT1 labelBegin, const IT1 labelEnd, IT2 GTBegin, const IT2 GTEnd) const
      {
         double loss = 0;
         for(; labelBegin != labelEnd; ++labelBegin, ++GTBegin){
            if(*labelBegin != *GTBegin){
               loss += 1.0;
            }
         }
         return loss; // BUG FIX: result was computed but never returned
      }

      template<class GM, class IT>
      void HammingLoss::addLoss(GM& gm, IT gt) const
      {
         for(typename GM::IndexType i=0; i<gm.numberOfVariables(); ++i){ // FIX: was numberOFLabels below
            typename GM::LabelType numL = gm.numberOfLabels(i);
            // Unary explicit function over numL labels, default value 1 ...
            opengm::ExplicitFunction<typename GM::ValueType,
                                     typename GM::IndexType,
                                     typename GM::LabelType> f(&numL, &numL + 1, 1); // FIX: added required 'typename'
            // ... and value 0 at the ground-truth label of variable i.
            f(*gt) = 0;
            ++gt;
            gm.addFactor(gm.addFunction(f), &i, &i + 1); // FIX: was gm.addfunction
         }
      }

   } // namespace learning
} // namespace opengm

#endif // OPENGM_HAMMING_LOSS_HXX
diff --git a/include/opengm/learning/randomlearner.hxx b/include/opengm/learning/randomlearner.hxx
new file mode 100644
index 0000000..2019feb
--- /dev/null
+++ b/include/opengm/learning/randomlearner.hxx
@@ -0,0 +1,57 @@
+#pragma once
+#ifndef OPENGM_RANDOM_LEARNER_HXX
+#define OPENGM_RANDOM_LEARNER_HXX
+
+#include <vector>
+#include <opengm/functions/learnablefunction.hxx>
+
+namespace opengm {
+   namespace learning {
+      template<class DATASET, class LOSS>
+      class RandomLearner<DATASET, LOSS>
+      {
+      public:
+         typedef GMType; // This will be constructed as a combination of DATASET and LOSS (especially the functiontypelist
+
+
+         class Parameter{
+         public:
+            std::vector<double> parameterUpperbound_; 
+            std::vector<double> parameterLowerbound_;
+            size_t iterations_;
+            Parameter():iterations_(10){;}
+         };
+
+
+         RandomLearner(DATASET&, Parameter& );
+
+         template<class INF>
+         void learn(typename INF::Parameter para); 
+         //template<class INF, class VISITOR>
+         //void learn(typename INF::Parameter para, VITITOR vis);
+
+         const opengm::Parameters<ValueType,IndexType>& getModelParameters(){return modelParameters_;} 
+         Parameter& getLerningParameters(){return para_;}
+
+      private:
+         DATASET& dataset_;
+         opengm::Parameters<double,size_t> modelParameters_;
+         Parameter para_;
+      }; 
+
+      template<class DATASET, class LOSS>
+      RandomLearner<DATASET, LOSS>::RandomLearner(DATASET& ds, Parameter& p )
+         : dataset_(ds), para_(p)
+      {
+         modelParameters_ = opengm::Parameters<double,size_t>(ds.numberOfParameters());
+      }
+
+
+      template<class DATASET, class LOSS>
+      template<class INF>
+      void RandomLearner<DATASET, LOSS>::learn(typename INF::Parameter& para){
+         //todo
+      };
+   }
+}
+#endif

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/opengm.git



More information about the debian-science-commits mailing list