[opengm] 145/386: breaks all tests. Revert "changed loss interface"

Ghislain Vaillant ghisvail-guest at moszumanska.debian.org
Wed Aug 31 08:36:37 UTC 2016


This is an automated email from the git hooks/post-receive script.

ghisvail-guest pushed a commit to branch debian/master
in repository opengm.

commit 70431337047c95d92f03fcd7a1cc822fb938639f
Author: mschiegg <martin.schiegg at iwr.uni-heidelberg.de>
Date:   Fri Dec 19 10:07:12 2014 +0100

    breaks all tests. Revert "changed loss interface"
    
    This reverts commit 9762c5ad12ab680388786a0228d05ab3e8da266a.
---
 include/opengm/learning/gridsearch-learning.hxx          |  2 +-
 include/opengm/learning/loss/generalized-hammingloss.hxx | 10 +++++-----
 include/opengm/learning/loss/hammingloss.hxx             |  8 ++++----
 include/opengm/learning/loss/noloss.hxx                  |  8 ++++----
 src/interfaces/python/opengm/learning/__init__.py        | 12 +++++-------
 src/interfaces/python/opengm/learning/pyLoss.cxx         |  6 +++---
 6 files changed, 22 insertions(+), 24 deletions(-)
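
In isolation, the revert is one API change fanned out across the loss headers, the grid-search learner, and the Python bindings: the "changed loss interface" commit had added the graphical model as the first argument of every loss functor's loss() overload, and reverting it restores the iterator-only signature, so a loss is computed purely from a labeling range and a ground-truth range. A minimal sketch of the restored interface (illustrative only, not the opengm header; it assumes the unit per-node penalty that HammingLoss uses):

    // Hamming loss over two ranges: one unit of loss per variable whose
    // inferred label disagrees with the ground truth. GTEnd is kept only to
    // mirror the header's signature; the loop does not need it.
    template<class IT1, class IT2>
    double hammingLoss(IT1 labelBegin, IT1 labelEnd, IT2 GTBegin, IT2 /*GTEnd*/) {
        double loss = 0.0;
        for (; labelBegin != labelEnd; ++labelBegin, ++GTBegin) {
            if (*labelBegin != *GTBegin) {
                loss += 1.0;
            }
        }
        return loss;
    }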

diff --git a/include/opengm/learning/gridsearch-learning.hxx b/include/opengm/learning/gridsearch-learning.hxx
index 360be57..3650e1e 100644
--- a/include/opengm/learning/gridsearch-learning.hxx
+++ b/include/opengm/learning/gridsearch-learning.hxx
@@ -84,7 +84,7 @@ namespace opengm {
                inf.infer();
                inf.arg(confs[m]);
                const std::vector<typename INF::LabelType>& gt =  dataset_.getGT(m);
-               loss += lossFunction.loss(dataset_.getModel(m),confs[m].begin(), confs[m].end(), gt.begin(), gt.end());
+               loss += lossFunction.loss(confs[m].begin(), confs[m].end(), gt.begin(), gt.end());
             }
             
             // *call visitor*
diff --git a/include/opengm/learning/loss/generalized-hammingloss.hxx b/include/opengm/learning/loss/generalized-hammingloss.hxx
index f7158a3..16ef2ff 100644
--- a/include/opengm/learning/loss/generalized-hammingloss.hxx
+++ b/include/opengm/learning/loss/generalized-hammingloss.hxx
@@ -32,7 +32,7 @@ public:
                 return nodeLossMultiplier_ < labelLossMultiplier_;
         }
         bool operator>(const GeneralizedHammingLoss & other) const{
-                return nodeLossMultiplier_ > labelLossMultiplier_;
+                nodeLossMultiplier_ > labelLossMultiplier_;
         }
 
         /**
@@ -53,8 +53,8 @@ public:
 public:
     GeneralizedHammingLoss(const Parameter& param = Parameter()) : param_(param){}
 
-    template<class GM, class IT1, class IT2>
-            double loss(const GM & gm, IT1 labelBegin, IT1 labelEnd, IT2 GTBegin,IT2 GTEnd) const;
+    template<class IT1, class IT2>
+            double loss(IT1 labelBegin, IT1 labelEnd, IT2 GTBegin,IT2 GTEnd) const;
 
     template<class GM, class IT>
     void addLoss(GM& gm, IT GTBegin) const;
@@ -104,8 +104,8 @@ inline void GeneralizedHammingLoss::Parameter::load(const hid_t& groupHandle) {
     }
 }
 
-template<class GM, class IT1, class IT2>
-double GeneralizedHammingLoss::loss(const GM & gm, IT1 labelBegin, const IT1 labelEnd, IT2 GTBegin, const IT2 GTEnd) const
+template<class IT1, class IT2>
+double GeneralizedHammingLoss::loss(IT1 labelBegin, const IT1 labelEnd, IT2 GTBegin, const IT2 GTEnd) const
 {
     double loss = 0.0;
     size_t nodeIndex = 0;
diff --git a/include/opengm/learning/loss/hammingloss.hxx b/include/opengm/learning/loss/hammingloss.hxx
index 47e272e..783d615 100644
--- a/include/opengm/learning/loss/hammingloss.hxx
+++ b/include/opengm/learning/loss/hammingloss.hxx
@@ -35,8 +35,8 @@ namespace opengm {
       public:
          HammingLoss(const Parameter& param = Parameter()) : param_(param){}
 
-         template<class GM, class IT1, class IT2>
-         double loss(const GM & gm, IT1 labelBegin, IT1 labelEnd, IT2 GTBegin,IT2 GTEnd) const;
+         template<class IT1, class IT2>
+         double loss(IT1 labelBegin, IT1 labelEnd, IT2 GTBegin,IT2 GTEnd) const;
   
          template<class GM, class IT>
          void addLoss(GM& gm, IT GTBegin) const;
@@ -50,8 +50,8 @@ namespace opengm {
           marray::hdf5::save(groupHandle,"lossId",name);
       }
 
-      template<class GM, class IT1, class IT2>
-      double HammingLoss::loss(const GM & gm, IT1 labelBegin, const IT1 labelEnd, IT2 GTBegin, const IT2 GTEnd) const
+      template<class IT1, class IT2>
+      double HammingLoss::loss(IT1 labelBegin, const IT1 labelEnd, IT2 GTBegin, const IT2 GTEnd) const
       {
          double loss = 0.0;
          for(; labelBegin!= labelEnd; ++labelBegin, ++GTBegin){
diff --git a/include/opengm/learning/loss/noloss.hxx b/include/opengm/learning/loss/noloss.hxx
index e207433..bce7acc 100644
--- a/include/opengm/learning/loss/noloss.hxx
+++ b/include/opengm/learning/loss/noloss.hxx
@@ -39,8 +39,8 @@ namespace learning {
 
         }
 
-        template<class GM, class IT1, class IT2>
-        double loss(const GM & gm, IT1 labelBegin, IT1 labelEnd, IT2 GTBegin,IT2 GTEnd) const;
+        template<class IT1, class IT2>
+        double loss(IT1 labelBegin, IT1 labelEnd, IT2 GTBegin,IT2 GTEnd) const;
 
         template<class GM, class IT>
         void addLoss(GM& gm, IT GTBegin) const;
@@ -55,8 +55,8 @@ namespace learning {
         marray::hdf5::save(groupHandle,"lossId",name);
     }
 
-    template<class GM, class IT1, class IT2>
-    double NoLoss::loss(const GM & gm, IT1 labelBegin, const IT1 labelEnd, IT2 GTBegin, const IT2 GTEnd) const
+    template<class IT1, class IT2>
+    double NoLoss::loss(IT1 labelBegin, const IT1 labelEnd, IT2 GTBegin, const IT2 GTEnd) const
     {
         double loss = 0.0;
         return loss;
diff --git a/src/interfaces/python/opengm/learning/__init__.py b/src/interfaces/python/opengm/learning/__init__.py
index da56988..89f639f 100644
--- a/src/interfaces/python/opengm/learning/__init__.py
+++ b/src/interfaces/python/opengm/learning/__init__.py
@@ -111,25 +111,23 @@ def lPottsFunctions(nFunctions, numberOfLabels, features, weightIds):
     raise RuntimeError("not yet implemented")
 
 
-def lUnaryFunction(weights, numberOfLabels, features, weightIds):
-    assert numberOfLabels >= 2
+def lunaryFunction(weights, numberOfLabels, features, weightIds):
 
     features = numpy.require(features, dtype=value_type)
     weightIds = numpy.require(weightIds, dtype=index_type)
-
+    
     assert features.ndim == weightIds.ndim
     if features.ndim == 1 or weightIds.ndim == 1:
-        assert numberOfLabels == 2
-        assert features.shape[0]  == weightIds.shape[0]
+        assert numberOfLabels ==2
         features  = features.reshape(1,-1)
         weightIds = weightIds.reshape(1,-1)
 
     assert features.shape[0] in [numberOfLabels, numberOfLabels-1]
     assert weightIds.shape[0] in [numberOfLabels, numberOfLabels-1]
-    assert features.shape[1]  == weightIds.shape[1]
 
 
-    return LUnaryFunction(weights=weights, numberOfLabels=int(numberOfLabels), 
+
+    return LUnaryFunction(weights=weights, numberOfLabels=numberOfLabels, 
                           features=features, weightIds=weightIds)
 
 
diff --git a/src/interfaces/python/opengm/learning/pyLoss.cxx b/src/interfaces/python/opengm/learning/pyLoss.cxx
index 72aa8c5..abfde86 100644
--- a/src/interfaces/python/opengm/learning/pyLoss.cxx
+++ b/src/interfaces/python/opengm/learning/pyLoss.cxx
@@ -38,17 +38,17 @@ void export_loss(){
     typedef opengm::learning::GeneralizedHammingLoss::Parameter PyGeneralizedHammingLossParameter;
 
     class_<PyHammingLoss >("HammingLoss")
-        .def("loss", &PyHammingLoss::loss<const GM &, Literator,Literator>)
+        .def("loss", &PyHammingLoss::loss<Literator,Literator>)
         .def("addLoss", &PyHammingLoss::addLoss<GM, Literator>)
     ;
 
     class_<PyNoLoss >("NoLoss")
-        .def("loss", &PyNoLoss::loss<const GM &,Literator,Literator>)
+        .def("loss", &PyNoLoss::loss<Literator,Literator>)
         .def("addLoss", &PyNoLoss::addLoss<GM, Literator>)
     ;
 
     class_<PyGeneralizedHammingLoss >("GeneralizedHammingLoss", init<PyGeneralizedHammingLossParameter>())
-        .def("loss", &PyGeneralizedHammingLoss::loss<const GM &,Literator,Literator>)
+        .def("loss", &PyGeneralizedHammingLoss::loss<Literator,Literator>)
         .def("addLoss", &PyGeneralizedHammingLoss::addLoss<GM, Literator>)
     ;
 

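The pyLoss.cxx hunks follow mechanically from the header change: Boost.Python's .def() binds the address of one concrete function, so a member function template must be instantiated with explicit template arguments that mirror the current C++ signature, which is why every binding moves from loss<const GM &, Literator, Literator> to loss<Literator, Literator> in lockstep with the headers. A self-contained sketch of that mechanism (plain C++, no Boost; all names here are illustrative):

    // Taking the address of a member function template selects exactly one
    // instantiation, so the explicit argument list must track the signature.
    struct Loss {
        // NoLoss-style stub with the reverted, iterator-only parameter list.
        template<class IT1, class IT2>
        double loss(IT1, IT1, IT2, IT2) const { return 0.0; }
    };

    int main() {
        int v[] = {0, 1};
        // Only the two iterator types are named; a leading <const GM &, ...>
        // argument would no longer compile against the reverted header.
        double (Loss::*f)(int*, int*, int*, int*) const = &Loss::loss<int*, int*>;
        Loss noLoss;
        return (noLoss.*f)(v, v + 2, v, v + 2) == 0.0 ? 0 : 1;
    }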