[opengm] 53/386: Add generalized hamming loss with small test

Ghislain Vaillant ghisvail-guest at moszumanska.debian.org
Wed Aug 31 08:35:04 UTC 2016


This is an automated email from the git hooks/post-receive script.

ghisvail-guest pushed a commit to branch debian/master
in repository opengm.

commit fbf4bd0032236a4790f6eed6af2bfb761ce93349
Author: Carsten Haubold <carstenhaubold at googlemail.com>
Date:   Mon Dec 15 16:57:54 2014 +0100

    Add generalized hamming loss with small test
---
 .../learning/loss/generalized-hammingloss.hxx      | 79 ++++++++++++++++++++++
 src/unittest/learning/CMakeLists.txt               |  2 +
 .../learning/test_generalized_hammingloss.cxx      | 69 +++++++++++++++++++
 3 files changed, 150 insertions(+)

diff --git a/include/opengm/learning/loss/generalized-hammingloss.hxx b/include/opengm/learning/loss/generalized-hammingloss.hxx
new file mode 100644
index 0000000..311bff0
--- /dev/null
+++ b/include/opengm/learning/loss/generalized-hammingloss.hxx
@@ -0,0 +1,79 @@
+#pragma once
+#ifndef OPENGM_GENERALIZED_HAMMING_LOSS_HXX
+#define OPENGM_GENERALIZED_HAMMING_LOSS_HXX
+
+#include "opengm/functions/explicit_function.hxx"
+namespace opengm {
+namespace learning {
+
+/**
+ * The generalized Hamming Loss incurs a penalty of nodeLossMultiplier[n] * labelLossMultiplier[l]
+ * for node n taking label l; the penalty is zero only when l equals the ground-truth label of n.
+ * One can imagine the overall cost matrix as outer product nodeLossMultiplier * labelLossMultiplier,
+ * with zeros where the node label equals the ground truth.
+ **/
+class GeneralizedHammingLoss{
+public:
+    // Copies the per-node and per-label multipliers from the given iterator ranges.
+    template<class IT1, class IT2>
+    GeneralizedHammingLoss(IT1 nodeLossMultiplierBegin,
+                           IT1 nodeLossMultiplierEnd,
+                           IT2 labelLossMultiplierBegin,
+                           IT2 labelLossMultiplierEnd);
+
+    // Total loss of a labeling against the ground truth: sum over mismatched
+    // nodes n of nodeLossMultiplier_[n] * labelLossMultiplier_[predicted label].
+    template<class IT1, class IT2>
+            double loss(IT1 labelBegin, IT1 labelEnd, IT2 GTBegin,IT2 GTEnd) const;
+
+    // Adds one unary loss factor per variable of gm; GTBegin iterates the
+    // ground-truth label of each variable, in variable order.
+    template<class GM, class IT>
+    void addLoss(GM& gm, IT GTBegin) const;
+
+private:
+    std::vector<double> nodeLossMultiplier_;   // one weight per node/variable
+    std::vector<double> labelLossMultiplier_;  // one weight per label
+};
+
+// Copies both multiplier ranges into internal storage via the
+// std::vector range constructor; the iterators are not retained.
+template<class IT1, class IT2>
+GeneralizedHammingLoss::GeneralizedHammingLoss(IT1 nodeLossMultiplierBegin,
+                                               IT1 nodeLossMultiplierEnd,
+                                               IT2 labelLossMultiplierBegin,
+                                               IT2 labelLossMultiplierEnd):
+    nodeLossMultiplier_(nodeLossMultiplierBegin, nodeLossMultiplierEnd),
+    labelLossMultiplier_(labelLossMultiplierBegin, labelLossMultiplierEnd)
+{
+}
+
+// Accumulates nodeLossMultiplier_[n] * labelLossMultiplier_[*labelBegin] for
+// every node n whose predicted label differs from the ground truth; matching
+// nodes contribute zero.
+// NOTE(review): GTEnd is unused — the GT range is assumed to be at least as
+// long as [labelBegin, labelEnd); likewise the multiplier vectors are assumed
+// large enough for the node/label indices encountered (no bounds checks).
+template<class IT1, class IT2>
+double GeneralizedHammingLoss::loss(IT1 labelBegin, const IT1 labelEnd, IT2 GTBegin, const IT2 GTEnd) const
+{
+    double loss = 0.0;
+    size_t nodeIndex = 0;
+
+    for(; labelBegin!= labelEnd; ++labelBegin, ++GTBegin, ++nodeIndex){
+        if(*labelBegin != *GTBegin){ // only mismatches are penalized
+            loss += nodeLossMultiplier_[nodeIndex] * labelLossMultiplier_[*labelBegin];
+        }
+    }
+    return loss;
+}
+
+// Augments gm in place: for each variable i, adds a unary factor whose value
+// at label l is nodeLossMultiplier_[i] * labelLossMultiplier_[l] and zero at
+// the ground-truth label, so evaluating the augmented model adds exactly
+// loss(...) on top of the original energy (gm uses an Adder semiring in the
+// accompanying test; behavior under other operations is not shown here).
+template<class GM, class IT>
+void GeneralizedHammingLoss::addLoss(GM& gm, IT gt) const
+{
+
+    for(typename GM::IndexType i=0; i<gm.numberOfVariables(); ++i){
+        typename GM::LabelType numL = gm.numberOfLabels(i);
+        // Unary (1-dimensional) function over the numL labels of variable i,
+        // initialized to 0; &numL,&numL+1 is the one-element shape range.
+        opengm::ExplicitFunction<typename GM::ValueType,typename GM::IndexType, typename GM::LabelType> f(&numL, &(numL)+1, 0);
+
+        for(typename GM::LabelType l = 0; l < numL; ++l){
+            f(l) = nodeLossMultiplier_[i] * labelLossMultiplier_[l]; // cost of picking label l
+        }
+
+        f(*gt) = 0; // ground-truth label incurs no penalty
+        ++gt;
+        gm.addFactor(gm.addFunction(f), &i, &(i)+1); // attach as unary factor on variable i
+    }
+}
+
+} // namespace learning
+} // namespace opengm
+
+#endif 
diff --git a/src/unittest/learning/CMakeLists.txt b/src/unittest/learning/CMakeLists.txt
index f2e7288..2a2cb7e 100644
--- a/src/unittest/learning/CMakeLists.txt
+++ b/src/unittest/learning/CMakeLists.txt
@@ -12,4 +12,6 @@ if(BUILD_TESTING)
       add_test(test-dataset ${CMAKE_CURRENT_BINARY_DIR}/test-dataset)
    endif()
   
+   add_executable(test-generalized-hammingloss test_generalized_hammingloss.cxx ${headers})
+   add_test(test-generalized-hammingloss ${CMAKE_CURRENT_BINARY_DIR}/test-generalized-hammingloss)
 endif()
diff --git a/src/unittest/learning/test_generalized_hammingloss.cxx b/src/unittest/learning/test_generalized_hammingloss.cxx
new file mode 100644
index 0000000..bdee10e
--- /dev/null
+++ b/src/unittest/learning/test_generalized_hammingloss.cxx
@@ -0,0 +1,69 @@
+#include <vector>
+#include <iostream>
+
+#include <opengm/learning/loss/generalized-hammingloss.hxx>
+#include <opengm/graphicalmodel/graphicalmodel.hxx>
+#include <opengm/graphicalmodel/graphicalmodel_factor.hxx>
+
+//*************************************
+typedef double ValueType;  // factor values / energies
+typedef size_t IndexType;  // variable and factor indices
+typedef size_t LabelType;  // discrete label ids
+// Model holds only explicit (table) functions ...
+typedef opengm::meta::TypeListGenerator<opengm::ExplicitFunction<ValueType,IndexType,LabelType> >::type FunctionListType;
+// ... over a discrete label space, combined additively (energy = sum of factors).
+typedef opengm::GraphicalModel<ValueType,opengm::Adder, FunctionListType, opengm::DiscreteSpace<IndexType,LabelType> > GM;
+
+//*************************************
+
+
+// Smoke test for GeneralizedHammingLoss: checks the direct loss() value and
+// that addLoss() shifts a model's energy by exactly that loss.
+int main() {
+
+   // Per-label penalty weights (index = label id).
+   std::vector<double> label_loss_multipliers;
+   label_loss_multipliers.push_back(2.0);
+   label_loss_multipliers.push_back(1.0);
+   label_loss_multipliers.push_back(0.5);
+
+   // Per-node penalty weights (index = variable id).
+   std::vector<double> node_loss_multipliers;
+   node_loss_multipliers.push_back(5.0);
+   node_loss_multipliers.push_back(6.0);
+   node_loss_multipliers.push_back(7.0);
+   node_loss_multipliers.push_back(8.0);
+
+   // create loss
+   opengm::learning::GeneralizedHammingLoss loss(node_loss_multipliers.begin(),
+                                                 node_loss_multipliers.end(),
+                                                 label_loss_multipliers.begin(),
+                                                 label_loss_multipliers.end());
+
+   // evaluate for a test point
+   std::vector<size_t> labels;
+   labels.push_back(0);
+   labels.push_back(1);
+   labels.push_back(2);
+   labels.push_back(2);
+
+   std::vector<size_t> ground_truth;
+   ground_truth.push_back(1);
+   ground_truth.push_back(1);
+   ground_truth.push_back(1);
+   ground_truth.push_back(1);
+
+   // mismatches at nodes 0, 2, 3: 5*2.0 + 7*0.5 + 8*0.5 = 17.5
+   OPENGM_ASSERT_OP(loss.loss(labels.begin(), labels.end(), ground_truth.begin(), ground_truth.end()), ==, 17.5);
+
+   // add loss to a model and evaluate for a given labeling
+   GM gm;
+   size_t numberOfLabels = 3;
+   gm.addVariable(numberOfLabels);
+   gm.addVariable(numberOfLabels);
+   gm.addVariable(numberOfLabels);
+   gm.addVariable(numberOfLabels);
+
+   // Add a constant unary (value 2.0 for every label) to variable index 1.
+   // Fixed: 'typename' removed — it is ill-formed outside a template context
+   // before C++20, and GM is a concrete type here anyway.
+   opengm::ExplicitFunction<GM::ValueType, GM::IndexType, GM::LabelType> f(&numberOfLabels, &(numberOfLabels)+1, 2.0);
+   size_t variableIndex = 1;
+   gm.addFactor(gm.addFunction(f), &variableIndex, &variableIndex+1);
+   OPENGM_ASSERT_OP(gm.evaluate(labels.begin()), ==, 2.0);
+
+   // loss augmented model: original energy (2.0) + loss (17.5)
+   loss.addLoss(gm, ground_truth.begin());
+   OPENGM_ASSERT_OP(gm.evaluate(labels.begin()), ==, 19.5);
+}

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/opengm.git



More information about the debian-science-commits mailing list