[opengm] 68/386: bugfix datasets

Ghislain Vaillant ghisvail-guest at moszumanska.debian.org
Wed Aug 31 08:35:06 UTC 2016



ghisvail-guest pushed a commit to branch debian/master
in repository opengm.

commit df9f8396e5102e3400dd6c8a74f1e4acf4484249
Author: Joerg Kappes <jkappes at goedel.(none)>
Date:   Tue Dec 16 15:38:06 2014 +0100

    bugfix datasets
---
 include/opengm/learning/dataset/dataset.hxx      |  2 +-
 include/opengm/learning/dataset/testdataset.hxx  |  2 +-
 include/opengm/learning/dataset/testdatasets.hxx |  6 +++++-
 include/opengm/learning/loss/hammingloss.hxx     |  2 +-
 src/unittest/learning/test_dataset.cxx           | 22 +++++++++++++++++++---
 5 files changed, 27 insertions(+), 7 deletions(-)

diff --git a/include/opengm/learning/dataset/dataset.hxx b/include/opengm/learning/dataset/dataset.hxx
index c48ecab..aa32fb4 100644
--- a/include/opengm/learning/dataset/dataset.hxx
+++ b/include/opengm/learning/dataset/dataset.hxx
@@ -26,7 +26,7 @@ namespace opengm {
          bool                          unlockModel(const size_t i)             { OPENGM_ASSERT(count_[i]>0); --count_[i]; }
          const GM&                     getModel(const size_t i) const          { return gms_[i]; } 
          const GMWITHLOSS&             getModelWithLoss(const size_t i)const   { return gmsWithLoss_[i]; }
-         const std::vector<LabelType>& getGT(const size_t i) const             { return gt_[i]; }
+         const std::vector<LabelType>& getGT(const size_t i) const             { return gt_[i]; } 
          Weights&                      getWeights()                            { return weights_; } 
          size_t                        getNumberOfWeights() const              { return weights_.numberOfWeights(); }
          size_t                        getNumberOfModels() const               { return gms_.size(); } 
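
For context, the accessors touched here form the read API of the learning dataset; a minimal usage sketch, assuming a concrete dataset instance `ds` and the GM typedef from the unit test below (neither appears in this hunk):

    // Hedged sketch: walk the stored models and their ground-truth labellings.
    for (size_t i = 0; i < ds.getNumberOfModels(); ++i) {
       const GM&                         gm = ds.getModel(i);  // model i
       const std::vector<GM::LabelType>& gt = ds.getGT(i);     // ground truth of model i
       OPENGM_ASSERT(gt.size() == gm.numberOfVariables());     // one label per variable
    }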
diff --git a/include/opengm/learning/dataset/testdataset.hxx b/include/opengm/learning/dataset/testdataset.hxx
index c1a0670..f24eee0 100644
--- a/include/opengm/learning/dataset/testdataset.hxx
+++ b/include/opengm/learning/dataset/testdataset.hxx
@@ -25,7 +25,7 @@ namespace opengm {
          typedef opengm::learning::Weights<ValueType> Weights;
 
          const GM&                     getModel(const size_t i) const  { return gms_[i]; }
-         const std::vector<LabelType>& getGT(const size_t i) const     { return gt_; }
+         const std::vector<LabelType>& getGT(const size_t i) const     { return gt_[i]; }
          Weights&                      getWeights()                    { return weights_; }
          size_t                        getNumberOfWeights() const      { return 1; }
          size_t                        getNumberOfModels() const       { return gms_.size(); } 
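
The change above makes `getGT(i)` return the ground truth of model `i` rather than the whole `gt_` member. A standalone sketch of the intended per-model layout, assuming `gt_` is a vector of per-model label vectors as in testdatasets.hxx below and LabelType is `size_t` (the real typedef lives in the header):

    #include <cstddef>
    #include <vector>

    typedef std::size_t LabelType;

    int main() {
       // One labelling of 64*64 variables per model, as built in testdatasets.hxx below.
       std::vector<std::vector<LabelType> > gt_(3, std::vector<LabelType>(64*64, 0));
       const std::vector<LabelType>& perModel = gt_[1];  // what the fixed getGT(i) returns
       // returning gt_ itself would hand back the outer container, not model 1's labelling
       return perModel.size() == 64*64 ? 0 : 1;
    }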
diff --git a/include/opengm/learning/dataset/testdatasets.hxx b/include/opengm/learning/dataset/testdatasets.hxx
index 6ae24d5..cb08545 100644
--- a/include/opengm/learning/dataset/testdatasets.hxx
+++ b/include/opengm/learning/dataset/testdatasets.hxx
@@ -49,13 +49,14 @@ namespace opengm {
          this->count_.resize(numModels,0);
          this->weights_ = Weights(1);
          LabelType numberOfLabels = 2;
-         this->gt_.resize(numModels,std::vector<size_t>(64*64,0));
+         this->gt_.resize(numModels,std::vector<LabelType>(64*64,0));
          for(size_t m=0;m<numModels;++m){
             for(size_t i=32*64; i<64*64; ++i){
                this->gt_[m][i] = 1;
             }
          }
          this->gms_.resize(numModels);
+         this->gmsWithLoss_.resize(numModels);
          for(size_t m=0; m<numModels; ++m){
             std::srand(m);
             for (int j = 0; j < 64*64; j++)
@@ -92,6 +93,7 @@ namespace opengm {
                   }
                }    
             }
+            this->buildModelWithLoss(m);
          }      
       };
 
@@ -111,6 +113,7 @@ namespace opengm {
             }
          }
          this->gms_.resize(numModels);
+         this->gmsWithLoss_.resize(numModels);
          for(size_t m=0; m<numModels; ++m){
             std::srand(m);
             for (int j = 0; j < 64*64; j++)
@@ -154,6 +157,7 @@ namespace opengm {
                   }
                }    
             }
+            this->buildModelWithLoss(m);
          }
       };
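
Both constructors above gain the same two lines: `gmsWithLoss_` is sized before the model loop and each model is finished with `buildModelWithLoss(m)`, so `getModelWithLoss(i)` has something valid to return. A self-contained stand-in sketch of that order; the Model/ModelWithLoss structs and the skeleton class are placeholders, only the two marked lines mirror the commit:

    #include <cstddef>
    #include <vector>

    struct Model {};                        // placeholder for GM
    struct ModelWithLoss {};                // placeholder for GMWITHLOSS

    struct DatasetSketch {
       std::vector<Model>         gms_;
       std::vector<ModelWithLoss> gmsWithLoss_;

       void buildModelWithLoss(std::size_t m) { gmsWithLoss_[m] = ModelWithLoss(); }

       explicit DatasetSketch(std::size_t numModels) {
          gms_.resize(numModels);
          gmsWithLoss_.resize(numModels);   // added by this commit: size before writing
          for (std::size_t m = 0; m < numModels; ++m) {
             // ... build gms_[m] and add its factors ...
             buildModelWithLoss(m);         // added by this commit: build the loss-augmented model per m
          }
       }
    };

    int main() { DatasetSketch ds(4); return ds.gmsWithLoss_.size() == 4 ? 0 : 1; }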
  
diff --git a/include/opengm/learning/loss/hammingloss.hxx b/include/opengm/learning/loss/hammingloss.hxx
index aad65a2..64e1434 100644
--- a/include/opengm/learning/loss/hammingloss.hxx
+++ b/include/opengm/learning/loss/hammingloss.hxx
@@ -34,7 +34,7 @@ namespace opengm {
 
          for(typename GM::IndexType i=0; i<gm.numberOfVariables(); ++i){
             typename GM::LabelType numL = gm.numberOfLabels(i);
-            opengm::ExplicitFunction<typename GM::ValueType,typename GM::IndexType, typename GM::LabelType> f(&numL, &(numL)+1,-1);
+            opengm::ExplicitFunction<typename GM::ValueType,typename GM::IndexType, typename GM::LabelType> f(&numL, &numL+1,-1);
             f(*gt) = 0;
             ++gt;
             gm.addFactor(gm.addFunction(f), &i, &(i)+1);
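
The edit here only drops redundant parentheses, but the line shows the Hamming-loss construction: for each variable, a unary ExplicitFunction over its `numL` labels, filled with -1 and zeroed at the ground-truth label, where `&numL, &numL+1` is a one-element shape range. A standalone sketch of the same idiom; the double/size_t template arguments are assumptions:

    #include <cstddef>
    #include <opengm/functions/explicit_function.hxx>

    int main() {
       typedef opengm::ExplicitFunction<double, std::size_t, std::size_t> Function;
       std::size_t numL = 4;                // number of labels of one variable
       Function f(&numL, &numL + 1, -1.0);  // unary function of shape {numL}, every entry -1
       std::size_t gtLabel = 2;
       f(gtLabel) = 0.0;                    // the ground-truth label gets zero loss
       return f(gtLabel) == 0.0 ? 0 : 1;
    }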
diff --git a/src/unittest/learning/test_dataset.cxx b/src/unittest/learning/test_dataset.cxx
index 7e4d4aa..7fbfb5a 100644
--- a/src/unittest/learning/test_dataset.cxx
+++ b/src/unittest/learning/test_dataset.cxx
@@ -16,6 +16,7 @@
 #include <opengm/learning/dataset/dataset.hxx>
 #include <opengm/learning/dataset/testdatasets.hxx>
 #include <opengm/learning/loss/noloss.hxx>
+#include <opengm/learning/loss/hammingloss.hxx>
 
 
 //*************************************
@@ -27,7 +28,8 @@ typedef opengm::GraphicalModel<ValueType,opengm::Adder, FunctionListType, opengm
 //typedef opengm::datasets::TestDataset<GM>  DS1;
 //typedef opengm::datasets::Dataset<GM>      DS;
 
-typedef opengm::learning::NoLoss                 LOSS;
+typedef opengm::learning::HammingLoss     LOSS;
+//typedef opengm::learning::NoLoss                 LOSS;
 typedef opengm::datasets::TestDataset1<GM,LOSS>  DS1;
 typedef opengm::datasets::TestDataset2<GM,LOSS>  DS2;
 typedef opengm::datasets::Dataset<GM,LOSS>       DS;
@@ -37,11 +39,12 @@ typedef opengm::datasets::Dataset<GM,LOSS>       DS;
 template<class DatasetType>
 struct DatasetTest {
 
-   DatasetType dataset_;
+   DatasetType& dataset_;
 
-   DatasetTest(DatasetType data): dataset_(data) {}
+   DatasetTest(DatasetType& data): dataset_(data) {}
 
    void testInitialization() {
+      std::cout << "Initialize Model:" << std::endl;
       // create a new dataset
       DatasetType dataset;
    }
@@ -105,6 +108,19 @@ int main() {
       //run tests on dataset
       DatasetTest<DS >t(data);
       t.run();
+   } 
+
+   {
+      // initialize your data here
+      // eventually you need to load it from file
+          DS1 data;
+
+      std::cout << "Start test" <<std::endl;
+      //run tests on dataset
+      DatasetTest<DS1 >t(data);
+      t.run();
+
    }
 
+
 }
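
The test now uses HammingLoss instead of NoLoss (presumably so the loss-augmented models built above are exercised), holds the dataset by reference inside DatasetTest rather than copying it, and adds a second block covering TestDataset1. Condensed, the new block boils down to the pattern already visible in the added lines:

    DS1 data;                  // constructs models, ground truth and loss-augmented models
    DatasetTest<DS1> t(data);  // stores a reference; test and caller share one dataset
    t.run();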
