[opengm] 79/386: add new acyclic testdataset

Ghislain Vaillant ghisvail-guest at moszumanska.debian.org
Wed Aug 31 08:35:08 UTC 2016


This is an automated email from the git hooks/post-receive script.

ghisvail-guest pushed a commit to branch debian/master
in repository opengm.

commit 53ed656b233f313d002a93d31cc42e9f04c24db4
Author: Joerg Kappes <jkappes at goedel.(none)>
Date:   Wed Dec 17 11:16:09 2014 +0100

    add new  acyclic testdataset
---
 include/opengm/learning/dataset/testdatasets.hxx | 63 ++++++++++++++++++++++++
 src/unittest/learning/test_dataset.cxx           | 30 +++++++++--
 2 files changed, 90 insertions(+), 3 deletions(-)

diff --git a/include/opengm/learning/dataset/testdatasets.hxx b/include/opengm/learning/dataset/testdatasets.hxx
index 4a18393..1517088 100644
--- a/include/opengm/learning/dataset/testdatasets.hxx
+++ b/include/opengm/learning/dataset/testdatasets.hxx
@@ -13,6 +13,20 @@ namespace opengm {
    namespace datasets{
 
       template<class GM, class LOSS>
+      class TestDataset0 : public Dataset<GM,LOSS>{ 
+      public:
+         typedef GM                     GMType;
+         typedef GM                     GMWITHLOSS;
+         typedef LOSS                   LossType;
+         typedef typename GM::ValueType ValueType;
+         typedef typename GM::IndexType IndexType;
+         typedef typename GM::LabelType LabelType;
+         typedef opengm::learning::Weights<ValueType> Weights;
+
+         TestDataset0(size_t numModels=5); 
+      };
+
+      template<class GM, class LOSS>
       class TestDataset1 : public Dataset<GM,LOSS>{ 
       public:
          typedef GM                     GMType;
@@ -26,6 +40,7 @@ namespace opengm {
          TestDataset1(size_t numModels=5); 
       };
 
+
       template<class GM, class LOSS>
       class TestDataset2 : public Dataset<GM,LOSS>{ 
       public:
@@ -41,6 +56,54 @@ namespace opengm {
       };
 
 //***********************************
+//** IMPL TestDataset 0
+//***********************************
+      template<class GM, class LOSS>
+      TestDataset0<GM,LOSS>::TestDataset0(size_t numModels)
+      { 
+         this->count_.resize(numModels,0);
+         this->weights_ = Weights(1);
+         LabelType numberOfLabels = 2;
+         this->gts_.resize(numModels,std::vector<LabelType>(64,0));
+         for(size_t m=0;m<numModels;++m){
+            for(size_t i=16; i<48; ++i){
+               this->gts_[m][i] = 1;
+            }
+         }
+         this->gms_.resize(numModels);
+         this->gmsWithLoss_.resize(numModels);
+         for(size_t m=0; m<numModels; ++m){
+            std::srand(m);
+            for (int j = 0; j < 64; j++)
+               this->gms_[m].addVariable(2);
+            for(size_t y = 0; y < 64; ++y){ 
+               // function
+               const size_t shape[] = {numberOfLabels};
+               ExplicitFunction<ValueType> f(shape, shape + 1);
+               ValueType val = (double)(this->gts_[m][y]) + (double)(std::rand()) / (double) (RAND_MAX) * 1.5 - 0.75 ;
+               f(0) = std::fabs(val-0);
+               f(1) = std::fabs(val-1);
+               typename GM::FunctionIdentifier fid =  this->gms_[m].addFunction(f);
+
+               // factor
+               size_t variableIndices[] = {y};
+               this->gms_[m].addFactor(fid, variableIndices, variableIndices + 1);         
+            }
+          
+            opengm::functions::learnable::LPotts<ValueType,IndexType,LabelType> f(this->weights_,2,std::vector<size_t>(1,0),std::vector<ValueType>(1,1));
+            typename GM::FunctionIdentifier fid = this->gms_[m].addFunction(f);      
+            for(size_t y = 0; y < 64; ++y){ 
+               if(y + 1 < 64) { // (x, y) -- (x, y + 1)
+                  size_t variableIndices[] = {y, y+1};
+                  //sort(variableIndices, variableIndices + 2);
+                  this->gms_[m].addFactor(fid, variableIndices, variableIndices + 2);
+               }
+            }
+            this->buildModelWithLoss(m);
+         }      
+      }
+
+//***********************************
 //** IMPL TestDataset 1
 //***********************************
       template<class GM, class LOSS>
diff --git a/src/unittest/learning/test_dataset.cxx b/src/unittest/learning/test_dataset.cxx
index 7fbfb5a..b00f9d4 100644
--- a/src/unittest/learning/test_dataset.cxx
+++ b/src/unittest/learning/test_dataset.cxx
@@ -30,6 +30,7 @@ typedef opengm::GraphicalModel<ValueType,opengm::Adder, FunctionListType, opengm
 
 typedef opengm::learning::HammingLoss     LOSS;
 //typedef opengm::learning::NoLoss                 LOSS;
+typedef opengm::datasets::TestDataset0<GM,LOSS>  DS0;
 typedef opengm::datasets::TestDataset1<GM,LOSS>  DS1;
 typedef opengm::datasets::TestDataset2<GM,LOSS>  DS2;
 typedef opengm::datasets::Dataset<GM,LOSS>       DS;
@@ -104,7 +105,8 @@ int main() {
       // initialize your data here
       // eventually you need to load it from file
       DS data;
-
+  
+      std::cout << "Start test DS" <<std::endl;
       //run tests on dataset
       DatasetTest<DS >t(data);
       t.run();
@@ -113,14 +115,36 @@ int main() {
    {
       // initialize your data here
       // eventually you need to load it from file
-          DS1 data;
+      DS0 data;
+
+      std::cout << "Start test DS0" <<std::endl;
+      //run tests on dataset
+      DatasetTest<DS0 >t(data);
+      t.run();
+
+   }
+   {
+      // initialize your data here
+      // eventually you need to load it from file
+      DS1 data;
 
-      std::cout << "Start test" <<std::endl;
+      std::cout << "Start test DS1" <<std::endl;
       //run tests on dataset
       DatasetTest<DS1 >t(data);
       t.run();
 
    }
+   {
+      // initialize your data here
+      // eventually you need to load it from file
+      DS2 data;
+
+      std::cout << "Start test DS2" <<std::endl;
+      //run tests on dataset
+      DatasetTest<DS2 >t(data);
+      t.run();
+
+   }
 
 
 }

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/opengm.git



More information about the debian-science-commits mailing list