[opengm] 58/386: start refactoring datasets

Ghislain Vaillant ghisvail-guest at moszumanska.debian.org
Wed Aug 31 08:35:04 UTC 2016


This is an automated email from the git hooks/post-receive script.

ghisvail-guest pushed a commit to branch debian/master
in repository opengm.

commit 716cebac04a4e7aa89a558e12d2e8309fe13363a
Author: Joerg Kappes <jkappes at goedel.(none)>
Date:   Tue Dec 16 09:22:40 2014 +0100

    start refactoring datasets
---
 include/opengm/learning/dataset/testdatasets.hxx | 72 ++++++++++++++++++++++++
 src/unittest/test_gridsearch_learner.cxx         | 21 ++++++-
 2 files changed, 92 insertions(+), 1 deletion(-)

diff --git a/include/opengm/learning/dataset/testdatasets.hxx b/include/opengm/learning/dataset/testdatasets.hxx
new file mode 100644
index 0000000..69201e9
--- /dev/null
+++ b/include/opengm/learning/dataset/testdatasets.hxx
@@ -0,0 +1,72 @@
+#pragma once
+#ifndef OPENGM_TESTDATASETS_HXX
+#define OPENGM_TESTDATASETS_HXX
+
+#include <vector>
+#include <cstdlib>
+#include <cmath>
+
+#include <opengm/learning/dataset/dataset.hxx>
+#include <opengm/functions/explicit_function.hxx>
+#include <opengm/functions/learnable/lpotts.hxx>
+
+namespace opengm {
+   namespace datasets{
+
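+      // TestDataset1: a synthetic dataset of 64x64 binary-label grid models
+      // with noisy unary factors and a single learnable Potts weight, meant
+      // for exercising the learning code.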
+      template<class GM, class LOSS>
+      class TestDataset1 : public Dataset<GM,LOSS>{
+      public:
+         TestDataset1(size_t numModels=10);
+      };
+
+      template<class GM, class LOSS>
+      TestDataset1<GM,LOSS>::TestDataset1(size_t numModels)
+         : weights_(Weights(1))
+      {
+         LabelType numberOfLabels = 2;
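+         // ground truth: rows 0-31 of the 64x64 grid carry label 0, rows 32-63 carry label 1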
+         gt_.resize(64*64,0);
+         for(size_t i=32*64; i<64*64; ++i){
+            gt_[i] = 1;
+         }
+         gms_.resize(numModels);
+         for(size_t m=0; m<numModels; ++m){
+            std::srand(m);
+            for(size_t j = 0; j < 64*64; ++j)
+               gms_[m].addVariable(2);
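+            // unary factors: one explicit function per pixel, built from the
+            // ground-truth label plus uniform noise in [-0.75, 0.75]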
+            for(size_t y = 0; y < 64; ++y){ 
+               for(size_t x = 0; x < 64; ++x) {
+                  // function
+                  const size_t shape[] = {numberOfLabels};
+                  ExplicitFunction<ValueType> f(shape, shape + 1);
+                  ValueType val = (double)(gt_[y*64+x]) + (double)(std::rand()) / (double) (RAND_MAX) * 1.5 - 0.75 ;
+                  f(0) = std::fabs(val-0);
+                  f(1) = std::fabs(val-1);
+                  typename GM::FunctionIdentifier fid =  gms_[m].addFunction(f);
+
+                  // factor
+                  size_t variableIndices[] = {y*64+x};
+                  gms_[m].addFactor(fid, variableIndices, variableIndices + 1);
+               }
+            }
+          
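+            // pairwise factors: a single LPotts function with one shared
+            // learnable weight, attached to every horizontal and vertical edge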
+            opengm::functions::learnable::LPotts<ValueType,IndexType,LabelType> f(weights_,2,std::vector<size_t>(1,0),std::vector<ValueType>(1,1));
+            typename GM::FunctionIdentifier fid = gms_[m].addFunction(f);      
+            for(size_t y = 0; y < 64; ++y){ 
+               for(size_t x = 0; x < 64; ++x) {
+                  if(x + 1 < 64) { // (x, y) -- (x + 1, y)
+                     size_t variableIndices[] = {y*64+x, y*64+x+1};
+                     //sort(variableIndices, variableIndices + 2);
+                     gms_[m].addFactor(fid, variableIndices, variableIndices + 2);
+                  }
+                  if(y + 1 < 64) { // (x, y) -- (x, y + 1)
+                     size_t variableIndices[] = {y*64+x, (y+1)*64+x};
+                     //sort(variableIndices, variableIndices + 2);
+                     gms_[m].addFactor(fid, variableIndices, variableIndices + 2);
+                  }
+               }    
+            }
+         }
+         
+      }
+
+   }
+} // namespace opengm
+
+#endif 
diff --git a/src/unittest/test_gridsearch_learner.cxx b/src/unittest/test_gridsearch_learner.cxx
index dd07c84..bbb948a 100644
--- a/src/unittest/test_gridsearch_learner.cxx
+++ b/src/unittest/test_gridsearch_learner.cxx
@@ -14,6 +14,7 @@
 #include <opengm/learning/loss/hammingloss.hxx>
 #include <opengm/learning/dataset/testdataset.hxx>
 #include <opengm/learning/dataset/testdataset2.hxx>
+//#include <opengm/learning/dataset/testdatasets.hxx>
 
 
 //*************************************
@@ -23,6 +24,7 @@ typedef size_t LabelType;
 typedef opengm::meta::TypeListGenerator<opengm::ExplicitFunction<ValueType,IndexType,LabelType>, opengm::functions::learnable::LPotts<ValueType,IndexType,LabelType>, opengm::functions::learnable::SumOfExperts<ValueType,IndexType,LabelType> >::type FunctionListType;
 typedef opengm::GraphicalModel<ValueType,opengm::Adder, FunctionListType, opengm::DiscreteSpace<IndexType,LabelType> > GM; 
 typedef opengm::datasets::TestDataset<GM>  DS;
+//typedef opengm::datasets::TestDataset1<GM> DS1;
 typedef opengm::datasets::TestDataset2<GM> DS2;
 typedef opengm::learning::HammingLoss     LOSS;
 typedef opengm::ICM<GM,opengm::Minimizer> INF;
@@ -48,8 +50,25 @@ int main() {
       INF::Parameter infPara;
       learner.learn<INF>(infPara);
       
+   } 
+/*
+   {
+      DS1 dataset;
+      std::cout << "Dataset includes " << dataset.getNumberOfModels() << " instances and has " << dataset.getNumberOfWeights() << " parameters."<<std::endl;
+      
+      
+      opengm::learning::GridSearchLearner<DS1,LOSS>::Parameter para;
+      para.parameterUpperbound_.resize(1,1);
+      para.parameterLowerbound_.resize(1,0);
+      para.testingPoints_.resize(1,10);
+      opengm::learning::GridSearchLearner<DS1,LOSS> learner(dataset,para);
+      
+      
+      INF::Parameter infPara;
+      learner.learn<INF>(infPara);
+      
    }
-  
+*/  
    {
       DS2 dataset;
       std::cout << "Dataset includes " << dataset.getNumberOfModels() << " instances and has " << dataset.getNumberOfWeights() << " parameters."<<std::endl;
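
The new header refers to ValueType, IndexType, LabelType, Weights and to the
members gt_, gms_ and weights_ without declaring them, so it relies on the
Dataset<GM,LOSS> base from dataset.hxx (not touched by this commit) to provide
them. Below is a minimal sketch of the declarations TestDataset1 would need for
the constructor to compile, assuming the base exposes those typedefs and
protected members under exactly these names; the real dataset.hxx may differ.
Note also that if weights_ lives in the base class it cannot be initialized in
the derived constructor's initializer list, as the diff does with
": weights_(Weights(1))", and would instead have to be assigned inside the
constructor body.

    // Sketch only: assumed declarations, mirroring how testdatasets.hxx uses
    // the names; the actual Dataset base may spell them differently.
    template<class GM, class LOSS>
    class TestDataset1 : public Dataset<GM,LOSS> {
    public:
       typedef typename GM::ValueType ValueType;
       typedef typename GM::IndexType IndexType;
       typedef typename GM::LabelType LabelType;
       typedef typename Dataset<GM,LOSS>::Weights Weights; // assumed base typedef

       TestDataset1(size_t numModels = 10);

    protected:
       // assumed protected storage inherited from Dataset<GM,LOSS>
       using Dataset<GM,LOSS>::gt_;      // ground-truth labeling
       using Dataset<GM,LOSS>::gms_;     // one graphical model per instance
       using Dataset<GM,LOSS>::weights_; // learnable weight vector
    };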

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/opengm.git


