[opengm] 50/386: add: unit test for dataset structure

Ghislain Vaillant ghisvail-guest at moszumanska.debian.org
Wed Aug 31 08:35:03 UTC 2016


This is an automated email from the git hooks/post-receive script.

ghisvail-guest pushed a commit to branch debian/master
in repository opengm.

commit d1d5fc0c82e0203ccc01696e692ee07f1e96425c
Author: Steffen-Wolf <Steffen.Wolf at stud.uni-heidelberg.de>
Date:   Mon Dec 15 16:22:16 2014 +0100

    add: unit test for dataset structure
---
 include/opengm/learning/dataset/dataset.hxx |  14 ++--
 src/unittest/learning/CMakeLists.txt        |   4 ++
 src/unittest/learning/test_dataset.cxx      | 103 ++++++++++++++++++++++++++++
 3 files changed, 114 insertions(+), 7 deletions(-)

diff --git a/include/opengm/learning/dataset/dataset.hxx b/include/opengm/learning/dataset/dataset.hxx
index 3a155fc..e3c6635 100644
--- a/include/opengm/learning/dataset/dataset.hxx
+++ b/include/opengm/learning/dataset/dataset.hxx
@@ -21,11 +21,11 @@ namespace opengm {
          typedef typename GM::LabelType LabelType; 
          typedef opengm::learning::Weights<ValueType> Weights;
 
- 	 bool                          lockModel(const size_t i)         { ++count_[i]; }
- 	 bool                          unlockModel(const size_t i)       { OPENGM_ASSERT(count_[i]>0); --count_[i]; }
+         bool                          lockModel(const size_t i)         { ++count_[i]; }
+         bool                          unlockModel(const size_t i)       { OPENGM_ASSERT(count_[i]>0); --count_[i]; }
          const GM&                     getModel(const size_t i)          { return gms_[i]; } 
          const GMWITHLOSS&             getModelWithLoss(const size_t i)  { return gmsWithLoss_[i]; }
-         const std::vector<LabelType>& getGT(const size_t i)             { return gt_; }
+         const std::vector<LabelType>& getGT(const size_t i)             { return gt_[i]; }
          Weights&                      getWeights()                      { return weights_; } 
          size_t                        getNumberOfWeights()              { return weights_.numberOfWeights(); }
          size_t                        getNumberOfModels()               { return gms_.size(); } 
@@ -34,14 +34,14 @@ namespace opengm {
          void loadAll(std::string path,std::string prefix);
 
       private:	
-	 std::vector<size_t> count_;
-	 std::vector<bool> isCached_;
+         std::vector<size_t> count_;
+         std::vector<bool> isCached_;
          std::vector<GM> gms_; 
-	 std::vector<GMWITHLOSS> gmsWithLoss_; 
+         std::vector<GMWITHLOSS> gmsWithLoss_; 
          std::vector<std::vector<LabelType> > gt_; 
          Weights weights_;
 
-   	 void buildModelWithLoss(size_t i);
+         void buildModelWithLoss(size_t i);
       };
       
 
diff --git a/src/unittest/learning/CMakeLists.txt b/src/unittest/learning/CMakeLists.txt
index 5cb5024..f2e7288 100644
--- a/src/unittest/learning/CMakeLists.txt
+++ b/src/unittest/learning/CMakeLists.txt
@@ -6,6 +6,10 @@ if(BUILD_TESTING)
       add_executable(test-dataset-io test_dataset_io.cxx ${headers})
       target_link_libraries(test-dataset-io ${HDF5_LIBRARIES})
       add_test(test-dataset-io ${CMAKE_CURRENT_BINARY_DIR}/test-dataset-io)
+
+      add_executable(test-dataset test_dataset.cxx ${headers})
+      target_link_libraries(test-dataset ${HDF5_LIBRARIES})
+      add_test(test-dataset ${CMAKE_CURRENT_BINARY_DIR}/test-dataset)
    endif()
   
 endif()
diff --git a/src/unittest/learning/test_dataset.cxx b/src/unittest/learning/test_dataset.cxx
new file mode 100644
index 0000000..60cbbc9
--- /dev/null
+++ b/src/unittest/learning/test_dataset.cxx
@@ -0,0 +1,103 @@
+#include <vector>
+
+#include <opengm/functions/explicit_function.hxx>
+#include <opengm/unittests/test.hxx>
+#include <opengm/graphicalmodel/graphicalmodel.hxx>
+#include <opengm/operations/adder.hxx>
+#include <opengm/operations/minimizer.hxx>
+#include <opengm/inference/icm.hxx>
+#include <opengm/utilities/metaprogramming.hxx>
+
+#include <opengm/functions/learnable/lpotts.hxx>
+#include <opengm/functions/learnable/sum_of_experts.hxx>
+#include <opengm/learning/dataset/testdataset.hxx>
+#include <opengm/learning/dataset/testdataset2.hxx>
+#include <opengm/learning/dataset/dataset_io.hxx>
+#include <opengm/learning/dataset/dataset.hxx>
+
+
+//*************************************
+typedef double ValueType;
+typedef size_t IndexType;
+typedef size_t LabelType; 
+typedef opengm::meta::TypeListGenerator<opengm::ExplicitFunction<ValueType,IndexType,LabelType>, opengm::functions::learnable::LPotts<ValueType,IndexType,LabelType>, opengm::functions::learnable::SumOfExperts<ValueType,IndexType,LabelType> >::type FunctionListType;
+typedef opengm::GraphicalModel<ValueType,opengm::Adder, FunctionListType, opengm::DiscreteSpace<IndexType,LabelType> > GM; 
+typedef opengm::datasets::TestDataset<GM>  DS1;
+typedef opengm::datasets::Dataset<GM>      DS;
+
+//*************************************
+
+template<class DatasetType>
+struct DatasetTest {
+
+   DatasetType dataset_;
+
+   DatasetTest(DatasetType data): dataset_(data) {}
+
+   void testInitialization() {
+      // create a new dataset
+      DatasetType dataset;
+   }
+
+   void callModelFunctions(){
+
+         std::cout << "calling Model functions:" << std::endl;
+         std::cout << "\tlocking all available Models" << std::endl;
+
+         for(size_t i = 0; i < dataset_.getNumberOfModels(); i++)
+         {
+            dataset_.lockModel(i);
+            dataset_.unlockModel(i);
+         }
+
+         std::cout << "\tgetModel with and without loss" << std::endl;
+         for(size_t i = 0; i < dataset_.getNumberOfModels(); i++)
+         {
+            OPENGM_TEST(dataset_.getModel(i).numberOfVariables() == dataset_.getModelWithLoss(i).numberOfVariables());
+            OPENGM_TEST(dataset_.getModel(i).numberOfFactors() <=  dataset_.getModelWithLoss(i).numberOfFactors());
+         }
+
+         std::cout << "\tgetGT" << std::endl;
+         for(size_t i = 0; i < dataset_.getNumberOfModels(); i++)
+         {
+            std::cout << dataset_.getGT(i).size() << std::endl;
+         }
+
+   }
+
+   void getInfo(){
+         std::cout << "Info of data size:" << std::endl;
+         std::cout << "\tnumberOfWeights\t" << dataset_.getNumberOfWeights() << std::endl;
+         std::cout << "\tnumberOfModels\t" << dataset_.getNumberOfModels() << std::endl;
+
+         const opengm::learning::Weights<ValueType>& weights = dataset_.getWeights();
+         std::cout << "Beginning of weight vector: ";
+         for(size_t i = 0; i < std::min(dataset_.getNumberOfWeights(), size_t(10)); i++)
+         {
+            std::cout << weights[i] << " ";
+         }
+         std::cout << std::endl;
+   }
+
+   void run() {
+      this->testInitialization();
+      this->getInfo();
+      this->callModelFunctions();
+   }
+};
+
+
+int main() {
+   std::cout << " Includes are fine :-) " << std::endl; 
+  
+   {
+      // initialize your data here
+      // you may eventually need to load it from a file
+      DS data;
+
+      //run tests on dataset
+      DatasetTest<DS> t(data);
+      t.run();
+   }
+
+}
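
For reference, a minimal sketch of how the corrected getGT(i) accessor from dataset.hxx is meant to be used per model; the populated dataset instance and the size check (one ground-truth label per variable) are illustrative assumptions, not part of this commit:

    // assumes a populated Dataset 'dataset' (e.g. the DS instance used in main above)
    for (size_t i = 0; i < dataset.getNumberOfModels(); ++i) {
       // getGT(i) now returns gt_[i], the ground-truth labeling of model i only
       const std::vector<LabelType>& gt = dataset.getGT(i);
       OPENGM_TEST(gt.size() == dataset.getModel(i).numberOfVariables());
    }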

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/opengm.git


