[opengm] 29/386: add second test-dataset (with 3 parameters) and add it to gridsearch test
Ghislain Vaillant
ghisvail-guest at moszumanska.debian.org
Wed Aug 31 08:35:01 UTC 2016
This is an automated email from the git hooks/post-receive script.
ghisvail-guest pushed a commit to branch debian/master
in repository opengm.
commit 58a12e861900580db3693ebf406669f51b1b6b38
Author: joergkappes <kappes at math.uni-heidelberg.de>
Date: Thu Nov 20 11:34:20 2014 +0100
add second test-dataset (with 3 parameters) and add it to gridsearch test
---
.../opengm/functions/learnable/sum_of_experts.hxx | 15 ++--
include/opengm/learning/dataset/testdataset2.hxx | 99 ++++++++++++++++++++++
src/unittest/test_gridsearch_learner.cxx | 49 ++++++++---
3 files changed, 142 insertions(+), 21 deletions(-)
diff --git a/include/opengm/functions/learnable/sum_of_experts.hxx b/include/opengm/functions/learnable/sum_of_experts.hxx
index c69300b..9b0805c 100644
--- a/include/opengm/functions/learnable/sum_of_experts.hxx
+++ b/include/opengm/functions/learnable/sum_of_experts.hxx
@@ -35,9 +35,9 @@ public:
SumOfExperts();
SumOfExperts(
const std::vector<L>& shape,
- const Parameters<double,size_t>& parameters,
+ const Parameters<T,I>& parameters,
const std::vector<size_t>& parameterIDs,
- const std::vector<opengm::marray::Marray<T> >& feat
+ const std::vector<marray::Marray<T> >& feat
);
L shape(const size_t) const;
@@ -59,7 +59,7 @@ protected:
const Parameters<T,I>* parameters_;
std::vector<L> shape_;
std::vector<size_t> parameterIDs_;
- std::vector<opengm::marray::Marray<T> > feat_;
+ std::vector<marray::Marray<T> > feat_;
friend class opengm::FunctionSerialization<opengm::functions::learnable::SumOfExperts<T, I, L> >;
};
@@ -72,7 +72,7 @@ SumOfExperts<T, I, L>::SumOfExperts
const std::vector<L>& shape,
const Parameters<T,I>& parameters,
const std::vector<size_t>& parameterIDs,
- const std::vector<opengm::marray::Marray<T> >& feat
+ const std::vector<marray::Marray<T> >& feat
)
: shape_(shape), parameters_(&parameters), parameterIDs_(parameterIDs),feat_(feat)
{
@@ -82,9 +82,8 @@ SumOfExperts<T, I, L>::SumOfExperts
template <class T, class I, class L>
inline
-SumOfExperts<T, I, L>::SumOfExperts
-( )
- : shape_(std::vector<L>(0)), parameterIDs_(std::vector<size_t>(0)), feat_(std::vector<opengm::marray::Marray<T> >(0))
+SumOfExperts<T, I, L>::SumOfExperts()
+ : shape_(std::vector<L>(0)), parameterIDs_(std::vector<size_t>(0)), feat_(std::vector<marray::Marray<T> >(0))
{
;
}
@@ -245,7 +244,7 @@ FunctionSerialization<opengm::functions::learnable::SumOfExperts<T, I, L> >::des
++indexInIterator;
}
//read features
- std::vector<opengm::marray::Marray<T,I,L> > feat(numW,opengm::marray::Marray<T,I,L>(shape.begin(),shape.end()));
+ std::vector<marray::Marray<T,L> > feat(numW,marray::Marray<T,L>(shape.begin(),shape.end()));
for(size_t i=0; i<numW;++i){
for(size_t j=0; j<size;++j){
feat[i](j)=*valueInIterator;
diff --git a/include/opengm/learning/dataset/testdataset2.hxx b/include/opengm/learning/dataset/testdataset2.hxx
new file mode 100644
index 0000000..4c71327
--- /dev/null
+++ b/include/opengm/learning/dataset/testdataset2.hxx
@@ -0,0 +1,99 @@
+#pragma once
+#ifndef OPENGM_TESTDATASET2_HXX
+#define OPENGM_TESTDATASET2_HXX
+
+#include <vector>
+#include <cstdlib>
+
+#include <opengm/functions/learnable/lpotts.hxx>
+#include <opengm/functions/learnable/sum_of_experts.hxx>
+
+namespace opengm {
+ namespace datasets{
+
+ template<class GM>
+ class TestDataset2{
+ public:
+ typedef GM GMType;
+ typedef typename GM::ValueType ValueType;
+ typedef typename GM::IndexType IndexType;
+ typedef typename GM::LabelType LabelType;
+ typedef opengm::Parameters<ValueType,IndexType> ModelParameters;
+
+ GM& getModel(const size_t i) { return gms_[i]; }
+ const std::vector<LabelType>& getGT(const size_t i) { return gt_; }
+ ModelParameters& getModelParameters() { return modelParameters_; }
+ size_t getNumberOfParameters() { return 3; }
+ size_t getNumberOfModels() { return gms_.size(); }
+
+ TestDataset2(size_t numModels=4);
+
+ private:
+ std::vector<GM> gms_;
+ std::vector<LabelType> gt_;
+ ModelParameters modelParameters_;
+ };
+
+
+
+ template<class GM>
+ TestDataset2<GM>::TestDataset2(size_t numModels)
+ : modelParameters_(ModelParameters(3))
+ {
+ LabelType numberOfLabels = 2;
+ gt_.resize(64*64,0);
+ for(size_t i=32*64; i<64*64; ++i){
+ gt_[i] = 1;
+ }
+ gms_.resize(numModels);
+ for(size_t m=0; m<numModels; ++m){
+ std::srand(m);
+ for (int j = 0; j < 64*64; j++)
+ gms_[m].addVariable(2);
+ for(size_t y = 0; y < 64; ++y){
+ for(size_t x = 0; x < 64; ++x) {
+ // function
+ const size_t numExperts = 2;
+ const std::vector<size_t> shape(1,numberOfLabels);
+ std::vector<marray::Marray<ValueType> > feat(numExperts,marray::Marray<ValueType>(shape.begin(), shape.end()));
+ ValueType val0 = (double)(gt_[y*64+x]) + (double)(std::rand()) / (double) (RAND_MAX) * 0.75 ;
+ feat[0](0) = std::fabs(val0-0);
+ feat[0](1) = std::fabs(val0-1);
+ ValueType val1 = (double)(gt_[y*64+x]) + (double)(std::rand()) / (double) (RAND_MAX) * 1.5 ;
+ feat[1](0) = std::fabs(val1-0);
+ feat[1](1) = std::fabs(val1-1);
+ std::vector<size_t> wID(2);
+ wID[0]=1; wID[1]=2;
+ opengm::functions::learnable::SumOfExperts<ValueType,IndexType,LabelType> f(shape,modelParameters_, wID, feat);
+ typename GM::FunctionIdentifier fid = gms_[m].addFunction(f);
+
+ // factor
+ size_t variableIndices[] = {y*64+x};
+ gms_[m].addFactor(fid, variableIndices, variableIndices + 1);
+ }
+ }
+
+ opengm::functions::learnable::LPotts<ValueType,IndexType,LabelType> f(modelParameters_,2,std::vector<size_t>(1,0),std::vector<ValueType>(1,1));
+ typename GM::FunctionIdentifier fid = gms_[m].addFunction(f);
+ for(size_t y = 0; y < 64; ++y){
+ for(size_t x = 0; x < 64; ++x) {
+ if(x + 1 < 64) { // (x, y) -- (x + 1, y)
+ size_t variableIndices[] = {y*64+x, y*64+x+1};
+ //sort(variableIndices, variableIndices + 2);
+ gms_[m].addFactor(fid, variableIndices, variableIndices + 2);
+ }
+ if(y + 1 < 64) { // (x, y) -- (x, y + 1)
+ size_t variableIndices[] = {y*64+x, (y+1)*64+x};
+ //sort(variableIndices, variableIndices + 2);
+ gms_[m].addFactor(fid, variableIndices, variableIndices + 2);
+ }
+ }
+ }
+ }
+
+ };
+
+ }
+} // namespace opengm
+
+#endif
diff --git a/src/unittest/test_gridsearch_learner.cxx b/src/unittest/test_gridsearch_learner.cxx
index f457d6f..653a0e9 100644
--- a/src/unittest/test_gridsearch_learner.cxx
+++ b/src/unittest/test_gridsearch_learner.cxx
@@ -9,18 +9,21 @@
#include <opengm/utilities/metaprogramming.hxx>
#include <opengm/functions/learnable/lpotts.hxx>
+#include <opengm/functions/learnable/sum_of_experts.hxx>
#include <opengm/learning/gridsearch-learning.hxx>
#include <opengm/learning/loss/hammingloss.hxx>
#include <opengm/learning/dataset/testdataset.hxx>
+#include <opengm/learning/dataset/testdataset2.hxx>
//*************************************
typedef double ValueType;
typedef size_t IndexType;
typedef size_t LabelType;
-typedef opengm::meta::TypeListGenerator<opengm::ExplicitFunction<ValueType,IndexType,LabelType>, opengm::functions::learnable::LPotts<ValueType,IndexType,LabelType> >::type FunctionListType;
+typedef opengm::meta::TypeListGenerator<opengm::ExplicitFunction<ValueType,IndexType,LabelType>, opengm::functions::learnable::LPotts<ValueType,IndexType,LabelType>, opengm::functions::learnable::SumOfExperts<ValueType,IndexType,LabelType> >::type FunctionListType;
typedef opengm::GraphicalModel<ValueType,opengm::Adder, FunctionListType, opengm::DiscreteSpace<IndexType,LabelType> > GM;
-typedef opengm::datasets::TestDataset<GM> DS;
+typedef opengm::datasets::TestDataset<GM> DS;
+typedef opengm::datasets::TestDataset2<GM> DS2;
typedef opengm::learning::HammingLoss LOSS;
typedef opengm::ICM<GM,opengm::Minimizer> INF;
@@ -30,18 +33,38 @@ typedef opengm::ICM<GM,opengm::Minimizer> INF;
int main() {
std::cout << " Includes are fine :-) " << std::endl;
- DS dataset;
- std::cout << "Dataset includes " << dataset.getNumberOfModels() << " instances and has " << dataset.getNumberOfParameters() << " parameters."<<std::endl;
+ {
+ DS dataset;
+ std::cout << "Dataset includes " << dataset.getNumberOfModels() << " instances and has " << dataset.getNumberOfParameters() << " parameters."<<std::endl;
+
+
+ opengm::learning::GridSearchLearner<DS,LOSS>::Parameter para;
+ para.parameterUpperbound_.resize(1,1);
+ para.parameterLowerbound_.resize(1,0);
+ para.testingPoints_.resize(1,10);
+ opengm::learning::GridSearchLearner<DS,LOSS> learner(dataset,para);
+
+
+ INF::Parameter infPara;
+ learner.learn<INF>(infPara);
+
+ }
+ {
+ DS2 dataset;
+ std::cout << "Dataset includes " << dataset.getNumberOfModels() << " instances and has " << dataset.getNumberOfParameters() << " parameters."<<std::endl;
+
+
+ opengm::learning::GridSearchLearner<DS2,LOSS>::Parameter para;
+ para.parameterUpperbound_.resize(3,1);
+ para.parameterLowerbound_.resize(3,0);
+ para.testingPoints_.resize(3,5);
+ opengm::learning::GridSearchLearner<DS2,LOSS> learner(dataset,para);
+
+
+ INF::Parameter infPara;
+ learner.learn<INF>(infPara);
+ }
- opengm::learning::GridSearchLearner<DS,LOSS>::Parameter para;
- para.parameterUpperbound_.resize(1,1);
- para.parameterLowerbound_.resize(1,0);
- para.testingPoints_.resize(1,10);
- opengm::learning::GridSearchLearner<DS,LOSS> learner(dataset,para);
-
-
- INF::Parameter infPara;
- learner.learn<INF>(infPara);
}
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/opengm.git
More information about the debian-science-commits
mailing list