[opengm] 252/386: removed weight regularizer from ds (stupid idea of me to put it there)

Ghislain Vaillant ghisvail-guest at moszumanska.debian.org
Wed Aug 31 08:38:04 UTC 2016


This is an automated email from the git hooks/post-receive script.

ghisvail-guest pushed a commit to branch debian/master
in repository opengm.

commit 2ba1371ee0e19735fd712bb76b3dac678b871beb
Author: DerThorsten <thorsten.beier at iwr.uni-heidelberg.de>
Date:   Fri Jan 16 11:08:15 2015 +0100

    removed weight regularizer from ds (stupid idea of me to put it there)
---
 fubar/simple_sp.py                                  |  9 +++++----
 include/opengm/learning/dataset/dataset.hxx         | 12 +++---------
 include/opengm/learning/dataset/editabledataset.hxx | 14 ++------------
 include/opengm/learning/loss/flexibleloss.hxx       |  6 +++++-
 4 files changed, 15 insertions(+), 26 deletions(-)

diff --git a/fubar/simple_sp.py b/fubar/simple_sp.py
index af7cad8..280f70e 100644
--- a/fubar/simple_sp.py
+++ b/fubar/simple_sp.py
@@ -124,10 +124,11 @@ fBinary = [
 dataset,test_set = superpixelDataset(imgs=imgs,sps=sps, gts=gts, numberOfLabels=3, 
                                           fUnary=fUnary, fBinary=fBinary, 
                                           addConstFeature=True)
-
-
-ogm_ds.save("simple_dataset", 'simple_')
-
+if False :
+    dataset.save("simple_dataset", 'simple_')
+if True :
+    dataset = learning.createDataset(0,  numInstances=0)
+    dataset.load("simple_dataset", 'simple_')
 if True:
 
     learner =  learning.subgradientSSVM(dataset, learningRate=0.1, C=100, 
diff --git a/include/opengm/learning/dataset/dataset.hxx b/include/opengm/learning/dataset/dataset.hxx
index 142a6fe..e315100 100644
--- a/include/opengm/learning/dataset/dataset.hxx
+++ b/include/opengm/learning/dataset/dataset.hxx
@@ -59,7 +59,6 @@ namespace opengm {
 
         typedef opengm::learning::Weights<ValueType> Weights;
         typedef opengm::learning::WeightConstraints<ValueType> WeightConstraintsType;
-        typedef opengm::learning::WeightRegularizer<ValueType> WeightRegularizerType;
 
 
         bool                          lockModel(const size_t i)               { ++count_[i]; }
@@ -84,8 +83,7 @@ namespace opengm {
 
         Dataset(size_t numInstances);
 
-        Dataset(const Weights & weights = Weights(),const WeightConstraintsType & weightConstraints = WeightConstraintsType(),
-                const WeightRegularizerType & weightRegularizer = WeightRegularizerType(),size_t numInstances=0);
+        Dataset(const Weights & weights = Weights(),const WeightConstraintsType & weightConstraints = WeightConstraintsType(),size_t numInstances=0);
 
         //void loadAll(std::string path,std::string prefix); 
 
@@ -104,7 +102,6 @@ namespace opengm {
         std::vector<std::vector<LabelType> > gts_;
         Weights weights_;
         WeightConstraintsType weightConstraints_;
-        WeightRegularizerType weightRegularizer_;
 
 
         void buildModelWithLoss(size_t i);
@@ -120,8 +117,7 @@ namespace opengm {
         lossParams_(std::vector<LossParameterType>(numInstances)),
         gts_(std::vector<std::vector<LabelType> >(numInstances)),
         weights_(0),
-        weightConstraints_(),
-        weightRegularizer_()
+        weightConstraints_()
     {
     }
 
@@ -129,7 +125,6 @@ namespace opengm {
     Dataset<GM, LOSS, LOSS_GM>::Dataset(
         const Weights & weights, 
         const WeightConstraintsType & weightConstraints,
-        const WeightRegularizerType & weightRegularizer,
         size_t numInstances
     ):  count_(std::vector<size_t>(numInstances)),
         isCached_(std::vector<bool>(numInstances)),
@@ -138,8 +133,7 @@ namespace opengm {
         lossParams_(std::vector<LossParameterType>(numInstances)),
         gts_(std::vector<std::vector<LabelType> >(numInstances)),
         weights_(weights),
-        weightConstraints_(weightConstraints),
-        weightRegularizer_(weightRegularizer)
+        weightConstraints_(weightConstraints)
     {
     }
 
diff --git a/include/opengm/learning/dataset/editabledataset.hxx b/include/opengm/learning/dataset/editabledataset.hxx
index 178e0e0..1c940b9 100644
--- a/include/opengm/learning/dataset/editabledataset.hxx
+++ b/include/opengm/learning/dataset/editabledataset.hxx
@@ -38,16 +38,14 @@ namespace datasets{
 
         typedef opengm::learning::Weights<ValueType> Weights;
         typedef opengm::learning::WeightConstraints<ValueType> WeightConstraintsType;
-        typedef opengm::learning::WeightRegularizer<ValueType> WeightRegularizerType;
 
         typedef std::vector<LabelType> GTVector;
 
         EditableDataset(size_t numInstances) : Dataset<GM, LOSS,LOSS_GM>(numInstances) {}
         EditableDataset(std::vector<GM>& gms, std::vector<GTVector >& gts, std::vector<LossParameterType>& lossParams);
 
-        EditableDataset(const Weights & weights = Weights(),const WeightConstraintsType & weightConstraints = WeightConstraintsType(),
-                        const WeightRegularizerType & weightRegularizer = WeightRegularizerType(),size_t numInstances=0)
-        :   Dataset<GM, LOSS, LOSS_GM>(weights, weightConstraints, weightRegularizer, numInstances){
+        EditableDataset(const Weights & weights = Weights(),const WeightConstraintsType & weightConstraints = WeightConstraintsType(),size_t numInstances=0)
+        :   Dataset<GM, LOSS, LOSS_GM>(weights, weightConstraints, numInstances){
 
         }
 
@@ -60,7 +58,6 @@ namespace datasets{
 
         void setWeightConstraints(const WeightConstraintsType & weightConstraints);
 
-        void setWeightRegularizer(const WeightRegularizerType & weightRegularizer);
     };
 
     template<class GM, class LOSS, class LOSS_GM>
@@ -142,13 +139,6 @@ namespace datasets{
         this->weightConstraints_ = weightConstraints;
     }
 
-    template<class GM, class LOSS, class LOSS_GM>
-    inline void EditableDataset<GM, LOSS, LOSS_GM>::setWeightRegularizer(
-        const WeightRegularizerType & weightRegularizer
-    ){
-        this->weightRegularizer_ = weightRegularizer;
-    }
-
 
 } // namespace datasets
 } // namespace opengm
diff --git a/include/opengm/learning/loss/flexibleloss.hxx b/include/opengm/learning/loss/flexibleloss.hxx
index a7c3210..2301dc2 100644
--- a/include/opengm/learning/loss/flexibleloss.hxx
+++ b/include/opengm/learning/loss/flexibleloss.hxx
@@ -131,6 +131,8 @@ inline void FlexibleLoss::Parameter::save(hid_t& groupHandle) const {
 }
 
 inline void FlexibleLoss::Parameter::load(const hid_t& groupHandle) {
+
+    std::cout<<"load loss type \n";
     std::vector<size_t> lossType;
     marray::hdf5::loadVec(groupHandle, "lossType", lossType);
     if(lossType[0] == size_t(Hamming)){
@@ -152,7 +154,7 @@ inline void FlexibleLoss::Parameter::load(const hid_t& groupHandle) {
         lossType_ = ConfMat;
     }
 
-
+    std::cout<<"load nodeLossMultiplier \n";
 
     if (H5Dopen(groupHandle, "nodeLossMultiplier", H5P_DEFAULT) >= 0) {
         marray::hdf5::loadVec(groupHandle, "nodeLossMultiplier", this->nodeLossMultiplier_);
@@ -161,6 +163,7 @@ inline void FlexibleLoss::Parameter::load(const hid_t& groupHandle) {
         std::cout << "nodeLossMultiplier of FlexibleLoss not found, setting default values" << std::endl;
     }
 
+    std::cout<<"load factorLossMultiplier \n";
     if (H5Dopen(groupHandle, "factorLossMultiplier", H5P_DEFAULT) >= 0) {
         marray::hdf5::loadVec(groupHandle, "factorLossMultiplier", this->factorMultipier_);
     } 
@@ -168,6 +171,7 @@ inline void FlexibleLoss::Parameter::load(const hid_t& groupHandle) {
         std::cout << "factorLossMultiplier of FlexibleLoss not found, setting default values" << std::endl;
     }
 
+    std::cout<<"load labelLossMultiplier \n";
     if (H5Dopen(groupHandle, "labelLossMultiplier", H5P_DEFAULT) >= 0) {
         marray::hdf5::loadVec(groupHandle, "labelLossMultiplier", this->labelLossMultiplier_);
     } 

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/opengm.git



More information about the debian-science-commits mailing list