[opengm] 82/386: Introducing loss parameters
Ghislain Vaillant
ghisvail-guest at moszumanska.debian.org
Wed Aug 31 08:35:09 UTC 2016
This is an automated email from the git hooks/post-receive script.
ghisvail-guest pushed a commit to branch debian/master
in repository opengm.
commit a392e7e63b9f396f033c4560c4417b8e81d9c806
Author: Carsten Haubold <carstenhaubold at googlemail.com>
Date: Wed Dec 17 11:58:56 2014 +0100
Introducing loss parameters
---
include/opengm/learning/dataset/dataset.hxx | 29 +++++++++++----------
.../opengm/learning/dataset/editabledataset.hxx | 20 ++++++++++-----
.../learning/loss/generalized-hammingloss.hxx | 30 ++++++++--------------
include/opengm/learning/loss/hammingloss.hxx | 6 +++++
include/opengm/learning/loss/noloss.hxx | 6 +++++
.../python/opengm/learning/pyDataset.cxx | 10 +++++---
src/interfaces/python/opengm/learning/pyLoss.cxx | 21 ++++++++++++++-
.../learning/test_generalized_hammingloss.cxx | 22 +++++++---------
8 files changed, 87 insertions(+), 57 deletions(-)
diff --git a/include/opengm/learning/dataset/dataset.hxx b/include/opengm/learning/dataset/dataset.hxx
index 88bdf21..0add387 100644
--- a/include/opengm/learning/dataset/dataset.hxx
+++ b/include/opengm/learning/dataset/dataset.hxx
@@ -14,12 +14,13 @@ namespace opengm {
template<class GM, class LOSS=opengm::learning::NoLoss>
class Dataset{
public:
- typedef GM GMType;
- typedef GM GMWITHLOSS;
- typedef LOSS LossType;
- typedef typename GM::ValueType ValueType;
- typedef typename GM::IndexType IndexType;
- typedef typename GM::LabelType LabelType;
+ typedef GM GMType;
+ typedef GM GMWITHLOSS;
+ typedef LOSS LossType;
+ typedef typename LOSS::Parameter LossParameterType;
+ typedef typename GM::ValueType ValueType;
+ typedef typename GM::IndexType IndexType;
+ typedef typename GM::LabelType LabelType;
typedef opengm::learning::Weights<ValueType> Weights;
bool lockModel(const size_t i) { ++count_[i]; }
@@ -42,6 +43,7 @@ namespace opengm {
std::vector<bool> isCached_;
std::vector<GM> gms_;
std::vector<GMWITHLOSS> gmsWithLoss_;
+ std::vector<LossParameterType> lossParams_;
std::vector<std::vector<LabelType> > gts_;
Weights weights_;
@@ -52,21 +54,22 @@ namespace opengm {
template<class GM, class LOSS>
Dataset<GM, LOSS>::Dataset(size_t numInstances)
- : count_(std::vector<size_t>(numInstances)),
- isCached_(std::vector<bool>(numInstances)),
+ : count_(std::vector<size_t>(numInstances)),
+ isCached_(std::vector<bool>(numInstances)),
gms_(std::vector<GM>(numInstances)),
- gmsWithLoss_(std::vector<GMWITHLOSS>(numInstances)),
+ gmsWithLoss_(std::vector<GMWITHLOSS>(numInstances)),
gts_(std::vector<std::vector<LabelType> >(numInstances)),
- weights_(Weights(0))
+ weights_(Weights(0)),
+ lossParams_(std::vector<LossParameterType>(numInstances))
{
};
template<class GM, class LOSS>
void Dataset<GM, LOSS>::buildModelWithLoss(size_t i){
- gmsWithLoss_[i] = gms_[i];
- LOSS loss;
- loss.addLoss(gmsWithLoss_[i], gts_[i].begin());
+ gmsWithLoss_[i] = gms_[i];
+ LOSS loss(lossParams_[i]);
+ loss.addLoss(gmsWithLoss_[i], gts_[i].begin());
}
/*
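The hunk above changes the contract that every loss type must satisfy: besides loss() and addLoss(), a loss now has to expose a nested Parameter type and a constructor taking one, so that Dataset can store one Parameter per training instance and rebuild the loss-augmented model via LOSS loss(lossParams_[i]). A minimal sketch of a conforming loss follows; the WeightedLoss name and its single weight_ field are illustrative, not part of the commit:

    // Sketch of a loss type satisfying the new Dataset contract:
    // a nested Parameter plus a LOSS(const Parameter&) constructor.
    class WeightedLoss {
    public:
        class Parameter {
        public:
            Parameter() : weight_(1.0) {}
            double weight_;
        };

        WeightedLoss(const Parameter& param = Parameter()) : param_(param) {}

        // Count mismatches against ground truth, scaled by a per-instance weight.
        template<class IT1, class IT2>
        double loss(IT1 labelBegin, IT1 labelEnd, IT2 gtBegin, IT2 gtEnd) const {
            double l = 0.0;
            for(; labelBegin != labelEnd; ++labelBegin, ++gtBegin)
                if(*labelBegin != *gtBegin)
                    l += param_.weight_;
            return l;
        }
    private:
        Parameter param_;
    };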
diff --git a/include/opengm/learning/dataset/editabledataset.hxx b/include/opengm/learning/dataset/editabledataset.hxx
index 2d1130a..9df9b3f 100644
--- a/include/opengm/learning/dataset/editabledataset.hxx
+++ b/include/opengm/learning/dataset/editabledataset.hxx
@@ -19,6 +19,7 @@ namespace opengm {
typedef GM GMType;
typedef GM GMWITHLOSS;
typedef LOSS LossType;
+ typedef typename LOSS::Parameter LossParameterType;
typedef typename GM::ValueType ValueType;
typedef typename GM::IndexType IndexType;
typedef typename GM::LabelType LabelType;
@@ -26,41 +27,46 @@ namespace opengm {
typedef std::vector<LabelType> GTVector;
EditableDataset(size_t numInstances=0) : Dataset<GM, LOSS>(numInstances) {}
- EditableDataset(std::vector<GM>& gms, std::vector<GTVector >& gts);
+ EditableDataset(std::vector<GM>& gms, std::vector<GTVector >& gts, std::vector<LossParameterType>& lossParams);
- void setInstance(const size_t i, GM& gm, GTVector& gt);
- void pushBackInstance(GM& gm, GTVector& gt);
+ void setInstance(const size_t i, GM& gm, GTVector& gt, LossParameterType& p);
+ void pushBackInstance(GM& gm, GTVector& gt, LossParameterType& p);
void setWeights(Weights& w);
};
template<class GM, class LOSS>
EditableDataset<GM, LOSS>::EditableDataset(std::vector<GM>& gms,
- std::vector<GTVector >& gts)
+ std::vector<GTVector >& gts,
+ std::vector<LossParameterType>& lossParams)
: Dataset<GM, LOSS>(gms.size())
{
for(size_t i=0; i<gms.size(); ++i){
- setInstance(i, gms[i], gts[i]);
+ setInstance(i, gms[i], gts[i], lossParams[i]);
this->buildModelWithLoss(i);
}
}
template<class GM, class LOSS>
- void EditableDataset<GM, LOSS>::setInstance(const size_t i, GM& gm, GTVector& gt) {
+ void EditableDataset<GM, LOSS>::setInstance(const size_t i, GM& gm, GTVector& gt, LossParameterType& p) {
OPENGM_CHECK_OP(i, <, this->gms_.size(),"");
OPENGM_CHECK_OP(i, <, this->gts_.size(),"");
+ OPENGM_CHECK_OP(i, <, this->lossParams_.size(),"");
OPENGM_CHECK_OP(i, <, this->gmsWithLoss_.size(),"");
this->gms_[i] = gm;
this->gts_[i] = gt;
+ this->lossParams_[i] = p;
this->buildModelWithLoss(i);
}
template<class GM, class LOSS>
- void EditableDataset<GM, LOSS>::pushBackInstance(GM& gm, GTVector& gt) {
+ void EditableDataset<GM, LOSS>::pushBackInstance(GM& gm, GTVector& gt, LossParameterType& p) {
this->gms_.push_back(gm);
this->gts_.push_back(gt);
+ this->lossParams_.push_back(p);
this->gmsWithLoss_.resize(this->gts_.size());
this->buildModelWithLoss(this->gts_.size()-1);
OPENGM_CHECK_OP(this->gms_.size(), ==, this->gts_.size(),"");
+ OPENGM_CHECK_OP(this->gms_.size(), ==, this->lossParams_.size(),"");
OPENGM_CHECK_OP(this->gms_.size(), ==, this->gmsWithLoss_.size(),"");
}
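With the extra argument in place, populating a dataset looks roughly like this sketch (GM, gm, numNodes and numLabels are placeholders for the user's model type and sizes, not names from the commit):

    typedef opengm::learning::GeneralizedHammingLoss Loss;
    opengm::datasets::EditableDataset<GM, Loss> dataset;

    // One loss parameter object per training instance.
    Loss::Parameter lossParam;
    lossParam.nodeLossMultiplier_.assign(numNodes, 1.0);
    lossParam.labelLossMultiplier_.assign(numLabels, 1.0);

    std::vector<GM::LabelType> groundTruth(numNodes, 0);
    // Also rebuilds the loss-augmented model for this instance.
    dataset.pushBackInstance(gm, groundTruth, lossParam);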
diff --git a/include/opengm/learning/loss/generalized-hammingloss.hxx b/include/opengm/learning/loss/generalized-hammingloss.hxx
index dc7621c..b17c383 100644
--- a/include/opengm/learning/loss/generalized-hammingloss.hxx
+++ b/include/opengm/learning/loss/generalized-hammingloss.hxx
@@ -14,11 +14,14 @@ namespace learning {
**/
class GeneralizedHammingLoss{
public:
- template<class IT1, class IT2>
- GeneralizedHammingLoss(IT1 nodeLossMultiplierBegin,
- IT1 nodeLossMultiplierEnd,
- IT2 labelLossMultiplierBegin,
- IT2 labelLossMultiplierEnd);
+ class Parameter{
+ public:
+ std::vector<double> nodeLossMultiplier_;
+ std::vector<double> labelLossMultiplier_;
+ };
+
+public:
+ GeneralizedHammingLoss(const Parameter& param = Parameter()) : param_(param){}
template<class IT1, class IT2>
double loss(IT1 labelBegin, IT1 labelEnd, IT2 GTBegin,IT2 GTEnd) const;
@@ -27,21 +30,10 @@ public:
void addLoss(GM& gm, IT GTBegin) const;
private:
- std::vector<double> nodeLossMultiplier_;
- std::vector<double> labelLossMultiplier_;
+ Parameter param_;
};
template<class IT1, class IT2>
-GeneralizedHammingLoss::GeneralizedHammingLoss(IT1 nodeLossMultiplierBegin,
- IT1 nodeLossMultiplierEnd,
- IT2 labelLossMultiplierBegin,
- IT2 labelLossMultiplierEnd):
- nodeLossMultiplier_(nodeLossMultiplierBegin, nodeLossMultiplierEnd),
- labelLossMultiplier_(labelLossMultiplierBegin, labelLossMultiplierEnd)
-{
-}
-
-template<class IT1, class IT2>
double GeneralizedHammingLoss::loss(IT1 labelBegin, const IT1 labelEnd, IT2 GTBegin, const IT2 GTEnd) const
{
double loss = 0.0;
@@ -49,7 +41,7 @@ double GeneralizedHammingLoss::loss(IT1 labelBegin, const IT1 labelEnd, IT2 GTBe
for(; labelBegin!= labelEnd; ++labelBegin, ++GTBegin, ++nodeIndex){
if(*labelBegin != *GTBegin){
- loss += nodeLossMultiplier_[nodeIndex] * labelLossMultiplier_[*labelBegin];
+ loss += param_.nodeLossMultiplier_[nodeIndex] * param_.labelLossMultiplier_[*labelBegin];
}
}
return loss;
@@ -64,7 +56,7 @@ void GeneralizedHammingLoss::addLoss(GM& gm, IT gt) const
opengm::ExplicitFunction<typename GM::ValueType,typename GM::IndexType, typename GM::LabelType> f(&numL, &(numL)+1, 0);
for(typename GM::LabelType l = 0; l < numL; ++l){
- f(l) = - nodeLossMultiplier_[i] * labelLossMultiplier_[l];
+ f(l) = - param_.nodeLossMultiplier_[i] * param_.labelLossMultiplier_[l];
}
f(*gt) = 0;
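The loss computation itself is unchanged: a label mismatch at node i costs nodeLossMultiplier_[i] * labelLossMultiplier_[predicted label]; only the storage moved into Parameter. A small worked sketch, with labels and ground truth chosen purely for illustration:

    opengm::learning::GeneralizedHammingLoss::Parameter p;
    double nodeMult[]  = {5.0, 6.0, 7.0, 8.0};   // per-node weights
    double labelMult[] = {2.0, 1.0, 0.5};        // per-label weights
    p.nodeLossMultiplier_.assign(nodeMult, nodeMult + 4);
    p.labelLossMultiplier_.assign(labelMult, labelMult + 3);

    opengm::learning::GeneralizedHammingLoss loss(p);
    // labels {0,1,2,2} vs ground truth {1,1,2,0}: nodes 0 and 3 disagree,
    // so loss = 5.0 * labelMult[0] + 8.0 * labelMult[2]
    //         = 5.0 * 2.0 + 8.0 * 0.5 = 14.0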
diff --git a/include/opengm/learning/loss/hammingloss.hxx b/include/opengm/learning/loss/hammingloss.hxx
index 64e1434..fcab161 100644
--- a/include/opengm/learning/loss/hammingloss.hxx
+++ b/include/opengm/learning/loss/hammingloss.hxx
@@ -7,6 +7,11 @@ namespace opengm {
namespace learning {
class HammingLoss{
public:
+ class Parameter{
+ };
+
+ public:
+ HammingLoss(const Parameter& param = Parameter()) : param_(param){}
template<class IT1, class IT2>
double loss(IT1 labelBegin, IT1 labelEnd, IT2 GTBegin,IT2 GTEnd) const;
@@ -14,6 +19,7 @@ namespace opengm {
template<class GM, class IT>
void addLoss(GM& gm, IT GTBegin) const;
private:
+ Parameter param_;
};
template<class IT1, class IT2>
diff --git a/include/opengm/learning/loss/noloss.hxx b/include/opengm/learning/loss/noloss.hxx
index 19fdd40..ae1ec73 100644
--- a/include/opengm/learning/loss/noloss.hxx
+++ b/include/opengm/learning/loss/noloss.hxx
@@ -7,6 +7,11 @@ namespace opengm {
namespace learning {
class NoLoss{
public:
+ class Parameter{
+ };
+
+ public:
+ NoLoss(const Parameter& param = Parameter()) : param_(param){}
template<class IT1, class IT2>
double loss(IT1 labelBegin, IT1 labelEnd, IT2 GTBegin,IT2 GTEnd) const;
@@ -14,6 +19,7 @@ namespace opengm {
template<class GM, class IT>
void addLoss(GM& gm, IT GTBegin) const;
private:
+ Parameter param_;
};
template<class IT1, class IT2>
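HammingLoss and NoLoss carry no settings, so their Parameter classes are empty; they exist purely to keep the constructor interface uniform, which lets generic code such as Dataset::buildModelWithLoss treat every loss alike. A sketch (the makeLoss helper is illustrative, not part of the commit):

    // Any LOSS with the new interface can be constructed the same way.
    template<class LOSS>
    LOSS makeLoss(const typename LOSS::Parameter& p) {
        return LOSS(p);
    }

    // Works for losses with and without real settings:
    opengm::learning::HammingLoss h =
        makeLoss<opengm::learning::HammingLoss>(
            opengm::learning::HammingLoss::Parameter());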
diff --git a/src/interfaces/python/opengm/learning/pyDataset.cxx b/src/interfaces/python/opengm/learning/pyDataset.cxx
index 5981d4c..6a27422 100644
--- a/src/interfaces/python/opengm/learning/pyDataset.cxx
+++ b/src/interfaces/python/opengm/learning/pyDataset.cxx
@@ -19,17 +19,19 @@ template<class GM, class LOSS>
void pySetInstance(opengm::datasets::EditableDataset<GM, LOSS>& ds,
const size_t i,
GM& gm,
- const opengm::python::NumpyView<typename GM::LabelType,1>& gt) {
+ const opengm::python::NumpyView<typename GM::LabelType,1>& gt,
+ typename LOSS::Parameter& param) {
std::vector<typename GM::LabelType> gt_vector(gt.begin(), gt.end());
- ds.setInstance(i, gm, gt_vector);
+ ds.setInstance(i, gm, gt_vector, param);
}
template<class GM, class LOSS>
void pyPushBackInstance(opengm::datasets::EditableDataset<GM,LOSS>& ds,
GM& gm,
- const opengm::python::NumpyView<typename GM::LabelType,1>& gt) {
+ const opengm::python::NumpyView<typename GM::LabelType,1>& gt,
+ typename LOSS::Parameter& param) {
std::vector<typename GM::LabelType> gt_vector(gt.begin(), gt.end());
- ds.pushBackInstance(gm, gt_vector);
+ ds.pushBackInstance(gm, gt_vector, param);
}
template<class GM, class LOSS>
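The wrappers above only convert the NumPy ground truth to a std::vector and forward the new parameter argument; the export itself is not part of this hunk, but would plausibly register them along these lines (a hedged sketch, names assumed):

    // Hypothetical boost::python export for the wrappers above.
    template<class GM, class LOSS>
    void export_dataset_methods() {
        using namespace boost::python;
        class_<opengm::datasets::EditableDataset<GM, LOSS> >("Dataset")
            .def("setInstance", &pySetInstance<GM, LOSS>)
            .def("pushBackInstance", &pyPushBackInstance<GM, LOSS>)
        ;
    }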
diff --git a/src/interfaces/python/opengm/learning/pyLoss.cxx b/src/interfaces/python/opengm/learning/pyLoss.cxx
index f06721c..85c256e 100644
--- a/src/interfaces/python/opengm/learning/pyLoss.cxx
+++ b/src/interfaces/python/opengm/learning/pyLoss.cxx
@@ -13,6 +13,18 @@ using namespace boost::python;
namespace opengm{
+void pySetNodeLossMultiplier(opengm::learning::GeneralizedHammingLoss::Parameter& p,
+ const opengm::python::NumpyView<double,1>& m)
+{
+ p.nodeLossMultiplier_ = std::vector<double>(m.begin(), m.end());
+}
+
+void pySetLabelLossMultiplier(opengm::learning::GeneralizedHammingLoss::Parameter& p,
+ const opengm::python::NumpyView<double,1>& m)
+{
+ p.labelLossMultiplier_ = std::vector<double>(m.begin(), m.end());
+}
+
template <class GM>
void export_loss(){
typedef typename std::vector<typename GM::LabelType>::const_iterator Literator;
@@ -21,6 +33,8 @@ void export_loss(){
typedef opengm::learning::NoLoss PyNoLoss;
typedef opengm::learning::GeneralizedHammingLoss PyGeneralizedHammingLoss;
+ typedef opengm::learning::GeneralizedHammingLoss::Parameter PyGeneralizedHammingLossParameter;
+
class_<PyHammingLoss >("HammingLoss")
.def("loss", &PyHammingLoss::loss<Literator,Literator>)
.def("addLoss", &PyHammingLoss::addLoss<GM, Literator>)
@@ -31,10 +45,15 @@ void export_loss(){
.def("addLoss", &PyNoLoss::addLoss<GM, Literator>)
;
- class_<PyGeneralizedHammingLoss >("GeneralizedHammingLoss", init<Niterator,Niterator,Literator,Literator>())
+ class_<PyGeneralizedHammingLoss >("GeneralizedHammingLoss", init<PyGeneralizedHammingLossParameter>())
.def("loss", &PyGeneralizedHammingLoss::loss<Literator,Literator>)
.def("addLoss", &PyGeneralizedHammingLoss::addLoss<GM, Literator>)
;
+
+ class_<PyGeneralizedHammingLossParameter>("GeneralizedHammingLossParameter")
+ .def("setNodeLossMultiplier", &pySetNodeLossMultiplier)
+ .def("setLabelLossMultiplier", &pySetLabelLossMultiplier)
+ ;
}
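Seen from C++, the two setter functions and the init<Parameter>() constructor amount to the following (array contents are arbitrary):

    double m[] = {1.0, 2.0, 3.0};
    opengm::learning::GeneralizedHammingLoss::Parameter param;
    // pySetNodeLossMultiplier / pySetLabelLossMultiplier copy a 1-D NumPy
    // array into these vectors; in plain C++ that is just:
    param.nodeLossMultiplier_  = std::vector<double>(m, m + 3);
    param.labelLossMultiplier_ = std::vector<double>(m, m + 3);
    // What init<PyGeneralizedHammingLossParameter>() exposes to Python:
    opengm::learning::GeneralizedHammingLoss loss(param);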
diff --git a/src/unittest/learning/test_generalized_hammingloss.cxx b/src/unittest/learning/test_generalized_hammingloss.cxx
index 44e1878..80a26e2 100644
--- a/src/unittest/learning/test_generalized_hammingloss.cxx
+++ b/src/unittest/learning/test_generalized_hammingloss.cxx
@@ -17,22 +17,18 @@ typedef opengm::GraphicalModel<ValueType,opengm::Adder, FunctionListType, opengm
int main() {
- std::vector<double> label_loss_multipliers;
- label_loss_multipliers.push_back(2.0);
- label_loss_multipliers.push_back(1.0);
- label_loss_multipliers.push_back(0.5);
+ opengm::learning::GeneralizedHammingLoss::Parameter param;
+ param.labelLossMultiplier_.push_back(2.0);
+ param.labelLossMultiplier_.push_back(1.0);
+ param.labelLossMultiplier_.push_back(0.5);
- std::vector<double> node_loss_multipliers;
- node_loss_multipliers.push_back(5.0);
- node_loss_multipliers.push_back(6.0);
- node_loss_multipliers.push_back(7.0);
- node_loss_multipliers.push_back(8.0);
+ param.nodeLossMultiplier_.push_back(5.0);
+ param.nodeLossMultiplier_.push_back(6.0);
+ param.nodeLossMultiplier_.push_back(7.0);
+ param.nodeLossMultiplier_.push_back(8.0);
// create loss
- opengm::learning::GeneralizedHammingLoss loss(node_loss_multipliers.begin(),
- node_loss_multipliers.end(),
- label_loss_multipliers.begin(),
- label_loss_multipliers.end());
+ opengm::learning::GeneralizedHammingLoss loss(param);
// evaluate for a test point
std::vector<size_t> labels;
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/opengm.git