[opengm] 211/386: merged

Ghislain Vaillant ghisvail-guest at moszumanska.debian.org
Wed Aug 31 08:37:51 UTC 2016


This is an automated email from the git hooks/post-receive script.

ghisvail-guest pushed a commit to branch debian/master
in repository opengm.

commit 652008f3bdfe09f6de3b06ca8004cc10cd45ccf0
Merge: 714e2af b939da9
Author: DerThorsten <thorsten.beier at iwr.uni-heidelberg.de>
Date:   Sun Jan 11 14:16:43 2015 +0100

    merged

 fubar/brown_horse.py                               |   2 +
 fubar/make_grid_potts_dset.py                      |   2 +-
 fubar/pascal_voc/convertPascalVOCNew.py            |   5 +-
 include/opengm/functions/learnable/lunary.hxx      |  14 ++-
 .../opengm/learning/dataset/editabledataset.hxx    |   4 +-
 include/opengm/learning/dataset/testdatasets.hxx   |  10 +-
 include/opengm/learning/gradient-accumulator.hxx   |  10 +-
 include/opengm/learning/loss/flexibleloss.hxx      |  87 +++++++++----
 include/opengm/learning/struct-max-margin.hxx      |   4 +-
 include/opengm/python/opengmpython.hxx             |   3 +-
 src/examples/unsorted-examples/inference_types.cxx |   9 +-
 src/interfaces/python/opengm/__init__.py           |   2 +-
 .../python/opengm/learning/CMakeLists.txt          |   2 +-
 src/interfaces/python/opengm/learning/__init__.py  | 138 +++++++++++----------
 src/interfaces/python/opengm/learning/learning.cxx |  30 ++---
 .../python/opengm/learning/pyDataset.cxx           |   6 +-
 .../python/opengm/learning/pyGridSearchLearner.cxx |   6 +-
 src/interfaces/python/opengm/learning/pyLoss.cxx   |  59 +++++----
 .../opengm/learning/pyMaxLikelihoodLearner.cxx     |   6 +-
 .../opengm/learning/pyStructMaxMarginLearner.cxx   |   4 +-
 .../python/opengm/learning/pyStructPerceptron.cxx  |   6 +-
 .../python/opengm/learning/pySubgradientSSVM.cxx   |   9 +-
 src/tutorials/c++/basics/doMinSumInference.cxx     |   2 +-
 src/tutorials/c++/basics/doSumProdInference.cxx    |   2 +-
 src/unittest/inference/test_messagepassing.cxx     |  20 +--
 25 files changed, 247 insertions(+), 195 deletions(-)

diff --cc fubar/brown_horse.py
index eb189de,7d720d7..d63c147
--- a/fubar/brown_horse.py
+++ b/fubar/brown_horse.py
@@@ -101,9 -101,9 +101,11 @@@ dataset,test_set = secondOrderImageData
  
  
  
 -learner =  learning.subgradientSSVM(dataset, learningRate=0.3, C=100, 
 -                                    learningMode='batch',maxIterations=40)
++
 +learner =  learning.subgradientSSVM(dataset, learningRate=0.05, C=100, 
 +                                    learningMode='workingSets',maxIterations=1000)
 +
+ 
  #learner = learning.structMaxMarginLearner(dataset, 0.1, 0.001, 0)
  
  
diff --cc fubar/make_grid_potts_dset.py
index 4c8f2c4,2ab93f0..192223e
--- a/fubar/make_grid_potts_dset.py
+++ b/fubar/make_grid_potts_dset.py
@@@ -56,11 -48,9 +56,11 @@@ def secondOrderImageDataset(imgs, gts, 
              tentative_test_set.append((img,gt))
  
  
-     dataset = learning.createDataset(numWeights=nWeights, loss='h')
+     dataset = learning.createDataset(numWeights=nWeights)
      weights = dataset.getWeights()
      uWeightIds = numpy.arange(nUnaryFeat ,dtype='uint64')
 +    if numberOfLabels != 2:
 +        uWeightIds = uWeightIds.reshape([numberOfLabels,-1])
      bWeightIds = numpy.arange(start=nUnaryFeat,stop=nWeights,dtype='uint64')
  
      def makeModel(img,gt):
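
The hunk above drops the loss keyword from learning.createDataset and reshapes the
unary weight ids into one row per label when the problem has more than two labels.
A minimal sketch of the resulting setup, using only the calls visible in this diff;
nUnaryFeat, nWeights and numberOfLabels are illustrative placeholders, not values
taken from the repository:

    import numpy
    from opengm import learning

    nUnaryFeat, nWeights, numberOfLabels = 12, 16, 3  # illustrative sizes only

    # the loss keyword is no longer passed here; the learners below now
    # expect the 'flexible' loss (see the __init__.py hunk further down)
    dataset = learning.createDataset(numWeights=nWeights)
    weights = dataset.getWeights()

    uWeightIds = numpy.arange(nUnaryFeat, dtype='uint64')
    if numberOfLabels != 2:
        # one row of unary weight ids per label for multi-label problems
        uWeightIds = uWeightIds.reshape([numberOfLabels, -1])
    bWeightIds = numpy.arange(start=nUnaryFeat, stop=nWeights, dtype='uint64')
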
diff --cc src/interfaces/python/opengm/learning/__init__.py
index 2a5f59c,9224970..24a9217
--- a/src/interfaces/python/opengm/learning/__init__.py
+++ b/src/interfaces/python/opengm/learning/__init__.py
@@@ -144,19 -136,14 +151,14 @@@ def structPerceptron(dataset, learningM
  
  def subgradientSSVM(dataset, learningMode='batch',eps=1e-5, maxIterations=10000, stopLoss=0.0, learningRate=1.0, C=100.0):
  
- 
-     if dataset.__class__.lossType == 'hamming':
-         learnerCls = SubgradientSSVM_HammingLoss
-         learnerParamCls = SubgradientSSVM_HammingLossParameter
-         learningModeEnum = SubgradientSSVM_HammingLossParameter_LearningMode
-     elif dataset.__class__.lossType == 'generalized-hamming':
-         learnerCls = SubgradientSSVM_GeneralizedHammingLossParameter
-         learnerParamCls = SubgradientSSVM_GeneralizedHammingLoss
-         learningModeEnum = SubgradientSSVM_GeneralizedHammingLossParameter_LearningMode
+     assert dataset.__class__.lossType == 'flexible'
+     learnerCls = SubgradientSSVM_FlexibleLoss
+     learnerParamCls = SubgradientSSVM_FlexibleLossParameter
+     learningModeEnum = SubgradientSSVM_FlexibleLossParameter_LearningMode
  
      lm = None
 -    if learningMode not in ['online','batch']:
 -        raise RuntimeError("wrong learning mode, must be 'online' or 'batch' ")
 +    if learningMode not in ['online','batch','workingSets']:
 +        raise RuntimeError("wrong learning mode, must be 'online', 'batch' or 'workingSets' ")
  
      if learningMode == 'online':
          lm = learningModeEnum.online

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/opengm.git


