[med-svn] [pycorrfit] 02/06: Imported Upstream version 0.8.1

Alex Mestiashvili malex-guest at moszumanska.debian.org
Mon Jan 6 14:49:26 UTC 2014


This is an automated email from the git hooks/post-receive script.

malex-guest pushed a commit to branch master
in repository pycorrfit.

commit a4513d2c9c1ac972d5cb2f1e23b2b1e009c04e06
Author: Alexandre Mestiashvili <alex at biotec.tu-dresden.de>
Date:   Mon Jan 6 15:23:06 2014 +0100

    Imported Upstream version 0.8.1
---
 ChangeLog.txt                             |   8 +-
 MANIFEST.in                               |   4 +-
 PyCorrFit_doc.pdf                         | Bin 521911 -> 444211 bytes
 doc-src/Bibliography.bib                  | 242 ++++++------------
 doc-src/Images/SchwilleLogo.jpg           | Bin 14580 -> 0 bytes
 doc-src/Images/TU_Logo_SW.pdf             | Bin 4424 -> 0 bytes
 doc-src/PyCorrFit_doc.tex                 |  43 ++--
 doc-src/PyCorrFit_doc_content.tex         | 187 ++++++++------
 doc-src/PyCorrFit_doc_models.tex          | 273 ++++++++++----------
 src/PyCorrFit.py                          |  20 +-
 src/__init__.py                           |  36 ++-
 src/doc.py                                | 150 ++---------
 src/edclasses.py                          |  19 +-
 src/frontend.py                           |  76 +++---
 src/leastsquaresfit.py                    |  24 +-
 src/misc.py                               |  49 ++--
 src/models/MODEL_TIRF_1C.py               |  19 +-
 src/models/MODEL_TIRF_2D2D.py             |  19 +-
 src/models/MODEL_TIRF_3D2D.py             |  19 +-
 src/models/MODEL_TIRF_3D2Dkin_Ries.py     |  18 ++
 src/models/MODEL_TIRF_3D3D.py             |  19 +-
 src/models/MODEL_TIRF_gaussian_1C.py      |  19 +-
 src/models/MODEL_TIRF_gaussian_3D2D.py    |  19 +-
 src/models/MODEL_TIRF_gaussian_3D3D.py    |  19 +-
 src/models/MODEL_classic_gaussian_2D.py   |  29 ++-
 src/models/MODEL_classic_gaussian_3D.py   |  26 +-
 src/models/MODEL_classic_gaussian_3D2D.py |  19 +-
 src/models/__init__.py                    |  28 ++-
 src/openfile.py                           | 406 +++++++++++++++++++-----------
 src/page.py                               | 243 +++++++++++-------
 src/plotting.py                           |  20 +-
 src/readfiles/__init__.py                 |  20 ++
 src/readfiles/read_ASC_ALV_6000.py        |  20 ++
 src/readfiles/read_CSV_PyCorrFit.py       |  30 ++-
 src/readfiles/read_FCS_Confocor3.py       |  52 +++-
 src/readfiles/read_SIN_correlator_com.py  |  19 ++
 src/readfiles/read_mat_ries.py            |  37 ++-
 src/tools/__init__.py                     |  16 +-
 src/tools/average.py                      |  46 +++-
 src/tools/background.py                   | 240 +++++++++++++++---
 src/tools/batchcontrol.py                 |  19 +-
 src/tools/chooseimport.py                 |  27 +-
 src/tools/comment.py                      |  16 +-
 src/tools/datarange.py                    |  19 +-
 src/tools/example.py                      |  16 +-
 src/tools/globalfit.py                    |  21 +-
 src/tools/info.py                         |  59 ++++-
 src/tools/overlaycurves.py                |  22 +-
 src/tools/parmrange.py                    |  26 +-
 src/tools/plotexport.py                   |  23 +-
 src/tools/simulation.py                   |  22 +-
 src/tools/statistics.py                   |  27 +-
 src/tools/trace.py                        |  17 +-
 src/usermodel.py                          |  17 +-
 54 files changed, 1838 insertions(+), 1026 deletions(-)

diff --git a/ChangeLog.txt b/ChangeLog.txt
index ae8ca66..8a206eb 100644
--- a/ChangeLog.txt
+++ b/ChangeLog.txt
@@ -1,9 +1,12 @@
-0.8.0-2beta
-- Thanks to Alex Mestiashvili for providing initial setup.py files (@mestia)
+0.8.1
+- Thanks to Alex Mestiashvili for providing initial setup.py files
+  and for debianizing PyCorrFit (@mestia)
+- Thanks to Thomas Weidemann for his contributions to the documentation (@weidemann)
 - Bugfixes
    - Some ConfoCor files were not imported
    - The cpp was not calculated correctly in case of background correction (#45)
    - Enabled averaging of single pages (#58)
+   - Background correction for cross-correlation data is now computed (#46)
 - Improvements of the user interface
    - The menus have been reordered (#47, #50)
    - The fitting panel has been optimized (#49)
@@ -17,6 +20,7 @@
    - Statistics panel improvements (#43)
    - Run information is included in the Data set title
    - The page counter starts at "1" instead of "0" (#44)
+   - New handling of background correction (#46, #53)
 0.8.0
 - Filename/title of each tab now shows up in the notebook (#39)
 - Statistics tool can plot parameters and page selection with the Overlay
diff --git a/MANIFEST.in b/MANIFEST.in
index b3fdf25..1d5a49f 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,4 +1,6 @@
-include doc-src/*
+include doc-src/*.tex
+include doc-src/*.bib
+include doc-src/Images/*
 include external_model_functions/*
 include README.md
 include ChangeLog.txt
diff --git a/PyCorrFit_doc.pdf b/PyCorrFit_doc.pdf
index ca6d5fa..ad0d65a 100644
Binary files a/PyCorrFit_doc.pdf and b/PyCorrFit_doc.pdf differ
diff --git a/doc-src/Bibliography.bib b/doc-src/Bibliography.bib
index 59d521c..d812036 100755
--- a/doc-src/Bibliography.bib
+++ b/doc-src/Bibliography.bib
@@ -1,4 +1,4 @@
-% This file was created with JabRef 2.5.
+% This file was created with JabRef 2.7b.
 % Encoding: UTF-8
 
 @ARTICLE{Aragon1976,
@@ -12,8 +12,7 @@
   doi = {10.1063/1.432357},
   owner = {paul},
   publisher = {AIP},
-  timestamp = {2012.11.02},
-  url = {http://link.aip.org/link/?JCP/64/1791/1}
+  timestamp = {2012.11.02}
 }
 
 @ARTICLE{Ashkin1970,
@@ -28,8 +27,7 @@
   issue = {4},
   owner = {paul},
   publisher = {American Physical Society},
-  timestamp = {2012.11.13},
-  url = {http://link.aps.org/doi/10.1103/PhysRevLett.24.156}
+  timestamp = {2012.11.13}
 }
 
 @ARTICLE{Axelrod1984,
@@ -47,8 +45,7 @@
   issn = {0084-6589},
   owner = {paul},
   publisher = {Annual Reviews},
-  timestamp = {2012.02.14},
-  url = {http://dx.doi.org/10.1146/annurev.bb.13.060184.001335}
+  timestamp = {2012.02.14}
 }
 
 @ARTICLE{Bag2012,
@@ -67,8 +64,7 @@
 	total internal reflection},
   owner = {paul},
   publisher = {WILEY-VCH Verlag},
-  timestamp = {2012.09.20},
-  url = {http://dx.doi.org/10.1002/cphc.201200032}
+  timestamp = {2012.09.20}
 }
 
 @ARTICLE{Bestvater2010,
@@ -101,8 +97,7 @@
 	and luminescence},
   owner = {paul},
   publisher = {OSA},
-  timestamp = {2012.11.07},
-  url = {http://www.opticsexpress.org/abstract.cfm?URI=oe-18-23-23818}
+  timestamp = {2012.11.07}
 }
 
 @ARTICLE{Blom2009,
@@ -116,10 +111,8 @@
   pages = {5554-5566},
   number = {19},
   doi = {10.1021/jp8110088},
-  eprint = {http://pubs.acs.org/doi/pdf/10.1021/jp8110088},
   owner = {paul},
-  timestamp = {2012.11.02},
-  url = {http://pubs.acs.org/doi/abs/10.1021/jp8110088}
+  timestamp = {2012.11.02}
 }
 
 @ARTICLE{Blom2002,
@@ -149,8 +142,7 @@
 	Fluorescence, laser-induced},
   owner = {paul},
   publisher = {OSA},
-  timestamp = {2012.11.07},
-  url = {http://ao.osa.org/abstract.cfm?URI=ao-41-16-3336}
+  timestamp = {2012.11.07}
 }
 
 @ARTICLE{Brinkmeier1999,
@@ -189,8 +181,7 @@
   medline-pst = {ppublish},
   owner = {paul},
   pmid = {21662718},
-  timestamp = {2012.11.07},
-  url = {http://dx.doi.org/10.1021/ac980820i}
+  timestamp = {2012.11.07}
 }
 
 @ARTICLE{Brutzer2012,
@@ -203,10 +194,8 @@
   pages = {473-478},
   number = {1},
   doi = {10.1021/nl203876w},
-  eprint = {http://pubs.acs.org/doi/pdf/10.1021/nl203876w},
   owner = {paul},
-  timestamp = {2012.08.09},
-  url = {http://pubs.acs.org/doi/abs/10.1021/nl203876w}
+  timestamp = {2012.08.09}
 }
 
 @ARTICLE{Buchholz2012,
@@ -245,8 +234,7 @@
 	Avalanche photodiodes (APDs)},
   owner = {paul},
   publisher = {OSA},
-  timestamp = {2012.10.24},
-  url = {http://www.opticsexpress.org/abstract.cfm?URI=oe-20-16-17767}
+  timestamp = {2012.10.24}
 }
 
 @PHDTHESIS{Burkhardt2010,
@@ -285,8 +273,7 @@
   doi = {10.1364/OE.14.005013},
   keywords = {CCD, charge-coupled device; Medical optics and biotechnology; Fluorescence,
 	laser-induced},
-  publisher = {OSA},
-  url = {http://www.opticsexpress.org/abstract.cfm?URI=oe-14-12-5013}
+  publisher = {OSA}
 }
 
 @ARTICLE{Chiantia2006,
@@ -303,8 +290,7 @@
   keywords = {fluorescent probes, force measurements, membranes, sphingolipids},
   owner = {paul},
   publisher = {WILEY-VCH Verlag},
-  timestamp = {2012.10.24},
-  url = {http://dx.doi.org/10.1002/cphc.200600464}
+  timestamp = {2012.10.24}
 }
 
 @ARTICLE{Dertinger2007,
@@ -323,8 +309,7 @@
 	time-resolved spectroscopy},
   owner = {paul},
   publisher = {WILEY-VCH Verlag},
-  timestamp = {2012.02.14},
-  url = {http://dx.doi.org/10.1002/cphc.200600638}
+  timestamp = {2012.02.14}
 }
 
 @ARTICLE{Einstein1905,
@@ -340,8 +325,7 @@
   issn = {1521-3889},
   owner = {paul},
   publisher = {WILEY-VCH Verlag},
-  timestamp = {2012.11.02},
-  url = {http://dx.doi.org/10.1002/andp.19053220806}
+  timestamp = {2012.11.02}
 }
 
 @ARTICLE{Elson1974,
@@ -356,8 +340,7 @@
   issn = {1097-0282},
   owner = {paul},
   publisher = {Wiley Subscription Services, Inc., A Wiley Company},
-  timestamp = {2012.09.24},
-  url = {http://dx.doi.org/10.1002/bip.1974.360130102}
+  timestamp = {2012.09.24}
 }
 
 @ARTICLE{Enderlein1999,
@@ -387,8 +370,7 @@
   keywords = {Geometric optical design; Microscopy; Detection; Fluorescence microscopy},
   owner = {paul},
   publisher = {OSA},
-  timestamp = {2012.11.02},
-  url = {http://ao.osa.org/abstract.cfm?URI=ao-38-4-724}
+  timestamp = {2012.11.02}
 }
 
 @ARTICLE{Hansen1998,
@@ -403,8 +385,7 @@
   number = {20},
   doi = {10.1021/ac980925l},
   owner = {paul},
-  timestamp = {2012.02.14},
-  url = {http://pubs.acs.org/doi/abs/10.1021/ac980925l}
+  timestamp = {2012.02.14}
 }
 
 @ARTICLE{Hashmi2007,
@@ -434,8 +415,7 @@
   keywords = {Velocimetry; Flow diagnostics; Fluorescence, laser-induced},
   owner = {paul},
   publisher = {OSA},
-  timestamp = {2012.11.07},
-  url = {http://www.opticsexpress.org/abstract.cfm?URI=oe-15-10-6528}
+  timestamp = {2012.11.07}
 }
 
 @ARTICLE{Hassler2005,
@@ -454,8 +434,7 @@
   owner = {paul},
   publisher = {Cell Press},
   refid = {S0006-3495(05)73079-4 DOI - 10.1529/biophysj.104.053884},
-  timestamp = {2012.05.02},
-  url = {http://linkinghub.elsevier.com/retrieve/pii/S0006349505730794}
+  timestamp = {2012.05.02}
 }
 
 @ARTICLE{Hassler2005a,
@@ -487,8 +466,7 @@
 	Fluorescence, laser-induced},
   owner = {paul},
   publisher = {OSA},
-  timestamp = {2012.09.21},
-  url = {http://www.opticsexpress.org/abstract.cfm?URI=oe-13-19-7415}
+  timestamp = {2012.09.21}
 }
 
 @OTHER{HaunertG,
@@ -537,10 +515,8 @@
 	calibrated method for pH measurements in subfemtoliter volumes with
 	nanomolar concentrations of EGFP.},
   doi = {10.1073/pnas.95.23.13573},
-  eprint = {http://www.pnas.org/content/95/23/13573.full.pdf+html},
   owner = {paul},
-  timestamp = {2012.11.01},
-  url = {http://www.pnas.org/content/95/23/13573.abstract}
+  timestamp = {2012.11.01}
 }
 
 @ARTICLE{Haustein2007,
@@ -553,10 +529,8 @@
   pages = {151-169},
   number = {1},
   doi = {10.1146/annurev.biophys.36.040306.132612},
-  eprint = {http://www.annualreviews.org/doi/pdf/10.1146/annurev.biophys.36.040306.132612},
   owner = {paul},
-  timestamp = {2012.02.14},
-  url = {http://www.annualreviews.org/doi/abs/10.1146/annurev.biophys.36.040306.132612}
+  timestamp = {2012.02.14}
 }
 
 @ARTICLE{Helmers2003,
@@ -571,8 +545,7 @@
   issn = {0030-3992},
   keywords = {CCD sensors},
   owner = {paul},
-  timestamp = {2012.10.06},
-  url = {http://www.sciencedirect.com/science/article/pii/S0030399203000781}
+  timestamp = {2012.10.06}
 }
 
 @ARTICLE{Holekamp2008,
@@ -588,8 +561,7 @@
   issn = {0896-6273},
   keywords = {SYSBIO},
   owner = {paul},
-  timestamp = {2012.11.13},
-  url = {http://www.sciencedirect.com/science/article/pii/S0896627308000445}
+  timestamp = {2012.11.13}
 }
 
 @ARTICLE{Humpolickova2006,
@@ -606,8 +578,7 @@
   doi = {10.1529/biophysj.106.089474},
   issn = {0006-3495},
   owner = {paul},
-  timestamp = {2012.10.25},
-  url = {http://www.sciencedirect.com/science/article/pii/S0006349506717878}
+  timestamp = {2012.10.25}
 }
 
 @ARTICLE{Jin2004,
@@ -626,8 +597,7 @@
   keyword = {Technik},
   owner = {paul},
   publisher = {Springer Berlin / Heidelberg},
-  timestamp = {2012.02.14},
-  url = {http://dx.doi.org/10.1007/s00348-004-0870-7}
+  timestamp = {2012.02.14}
 }
 
 @ARTICLE{Kannan2006,
@@ -641,10 +611,8 @@
   pages = {3444-3451},
   number = {10},
   doi = {10.1021/ac0600959},
-  eprint = {http://pubs.acs.org/doi/pdf/10.1021/ac0600959},
   owner = {paul},
-  timestamp = {2012.11.07},
-  url = {http://pubs.acs.org/doi/abs/10.1021/ac0600959}
+  timestamp = {2012.11.07}
 }
 
 @INCOLLECTION{Kohl2005,
@@ -662,8 +630,7 @@
   isbn = {978-3-540-23698-6},
   keyword = {Chemistry and Materials Science},
   owner = {paul},
-  timestamp = {2012.02.14},
-  url = {http://dx.doi.org/10.1007/b102212}
+  timestamp = {2012.02.14}
 }
 
 @ARTICLE{Korson1969,
@@ -675,10 +642,8 @@
   pages = {34-39},
   number = {1},
   doi = {10.1021/j100721a006},
-  eprint = {http://pubs.acs.org/doi/pdf/10.1021/j100721a006},
   owner = {paul},
-  timestamp = {2012.10.29},
-  url = {http://pubs.acs.org/doi/abs/10.1021/j100721a006}
+  timestamp = {2012.10.29}
 }
 
 @BOOK{LandauLifshitsStatPhys,
@@ -710,8 +675,7 @@
   owner = {paul},
   posted-at = {2011-03-03 11:38:41},
   priority = {2},
-  timestamp = {2012.02.03},
-  url = {http://www.worldcat.org/isbn/0750633727}
+  timestamp = {2012.02.03}
 }
 
 @ARTICLE{Leutenegger2012,
@@ -750,8 +714,7 @@
   keywords = {Diffraction; Fluorescence microscopy; Fluorescence},
   owner = {paul},
   publisher = {OSA},
-  timestamp = {2012.09.21},
-  url = {http://www.opticsexpress.org/abstract.cfm?URI=oe-20-5-5243}
+  timestamp = {2012.09.21}
 }
 
 @ARTICLE{Lieto2003a,
@@ -769,8 +732,7 @@
   owner = {paul},
   publisher = {Cell Press},
   refid = {S0006-3495(03)74748-1 DOI - 10.1016/S0006-3495(03)74748-1},
-  timestamp = {2012.09.21},
-  url = {http://linkinghub.elsevier.com/retrieve/pii/S0006349503747481}
+  timestamp = {2012.09.21}
 }
 
 @ARTICLE{Lieto2003,
@@ -783,10 +745,8 @@
   pages = {1782-1787},
   number = {5},
   doi = {10.1021/la0261601},
-  eprint = {http://pubs.acs.org/doi/pdf/10.1021/la0261601},
   owner = {paul},
-  timestamp = {2012.02.14},
-  url = {http://pubs.acs.org/doi/abs/10.1021/la0261601}
+  timestamp = {2012.02.14}
 }
 
 @ARTICLE{Lieto2004,
@@ -801,8 +761,7 @@
   doi = {10.1529/biophysj.103.035030},
   issn = {0006-3495},
   owner = {paul},
-  timestamp = {2012.02.14},
-  url = {http://www.sciencedirect.com/science/article/pii/S0006349504736061}
+  timestamp = {2012.02.14}
 }
 
 @ARTICLE{Magde1972,
@@ -818,8 +777,7 @@
   issue = {11},
   owner = {paul},
   publisher = {American Physical Society},
-  timestamp = {2012.11.01},
-  url = {http://link.aps.org/doi/10.1103/PhysRevLett.29.705}
+  timestamp = {2012.11.01}
 }
 
 @ARTICLE{Magde1974,
@@ -834,8 +792,7 @@
   issn = {1097-0282},
   owner = {paul},
   publisher = {Wiley Subscription Services, Inc., A Wiley Company},
-  timestamp = {2012.09.21},
-  url = {http://dx.doi.org/10.1002/bip.1974.360130103}
+  timestamp = {2012.09.21}
 }
 
 @ARTICLE{Nitsche2004,
@@ -852,8 +809,7 @@
   doi = {10.1016/S0006-3495(04)74267-8},
   issn = {0006-3495},
   owner = {paul},
-  timestamp = {2012.11.08},
-  url = {http://www.sciencedirect.com/science/article/pii/S0006349504742678}
+  timestamp = {2012.11.08}
 }
 
 @ARTICLE{Ohsugi2009,
@@ -867,8 +823,7 @@
   number = {1},
   doi = {10.1117/1.3080723},
   owner = {paul},
-  timestamp = {2012.11.12},
-  url = { + http://dx.doi.org/10.1117/1.3080723}
+  timestamp = {2012.11.12}
 }
 
 @ARTICLE{Ohsugi2006,
@@ -883,8 +838,7 @@
   doi = {10.1529/biophysj.105.074625},
   owner = {paul},
   publisher = {Biophysical Society},
-  timestamp = {2012.02.14},
-  url = {http://www.pubmedcentral.nih.gov/articlerender.fcgi?artid=1614500&tool=pmcentrez&rendertype=abstract}
+  timestamp = {2012.02.14}
 }
 
 @ARTICLE{Palmer1987,
@@ -916,8 +870,7 @@
   owner = {paul},
   pii = {S0006-3495(87)83340-4},
   pmid = {3828464},
-  timestamp = {2012.11.02},
-  url = {http://dx.doi.org/10.1016/S0006-3495(87)83340-4}
+  timestamp = {2012.11.02}
 }
 
 @ARTICLE{Pero2006-06,
@@ -951,8 +904,7 @@
   owner = {paul},
   publisher = {Cell Press},
   refid = {S0006-3495(08)70660-X DOI - 10.1529/biophysj.107.108811},
-  timestamp = {2012.05.20},
-  url = {http://linkinghub.elsevier.com/retrieve/pii/S000634950870660X}
+  timestamp = {2012.05.20}
 }
 
 @INCOLLECTION{Petrov:2008,
@@ -970,8 +922,7 @@
 	01307 Dresden, Germany},
   doi = {10.1007/4243_2008_032},
   isbn = {978-3-540-70571-0},
-  keyword = {Chemistry},
-  url = {http://dx.doi.org/10.1007/4243_2008_032}
+  keyword = {Chemistry}
 }
 
 @ARTICLE{Qian1991,
@@ -1002,8 +953,7 @@
   doi = {10.1364/AO.30.001185},
   owner = {paul},
   publisher = {OSA},
-  timestamp = {2012.11.02},
-  url = {http://ao.osa.org/abstract.cfm?URI=ao-30-10-1185}
+  timestamp = {2012.11.02}
 }
 
 @ELECTRONIC{ImageJ,
@@ -1025,10 +975,8 @@
   pages = {3497-3505},
   number = {8},
   doi = {10.1021/la052687c},
-  eprint = {http://pubs.acs.org/doi/pdf/10.1021/la052687c},
   owner = {paul},
-  timestamp = {2012.11.12},
-  url = {http://pubs.acs.org/doi/abs/10.1021/la052687c}
+  timestamp = {2012.11.12}
 }
 
 @PHDTHESIS{Ries:08,
@@ -1037,8 +985,7 @@
   school = {Biophysics, BIOTEC, Technische Universität Dresden, Tatzberg 47–51,
 	01307 Dresden, Germany},
   year = {2008},
-  note = {\url{http://nbn-resolving.de/urn:nbn:de:bsz:14-ds-1219846317196-73420}},
-  url = {http://nbn-resolving.de/urn:nbn:de:bsz:14-ds-1219846317196-73420}
+  note = {\url{http://nbn-resolving.de/urn:nbn:de:bsz:14-ds-1219846317196-73420}}
 }
 
 @ARTICLE{Ries2009,
@@ -1052,8 +999,7 @@
   doi = {10.1016/j.bpj.2008.12.3888},
   issn = {0006-3495},
   owner = {paul},
-  timestamp = {2012.11.08},
-  url = {http://www.sciencedirect.com/science/article/pii/S0006349509002112}
+  timestamp = {2012.11.08}
 }
 
 @ARTICLE{Ries2008390,
@@ -1066,8 +1012,7 @@
   pages = {390 - 399},
   number = {1},
   doi = {10.1529/biophysj.107.126193},
-  issn = {0006-3495},
-  url = {http://www.sciencedirect.com/science/article/pii/S0006349508703126}
+  issn = {0006-3495}
 }
 
 @ARTICLE{Ries2008,
@@ -1095,8 +1040,7 @@
   issn = {1463-9076},
   owner = {paul},
   publisher = {The Royal Society of Chemistry},
-  timestamp = {2012.02.14},
-  url = {http://dx.doi.org/10.1039/B718132A}
+  timestamp = {2012.02.14}
 }
 
 @ARTICLE{Rigler1993,
@@ -1116,8 +1060,7 @@
   language = {English},
   owner = {paul},
   publisher = {Springer-Verlag},
-  timestamp = {2012.11.02},
-  url = {http://dx.doi.org/10.1007/BF00185777}
+  timestamp = {2012.11.02}
 }
 
 @ARTICLE{Ruan2004,
@@ -1135,8 +1078,7 @@
   owner = {paul},
   publisher = {Cell Press},
   refid = {S0006-3495(04)73605-X DOI - 10.1529/biophysj.103.036483},
-  timestamp = {2012.02.14},
-  url = {http://linkinghub.elsevier.com/retrieve/pii/S000634950473605X}
+  timestamp = {2012.02.14}
 }
 
 @ARTICLE{Sankaran2009,
@@ -1155,8 +1097,7 @@
   owner = {paul},
   publisher = {Cell Press},
   refid = {S0006-3495(09)01387-3 DOI - 10.1016/j.bpj.2009.08.025},
-  timestamp = {2012.09.21},
-  url = {http://linkinghub.elsevier.com/retrieve/pii/S0006349509013873}
+  timestamp = {2012.09.21}
 }
 
 @ARTICLE{Sankaran2010,
@@ -1245,10 +1186,8 @@
 	terms of recent theoretical studies that invoke isomerization of
 	the chromophore as a nonradiative channel of the excited state relaxation.},
   doi = {10.1073/pnas.97.1.151},
-  eprint = {http://www.pnas.org/content/97/1/151.full.pdf+html},
   owner = {paul},
-  timestamp = {2012.09.24},
-  url = {http://www.pnas.org/content/97/1/151.abstract}
+  timestamp = {2012.09.24}
 }
 
 @ARTICLE{Schwille1997,
@@ -1265,8 +1204,7 @@
   owner = {paul},
   publisher = {Cell Press},
   refid = {S0006-3495(97)78833-7 DOI - 10.1016/S0006-3495(97)78833-7},
-  timestamp = {2012.02.14},
-  url = {http://linkinghub.elsevier.com/retrieve/pii/S0006349597788337}
+  timestamp = {2012.02.14}
 }
 
 @ARTICLE{Schatzel1990,
@@ -1288,8 +1226,7 @@
 	sample time measurement.},
   doi = {10.1088/0954-8998/2/4/002},
   owner = {paul},
-  timestamp = {2012.11.02},
-  url = {http://stacks.iop.org/0954-8998/2/i=4/a=002}
+  timestamp = {2012.11.02}
 }
 
 @ARTICLE{Scomparin2009,
@@ -1308,8 +1245,7 @@
   keyword = {Physik und Astronomie},
   owner = {paul},
   publisher = {Springer Berlin / Heidelberg},
-  timestamp = {2012.10.22},
-  url = {http://dx.doi.org/10.1140/epje/i2008-10407-3}
+  timestamp = {2012.10.22}
 }
 
 @ARTICLE{Seu2007,
@@ -1328,8 +1264,7 @@
   owner = {paul},
   publisher = {Cell Press},
   refid = {S0006-3495(07)71049-4 DOI - 10.1529/biophysj.106.099721},
-  timestamp = {2012.10.22},
-  url = {http://linkinghub.elsevier.com/retrieve/pii/S0006349507710494}
+  timestamp = {2012.10.22}
 }
 
 @ARTICLE{Shannon1984,
@@ -1388,8 +1323,7 @@
   owner = {paul},
   pii = {S0006-3495(05)72776-4},
   pmid = {15894645},
-  timestamp = {2012.10.28},
-  url = {http://dx.doi.org/10.1529/biophysj.105.060749}
+  timestamp = {2012.10.28}
 }
 
 @ARTICLE{Starr2001,
@@ -1402,8 +1336,7 @@
   pages = {1575 - 1584},
   number = {3},
   doi = {10.1016/S0006-3495(01)76130-9},
-  issn = {0006-3495},
-  url = {http://www.sciencedirect.com/science/article/pii/S0006349501761309}
+  issn = {0006-3495}
 }
 
 @ARTICLE{Sutherland1905,
@@ -1417,10 +1350,8 @@
   number = {54},
   __markedentry = {[paul]},
   doi = {10.1080/14786440509463331},
-  eprint = {http://www.tandfonline.com/doi/pdf/10.1080/14786440509463331},
   owner = {paul},
-  timestamp = {2012.11.14},
-  url = {http://www.tandfonline.com/doi/abs/10.1080/14786440509463331}
+  timestamp = {2012.11.14}
 }
 
 @ARTICLE{Tamm1985,
@@ -1437,8 +1368,7 @@
   owner = {paul},
   publisher = {Cell Press},
   refid = {S0006-3495(85)83882-0 DOI - 10.1016/S0006-3495(85)83882-0},
-  timestamp = {2012.10.29},
-  url = {http://linkinghub.elsevier.com/retrieve/pii/S0006349585838820}
+  timestamp = {2012.10.29}
 }
 
 @INCOLLECTION{Thomps:bookFCS2002,
@@ -1457,8 +1387,7 @@
   isbn = {978-0-306-47057-8},
   keyword = {Biomedical and Life Sciences},
   owner = {paul},
-  timestamp = {2012.01.10},
-  url = {http://dx.doi.org/10.1007/0-306-47057-8_6}
+  timestamp = {2012.01.10}
 }
 
 @ARTICLE{Thompson1983,
@@ -1473,8 +1402,7 @@
   doi = {10.1016/S0006-3495(83)84328-8},
   issn = {0006-3495},
   owner = {paul},
-  timestamp = {2012.02.14},
-  url = {http://www.sciencedirect.com/science/article/pii/S0006349583843288}
+  timestamp = {2012.02.14}
 }
 
 @ARTICLE{Thompson1981,
@@ -1489,8 +1417,7 @@
   doi = {10.1016/S0006-3495(81)84905-3},
   issn = {0006-3495},
   owner = {paul},
-  timestamp = {2012.02.14},
-  url = {http://www.sciencedirect.com/science/article/pii/S0006349581849053}
+  timestamp = {2012.02.14}
 }
 
 @ARTICLE{Thompson1997,
@@ -1508,8 +1435,7 @@
   issn = {1751-1097},
   owner = {paul},
   publisher = {Blackwell Publishing Ltd},
-  timestamp = {2012.02.14},
-  url = {http://dx.doi.org/10.1111/j.1751-1097.1997.tb01875.x}
+  timestamp = {2012.02.14}
 }
 
 @ARTICLE{Thompson1997a,
@@ -1524,8 +1450,7 @@
   doi = {10.1016/S0958-1669(97)80158-9},
   issn = {0958-1669},
   owner = {paul},
-  timestamp = {2012.02.14},
-  url = {http://www.sciencedirect.com/science/article/pii/S0958166997801589}
+  timestamp = {2012.02.14}
 }
 
 @ARTICLE{Toomre2001,
@@ -1540,8 +1465,7 @@
   issn = {0962-8924},
   keywords = {green-fluorescent protein (GFP)},
   owner = {paul},
-  timestamp = {2012.02.14},
-  url = {http://www.sciencedirect.com/science/article/pii/S096289240102027X}
+  timestamp = {2012.02.14}
 }
 
 @ARTICLE{Unruh2008,
@@ -1559,8 +1483,7 @@
   owner = {paul},
   publisher = {Cell Press},
   refid = {S0006-3495(08)78962-8 DOI - 10.1529/biophysj.108.130310},
-  timestamp = {2012.09.21},
-  url = {http://linkinghub.elsevier.com/retrieve/pii/S0006349508789628}
+  timestamp = {2012.09.21}
 }
 
 @ARTICLE{Vacha2009,
@@ -1575,10 +1498,8 @@
   number = {26},
   note = {PMID: 19290591},
   doi = {10.1021/jp809974e},
-  eprint = {http://pubs.acs.org/doi/pdf/10.1021/jp809974e},
   owner = {paul},
-  timestamp = {2012.10.24},
-  url = {http://pubs.acs.org/doi/abs/10.1021/jp809974e}
+  timestamp = {2012.10.24}
 }
 
 @ELECTRONIC{VisserRol,
@@ -1587,7 +1508,6 @@
   title = {Basic Photophysics},
   note = {Photobiological Sciences Online (KC Smith, ed.) American Society
 	for Photobiology \url{http://www.photobiology.info}.},
-  url = {http://www.photobiology.info/Visser-Rolinski.html},
   owner = {paul},
   timestamp = {2012.02.14}
 }
@@ -1602,10 +1522,8 @@
   pages = {13368-13379},
   number = {36},
   doi = {10.1021/j100036a009},
-  eprint = {http://pubs.acs.org/doi/pdf/10.1021/j100036a009},
   owner = {paul},
-  timestamp = {2012.02.20},
-  url = {http://pubs.acs.org/doi/abs/10.1021/j100036a009}
+  timestamp = {2012.02.20}
 }
 
 @ARTICLE{Widengren1994,
@@ -1623,8 +1541,7 @@
   keyword = {Biomedizin & Life Sciences},
   owner = {paul},
   publisher = {Springer Netherlands},
-  timestamp = {2012.09.24},
-  url = {http://dx.doi.org/10.1007/BF01878460}
+  timestamp = {2012.09.24}
 }
 
 @ARTICLE{Wohland2001,
@@ -1639,8 +1556,7 @@
   doi = {10.1016/S0006-3495(01)76264-9},
   issn = {0006-3495},
   owner = {paul},
-  timestamp = {2012.09.08},
-  url = {http://www.sciencedirect.com/science/article/pii/S0006349501762649}
+  timestamp = {2012.09.08}
 }
 
 @ARTICLE{Wohland2010,
@@ -1673,8 +1589,7 @@
 	fluorescence and luminescence},
   owner = {paul},
   publisher = {OSA},
-  timestamp = {2012.11.07},
-  url = {http://www.opticsexpress.org/abstract.cfm?URI=oe-18-10-10627}
+  timestamp = {2012.11.07}
 }
 
 @ARTICLE{Yordanov2009,
@@ -1708,8 +1623,7 @@
   keywords = {Velocimetry; Fluorescence, laser-induced; Spectroscopy, surface},
   owner = {paul},
   publisher = {OSA},
-  timestamp = {2012.09.21},
-  url = {http://www.opticsexpress.org/abstract.cfm?URI=oe-17-23-21149}
+  timestamp = {2012.09.21}
 }
 
 @ARTICLE{Yordanov2011,
@@ -1729,8 +1643,7 @@
   numpages = {3},
   owner = {paul},
   publisher = {AIP},
-  timestamp = {2012.05.02},
-  url = {http://link.aip.org/link/?RSI/82/036105/1}
+  timestamp = {2012.05.02}
 }
 
 @ARTICLE{Zhang2007,
@@ -1763,8 +1676,7 @@
 	Fluorescence microscopy; Three-dimensional microscopy},
   owner = {paul},
   publisher = {OSA},
-  timestamp = {2012.09.20},
-  url = {http://ao.osa.org/abstract.cfm?URI=ao-46-10-1819}
+  timestamp = {2012.09.20}
 }
 
 @BOOK{Rigler:FCSbook,
@@ -1784,7 +1696,6 @@
   organization = {Andor Technology},
   note = {\url{http://www.andor.com/pdfs/specifications/Andor_Neo_sCMOS_Specifications.pdf}
 	(Okt. 2012)},
-  url = {http://www.andor.com/pdfs/specifications/Andor_Neo_sCMOS_Specifications.pdf},
   citeseerurl = {http://www.andor.com/pdfs/specifications/Andor_Neo_sCMOS_Specifications.pdf},
   owner = {paul},
   timestamp = {2012.10.08}
@@ -1795,7 +1706,6 @@
   organization = {Hamamatsu},
   note = {\url{http://sales.hamamatsu.com/assets/pdf/hpspdf/e_flash4.pdf} (Okt.
 	2012)},
-  url = {http://sales.hamamatsu.com/assets/pdf/hpspdf/e_flash4.pdf},
   citeseerurl = {http://www.andor.com/pdfs/specifications/Andor_Neo_sCMOS_Specifications.pdf},
   owner = {paul},
   timestamp = {2012.10.08}
diff --git a/doc-src/Images/SchwilleLogo.jpg b/doc-src/Images/SchwilleLogo.jpg
deleted file mode 100755
index 880a890..0000000
Binary files a/doc-src/Images/SchwilleLogo.jpg and /dev/null differ
diff --git a/doc-src/Images/TU_Logo_SW.pdf b/doc-src/Images/TU_Logo_SW.pdf
deleted file mode 100755
index 8309dca..0000000
Binary files a/doc-src/Images/TU_Logo_SW.pdf and /dev/null differ
diff --git a/doc-src/PyCorrFit_doc.tex b/doc-src/PyCorrFit_doc.tex
index 8f90844..bacd385 100755
--- a/doc-src/PyCorrFit_doc.tex
+++ b/doc-src/PyCorrFit_doc.tex
@@ -49,6 +49,7 @@
 
 \newcommand{\mycaption}[2]{\caption[~#1]{\textbf{#1:} #2}}
 
+\usepackage{tabularx}
 \usepackage{textcomp} % Sonderzeichen
 \usepackage{wrapfig}
 
@@ -122,45 +123,29 @@
 
 \begin{document}
 
-
-
-\vspace{-5em}
-\begin{flushright}
-	\includegraphics[angle=0,width=35mm]{Images/TU_Logo_SW} \hspace{1em}
-	\raisebox{-0.2em}{\includegraphics[angle=0,width=40mm]{Images/SchwilleLogo} }
-	\includegraphics[angle=0,width=40mm]{Images/PyCorrFit_logo_dark.pdf} 
-\end{flushright}
-%\vspace{-3em}
-\begin{center}\rule{\textwidth}{0.1ex}\par\end{center}
-%\vspace{-4em}
-%\begin{center}\rule{\textwidth}{0.1ex}\par\end{center}
-
-
-
-\vspace{1em}
-
-\noindent\begin{tabular}{ll}
-\textbf{PyCorrFit - Generic cross-platform FCS fitting tool}\\
-\textit{Software Guide} \\
-\\
-
- Paul Müller \\
- Biotechnology Center of the TU Dresden \\
- \today 
- 
-\end{tabular}
+\noindent
+\begin{tabularx}{\linewidth}{Xr}
+ \textbf{PyCorrFit \newline FCS data evaluation} \newline \textit{Software Guide}  & 
+     \raisebox{-2em}{\includegraphics[angle=0,width=40mm]{Images/PyCorrFit_logo_dark.pdf}} \\
+ \\
+ Thomas Weidemann & \\
+ Max Planck Institute of Biochemistry, Martinsried, Germany & \\
+ \\
+ Paul Müller & \\
+ Biotechnology Center of the TU Dresden, Germany & \\
+ \\
+ \today & \\
+\end{tabularx}
 
 \vspace{2em}
 
 \tableofcontents
 
-
 \newpage
 
 \graphicspath{{Images/}}
 \include{PyCorrFit_doc_content}
 
-
 \section*{Acknowledgements}
 \addcontentsline{toc}{section}{Acknowledgements}
 I thank André Scholich (TU Dresden, Germany) for initial proof reading of the manuscript and Grzegorz Chwastek, Franziska Thomas, and Thomas Weidemann (Biotec, TU Dresden, Germany) for critical feedback on PyCorrFit.
diff --git a/doc-src/PyCorrFit_doc_content.tex b/doc-src/PyCorrFit_doc_content.tex
index 6935a36..2557a6f 100755
--- a/doc-src/PyCorrFit_doc_content.tex
+++ b/doc-src/PyCorrFit_doc_content.tex
@@ -1,26 +1,26 @@
 \section{Introduction}
 
 \subsection{Preface}
-\texttt{PyCorrFit} emerged from my work in the Schwille Lab\footnote{\url{http://www.biochem.mpg.de/en/rd/schwille/}} at the Biotechnology Center of the TU Dresden in 2011/2012. The program source code is available at GitHub\footnote{\url{https://github.com/paulmueller/PyCorrFit}}. Please do not hesitate to sign up and add a feature request. If you you found a bug, please let me know via GitHub.\\
+\textit{PyCorrFit} emerged from my work in the Schwille Lab\footnote{\url{http://www.biochem.mpg.de/en/rd/schwille/}} at the Biotechnology Center of the TU Dresden in 2011/2012. The program source code is available at GitHub\footnote{\url{https://github.com/paulmueller/PyCorrFit}}. Please do not hesitate to sign up and add a feature request. If you found a bug, please let me know via GitHub.\\
 
-\noindent \texttt{PyCorrFit} was written to simplify the work with experimentally obtained correlation curves. These can be processed independently (operating system, location, time). PyCorrFit supports commonly used file formats and enables users to allocate and organize their data in a simple way.\\
+\noindent \textit{PyCorrFit} was written to simplify the work with experimentally obtained correlation curves. These can be processed independently (operating system, location, time). PyCorrFit supports commonly used file formats and enables users to allocate and organize their data in a simple way.\\
 
-\noindent PyCorrFit is free software: you can redistribute it and/or modify it
+\noindent \textit{PyCorrFit} is free software: you can redistribute it and/or modify it
 under the terms of the GNU General Public License as published 
 by the Free Software Foundation, either version 2 of the License, 
 or (at your option) any later version\footnote{\url{http://www.gnu.org/licenses/gpl.html}}.
 
-\subsubsection*{What PyCorrFit can do}
+\subsubsection*{What \textit{PyCorrFit} can do}
 \begin{itemize}
 \item Load correlation curves from numerous correlators
-\item Process these curves (e.g. background correction, s. Tools section \ref{sec:tools} )
+\item Process these curves (\hyref{Section}{sec:tm})
 \item Fit a model function (many included) to an experimental curve
 \item Import user defined models for fitting
 \item Many batch processing features
-\item Save/load entire PyCorrFit sessions
+\item Save/load entire \textit{PyCorrFit} sessions
 \end{itemize}
 
-\subsubsection*{What PyCorrFit is not}
+\subsubsection*{What \textit{PyCorrFit} is not}
 \begin{itemize}
 \item A multiple-$\tau$ correlator
 \item A software to operate hardware correlators
@@ -28,7 +28,7 @@ or (at your option) any later version\footnote{\url{http://www.gnu.org/licenses/
 
 \subsection{System prerequisites}
 \subsubsection{Hardware}
-This documentation addresses the processing of correlation curves with PyCorrFit. {PyCorrFit} was successfully used with the following setups:
+This documentation addresses the processing of correlation curves with \textit{PyCorrFit}. \textit{PyCorrFit} was successfully used with the following setups:
 \begin{itemize}
 \item[1.]
      APD: Photon Counting Device from PerkinElmer Optoelectronics, Model: 	 \texttt{SPCM-CD3017}\\
@@ -42,7 +42,7 @@ This documentation addresses the processing of correlation curves with PyCorrFit
 
 \subsubsection{Software}
 \label{cha:soft}
-The latest version of PyCorrFit can be obtained from the internet at \url{http://pycorrfit.craban.de}.
+The latest version of \textit{PyCorrFit} can be obtained from the internet at \url{http://pycorrfit.craban.de}.
 \begin{itemize}
 \item \textbf{MacOSx}.
 Binary files for MacOSx $>$10.6.8 are available from the download page but have not yet been fully tested for stability.
@@ -51,7 +51,7 @@ For Windows XP or Windows 7, stand-alone binary executables are available from t
 \item \textbf{Linux}.
 There are executable binaries for widely used distributions (e.g. Ubuntu).
 \item \textbf{Sources}
-The program was written in Python, keeping the concept of cross-platform programming in mind. To run PyCorrFit on any other operating system, the installation of Python v.2.7 is required. To obtain the latest source, visit PyCorrFit at GitHub (\url{https://github.com/paulmueller/PyCorrFit}). PyCorrFit depends on the following python modules:\\
+The program was written in Python, keeping the concept of cross-platform programming in mind. To run \textit{PyCorrFit} on any other operating system, the installation of Python v.2.7 is required. To obtain the latest source, visit \textit{PyCorrFit} at GitHub (\url{https://github.com/paulmueller/PyCorrFit}). \textit{PyCorrFit} depends on the following python modules:\\
 \texttt{\\
 python-matplotlib ($\geq$ 1.0.1) \\
 python-numpy ($\geq$ 1.5.1) \\
@@ -62,7 +62,7 @@ python-wxtools \\
 python-wxgtk2.8-dbg \\
 }
 \\
-For older versions of Ubuntu, some of the above package versions are not listed in the package repository. To enable the use of PyCorrFit on those systems, the following tasks have to be performed:
+For older versions of Ubuntu, some of the above package versions are not listed in the package repository. To enable the use of \textit{PyCorrFit} on those systems, the following tasks have to be performed:
 \begin{itemize}
 \item[ ] \textbf{matplotlib}. The tukss-ppa includes version 1.0.1. After adding the repository (\texttt{apt-add-repository ppa:tukss/ppa}), matplotlib can be installed as usual.
 \item[ ] \textbf{numpy}. The package from a later version of Ubuntu can be installed: \url{https://launchpad.net/ubuntu/+source/python-numpy/}
@@ -73,21 +73,21 @@ For older versions of Ubuntu, some of the above package versions are not listed
 Alternatively \texttt{python-pip} (\url{http://pypi.python.org/pypi/pip}) can be used to install up-to-date python modules.
 
 \noindent \textbf{\LaTeX}.
-PyCorrFit can save correlation curves as images using matplotlib. It is also possible to utilize Latex to generate these plots. On Windows, installing MiKTeX  with ``automatic package download'' will enable this feature. On MacOSx, the MacTeX distribution can be used. On other systems, the packages LaTeX, dvipng, Ghostscript and the scientific latex packages \texttt{texlive-science} and \texttt{texlive-math-extra} need to be installed.
+\textit{PyCorrFit} can save correlation curves as images using matplotlib. It is also possible to utilize Latex to generate these plots. On Windows, installing MiKTeX  with ``automatic package download'' will enable this feature. On MacOSx, the MacTeX distribution can be used. On other systems, the packages LaTeX, dvipng, Ghostscript and the scientific latex packages \texttt{texlive-science} and \texttt{texlive-math-extra} need to be installed.
 
-\subsection{Running PyCorrFit}
+\subsection{Running \textit{PyCorrFit}}
 \label{sec:run}
 \paragraph*{Windows}
 Download the executable file and double-click on the \texttt{PyCorrFit.exe} icon.
 \paragraph*{Linux/Ubuntu}
 Make sure the binary has the executable bit set, then simply double-click on the binary  \texttt{PyCorrFit}.
 \paragraph*{Mac OSx}
-When downloading the archive \texttt{PyCorrFit.zip}, the binary should be extracted automatically (if not, extract the archive) and you can double-click it to run PyCorrFit.
+When downloading the archive \texttt{PyCorrFit.zip}, the binary should be extracted automatically (if not, extract the archive) and you can double-click it to run \textit{PyCorrFit}.
 \paragraph*{from source}
 Invoke \texttt{python PyCorrFit.py} from the command line.
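
As a quick sanity check before running from source, the sketch below (Python 2.7, per the requirement above) verifies that the required modules are importable. The import names matplotlib, numpy and wx are the usual ones for the Debian packages named in the dependency list; the remaining packages from that list can be added in the same way.

    # Minimal sketch: check that the modules PyCorrFit needs can be imported
    # before invoking "python PyCorrFit.py". Import names are assumptions
    # based on the Debian package names given in the text above.
    import importlib

    requirements = {"matplotlib": "1.0.1",   # python-matplotlib (>= 1.0.1)
                    "numpy": "1.5.1",        # python-numpy (>= 1.5.1)
                    "wx": None}              # python-wxtools / python-wxgtk2.8

    for name, minver in requirements.items():
        try:
            module = importlib.import_module(name)
        except ImportError:
            print("missing module: %s" % name)
            continue
        found = getattr(module, "__version__", "unknown")
        print("%s %s (required: %s)" % (name, found, minver or "any"))
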
 
 
-\section{Working with PyCorrFit}
+\section{Working with \textit{PyCorrFit}}
 
 \subsection{Workflow}
 \label{cha_graphint}
@@ -141,7 +141,7 @@ The main window can be rescaled as a whole to improve data representation. In ad
 \section{The menu bar}
 \label{sec:mb}
 
-PyCorrFit is organized in panels which group certain functions. The menu organizes data management (File), data analysis (Tools), display of correlation functions (Current Page), numerical examples (Model), software settings (Preferences), and software metadata (Help). The documentation refers to the version 0.8.1.
+PyCorrFit is organized in panels which group certain functions. The menu organizes data management (File), data analysis (Tools), display of correlation functions (Current Page), numerical examples (Model), software settings (Preferences), and software metadata (Help).
 
 \subsection{File menu}
 \label{sec:fm}
@@ -149,13 +149,13 @@ The File menu organizes the import of theoretical models, experimental correlati
 
 \subsubsection{File / Import model}
 \label{sec:fm.im}
-Correlation data must be fitted to models describing the underlying physical processes which give rise to a particular time dependence and magnitude of the recorded signal fluctuations. Models are mathematical expressions containing parameters with physical meaning, like the molecular brightness or the dwell time through an illuminated volume etc. While a number of standard functions are built-in, the user can define new expressions. Some examples can be found at GitHub in the \textit{Py [...]
+Correlation data must be fitted to models describing the underlying physical processes which give rise to a particular time dependence and magnitude of the recorded signal fluctuations. Models are mathematical expressions containing parameters with physical meaning, like the molecular brightness or the dwell time through an illuminated volume etc. While a number of standard functions are built-in, the user can define new expressions. Some examples can be found at GitHub in the \textit{Py [...]
 
 Model functions are imported as text files (*.txt) using certain syntax:
 
 \begin{itemize}
 \item \textbf{Encoding}: PyCorrFit can interpret the standard Unicode character set (UTF-8).
-\item \textbf{Comments}: Lines starting with a hash (\#), empty lines, or lines containing only white space characters are ignored. The only exception is the first line starting with a hash followed by a white space and a short name of the model. This line is evaluated to complement the list of models in the dialogue\textit{ Choose }\textit{model}, when loading the data.
+\item \textbf{Comments}: Lines starting with a hash (\texttt{\#}), empty lines, or lines containing only white space characters are ignored. The only exception is the first line starting with a hash followed by a white space and a short name of the model. This line is evaluated to complement the list of models in the dialogue \textit{Choose model} when loading the data.
 \item \textbf{Units}: PyCorrFit works with internal units for:
 
 \begin{itemize}
@@ -166,7 +166,10 @@ Model functions are imported as text files (*.txt) using certain syntax:
 \item Inverse area: \SI{100}{\mu m^{-2}} 
 \item Inverse volume: \SI{1000}{\mu m^{-3}} 
 \end{itemize}
-\item \textbf{Parameters:} To define a new model function new parameters can be introduced. Parameters are defined by a sequence of strings separated by white spaces containing name, the dimension in angular brackets, the equal sign, and a starting value which appears in the main window for fitting. For example: D [50 µm\textsuperscript{2}s\textsuperscript{{}-1}] = 50.00. It is important to note that when the dimensions differ from the internal units (10 µm\textsuperscript{2}s\textsupers [...]
+\item \textbf{Parameters:} To define a new model function, new parameters can be introduced. Parameters are defined by a sequence of strings separated by white spaces containing the name, the dimension in square brackets, the equal sign, and a starting value which appears in the main window for fitting. For example: \texttt{D [\SI{10}{\mu m^2 s^{-1}}] = 5.0}.
+%It is important to note that when the dimensions differ from the internal units (\SI{10}{\mu m^ 2 s^{-1}}), the expression must contain some adjusting factor; here a factor of 5. 
+%Thus, user defined dimensions are only for display and cannot be processed numerically.
+ The parameter names may contain only alphabetic (not numerical) characters. \texttt{G} and \texttt{g}, as well as the numbers \texttt{e} and \texttt{pi} are already mapped and cannot be used freely.
 \item \textbf{Placeholder:} When defining composite mathematical expressions for correlation functions one can use placeholders. Placeholders start with a lowercase ‘g’. For example, the standard, Gaussian 3D diffusion in free solution may be written as
 
 \begin{itemize}
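
To make the syntax above concrete, the following sketch writes a minimal user-defined model file from Python. Only the general layout is taken from the description above (a hash plus short name in the first line, "name [unit] = value" parameter lines, placeholders starting with a lowercase g, and a final line defining G); the particular parameter names, the lag-time symbol tau and the use of sqrt are illustrative assumptions.

    # Sketch: write a user-defined model file following the syntax above.
    # Parameter names, the lag-time symbol "tau" and "sqrt" are assumptions.
    lines = ["# 3D diffusion (example)",
             "# Gaussian 3D diffusion in free solution",
             "",
             "n [1] = 1.0",
             "taudiff [ms] = 0.3",
             "SP [1] = 5.0",
             "",
             "gThrD = 1./(1 + tau/taudiff) * sqrt(1./(1 + tau/(taudiff*SP**2)))",
             "",
             "G = 1./n * gThrD"]

    with open("ExampleModel.txt", "w") as fd:
        fd.write("\n".join(lines))
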
@@ -241,39 +244,59 @@ G = 1./n * gThrD * gScan * gTrip
 \textit{Load data }is the first way to import multiple correlation data sets into a \textit{PyCorrFit} session. The supported file formats can be found in a drop-down list of supported file endings in the pop-up dialog \textit{Open data files}:
 
 
-\begin{enumerate}
-\item All supported files \ \ \ \ \ \ [default]
-\item Confocor3 (*.fcs)\ \ \ \ \ \ [AIM 4.2, ZEN 2010, Zeiss, Germany]
-\item Correlator ALV6000 (*.ASC)\ \ [ALV Laser GmbH, Langen, Germany]
-\item Correlator.com (*.SIN)\ \ \ \ [www.correlator.com, USA]
-\item Matlab ‘Ries (*.mat)\ \ \ \ \ \ [EMBL Heidelberg, Germany]
-\item PyCorrFit (*.csv)\ \ \ \ \ \ [Paul Müller, TU Dresden, Germany]
-\item Zip files (*.zip)\ \ \ \ \ \ [Paul Müller, TU Dresden, Germany]
-\end{enumerate}
-While (1)-(4) are file formats associated with commercial hardware, (5) refers to a MATLAB based FCS evaluation software developed by Jonas Ries in the Schwille lab at TU Dresden, (6) is the txt-file containing comma-separated values (csv) generated with PyCorrFit via the command \textit{Current Page / Save data}. Zip-files are automatically decompressed and can be imported when matching one of the above mentioned formats. In particular loading of zip files is a possibility to re-import  [...]
+\begin{tabular}{l l}
+ \rule{0pt}{3ex}  (1) All supported files & default \\
+ \rule{0pt}{3ex} (2) Confocor3 (*.fcs) & AIM 4.2, ZEN 2010, Zeiss, Germany \\
+ \rule{0pt}{3ex} (3) Correlator ALV6000 (*.ASC) & ALV Laser GmbH, Langen, Germany \\
+ \rule{0pt}{3ex} (4) Correlator.com (*.SIN) & www.correlator.com, USA \\
+ \rule{0pt}{3ex} (5) Matlab ‘Ries (*.mat) & EMBL Heidelberg, Germany \\
+ \rule{0pt}{3ex} (6) PyCorrFit (*.csv) & Paul Müller, TU Dresden, Germany \\
+ \rule{0pt}{3ex} (7) Zip files (*.zip) & Paul Müller, TU Dresden, Germany \\
+\end{tabular}
+\vspace{3ex}
+\newline
+While (2)-(4) are file formats associated with commercial hardware, (5) refers to a MATLAB based FCS evaluation software developed by Jonas Ries in the Schwille lab at TU Dresden, (6) is the txt-file containing comma-separated values (csv) generated with PyCorrFit via the command \textit{Current Page / Save data}. Zip-files are automatically decompressed and can be imported when matching one of the above mentioned formats. In particular loading of zip files is a possibility to re-import  [...]
 
-When loading data, the user is prompted to assign fit models in the \textit{Choose }\textit{Models} dialogue window. There, curves are sorted according to channel (for example AC1, AC2, CC12, and CC21, as a typical outcome of a dual-color cross-correlation experiment). For each channel a fit model must be selected from the list (see Section 3.4 and Appendix xxx):
+When loading data, the user is prompted to assign fit models in the \textit{Choose Models} dialogue window. There, curves are sorted according to channel (for example AC1, AC2, CC12, and CC21, as a typical outcome of a dual-color cross-correlation experiment). For each channel a fit model must be selected from the list (see \hyref{Section}{sec:models}):
 
 If a file format is not yet listed, the correlation data could be converted into a compatible text-file (*.csv) or bundles of *.csv files within a compressed archive *.zip. For reformatting the following points should be considered:
 
 
 \begin{itemize}
-\item \textbf{Encoding}: \textit{PyCorrFit} uses the standard Unicode character set (UTF-8). However, since no special characters are needed to save experimental data, other encodings may also work. New line characters are {\textbackslash}r{\textbackslash}n (Windows).
-\item \textbf{Comments}: Lines starting with a hash (\#), empty lines, or lines containing only white space characters are ignored. Exceptions are the keywords listed below.
+\item \textbf{Encoding}: \textit{PyCorrFit} uses the standard Unicode character set (UTF-8). However, since no special characters are needed to save experimental data, other encodings may also work. New line characters are \texttt{{\textbackslash}r{\textbackslash}n} (Windows).
+\item \textbf{Comments}: Lines starting with a hash (\texttt{\#}), empty lines, or lines containing only white space characters are ignored. Exceptions are the keywords listed below.
 \item \textbf{Units}: PyCorrFit works with units/values for:
 
 \begin{itemize}
-\item Time: 1 ms
-\item Intensity: 1 kHz
-\item Amplitude offset: G(0) = 0 (not 1)
+\item Time: \SI{1}{ms}
+\item Intensity: \SI{1}{kHz}
+\item Amplitude offset: $G(0) = 0$ (not 1)
 \end{itemize}
-\item \textbf{Keywords: }\textit{PyCorrFit} reads the first two columns containing numerical values. The first table (non-hashed) is recognized as the correlation data containing the lag times in the first and the correlation data in the second column. (In case the *.csv file has been generated with \textit{PyCorrFit} up to three additional columns containing the fit function are ignored, see Section 3.1.6). The table ends, when the keyword \# BEGIN TRACE appears. Below this line the tim [...]
-\item \textbf{Tags:} Channel information can be entered using defined syntax in a header. The keyword \# Type AC/CC Autocorrelation [uppercase?] assigns the tag ‘AC’ and \# Type AC/CC Cross-correlation assigns the tag ‘CC’ to the correlation curve. These strings are consistently displayed in the user interface of the respective data page in \textit{PyCorrFit}. If no data type is specified, autocorrelation is assumed. Tags may be specified with additional information like channel numbers, [...]
+\item \textbf{Keywords:}\footnote{Keywords are case-insensitive.} \textit{PyCorrFit} reads the first two columns containing numerical values. The first table (non-hashed) is recognized as the correlation data, containing the lag times in the first and the correlation data in the second column. (In case the *.csv file has been generated with \textit{PyCorrFit}, up to three additional columns containing the fit function are ignored). The table ends when the keyword \texttt{\# BEGIN TRACE} a [...]
+\item \textbf{Tags:}\footnote{Tags are case-insensitive.} Channel information can be entered using defined syntax in a header. The keyword 
+\begin{center}
+\vspace{-1em}
+ \texttt{\# Type AC/CC Autocorrelation}
+\vspace{-1em}
+\end{center}
+  assigns the tag \texttt{AC} and the keyword
+\begin{center}
+\vspace{-1em}
+  {\texttt{\# Type AC/CC Crosscorrelation}}
+\vspace{-1em}
+\end{center}
+ assigns the tag \texttt{CC} to the correlation curve. These strings are consistently displayed in the user interface of the respective data page in \textit{PyCorrFit}. If no data type is specified, autocorrelation is assumed. Tags may be specified with additional information like channel numbers, e.g. 
+\begin{center}
+\vspace{-1em}
+ \texttt{\# Type AC/CC Autocorrelation \_01}.
+\vspace{-1em}
+\end{center}
+In this case the tag \texttt{AC\_01} is generated. This feature is useful to keep track of the type of curve during the fitting and when post-processing the numerical fit results.
 \end{itemize}
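
A minimal sketch of such a text file, generated here with Python: the "\# Type AC/CC ..." tag line and the "\# BEGIN TRACE" keyword follow the description above, while the dummy numbers, the column comments and the trace time unit are illustrative assumptions that should be checked against a file exported by PyCorrFit itself.

    import numpy as np

    # Dummy autocorrelation curve: lag times in ms, long-time offset at 0.
    tau = np.logspace(-3, 3, 50)
    corr = 0.1 / (1.0 + tau / 0.3)

    lines = ["# Type AC/CC Autocorrelation _01",    # imported with tag AC_01
             "# lag time [ms]   correlation"]
    lines += ["%.6e\t%.6e" % (t, c) for t, c in zip(tau, corr)]
    lines += ["# BEGIN TRACE",                      # intensity trace follows
              "# time   intensity [kHz]"]
    lines += ["%.6e\t%.6e" % (t, 80.0) for t in np.arange(0.0, 30.0, 0.5)]

    with open("curve_AC01.csv", "w") as fd:
        fd.write("\r\n".join(lines))                # Windows-style newlines
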
 
 \subsubsection{File / Open session}
 \label{sec:fm.os}
-This command is the second way to import data into PyCorrFit. In contrast to \textit{Load data}, it opens an entire fitting project, which was previously saved with \textit{PyCorrFit}. Sessions are bundles of files named *.fcsfit-session.zip. Sessions contain, comments, model assigned correlation data, and the current state of parameters for each data page (Section 3.1.6).
+This command is the second way to import data into \textit{PyCorrFit}. In contrast to \textit{Load data}, it opens an entire fitting project, which was previously saved with \textit{PyCorrFit}. Sessions are bundles of files named *.fcsfit-session.zip. Sessions contain comments, model-assigned correlation data, and the current state of parameters for each data page (\hyref{Section}{sec:fm.ss}).
 
 \subsubsection{File / Comment session}
 \label{sec:fm.cs}
@@ -362,9 +385,9 @@ Page info is a most verbose summary of a data set. The panel \textit{Page info}
 \item Actual parameter values (as contained in the model function)
 \item Supplementary parameters (intensity, counts per particle, duration, etc.)
 \item Fitting related information (Chi-square, channel selection, varied fit parameters) .
-\item Model doc string (Section 3.1)
+\item Model doc string (\hyref{Section}{sec:models})
 \end{enumerate}
-The content of Page info is saved as a header when exporting correlation functions via the command \textit{Current page / Save data (*.csv)} (Section 3.3.2).
+The content of Page info is saved as a header when exporting correlation functions via the command \textit{Current page / Save data (*.csv)} (\hyref{Section}{sec:cp.sd}).
 
 \subsubsection{Tools / Slider simulation}
 \label{sec:tm.ss}
@@ -374,21 +397,21 @@ Two variables (A and B) have to be assigned from a drop-down list of parameters
 
 In addition, the variables A and B can be linked by a mathematical relation. For this a mathematical operator can be selected from a small list and the option \textit{Fix relation} must be checked. Then, the variable B appears inactivated (greyed out) and the new variable combining values for A and B can be explored by dragging.
 
-\subsection{ Current Page}
+\subsection{Current Page}
 \label{sec:cp}
 This menu compiles import and export operations referring exclusively to the active page in the main window. 
 
 \subsubsection{Current Page / Import Data}
 \label{sec:cp.id}
-This command is the third way to import data into a pre-existing session. Single files containing correlation data can be imported as long as they have the right format (Section 3.1.2). In contrast to \textit{Load data} from the \textit{File} menu, the model assignment and the state of the parameters remains. The purpose of this command is to compare different data sets to the very same model function for a given parameter values. After successful import, the previous correlation data of [...]
+This command is the third way to import data into a pre-existing session. Single files containing correlation data can be imported as long as they have the right format (\hyref{Section}{sec:fm.ld}). In contrast to \textit{Load data} from the \textit{File} menu, the model assignment and the state of the parameters remain. The purpose of this command is to compare different data sets to the very same model function for a given set of parameter values. After successful import, the previous correl [...]
 
-To avoid this loss, one could first generate a new page via the menu \textit{Models} (Section 3.4), select a model function and import data there. This is also a possibility to assign the very same data to different models within the same session.
+To avoid this loss, one could first generate a new page via the menu \textit{Models} (\hyref{Section}{sec:tm.m}), select a model function and import data there. This is also a way to assign the very same data to different models within the same session.
 
 \subsubsection{Current Page / Save data (*.csv)}
 \label{sec:cp.sd}
-For the documentation with graphics software of choice, correlation curves can be exported as a comma-separated table. A saved \textit{PyCorrFit} text-file (*.csv) will contain a hashed header with metadata from the \textit{Page info} panel (Section 3.2.8), followed by the correlation and fitting values in tab-separated columns: \textit{Channel (tau [s])}, \textit{Experimental correlation}, \textit{Fitted correlation}, \textit{Residuals, }and \textit{Weights (fit)}. 
+For the documentation with graphics software of choice, correlation curves can be exported as a comma-separated table. A saved \textit{PyCorrFit} text-file (*.csv) will contain a hashed header with metadata from the \textit{Page info} tool (\hyref{Section}{sec:tm.pi}), followed by the correlation and fitting values in tab-separated columns: \textit{Channel (tau [s])}, \textit{Experimental correlation}, \textit{Fitted correlation}, \textit{Residuals}, and \textit{Weights (fit)}. 
 
-Below the columns, there are again 5 rows of hashed comments followed by the intensity data in two columns: \textit{Time [s]} and \textit{Intensity trace [kHz]}. Note that there are no assemblies of “multiple runs”, since \textit{PyCorrFit} treats these as individual correlation functions. A *.csv file therefore contains only a single fitted correlation curve and one intensity trace for autocorrelation or two intensity traces for cross-correlation.
+Below the columns, there are again 5 rows of hashed comments followed by the intensity data in two columns: \textit{Time [s]} and \textit{Intensity trace [kHz]}. Note that there are no assemblies of ``multiple runs'', since \textit{PyCorrFit} treats these as individual correlation functions. A *.csv file therefore contains only a single fitted correlation curve and one intensity trace for autocorrelation or two intensity traces for cross-correlation.
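
For post-processing outside of \textit{PyCorrFit}, the exported table can be read back with a few lines of Python. This sketch assumes only the layout described above (hashed comment lines, tab-separated columns, and a two-column intensity trace following the five-column correlation table); the file name is hypothetical.

    import numpy as np

    # Read the correlation table from a file written via
    # "Current Page / Save data (*.csv)"; stop at the two-column trace.
    rows = []
    with open("measurement.csv") as fd:
        for line in fd:
            if line.startswith("#") or not line.strip():
                continue
            cols = line.split()
            if len(cols) < 3:        # reached the intensity trace
                break
            rows.append([float(c) for c in cols])

    data = np.array(rows)
    tau, g_exp, g_fit = data[:, 0], data[:, 1], data[:, 2]
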
 
 \subsubsection{Current Page / Save correlation as image}
 \label{sec:cp.sc}
@@ -403,64 +426,64 @@ For a quick documentation the intensity from the \textit{Trace view} panel can b
 Closes the page; the data set is removed from the session. The page numbers of all other pages remain the same. The command is equivalent with the closer (x) in the tab. 
 
 \subsection{Models}
-
+\label{sec:models}
+When choosing a model from the \textit{Models} menu, a new page opens and the model function is plotted according to the set of starting values for parameters as they were defined in the model description. The list contains all of the implemented model functions, which can be selected during \textit{File / Load data}. The parameters can be manipulated to explore different shapes; the tool \textit{Slider simulation} can also be used. Via \textit{Current page / Import data}, the model may [...]
-
 Standard model functions for a confocal setup are:
 
-Confocal (Gaussian): 3D \ \ \ \ \ \ [Free diffusion in three dimensions]
-
-Confocal (Gaussian): T-3D \ \ \ \ \ \ [Triplet blinking and 3D diffusion]
-
-Confocal (Gaussian): T-3D-3D \ \ \ \ [Triplet with two diffusive components]
-
-Confocal (Gaussian): T-3D-3D-3D \ \ \ \ [Triplet with three diffusive components]
-
-Confocal (Gaussian): 2D \ \ \ \ \ \ [2D diffusion, e.g. in membranes]
-
-Confocal (Gaussian): T-2D \ \ \ \ \ \ [Triplet blinking and 2D diffusion]
-
-Confocal (Gaussian): T-2D-2D\ \ \ \ [Triplet with two diffusive components]
-
-Confocal (Gaussian): T-3D-2D \ \ \ \ [Triplet with mixed 3D and 2D diffusion]
+\begin{tabular}{l l}
+%Confocal (Gaussian): 3D \ \ \ \ \ \ [Free diffusion in three dimensions]
+\rule{0pt}{3ex} - Confocal (Gaussian): T+3D & Triplet blinking and 3D diffusion \\
+\rule{0pt}{3ex} - Confocal (Gaussian): T+3D+3D & Triplet with two diffusive components \\
+%Confocal (Gaussian): T+3D+3D+3D & [Triplet with three diffusive components]
+%Confocal (Gaussian): 2D &  2D diffusion, e.g. in membranes \\
+\rule{0pt}{3ex} - Confocal (Gaussian): T+2D &  Triplet blinking and 2D diffusion \\
+\rule{0pt}{3ex} - Confocal (Gaussian): T+2D+2D & Triplet with two diffusive components \\
+\rule{0pt}{3ex} - Confocal (Gaussian): T+3D+2D &  Triplet with mixed 3D and 2D diffusion \\
+\rule{0pt}{3ex}
+\end{tabular}
 
 There is also a collection of models for FCS setups with TIR excitation:
 
-TIR (Gaussian/Exp.): 3D\ \ \ \ \ \ [3D diffusion]
-
-TIR (Gaussian/Exp.): T-3D-3D\ \ \ \ [Triplet with two diffusive components]
-
-TIR (Gaussian/Exp.): T-3D-2D\ \ \ \ [Triplet with mixed 3D and 2D diffusion]
+\begin{tabular}{l l}
+\rule{0pt}{3ex} - TIR (Gaussian/Exp.): 3D & 3D diffusion \\
+\rule{0pt}{3ex} - TIR (Gaussian/Exp.): T+3D+3D & Triplet with two diffusive components \\
+\rule{0pt}{3ex} - TIR (Gaussian/Exp.): T+3D+2D & Triplet with mixed 3D and 2D diffusion \\
+\rule{0pt}{3ex}
+\end{tabular}
 
-… 
 
-In addition, there are may be user defined model functions which have been uploaded previously via File / Import model (Section 3.1.1).
+In addition, there may be user-defined model functions which have been uploaded previously via \textit{File / Import model} (\hyref{Section}{sec:fm.im}).
 
 \subsection{Preferences}
+\paragraph*{Latex} If the user has a TeX distribution (e.g. MiKTeX for Windows) installed, checking the ``Latex'' option will open a separate, TeX-formatted panel (\textit{Figure1}) via the \textit{Current page / Save […] as image} commands. The \textit{Figure1} panel contains some interactive display options. From there, in a second step, the image can be exported as *.png or *.svg.
 
-The preference menu is still short. If the user has a Tex distribution (e.g. MikTex for Windows) installed, checking the `'Latex´' option will open a separate, Tex formatted panel (\textit{Figure1}) via the \textit{Current page / Save […] as image} commands. The \textit{Figure1} contains some interactive options for display. From there, in a second step, the image can be exported as *.png.
-
-\textit{Verbose} generates a plot showing the spline function used for calculating the weights for each data points when performing a weighted fit. If Latex is active this plot will also be in Tex format. For obvious reasons, such a plot is not generated when using the iteratively improved \textit{Model function} or the actual \textit{Average} correlation curve for weighted fitting.
+\paragraph*{Verbose} If checked, this will cause \textit{PyCorrFit} to display graphs that would otherwise be hidden. In weighted fitting with a spline, the spline function used for calculating the weights for each data point is displayed\footnote{For obvious reasons, such a plot is not generated when using the iteratively improved \textit{Model function} or the actual \textit{Average} correlation curve for weighted fitting.}. When saving the correlation curve as an image (\hyref{Se [...]
 
+\paragraph*{Show weights}
 Checking the option \textit{Show weights} will produce two lines showing the weights for each data point of the correlation function in the plot, as well as in the exported image. Note that the weights are always exported when using the \textit{Save data (*.csv)} command from the \textit{Current page} menu.
 
 \subsection{Help}
+\paragraph*{Documentation}
+This entry displays this documentation using the system's default PDF viewer.
+\paragraph*{Wiki}
+This entry opens the \textit{PyCorrFit} wiki on \textit{GitHub}. Anyone who registers with \textit{GitHub} can make additions and modifications. The wiki is intended for end-users of \textit{PyCorrFit} to share protocols or to add other useful information.
+\paragraph*{Update}
+Establishes a connection to the \textit{GitHub} website to check for a new release; it also provides a few web links associated with \textit{PyCorrFit}.
+\paragraph*{Shell}
+This gives shell access to the functions of \textit{PyCorrFit}. It is particularly useful for troubleshooting.
+\paragraph*{Software}
+This lists the exact version of \textit{Python} and of the corresponding modules with which \textit{PyCorrFit} is currently running.
+\paragraph*{About}
+Information about the participating developers, the license, and the documentation writers.
 
-The help menu mainly provides additional information. \textit{Software used} lists the exact version of \textit{Python} used for programming the executable software. \textit{About} gives information of the participating developers, the license, and documentation writers. \textit{Update} establishes a link to the GitHub website to check for a new release; it also provides a few web links associated with PyCorrFit. Finally, \textit{Shell} specifies … ???.
-
-Documentation downloads this software guide from the GitHub website (PyCorrFit\_doc-1.pdf).
 
 \section{Hacker's corner}
+\paragraph*{New internal model functions}
+Additionally, new file formats can be implemented by extending the \texttt{readfiles} module of \textit{PyCorrFit}. First, edit \texttt{\_\_init\_\_.py} and then add the script \texttt{read\_FileFormat.py}.
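A rough, hypothetical sketch of such a reader script is shown below. The "Correlation" key is the one the frontend reads when importing data; the remaining keys, the function name and the (dirname, filename) signature are assumptions and would have to be matched against the existing readers in the readfiles module:

# read_FileFormat.py -- hypothetical minimal reader sketch
import csv
import os
import numpy as np

def openFileFormat(dirname, filename):
    """Read lag time / correlation pairs from a simple two-column file."""
    path = os.path.join(dirname, filename)
    tau, g = [], []
    with open(path) as fd:
        for row in csv.reader(fd):
            tau.append(float(row[0]))
            g.append(float(row[1]))
    correlation = np.array([tau, g]).T
    return {"Correlation": [correlation],   # one curve per list entry
            "Trace": [None],                # no intensity trace in this format
            "Type": ["AC"],                 # autocorrelation
            "Filename": [filename]}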
 
-Additionally, new file formats can be implemented by programming of the readfiles module of \textit{PyCorrFit}. First, edit the code for \_\_init\_\_.py and then add the script read\_FileFormat.py to the \textit{PyCorrFit} library.[to which library?, more info? Remove this paragraph?] 
-
-External models will be imported with internal model function IDs starting at 7000. Models are checked upon import by the Python module sympy. If the import fails it might be a syntax error or just an error of sympy, since this module is still under development. 
-
-
+External models will be imported with internal model function IDs starting at $7000$. Models are checked upon import by the Python module sympy. If the import fails, it might be due to a syntax error or to an error within sympy itself, since this module is still under development. 
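The check is essentially a parsability test of the model expression. A minimal illustration of such a test with sympy (not the actual import routine of PyCorrFit; the example expressions are arbitrary):

from sympy import sympify, SympifyError

def expression_parses(expr):
    """Return True if sympy accepts the model expression string."""
    try:
        sympify(expr)
    except (SympifyError, SyntaxError):
        return False
    return True

print(expression_parses("1/n * 1/(1 + tau/tau_diff)"))   # True
print(expression_parses("1/n * 1/(1 + tau/tau_diff"))    # False: unbalanced bracket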
 
 \section{Theoretical background}
-
-
 \subsection{Derivation of FCS model functions}
 This section introduces the calculation of FCS model functions. It supplies some background information and points out general properties of correlation functions.
 	
diff --git a/doc-src/PyCorrFit_doc_models.tex b/doc-src/PyCorrFit_doc_models.tex
index 9de3505..adece58 100644
--- a/doc-src/PyCorrFit_doc_models.tex
+++ b/doc-src/PyCorrFit_doc_models.tex
@@ -25,90 +25,121 @@ The parameters in the equation above need to be calibrated to obtain the diffusi
 
 
 % 2D diffusion
-\noindent \begin{tabular}{lp{.7\textwidth}}
-Name & \textbf{2D (Gauß)} \\ 
-ID & \textbf{6001} \\ 
-Descr. &  Two-dimensional diffusion with a Gaussian laser profile\cite{Aragon1976, Qian1991, Rigler1993}. \\ 
-\end{tabular}
-\begin{align}
-G(\tau) = A_0 + \frac{1}{N} \frac{1}{(1+\tau/\tau_\mathrm{diff})}
-\end{align} 
-\begin{center}
-\begin{tabular}{ll}
-$A_0$ & Offset \\ 
-$N$ & Effective number of particles in confocal area \\ 
-$\tau_\mathrm{diff}$ &   Characteristic residence time in confocal area \\
-\end{tabular} \\
-\end{center}
-\vspace{2em}
+%\noindent \begin{tabular}{lp{.7\textwidth}}
+%Name & \textbf{2D (Gauß)} \\ 
+%ID & \textbf{6001} \\ 
+%Descr. &  Two-dimensional diffusion with a Gaussian laser profile\cite{Aragon1976, Qian1991, Rigler1993}. \\ 
+%\end{tabular}
+%\begin{align}
+%G(\tau) = A_0 + \frac{1}{N} \frac{1}{(1+\tau/\tau_\mathrm{diff})}
+%\end{align} 
+%\begin{center}
+%\begin{tabular}{ll}
+%$A_0$ & Offset \\ 
+%$N$ & Effective number of particles in confocal area \\ 
+%$\tau_\mathrm{diff}$ &   Characteristic residence time in confocal area \\
+%\end{tabular} \\
+%\end{center}
+%\vspace{2em}
 
 
-% 2D diffusion + triplett
+
+
+% 3D diffusion
+%\noindent \begin{tabular}{lp{.7\textwidth}}
+%Name & \textbf{3D (Gauß)} \\ 
+%ID & \textbf{6012} \\ 
+%Descr. &  Three-dimensional free diffusion with a Gaussian laser profile (eliptical)\cite{Aragon1976, Qian1991, Rigler1993}. \\ 
+%\end{tabular}
+%\begin{align}
+%G(\tau) = A_0 + \frac{1}{N} \frac{1}{(1+\tau/\tau_\mathrm{diff})} \frac{1}{\sqrt{1+\tau/(\mathit{SP}^2 \tau_\mathrm{diff})}}
+%\end{align} 
+%\begin{center}
+%\begin{tabular}{ll}
+%$A_0$ & Offset \\ 
+%$N$ & Effective number of particles in confocal volume \\ 
+%$\tau_\mathrm{diff}$ &  Characteristic residence time in confocal volume \\ 
+%$\mathit{SP}$ & Structural parameter, describes elongation of the confocal volume \\
+%\end{tabular}
+%\end{center}
+%\vspace{2em}
+
+
+% 3D diffusion + triplet
 \noindent \begin{tabular}{lp{.7\textwidth}}
-Name & \textbf{2D+T (Gauß)} \\ 
-ID & \textbf{6002} \\ 
-Descr. &  Two-dimensional diffusion with a Gaussian laser profile, including a triplet component\cite{Aragon1976, Qian1991, Rigler1993,Widengren1994, Widengren1995, Haupts1998}. \\ 
+Name & \textbf{Confocal (Gaussian) T+3D} \\ 
+ID & \textbf{6011} \\ 
+Descr. &  Three-dimensional free diffusion with a Gaussian laser profile (elliptical), including a triplet component\cite{Widengren1994, Widengren1995, Haupts1998}. \\ 
 \end{tabular}
 \begin{align}
-G(\tau) = A_0 + \frac{1}{N} \frac{1}{(1+\tau/\tau_\mathrm{diff})}  \left(1 + \frac{T e^{-\tau/\tau_\mathrm{trip}}}{1-T}  \right)
+G(\tau) = A_0 + \frac{1}{N} \frac{1}{(1+\tau/\tau_\mathrm{diff})} \frac{1}{\sqrt{1+\tau/(\mathit{SP}^2 \tau_\mathrm{diff})}} \left(1 + \frac{T e^{-\tau/\tau_\mathrm{trip}}}{1-T}  \right)
 \end{align} 
 \begin{center}
 \begin{tabular}{ll}
 $A_0$ & Offset \\ 
-$N$ & Effective number of particles in confocal area \\ 
-$\tau_\mathrm{diff}$ &  Characteristic residence time in confocal area \\ 
+$N$ & Effective number of particles in confocal volume \\ 
+$\tau_\mathrm{diff}$ &  Characteristic residence time in confocal volume \\ 
+$\mathit{SP}$ & Structural parameter, describes elongation of the confocal volume \\
 $T$ &  Fraction of particles in triplet (non-fluorescent) state\\ 
-$\tau_\mathrm{trip}$ &  Characteristic residence time in triplet state \\ 
+$\tau_\mathrm{trip}$ &  Characteristic residence time in triplet state \\
 \end{tabular}
 \end{center}
 \vspace{2em}
 
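For readers who want to evaluate the T+3D expression above numerically, a direct NumPy transcription is given below. This is a sketch written from the equation above, not the code shipped with the built-in model; the parameter names and example values are chosen freely:

import numpy as np

def g_confocal_t3d(tau, n, tau_diff, sp, t, tau_trip, a0=0.0):
    """Confocal (Gaussian) T+3D correlation function (ID 6011, see above)."""
    triplet = 1.0 + t * np.exp(-tau / tau_trip) / (1.0 - t)
    diffusion = 1.0 / ((1.0 + tau / tau_diff)
                       * np.sqrt(1.0 + tau / (sp**2 * tau_diff)))
    return a0 + diffusion * triplet / n

tau = np.logspace(-6, 1, 200)   # lag times (dimensionless units)
g = g_confocal_t3d(tau, n=5.0, tau_diff=0.5, sp=5.0, t=0.1, tau_trip=0.001)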
 
-% 3D diffusion
+
+% 3D+3D diffusion + triplett
 \noindent \begin{tabular}{lp{.7\textwidth}}
-Name & \textbf{3D (Gauß)} \\ 
-ID & \textbf{6012} \\ 
-Descr. &  Three-dimensional free diffusion with a Gaussian laser profile (eliptical)\cite{Aragon1976, Qian1991, Rigler1993}. \\ 
+Name & \textbf{Confocal (Gaussian) T+3D+3D} \\ 
+ID & \textbf{6030} \\ 
+Descr. &  Two-component three-dimensional free diffusion with a Gaussian laser profile, including a triplet component\cite{Elson1974, Aragon1976, Palmer1987, Thomps:bookFCS2002}. \\ 
 \end{tabular}
 \begin{align}
-G(\tau) = A_0 + \frac{1}{N} \frac{1}{(1+\tau/\tau_\mathrm{diff})} \frac{1}{\sqrt{1+\tau/(\mathit{SP}^2 \tau_\mathrm{diff})}}
+G(\tau) &= A_0 + \frac{1}{N (F + \alpha (1-F))^2}  \left(1 + \frac{T e^{-\tau/\tau_\mathrm{trip}}}{1-T}  \right)  \times \\
+\notag &\times  \left[ \frac{F}{(1+\tau/\tau_1)}  \frac{1}{\sqrt{1+\tau/(\mathit{SP}^2 \tau_1)}} + \alpha^2 \frac{1-F}{ (1+\tau/\tau_2) }  \frac{1}{\sqrt{1+\tau/(\mathit{SP}^2 \tau_2)}} \right]
 \end{align} 
 \begin{center}
 \begin{tabular}{ll}
 $A_0$ & Offset \\ 
-$N$ & Effective number of particles in confocal volume \\ 
-$\tau_\mathrm{diff}$ &  Characteristic residence time in confocal volume \\ 
+$N$ & Effective number of particles in confocal volume ($N = N_1+N_2$) \\ 
+$\tau_1$ &  Diffusion time of particle species 1 \\ 
+$\tau_2$ &  Diffusion time of particle species 2 \\ 
+$F$ & Fraction of molecules of species 1 ($N_1 = F N$) \\
+$\alpha$ & Relative molecular brightness of particles 1 and 2 ($ \alpha = q_2/q_1$) \\
 $\mathit{SP}$ & Structural parameter, describes elongation of the confocal volume \\
+$T$ &  Fraction of particles in triplet (non-fluorescent) state\\ 
+$\tau_\mathrm{trip}$ &  Characteristic residence time in triplet state \\ 
 \end{tabular}
 \end{center}
 \vspace{2em}
 
 
-% 3D diffusion + triplet
+
+% 2D diffusion + triplett
 \noindent \begin{tabular}{lp{.7\textwidth}}
-Name & \textbf{3D+T (Gauß)} \\ 
-ID & \textbf{6011} \\ 
-Descr. &  Three-dimensional free diffusion with a Gaussian laser profile (eliptical), including a triplet component\cite{Widengren1994, Widengren1995, Haupts1998}. \\ 
+Name & \textbf{Confocal (Gaussian) T+2D} \\ 
+ID & \textbf{6002} \\ 
+Descr. &  Two-dimensional diffusion with a Gaussian laser profile, including a triplet component\cite{Aragon1976, Qian1991, Rigler1993,Widengren1994, Widengren1995, Haupts1998}. \\ 
 \end{tabular}
 \begin{align}
-G(\tau) = A_0 + \frac{1}{N} \frac{1}{(1+\tau/\tau_\mathrm{diff})} \frac{1}{\sqrt{1+\tau/(\mathit{SP}^2 \tau_\mathrm{diff})}} \left(1 + \frac{T e^{-\tau/\tau_\mathrm{trip}}}{1-T}  \right)
+G(\tau) = A_0 + \frac{1}{N} \frac{1}{(1+\tau/\tau_\mathrm{diff})}  \left(1 + \frac{T e^{-\tau/\tau_\mathrm{trip}}}{1-T}  \right)
 \end{align} 
 \begin{center}
 \begin{tabular}{ll}
 $A_0$ & Offset \\ 
-$N$ & Effective number of particles in confocal volume \\ 
-$\tau_\mathrm{diff}$ &  Characteristic residence time in confocal volume \\ 
-$\mathit{SP}$ & Structural parameter, describes elongation of the confocal volume \\
+$N$ & Effective number of particles in confocal area \\ 
+$\tau_\mathrm{diff}$ &  Characteristic residence time in confocal area \\ 
 $T$ &  Fraction of particles in triplet (non-fluorescent) state\\ 
-$\tau_\mathrm{trip}$ &  Characteristic residence time in triplet \\
+$\tau_\mathrm{trip}$ &  Characteristic residence time in triplet state \\ 
 \end{tabular}
 \end{center}
 \vspace{2em}
 
 
+
 % 2D+2D diffusion + triplett
 \noindent \begin{tabular}{lp{.7\textwidth}}
-Name & \textbf{2D+2D+T (Gauß)} \\ 
+Name & \textbf{Confocal (Gaussian) T+2D+2D} \\ 
 ID & \textbf{6031} \\ 
 Descr. &  Two-component, two-dimensional diffusion with a Gaussian laser profile, including a triplet component\cite{Elson1974, Aragon1976, Palmer1987, Thomps:bookFCS2002}. \\ 
 \end{tabular}
@@ -130,9 +161,10 @@ $\tau_\mathrm{trip}$ &  Characteristic residence time in triplet state \\
 \vspace{2em}
 
 
+
 % 3D+2D diffusion + triplett
 \noindent \begin{tabular}{lp{.7\textwidth}}
-Name & \textbf{3D+2D+T (Gauß)} \\ 
+Name & \textbf{Confocal (Gaussian) T+3D+2D} \\ 
 ID & \textbf{6032} \\ 
 Descr. &  Two-component, two- and three-dimensional diffusion with a Gaussian laser profile, including a triplet component\cite{Elson1974, Aragon1976, Palmer1987, Thomps:bookFCS2002}. \\ 
 \end{tabular}
@@ -155,33 +187,6 @@ $\tau_\mathrm{trip}$ &  Characteristic residence time in triplet state \\
 \vspace{2em}
 
 
-% 3D+3D diffusion + triplett
-\noindent \begin{tabular}{lp{.7\textwidth}}
-Name & \textbf{3D+3D+T (Gauß)} \\ 
-ID & \textbf{6030} \\ 
-Descr. &  Two-component three-dimensional free diffusion with a Gaussian laser profile, including a triplet component\cite{Elson1974, Aragon1976, Palmer1987, Thomps:bookFCS2002}. \\ 
-\end{tabular}
-\begin{align}
-G(\tau) &= A_0 + \frac{1}{N (F + \alpha (1-F))²}  \left(1 + \frac{T e^{-\tau/\tau_\mathrm{trip}}}{1-T}  \right)  \times \\
-\notag &\times  \left[ \frac{F}{(1+\tau/\tau_1)}  \frac{1}{\sqrt{1+\tau/(\mathit{SP}^2 \tau_1)}} + \alpha^2 \frac{1-F}{ (1+\tau/\tau_2) }  \frac{1}{\sqrt{1+\tau/(\mathit{SP}^2 \tau_2)}} \right]
-\end{align} 
-\begin{center}
-\begin{tabular}{ll}
-$A_0$ & Offset \\ 
-$N$ & Effective number of particles in confocal volume ($N = N_1+N_2$) \\ 
-$\tau_1$ &  Diffusion time of particle species 1 \\ 
-$\tau_2$ &  Diffusion time of particle species 2 \\ 
-$F$ & Fraction of molecules of species 1 ($N_1 = F N$) \\
-$\alpha$ & Relative molecular brightness of particles 1 and 2 ($ \alpha = q_2/q_1$) \\
-$\mathit{SP}$ & Structural parameter, describes elongation of the confocal volume \\
-$T$ &  Fraction of particles in triplet (non-fluorescent) state\\ 
-$\tau_\mathrm{trip}$ &  Characteristic residence time in triplet state \\ 
-\end{tabular}
-\end{center}
-\vspace{2em}
-
-
-
 \subsubsection{Confocal TIR-FCS}
 The detection volume is axially confined by an evanescent field and has an effective size of
 \begin{align}
@@ -203,7 +208,7 @@ The lateral detection area has the same shape as in confocal FCS. Thus, correlat
 
 % 3D diffusion (Gauß/exp)
 \noindent \begin{tabular}{lp{.7\textwidth}}
-Name & \textbf{3D (Gauß/exp)} \\ 
+Name & \textbf{TIR (Gaussian/Exp.) 3D} \\ 
 ID & \textbf{6013} \\ 
 Descr. &  Three-dimensional free diffusion with a Gaussian lateral detection profile and an exponentially decaying profile in axial direction\cite{Starr2001, Hassler2005, Ohsugi2006}. \\ 
 \end{tabular}
@@ -222,27 +227,29 @@ $D$ & Diffusion coefficient  \\
 \vspace{2em}
 
 
-% 2D+3D+T diffusion (Gauß/exp)
+% 3D+3D+T diffusion (Gauß/exp)
 \noindent \begin{tabular}{lp{.7\textwidth}}
-Name & \textbf{3D+2D+T (Gauß/exp)} \\ 
-ID & \textbf{6033} \\ 
-Descr. &  Two-component, two- and three-dimensional diffusion with a Gaussian lateral detection profile and an exponentially decaying profile in axial direction, including a triplet component\cite{Starr2001, Hassler2005, Ohsugi2006, Elson1974, Aragon1976, Palmer1987, Thomps:bookFCS2002}. \\ 
+Name & \textbf{TIR (Gaussian/Exp.) T+3D+3D} \\ 
+ID & \textbf{6034} \\ 
+Descr. &  Two-component three-dimensional diffusion with a Gaussian lateral detection profile and an exponentially decaying profile in axial direction, including a triplet component\cite{Starr2001, Hassler2005, Ohsugi2006, Elson1974, Aragon1976, Palmer1987, Thomps:bookFCS2002}. \\
 \end{tabular}
 \begin{align}
-G(\tau) &= A_0 + \frac{1}{N (1-F + \alpha F)^2} \left(1 + \frac{T e^{-\tau/\tau_\mathrm{trip}}}{1-T}  \right)  \times \\
-& \notag \times  \left[
-\frac{1-F}{1+ 4 D_\mathrm{2D} \tau/R_0^2} + 
-\frac{\alpha^2 F \kappa}{1+ 4 D_\mathrm{3D} \tau/R_0^2} 
-\left( \sqrt{\frac{D_\mathrm{3D} \tau}{\pi}} + \frac{1 - 2 D_\mathrm{3D} \tau \kappa^2}{2 \kappa}  w\!\left(i \sqrt{D_\mathrm{3D} \tau} \kappa\right) \right) \right]
+G(\tau) = &A_0 + \frac{1}{N (1-F + \alpha F)^2} \left(1 + \frac{T e^{-\tau/\tau_\mathrm{trip}}}{1-T}  \right)  \times \\
+\notag \times  \Bigg[ \,\, & 
+\frac{F \kappa}{1+ 4 D_1 \tau/R_0^2} 
+\left( \sqrt{\frac{D_1 \tau}{\pi}} + \frac{1 - 2 D_1 \tau \kappa^2}{2 \kappa}  w\!\left(i \sqrt{D_1 \tau} \kappa\right) \right) + \\
+ \notag + &
+\frac{(1-F) \alpha^2 \kappa}{1+ 4 D_2 \tau/R_0^2} 
+\left( \sqrt{\frac{D_2 \tau}{\pi}} + \frac{1 - 2 D_2 \tau \kappa^2}{2 \kappa}  w\!\left(i \sqrt{D_2 \tau} \kappa\right) \right) \,\, \Bigg]
 \end{align} 
 \begin{center}
 \begin{tabular}{ll}
 $A_0$ & Offset \\ 
-$N$ & Effective number of particles in confocal volume ($N = N_\mathrm{2D}+N_\mathrm{3D}$) \\ 
-$D_\mathrm{2D}$ &  Diffusion coefficient of surface bound particles \\ 
-$D_\mathrm{3D}$ &  Diffusion coefficient of freely diffusing particles \\ 
-$F$ & Fraction of molecules of the freely diffusing species ($N_\mathrm{3D} = F N$) \\
-$\alpha$ & Relative molecular brightness of particle species ($ \alpha = q_\mathrm{3D}/q_\mathrm{2D}$) \\
+$N$ & Effective number of particles in confocal volume ($N = N_1+N_2$) \\ 
+$D_1$ &  Diffusion coefficient of species 1 \\ 
+$D_2$ &  Diffusion coefficient of species 2 \\ 
+$F$ & Fraction of molecules of species 1 ($N_1 = F N$) \\
+$\alpha$ & Relative molecular brightness of particle species ($ \alpha = q_2/q_1$) \\
 $R_0$ & Lateral extent of the detection volume \\
 $\kappa$ &  Evanescent decay constant ($\kappa = 1/d_\mathrm{eva}$)\\ 
 $T$ &  Fraction of particles in triplet (non-fluorescent) state\\ 
@@ -252,29 +259,29 @@ $\tau_\mathrm{trip}$ &  Characteristic residence time in triplet state \\
 \vspace{2em}
 
 
-% 3D+3D+T diffusion (Gauß/exp)
+
+
+% 2D+3D+T diffusion (Gauß/exp)
 \noindent \begin{tabular}{lp{.7\textwidth}}
-Name & \textbf{3D+3D+T (Gauß/exp)} \\ 
-ID & \textbf{6034} \\ 
-Descr. &  Two-component three-dimensional diffusion with a Gaussian lateral detection profile and an exponentially decaying profile in axial direction, including a triplet component\cite{Starr2001, Hassler2005, Ohsugi2006, Elson1974, Aragon1976, Palmer1987, Thomps:bookFCS2002}. \\
+Name & \textbf{TIR (Gaussian/Exp.) T+3D+2D} \\ 
+ID & \textbf{6033} \\ 
+Descr. &  Two-component, two- and three-dimensional diffusion with a Gaussian lateral detection profile and an exponentially decaying profile in axial direction, including a triplet component\cite{Starr2001, Hassler2005, Ohsugi2006, Elson1974, Aragon1976, Palmer1987, Thomps:bookFCS2002}. \\ 
 \end{tabular}
 \begin{align}
-G(\tau) = &A_0 + \frac{1}{N (1-F + \alpha F)^2} \left(1 + \frac{T e^{-\tau/\tau_\mathrm{trip}}}{1-T}  \right)  \times \\
-\notag \times  \Bigg[ \,\, & 
-\frac{F \kappa}{1+ 4 D_1 \tau/R_0^2} 
-\left( \sqrt{\frac{D_1 \tau}{\pi}} + \frac{1 - 2 D_1 \tau \kappa^2}{2 \kappa}  w\!\left(i \sqrt{D_1 \tau} \kappa\right) \right) + \\
- \notag + &
-\frac{(1-F) \alpha^2 \kappa}{1+ 4 D_2 \tau/R_0^2} 
-\left( \sqrt{\frac{D_2 \tau}{\pi}} + \frac{1 - 2 D_2 \tau \kappa^2}{2 \kappa}  w\!\left(i \sqrt{D_2 \tau} \kappa\right) \right) \,\, \Bigg]
+G(\tau) &= A_0 + \frac{1}{N (1-F + \alpha F)^2} \left(1 + \frac{T e^{-\tau/\tau_\mathrm{trip}}}{1-T}  \right)  \times \\
+& \notag \times  \left[
+\frac{1-F}{1+ 4 D_\mathrm{2D} \tau/R_0^2} + 
+\frac{\alpha^2 F \kappa}{1+ 4 D_\mathrm{3D} \tau/R_0^2} 
+\left( \sqrt{\frac{D_\mathrm{3D} \tau}{\pi}} + \frac{1 - 2 D_\mathrm{3D} \tau \kappa^2}{2 \kappa}  w\!\left(i \sqrt{D_\mathrm{3D} \tau} \kappa\right) \right) \right]
 \end{align} 
 \begin{center}
 \begin{tabular}{ll}
 $A_0$ & Offset \\ 
-$N$ & Effective number of particles in confocal volume ($N = N_1+N_2$) \\ 
-$D_1$ &  Diffusion coefficient of species 1 \\ 
-$D_2$ &  Diffusion coefficient of species 2 \\ 
-$F$ & Fraction of molecules of species 1 ($N_1 = F N$) \\
-$\alpha$ & Relative molecular brightness of particle species ($ \alpha = q_2/q_1$) \\
+$N$ & Effective number of particles in confocal volume ($N = N_\mathrm{2D}+N_\mathrm{3D}$) \\ 
+$D_\mathrm{2D}$ &  Diffusion coefficient of surface bound particles \\ 
+$D_\mathrm{3D}$ &  Diffusion coefficient of freely diffusing particles \\ 
+$F$ & Fraction of molecules of the freely diffusing species ($N_\mathrm{3D} = F N$) \\
+$\alpha$ & Relative molecular brightness of particle species ($ \alpha = q_\mathrm{3D}/q_\mathrm{2D}$) \\
 $R_0$ & Lateral extent of the detection volume \\
 $\kappa$ &  Evanescent decay constant ($\kappa = 1/d_\mathrm{eva}$)\\ 
 $T$ &  Fraction of particles in triplet (non-fluorescent) state\\ 
@@ -285,6 +292,7 @@ $\tau_\mathrm{trip}$ &  Characteristic residence time in triplet state \\
 
 
 
+
 \subsubsection{TIR-FCS with a square-shaped lateral detection volume}
 The detection volume is axially confined by an evanescent field of depth\footnote{Where the field has decayed to $1/e$} $d_\mathrm{eva} = 1 / \kappa$.
 The lateral detection area is a convolution of the point spread function of the microscope of size $\sigma$,
@@ -300,49 +308,60 @@ w\!(i\xi) &= e^{\xi^2} \mathrm{erfc}(\xi) \\
 \vspace{2em}
 
 
-% 2D TIRF diffusion (□xσ)
+
+% 3D TIRF diffusion (□xσ)
 \noindent \begin{tabular}{lp{.7\textwidth}}
-Name & \textbf{2D (□x$\upsigma$)} \\ 
-ID & \textbf{6000} \\ 
-Descr. &  Two-dimensional diffusion with a square-shaped lateral detection area taking into account the size of the point spread function\cite{Ries2008390, Yordanov2011}\footnote{The reader is made aware, that reference \cite{Ries2008390} contains several unfortunate misprints.}. \\ 
+Name & \textbf{TIR (□x$\upsigma$/Exp.) 3D} \\ 
+ID & \textbf{6010} \\ 
+Descr. &  Three-dimensional diffusion with a square-shaped lateral detection area taking into account the size of the point spread function; and an exponentially decaying profile in axial direction\cite{Ries2008390, Yordanov2011}. \\ 
 \end{tabular}
 \begin{align}
-G(\tau) = \frac{1}{C} \left[
-\frac{2 \sqrt{\sigma^2+D \tau}}{\sqrt{\pi} a^2}
+G(\tau) =  \frac{\kappa^2}{C} &
+\left( \sqrt{\frac{D \tau}{\pi}} + \frac{1 - 2 D \tau \kappa^2}{2 \kappa} w\!\left(i \sqrt{D \tau} \kappa\right) \right) \times \\
+\notag  \times \Bigg[ & \frac{2 \sqrt{\sigma^2+D \tau}}{\sqrt{\pi} a^2}
 \left( \exp\left(-\frac{a^2}{4(\sigma^2+D \tau)}\right) - 1 \right) +
-\frac{1}{a} \, \mathrm{erf}\left(\frac{a}{2 \sqrt{\sigma^2+D \tau}}\right)
-\right]^2
+\frac{1}{a} \, \mathrm{erf}\left(\frac{a}{2 \sqrt{\sigma^2+D \tau}}\right) \Bigg]^2
 \end{align} 
 \begin{center}
 \begin{tabular}{ll}
-$C$ & Particle concentration in detection area \\ 
+$C$ & Particle concentration in detection volume \\ 
 $\sigma$ & Lateral size of the point spread function \\ 
 $a$ & Side size of the square-shaped detection area \\
+$\kappa$ &  Evanescent decay constant ($\kappa = 1/d_\mathrm{eva}$)\\ 
 $D$ & Diffusion coefficient \\
 \end{tabular} \\
 \end{center}
 \vspace{2em}
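The factor w(i·sqrt(Dτ)·κ) appearing in the TIR expressions equals exp(ξ²)·erfc(ξ) with ξ = sqrt(Dτ)·κ (see the definition of w at the beginning of this subsection). Numerically it is safer to evaluate this as a scaled complementary error function than as the naive product. A short sketch using scipy.special follows; the TIR model files do import scipy.special, but whether they call erfcx specifically is not shown in this diff, and the example numbers are arbitrary:

import numpy as np
from scipy.special import erfcx

def w_of_i_xi(xi):
    """w(i*xi) = exp(xi**2) * erfc(xi), evaluated without overflow."""
    return erfcx(xi)

xi = np.sqrt(10.0 * 1e-3) * 5.0   # example: sqrt(D*tau) * kappa
print(w_of_i_xi(xi))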
 
 
-% 3D TIRF diffusion (□xσ)
+% 3D+3D TIRF diffusion (□xσ)
 \noindent \begin{tabular}{lp{.7\textwidth}}
-Name & \textbf{3D (□x$\upsigma$/exp)} \\ 
-ID & \textbf{6010} \\ 
-Descr. &  Three-dimensional diffusion with a square-shaped lateral detection area taking into account the size of the point spread function; and an exponential decaying profile in axial direction\cite{Ries2008390, Yordanov2011}. \\ 
+Name & \textbf{TIR (□x$\upsigma$/Exp.) 3D+3D} \\ 
+ID & \textbf{6023} \\ 
+Descr. &  Two-component three-dimensional free diffusion with a square-shaped lateral detection area taking into account the size of the point spread function; and an exponentially decaying profile in axial direction. \newline
+The correlation function is a superposition of three-dimensional model functions of the type \textbf{3D (□x$\upsigma$)} (6010)\cite{Ries2008390, Yordanov2011}. \\
+\end{tabular}
+\vspace{2em}
+
+
+% 2D TIRF diffusion (□xσ)
+\noindent \begin{tabular}{lp{.7\textwidth}}
+Name & \textbf{TIR (□x$\upsigma$) 2D} \\ 
+ID & \textbf{6000} \\ 
+Descr. &  Two-dimensional diffusion with a square-shaped lateral detection area taking into account the size of the point spread function\cite{Ries2008390, Yordanov2011}\footnote{The reader is made aware that reference \cite{Ries2008390} contains several unfortunate misprints.}. \\ 
 \end{tabular}
 \begin{align}
-G(\tau) =  \frac{\kappa^2}{C} &
-\left( \sqrt{\frac{D \tau}{\pi}} + \frac{1 - 2 D \tau \kappa^2)}{2 \kappa} w\!\left(i \sqrt{D \tau} \kappa\right) \right) \times \\
-\notag  \times \Bigg[ & \frac{2 \sqrt{\sigma^2+D \tau}}{\sqrt{\pi} a^2}
+G(\tau) = \frac{1}{C} \left[
+\frac{2 \sqrt{\sigma^2+D \tau}}{\sqrt{\pi} a^2}
 \left( \exp\left(-\frac{a^2}{4(\sigma^2+D \tau)}\right) - 1 \right) +
-\frac{1}{a} \, \mathrm{erf}\left(\frac{a}{2 \sqrt{\sigma^2+D \tau}}\right) \Bigg]^2
+\frac{1}{a} \, \mathrm{erf}\left(\frac{a}{2 \sqrt{\sigma^2+D \tau}}\right)
+\right]^2
 \end{align} 
 \begin{center}
 \begin{tabular}{ll}
-$C$ & Particle concentration in detection volume \\ 
+$C$ & Particle concentration in detection area \\ 
 $\sigma$ & Lateral size of the point spread function \\ 
 $a$ & Side size of the square-shaped detection area \\
-$\kappa$ &  Evanescent decay constant ($\kappa = 1/d_\mathrm{eva}$)\\ 
 $D$ & Diffusion coefficient \\
 \end{tabular} \\
 \end{center}
@@ -351,7 +370,7 @@ $D$ & Diffusion coefficient \\
 
 % 2D+2D TIRF diffusion (□xσ)
 \noindent \begin{tabular}{lp{.7\textwidth}}
-Name & \textbf{2D+2D (□x$\upsigma$/exp)} \\ 
+Name & \textbf{TIR (□x$\upsigma$) 2D+2D} \\ 
 ID & \textbf{6022} \\ 
 Descr. &  Two-component two-dimensional diffusion with a square-shaped lateral detection area taking into account the size of the point spread function. \newline
 The correlation function is a superposition of two-dimensional model functions of the type \textbf{2D (□x$\upsigma$)} (6000)\cite{Ries2008390, Yordanov2011}. \\
@@ -361,7 +380,7 @@ The correlation function is a superposition of two-dimensional model functions o
 
 % 3D+2D TIRF diffusion (□xσ)
 \noindent \begin{tabular}{lp{.7\textwidth}}
-Name & \textbf{3D+2D (□x$\upsigma$/exp)} \\ 
+Name & \textbf{TIR (□x$\upsigma$/Exp.) 3D+2D} \\ 
 ID & \textbf{6020} \\ 
 Descr. &  Two-component two- and three-dimensional diffusion with a square-shaped lateral detection area taking into account the size of the point spread function; and an exponentially decaying profile in axial direction.  \newline
 The correlation function is a superposition of the two-dimensional model function \textbf{2D (□x$\upsigma$)} (6000) and the three-dimensional model function \textbf{3D (□x$\upsigma$)} (6010)\cite{Ries2008390, Yordanov2011}.
@@ -369,19 +388,9 @@ The correlation function is a superposition of the two-dimensional model functio
 \vspace{2em}
 
 
-% 3D+3D TIRF diffusion (□xσ)
-\noindent \begin{tabular}{lp{.7\textwidth}}
-Name & \textbf{3D+3D (□x$\upsigma$/exp)} \\ 
-ID & \textbf{6023} \\ 
-Descr. &  Two-component three-dimensional free diffusion with a square-shaped lateral detection area taking into account the size of the point spread function; and an exponential decaying profile in axial direction. \newline
-The correlation function is a superposition of three-dimensional model functions of the type \textbf{3D (□x$\upsigma$)} (6010)\cite{Ries2008390, Yordanov2011}. \\
-\end{tabular}
-\vspace{2em}
-
-
 % 3D+2D+kin TIRF diffusion (□xσ)
 \noindent \begin{tabular}{lp{.7\textwidth}}
-Name & \textbf{3D+2D+kin (□x$\upsigma$/exp)} \\ 
+Name & \textbf{TIR (□x$\upsigma$/Exp.) 3D+2D+kin} \\ 
 ID & \textbf{6021} \\ 
 Descr. &  Two-component two- and three-dimensional diffusion with a square-shaped lateral detection area taking into account the size of the point spread function; and an exponentially decaying profile in axial direction. This model covers binding and unbinding kinetics.  \newline 
 The correlation function for this model was introduced in \cite{Ries2008390}. Because approximations are made in the derivation, please verify whether this model is applicable to your problem before using it.
diff --git a/src/PyCorrFit.py b/src/PyCorrFit.py
index d93a2dc..f77c598 100755
--- a/src/PyCorrFit.py
+++ b/src/PyCorrFit.py
@@ -1,7 +1,6 @@
 #!/usr/bin/python
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     A flexible tool for fitting and analyzing correlation curves.
 
@@ -13,19 +12,20 @@
     unit of inverse area: 100 /um^2
     unit of inv. volume : 1000 /um^3
 
+    Copyright (C) 2011-2012  Paul Müller
 
-    PyCorrFit is free software: you can redistribute it and/or modify
-    it under the terms of the GNU General Public License as published 
-    by the Free Software Foundation, either version 2 of the License, 
-    or (at your option) any later version.
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
 
-    PyCorrFit is distributed in the hope that it will be useful,
-    but WITHOUT ANY WARRANTY; without even the implied warranty of 
-    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  
-    See the GNU General Public License for more details. 
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
 
     You should have received a copy of the GNU General Public License 
-    along with this program.  If not, see <http://www.gnu.org/licenses/>.
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
diff --git a/src/__init__.py b/src/__init__.py
index 5832ae1..c525691 100644
--- a/src/__init__.py
+++ b/src/__init__.py
@@ -1,18 +1,30 @@
 # -*- coding: utf-8 -*-
 """
-PyCorrFit
-Paul Müller, Biotec - TU Dresden
+    In current biomedical research, fluorescence correlation spectroscopy (FCS)
+    is applied to characterize molecular dynamic processes in vitro and in living
+    cells. Commercial FCS setups only permit data analysis that is limited to a
+    specific instrument by the use of in-house file formats or a finite number of
+    implemented correlation model functions. PyCorrFit is a general-purpose FCS
+    evaluation software that, amongst other formats, supports the established Zeiss
+    ConfoCor3 ~.fcs file format. PyCorrFit comes with several built-in model
+    functions, covering a wide range of applications in standard confocal FCS.
+    In addition, it contains equations dealing with different excitation geometries
+    like total internal reflection (TIR).
 
-In current biomedical research, fluorescence correlation spectroscopy (FCS)
-is applied to characterize molecular dynamic processes in vitro and in living
-cells. Commercial FCS setups only permit data analysis that is limited to a
-specific instrument by the use of in-house file formats or a finite number of
-implemented correlation model functions. PyCorrFit is a general-purpose FCS
-evaluation software that, amongst other formats, supports the established Zeiss
-ConfoCor3 ~.fcs file format. PyCorrFit comes with several built-in model
-functions, covering a wide range of applications in standard confocal FCS.
-In addition, it contains equations dealing with different excitation geometries
-like total internal reflection (TIR).
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 import doc
diff --git a/src/doc.py b/src/doc.py
index cd101b1..140c27e 100755
--- a/src/doc.py
+++ b/src/doc.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module doc
     *doc* is the documentation. Functions for various text output point here.
@@ -12,6 +11,21 @@
     unit of Diff.coeff  : 10 µm²/s
     unit of inverse area: 100 /µm²
     unit of inv. volume : 1000 /µm³
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
@@ -40,7 +54,6 @@ try:
 except ImportError:
     print " Warning: module sympy not found!"
     sympy = Fake()
-import tempfile
 try:
     import urllib2
 except ImportError:
@@ -57,19 +70,6 @@ import yaml
 import readfiles
 
 
-
-def description():
-    return """PyCorrFit is a data displaying, fitting and evaluat
-tool, targeted at fluorescence correlation
-spectroscopy. PyCorrFit is written in Python."""
-
-
-def description():
-    return """PyCorrFit displays and processes data
-from fluorescence correlation spectroscopy
-measurements. PyCorrFit is written in Python."""
-
-
 def GetLocationOfChangeLog(filename = "ChangeLog.txt"):
     locations = list()
     fname1 = os.path.realpath(__file__)
@@ -180,109 +180,6 @@ along with this program.  If not, see <http://www.gnu.org/licenses/>.
 """
 
 
-def saveCSVinfo(parent):
-    a = "# This file was created using PyCorrFit version "+\
-        parent.version+".\n#\n"
-    b = """# Lines starting with a '#' are treated as comments.
-# The data is stored as CSV below this comment section.
-# Data usually consists of lag times (channels) and
-# the corresponding correlation function - experimental
-# and fitted values plus resulting residuals.
-# If this file is opened by PyCorrFit, only the first two
-# columns will be imported as experimental data.
-#
-"""
-    return a+b
-
-
-def SessionReadme(parent):
-    a = "This file was created using PyCorrFit version "+parent.version+"\n"
-    b = """The .zip archive you are looking at is a stored session of PyCorrFit.
-If you are interested in how the data is stored, you will find
-out here. Most important are the dimensionalities:
-Dimensionless representation:
- unit of time        : 1 ms
- unit of inverse time: 10³ /s
- unit of distance    : 100 nm
- unit of Diff.coeff  : 10 µm²/s
- unit of inverse area: 100 /µm²
- unit of inv. volume : 1000 /µm³
-From there, the dimension of any parameter may be
-calculated.
-
-There are a number of files within this archive, 
-depending on what was done during the session.
-
-backgrounds.csv
- - Contains the list of backgrounds used and
- - Averaged intensities in [kHz]
-
-bg_trace*.csv (where * is an integer)
- - The trace of the background corresponding
-   to the line number in backgrounds.csv
- - Time in [ms], Trace in [kHz]
-
-comments.txt
- - Contains page titles and session comment
- - First n lines are titles, rest is session
-   comment (where n is total number of pages)
-
-data*.csv (where * is {Number of page})
- - Contains lag times [ms]
- - Contains experimental data, if available
-
-externalweights.txt
- - Contains names (types) of external weights other than from
-   Model function or spline fit
- - Linewise: 1st element is page number, 2nd is name
- - According to this data, the following files are present in the archive
-
-externalweights_data_*PageID*_*Type*.csv
- - Contains weighting information of Page *PageID* of type *Type*
-
-model_*ModelID*.txt
- - An external (user-defined) model file with internal ID *ModelID*
-
-Parameters.yaml
- - Contains all Parameters for each page
-   Block format:
-    - - '#{Number of page}: '       
-      - {Internal model ID}
-      - {List of parameters}
-      - {List of checked parameters (for fitting)}
-      - [{Min channel selected}, {Max channel selected}]
-      - [{Weighted fit method (0=None, 1=Spline, 2=Model function)}, 
-          {No. of bins from left and right}, {No. of knots (of e.g. spline)}]
-      - [{Background to use (line in backgrounds.csv)}]
-      - Data type is Cross-correlation?
-      - Parameter id (int) used for normalization in plotting.
-        This number first enumerates the model parameters and then
-        the supplemental parameters (e.g. "n1").
-      - - [min, max] fitting parameter range of 1st parameter
-        - [min, max] fitting parameter range of 2nd parameter
-        - etc.
- - Order in Parameters.yaml defines order of pages in a session
- - Order in Parameters.yaml defines order in comments.txt
-
-Readme.txt (this file)
-
-Supplements.yaml
- - Contains errors of fitting
-   Format:
-   -- Page number
-    -- [parameter id, error value]
-     - [parameter id, error value]
-    - Chi squared
-    - [pages that share parameters] (from global fitting)
-     
-trace*.csv (where * is {Number of page} | appendix "A" or "B" point to
-            the respective channels (only in cross-correlation mode))
- - Contains times [ms]
- - Contains countrates [kHz]
-"""
-    return a+b
-
-
 def SoftwareUsed():
     """ Return some Information about the software used for this program """
     text = "Python "+sys.version+\
@@ -306,23 +203,6 @@ def SoftwareUsed():
     return text
 
 
-    
-backgroundinit = """Correct the amplitude for non-correlated background.
-The background intensity <B> can be either imported
-from a blank measurement or set manually."""
-
-#backgroundinit = """Set background correction with the background signal <B>
-#to correct the amplitude of the correlation function by
-#a factor of [<S>/(<S>-<B>)]² where <S> is the average
-#trace signal of the measurement"""
-
-# For the selection of types to import when doing import Data
-chooseimport = """Several types of data were found in
-the chosen file. Please select
-what type(s) you would like to
-import. """
-
-
 # Standard homepage
 HomePage = "http://pycorrfit.craban.de/"
 # Changelog filename
diff --git a/src/edclasses.py b/src/edclasses.py
index e21e255..0fb9c0a 100644
--- a/src/edclasses.py
+++ b/src/edclasses.py
@@ -1,11 +1,24 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     EditedClasses
     Contains classes that we edited.
     Should make our classes more useful.
 
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
@@ -20,7 +33,6 @@ with warnings.catch_warnings():
 # We will hack this toolbar here
 from matplotlib.backends.backend_wx import NavigationToolbar2Wx 
 import numpy as np
-import os
 import sys
 import traceback
 from wx.lib.agw import floatspin        # Float numbers in spin fields
@@ -145,9 +157,8 @@ def save_figure(self, evt=None):
             errstr += str(info[1])+"\n"
             for tb_item in traceback.format_tb(info[2]):
                 errstr += tb_item
-            dlg3 = wx.MessageDialog(parent, errstr, "Error", 
+            wx.MessageDialog(parent, errstr, "Error", 
                 style=wx.ICON_ERROR|wx.OK|wx.STAY_ON_TOP)
-            dlg3.ShowModal() == wx.ID_OK
     else:
         dirname = dlg.GetDirectory()
     try:
diff --git a/src/frontend.py b/src/frontend.py
index 029373d..b842d61 100644
--- a/src/frontend.py
+++ b/src/frontend.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module frontend
     The frontend displays the GUI (Graphic User Interface). All necessary 
@@ -13,6 +12,21 @@
     unit of Diff.coeff  : 10 µm²/s
     unit of inverse area: 100 /µm²
     unit of inv. volume : 1000 /µm³
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
@@ -177,10 +191,10 @@ class MyFrame(wx.Frame):
 
         # Set window icon
         try:
-          self.MainIcon = misc.getMainIcon()
-          wx.Frame.SetIcon(self, self.MainIcon)
+            self.MainIcon = misc.getMainIcon()
+            wx.Frame.SetIcon(self, self.MainIcon)
         except:
-          self.MainIcon = None
+            self.MainIcon = None
 
 
     def add_fitting_tab(self, event=None, modelid=None, counter=None):
@@ -409,7 +423,9 @@ class MyFrame(wx.Frame):
 
     def OnAbout(self, event=None):
         # Show About Information
-        description = doc.description()
+        description =  ("PyCorrFit is a data displaying, fitting "+
+            "and evaluation tool \nfor fluorescence correlation "+
+            "spectroscopy. \nPyCorrFit is written in Python.")
         licence = doc.licence()
         info = wx.AboutDialogInfo()
         #info.SetIcon(wx.Icon('hunter.png', wx.BITMAP_TYPE_PNG))
@@ -421,7 +437,7 @@ class MyFrame(wx.Frame):
         info.SetLicence(licence)
         info.SetIcon(misc.getMainIcon(pxlength=64))
         info.AddDeveloper('Paul Müller')
-        info.AddDocWriter('Paul Müller')
+        info.AddDocWriter('Thomas Weidemann, Paul Müller')
         wx.AboutBox(info)
         
 
@@ -452,9 +468,8 @@ class MyFrame(wx.Frame):
                 errstr += str(info[1])+"\n"
                 for tb_item in traceback.format_tb(info[2]):
                     errstr += tb_item
-                dlg3 = wx.MessageDialog(self, errstr, "Error", 
+                wx.MessageDialog(self, errstr, "Error", 
                     style=wx.ICON_ERROR|wx.OK|wx.STAY_ON_TOP)
-                dlg3.ShowModal() == wx.ID_OK
                 del NewModel
                 return
             # Test the code for sympy compatibility.
@@ -675,9 +690,8 @@ class MyFrame(wx.Frame):
                 errstr += str(info[1])+"\n"
                 for tb_item in traceback.format_tb(info[2]):
                     errstr += tb_item
-                dlg = wx.MessageDialog(self, errstr, "Error", 
+                wx.MessageDialog(self, errstr, "Error", 
                     style=wx.ICON_ERROR|wx.OK|wx.STAY_ON_TOP)
-                dlg.ShowModal() == wx.ID_OK
                 return
             else:
                 dataexp = Stuff["Correlation"]
@@ -746,9 +760,6 @@ class MyFrame(wx.Frame):
                     self.ImportData(CurPage, dataexp[i], trace[i],
                                    curvetype=curvelist[i], filename=filename[i],
                                    curveid=i)
-
-
-
                     # Let the user abort, if he wants to:
                     # We want to do this here before an empty page is added
                     # to the notebok.
@@ -841,10 +852,9 @@ class MyFrame(wx.Frame):
         # Set new tabtitle value and strip leading or trailing
         # white spaces.
         if run != "":
-            title = "{}-r{:03d}   id{:03d} {}".format(curvetype,int(run),
-                                                   int(curveid), filename)
+            title = "{} r{:03d}-{}".format(filename, int(run), curvetype)
         else:
-            title = "{} id{:03d}   {}".format(curvetype, int(curveid), filename)
+            title = "{} id{:03d}-{}".format(filename, int(curveid), curvetype)
         CurPage.tabtitle.SetValue(title.strip())
         # Plot everything
         CurPage.PlotAll()
@@ -976,7 +986,6 @@ class MyFrame(wx.Frame):
             coords = np.zeros(len(keys), dtype=np.int)
             Run = np.zeros(len(Curveid), dtype=np.int)
             WorkType = 1*Type
-            d = 0
             for fname in np.unique(Filename):
                 # unique returns sorted file names.
                 for i in range(Filename.count(fname)/len(keys)):
@@ -996,11 +1005,11 @@ class MyFrame(wx.Frame):
         labels=list()
         for i in np.arange(len(Filename)):
             if Run[i] != "":
-                labels.append("{}-r{:03d} {}".format(Type[i], Run[i],
-                                                     Filename[i]))
+                labels.append("{} r{:03d}-{}".format(Filename[i],
+                                                     Run[i], Type[i]))
             else:
-                labels.append("{}-id{:03d} {}".format(Type[i], Curveid[i],
-                                                      Filename[i]))
+                labels.append("{} id{:03d}-{}".format(Filename[i],
+                                                      Curveid[i], Type[i]))
         Chosen = tools.ChooseImportTypesModel(self, curvetypes, Correlation,
                                               labels=labels)
         newCorrelation = list()
@@ -1313,8 +1322,8 @@ class MyFrame(wx.Frame):
         # If no file has been selected, self.filename will be set to 'None'.
         self.dirname, self.filename = opf.SaveSession(self, self.dirname,
           Infodict)
-          #Function_parms, Function_array, Function_trace, self.Background,
-          #Preferences, Comments, ExternalFunctions, Info)
+        #Function_parms, Function_array, Function_trace, self.Background,
+        #Preferences, Comments, ExternalFunctions, Info)
         # Set title of our window
         self.SetTitleFCS(self.filename)
 
@@ -1325,15 +1334,14 @@ class MyFrame(wx.Frame):
                  locals=locals())
         # Set window icon
         if self.MainIcon is not None:
-          wx.Frame.SetIcon(Shell, self.MainIcon)
+            wx.Frame.SetIcon(Shell, self.MainIcon)
         Shell.Show(True)
 
 
     def OnSoftware(self, event=None):
         # Show About Information
         text = doc.SoftwareUsed()
-        dlg = wx.MessageBox(text, 'Software', 
-            wx.OK | wx.ICON_INFORMATION)
+        wx.MessageBox(text, 'Software', wx.OK | wx.ICON_INFORMATION)
 
 
     def OnTool(self, event):
@@ -1397,7 +1405,7 @@ class MyFrame(wx.Frame):
         # Additional parameters as of v.0.2.9
         # Which Background signal is selected?
         # The Background information is in the list *self.Background*.
-        Parms.append([Page.bgselected])
+        Parms.append([Page.bgselected, Page.bg2selected])
         # Additional parameter as of v.0.5.8
         # Is the Experimental data (if it exists) AC or CC?
         Parms.append(Page.IsCrossCorrelation)
@@ -1415,6 +1423,9 @@ class MyFrame(wx.Frame):
 
     def UnpackParameters(self, Parms, Page):
         """ Apply the given parameters to the Page in question.
+            This function contains several *len(Parms) >= X* statements.
+            These are used for opening sessions that were saved using
+            earlier versions of PyCorrFit.
         """
         modelid = Parms[1]
         if Page.modelid != modelid:
@@ -1461,10 +1472,10 @@ class MyFrame(wx.Frame):
                 # We have knots as of v. 0.6.5
                 [weighted, weights, knots] = Parms[5]
             if knots is not None:
-         # This is done with apply_paramters_reverse:
-         #       text = Page.Fitbox[1].GetValue()
-         #       text = filter(lambda x: x.isalpha(), text)
-         #       Page.Fitbox[1].SetValue(text+str(knots))
+                # This is done with apply_paramters_reverse:
+                #       text = Page.Fitbox[1].GetValue()
+                #       text = filter(lambda x: x.isalpha(), text)
+                #       Page.Fitbox[1].SetValue(text+str(knots))
                 Page.FitKnots = int(knots)
             if weighted is False:
                 weighted = 0
@@ -1491,6 +1502,9 @@ class MyFrame(wx.Frame):
             # causality check:
             if len(self.Background) > Parms[6][0]:
                 Page.bgselected = Parms[6][0]
+                if len(Parms[6]) == 2:
+                    # New in 0.8.1: CC background correction
+                    Page.bg2selected = Parms[6][1]
                 # New feature since 0.7.8: BG selection on Page panel
                 Page.OnAmplitudeCheck("init")
         # Set if Newtab is of type cross-correlation:
diff --git a/src/leastsquaresfit.py b/src/leastsquaresfit.py
index 81e2c89..2613bdc 100644
--- a/src/leastsquaresfit.py
+++ b/src/leastsquaresfit.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module leastsquaresfit
     Here are the necessary functions for computing a fit with given parameters.
@@ -16,6 +15,21 @@
       the levenberg-marquardt algorithm. the user must provide a
       subroutine which calculates the functions. the jacobian is
       then calculated by a forward-difference approximation.
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
@@ -23,7 +37,6 @@ import matplotlib.pyplot as plt
 import numpy as np
 from scipy import interpolate as spintp
 from scipy import optimize as spopt
-import warnings
 
 # If we use this module with PyCorrFit, we can plot things with latex using
 # our own special thing.
@@ -60,9 +73,10 @@ class Fit(object):
         self.dataexpfull = None
         self.function = None
         self.interval = None
-        self.uselatex = False # Eventually use latex. This is passed
-                              # to each plotting command. Only when plotting
-                              # module is available.
+        # Eventually use latex. This is passed
+        # to each plotting command. Only when plotting
+        # module is available.
+        self.uselatex = False 
         self.values = None
         self.valuestofit = None
 
diff --git a/src/misc.py b/src/misc.py
index 0df44bd..432927a 100644
--- a/src/misc.py
+++ b/src/misc.py
@@ -1,15 +1,28 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module misc
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 import codecs
 from distutils.version import LooseVersion # For version checking
 import numpy as np
 import os
-import platform
 import sys
 import tempfile
 import urllib2
@@ -69,7 +82,8 @@ class UpdateDlg(wx.Frame):
 
 class wxHTML(wx.html.HtmlWindow):
     def OnLinkClicked(parent, link):
-         webbrowser.open(link.GetHref())
+        webbrowser.open(link.GetHref())
+
 
 def parseString2Pagenum(parent, string, nodialog=False):
     """ Parse a string with a list of pagenumbers to an integer list with
@@ -96,9 +110,8 @@ def parseString2Pagenum(parent, string, nodialog=False):
                         ". Please use a comma separated list with"+\
                         " optional dashes, e.g. '1-3,6,8'." 
             try:
-                dlg = wx.MessageDialog(parent, errstring, "Error", 
+                wx.MessageDialog(parent, errstring, "Error", 
                                   style=wx.ICON_ERROR|wx.OK|wx.STAY_ON_TOP)
-                dlg.ShowModal() == wx.ID_OK
             except:
                 raise ValueError(errstring)
         else:
@@ -130,13 +143,13 @@ def parsePagenum2String(pagenumlist):
                 dash = False
         else:
             if newlist[i]-1 == newlist[i-1]:
-                 if newlist[i]-2 == newlist[i-2]:
-                     dash = True
-                 elif len(newlist) != i+1 and newlist[i]+1 == newlist[i+1]:
-                     dash = True
-                 else:
-                     string += ", "+str(newlist[i])
-                     dash = False
+                if newlist[i]-2 == newlist[i-2]:
+                    dash = True
+                elif len(newlist) != i+1 and newlist[i]+1 == newlist[i+1]:
+                    dash = True
+                else:
+                    string += ", "+str(newlist[i])
+                    dash = False
             else:
                 dash = False
                 string += ", "+str(newlist[i])
@@ -153,12 +166,12 @@ def parsePagenum2String(pagenumlist):
 def removewrongUTF8(name):
     newname = u""
     for char in name:
-       try:
-           uchar = codecs.decode(char, "UTF-8")
-       except:
-           pass
-       else:
-           newname += char
+        try:
+            codecs.decode(char, "UTF-8")
+        except:
+            pass
+        else:
+            newname += char
     return newname
     
 
diff --git a/src/models/MODEL_TIRF_1C.py b/src/models/MODEL_TIRF_1C.py
index 41e158a..87f4328 100755
--- a/src/models/MODEL_TIRF_1C.py
+++ b/src/models/MODEL_TIRF_1C.py
@@ -1,5 +1,22 @@
 # -*- coding: utf-8 -*-
-""" This file contains TIR one component models
+"""
+    PyCorrFit
+    This file contains TIR one component models
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 import numpy as np                  # NumPy
 import scipy.special as sps
diff --git a/src/models/MODEL_TIRF_2D2D.py b/src/models/MODEL_TIRF_2D2D.py
index 19d2bc4..0b70609 100755
--- a/src/models/MODEL_TIRF_2D2D.py
+++ b/src/models/MODEL_TIRF_2D2D.py
@@ -1,5 +1,22 @@
 # -*- coding: utf-8 -*-
-""" This file contains 2D+2D TIR-FCS models.
+"""
+    PyCorrFit
+    This file contains 2D+2D TIR-FCS models.
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 import numpy as np                  # NumPy
 import scipy.special as sps
diff --git a/src/models/MODEL_TIRF_3D2D.py b/src/models/MODEL_TIRF_3D2D.py
index 1ee6149..b6e0d12 100755
--- a/src/models/MODEL_TIRF_3D2D.py
+++ b/src/models/MODEL_TIRF_3D2D.py
@@ -1,5 +1,22 @@
 # -*- coding: utf-8 -*-
-"""  This file contains 3D+2D TIR-FCS models.
+"""
+    PyCorrFit
+    This file contains 3D+2D TIR-FCS models.
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 import numpy as np                  # NumPy
 import scipy.special as sps
diff --git a/src/models/MODEL_TIRF_3D2Dkin_Ries.py b/src/models/MODEL_TIRF_3D2Dkin_Ries.py
index 1d32661..1723cc0 100755
--- a/src/models/MODEL_TIRF_3D2Dkin_Ries.py
+++ b/src/models/MODEL_TIRF_3D2Dkin_Ries.py
@@ -1,9 +1,27 @@
 # -*- coding: utf-8 -*-
 """ 
+    PyCorrFit
+
+    This file contains a TIR-FCS kinetics model function according to:
     "Total Internal Reflection Fluorescence Correlation Spectroscopy: Effects
     of Lateral Diffusion and Surface-Generated Fluorescence"
     Jonas Ries, Eugene P. Petrov, and Petra Schwille
     Biophysical Journal, Volume 95, July 2008, 390–399
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 import numpy as np                  # NumPy
 import scipy.special as sps
diff --git a/src/models/MODEL_TIRF_3D3D.py b/src/models/MODEL_TIRF_3D3D.py
index 431bcd0..78a521b 100755
--- a/src/models/MODEL_TIRF_3D3D.py
+++ b/src/models/MODEL_TIRF_3D3D.py
@@ -1,5 +1,22 @@
 # -*- coding: utf-8 -*-
-"""  This file contains 3D+3D TIR-FCS models.
+"""
+    PyCorrFit
+    This file contains 3D+3D TIR-FCS models.
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 import numpy as np                  # NumPy
 import scipy.special as sps
diff --git a/src/models/MODEL_TIRF_gaussian_1C.py b/src/models/MODEL_TIRF_gaussian_1C.py
index e9b2404..3cba3cb 100755
--- a/src/models/MODEL_TIRF_gaussian_1C.py
+++ b/src/models/MODEL_TIRF_gaussian_1C.py
@@ -1,5 +1,22 @@
 # -*- coding: utf-8 -*-
-""" This file contains TIR one component models + Triplet
+"""
+    PyCorrFit
+    This file contains TIR one component models + Triplet
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 import numpy as np                  # NumPy
 import scipy.special as sps
diff --git a/src/models/MODEL_TIRF_gaussian_3D2D.py b/src/models/MODEL_TIRF_gaussian_3D2D.py
index 2a002d0..fa02470 100755
--- a/src/models/MODEL_TIRF_gaussian_3D2D.py
+++ b/src/models/MODEL_TIRF_gaussian_3D2D.py
@@ -1,5 +1,22 @@
 # -*- coding: utf-8 -*-
-"""  This file contains a 3D+2D+T TIR FCS model.
+"""
+    PyCorrFit
+    This file contains a 3D+2D+T TIR FCS model.
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 import numpy as np                  # NumPy
 import scipy.special as sps
diff --git a/src/models/MODEL_TIRF_gaussian_3D3D.py b/src/models/MODEL_TIRF_gaussian_3D3D.py
index 89ec550..c4f990f 100755
--- a/src/models/MODEL_TIRF_gaussian_3D3D.py
+++ b/src/models/MODEL_TIRF_gaussian_3D3D.py
@@ -1,5 +1,22 @@
 # -*- coding: utf-8 -*-
-"""  This file contains a 3D+3D+T TIR FCS model.
+""" 
+    PyCorrFit
+    This file contains a 3D+3D+T TIR FCS model.
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 import numpy as np                  # NumPy
 import scipy.special as sps
diff --git a/src/models/MODEL_classic_gaussian_2D.py b/src/models/MODEL_classic_gaussian_2D.py
index bbe1e9a..8a7d51d 100755
--- a/src/models/MODEL_classic_gaussian_2D.py
+++ b/src/models/MODEL_classic_gaussian_2D.py
@@ -1,5 +1,22 @@
 # -*- coding: utf-8 -*-
-""" This file contains some simple 2D models for confocal microscopy
+"""
+    PyCorrFit
+    This file contains some simple 2D models for confocal microscopy
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 import numpy as np                  # NumPy
@@ -83,11 +100,8 @@ def Check_xy_T_gauss(parms):
     taudiff = parms[1] = np.abs(parms[1])
     tautrip = np.abs(parms[2])
     T=parms[3]
-    
-    # REMOVED (Issue #2)
-     ## Force triplet component to be smaller than diffusion times
-     #tautrip = min(tautrip,taudiff*0.9)
-     
+
+
     # Triplet fraction is between 0 and one. T may not be one!
     T = (0.<=T<1.)*T + .99999999999999*(T>=1)
 
@@ -158,9 +172,6 @@ def Check_6031(parms):
     T=parms[6]
     off=parms[7]
     
-    ## REMOVED (Issue #2)
-     ## Force triplet component to be smaller than diffusion times
-     #tautrip = min(tautrip,taud1*0.9, taud2*0.9)
      
     # Triplet fraction is between 0 and one. T may not be one!
     T = (0.<=T<1.)*T + .99999999999999*(T>=1)
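
The boolean arithmetic in the check functions above keeps the triplet fraction T inside [0, 1): values of 1 or more are pulled just below 1, and negative values collapse to 0 because both terms evaluate to False. A standalone sketch of the same clamp:

    def clamp_triplet_fraction(T):
        # Keep T if 0 <= T < 1; T >= 1 is pulled just below 1;
        # negative T collapses to 0 because both terms are False.
        return (0. <= T < 1.) * T + .99999999999999 * (T >= 1)

    for value in (-0.2, 0.3, 1.5):
        print(value, "->", clamp_triplet_fraction(value))
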
diff --git a/src/models/MODEL_classic_gaussian_3D.py b/src/models/MODEL_classic_gaussian_3D.py
index 96fc8b6..a09278c 100755
--- a/src/models/MODEL_classic_gaussian_3D.py
+++ b/src/models/MODEL_classic_gaussian_3D.py
@@ -1,5 +1,22 @@
 # -*- coding: utf-8 -*-
-""" This file contains TIR one component models + Triplet
+"""
+    PyCorrFit
+    This file contains TIR one component models + triplet
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 import numpy as np                  # NumPy
 
@@ -91,10 +108,6 @@ def Check_6011(parms):
     parms[4] = np.abs(parms[4])
     off = parms[5]
     
-    # REMOVED (issue #2)
-     ## Force triplet component to be smaller than diffusion
-     #tautrip = min(tautrip, 0.9*taudiff)
-     
     # Triplet fraction is between 0 and one.
     T = (0.<=T<1.)*T + .999999999*(T>=1)
 
@@ -168,9 +181,6 @@ def Check_3D3DT(parms):
     T=parms[7]
     off=parms[8]
     
-    # REMOVED (issue #2)
-     # Force triplet component to be smaller than diffusion times
-     #tautrip = min(tautrip,taud1*0.9, taud2*0.9)
     
     # Triplet fraction is between 0 and one. T may not be one!
     T = (0.<=T<1.)*T + .99999999999999*(T>=1)
diff --git a/src/models/MODEL_classic_gaussian_3D2D.py b/src/models/MODEL_classic_gaussian_3D2D.py
index 56532d4..83b50a4 100755
--- a/src/models/MODEL_classic_gaussian_3D2D.py
+++ b/src/models/MODEL_classic_gaussian_3D2D.py
@@ -1,5 +1,22 @@
 # -*- coding: utf-8 -*-
-"""  This file contains a 3D+2D+T confocal FCS model.
+"""
+    PyCorrFit
+    This file contains a 3D+2D+T confocal FCS model.
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 import numpy as np                  # NumPy
 
diff --git a/src/models/__init__.py b/src/models/__init__.py
index 83f7dd6..2c3aecd 100644
--- a/src/models/__init__.py
+++ b/src/models/__init__.py
@@ -1,6 +1,6 @@
 # -*- coding: utf-8 -*-
-""" PyCorrFit
-    Paul Müller, Biotec - TU Dresden
+"""
+    PyCorrFit
 
     Module models:
     Define all models and set initial parameters.
@@ -8,10 +8,9 @@
     Each model has a unique ID. This ID is very important:
         1. It is a wxWidgets ID.
         2. It is used in the saving of sessions to identify a model.
-    It is very important, that model IDs do NOT change in newer versions of
-    this program, because it would not be possible to restore older PyCorrFit
-    sessions (Unless you add a program that maps the model IDs to the correct
-    models).
+    It is very important that model IDs do NOT change in newer versions
+    of PyCorrFit, because otherwise it would not be possible to restore
+    older PyCorrFit sessions.
 
     Dimensionless representation:
     unit of time        : 1 ms
@@ -20,11 +19,26 @@
     unit of Diff.coeff  : 10 µm²/s
     unit of inverse area: 100 /µm²
     unit of inv. volume : 1000 /µm³
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
 # This file is necessary for this folder to become a module that can be 
-# imported by PyCorrFit or other people.
+# imported from within Python/PyCorrFit.
 
 import numpy as np                  # NumPy
 import platform
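
The dimensionless representation listed above means that fitted numbers have to be rescaled before they are reported in physical units. A small hypothetical helper (not part of the package) illustrating the conversion:

    # Unit factors from the dimensionless representation documented above.
    TIME_UNIT_S = 1e-3           # 1 ms
    DIFFCOEFF_UNIT_UM2_S = 10.0  # 10 um^2/s

    def lagtime_to_seconds(tau_dimensionless):
        """Convert a dimensionless lag/diffusion time to seconds."""
        return tau_dimensionless * TIME_UNIT_S

    def diffcoeff_to_um2_per_s(d_dimensionless):
        """Convert a dimensionless diffusion coefficient to um^2/s."""
        return d_dimensionless * DIFFCOEFF_UNIT_UM2_S

    print(lagtime_to_seconds(2.5))        # 0.0025 s (= 2.5 ms)
    print(diffcoeff_to_um2_per_s(0.54))   # 5.4 um^2/s
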
diff --git a/src/openfile.py b/src/openfile.py
index 2994bad..96c3b9d 100644
--- a/src/openfile.py
+++ b/src/openfile.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module openfile
     This file is used to define operations on how to open some files.
@@ -12,6 +11,21 @@
     unit of Diff.coeff  : 10 µm²/s
     unit of inverse area: 100 /µm²
     unit of inv. volume : 1000 /µm³
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
@@ -19,24 +33,20 @@ import csv
 from distutils.version import LooseVersion # For version checking
 import numpy as np
 import os
-import platform
 import shutil
-import sys
 import tempfile
-import traceback
 import wx
 import yaml
 import zipfile
 
 import doc
 import edclasses
-import models as mdls
 from tools import info
-# This contains all the information necessary to import data files:
+
+# These imports are required for loading data
 from readfiles import Filetypes
 from readfiles import BGFiletypes
 
-
 def ImportParametersYaml(parent, dirname):
     """ Import the parameters from a parameters.yaml file
         from an PyCorrFit session.
@@ -297,7 +307,6 @@ def OpenSession(parent, dirname, sessionfile=None):
             i = i + 1
         bgfile.close()
     # Get external weights if they exist
-    Info = dict()
     WeightsFilename = "externalweights.txt"
     try:
         # Raises KeyError, if file is not present:
@@ -331,145 +340,6 @@ def OpenSession(parent, dirname, sessionfile=None):
     return Infodict, dirname, filename
 
 
-def saveCSV(parent, dirname, Page):
-    """ Write relevant data into a comma separated list.
-        
-        Parameters:
-        *parent*   the parent window
-        *dirname* directory to set on saving
-        *Page*     Page containing all necessary variables
-    """
-    filename = Page.tabtitle.GetValue().strip()+Page.counter[:2]
-    dlg = wx.FileDialog(parent, "Save curve", dirname, filename, 
-          "Correlation with trace (*.csv)|*.csv;*.CSV"+\
-          "|Correlation only (*.csv)|*.csv;*.CSV",
-           wx.SAVE|wx.FD_OVERWRITE_PROMPT)
-    # user cannot do anything until he clicks "OK"
-    if dlg.ShowModal() == wx.ID_OK:
-        path = dlg.GetPath()            # Workaround since 0.7.5
-        (dirname, filename) = os.path.split(path)
-        #filename = dlg.GetFilename()
-        #dirname = dlg.GetDirectory()
-        if filename.lower().endswith(".csv") is not True:
-            filename = filename+".csv"
-        openedfile = open(os.path.join(dirname, filename), 'wb')
-        ## First, some doc text
-        openedfile.write(doc.saveCSVinfo(parent).replace('\n', '\r\n'))
-        # The infos
-        InfoMan = info.InfoClass(CurPage=Page)
-        PageInfo = InfoMan.GetCurFancyInfo()
-        for line in PageInfo.splitlines():
-            openedfile.write("# "+line+"\r\n")
-        openedfile.write("#\r\n#\r\n")
-        # Get all the data we need from the Page
-        # Modeled data
-        # Since 0.7.8 the user may normalize the curves. The normalization
-        # factor is set in *Page.normfactor*.
-        corr = Page.datacorr[:,1]*Page.normfactor
-        if Page.dataexp is not None:
-            # Experimental data
-            tau = Page.dataexp[:,0]
-            exp = Page.dataexp[:,1]*Page.normfactor
-            res = Page.resid[:,1]*Page.normfactor
-            # Plotting! Because we only export plotted area.
-            weight = Page.weights_used_for_plotting
-            if weight is None:
-                pass
-            elif len(weight) != len(exp):
-                text = "Weights have not been calculated for the "+\
-                       "area you want to export. Pressing 'Fit' "+\
-                       "again should solve this issue. Data will "+\
-                       "not be saved."
-                dlg = wx.MessageDialog(parent, text, "Error", 
-                    style=wx.ICON_ERROR|wx.OK|wx.STAY_ON_TOP)
-                dlg.ShowModal() == wx.ID_OK
-                return dirname, None
-        else:
-            tau = Page.datacorr[:,0]
-            exp = None
-            res = None
-        # Include weights in data saving:
-        # PyCorrFit thinks in [ms], but we will save as [s]
-        timefactor = 0.001
-        tau = timefactor * tau
-        ## Now we want to write all that data into the file
-        # This is for csv writing:
-        ## Correlation curve
-        dataWriter = csv.writer(openedfile, delimiter='\t')
-        if exp is not None:
-            header = '# Channel (tau [s])'+"\t"+ \
-                     'Experimental correlation'+"\t"+ \
-                     'Fitted correlation'+ "\t"+ \
-                     'Residuals'+"\r\n"
-            data = [tau, exp, corr, res]
-            if Page.weighted_fit_was_performed is True \
-            and weight is not None:
-                header = header.strip() + "\t"+'Weights (fit)'+"\r\n"
-                data.append(weight)
-        else:
-            header = '# Channel (tau [s])'+"\t"+ \
-                     'Correlation function'+"\r\n"
-            data = [tau, corr]
-        # Write header
-        openedfile.write(header)
-        # Write data
-        for i in np.arange(len(data[0])):
-            # row-wise, data may have more than two elements per row
-            datarow = list()
-            for j in np.arange(len(data)):
-                rowcoli = str("%.10e") % data[j][i]
-                datarow.append(rowcoli)
-            dataWriter.writerow(datarow)
-        ## Trace
-        # Only save the trace if user wants us to:
-        if dlg.GetFilterIndex() == 0:
-            # We will also save the trace in [s]
-            # Intensity trace in kHz may stay the same
-            if Page.trace is not None:
-                # Mark beginning of Trace
-                openedfile.write('#\r\n#\r\n# BEGIN TRACE\r\n#\r\n')
-                # Columns
-                time = Page.trace[:,0]*timefactor
-                intensity = Page.trace[:,1]
-                # Write
-                openedfile.write('# Time [s]'+"\t" 
-                                     'Intensity trace [kHz]'+" \r\n")
-                for i in np.arange(len(time)):
-                    dataWriter.writerow([str("%.10e") % time[i],
-                                         str("%.10e") % intensity[i]])
-            elif Page.tracecc is not None:
-                # We have some cross-correlation here:
-                # Mark beginning of Trace A
-                openedfile.write('#\r\n#\r\n# BEGIN TRACE\r\n#\r\n')
-                # Columns
-                time = Page.tracecc[0][:,0]*timefactor
-                intensity = Page.tracecc[0][:,1]
-                # Write
-                openedfile.write('# Time [s]'+"\t" 
-                                     'Intensity trace [kHz]'+" \r\n")
-                for i in np.arange(len(time)):
-                    dataWriter.writerow([str("%.10e") % time[i],
-                                         str("%.10e") % intensity[i]])
-                # Mark beginning of Trace B
-                openedfile.write('#\r\n#\r\n# BEGIN SECOND TRACE\r\n#\r\n')
-                # Columns
-                time = Page.tracecc[1][:,0]*timefactor
-                intensity = Page.tracecc[1][:,1]
-                # Write
-                openedfile.write('# Time [s]'+"\t" 
-                                     'Intensity trace [kHz]'+" \r\n")
-                for i in np.arange(len(time)):
-                    dataWriter.writerow([str("%.10e") % time[i],
-                                         str("%.10e") % intensity[i]])
-        dlg.Destroy()
-        openedfile.close()
-        return dirname, filename
-    else:
-        dirname = dlg.GetDirectory()
-        dlg.Destroy()
-        return dirname, None
-
-
 def SaveSession(parent, dirname, Infodict):
     """ Write whole Session into a zip file.
         Infodict may contain the following keys:
@@ -702,7 +572,7 @@ def SaveSession(parent, dirname, Infodict):
         ## Readme
         rmfilename = "Readme.txt"
         rmfile = open(rmfilename, 'wb')
-        rmfile.write(doc.SessionReadme(parent))
+        rmfile.write(ReadmeSession)
         rmfile.close()
         Arc.write(rmfilename)
         os.remove(os.path.join(tempdir, rmfilename))
@@ -719,3 +589,243 @@ def SaveSession(parent, dirname, Infodict):
         dirname = dlg.GetDirectory()
         dlg.Destroy()
         return dirname, None
+
+
+
+
+def saveCSV(parent, dirname, Page):
+    """ Write relevant data into a comma separated list.
+        
+        Parameters:
+        *parent*   the parent window
+        *dirname* directory to set on saving
+        *Page*     Page containing all necessary variables
+    """
+    filename = Page.tabtitle.GetValue().strip()+Page.counter[:2]
+    dlg = wx.FileDialog(parent, "Save curve", dirname, filename, 
+          "Correlation with trace (*.csv)|*.csv;*.CSV"+\
+          "|Correlation only (*.csv)|*.csv;*.CSV",
+           wx.SAVE|wx.FD_OVERWRITE_PROMPT)
+    # user cannot do anything until he clicks "OK"
+    if dlg.ShowModal() == wx.ID_OK:
+        path = dlg.GetPath()            # Workaround since 0.7.5
+        (dirname, filename) = os.path.split(path)
+        #filename = dlg.GetFilename()
+        #dirname = dlg.GetDirectory()
+        if filename.lower().endswith(".csv") is not True:
+            filename = filename+".csv"
+        openedfile = open(os.path.join(dirname, filename), 'wb')
+        ## First, some doc text
+        openedfile.write(ReadmeCSV.replace('\n', '\r\n'))
+        # The infos
+        InfoMan = info.InfoClass(CurPage=Page)
+        PageInfo = InfoMan.GetCurFancyInfo()
+        for line in PageInfo.splitlines():
+            openedfile.write("# "+line+"\r\n")
+        openedfile.write("#\r\n#\r\n")
+        # Get all the data we need from the Page
+        # Modeled data
+        # Since 0.7.8 the user may normalize the curves. The normalization
+        # factor is set in *Page.normfactor*.
+        corr = Page.datacorr[:,1]*Page.normfactor
+        if Page.dataexp is not None:
+            # Experimental data
+            tau = Page.dataexp[:,0]
+            exp = Page.dataexp[:,1]*Page.normfactor
+            res = Page.resid[:,1]*Page.normfactor
+            # Plotting! Because we only export plotted area.
+            weight = Page.weights_used_for_plotting
+            if weight is None:
+                pass
+            elif len(weight) != len(exp):
+                text = "Weights have not been calculated for the "+\
+                       "area you want to export. Pressing 'Fit' "+\
+                       "again should solve this issue. Data will "+\
+                       "not be saved."
+                wx.MessageDialog(parent, text, "Error", 
+                    style=wx.ICON_ERROR|wx.OK|wx.STAY_ON_TOP)
+                return dirname, None
+        else:
+            tau = Page.datacorr[:,0]
+            exp = None
+            res = None
+        # Include weights in data saving:
+        # PyCorrFit thinks in [ms], but we will save as [s]
+        timefactor = 0.001
+        tau = timefactor * tau
+        ## Now we want to write all that data into the file
+        # This is for csv writing:
+        ## Correlation curve
+        dataWriter = csv.writer(openedfile, delimiter='\t')
+        if exp is not None:
+            header = '# Channel (tau [s])'+"\t"+ \
+                     'Experimental correlation'+"\t"+ \
+                     'Fitted correlation'+ "\t"+ \
+                     'Residuals'+"\r\n"
+            data = [tau, exp, corr, res]
+            if Page.weighted_fit_was_performed is True \
+            and weight is not None:
+                header = header.strip() + "\t"+'Weights (fit)'+"\r\n"
+                data.append(weight)
+        else:
+            header = '# Channel (tau [s])'+"\t"+ \
+                     'Correlation function'+"\r\n"
+            data = [tau, corr]
+        # Write header
+        openedfile.write(header)
+        # Write data
+        for i in np.arange(len(data[0])):
+            # row-wise, data may have more than two elements per row
+            datarow = list()
+            for j in np.arange(len(data)):
+                rowcoli = str("%.10e") % data[j][i]
+                datarow.append(rowcoli)
+            dataWriter.writerow(datarow)
+        ## Trace
+        # Only save the trace if user wants us to:
+        if dlg.GetFilterIndex() == 0:
+            # We will also save the trace in [s]
+            # Intensity trace in kHz may stay the same
+            if Page.trace is not None:
+                # Mark beginning of Trace
+                openedfile.write('#\r\n#\r\n# BEGIN TRACE\r\n#\r\n')
+                # Columns
+                time = Page.trace[:,0]*timefactor
+                intensity = Page.trace[:,1]
+                # Write
+                openedfile.write('# Time [s]'+"\t" 
+                                     'Intensity trace [kHz]'+" \r\n")
+                for i in np.arange(len(time)):
+                    dataWriter.writerow([str("%.10e") % time[i],
+                                         str("%.10e") % intensity[i]])
+            elif Page.tracecc is not None:
+                # We have some cross-correlation here:
+                # Mark beginning of Trace A
+                openedfile.write('#\r\n#\r\n# BEGIN TRACE\r\n#\r\n')
+                # Columns
+                time = Page.tracecc[0][:,0]*timefactor
+                intensity = Page.tracecc[0][:,1]
+                # Write
+                openedfile.write('# Time [s]'+"\t" 
+                                     'Intensity trace [kHz]'+" \r\n")
+                for i in np.arange(len(time)):
+                    dataWriter.writerow([str("%.10e") % time[i],
+                                         str("%.10e") % intensity[i]])
+                # Mark beginning of Trace B
+                openedfile.write('#\r\n#\r\n# BEGIN SECOND TRACE\r\n#\r\n')
+                # Columns
+                time = Page.tracecc[1][:,0]*timefactor
+                intensity = Page.tracecc[1][:,1]
+                # Write
+                openedfile.write('# Time [s]'+"\t" 
+                                     'Intensity trace [kHz]'+" \r\n")
+                for i in np.arange(len(time)):
+                    dataWriter.writerow([str("%.10e") % time[i],
+                                         str("%.10e") % intensity[i]])
+        dlg.Destroy()
+        openedfile.close()
+        return dirname, filename
+    else:
+        dirname = dlg.GetDirectory()
+        dlg.Destroy()
+        return dirname, None
+
+
+ReadmeCSV = """# This file was created using PyCorrFit version {}.
+#
+# Lines starting with a '#' are treated as comments.
+# The data is stored as CSV below this comment section.
+# Data usually consists of lag times (channels) and
+# the corresponding correlation function - experimental
+# and fitted values plus resulting residuals.
+# If this file is opened by PyCorrFit, only the first two
+# columns will be imported as experimental data.
+#
+""".format(doc.__version__)
+    
+    
+ReadmeSession = """This file was created using PyCorrFit version {}.
+The .zip archive you are looking at is a stored session of PyCorrFit.
+If you are interested in how the data is stored, you will find
+out here. Most important are the dimensions of units:
+Dimensionless representation:
+ unit of time        : 1 ms
+ unit of inverse time: 10³ /s
+ unit of distance    : 100 nm
+ unit of Diff.coeff  : 10 µm²/s
+ unit of inverse area: 100 /µm²
+ unit of inv. volume : 1000 /µm³
+From there, the dimension of any parameter may be
+calculated.
+
+There are a number of files within this archive, 
+depending on what was done during the session.
+
+backgrounds.csv
+ - Contains the list of backgrounds used and
+   their averaged intensities in [kHz]
+
+bg_trace*.csv (where * is an integer)
+ - The trace of the background corresponding
+   to the line number in backgrounds.csv
+ - Time in [ms], Trace in [kHz]
+
+comments.txt
+ - Contains page titles and session comment
+ - First n lines are titles, rest is session
+   comment (where n is total number of pages)
+
+data*.csv (where * is (Number of page))
+ - Contains lag times [ms]
+ - Contains experimental data, if available
+
+externalweights.txt
+ - Contains names (types) of external weights other than from
+   Model function or spline fit
+ - Linewise: 1st element is page number, 2nd is name
+ - According to this data, the following files are present in the archive
+
+externalweights_data_*PageID*_*Type*.csv
+ - Contains weighting information of Page *PageID* of type *Type*
+
+model_*ModelID*.txt
+ - An external (user-defined) model file with internal ID *ModelID*
+
+Parameters.yaml
+ - Contains all Parameters for each page
+   Block format:
+    - - '#(Number of page): '       
+      - (Internal model ID)
+      - (List of parameters)
+      - (List of checked parameters (for fitting))
+      - [(Min channel selected), (Max channel selected)]
+      - [(Weighted fit method (0=None, 1=Spline, 2=Model function)), 
+          (No. of bins from left and right), (No. of knots (of e.g. spline))]
+      - [B1,B2] Background to use (line in backgrounds.csv)
+           B2 is always *null* for autocorrelation curves
+      - Data type is Cross-correlation?
+      - Parameter id (int) used for normalization in plotting.
+        This number first enumerates the model parameters and then
+        the supplemental parameters (e.g. "n1").
+      - - [min, max] fitting parameter range of 1st parameter
+        - [min, max] fitting parameter range of 2nd parameter
+        - etc.
+ - Order in Parameters.yaml defines order of pages in a session
+ - Order in Parameters.yaml defines order in comments.txt
+
+Readme.txt (this file)
+
+Supplements.yaml
+ - Contains errors of fitting
+   Format:
+   -- Page number
+    -- [parameter id, error value]
+     - [parameter id, error value]
+    - Chi squared
+    - [pages that share parameters] (from global fitting)
+     
+trace*.csv (where * is (Number of page) | appendix "A" or "B" point to
+            the respective channels (only in cross-correlation mode))
+ - Contains times [ms]
+ - Contains countrates [kHz]
+""".format(doc.__version__)
diff --git a/src/page.py b/src/page.py
index 840d16d..7a4e3df 100644
--- a/src/page.py
+++ b/src/page.py
@@ -1,10 +1,9 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module frontend
-    The frontend displays the GUI (Graphic User Interface). All necessary 
-    functions and modules are called from here.
+    The frontend displays the GUI (Graphical User Interface).
+    All functions and modules are called from here.
 
     Dimensionless representation:
     unit of time        : 1 ms
@@ -13,20 +12,34 @@
     unit of Diff.coeff  : 10 µm²/s
     unit of inverse area: 100 /µm²
     unit of inv. volume : 1000 /µm³
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 # Use DEMO for contrast-rich screenshots.
 # This enlarges axis text and draws black lines instead of grey ones.
 DEMO = False
 
 
-import os
 import wx                               # GUI interface wxPython
+from wx.lib.agw import floatspin        # Float numbers in spin fields
 import wx.lib.plot as plot              # Plotting in wxPython
 import wx.lib.scrolledpanel as scrolled
 import numpy as np                      # NumPy
 import sys                              # System stuff
 
-import doc
 import edclasses                    # Cool stuff like better floatspin
 import leastsquaresfit as fit       # For fitting
 import models as mdls
@@ -50,18 +63,21 @@ class FittingPanel(wx.Panel):
         wx.Panel.__init__(self, parent=parent, id=wx.ID_ANY)
         self.parent = parent
         self.filename = "None"
-        ## If this value is set to True, the trace and traceavg variables
-        ## will not be used. Instead tracecc a list, of traces will be used.
+        ## If IsCrossCorrelation is set to True, the trace and traceavg 
+        ## variables will not be used. Instead, tracecc, a list of traces,
+        ## will be used.
         self.IsCrossCorrelation = False
         ## Setting up variables for plotting
         self.trace = None        # The intensity trace, tuple
         self.traceavg = None     # Average trace intensity
         self.tracecc = None      # List of traces (in CC mode only)
         self.bgselected = None   # integer, index for parent.Background
+        self.bg2selected = None  # integer, index for parent.Background
+        #                          -> for cross-correlation
         self.bgcorrect = 1.      # Background correction factor for dataexp
         self.normparm = None     # Parameter number used for graph normalization
-                                 # if greater than number of fitting parms,
-                                 # then supplementary parm is used.
+        #                          if greater than number of fitting parms,
+        #                          then supplementary parm is used.
         self.normfactor = 1.     # Graph normalization factor (e.g. value of n)
         self.startcrop = None    # Where cropping of dataexp starts
         self.endcrop = None      # Where cropping of dataexp ends
@@ -342,33 +358,53 @@ class FittingPanel(wx.Panel):
         ## self.parent.OnFNBPageChanged(e=None, Page=self)
 
 
-
     def CorrectDataexp(self, dataexp):
-        """ Background correction
-            Background correction with *self.bgcorrect*.
+        """ 
+            Background correction
+            Changes *self.bgcorrect*.
             Overwrites *self.dataexp*.
             For details see:
-            Incollection (Thomps:bookFCS2002)
-            Thompson, N. Lakowicz, J.; Geddes, C. D. & Lakowicz, J. R. (ed.)
-            Fluorescence Correlation Spectroscopy
-            Topics in Fluorescence Spectroscopy, Springer US, 2002, 1, 337-378
+            
+                Thompson, N. Lakowicz, J.;
+                Geddes, C. D. & Lakowicz, J. R. (ed.)
+                Fluorescence Correlation Spectroscopy
+                Topics in Fluorescence Spectroscopy,
+                Springer US, 2002, 1, 337-378
+            
+            and (for cross-correlation)
+            
+            Weidemann et al. ...?
         """
         # Make a copy. Do not overwrite the original.
         if dataexp is not None:
             modified = 1 * dataexp
-            if self.bgselected is not None:
-                # self.bgselected - background, needs to be imported via Tools
-                if self.traceavg is not None:
-                    S = self.traceavg
-                    B = self.parent.Background[self.bgselected][0]
-                    # Calculate correction factor
-                    self.bgcorrect = (S/(S-B))**2
-                    # self.dataexp should be set, since we have self.trace
-                    modified[:,1] *= self.bgcorrect
+            if self.IsCrossCorrelation:
+                # Cross-Correlation
+                if (self.bgselected is not None and
+                    self.bg2selected is not None    ):
+                    if self.tracecc is not None:
+                        S = self.tracecc[0][:,1].mean()
+                        S2 = self.tracecc[1][:,1].mean()
+                        B = self.parent.Background[self.bgselected][0]
+                        B2 = self.parent.Background[self.bg2selected][0]
+                        self.bgcorrect = (S/(S-B)) * (S2/(S2-B2))
+                        modified[:,1] *= self.bgcorrect
+            else:
+                # Autocorrelation
+                if self.bgselected is not None:
+                    # self.bgselected 
+                    if self.traceavg is not None:
+                        S = self.traceavg
+                        B = self.parent.Background[self.bgselected][0]
+                        # Calculate correction factor
+                        self.bgcorrect = (S/(S-B))**2
+                        # self.dataexp should be set, since we have self.trace
+                        modified[:,1] *= self.bgcorrect
             return modified
         else:
             return None
 
+
     def Fit_enable_fitting(self):
         """ Enable the fitting button and the weighted fit control"""
         #self.Fitbox=[ fitbox, weightedfitdrop, fittext, fittext2, fittextvar,
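
CorrectDataexp above applies a multiplicative background correction to the measured correlation amplitude: (S/(S-B))**2 for autocorrelation and (S1/(S1-B1))*(S2/(S2-B2)) for cross-correlation, with S the mean intensity and B the background, both in kHz. A standalone sketch of these factors:

    def background_correction_factor(S, B, S2=None, B2=None):
        """Correction factor for the correlation amplitude.

        Autocorrelation:   (S / (S - B)) ** 2
        Cross-correlation: (S / (S - B)) * (S2 / (S2 - B2))
        """
        if S2 is None:
            return (S / (S - B)) ** 2
        return (S / (S - B)) * (S2 / (S2 - B2))

    # 50 kHz signal with 5 kHz background scales G(tau) by about 1.23
    print(background_correction_factor(50.0, 5.0))
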
@@ -391,23 +427,10 @@ class FittingPanel(wx.Panel):
         Fitting.uselatex = self.parent.MenuUseLatex.IsChecked()
         Fitting.check_parms = self.check_parms
         Fitting.dataexpfull = self.CorrectDataexp(self.dataexpfull)
-      ## This is now done in apply_parameters
-      #  if self.Fitbox[1].GetSelection() == -1:
-      #      # User edited knot number
-      #      Knots = self.Fitbox[1].GetValue()
-      #      Knots = filter(lambda x: x.isdigit(), Knots)
-      #      if Knots == "":
-      #          Knots = "5"
-      #      List = self.Fitbox[1].GetItems()
-      #      List[1] = "Spline ("+Knots+" knots)"
-      #      Fitting.fittype = "spline"+Knots
-      #      self.Fitbox[1].SetItems(List)
-      #      self.Fitbox[1].SetSelection(1)
-      #      self.FitKnots = Knots
         if self.Fitbox[1].GetSelection() == 1:
-      #      Knots = self.Fitbox[1].GetValue()
-      #      Knots = filter(lambda x: x.isdigit(), Knots)
-      #      self.FitKnots = Knots
+            # Knots = self.Fitbox[1].GetValue()
+            # Knots = filter(lambda x: x.isdigit(), Knots)
+            # self.FitKnots = Knots
             Fitting.fittype = "spline"+str(self.FitKnots)
             self.parent.StatusBar.SetStatusText("You can change the number"+
                " of knots. Check 'Preference>Verbose Mode' to view the spline.")
@@ -560,7 +583,6 @@ class FittingPanel(wx.Panel):
         """ Enable/Disable BG rate text line.
             New feature introduced in 0.7.8
         """
-        #self.AmplitudeInfo = [ bgnorm, bgtex, normtoNDropdown, textnor]
         ## Normalization to a certain parameter in plots
         # Find all parameters that start with an "N"
         # ? and "C" ?
@@ -620,9 +642,6 @@ class FittingPanel(wx.Panel):
                     self.normfactor =  supplement[supnum][1]
                 
                 #### supplement are somehow sorted !!!!
-                
-                #import IPython
-                #IPython.embed()
                 # For parameter export:
                 self.normparm = parameterid
                 # No internal parameters will be changed
@@ -641,45 +660,71 @@ class FittingPanel(wx.Panel):
         # Set dropdown values
         self.AmplitudeInfo[2].SetItems(normlist)
         self.AmplitudeInfo[2].SetSelection(normsel)
-        ## Background correction
-        bgsel = self.AmplitudeInfo[0].GetSelection()
-        # Standard is the background of the page
-        # Read bg selection
-        if event == "init":
-            # Read everything from the page not from the panel
-            if self.bgselected is not None:
-                bgsel = self.bgselected + 1
+        ## Plot intensities
+        # Quick reminder:
+        #self.AmplitudeInfo = [ [intlabel1, intlabel2],
+        #                       [bgspin1, bgspin2],
+        #                       normtoNDropdown, textnor]
+        # Signal
+        if self.IsCrossCorrelation:
+            if self.tracecc is not None:
+                S1 = self.tracecc[0][:,1].mean()
+                S2 = self.tracecc[1][:,1].mean()
+                self.AmplitudeInfo[0][0].SetValue("{:.4f}".format(S1))
+                self.AmplitudeInfo[0][1].SetValue("{:.4f}".format(S2))
             else:
-                bgsel = 0
+                self.AmplitudeInfo[0][0].SetValue("{:.4f}".format(0))
+                self.AmplitudeInfo[0][1].SetValue("{:.4f}".format(0))
         else:
-            if bgsel <= 0:
-                self.bgselected = None
-                bgsel = 0 #None
+            if self.traceavg is not None:
+                self.AmplitudeInfo[0][0].SetValue("{:.4f}".format(
+                                                self.traceavg))
             else:
-                self.bgselected = bgsel - 1
-        # Rebuild itemlist
-        # self.parent.Background[self.bgselected][i]
-        # [0] average signal [kHz]
-        # [1] signal name (edited by user)
-        # [2] signal trace (tuple) ([ms], [kHz])
-        bglist = list()
-        bglist.append("None")
-        for item in self.parent.Background:
-            if len(item[1]) > 10:
-                item[1] = item[1][:7]+"..."
-            bgname = item[1]+" (%.2f kHz)" %item[0]
-            bglist.append(bgname)
-        self.AmplitudeInfo[0].SetItems(bglist)
-        self.AmplitudeInfo[0].SetSelection(bgsel)
-        #self.AmplitudeInfo = [ bgnorm, bgtex, normtoNDropdown, textnor]
-        if len(bglist) <= 1:
-            self.AmplitudeInfo[0].Disable()
-            self.AmplitudeInfo[1].Disable()
+                self.AmplitudeInfo[0][0].SetValue("{:.4f}".format(0))
+            self.AmplitudeInfo[0][1].SetValue("{:.4f}".format(0))
+        # Background
+        ## self.parent.Background[self.bgselected][i]
+        ## [0] average signal [kHz]
+        ## [1] signal name (edited by user)
+        ## [2] signal trace (tuple) ([ms], [kHz])
+        if self.bgselected is not None:
+            self.AmplitudeInfo[1][0].SetValue(
+                        self.parent.Background[self.bgselected][0])
+        else:
+            self.AmplitudeInfo[1][0].SetValue(0)
+        if self.bg2selected is not None and self.IsCrossCorrelation:
+            self.AmplitudeInfo[1][1].SetValue(
+                        self.parent.Background[self.bg2selected][0])
         else:
-            self.AmplitudeInfo[0].Enable()
-            self.AmplitudeInfo[1].Enable()
+            self.AmplitudeInfo[1][1].SetValue(0)
+        # Disable the second line in amplitude correction, if we have
+        # autocorrelation only.
+        boolval = self.IsCrossCorrelation
+        for item in self.WXAmplitudeCCOnlyStuff:
+            item.Enable(boolval)
 
 
+    def OnBGSpinChanged(self, e):
+        """ Calls tools.background.ApplyAutomaticBackground
+            to update background information
+        """
+        # Quick reminder:
+        #self.AmplitudeInfo = [ [intlabel1, intlabel2],
+        #                       [bgspin1, bgspin2],
+        #                       normtoNDropdown, textnor]
+        if self.IsCrossCorrelation:
+            # update both self.bgselected and self.bg2selected
+            bg = [self.AmplitudeInfo[1][0].GetValue(),
+                  self.AmplitudeInfo[1][1].GetValue()]
+            tools.background.ApplyAutomaticBackground(self, bg,
+                                                      self.parent)
+        else:
+            # Only update self.bgselected 
+            bg = self.AmplitudeInfo[1][0].GetValue()
+            tools.background.ApplyAutomaticBackground(self, bg,
+                                                      self.parent)
+
+    
     def OnTitleChanged(self, e):
         pid = self.parent.notebook.GetPageIndex(self)
         if self.tabtitle.GetValue() == "":
@@ -687,7 +732,7 @@ class FittingPanel(wx.Panel):
         else:
             # How many characters of the page title should be displayed
             # in the tab? We choose 9: AC1-012 plus 2 whitespaces
-            text = self.counter + self.tabtitle.GetValue()[:9]
+            text = self.counter + self.tabtitle.GetValue()[-9:]
         self.parent.notebook.SetPageText(pid,text)        
         #import IPython
         #IPython.embed()
@@ -878,7 +923,7 @@ class FittingPanel(wx.Panel):
 
     def settings(self):
         """ Here we define, what should be displayed at the left side
-            of the window.
+            of the fitting page/tab.
             Parameters:
         """
         horizontalsize = self.sizepanelx-10
@@ -931,19 +976,43 @@ class FittingPanel(wx.Panel):
         normbox = wx.StaticBox(self.panelsettings, label="Amplitude corrections")
         miscsizer = wx.StaticBoxSizer(normbox, wx.VERTICAL)
         miscsizer.SetMinSize((horizontalsize, -1))
-        # Type of normalization
-        bgtex = wx.StaticText(self.panelsettings, label="Background correction")
-        miscsizer.Add(bgtex)
-        bgnorm = wx.ComboBox(self.panelsettings)
-        self.Bind(wx.EVT_COMBOBOX, self.PlotAll, bgnorm)
-        miscsizer.Add(bgnorm)
+        # Intensities and Background
+        sizeint = wx.FlexGridSizer(rows=3, cols=3, vgap=5, hgap=5)
+        sizeint.Add(wx.StaticText(self.panelsettings, label="[kHz]"))
+        sizeint.Add(wx.StaticText(self.panelsettings,
+                    label="Intensity"))
+        sizeint.Add(wx.StaticText(self.panelsettings,
+                    label="Background"))
+        sizeint.Add(wx.StaticText(self.panelsettings, label="Ch1"))
+        intlabel1 = wx.TextCtrl(self.panelsettings)
+        bgspin1 = floatspin.FloatSpin(self.panelsettings,
+                        increment=0.01, digits=4, min_val=0)
+        self.Bind(floatspin.EVT_FLOATSPIN, self.OnBGSpinChanged,
+                  bgspin1)
+        sizeint.Add(intlabel1)
+        intlabel1.SetEditable(False)
+        sizeint.Add(bgspin1)
+        chtext2 = wx.StaticText(self.panelsettings, label="Ch2")
+        sizeint.Add(chtext2)
+        intlabel2 = wx.TextCtrl(self.panelsettings)
+        intlabel2.SetEditable(False)
+        bgspin2 = floatspin.FloatSpin(self.panelsettings,
+                        increment=0.01, digits=4, min_val=0)
+        self.Bind(floatspin.EVT_FLOATSPIN, self.OnBGSpinChanged,
+                  bgspin2)
+        sizeint.Add(intlabel2)
+        sizeint.Add(bgspin2)
+        miscsizer.Add(sizeint)
         ## Normalize to n?
         textnor = wx.StaticText(self.panelsettings, label="Plot normalization")
         miscsizer.Add(textnor)
         normtoNDropdown = wx.ComboBox(self.panelsettings)
         self.Bind(wx.EVT_COMBOBOX, self.PlotAll, normtoNDropdown)
         miscsizer.Add(normtoNDropdown)
-        self.AmplitudeInfo = [ bgnorm, bgtex, normtoNDropdown, textnor]
+        self.AmplitudeInfo = [ [intlabel1, intlabel2],
+                               [bgspin1, bgspin2],
+                                normtoNDropdown, textnor]
+        self.WXAmplitudeCCOnlyStuff = [chtext2, intlabel2, bgspin2]
         self.panelsettings.sizer.Add(miscsizer)
         ## Add fitting Box
         fitbox = wx.StaticBox(self.panelsettings, label="Fitting options")
diff --git a/src/plotting.py b/src/plotting.py
index fdfd742..27a5f1e 100644
--- a/src/plotting.py
+++ b/src/plotting.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module plotting
     Everything about plotting with matplotlib is located here.
@@ -13,6 +12,21 @@
     unit of Diff.coeff  : 10 µm²/s
     unit of inverse area: 100 /µm²
     unit of inv. volume : 1000 /µm³
+    
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
@@ -33,8 +47,6 @@ from matplotlib.backends.backend_wx import NavigationToolbar2Wx #We hack this
 ## This hack enables us to remember directories.
 # import edclasses
 # NavigationToolbar2Wx = edclasses.NavigationToolbar2Wx
-import os
-import sys
 import unicodedata
 
 # For finding latex tools
@@ -235,7 +247,7 @@ def savePlotCorrelation(parent, dirname, Page, uselatex=False,
         text = r""
         text += r'\['            #every line is a separate raw string...
         text += r'\begin{split}' # ...but they are all concatenated
-                                 # by the interpreter :-)
+        #                          by the interpreter :-)
         for i in np.arange(len(parms)):
             text += r' '+latexmath(labels[i])+r" &= " + str(parms[i]) +r' \\ '
         if errparms is not None:
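
The hunk above assembles the parameter table for matplotlib as a single LaTeX 'split' environment built from concatenated raw strings. A simplified, standalone illustration (the latexmath() label formatting is omitted here):

    labels = [r"n", r"\tau_{diff}"]
    parms = [12.3, 0.21]

    text = r"\["
    text += r"\begin{split}"
    for label, value in zip(labels, parms):
        text += r" " + label + r" &= " + str(value) + r" \\ "
    text += r"\end{split}"
    text += r"\]"
    print(text)
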
diff --git a/src/readfiles/__init__.py b/src/readfiles/__init__.py
index 6dea88b..98517c6 100644
--- a/src/readfiles/__init__.py
+++ b/src/readfiles/__init__.py
@@ -1,5 +1,25 @@
 # -*- coding: utf-8 -*-
+""" 
+    PyCorrFit
+    
+    Module readfiles:
+    Import correlation data from data files.
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
 
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
 # This file is necessary for this folder to become a module that can be 
 # imported by PyCorrFit.
 
diff --git a/src/readfiles/read_ASC_ALV_6000.py b/src/readfiles/read_ASC_ALV_6000.py
index 67fb0c5..e544dc8 100755
--- a/src/readfiles/read_ASC_ALV_6000.py
+++ b/src/readfiles/read_ASC_ALV_6000.py
@@ -1,4 +1,24 @@
 # -*- coding: utf-8 -*-
+""" 
+    PyCorrFit
+    
+    functions in this file: *openASC*
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
 import os
 import csv
 import numpy as np
diff --git a/src/readfiles/read_CSV_PyCorrFit.py b/src/readfiles/read_CSV_PyCorrFit.py
index 9305a94..9f16a92 100644
--- a/src/readfiles/read_CSV_PyCorrFit.py
+++ b/src/readfiles/read_CSV_PyCorrFit.py
@@ -1,4 +1,24 @@
 # -*- coding: utf-8 -*-
+""" 
+    PyCorrFit
+    
+    functions in this file: *openCSV*
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
 import os
 import csv
 import numpy as np
@@ -49,6 +69,14 @@ def openCSV(dirname, filename):
         3. A list with one element, indicating, that we are opening only
            one correlation curve.
     """
+    # Check if the file is correlation data
+    csvfile = open(os.path.join(dirname, filename), 'r')
+    firstline = csvfile.readline()
+    if firstline.lower().count("this is not correlation data") > 0:
+        csvfile.close()
+        return None
+    csvfile.close()
+    
     # Define what will happen to the file
     timefactor = 1000 # because we want ms instead of s
     csvfile = open(os.path.join(dirname, filename), 'r')
@@ -65,7 +93,7 @@ def openCSV(dirname, filename):
             # Beware that the len(row) statement has to be called first
             # (before the len(str(row[0]).strip()) ). Otherwise some
             # error would be raised.
-        elif str(row[0])[:12] == "# Type AC/CC":
+        elif str(row[0])[:12].lower() == "# Type AC/CC".lower():
             corrtype = str(row[0])[12:].strip().strip(":").strip()
             if corrtype[:17].lower() == "cross-correlation":
                 # We will later try to import a second trace
diff --git a/src/readfiles/read_FCS_Confocor3.py b/src/readfiles/read_FCS_Confocor3.py
index 36208df..1f09319 100644
--- a/src/readfiles/read_FCS_Confocor3.py
+++ b/src/readfiles/read_FCS_Confocor3.py
@@ -1,7 +1,24 @@
 # -*- coding: utf-8 -*-
 """ 
-    This works with files from the Confocor2, Confocor3 (AIM) and files
-    created from the newer ZEN Software.
+    PyCorrFit
+    
+    functions in this file: *openFCS*, *openFCS_Single*,
+                            *openFCS_Multiple*
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 import os
 import csv
@@ -10,8 +27,21 @@ import warnings
 
 
 def openFCS(dirname, filename):
-    """ The AIM software can save data as multiple or single data files.
-        The type is identified by the first line of the .fcs file. """
+    """ 
+        Load data from Zeiss Confocor3
+        Data is imported sequentially from the file.
+        PyCorrFit will give each curve an id which corresponds to the 
+        position of the curve in the .fcs file.
+        
+        The AIM software can save data as multiple or single data files.
+        The type is identified by the first line of the .fcs file.
+        
+        This works with files from the Confocor2, Confocor3 (AIM) and 
+        files created from the newer ZEN Software.
+        
+        This function is a wrapper combining *openFCS_Single* and
+        *openFCS_Multiple* 
+    """
     openfile = open(os.path.join(dirname, filename), 'r')
     identitystring = openfile.readline().strip()[:20]
     openfile.close()
@@ -24,8 +54,11 @@ def openFCS(dirname, filename):
 def openFCS_Multiple(dirname, filename):
     """ Load data from Zeiss Confocor3
         Data is imported sequentially from the file.
-        PyCorrFit will give each curve an id which corresponds to the position
-        of the curve in the .fcs file.
+        PyCorrFit will give each curve an id which corresponds to the 
+        position of the curve in the .fcs file.
+
+        This works with files from the Confocor2, Confocor3 (AIM) and 
+        files created from the newer ZEN Software.
     """
     openfile = open(os.path.join(dirname, filename), 'r')
     Alldata = openfile.readlines()
@@ -212,7 +245,6 @@ def openFCS_Multiple(dirname, filename):
     # Add traces to CC-correlation functions.
     # It seems reasonable, that if number of AC1,AC2 and CC are equal,
     # CC gets the traces accordingly.
-    cctracelist = list()
     n_ac1 = aclist.count("AC1")
     n_ac2 = aclist.count("AC2")
     n_cc12 = cclist.count("CC12")
@@ -261,7 +293,11 @@ def openFCS_Multiple(dirname, filename):
 
 
 def openFCS_Single(dirname, filename):
-    """ Load data from Zeiss Confocor3
+    """ 
+        Load data from Zeiss Confocor3 files containing only one curve.
+    
+        This works with files from the Confocor2, Confocor3 (AIM) and 
+        files created from the newer ZEN Software.
     """
     openfile = open(os.path.join(dirname, filename), 'r')
     Alldata = openfile.readlines()
diff --git a/src/readfiles/read_SIN_correlator_com.py b/src/readfiles/read_SIN_correlator_com.py
index 7fe23ca..e6ea80e 100644
--- a/src/readfiles/read_SIN_correlator_com.py
+++ b/src/readfiles/read_SIN_correlator_com.py
@@ -1,4 +1,23 @@
 # -*- coding: utf-8 -*-
+"""
+    PyCorrFit
+    functions in this file: *openSIN*
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
+"""
 import os
 import csv
 import numpy as np
diff --git a/src/readfiles/read_mat_ries.py b/src/readfiles/read_mat_ries.py
index 187b001..d241ca0 100644
--- a/src/readfiles/read_mat_ries.py
+++ b/src/readfiles/read_mat_ries.py
@@ -1,15 +1,25 @@
 # -*- coding: utf-8 -*-
 """
-Read mat files that Jonas Ries used in his programs.
-For opening .mat files, this helped a lot:
-http://stackoverflow.com/questions/7008608/
-scipy-io-loadmat-nested-structures-i-e-dictionaries
+    PyCorrFit
+    
+    functions in this file: *openMAT*
 
-The structure has been derived from "corrSFCS.m" from the SFCS.m program
-from Jonas Ries.
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
-import csv
 import numpy as np
 
 # On the windows machine the matlab binary import raised a warning.
@@ -34,6 +44,15 @@ import os
 
 
 def openMAT(dirname, filename):
+    """
+        Read mat files that Jonas Ries used in his programs.
+        For opening .mat files, this helped a lot:
+        http://stackoverflow.com/questions/7008608/
+        scipy-io-loadmat-nested-structures-i-e-dictionaries
+
+        The structure has been derived from "corrSFCS.m" from the SFCS.m
+        program from Jonas Ries.
+    """
     # initiate lists
     correlations = list()
     traces = list()
@@ -78,7 +97,7 @@ def openMAT(dirname, filename):
                     curvelist.append("AC"+str(i+1))
                     try:
                         # only trace averages are saved
-                        traceavg = g["trace"][i][j]
+                        traceavg = g["trace"][i]
                     except:
                         # No trace
                         traces.append(None)
@@ -154,7 +173,7 @@ def openMAT(dirname, filename):
                         traces.append(None)
     # Get dc2f "dual color two focus" functions
     try:
-        dc2f = g["dc2f"]
+        g["dc2f"]
     except KeyError:
         pass
     else:
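
The new openMAT docstring points at the Stack Overflow recipe for reading
nested MATLAB structures with scipy. For reference, a sketch of that recipe
(this is the generic approach from the linked answer, not a copy of
PyCorrFit's loader; the mat_struct import path matches scipy releases of
that era):

    import scipy.io
    from scipy.io.matlab.mio5_params import mat_struct

    def loadmat_nested(path):
        # Load a .mat file and convert nested mat_struct objects into
        # plain dictionaries, so fields can be accessed like g["trace"].
        raw = scipy.io.loadmat(path, squeeze_me=True, struct_as_record=False)

        def todict(obj):
            if isinstance(obj, mat_struct):
                return dict((name, todict(getattr(obj, name)))
                            for name in obj._fieldnames)
            return obj

        return dict((key, todict(value)) for key, value in raw.items())
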
diff --git a/src/tools/__init__.py b/src/tools/__init__.py
index 73e7d8c..049a2a2 100644
--- a/src/tools/__init__.py
+++ b/src/tools/__init__.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module tools
     This file contains useful tools, such as dialog boxes and other stuff,
@@ -13,6 +12,21 @@
     unit of Diff.coeff  : 10 µm²/s
     unit of inverse area: 100 /µm²
     unit of inv. volume : 1000 /µm³
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
diff --git a/src/tools/average.py b/src/tools/average.py
index 1d9a0d0..8a29494 100644
--- a/src/tools/average.py
+++ b/src/tools/average.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module tools - average
     Creates an average of curves.
@@ -12,6 +11,21 @@
     unit of Diff.coeff  : 10 µm²/s
     unit of inverse area: 100 /µm²
     unit of inv. volume : 1000 /µm³
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
@@ -20,7 +34,6 @@ import wx
 
 import misc
 import models as mdls
-import doc
 
 # Menu entry name
 MENUINFO = ["&Average data", "Create an average curve from whole session."]
@@ -56,7 +69,7 @@ class Average(wx.Frame):
         self.topSizer.Add(self.WXTextPages)
         ## Checkbox asking for Mono-Model
         self.WXCheckMono = wx.CheckBox(self.panel,
-         label="Only use pages with the same model as the current page.")
+         label="Only use pages with the same model as the first page.")
         self.WXCheckMono.SetValue(True)
         self.topSizer.Add(self.WXCheckMono)
         ## Model selection Dropdown
@@ -120,7 +133,20 @@ class Average(wx.Frame):
             return
         pages = list()
         UsedPagenumbers = list()
-        referencePage = self.parent.notebook.GetCurrentPage()
+        # Reference page is the first page of the selection!
+        #referencePage = self.parent.notebook.GetCurrentPage()
+        referencePage = None
+        for i in np.arange(self.parent.notebook.GetPageCount()):
+            Page = self.parent.notebook.GetPage(i)
+            if Page.counter.strip(" :#") == str(PageNumbers[0]):
+                referencePage = Page
+                break
+        if referencePage is None:
+            # If that did not work, we have to raise an error.
+            raise IndexError("PyCorrFit could not find the first"+
+                             " page for averaging.")
+        
         for i in np.arange(self.parent.notebook.GetPageCount()):
             Page = self.parent.notebook.GetPage(i)
             j = filter(lambda x: x.isdigit(), Page.counter)
@@ -129,7 +155,7 @@ class Average(wx.Frame):
                 if self.WXCheckMono.GetValue() == True:
                     if (Page.modelid == referencePage.modelid and
                        Page.IsCrossCorrelation == referencePage.IsCrossCorrelation):
-                        ## Check if current page has experimental data:
+                        ## Check if the page has experimental data:
                         # If there is an empty page somewhere, don't bother
                         if Page.dataexpfull is not None:
                             pages.append(Page)
@@ -147,10 +173,9 @@ class Average(wx.Frame):
                         "that you selected for averaging."
             if self.WXCheckMono.GetValue() == True:
                 texterr_a += " Note: You selected\n"+\
-                 "to only use pages with same model as the current page."
-            dlg = wx.MessageDialog(self, texterr_a, "Error", 
+                 "to only use pages with the same model as the first page."
+            wx.MessageDialog(self, texterr_a, "Error", 
                               style=wx.ICON_ERROR|wx.OK|wx.STAY_ON_TOP)
-            dlg.ShowModal() == wx.ID_OK
             return
         # Now get all the experimental data
         explist = list()
@@ -195,12 +220,11 @@ class Average(wx.Frame):
         for item in explist[1:]:
             if len(item) != len0:
                 # print an error  message
-                dlg = wx.MessageDialog(self,
+                wx.MessageDialog(self,
                 "Averaging over curves with different lengths is not"+\
                 "\nsupported. When measuring, please make sure that"+\
                 "\nthe measurement time for all curves is the same.",
                 "Error", style=wx.ICON_ERROR|wx.OK|wx.STAY_ON_TOP)
-                dlg.ShowModal() == wx.ID_OK
                 return
         # Now shorten the trace, because we want as little memory usage as
         # possible. I used this algorithm in read_FCS_Confocor3.py as well.
@@ -275,7 +299,7 @@ class Average(wx.Frame):
         self.AvgPage.PlotAll()
         self.AvgPage.Fit_enable_fitting()
         if len(pages) == 1:
-            # Use the same title as the current page
+            # Use the same title as the first page
             newtabti = referencePage.tabtitle.GetValue()
         else:
             # Create a new tab title
diff --git a/src/tools/background.py b/src/tools/background.py
index e1a6ca3..53bd265 100644
--- a/src/tools/background.py
+++ b/src/tools/background.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module tools - background
     We make some background correction here.
@@ -12,6 +11,21 @@
     unit of Diff.coeff  : 10 µm²/s
     unit of inverse area: 100 /µm²
     unit of inv. volume : 1000 /µm³
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
@@ -58,16 +72,20 @@ class BackgroundCorrection(wx.Frame):
         ## Controls
         panel = wx.Panel(self.sp)
         # text1
-        textinit = wx.StaticText(panel, label=doc.backgroundinit)
+        backgroundinit = (
+            "Correct the amplitude for non-correlated background.\n"+
+            "The background intensity <B> can be either imported\n"+
+            "from a blank measurement or set manually.")
+        textinit = wx.StaticText(panel, label=backgroundinit)
         # Radio buttons
-        self.rbtnfile = wx.RadioButton (panel, -1, 'Blank measurement: ', 
+        self.rbtnfile = wx.RadioButton(panel, -1, 'Blank measurement: ', 
                                         style = wx.RB_GROUP)
         self.rbtnfile.SetValue(True)
         self.btnbrowse = wx.Button(panel, wx.ID_ANY, 'Browse ...')
         self.rbtnhand = wx.RadioButton (panel, -1, 'Manual, <B> [kHz]: ')
         # Spincontrol
-        self.spinctrl = floatspin.FloatSpin(panel, digits=7,
-                                            increment=.1)
+        self.spinctrl = floatspin.FloatSpin(panel, digits=4, min_val=0,
+                                            increment=.01)
         self.spinctrl.Enable(False)
         # Verbose text
         self.textfile = wx.StaticText(panel,
@@ -83,19 +101,16 @@ class BackgroundCorrection(wx.Frame):
         self.btnimport = wx.Button(panel, wx.ID_ANY, 'Import into session')
         self.btnimport.Enable(False)
         # Dropdown
+        self.BGlist = ["File/User"] # updated by self.UpdateDropdown()
         textdropdown = wx.StaticText(panel, label="Show background: ")
-        self.BGlist = list()
-        #self.BGlist.append("File/User")
-        for item in self.parent.Background:
-            bgname = "{} ({:.2f} kHz)".format(item[1],item[0])
-            self.BGlist.append(bgname)
-        if len(self.BGlist) == 0:
-            ddlist = ["File/User"]
-        else:
-            ddlist = 1*self.BGlist
         self.dropdown = wx.ComboBox(panel, -1, "File/User", (15, -1),
-                     wx.DefaultSize, ddlist, wx.CB_DROPDOWN|wx.CB_READONLY)
-        #self.textafterdropdown = wx.StaticText(panel, label="")
+                     wx.DefaultSize, self.BGlist, wx.CB_DROPDOWN|wx.CB_READONLY)
+        self.UpdateDropdown()
+        # Radio buttons Channel1 and 2
+        self.rbtnCh1 = wx.RadioButton (panel, -1, 'Ch1 ', 
+                                        style = wx.RB_GROUP)
+        self.rbtnCh1.SetValue(True)
+        self.rbtnCh2 = wx.RadioButton (panel, -1, 'Ch2')
         # Apply buttons
         self.btnapply = wx.Button(panel, wx.ID_ANY, 'Apply')
         textor = wx.StaticText(panel, label=" or ")
@@ -109,16 +124,10 @@ class BackgroundCorrection(wx.Frame):
             pagenumlist.append(int(filter(lambda x: x.isdigit(), Page.counter)))
         valstring=misc.parsePagenum2String(pagenumlist)
         self.WXTextPages.SetValue(valstring)
-        
-        textyma   = wx.StaticText(panel, label="You may also: ")
+        textyma   = wx.StaticText(panel, label="Shortcut - ")
         self.btnapplyall = wx.Button(panel, wx.ID_ANY, 'Apply to all pages')
-        self.btnapply.Enable(False)
-        self.btnapplyall.Enable(False)
         textor2 = wx.StaticText(panel, label=" or ")
         self.btnremyall = wx.Button(panel, wx.ID_ANY, 'Dismiss from all pages')
-        if len(self.BGlist) <= 1:
-            self.btnrem.Enable(False)
-            self.btnremyall.Enable(False)
         # Bindings
         self.Bind(wx.EVT_BUTTON, self.OnBrowse, self.btnbrowse)
         self.Bind(wx.EVT_RADIOBUTTON, self.OnRadioFile, self.rbtnfile)
@@ -153,7 +162,8 @@ class BackgroundCorrection(wx.Frame):
         applysizer.Add(self.btnrem)
         applysizer.Add(textpages)
         applysizer.Add(self.WXTextPages)
-        applysizer.Add(self.btnapplyall)
+        applysizer.Add(self.rbtnCh1)
+        applysizer.Add(self.rbtnCh2)
         allsizer = wx.BoxSizer(wx.HORIZONTAL)
         allsizer.Add(textyma)
         allsizer.Add(self.btnapplyall)
@@ -206,9 +216,17 @@ class BackgroundCorrection(wx.Frame):
             Page = self.parent.notebook.GetPage(i)
             j = filter(lambda x: x.isdigit(), Page.counter)
             if int(j) in PageNumbers:
-                Page.bgselected = item
+                if self.rbtnCh1.GetValue() == True:
+                    Page.bgselected = item
+                else:
+                    Page.bg2selected = item
+                if Page.IsCrossCorrelation is False:
+                    # Autocorrelation only has one background!
+                    Page.bg2selected = None
                 Page.OnAmplitudeCheck("init")
                 Page.PlotAll()
+        # Clean up unused backgrounds
+        CleanupAutomaticBackground(self.parent)
 
 
     def OnApplyAll(self, event):
@@ -220,6 +238,10 @@ class BackgroundCorrection(wx.Frame):
             # Set Page 
             Page = self.parent.notebook.GetPage(i)
             Page.bgselected = item
+            if Page.IsCrossCorrelation:
+                Page.bg2selected = item
+            else:
+                Page.bg2selected = None
             try:
                 Page.OnAmplitudeCheck("init")
                 Page.PlotAll()
@@ -230,6 +252,9 @@ class BackgroundCorrection(wx.Frame):
                     style=wx.ICON_ERROR|wx.OK|wx.STAY_ON_TOP)
                 dlg.ShowModal()
                 Page.bgselected = None
+                Page.bg2selected = None
+        # Clean up unused backgrounds
+        CleanupAutomaticBackground(self.parent)
 
 
     def OnClose(self, event=None):
@@ -271,9 +296,8 @@ class BackgroundCorrection(wx.Frame):
                 errstr += str(info[1])+"\n"
                 for tb_item in traceback.format_tb(info[2]):
                     errstr += tb_item
-                dlg = wx.MessageDialog(self, errstr, "Error", 
+                wx.MessageDialog(self, errstr, "Error", 
                     style=wx.ICON_ERROR|wx.OK|wx.STAY_ON_TOP)
-                dlg.ShowModal() == wx.ID_OK
                 return
             # Usually we will get a bunch of traces. Let the user select which
             # one to take.
@@ -379,11 +403,10 @@ class BackgroundCorrection(wx.Frame):
     def OnImport(self, event):
         self.parent.Background.append([self.average, self.bgname.GetValue(), 
                                       self.trace])
-        name = "{} ({:.2f} kHz)".format(self.bgname.GetValue(), self.average)
-        self.BGlist.append(name)
+        # Next two lines are taken care of by UpdateDropdown
+        #name = "{} ({:.2f} kHz)".format(self.bgname.GetValue(), self.average)
+        #self.BGlist.append(name)
         self.UpdateDropdown()
-        # Let the user see the imported file
-        self.dropdown.SetSelection(len(self.BGlist)-1)
         self.btnremyall.Enable(True)
         self.btnrem.Enable(True)
         self.btnapplyall.Enable(True)
@@ -394,7 +417,7 @@ class BackgroundCorrection(wx.Frame):
             self.parent.notebook.GetPage(i).OnAmplitudeCheck()
 
 
-    def OnPageChanged(self, page):
+    def OnPageChanged(self, page=None):
         # We do not need the *Range* Commands here yet.
         # We open and close the SelectChannelsFrame every time we
         # import some data.
@@ -406,6 +429,16 @@ class BackgroundCorrection(wx.Frame):
             self.sp.Disable()
             return
         self.sp.Enable()
+        if len(self.BGlist) <= 0:
+            self.btnrem.Enable(False)
+            self.btnremyall.Enable(False)
+            self.btnapply.Enable(False)
+            self.btnapplyall.Enable(False)
+        else:
+            self.btnrem.Enable(True)
+            self.btnremyall.Enable(True)
+            self.btnapply.Enable(True)
+            self.btnapplyall.Enable(True)
         if (self.WXTextPages.GetValue() == ""
             and self.parent.notebook.GetPageCount() != 0):
             # Initial value for WXTextPages
@@ -469,22 +502,32 @@ class BackgroundCorrection(wx.Frame):
             Page = self.parent.notebook.GetPage(i)
             j = filter(lambda x: x.isdigit(), Page.counter)
             if int(j) in PageNumbers:
+                if self.rbtnCh1.GetValue() == True:
+                    Page.bgselected = None
+                else:
+                    Page.bg2selected = None
                 Page.bgselected = None
                 Page.OnAmplitudeCheck("init")
                 Page.PlotAll()
-
+        # Clean up unused backgrounds
+        CleanupAutomaticBackground(self.parent)
+        
 
     def OnRemoveAll(self, event):
         N = self.parent.notebook.GetPageCount()
         for i in np.arange(N):
             Page = self.parent.notebook.GetPage(i)
             Page.bgselected = None
+            Page.bg2selected = None
             Page.OnAmplitudeCheck("init")
             Page.PlotAll()
+        # Clean up unused backgrounds
+        CleanupAutomaticBackground(self.parent)
 
     def SetPageNumbers(self, pagestring):
         self.WXTextPages.SetValue(pagestring)
     
+    
     def SpinCtrlChange(self, event=None):
         # Let user see the continuous trace we will generate
         self.average = self.spinctrl.GetValue()
@@ -493,6 +536,135 @@ class BackgroundCorrection(wx.Frame):
         self.OnDraw()
 
 
-    def UpdateDropdown(self):
+    def UpdateDropdown(self, e=None):
+        self.BGlist = list()
+        #self.BGlist.append("File/User")
+        for item in self.parent.Background:
+            bgname = "{} ({:.2f} kHz)".format(item[1],item[0])
+            self.BGlist.append(bgname)
+        if len(self.BGlist) == 0:
+            ddlist = ["File/User"]
+        else:
+            ddlist = 1*self.BGlist
         self.dropdown.SetItems(self.BGlist)
+        # Show the last item
+        self.dropdown.SetSelection(len(self.BGlist)-1)
+
 
+def ApplyAutomaticBackground(page, bg, parent):
+    """
+        Creates an "automatic" background with countrate in kHz *bg* and
+        applies it to the given *page* object. If an automatic
+        background with the same countrate exists, uses it.
+        
+        Input:
+        *page*   - page to which the background should be applied
+        *bg*     - background that should be applied to that page
+                   float or list of one or two elements
+                   -> if the page is cross-correlation, the second
+                      background will be applied as well.
+        *parent* - parent containing *Background* list
+    """
+    bglist = 1*np.atleast_1d(bg)
+    # minus 1 to identify non-set background id
+    bgid = np.zeros(bglist.shape, dtype=int) - 1
+    for b in xrange(len(bglist)):
+        # Check if exists:
+        for i in xrange(len(parent.Background)):
+            if parent.Background[i][0] == bglist[b]:
+                bgid[b] = i
+        if bgid[b] == -1:
+            # Add new background
+            bgname = "AUTO: {:e} kHz \t".format(bglist[b])
+            trace = np.array([[0,bglist[b]],[1,bglist[b]]])
+            parent.Background.append([bglist[b], bgname, trace])
+            bgid[b] = len(parent.Background) - 1
+    # Apply background to page
+    # Last item is id of background
+    page.bgselected = bgid[0]
+    if page.IsCrossCorrelation:
+        if len(bgid) != 2:
+            raise NotImplementedError("Cross-correlation data needs"+
+                " exactly two signals for background-correction!")
+        # Apply second background
+        page.bg2selected = bgid[1]
+    else:
+        page.bg2selected = None
+    CleanupAutomaticBackground(parent)
+    page.OnAmplitudeCheck("init")
+    page.PlotAll()
+
+
+def CleanupAutomaticBackground(parent):
+    """
+        Goes through all pages in *parent.notebook*
+        and checks *parent.Background* for unused automatic
+        backgrounds.
+        Removes these and updates the references to all backgrounds
+        within the pages.
+    """
+    # Create a dictionary that maps each index of the old background
+    # list to the list of pages that use this background
+    BGdict = dict()
+    BG2dict = dict() # cross-correlation
+    for i in xrange(len(parent.Background)):
+        BGdict[i] = list()
+        BG2dict[i] = list()
+    # Append pages to the lists inside the dictionary
+    for i in xrange(parent.notebook.GetPageCount()):
+        Page = parent.notebook.GetPage(i)
+        if Page.bgselected is not None:
+            BGdict[Page.bgselected].append(Page)
+        if Page.bg2selected is not None:
+            BG2dict[Page.bg2selected].append(Page)
+    # Sort the keys and create a new background list
+    NewBGlist = list()
+    keyID = 0
+    keys = BGdict.keys()
+    keys.sort()
+    for key in keys:
+        # Do not delete user-generated backgrounds
+        if len(BGdict[key]) == 0 and parent.Background[key][1][-1]=="\t":
+            # This discards auto-generated backgrounds that have no
+            # pages assigned to them
+            pass
+        else:
+            for page in BGdict[key]:
+                page.bgselected = keyID
+            NewBGlist.append(parent.Background[key])
+            keyID += 1
+    # Same thing for cross-correlation (two bg signals)
+    #keyID = 0
+    keys = BG2dict.keys()
+    keys.sort()
+    for key in keys:
+        # Do not delete user-generated backgrounds
+        if len(BG2dict[key]) == 0 and parent.Background[key][1][-1]=="\t":
+            # This discards auto-generated backgrounds that have no
+            # pages assigned to them
+            pass
+        elif parent.Background[key][1][-1]=="\t":
+            # We already added the user-defined backgrounds
+            # Therefore, we only check for auto-generated backgrounds
+            # ("\t")
+            for page in BG2dict[key]:
+                page.bg2selected = keyID
+            NewBGlist.append(parent.Background[key])
+            keyID += 1
+    # Finally, write back background list
+    parent.Background = NewBGlist
+    # If the background correction tool is open, update the list
+    # of backgrounds.
+    # (self.MyName="BACKGROUND")
+    toolkeys = parent.ToolsOpen.keys()
+    if len(toolkeys) == 0:
+        pass
+    else:
+        for key in toolkeys:
+            tool = parent.ToolsOpen[key]
+            try:
+                if tool.MyName == "BACKGROUND":
+                    tool.UpdateDropdown()
+                    tool.OnPageChanged()
+            except:
+                pass
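
The new module-level helpers rely on two conventions visible above: each
entry of parent.Background is a list [countrate in kHz, name, trace], and
automatically created entries carry a trailing tab in their name. A small
self-contained sketch of one such entry (values are illustrative):

    import numpy as np

    bg_rate = 0.7                                   # countrate in kHz
    bgname = "AUTO: {:e} kHz \t".format(bg_rate)    # "AUTO: 7.000000e-01 kHz \t"
    trace = np.array([[0, bg_rate], [1, bg_rate]])  # flat two-point trace
    entry = [bg_rate, bgname, trace]                # one parent.Background item

    is_auto = entry[1][-1] == "\t"                  # True only for AUTO entries

For cross-correlation pages, ApplyAutomaticBackground expects a two-element
list, e.g. ApplyAutomaticBackground(page, [0.7, 0.9], parent), so that
Page.bgselected and Page.bg2selected each reference their own entry;
CleanupAutomaticBackground then drops tab-tagged entries that no page
references and renumbers the remaining indices.
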
diff --git a/src/tools/batchcontrol.py b/src/tools/batchcontrol.py
index 690c264..f97aef9 100644
--- a/src/tools/batchcontrol.py
+++ b/src/tools/batchcontrol.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module tools - batch
     Stuff that concerns batch processing.
@@ -12,13 +11,25 @@
     unit of Diff.coeff  : 10 µm²/s
     unit of inverse area: 100 /µm²
     unit of inv. volume : 1000 /µm³
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
 import numpy as np
-import platform
-import sys
-import traceback           # for Error handling
 import wx
 
 import openfile as opf     # How to treat an opened file
diff --git a/src/tools/chooseimport.py b/src/tools/chooseimport.py
index 7cae653..0a75b3d 100644
--- a/src/tools/chooseimport.py
+++ b/src/tools/chooseimport.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module tools - chooseimport
     Displays a window that lets the user choose what type
@@ -13,6 +12,21 @@
     unit of Diff.coeff  : 10 µm²/s
     unit of inverse area: 100 /µm²
     unit of inv. volume : 1000 /µm³
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
@@ -37,11 +51,15 @@ class ChooseImportTypes(wx.Dialog):
         #    title="Choose types", size=(250, 200))
         wx.Dialog.__init__(self, parent, -1, "Choose models")
         self.keys = list()
-         ## Content
+        ## Content
         self.panel = wx.Panel(self)
         self.sizer = wx.BoxSizer(wx.VERTICAL)
         self.boxes = dict()
-        textinit = wx.StaticText(self.panel, label=doc.chooseimport)
+        # For the selection of types to import when doing import Data
+        chooseimport = ("Several types of data were found in\n"+
+                        "the chosen file. Please select what\n"+
+                        "type(s) you would like to import.")
+        textinit = wx.StaticText(self.panel, label=chooseimport)
         self.sizer.Add(textinit)
         thekeys = curvedict.keys()
         thekeys.sort()
@@ -174,7 +192,6 @@ class ChooseImportTypesModel(wx.Dialog):
         # Get the type of curves we want to look at
         index = buttonevent.GetId() - 8000
         self.buttonindex = index
-        curvedict = dict()
         key = self.curvekeys[index]
         # Get correlation curves for corresponding type
         corrcurves = dict()
@@ -189,7 +206,7 @@ class ChooseImportTypesModel(wx.Dialog):
         prev_selected = list()
         for item in self.kept_curvedict.keys():
             prev_selected += self.kept_curvedict[item]
-        Selector = overlaycurves.Wrapper_OnImport(self.parent, corrcurves,
+        overlaycurves.Wrapper_OnImport(self.parent, corrcurves,
                                                  self.OnSelected, prev_selected,
                                                  labels=labeldict)
 
diff --git a/src/tools/comment.py b/src/tools/comment.py
index ab9792e..2f0988c 100755
--- a/src/tools/comment.py
+++ b/src/tools/comment.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module tools - comment
     Just edit the sessions comment.
@@ -12,6 +11,21 @@
     unit of Diff.coeff  : 10 µm²/s
     unit of inverse area: 100 /µm²
     unit of inv. volume : 1000 /µm³
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
diff --git a/src/tools/datarange.py b/src/tools/datarange.py
index 1b86e67..57f02d0 100644
--- a/src/tools/datarange.py
+++ b/src/tools/datarange.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module tools - channels
     Let the user choose time domains.
@@ -12,13 +11,27 @@
     unit of Diff.coeff  : 10 µm²/s
     unit of inverse area: 100 /µm²
     unit of inv. volume : 1000 /µm³
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
 import wx
 import numpy as np
 
-import doc
 
 # Menu entry name
 MENUINFO = ["&Data range",
@@ -144,7 +157,7 @@ class SelectChannels(wx.Frame):
 
     def OnApply(self, event=None):
         self.SetValues()
-        self.Page.PlotAll(event="init")
+        self.Page.PlotAll()
 
 
     def OnApplyAll(self, event=None):
diff --git a/src/tools/example.py b/src/tools/example.py
index 92d741f..d2ed007 100644
--- a/src/tools/example.py
+++ b/src/tools/example.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module tools - example
     This is an example tool. You will need to edit __init__.py inside this
@@ -15,6 +14,21 @@
     unit of Diff.coeff  : 10 µm²/s
     unit of inverse area: 100 /µm²
     unit of inv. volume : 1000 /µm³
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
diff --git a/src/tools/globalfit.py b/src/tools/globalfit.py
index 80ffb5d..e40e2ae 100644
--- a/src/tools/globalfit.py
+++ b/src/tools/globalfit.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module tools - globalfit
     Perform global fitting on pages which share parameters.
@@ -12,6 +11,21 @@
     unit of Diff.coeff  : 10 µm²/s
     unit of inverse area: 100 /µm²
     unit of inv. volume : 1000 /µm³
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
@@ -281,8 +295,9 @@ check parameters on each page and start 'Global fit'.
             # we don't use anyhow.
             # We are doing this for all elements, because in the future, other (?)
             # weighting methods might be implemented.
-            for i in np.arange(len(weightlist)):
-                weightlist[1] = weightlist[1].split("(")[0].strip()
+            #for i in np.arange(len(weightlist)):
+            #    weightlist[i] = weightlist[i].split("(")[0].strip()
+            weightlist[1] = weightlist[1].split("(")[0].strip()
             self.weightedfitdrop.SetItems(weightlist)
             try:
                 # if there is no data, this could go wrong
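
The surviving line above only sanitizes the second dropdown entry, which
apparently carries a parenthesized suffix. A tiny worked example of the
string operation (the label text is made up; the real entries come from the
page's weighted-fit choices):

    label = "Average (example suffix)"
    label.split("(")[0].strip()     # -> "Average"
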
diff --git a/src/tools/info.py b/src/tools/info.py
index 451627e..0adbb89 100644
--- a/src/tools/info.py
+++ b/src/tools/info.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module tools - info
     Open a text window with lots of information.
@@ -12,6 +11,21 @@
     unit of Diff.coeff  : 10 µm²/s
     unit of inverse area: 100 /µm²
     unit of inv. volume : 1000 /µm³
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
@@ -213,17 +227,37 @@ class InfoClass(object):
             normparmtext =  MoreInfo[supnum][0]
         Title.append(["Normalization", normparmtext ]) 
         ## Background
-        bgselected = Page.bgselected # Selected Background
         Background = list()
-        if bgselected is not None:
-            bgname = Page.parent.Background[bgselected][1]
-            if len(bgname) == 0:
-                # Prevent saving no name
-                bgname = "NoName"
-            bgrate = Page.parent.Background[bgselected][0]
-            Background.append([ "bg name", bgname ])
-            Background.append([ "bg rate [kHz]", bgrate ])
-            InfoDict["background"] = Background
+        if Page.IsCrossCorrelation:
+            if ( Page.bgselected is not None and
+                 Page.bg2selected is not None     ):
+                # Channel 1
+                bgname = Page.parent.Background[Page.bgselected][1]
+                if len(bgname) == 0:
+                    # Prevent saving no name
+                    bgname = "NoName"
+                Background.append([ "bg name Ch1", bgname])
+                Background.append([ "bg rate Ch1 [kHz]", 
+                           Page.parent.Background[Page.bgselected][0] ])
+                # Channel 2
+                bg2name = Page.parent.Background[Page.bg2selected][1]
+                if len(bg2name) == 0:
+                    # Prevent saving no name
+                    bg2name = "NoName"
+                Background.append([ "bg name Ch2", bg2name])
+                Background.append([ "bg rate Ch2 [kHz]", 
+                          Page.parent.Background[Page.bg2selected][0] ])
+                InfoDict["background"] = Background
+        else:
+            if Page.bgselected is not None:
+                bgname = Page.parent.Background[Page.bgselected][1]
+                if len(bgname) == 0:
+                    # Prevent saving no name
+                    bgname = "NoName"
+                bgrate = Page.parent.Background[Page.bgselected][0]
+                Background.append([ "bg name", bgname ])
+                Background.append([ "bg rate [kHz]", bgrate ])
+                InfoDict["background"] = Background
         ## Function doc string
         InfoDict["modeldoc"] = [Page.active_fct.func_doc]
         InfoDict["title"] = Title
@@ -250,10 +284,11 @@ class ShowInfo(wx.Frame):
         initial_sizec = (initial_size[0]-6, initial_size[1]-30)
         self.SetMinSize(wx.Size(200,200))
         self.SetSize(initial_size)
-         ## Content
+        ## Content
         self.panel = wx.Panel(self)
         self.control = wx.TextCtrl(self.panel, style=wx.TE_MULTILINE, 
                         size=initial_sizec)
+        self.control.SetEditable(False)
         font1 = wx.Font(10, wx.MODERN, wx.NORMAL, wx.NORMAL, False, u'Monospace')
         self.control.SetFont(font1)
         btncopy = wx.Button(self.panel, wx.ID_CLOSE, 'Copy to clipboard')
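
With the change above, cross-correlation pages record both channels in the
"background" section of the info dictionary. The resulting rows have the
following shape (names and rates below are illustrative, not real data):

    Background = [["bg name Ch1", "blank_ch1"],
                  ["bg rate Ch1 [kHz]", 0.47],
                  ["bg name Ch2", "blank_ch2"],
                  ["bg rate Ch2 [kHz]", 0.52]]

    # Autocorrelation pages keep the previous two-row form,
    # ["bg name", ...] and ["bg rate [kHz]", ...].
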
diff --git a/src/tools/overlaycurves.py b/src/tools/overlaycurves.py
index 71d1469..fdf0bdb 100644
--- a/src/tools/overlaycurves.py
+++ b/src/tools/overlaycurves.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module tools - selectcurves
 
@@ -14,6 +13,21 @@
     unit of Diff.coeff  : 10 µm²/s
     unit of inverse area: 100 /µm²
     unit of inv. volume : 1000 /µm³
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 from matplotlib import cm
@@ -67,8 +81,8 @@ class Wrapper_OnImport(object):
 class Wrapper_Tools(object):
     def __init__(self, parent):
         """
-        Wrapper for tools menu. Gets curvedict from parent and starts curve
-        selection. See *UserSelectCurves* class.
+        Wrapper for tools menu. Gets curvedict from parent and starts
+        curve selection. See *UserSelectCurves* class.
         """
         # parent is the main frame of PyCorrFit
         self.parent = parent
@@ -192,6 +206,8 @@ class Wrapper_Tools(object):
             try:
                 tool.SetPageNumbers(string)
             except:
+                # tool does not have this function and hence does not
+                # need numbers.
                 pass
         
         
diff --git a/src/tools/parmrange.py b/src/tools/parmrange.py
index 4cc7ff7..122aace 100644
--- a/src/tools/parmrange.py
+++ b/src/tools/parmrange.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module tools - RangeSelector
     Select the range in which the parameter should reside for fitting.
@@ -14,6 +13,21 @@
     unit of Diff.coeff  : 10 µm²/s
     unit of inverse area: 100 /µm²
     unit of inv. volume : 1000 /µm³
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
@@ -129,11 +143,11 @@ class RangeSelector(wx.Frame):
         """
         # Read out parameters from all controls
         for i in range(len(self.WXparmlist)):
-           self.parameter_range[i][0] = self.WXparmlist[i][0].GetValue()
-           self.parameter_range[i][1] = self.WXparmlist[i][2].GetValue()
-           if self.parameter_range[i][0] > self.parameter_range[i][1]:
-               self.parameter_range[i][1] = 1.01*np.abs(self.parameter_range[i][0])
-               self.WXparmlist[i][2].SetValue(self.parameter_range[i][1])
+            self.parameter_range[i][0] = self.WXparmlist[i][0].GetValue()
+            self.parameter_range[i][1] = self.WXparmlist[i][2].GetValue()
+            if self.parameter_range[i][0] > self.parameter_range[i][1]:
+                self.parameter_range[i][1] = 1.01*np.abs(self.parameter_range[i][0])
+                self.WXparmlist[i][2].SetValue(self.parameter_range[i][1])
         # Set parameters
         l, parm0 = mdls.GetInternalFromHumanReadableParm(self.Page.modelid,
                                                      self.parameter_range[:,0])
diff --git a/src/tools/plotexport.py b/src/tools/plotexport.py
index bcbe190..c92fcbd 100644
--- a/src/tools/plotexport.py
+++ b/src/tools/plotexport.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module tools - plotexport
     Let the user create nice plots of our data.
@@ -13,13 +12,25 @@
     unit of Diff.coeff  : 10 µm²/s
     unit of inverse area: 100 /µm²
     unit of inv. volume : 1000 /µm³
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
 import wx
-import numpy as np
-
-import models as mdls
 
 
 class Tool(wx.Frame):
@@ -41,9 +52,9 @@ class Tool(wx.Frame):
         self.panel = wx.Panel(self)
         btnexample = wx.Button(self.panel, wx.ID_ANY, 'Example button')
         # Binds the button to the function - close the tool
-        self.Bind(wx.EVT_BUTTON, self.OnClose, btncopy)
+        self.Bind(wx.EVT_BUTTON, self.OnClose, btnexample)
         self.topSizer = wx.BoxSizer(wx.VERTICAL)
-        self.topSizer.Add(btncopy)
+        self.topSizer.Add(btnexample)
         self.panel.SetSizer(self.topSizer)
         self.topSizer.Fit(self)
         self.SetMinSize(self.topSizer.GetMinSizeTuple())
diff --git a/src/tools/simulation.py b/src/tools/simulation.py
index c346695..ab2cb0a 100644
--- a/src/tools/simulation.py
+++ b/src/tools/simulation.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module tools - simulation
     Enables the user to change plotting parameters and replotting fast.
@@ -13,6 +12,21 @@
     unit of Diff.coeff  : 10 µm²/s
     unit of inverse area: 100 /µm²
     unit of inv. volume : 1000 /µm³
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
@@ -177,7 +191,7 @@ class Slide(wx.Frame):
             try:
                 C = func(A,B)
             except ZeroDivisionError:
-                a = 0
+                pass
             else:
                 return B, C
         else:
@@ -185,7 +199,7 @@ class Slide(wx.Frame):
             try:
                 B = func(A,C)
             except ZeroDivisionError:
-                a = 0
+                pass
             else:
                 return B, C
 
@@ -361,13 +375,13 @@ class Slide(wx.Frame):
     def SetStart(self):
         # Sets first and second variable of a page to
         # Parameters A and B respectively.
-        self.SavedParms = self.parent.PackParameters(self.Page)
         if self.parent.notebook.GetPageCount() == 0:
             self.modelid = 6000
             ParmLabels, ParmValues = \
                    mdls.GetHumanReadableParms(self.modelid,
                                               mdls.valuedict[6000][1])
         else:
+            self.SavedParms = self.parent.PackParameters(self.Page)
             self.modelid = self.Page.modelid
             ParmLabels, ParmValues = \
                    mdls.GetHumanReadableParms(self.modelid,
diff --git a/src/tools/statistics.py b/src/tools/statistics.py
index 9bda0f0..ee100fd 100644
--- a/src/tools/statistics.py
+++ b/src/tools/statistics.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module tools - statistics
     Provide the user with tab-separated statistics of their curves.
@@ -13,13 +12,27 @@
     unit of Diff.coeff  : 10 µm²/s
     unit of inverse area: 100 /µm²
     unit of inv. volume : 1000 /µm³
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
 import wx
 import wx.lib.plot as plot              # Plotting in wxPython
 import numpy as np
-import os
 
 from info import InfoClass
 import misc
@@ -98,9 +111,6 @@ class Stat(wx.Frame):
         self.WXTextPages.SetValue(valstring)
         ## Plot parameter dropdown box
         self.PlotParms = self.GetListOfPlottableParms()
-        Parmlist = list()
-        #for item in self.PlotParms:
-        #    Parmlist.append(item[0])
         Parmlist = self.PlotParms
         DDtext = wx.StaticText(self.panel, 
                              label="Plot parameter ")
@@ -164,7 +174,6 @@ class Stat(wx.Frame):
         # in the statistics window afterwards.
         # new iteration
         keys = Infodict.keys()
-        head = list()
         body = list()
         tail = list()
 
@@ -419,8 +428,6 @@ class Stat(wx.Frame):
                     # Only pages selected in self.WXTextPages
                     pages.append(Page)
         plotcurve = list()
-        InfoCl = InfoClass()
-        oldpage = self.Page
         for page in pages:
             self.Page = page
             pllabel, pldata = self.GetListOfPlottableParms(return_values=True)
@@ -494,12 +501,9 @@ class Stat(wx.Frame):
         self.Page = page
         self.InfoClass = InfoClass(CurPage=self.Page)
         self.PlotParms = self.GetListOfPlottableParms()
-        #Parmlist = list()
         # Make sure the selection stays the same
         DDselid = 0
         for i in range(len(self.PlotParms)):
-            #Parmlist.append(self.PlotParms[i][0])
-            #if DDselection == self.PlotParms[i][0]:
             if DDselection == self.PlotParms[i]:
                 DDselid = i
         Parmlist = self.PlotParms
@@ -547,7 +551,6 @@ class Stat(wx.Frame):
             openedfile = open(filename, 'wb')
             # Get Parameterlist of all Pages with same model id as
             # Self.Page
-            modelid = self.Page.modelid
             # This creates self.SaveInfo:
             self.GetWantedParameters()
             # Write header
diff --git a/src/tools/trace.py b/src/tools/trace.py
index 6d797ea..8eb5a4e 100644
--- a/src/tools/trace.py
+++ b/src/tools/trace.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module tools - trace
     Show the trace of a file.
@@ -12,11 +11,25 @@
     unit of Diff.coeff  : 10 µm²/s
     unit of inverse area: 100 /µm²
     unit of inv. volume : 1000 /µm³
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
 import wx
-import numpy as np
 import wx.lib.plot as plot    
 
 # Menu entry name
diff --git a/src/usermodel.py b/src/usermodel.py
index 14484d1..b9b5924 100644
--- a/src/usermodel.py
+++ b/src/usermodel.py
@@ -1,6 +1,5 @@
 # -*- coding: utf-8 -*-
 """ PyCorrFit
-    Paul Müller, Biotec - TU Dresden
 
     Module: user model:
     When the user wants to use his own functions.
@@ -16,6 +15,21 @@
     unit of Diff.coeff  : 10 µm²/s
     unit of inverse area: 100 /µm²
     unit of inv. volume : 1000 /µm³
+
+    Copyright (C) 2011-2012  Paul Müller
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License 
+    along with this program. If not, see <http://www.gnu.org/licenses/>.
 """
 
 
@@ -32,7 +46,6 @@ except ImportError:
     # Define Function, so PyCorrFit will start, even if sympy is not there.
     # wixi needs Function.
     Function = object
-import sys
 import wx
 
 import models as mdls

-- 
Alioth's /git/debian-med/git-commit-notice on /srv/git.debian.org/git/debian-med/pycorrfit.git



More information about the debian-med-commit mailing list