[med-svn] [python-mne] 285/353: ENH : simulation code refactoring

Yaroslav Halchenko debian at onerussian.com
Fri Nov 27 17:25:17 UTC 2015


This is an automated email from the git hooks/post-receive script.

yoh pushed a commit to tag 0.4
in repository python-mne.

commit 4c12dd9b60ec08690cb7b98a1bb15bc9a97b27c0
Author: Alexandre Gramfort <alexandre.gramfort at inria.fr>
Date:   Tue Jul 17 10:58:15 2012 +0200

    ENH : simulation code refactoring
---
 examples/plot_simulate_evoked_data.py |   3 +-
 mne/__init__.py                       |   2 +-
 mne/label.py                          | 148 ++++++++++++++++++++++-
 mne/simulation/__init__.py            |   4 +-
 mne/simulation/evoked.py              |   4 +-
 mne/simulation/source.py              | 217 ++++------------------------------
 mne/source_estimate.py                |  30 +++++
 7 files changed, 207 insertions(+), 201 deletions(-)
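
For context, the main API change in this refactoring is that the simulation
helpers now take a source space (fwd['src']) rather than a whole forward
solution, as the first hunk below shows. A minimal usage sketch under that
assumption; the file names are placeholders, not files shipped with the
commit:

    import numpy as np
    import mne
    from mne.simulation import generate_sparse_stc

    fwd = mne.read_forward_solution('sample-fwd.fif')   # placeholder path
    labels = [mne.read_label(f) for f in ('Aud-lh.label', 'Aud-rh.label')]

    tmin, tstep = -0.1, 1.0 / 1000.0                 # 1 kHz sampling assumed
    stc_data = 1e-9 * np.random.randn(len(labels), 300)  # one waveform per label

    # the source space is now passed explicitly, not the forward dict
    stc = generate_sparse_stc(fwd['src'], labels, stc_data, tmin, tstep,
                              random_state=0)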

diff --git a/examples/plot_simulate_evoked_data.py b/examples/plot_simulate_evoked_data.py
index 7d4c9b5..e3d2506 100644
--- a/examples/plot_simulate_evoked_data.py
+++ b/examples/plot_simulate_evoked_data.py
@@ -63,7 +63,8 @@ stc_data *= 100 * 1e-9  # use nAm as unit
 
 # time translation
 stc_data[1] = np.roll(stc_data[1], 80)
-stc = generate_sparse_stc(fwd, labels, stc_data, tmin, tstep, random_state=0)
+stc = generate_sparse_stc(fwd['src'], labels, stc_data, tmin, tstep,
+                          random_state=0)
 
 ###############################################################################
 # Generate noisy evoked data
diff --git a/mne/__init__.py b/mne/__init__.py
index 0deffd2..352c2ca 100644
--- a/mne/__init__.py
+++ b/mne/__init__.py
@@ -17,7 +17,7 @@ from .surface import read_bem_surfaces, read_surface, write_bem_surface
 from .source_space import read_source_spaces
 from .epochs import Epochs
 from .label import label_time_courses, read_label, label_sign_flip, \
-                   write_label, stc_to_label
+                   write_label, stc_to_label, grow_labels
 from .misc import parse_config, read_reject_parameters
 from .transforms import transform_coordinates
 from .proj import read_proj, write_proj, compute_proj_epochs, \
diff --git a/mne/label.py b/mne/label.py
index 40eae0d..40d470c 100644
--- a/mne/label.py
+++ b/mne/label.py
@@ -1,8 +1,13 @@
+# Authors: Alexandre Gramfort <gramfort at nmr.mgh.harvard.edu>
+#          Martin Luessi <mluessi at nmr.mgh.harvard.edu>
+#
+# License: BSD (3-clause)
+
 import os
 import numpy as np
 from scipy import linalg
 
-from .source_estimate import read_stc, mesh_edges
+from .source_estimate import read_stc, mesh_edges, mesh_dist
 from .surface import read_surface
 
 
@@ -224,3 +229,144 @@ def stc_to_label(stc, src, smooth=5):
         labels.append(label)
 
     return labels
+
+
+def _verts_within_dist(graph, source, max_dist):
+    """Find all vertices wihin a maximum geodesic distance from source
+
+    Parameters:
+    -----------
+    graph : scipy.sparse.csr_matrix
+        Sparse matrix with distances between adjacent vertices
+    source : int
+        Source vertex
+    max_dist: float
+        Maximum geodesic distance
+
+    Returns:
+    --------
+    verts : array
+        Vertices within max_dist
+    dist : array
+        Distances from source vertex
+    """
+    dist_map = {}
+    dist_map[source] = 0
+    verts_added_last = [source]
+    # add neighbors until no more neighbors within max_dist can be found
+    while len(verts_added_last) > 0:
+        verts_added = []
+        for i in verts_added_last:
+            v_dist = dist_map[i]
+            row = graph[i, :]
+            neighbor_vert = row.indices
+            neighbor_dist = row.data
+            for j, d in zip(neighbor_vert, neighbor_dist):
+                n_dist = v_dist + d
+                if j in dist_map:
+                    if n_dist < dist_map[j]:
+                        dist_map[j] = n_dist
+                else:
+                    if n_dist <= max_dist:
+                        dist_map[j] = n_dist
+                        # we found a new vertex within max_dist
+                        verts_added.append(j)
+        verts_added_last = verts_added
+
+    verts = np.sort(np.array(dist_map.keys(), dtype=np.int))
+    dist = np.array([dist_map[v] for v in verts])
+
+    return verts, dist
+
+
+def grow_labels(subject, seeds, extents, hemis, subjects_dir=None):
+    """Generate circular labels in source space with region growing
+
+    This function generates a number of labels in source space by growing
+    regions starting from the vertices defined in "seeds". For each seed, a
+    label is generated containing all vertices within a maximum geodesic
+    distance on the white matter surface from the seed.
+
+    Note: "extents" and "hemis" can either be arrays with the same length as
+          seeds, which allows using a different extent and hemisphere for each
+          label, or scalars, in which case the same extent and hemisphere are
+          used for each label.
+
+    Parameters:
+    ----------
+    subject : string
+        Name of the subject as in SUBJECTS_DIR
+    seeds : array or int
+        Seed vertex numbers
+    extents : array or float
+        Extents (radius in mm) of the labels
+    hemis : array or int
+        Hemispheres to use for the labels (0: left, 1: right)
+    subjects_dir : string
+        Path to SUBJECTS_DIR if not set in the environment
+
+    Returns:
+    --------
+    labels : list
+        List with labels. Each label is a dictionary with keys:
+            comment     Comment with seed vertex and extent
+            hemi        Hemisphere of the label ('lh', or 'rh')
+            vertices    Vertex indices (0 based)
+            pos         Locations in millimeters
+            values      Distances in millimeters from seed
+    """
+    if subjects_dir is None:
+        if 'SUBJECTS_DIR' in os.environ:
+            subjects_dir = os.environ['SUBJECTS_DIR']
+        else:
+            raise ValueError('SUBJECTS_DIR environment variable not set')
+
+    # make sure the inputs are arrays
+    seeds = np.atleast_1d(seeds)
+    extents = np.atleast_1d(extents)
+    hemis = np.atleast_1d(hemis)
+
+    n_seeds = len(seeds)
+
+    if len(extents) != 1 and len(extents) != n_seeds:
+        raise ValueError('The extents parameter has to be of length 1 or '
+                         'len(seeds)')
+
+    if len(hemis) != 1 and len(hemis) != n_seeds:
+        raise ValueError('The hemis parameter has to be of length 1 or '
+                         'len(seeds)')
+
+    # make the arrays the same length as seeds
+    if len(extents) == 1:
+        extents = np.tile(extents, n_seeds)
+
+    if len(hemis) == 1:
+        hemis = np.tile(hemis, n_seeds)
+
+    hemis = ['lh' if h == 0 else 'rh' for h in hemis]
+
+    # load the surfaces and create the distance graphs
+    tris, vert, dist = {}, {}, {}
+    for hemi in set(hemis):
+        surf_fname = os.path.join(subjects_dir, subject, 'surf',
+                                  hemi + '.white')
+        vert[hemi], tris[hemi] = read_surface(surf_fname)
+        dist[hemi] = mesh_dist(tris[hemi], vert[hemi])
+
+    # create the patches
+    labels = []
+    for seed, extent, hemi in zip(seeds, extents, hemis):
+        label_verts, label_dist = _verts_within_dist(dist[hemi], seed, extent)
+
+        # create a label
+        label = dict()
+        label['comment'] = 'Circular label: seed=%d, extent=%0.1fmm' %\
+                           (seed, extent)
+        label['vertices'] = label_verts
+        label['pos'] = vert[hemi][label_verts]
+        label['values'] = label_dist
+        label['hemi'] = hemi
+
+        labels.append(label)
+
+    return labels
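
A hedged sketch of calling the new grow_labels helper exposed by this commit
(the subject name, seed vertices and subjects_dir path are made up for
illustration and are not part of the commit):

    from mne.label import grow_labels

    # grow an 8 mm patch around vertex 1000 on the left hemisphere and a
    # 5 mm patch around vertex 2000 on the right hemisphere
    labels = grow_labels('sample', seeds=[1000, 2000], extents=[8.0, 5.0],
                         hemis=[0, 1], subjects_dir='/path/to/subjects_dir')
    for label in labels:
        print('%s: %d vertices' % (label['comment'], len(label['vertices'])))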
diff --git a/mne/simulation/__init__.py b/mne/simulation/__init__.py
index 70c5eba..7076a90 100644
--- a/mne/simulation/__init__.py
+++ b/mne/simulation/__init__.py
@@ -3,6 +3,4 @@
 
 from .evoked import generate_evoked
 
-from .source import select_source_in_label, generate_sparse_stc, \
-                    circular_source_labels
-
+from .source import select_source_in_label, generate_sparse_stc
diff --git a/mne/simulation/evoked.py b/mne/simulation/evoked.py
index cdecb32..09813fa 100644
--- a/mne/simulation/evoked.py
+++ b/mne/simulation/evoked.py
@@ -1,6 +1,6 @@
 # Authors: Alexandre Gramfort <gramfort at nmr.mgh.harvard.edu>
+#          Daniel Strohmeier <daniel.strohmeier at tu-ilmenau.de>
 #          Martin Luessi <mluessi at nmr.mgh.harvard.edu>
-#          Matti Hamalainen <msh at nmr.mgh.harvard.edu>
 #
 # License: BSD (3-clause)
 import copy
@@ -123,5 +123,3 @@ def add_noise_evoked(evoked, noise, snr, tmin=None, tmax=None):
     noise.data = 10 ** ((tmp - float(snr)) / 20) * noise.data
     evoked.data += noise.data
     return evoked
-
-
diff --git a/mne/simulation/source.py b/mne/simulation/source.py
index ef54171..bddb5ed 100644
--- a/mne/simulation/source.py
+++ b/mne/simulation/source.py
@@ -1,26 +1,21 @@
 # Authors: Alexandre Gramfort <gramfort at nmr.mgh.harvard.edu>
 #          Martin Luessi <mluessi at nmr.mgh.harvard.edu>
-#          Matti Hamalainen <msh at nmr.mgh.harvard.edu>
+#          Daniel Strohmeier <daniel.strohmeier at tu-ilmenau.de>
 #
 # License: BSD (3-clause)
-import os
 
 import numpy as np
-from scipy.sparse import csr_matrix
-
 from ..minimum_norm.inverse import _make_stc
 from ..utils import check_random_state
-from ..surface import read_surface
-from ..source_estimate import mesh_edges
 
 
-def select_source_in_label(fwd, label, random_state=None):
+def select_source_in_label(src, label, random_state=None):
     """Select source positions using a label
 
     Parameters
     ----------
-    fwd : dict
-        A forward solution
+    src : list of dict
+        The source space
     label : dict
         the label (read with mne.read_label)
     random_state : None | int | np.random.RandomState
@@ -39,18 +34,18 @@ def select_source_in_label(fwd, label, random_state=None):
     rng = check_random_state(random_state)
 
     if label['hemi'] == 'lh':
-        src_sel_lh = np.intersect1d(fwd['src'][0]['vertno'], label['vertices'])
+        src_sel_lh = np.intersect1d(src[0]['vertno'], label['vertices'])
         idx_select = rng.randint(0, len(src_sel_lh), 1)
         lh_vertno.append(src_sel_lh[idx_select][0])
     else:
-        src_sel_rh = np.intersect1d(fwd['src'][1]['vertno'], label['vertices'])
+        src_sel_rh = np.intersect1d(src[1]['vertno'], label['vertices'])
         idx_select = rng.randint(0, len(src_sel_rh), 1)
         rh_vertno.append(src_sel_rh[idx_select][0])
 
     return lh_vertno, rh_vertno
 
 
-def generate_sparse_stc(fwd, labels, stc_data, tmin, tstep, random_state=0):
+def generate_sparse_stc(src, labels, stc_data, tmin, tstep, random_state=0):
     """Generate sparse sources time courses from waveforms and labels
 
     This function randomly selects a single vertex in each label and assigns
@@ -58,8 +53,8 @@ def generate_sparse_stc(fwd, labels, stc_data, tmin, tstep, random_state=0):
 
     Parameters
     ----------
-    fwd : dict
-        A forward solution
+    src : list of dict
+        The source space
     labels : list of dict
         The labels
     stc_data : array (shape: len(labels) x n_times)
@@ -81,18 +76,25 @@ def generate_sparse_stc(fwd, labels, stc_data, tmin, tstep, random_state=0):
 
     rng = check_random_state(random_state)
     vertno = [[], []]
-    for label in labels:
-        lh_vertno, rh_vertno = select_source_in_label(fwd, label, rng)
+    lh_data = list()
+    rh_data = list()
+    for label_data, label in zip(stc_data, labels):
+        lh_vertno, rh_vertno = select_source_in_label(src, label, rng)
         vertno[0] += lh_vertno
         vertno[1] += rh_vertno
+        if len(lh_vertno) != 0:
+            lh_data.append(label_data)
+        elif len(rh_vertno) != 0:
+            rh_data.append(label_data)
+        else:
+            raise ValueError('No vertno found.')
     vertno = map(np.array, vertno)
-    # XXX there is a bug here, the ordering of the stc_data is assumed
-    # to be the same as in vertno (not the same as labels, left comes first)
-    stc = _make_stc(stc_data, tmin, tstep, vertno)
+    data = np.r_[lh_data, rh_data]
+    stc = _make_stc(data, tmin, tstep, vertno)
     return stc
 
 
-def generate_stc(fwd, labels, stc_data, tmin, tstep, value_fun=None):
+def generate_stc(src, labels, stc_data, tmin, tstep, value_fun=None):
     """Generate sources time courses from waveforms and labels
 
     This function generates a source estimate with extended sources by
@@ -111,8 +113,8 @@ def generate_stc(fwd, labels, stc_data, tmin, tstep, value_fun=None):
 
     Parameters
     ----------
-    fwd : dict
-        A forward solution
+    src : list of dict
+        The source space
     labels : list of dict
         The labels
     stc_data : array (shape: len(labels) x n_times)
@@ -139,7 +141,7 @@ def generate_stc(fwd, labels, stc_data, tmin, tstep, value_fun=None):
     hemi_to_ind['lh'],  hemi_to_ind['rh'] = 0, 1
     for i, label in enumerate(labels):
         hemi_ind = hemi_to_ind[label['hemi']]
-        src_sel = np.intersect1d(fwd['src'][hemi_ind]['vertno'],
+        src_sel = np.intersect1d(src[hemi_ind]['vertno'],
                                  label['vertices'])
         if value_fun is not None:
             idx_sel = np.searchsorted(label['vertices'], src_sel)
@@ -179,172 +181,3 @@ def generate_stc(fwd, labels, stc_data, tmin, tstep, value_fun=None):
 
     stc = _make_stc(stc_data, tmin, tstep, vertno)
     return stc
-
-
-def mesh_dist(tris, vert):
-    """ Generate an adjacency matrix where the entries are the distances
-        between neighboring vertices
-
-    Parameters:
-    -----------
-    tris : array (n_tris x 3)
-        Mesh triangulation
-    vert : array (n_vert x 3)
-        Vertex locations
-
-    Returns:
-    --------
-    dist_matrix : scipy.sparse.csr_matrix
-        Sparse matrix with distances between adjacent vertices
-    """
-    edges = mesh_edges(tris).tocoo()
-
-    # Euclidean distances between neighboring vertices
-    dist = np.sqrt(np.sum((vert[edges.row, :] - vert[edges.col, :]) ** 2,
-                          axis=1))
-
-    dist_matrix = csr_matrix((dist, (edges.row, edges.col)), shape=edges.shape)
-
-    return dist_matrix
-
-
-def _verts_within_dist(graph, source, max_dist):
-    """ Find all vertices wihin a maximum geodesic distance from source
-
-    Parameters:
-    -----------
-    graph : scipy.sparse.csr_matrix
-        Sparse matrix with distances between adjacent vertices
-    source : int
-        Source vertex
-    max_dist: float
-        Maximum geodesic distance
-
-    Returns:
-    --------
-    verts : array
-        Vertices within max_dist
-    dist : array
-        Distances from source vertex
-    """
-    dist_map = {}
-    dist_map[source] = 0
-    verts_added_last = [source]
-    # add neighbors until no more neighbors within max_dist can be found
-    while len(verts_added_last) > 0:
-        verts_added = []
-        for i in verts_added_last:
-            v_dist = dist_map[i]
-            row = graph[i, :]
-            neighbor_vert = row.indices
-            neighbor_dist = row.data
-            for j, d in zip(neighbor_vert, neighbor_dist):
-                n_dist = v_dist + d
-                if j in dist_map:
-                    if n_dist < dist_map[j]:
-                        dist_map[j] = n_dist
-                else:
-                    if n_dist <= max_dist:
-                        dist_map[j] = n_dist
-                        # we found a new vertex within max_dist
-                        verts_added.append(j)
-        verts_added_last = verts_added
-
-    verts = np.sort(np.array(dist_map.keys(), dtype=np.int))
-    dist = np.array([dist_map[v] for v in verts])
-
-    return verts, dist
-
-
-def circular_source_labels(subject, seeds, extents, hemis, subjects_dir=None):
-    """ Generate circular labels in source space
-
-    This function generates a number of labels in source space by growing
-    regions starting from the vertices defined in "seeds". For each seed, a
-    label is generated containing all vertices within a maximum geodesic
-    distance on the white matter surface from the seed.
-
-    Note: "extents" and "hemis" can either be arrays with the same length as
-          seeds, which allows using a different extent and hemisphere for each
-          label, or integers, in which case the same extent and hemisphere is
-          used for each label.
-
-    Parameters:
-    ----------
-    subject : string
-        Name of the subject as in SUBJECTS_DIR
-    seeds : array or int
-        Seed vertex numbers
-    extents : array or float
-        Extents (radius in mm) of the labels
-    hemis : array or int
-        Hemispheres to use for the labels (0: left, 1: right)
-    subjects_dir : string
-        Path to SUBJECTS_DIR if not set in the environment
-
-    Returns:
-    --------
-    labels : list
-        List with lables. Each label is a dictionary with keys:
-            comment     Comment with seed vertex and extent
-            hemi        Hemisphere of the label ('lh', or 'rh')
-            vertices    Vertex indices (0 based)
-            pos         Locations in millimeters
-            values      Distances in millimeters from seed
-    """
-    if subjects_dir is None:
-        if 'SUBJECTS_DIR' in os.environ:
-            subjects_dir = os.environ['SUBJECTS_DIR']
-        else:
-            raise ValueError('SUBJECTS_DIR environment variable not set')
-
-    # make sure the inputs are arrays
-    seeds = np.atleast_1d(seeds)
-    extents = np.atleast_1d(extents)
-    hemis = np.atleast_1d(hemis)
-
-    n_seeds = len(seeds)
-
-    if len(extents) != 1 and len(extents) != n_seeds:
-        raise ValueError('The extents parameter has to be of length 1 or '
-                         'len(seeds)')
-
-    if len(hemis) != 1 and len(hemis) != n_seeds:
-        raise ValueError('The hemis parameter has to be of length 1 or '
-                         'len(seeds)')
-
-    # make the arrays the same length as seeds
-    if len(extents) == 1:
-        extents = np.tile(extents, n_seeds)
-
-    if len(hemis) == 1:
-        hemis = np.tile(hemis, n_seeds)
-
-    hemis = ['lh' if h == 0 else 'rh' for h in hemis]
-
-    # load the surfaces and create the distance graphs
-    tris, vert, dist = {}, {}, {}
-    for hemi in set(hemis):
-        surf_fname = os.path.join(subjects_dir, subject, 'surf',
-                                  hemi + '.white')
-        vert[hemi], tris[hemi] = read_surface(surf_fname)
-        dist[hemi] = mesh_dist(tris[hemi], vert[hemi])
-
-    # create the patches
-    labels = []
-    for seed, extent, hemi in zip(seeds, extents, hemis):
-        label_verts, label_dist = _verts_within_dist(dist[hemi], seed, extent)
-
-        # create a label
-        label = dict()
-        label['comment'] = 'Circular label: seed=%d, extent=%0.1fmm' %\
-                           (seed, extent)
-        label['vertices'] = label_verts
-        label['pos'] = vert[hemi][label_verts]
-        label['values'] = label_dist
-        label['hemi'] = hemi
-
-        labels.append(label)
-
-    return labels
-
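
Worth noting for readers of the generate_sparse_stc hunk above: the old code
assumed the rows of stc_data were already ordered with left-hemisphere
sources first, while the new code collects the rows per hemisphere and then
stacks them, so the order of the labels no longer matters. A tiny numpy
sketch of that regrouping, on toy arrays rather than MNE objects:

    import numpy as np

    stc_data = np.arange(6).reshape(3, 2)     # one row of data per label
    label_hemis = ['rh', 'lh', 'rh']          # hemisphere each label landed in

    lh_data = [d for d, h in zip(stc_data, label_hemis) if h == 'lh']
    rh_data = [d for d, h in zip(stc_data, label_hemis) if h == 'rh']
    data = np.r_[lh_data, rh_data]            # left-hemisphere rows come first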
diff --git a/mne/source_estimate.py b/mne/source_estimate.py
index cccb049..4bf741f 100644
--- a/mne/source_estimate.py
+++ b/mne/source_estimate.py
@@ -9,6 +9,7 @@ import copy
 from math import ceil
 import numpy as np
 from scipy import sparse
+from scipy.sparse import csr_matrix
 
 from .parallel import parallel_func
 
@@ -552,6 +553,35 @@ def mesh_edges(tris):
     return edges
 
 
+def mesh_dist(tris, vert):
+    """Compute adjacency matrix weighted by distances
+
+    It generates an adjacency matrix where the entries are the distances
+    between neighboring vertices.
+
+    Parameters:
+    -----------
+    tris : array (n_tris x 3)
+        Mesh triangulation
+    vert : array (n_vert x 3)
+        Vertex locations
+
+    Returns:
+    --------
+    dist_matrix : scipy.sparse.csr_matrix
+        Sparse matrix with distances between adjacent vertices
+    """
+    edges = mesh_edges(tris).tocoo()
+
+    # Euclidean distances between neighboring vertices
+    dist = np.sqrt(np.sum((vert[edges.row, :] - vert[edges.col, :]) ** 2,
+                          axis=1))
+
+    dist_matrix = csr_matrix((dist, (edges.row, edges.col)), shape=edges.shape)
+
+    return dist_matrix
+
+
 def _morph_buffer(data, idx_use, e, smooth, n_vertices, nearest, maps):
     n_iter = 100  # max nb of smoothing iterations
     for k in range(n_iter):
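
A small sketch exercising the mesh_dist helper that moved into
source_estimate.py, run on a toy two-triangle mesh (the coordinates are
arbitrary and only serve to illustrate the returned sparse matrix):

    import numpy as np
    from mne.source_estimate import mesh_dist

    # toy mesh: four vertices, two triangles sharing the edge (1, 2)
    vert = np.array([[0., 0., 0.],
                     [1., 0., 0.],
                     [0., 1., 0.],
                     [1., 1., 0.]])
    tris = np.array([[0, 1, 2],
                     [1, 3, 2]])

    adjacency = mesh_dist(tris, vert)   # sparse CSR matrix of edge lengths
    print(adjacency[0, 1])              # Euclidean distance between vertices 0 and 1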

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-med/python-mne.git


