[med-svn] [python-mne] 30/376: ENH : adding code to read epochs + debug in raw

Yaroslav Halchenko debian at onerussian.com
Fri Nov 27 17:22:00 UTC 2015


This is an automated email from the git hooks/post-receive script.

yoh pushed a commit to annotated tag v0.1
in repository python-mne.

commit 05264dff36daa463ec8b0831feb2188b06ce911a
Author: Alexandre Gramfort <alexandre.gramfort at inria.fr>
Date:   Thu Jan 13 15:54:42 2011 -0500

    ENH : adding code to read epochs + debug in raw
---
 examples/read_epochs.py |  39 ++++++++
 examples/read_raw.py    |  13 +--
 fiff/__init__.py        |   2 +
 fiff/diff.py            |  32 +++++++
 fiff/epochs.py          | 162 +++++++++++++++++++++++++++++++++
 fiff/meas_info.py       |  17 ++++
 fiff/proj.py            | 108 ++++++++++++++++++++++
 fiff/raw.py             | 235 +++++++++++++++++++++++++++++++++++++++---------
 fiff/tag.py             |  12 +++
 fiff/tests/test_raw.py  |  13 ++-
 10 files changed, 577 insertions(+), 56 deletions(-)

diff --git a/examples/read_epochs.py b/examples/read_epochs.py
new file mode 100644
index 0000000..311eaa9
--- /dev/null
+++ b/examples/read_epochs.py
@@ -0,0 +1,39 @@
+"""Example of reading epochs from a raw FIF file
+"""
+print __doc__
+
+# Authors : Alexandre Gramfort, gramfort at nmr.mgh.harvard.edu
+#           Matti Hamalainen, msh at nmr.mgh.harvard.edu
+
+import fiff
+
+###############################################################################
+# Set parameters
+raw_fname = 'MNE-sample-data/MEG/sample/sample_audvis_raw.fif'
+event_name = 'MNE-sample-data/MEG/sample/sample_audvis_raw-eve.fif'
+event_id = 1
+tmin = -0.2
+tmax = 0.5
+pick_all = True
+
+#   Setup for reading the raw data
+raw = fiff.setup_read_raw(raw_fname)
+events = fiff.read_events(event_name)
+
+if pick_all:
+   # Pick all
+   picks = range(raw['info']['nchan'])
+else:
+   #   Set up pick list: MEG + STI 014 - bad channels (modify to your needs)
+   include = ['STI 014'];
+   want_meg = True
+   want_eeg = False
+   want_stim = False
+   picks = fiff.pick_types(raw['info'], want_meg, want_eeg, want_stim,
+                                include, raw['info']['bads'])
+
+data, times, channel_names = fiff.read_epochs(raw, events, event_id,
+                                                    tmin, tmax, picks=picks)
+
+# for epoch in data:
+    
\ No newline at end of file
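
The example stops at a commented-out loop over the returned epochs. Per the read_epochs docstring later in this commit, each element of data is a dict whose 'epoch' entry is an array of shape (n_channels, n_times). A minimal sketch of consuming that structure, with synthetic arrays standing in for real data (no FIF file or fiff import assumed):

import numpy as np

# Synthetic stand-in for the list returned by fiff.read_epochs: each entry
# holds an 'epoch' array of shape (n_channels, n_times).
n_epochs, n_channels, n_times = 10, 5, 421
rng = np.random.RandomState(0)
data = [dict(epoch=rng.randn(n_channels, n_times)) for _ in range(n_epochs)]

# Stack into one (n_epochs, n_channels, n_times) array and average across
# epochs to get an evoked-like response.
epochs = np.array([d['epoch'] for d in data])
evoked = epochs.mean(axis=0)
print(epochs.shape, evoked.shape)   # (10, 5, 421) (5, 421)
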
diff --git a/examples/read_raw.py b/examples/read_raw.py
index 79207d1..c078e23 100644
--- a/examples/read_raw.py
+++ b/examples/read_raw.py
@@ -9,16 +9,17 @@ fname = 'MNE-sample-data/MEG/sample/sample_audvis_raw.fif'
 
 raw = fiff.setup_read_raw(fname)
 
-nchan = raw['info']['nchan']
-ch_names = raw['info']['ch_names']
-meg_channels_idx = [k for k in range(nchan) if ch_names[k][:3]=='MEG']
-meg_channels_idx = meg_channels_idx[:5]
+meg_channels_idx = fiff.pick_types(raw['info'], meg=True)
+meg_channels_idx = meg_channels_idx[:5] # take 5 first
 
-data, times = fiff.read_raw_segment_times(raw, from_=100, to=115,
-                                          sel=meg_channels_idx)
+start, stop = raw.time_to_index(100, 115) # 100 s to 115 s data segment
+data, times = raw[meg_channels_idx, start:stop]
+
+raw.close()
 
 ###############################################################################
 # Show MEG data
+pl.close('all')
 pl.plot(times, data.T)
 pl.xlabel('time (ms)')
 pl.ylabel('MEG data (T)')
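
The rewritten example uses the slicing support that this commit adds to the Raw class in fiff/raw.py: indexing raw[sel, start:stop] hands a (selection, slice) tuple to __getitem__, and time_to_index is just multiplication by the sampling rate. A toy class (an illustration, not the Raw class itself) showing the same protocol:

import numpy as np

class ToyRaw(object):
    """Minimal illustration of the raw[sel, start:stop] indexing pattern."""
    def __init__(self, data, sfreq):
        self._data = data          # shape (n_channels, n_samples)
        self.sfreq = sfreq

    def __getitem__(self, item):
        sel, time_slice = item     # Python passes a (selection, slice) tuple
        start = 0 if time_slice.start is None else time_slice.start
        stop = self._data.shape[1] if time_slice.stop is None else time_slice.stop
        data = self._data[sel, start:stop]
        times = np.arange(start, stop) / float(self.sfreq)
        return data, times

toy = ToyRaw(np.random.randn(3, 1000), sfreq=100.0)
segment, times = toy[[0, 2], 100:250]
print(segment.shape, times[0], times[-1])   # (2, 150) 1.0 2.49
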
diff --git a/fiff/__init__.py b/fiff/__init__.py
index 57a1f3c..72ca143 100644
--- a/fiff/__init__.py
+++ b/fiff/__init__.py
@@ -12,4 +12,6 @@ from .stc import read_stc, write_stc
 from .bem_surfaces import read_bem_surfaces
 from .inverse import read_inverse_operator
 from .pick import pick_types
+from .meas_info import get_current_comp
+from .epochs import read_epochs
 
diff --git a/fiff/diff.py b/fiff/diff.py
new file mode 100644
index 0000000..4b18c40
--- /dev/null
+++ b/fiff/diff.py
@@ -0,0 +1,32 @@
+import numpy as np
+
+def is_equal(first, second):
+    """Check whether two Python structures are equal. Designed to
+    handle dict, list, np.ndarray, etc.
+    """
+    all_equal = True
+    # Check all keys in first dict
+    if type(first) != type(second):
+        all_equal = False
+    if isinstance(first, dict):
+        for key in first.keys():
+            if (not second.has_key(key)):
+                print "Missing key %s in %s" % (key, second)
+                all_equal = False
+            else:
+                if not is_equal(first[key], second[key]):
+                    all_equal = False
+    elif isinstance(first, np.ndarray):
+        if not np.allclose(first, second):
+            all_equal = False
+    elif isinstance(first, list):
+        for a, b in zip(first, second):
+            if not is_equal(a, b):
+                print '%s and\n%s are different' % (a, b)
+                all_equal = False
+    else:
+        if first != second:
+            print '%s and\n%s are different' % (first, second)
+            all_equal = False
+    return all_equal
+
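
is_equal walks nested containers recursively, comparing ndarray leaves with a tolerance. A compact, self-contained sketch of the same idea (an illustration only, not the function above):

import numpy as np

def roughly_equal(a, b):
    # Sketch of the same recursive idea: dicts by key, sequences element-wise,
    # ndarrays with a tolerance, everything else with ==.
    if isinstance(a, dict) and isinstance(b, dict):
        return a.keys() == b.keys() and all(roughly_equal(a[k], b[k]) for k in a)
    if isinstance(a, np.ndarray) or isinstance(b, np.ndarray):
        return np.allclose(a, b)
    if isinstance(a, (list, tuple)) and isinstance(b, (list, tuple)):
        return len(a) == len(b) and all(roughly_equal(x, y) for x, y in zip(a, b))
    return a == b

first = dict(sfreq=1000.0, cals=np.ones(3), names=['MEG 0111', 'MEG 0112'])
second = dict(sfreq=1000.0, cals=np.ones(3) + 1e-12, names=['MEG 0111', 'MEG 0112'])
print(roughly_equal(first, second))   # True
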
diff --git a/fiff/epochs.py b/fiff/epochs.py
new file mode 100644
index 0000000..4928ca9
--- /dev/null
+++ b/fiff/epochs.py
@@ -0,0 +1,162 @@
+# Authors : Alexandre Gramfort, gramfort at nmr.mgh.harvard.edu
+#           Matti Hamalainen, msh at nmr.mgh.harvard.edu
+
+import numpy as np
+import fiff
+
+
+def read_epochs(raw, events, event_id, tmin, tmax, picks=None,
+                 keep_comp=False, dest_comp=0):
+    """Read epochs from a raw dataset
+
+    Parameters
+    ----------
+    raw : Raw object
+        Returned by the setup_read_raw function
+
+    events : array, of shape [n_events, 3]
+        Returned by the read_events function
+
+    event_id : int
+        The id of the event to consider
+
+    tmin : float
+        Start time before event
+
+    tmax : float
+        End time after event
+
+    Returns
+    -------
+    data : list of epochs
+        An epoch is a dict with keys:
+            epoch    the epoch, channel by channel
+            event    event #
+            tmin     starting time in the raw data file (initial skip omitted)
+            tmax     ending time in the raw data file (initial skip omitted)
+
+    times : array
+        The time points of the samples, in seconds
+
+    ch_names : list of strings
+        Names of the channels included
+
+    Notes
+    -----
+    NOTE 1: The purpose of this function is to demonstrate the raw data reading
+    routines. You may need to modify this for your purposes
+
+    NOTE 2: You need to run mne_process_raw once as
+
+    mne_process_raw --raw <fname> --projoff
+
+    to create the fif-format event file (or open the file in mne_browse_raw).
+    """
+
+    ch_names = [raw['info']['ch_names'][k] for k in picks]
+    sfreq = raw['info']['sfreq']
+
+    #   Set up projection
+    if raw['info']['projs'] is None:
+        print 'No projector specified for these data'
+        raw['proj'] = []
+    else:
+        #   Activate the projection items
+        for proj in raw['info']['projs']:
+            proj['active'] = True
+
+        print '%d projection items activated\n' % len(raw['info']['projs'])
+
+        #   Create the projector
+        proj, nproj = fiff.proj.make_projector_info(raw['info'])
+        if nproj == 0:
+            print 'The projection vectors do not apply to these channels'
+            raw['proj'] = None
+        else:
+            print 'Created an SSP operator (subspace dimension = %d)' % nproj
+            raw['proj'] = proj
+
+    #   Set up the CTF compensator
+    current_comp = fiff.get_current_comp(raw['info'])
+    if current_comp > 0:
+        print 'Current compensation grade : %d\n' % current_comp
+
+    if keep_comp:
+        dest_comp = current_comp
+
+    if current_comp != dest_comp:
+        raw['comp'] = fiff.raw.make_compensator(raw['info'], current_comp,
+                                             dest_comp)
+        print 'Appropriate compensator added to change to grade %d.' % (
+                                                                    dest_comp)
+
+    # #  Read the events
+    # if event_fname is None:
+    #     if fname.endswith('.fif'):
+    #         event_name = '%s-eve.fif' % fname[:-4]
+    #     else:
+    #         raise ValueError, 'Raw file name does not end properly'
+    #
+    #     events = fiff.read_events(event_name)
+    #     print 'Events read from %s\n' % event_name
+    # else:
+    #     #   Binary file
+    #     if event_name.endswith('-eve.fif'):
+    #         events = fiff.read_events(event_name)
+    #         print 'Binary event file %s read' % event_name
+    #     else:
+    #         #   Text file
+    #         events = np.loadtxt(event_name)
+    #         if events.shape[0] < 1:
+    #             raise ValueError, 'No data in the event file'
+    #
+    #         #   Convert time to samples if sample number is negative
+    #         events[events[:,0] < 0,0] = events[:,1] * sfreq
+    #         #    Select the columns of interest (convert to integers)
+    #         events = np.array(events[:,[0, 2, 3]], dtype=np.int32)
+    #         #    New format?
+    #         if events[0,1] == 0 and events[0,2] == 0:
+    #             print 'The text event file %s is in the new format' % event_name
+    #             if events[0,0] != raw['first_samp']:
+    #                 raise ValueError, ('This new format event file is not compatible'
+    #                                    ' with the raw data')
+    #         else:
+    #             print 'The text event file %s is in the old format\n' % event_name
+    #             #   Offset with first sample
+    #             events[:,0] += raw['first_samp']
+
+    #    Select the desired events
+    selected = np.logical_and(events[:, 1] == 0, events[:, 2] == event_id)
+    n_events = np.sum(selected)
+    if n_events > 0:
+        print '%d matching events found' % n_events
+    else:
+        raise ValueError, 'No desired events found.'
+
+    data = list()
+
+    for p, event_samp in enumerate(events[selected, 0]):
+        #       Read a data segment
+        start = event_samp + tmin*sfreq
+        stop = event_samp + tmax*sfreq
+        epoch, _ = raw[picks, start:stop]
+
+        if p == 0:
+            times = np.arange(start - event_samp, stop - event_samp,
+                              dtype=np.float) / sfreq
+
+        d = dict()
+        d['epoch'] = epoch
+        d['event'] = event_id
+        d['tmin'] = (float(start) - float(raw['first_samp'])) / sfreq
+        d['tmax'] = (float(stop) - float(raw['first_samp'])) / sfreq
+        data.append(d)
+
+    print 'Read %d epochs, %d samples each.' % (len(data),
+                                                data[0]['epoch'].shape[1])
+
+    #   Remember to close the file descriptor
+    raw['fid'].close()
+    print 'File closed.'
+
+    return data, times, ch_names
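
The heart of read_epochs is the per-event windowing: each selected event sample is expanded to the slice from event_samp + tmin*sfreq up to event_samp + tmax*sfreq and read through the raw slicing interface. A self-contained sketch of that arithmetic on synthetic data (no FIF file or fiff import assumed):

import numpy as np

# Synthetic continuous recording: 5 channels, 10 s at 600 Hz.
sfreq = 600.0
raw_data = np.random.randn(5, int(10 * sfreq))
event_samples = np.array([1200, 2400, 3600])   # stand-ins for events[selected, 0]
tmin, tmax = -0.2, 0.5

epochs = []
for event_samp in event_samples:
    start = int(event_samp + tmin * sfreq)     # first sample of the epoch
    stop = int(event_samp + tmax * sfreq)      # one past the last sample
    epochs.append(raw_data[:, start:stop])

times = np.arange(int(tmin * sfreq), int(tmax * sfreq)) / sfreq
# -> 3 epochs of shape (5, 420); times run from -0.2 to just under 0.5
print(len(epochs), epochs[0].shape, times[0], times[-1])
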
diff --git a/fiff/meas_info.py b/fiff/meas_info.py
index c6cb322..144a7ca 100644
--- a/fiff/meas_info.py
+++ b/fiff/meas_info.py
@@ -251,3 +251,20 @@ def read_meas_info(source, tree=None):
        fid.close()
 
     return info, meas
+
+
+def get_current_comp(info):
+    """Get the current compensation in effect in the data
+    """
+    comp = 0;
+    first_comp = -1;
+    for k, chan in enumerate(info['chs']):
+        if chan['kind'] == FIFF.FIFFV_MEG_CH:
+            comp = int(chan['coil_type']) >> 16
+            if first_comp < 0:
+                first_comp = comp;
+            elif comp != first_comp:
+                raise ValueError, ('Compensation is not set equally on '
+                                   'all MEG channels')
+
+    return comp
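
get_current_comp relies on the FIF convention that the CTF compensation grade is packed into the upper 16 bits of a channel's coil_type. A tiny worked example with a made-up coil_type value:

# Hypothetical value: grade 3 packed above a base coil type of 1.
coil_type = (3 << 16) | 0x0001
grade = int(coil_type) >> 16       # same shift as get_current_comp
base_coil = coil_type & 0xFFFF
print(grade, base_coil)            # 3 1
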
diff --git a/fiff/proj.py b/fiff/proj.py
index caf17b5..010307f 100644
--- a/fiff/proj.py
+++ b/fiff/proj.py
@@ -1,3 +1,7 @@
+from math import sqrt
+import numpy as np
+from scipy import linalg
+
 from .tree import dir_tree_find
 from .constants import FIFF
 from .tag import find_tag
@@ -150,3 +154,107 @@ def write_proj(fid, projs):
         end_block(fid,FIFF.FIFFB_PROJ_ITEM)
 
     end_block(fid, FIFF.FIFFB_PROJ)
+
+###############################################################################
+# Utils
+
+def make_projector(projs, ch_names, bads=[]):
+    """
+    %
+    % [proj,nproj,U] = mne_make_projector(projs,ch_names,bads)
+    %
+    % proj     - The projection operator to apply to the data
+    % nproj    - How many items in the projector
+    % U        - The orthogonal basis of the projection vectors (optional)
+    %
+    % Make an SSP operator
+    %
+    % projs    - A set of projection vectors
+    % ch_names - A cell array of channel names
+    % bads     - Bad channels to exclude
+    %
+    """
+    nchan = len(ch_names)
+    if len(ch_names) == 0:
+        raise ValueError, 'No channel names specified'
+
+    proj  = np.eye(nchan, nchan)
+    nproj = 0;
+    U     = [];
+
+    #   Check trivial cases first
+    if projs is None:
+       return proj, nproj, U
+
+    nactive = 0
+    nvec = 0
+    for p in projs:
+        if p['active']:
+            nactive += 1
+            nvec += p['data']['nrow']
+
+    if nactive == 0:
+        return proj, nproj, U
+
+    #   Pick the appropriate entries
+    vecs = np.zeros((nchan, nvec))
+    nvec = 0
+    nonzero = 0
+    for k, p in enumerate(projs):
+        if p['active']:
+            one = p # XXX really necessary?
+            if len(one['data']['col_names']) != \
+                        len(np.unique(one['data']['col_names'])):
+                raise ValueError, ('Channel name list in projection item %d'
+                                  ' contains duplicate items' % k)
+
+            # Get the two selection vectors to pick correct elements from
+            # the projection vectors omitting bad channels
+            sel = []
+            vecsel = []
+            for c, name in enumerate(ch_names):
+                if name in one['data']['col_names']:
+                    sel.append(c)
+                    vecsel.append(one['data']['col_names'].index(name))
+
+            # If there is something to pick, pick it
+            if len(sel) > 0:
+                for v in range(one['data']['nrow']):
+                    vecs[sel, nvec+v] = one['data']['data'][v,vecsel].T
+
+            #   Rescale for more straightforward detection of small singular values
+            for v in range(one['data']['nrow']):
+                onesize = sqrt(np.sum(vecs[:,nvec+v] * vecs[:, nvec + v]))
+                if onesize > 0:
+                    vecs[:, nvec+v] /= onesize
+                    nonzero += 1
+
+            nvec += one['data']['nrow']
+
+    #   Check whether all of the vectors are exactly zero
+    if nonzero == 0:
+        return proj, nproj, U
+
+    #   Reorthogonalize the vectors
+    U, S, V = linalg.svd(vecs[:,:nvec], full_matrices=False)
+    #   Throw away the linearly dependent guys
+    nvec = np.sum((S / S[0]) > 1e-2)
+    U = U[:,:nvec]
+
+    #   Here is the celebrated result
+    proj  -= np.dot(U, U.T)
+    nproj = nvec
+
+    return proj, nproj, U
+
+
+def make_projector_info(info):
+    """
+    %
+    % [proj,nproj] = mne_make_projector_info(info)
+    %
+    % Make an SSP operator using the meas info
+    %
+    """
+    proj, nproj, _ = make_projector(info['projs'], info['ch_names'], info['bads'])
+    return proj, nproj
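
make_projector builds the SSP operator by orthonormalizing the projection vectors with an SVD and subtracting the outer product of the retained basis, i.e. proj = I - U U^T. A standalone sketch of that construction with random vectors:

import numpy as np
from scipy import linalg

# Orthonormalize a set of projection vectors and build P = I - U U^T.
rng = np.random.RandomState(42)
n_channels = 8
vecs = rng.randn(n_channels, 3)              # three projection vectors (columns)

U, S, V = linalg.svd(vecs, full_matrices=False)
nvec = np.sum((S / S[0]) > 1e-2)             # keep the significant directions
U = U[:, :nvec]
proj = np.eye(n_channels) - np.dot(U, U.T)

# Anything lying in the span of the projection vectors is removed.
sample = vecs[:, 0] + 0.5 * vecs[:, 2]
print(np.allclose(np.dot(proj, sample), 0))  # True
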
diff --git a/fiff/raw.py b/fiff/raw.py
index 81174bd..a529f90 100644
--- a/fiff/raw.py
+++ b/fiff/raw.py
@@ -8,6 +8,35 @@ from .tree import dir_tree_find
 from .tag import read_tag
 
 
+class Raw(dict):
+    """Raw data set"""
+    def __getitem__(self, item):
+        """getting raw data content with python slicing"""
+        if isinstance(item, tuple): # slicing required
+            if len(item) == 2: # channels and time instants
+                time_slice = item[1]
+                sel = item[0]
+            else:
+                time_slice = item[0]
+                sel = None
+            start, stop, step = time_slice.start, time_slice.stop, time_slice.step
+            if step is not None:
+                raise ValueError('step needs to be 1 : %d given' % step)
+            return read_raw_segment(self, start=start, stop=stop, sel=sel)
+        else:
+            return super(Raw, self).__getitem__(item)
+
+    def time_to_index(self, *args):
+        indices = []
+        for time in args:
+            ind = int(time * self['info']['sfreq'])
+            indices.append(ind)
+        return indices
+
+    def close(self):
+        self['fid'].close()
+
+
 def setup_read_raw(fname, allow_maxshield=False):
     """Read information about raw data file
 
@@ -50,7 +79,7 @@ def setup_read_raw(fname, allow_maxshield=False):
     #   Set up the output structure
     info['filename'] = fname
 
-    data = dict(fid=fid, info=info, first_samp=0, last_samp=0)
+    data = Raw(fid=fid, info=info, first_samp=0, last_samp=0)
 
     #   Process the directory
     directory = raw['directory']
@@ -83,7 +112,6 @@ def setup_read_raw(fname, allow_maxshield=False):
         if ent.kind == FIFF.FIFF_DATA_SKIP:
             tag = read_tag(fid, ent.pos)
             nskip = int(tag.data)
-            print nskip
         elif ent.kind == FIFF.FIFF_DATA_BUFFER:
             #   Figure out the number of samples in this buffer
             if ent.type == FIFF.FIFFT_DAU_PACK16:
@@ -101,7 +129,7 @@ def setup_read_raw(fname, allow_maxshield=False):
 
             #  Do we have an initial skip pending?
             if first_skip > 0:
-                first_samp += nsamp*first_skip
+                first_samp += nsamp * first_skip
                 data['first_samp'] = first_samp
                 first_skip = 0
 
@@ -116,7 +144,7 @@ def setup_read_raw(fname, allow_maxshield=False):
 
             #  Add a data buffer
             rawdir.append(dict(ent=ent, first=first_samp,
-                               last=first_samp + nsamp -1,
+                               last=first_samp + nsamp - 1,
                                nsamp=nsamp))
             first_samp += nsamp
 
@@ -125,7 +153,8 @@ def setup_read_raw(fname, allow_maxshield=False):
     #   Add the calibration factors
     cals = np.zeros(data['info']['nchan'])
     for k in range(data['info']['nchan']):
-        cals[k] = data['info']['chs'][k]['range']*data['info']['chs'][k]['cal']
+        cals[k] = data['info']['chs'][k]['range'] * \
+                  data['info']['chs'][k]['cal']
 
     data['cals'] = cals
     data['rawdir'] = rawdir
@@ -140,7 +169,7 @@ def setup_read_raw(fname, allow_maxshield=False):
     return data
 
 
-def read_raw_segment(raw, from_=None, to=None, sel=None):
+def read_raw_segment(raw, start=None, stop=None, sel=None):
     """Read a chunck of raw data
 
     Parameters
@@ -148,12 +177,13 @@ def read_raw_segment(raw, from_=None, to=None, sel=None):
     raw: dict
         The dict returned by setup_read_raw
 
-    from_: int
-        first sample to include. If omitted, defaults to the first
+    start: int, (optional)
+        first sample to include (first is 0). If omitted, defaults to the first
         sample in data
 
-    to: int
-        Last sample to include. If omitted, defaults to the last sample in data
+    stop: int, (optional)
+        First sample not to include.
+        If omitted, data is included to the end.
 
     sel: array, optional
         Indices of channels to select
@@ -171,26 +201,26 @@ def read_raw_segment(raw, from_=None, to=None, sel=None):
 
     """
 
-    if to is None:
-        to = raw['last_samp']
-    if from_ is None:
-        from_ = raw['first_samp']
+    if stop is None:
+        stop = raw['last_samp'] + 1
+    if start is None:
+        start = raw['first_samp']
 
     #  Initial checks
-    from_ = int(from_)
-    to = int(to)
-    if from_ < raw['first_samp']:
-        from_ = raw['first_samp']
+    start = int(start)
+    stop = int(stop)
+    if start < raw['first_samp']:
+        start = raw['first_samp']
 
-    if to > raw['last_samp']:
-        to = raw['last_samp']
+    if stop >= raw['last_samp']:
+        stop = raw['last_samp'] + 1
 
-    if from_ > to:
+    if start >= stop:
         raise ValueError, 'No data in this range'
 
     print 'Reading %d ... %d  =  %9.3f ... %9.3f secs...' % (
-                       from_, to, from_ / float(raw['info']['sfreq']),
-                       to / float(raw['info']['sfreq']))
+                       start, stop - 1, start / float(raw['info']['sfreq']),
+                       (stop - 1) / float(raw['info']['sfreq'])),
 
     #  Initialize the data and calibration vector
     nchan = raw['info']['nchan']
@@ -198,7 +228,7 @@ def read_raw_segment(raw, from_=None, to=None, sel=None):
     cal = np.diag(raw['cals'].ravel())
 
     if sel is None:
-        data = np.empty((nchan, to - from_))
+        data = np.empty((nchan, stop - start))
         if raw['proj'] is None and raw['comp'] is None:
             mult = None
         else:
@@ -210,7 +240,7 @@ def read_raw_segment(raw, from_=None, to=None, sel=None):
                 mult = raw['proj'] * raw['comp'] * cal
 
     else:
-        data = np.empty((len(sel), to - from_))
+        data = np.empty((len(sel), stop - start))
         if raw['proj'] is None and raw['comp'] is None:
             mult = None
             cal = np.diag(raw['cals'][sel].ravel())
@@ -223,6 +253,7 @@ def read_raw_segment(raw, from_=None, to=None, sel=None):
                 mult = raw['proj'][sel,:] * raw['comp'] * cal
 
     do_debug = False
+    # do_debug = True
     if cal is not None:
         from scipy import sparse
         cal = sparse.csr_matrix(cal)
@@ -235,7 +266,7 @@ def read_raw_segment(raw, from_=None, to=None, sel=None):
         this = raw['rawdir'][k]
 
         #  Do we need this buffer
-        if this['last'] > from_:
+        if this['last'] >= start:
             if this['ent'] is None:
                 #  Take the easy route: skip is translated to zeros
                 if do_debug:
@@ -262,29 +293,29 @@ def read_raw_segment(raw, from_=None, to=None, sel=None):
                                                   nchan).astype(np.float).T
 
             #  The picking logic is a bit complicated
-            if to > this['last'] and from_ <= this['first']:
+            if stop - 1 > this['last'] and start < this['first']:
                 #    We need the whole buffer
                 first_pick = 0
                 last_pick = this['nsamp']
                 if do_debug:
                     print 'W'
 
-            elif from_ > this['first']:
-                first_pick = from_ - this['first']
-                if to < this['last']:
+            elif start >= this['first']:
+                first_pick = start - this['first']
+                if stop - 1 <= this['last']:
                     #   Something from the middle
-                    last_pick = this['nsamp'] + to - this['last']
+                    last_pick = this['nsamp'] + stop - this['last'] - 1
                     if do_debug:
                         print 'M'
                 else:
                     #   From the middle to the end
-                    last_pick = this['nsamp'] - 1
+                    last_pick = this['nsamp']
                     if do_debug:
                         print 'E'
             else:
                 #    From the beginning to the middle
                 first_pick = 0
-                last_pick = to - this['first']
+                last_pick = stop - this['first']
                 if do_debug:
                     print 'B'
 
@@ -294,17 +325,19 @@ def read_raw_segment(raw, from_=None, to=None, sel=None):
                 data[:, dest:dest+picksamp] = one[:, first_pick:last_pick]
                 dest += picksamp
 
-       #   Done?
-        if this['last'] >= to:
-            print ' [done]\n'
+        #   Done?
+        if this['last'] >= stop-1:
+            print ' [done]'
             break
 
-    times = np.arange(from_, to) / raw['info']['sfreq']
+    times = np.arange(start, stop) / raw['info']['sfreq']
+
+    raw['fid'].seek(0, 0) # Go back to beginning of the file
 
     return data, times
 
 
-def read_raw_segment_times(raw, from_, to, sel=None):
+def read_raw_segment_times(raw, start, stop, sel=None):
     """Read a chunck of raw data
 
     Parameters
@@ -312,10 +345,10 @@ def read_raw_segment_times(raw, from_, to, sel=None):
     raw: dict
         The dict returned by setup_read_raw
 
-    from_: float
+    start: float
         Starting time of the segment in seconds
 
-    to: float
+    stop: float
         End time of the segment in seconds
 
     sel: array, optional
@@ -333,11 +366,11 @@ def read_raw_segment_times(raw, from_, to, sel=None):
         returns the time values corresponding to the samples
     """
     #   Convert to samples
-    from_ = floor(from_ * raw['info']['sfreq'])
-    to = ceil(to * raw['info']['sfreq'])
+    start = floor(start * raw['info']['sfreq'])
+    stop = ceil(stop * raw['info']['sfreq'])
 
     #   Read it
-    return read_raw_segment(raw, from_, to, sel)
+    return read_raw_segment(raw, start, stop, sel)
 
 ###############################################################################
 # Writing
@@ -519,3 +552,119 @@ def finish_writing_raw(fid):
     end_block(fid, FIFF.FIFFB_RAW_DATA)
     end_block(fid, FIFF.FIFFB_MEAS)
     end_file(fid)
+
+###############################################################################
+# misc
+
+def findall(L, value, start=0):
+    """Return indices of all occurrences of value in list L, starting from start.
+    """
+    c = L.count(value)
+    if c == 0:
+        return list()
+    else:
+        ind = list()
+        i = start-1
+        for _ in range(c):
+            i = L.index(value, i+1)
+            ind.append(i)
+        return ind
+
+
+def _make_compensator(info, kind):
+    """Auxiliary function for make_compensator
+    """
+    for k in range(len(info['comps'])):
+        if info['comps'][k]['kind'] == kind:
+            this_data = info['comps'][k]['data'];
+
+            #   Create the preselector
+            presel = np.zeros((this_data['ncol'], info['nchan']))
+            for col, col_name in enumerate(this_data['col_names']):
+                ind = findall(info['ch_names'], col_name)
+                if len(ind) == 0:
+                    raise ValueError, 'Channel %s is not available in data' % \
+                                                                      col_name
+                elif len(ind) > 1:
+                    raise ValueError, 'Ambiguous channel %s' % col_name
+                presel[col, ind] = 1.0
+
+            #   Create the postselector
+            postsel = np.zeros((info['nchan'], this_data['nrow']))
+            for c, ch_name in enumerate(info['ch_names']):
+                ind = findall(this_data['row_names'], ch_name)
+                if len(ind) > 1:
+                    raise ValueError, 'Ambiguous channel %s' % ch_name
+                elif len(ind) == 1:
+                    postsel[c, ind] = 1.0
+
+            this_comp = np.dot(postsel, np.dot(this_data['data'], presel))
+            return this_comp
+
+    return []
+
+
+def make_compensator(info, from_, to, exclude_comp_chs=False):
+    """
+    %
+    % [comp] = mne_make_compensator(info,from,to,exclude_comp_chs)
+    %
+    % info              - measurement info as returned by the fif reading routines
+    % from              - compensation in the input data
+    % to                - desired compensation in the output
+    % exclude_comp_chs  - exclude compensation channels from the output (optional)
+    %
+
+    %
+    % Create a compensation matrix to bring the data from one compensation
+    % state to another
+    %
+    """
+
+    if from_ == to:
+        comp = np.zeros((info['nchan'], info['nchan']))
+        return comp
+
+    if from_ == 0:
+        C1 = np.zeros((info['nchan'], info['nchan']))
+    else:
+        try:
+            C1 = _make_compensator(info, from_)
+        except Exception as inst:
+            raise ValueError, 'Cannot create compensator C1 (%s)' % inst
+
+        if len(C1) == 0:
+            raise ValueError, 'Desired compensation matrix (kind = %d) not found' % from_
+
+
+    if to == 0:
+       C2 = np.zeros((info['nchan'], info['nchan']))
+    else:
+        try:
+            C2 = _make_compensator(info, to)
+        except Exception as inst:
+            raise ValueError, 'Cannot create compensator C2 (%s)' % inst
+
+        if len(C2) == 0:
+            raise ValueError, 'Desired compensation matrix (kind = %d) not found' % to
+
+
+    #   s_orig = s_from + C1*s_from = (I + C1)*s_from
+    #   s_to   = s_orig - C2*s_orig = (I - C2)*s_orig
+    #   s_to   = (I - C2)*(I + C1)*s_from = (I + C1 - C2 - C2*C1)*s_from
+    comp = np.eye(info['nchan']) + C1 - C2 - np.dot(C2, C1)
+
+    if exclude_comp_chs:
+        pick = np.zeros(info['nchan'], dtype=int)
+        npick = 0
+        for k, chan in enumerate(info['chs']):
+            if chan['kind'] != FIFF.FIFFV_REF_MEG_CH:
+                pick[npick] = k
+                npick += 1
+
+        if npick == 0:
+            raise ValueError, 'Nothing remains after excluding the compensation channels'
+
+        comp = comp[pick[:npick], :]
+
+    return comp
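
make_compensator follows the matrix identity written in its comment: applying (I - C2) after (I + C1) expands to I + C1 - C2 - C2*C1. A quick numerical check of that identity with random matrices (illustration only; nothing here touches FIF structures):

import numpy as np

rng = np.random.RandomState(1)
n = 6
I = np.eye(n)
C1 = rng.randn(n, n)
C2 = rng.randn(n, n)

composed = np.dot(I - C2, I + C1)           # (I - C2)(I + C1)
expanded = I + C1 - C2 - np.dot(C2, C1)     # I + C1 - C2 - C2*C1
print(np.allclose(composed, expanded))      # True
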
diff --git a/fiff/tag.py b/fiff/tag.py
index d1bf294..6cd766f 100644
--- a/fiff/tag.py
+++ b/fiff/tag.py
@@ -44,6 +44,18 @@ class Tag(object):
         out += "\n"
         return out
 
+    def __cmp__(self, tag):
+        is_equal = (self.kind == tag.kind and
+                    self.type == tag.type and
+                    self.size == tag.size and
+                    self.next == tag.next and
+                    self.pos == tag.pos and
+                    self.data == tag.data)
+        if is_equal:
+            return 0
+        else:
+            return 1
+
 
 def read_tag_info(fid):
     """Read Tag info (or header)
diff --git a/fiff/tests/test_raw.py b/fiff/tests/test_raw.py
index 4643fe2..84f6aa2 100644
--- a/fiff/tests/test_raw.py
+++ b/fiff/tests/test_raw.py
@@ -20,7 +20,7 @@ def test_io_raw():
     meg_channels_idx = [k for k in range(nchan) if ch_names[k][:3]=='MEG']
     meg_channels_idx = meg_channels_idx[:5]
 
-    data, times = fiff.read_raw_segment_times(raw, from_=100, to=115,
+    data, times = fiff.read_raw_segment_times(raw, start=100, stop=115,
                                               sel=meg_channels_idx)
 
     # Writing
@@ -41,18 +41,17 @@ def test_io_raw():
     #
     #   Set up the reading parameters
     #
-    from_ = raw['first_samp']
-    to = raw['last_samp']
+    start = raw['first_samp']
+    stop = raw['last_samp']
     quantum_sec = 10
     quantum = int(ceil(quantum_sec * raw['info']['sfreq']))
     #
     #   Read and write all the data
     #
-    first_buffer = True
-    for first in range(from_, to, quantum):
+    for first in range(start, stop, quantum):
         last = first + quantum
-        if last > to:
-            last = to
+        if last > stop:
+            last = stop
         try:
             data, times = fiff.read_raw_segment(raw, first, last, picks)
         except Exception as inst:
