Browse Source

Update frozen Neo to version 0.5.2

Michael Denker 6 years ago
parent
commit
eebd26f4ea
89 changed files with 1497 additions and 843 deletions
  1. 64 0
      code/python-neo/.gitignore
  2. 17 0
      code/python-neo/.travis.yml
  3. 1 1
      code/python-neo/README.rst
  4. 0 0
      code/python-neo/doc/old_stuffs/gif2011workshop.rst
  5. 0 0
      code/python-neo/doc/old_stuffs/specific_annotations.rst
  6. 4 0
      code/python-neo/doc/source/authors.rst
  7. 1 1
      code/python-neo/doc/source/conf.py
  8. 4 4
      code/python-neo/doc/source/install.rst
  9. 21 0
      code/python-neo/doc/source/releases/0.5.2.rst
  10. 1 2
      code/python-neo/doc/source/whatisnew.rst
  11. 130 0
      code/python-neo/examples/igorio.ipynb
  12. BIN
      code/python-neo/examples/nmc-portal/grouped_ephys/B95/B95_Ch0_IDRest_107.ibw
  13. BIN
      code/python-neo/examples/nmc-portal/grouped_ephys/B95/B95_Ch0_IDRest_107.pxp
  14. 27 74
      code/python-neo/neo/core/analogsignal.py
  15. 99 0
      code/python-neo/neo/core/basesignal.py
  16. 68 27
      code/python-neo/neo/core/channelindex.py
  17. 1 2
      code/python-neo/neo/core/container.py
  18. 15 75
      code/python-neo/neo/core/irregularlysampledsignal.py
  19. 96 30
      code/python-neo/neo/core/spiketrain.py
  20. 2 1
      code/python-neo/neo/io/__init__.py
  21. 4 4
      code/python-neo/neo/io/axonio.py
  22. 57 28
      code/python-neo/neo/io/blackrockio.py
  23. 6 4
      code/python-neo/neo/io/elanio.py
  24. 20 2
      code/python-neo/neo/io/hdf5io.py
  25. 49 18
      code/python-neo/neo/io/igorproio.py
  26. 13 16
      code/python-neo/neo/io/micromedio.py
  27. 4 4
      code/python-neo/neo/io/nestio.py
  28. 2 2
      code/python-neo/neo/io/neuralynxio.py
  29. 57 48
      code/python-neo/neo/io/nixio.py
  30. 159 35
      code/python-neo/neo/io/nsdfio.py
  31. 32 23
      code/python-neo/neo/io/plexonio.py
  32. 1 1
      code/python-neo/neo/io/rawbinarysignalio.py
  33. 1 0
      code/python-neo/neo/io/spike2io.py
  34. 6 4
      code/python-neo/neo/io/tdtio.py
  35. 5 5
      code/python-neo/neo/io/tools.py
  36. 4 2
      code/python-neo/neo/io/winedrio.py
  37. 6 6
      code/python-neo/neo/io/winwcpio.py
  38. 52 35
      code/python-neo/neo/test/coretest/test_analogsignal.py
  39. 1 4
      code/python-neo/neo/test/coretest/test_analogsignalarray.py
  40. 1 4
      code/python-neo/neo/test/coretest/test_base.py
  41. 1 4
      code/python-neo/neo/test/coretest/test_block.py
  42. 1 4
      code/python-neo/neo/test/coretest/test_channelindex.py
  43. 1 4
      code/python-neo/neo/test/coretest/test_container.py
  44. 1 4
      code/python-neo/neo/test/coretest/test_epoch.py
  45. 1 4
      code/python-neo/neo/test/coretest/test_event.py
  46. 1 4
      code/python-neo/neo/test/coretest/test_generate_datasets.py
  47. 19 5
      code/python-neo/neo/test/coretest/test_irregularysampledsignal.py
  48. 1 4
      code/python-neo/neo/test/coretest/test_segment.py
  49. 302 176
      code/python-neo/neo/test/coretest/test_spiketrain.py
  50. 1 4
      code/python-neo/neo/test/coretest/test_unit.py
  51. 3 3
      code/python-neo/neo/test/generate_datasets.py
  52. 2 0
      code/python-neo/neo/test/iotest/common_io_test.py
  53. 1 4
      code/python-neo/neo/test/iotest/test_alphaomegaio.py
  54. 1 4
      code/python-neo/neo/test/iotest/test_asciisignalio.py
  55. 1 4
      code/python-neo/neo/test/iotest/test_asciispiketrainio.py
  56. 10 5
      code/python-neo/neo/test/iotest/test_axonio.py
  57. 1 4
      code/python-neo/neo/test/iotest/test_baseio.py
  58. 2 5
      code/python-neo/neo/test/iotest/test_blackrockio.py
  59. 1 4
      code/python-neo/neo/test/iotest/test_brainvisionio.py
  60. 1 4
      code/python-neo/neo/test/iotest/test_brainwaredamio.py
  61. 1 4
      code/python-neo/neo/test/iotest/test_brainwaref32io.py
  62. 1 4
      code/python-neo/neo/test/iotest/test_brainwaresrcio.py
  63. 1 5
      code/python-neo/neo/test/iotest/test_elanio.py
  64. 1 4
      code/python-neo/neo/test/iotest/test_elphyio.py
  65. 1 4
      code/python-neo/neo/test/iotest/test_exampleio.py
  66. 3 6
      code/python-neo/neo/test/iotest/test_hdf5io.py
  67. 2 4
      code/python-neo/neo/test/iotest/test_igorio.py
  68. 1 4
      code/python-neo/neo/test/iotest/test_klustakwikio.py
  69. 4 6
      code/python-neo/neo/test/iotest/test_kwikio.py
  70. 1 5
      code/python-neo/neo/test/iotest/test_micromedio.py
  71. 1 4
      code/python-neo/neo/test/iotest/test_neomatlabio.py
  72. 1 4
      code/python-neo/neo/test/iotest/test_nestio.py
  73. 1 4
      code/python-neo/neo/test/iotest/test_neuralynxio.py
  74. 1 4
      code/python-neo/neo/test/iotest/test_neuroexplorerio.py
  75. 1 4
      code/python-neo/neo/test/iotest/test_neuroscopeio.py
  76. 1 4
      code/python-neo/neo/test/iotest/test_neuroshareio.py
  77. 55 7
      code/python-neo/neo/test/iotest/test_nixio.py
  78. 22 10
      code/python-neo/neo/test/iotest/test_nsdfio.py
  79. 1 4
      code/python-neo/neo/test/iotest/test_pickleio.py
  80. 5 8
      code/python-neo/neo/test/iotest/test_plexonio.py
  81. 1 4
      code/python-neo/neo/test/iotest/test_pynnio.py
  82. 1 4
      code/python-neo/neo/test/iotest/test_rawbinarysignalio.py
  83. 1 4
      code/python-neo/neo/test/iotest/test_spike2io.py
  84. 1 4
      code/python-neo/neo/test/iotest/test_stimfitio.py
  85. 1 5
      code/python-neo/neo/test/iotest/test_tdtio.py
  86. 1 4
      code/python-neo/neo/test/iotest/test_winedrio.py
  87. 1 4
      code/python-neo/neo/test/iotest/test_winwcpio.py
  88. 1 1
      code/python-neo/neo/version.py
  89. 1 2
      code/python-neo/setup.py

+ 64 - 0
code/python-neo/.gitignore

@@ -0,0 +1,64 @@
+#########################################
+# Editor temporary/working/backup files #
+.#*
+[#]*#
+*~
+*$
+*.bak
+*.kdev4
+*.komodoproject
+*.orig
+.project
+.pydevproject
+.settings
+*.tmp*
+.idea
+
+# Compiled source #
+###################
+*.a
+*.com
+*.class
+*.dll
+*.exe
+*.mo
+*.o
+*.py[ocd]
+*.so
+
+# Python files #
+################
+# setup.py working directory
+build
+# other build directories
+bin
+dist
+# sphinx build directory
+doc/_build
+# setup.py dist directory
+dist
+# Egg metadata
+*.egg-info
+*.egg
+*.EGG
+*.EGG-INFO
+# tox testing tool
+.tox
+# coverage
+.coverage
+cover
+*.ipynb_checkpoints
+
+# OS generated files #
+######################
+.directory
+.gdb_history
+.DS_Store?
+ehthumbs.db
+Icon?
+Thumbs.db
+
+# Things specific to this project #
+###################################
+neo/test/io/neurosharemergeio.py
+files_for_testing_neo

+ 17 - 0
code/python-neo/.travis.yml

@@ -0,0 +1,17 @@
+language: python
+python:
+  - "2.7"
+  - "3.4"
+  - "3.5"
+  - "3.6"  
+
+# command to install dependencies
+install:
+  - pip install -r requirements.txt
+  - pip install coveralls
+  - pip install . 
+# command to run tests, e.g. python setup.py test
+script:
+  nosetests --with-coverage --cover-package=neo
+after_success:
+  coveralls

+ 1 - 1
code/python-neo/README.rst

@@ -44,7 +44,7 @@ More information
 
 - Home page: http://neuralensemble.org/neo
 - Mailing list: https://groups.google.com/forum/?fromgroups#!forum/neuralensemble
-- Documentation: http://packages.python.org/neo/
+- Documentation: http://neo.readthedocs.io/
 - Bug reports: https://github.com/NeuralEnsemble/python-neo/issues
 
 For installation instructions, see doc/source/install.rst

code/python-neo/doc/source/gif2011workshop.rst → code/python-neo/doc/old_stuffs/gif2011workshop.rst


code/python-neo/doc/source/specific_annotations.rst → code/python-neo/doc/old_stuffs/specific_annotations.rst


+ 4 - 0
code/python-neo/doc/source/authors.rst

@@ -34,6 +34,8 @@ and may not be the current affiliation of a contributor.
 * Hélissande Fragnaud [2]
 * Mark Hollenbeck [14]
 * Mieszko Grodzicki
+* Rick Gerkin [15]
+* Matthieu Sénoville [2]
 
 
 1. Centre de Recherche en Neuroscience de Lyon, CNRS UMR5292 - INSERM U1028 - Universite Claude Bernard Lyon 1
@@ -50,5 +52,7 @@ and may not be the current affiliation of a contributor.
 12. University of Virginia
 13. INM-6, Forschungszentrum Jülich, Germany
 14. University of Texas at Austin
+15. Arizona State University
+
 
 If we've somehow missed you off the list we're very sorry - please let us know.

+ 1 - 1
code/python-neo/doc/source/conf.py

@@ -51,7 +51,7 @@ copyright = u'2010-2017, ' + AUTHORS
 # The short X.Y version.
 version = '0.5'
 # The full version, including alpha/beta/rc tags.
-release = '0.5.1'
+release = '0.5.2'
 
 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.

+ 4 - 4
code/python-neo/doc/source/install.rst

@@ -8,7 +8,7 @@ system.
 Dependencies
 ============
   
-    * Python_ >= 2.6
+    * Python_ >= 2.7
     * numpy_ >= 1.7.1
     * quantities_ >= 0.9.0
 
@@ -47,12 +47,12 @@ on).
     
 To download and install manually, download:
 
-    https://github.com/NeuralEnsemble/python-neo/archive/neo-0.5.1.zip
+    https://github.com/NeuralEnsemble/python-neo/archive/neo-0.5.2.zip
 
 Then::
 
-    $ unzip neo-0.5.1.zip
-    $ cd neo-0.5.1
+    $ unzip neo-0.5.2.zip
+    $ cd neo-0.5.2
     $ python setup.py install
     
 or::

+ 21 - 0
code/python-neo/doc/source/releases/0.5.2.rst

@@ -0,0 +1,21 @@
+=======================
+Neo 0.5.2 release notes
+=======================
+
+27th September 2017
+
+
+* Removed support for Python 2.6
+* Pickling :class:`AnalogSignal` and :class:`SpikeTrain` now preserves parent objects
+* Added NSDFIO, which reads and writes NSDF files
+* Fixes and improvements to PlexonIO, NixIO, BlackrockIO, NeuralynxIO, IgorIO, ElanIO, MicromedIO, TdtIO and others.
+
+Thanks to Michael Denker, Achilleas Koutsou, Mieszko Grodzicki, Samuel Garcia, Julia Sprenger, Andrew Davison,
+Rohan Shah, Richard C Gerkin, Mikkel Elle Lepperød, Joffrey Gonin, Hélissande Fragnaud,
+Elodie Legouée and Matthieu Sénoville for their contributions to this release.
+
+(Full `list of closed issues`_)
+
+.. _`list of closed issues`: https://github.com/NeuralEnsemble/python-neo/issues?q=is%3Aissue+milestone%3A0.5.2+is%3Aclosed
+
+

+ 1 - 2
code/python-neo/doc/source/whatisnew.rst

@@ -8,6 +8,7 @@ Release notes
 
    releases/0.5.0.rst
    releases/0.5.1.rst
+   releases/0.5.2.rst
 
 ..   releases/0.2.0.rst
 ..   releases/0.2.1.rst
@@ -17,8 +18,6 @@ Release notes
 ..   releases/0.3.3.rst
 
 
-
-
 Version 0.4.0
 -------------
 

File diff suppressed because it is too large
+ 130 - 0
code/python-neo/examples/igorio.ipynb


BIN
code/python-neo/examples/nmc-portal/grouped_ephys/B95/B95_Ch0_IDRest_107.ibw


BIN
code/python-neo/examples/nmc-portal/grouped_ephys/B95/B95_Ch0_IDRest_107.pxp


+ 27 - 74
code/python-neo/neo/core/analogsignal.py

@@ -2,8 +2,8 @@
 '''
 This module implements :class:`AnalogSignal`, an array of analog signals.
 
-:class:`AnalogSignal` inherits from :class:`quantites.Quantity`, which
-inherits from :class:`numpy.array`.
+:class:`AnalogSignal` inherits from :class:`basesignal.BaseSignal` and 
+:class:`quantities.Quantity`, which inherits from :class:`numpy.array`.
 Inheritance from :class:`numpy.array` is explained here:
 http://docs.scipy.org/doc/numpy/user/basics.subclassing.html
 
@@ -26,9 +26,12 @@ import quantities as pq
 
 from neo.core.baseneo import BaseNeo, MergeError, merge_annotations
 from neo.core.channelindex import ChannelIndex
+from copy import copy, deepcopy
 
 logger = logging.getLogger("Neo")
 
+from neo.core import basesignal
+
 
 def _get_sampling_rate(sampling_rate, sampling_period):
     '''
@@ -67,7 +70,7 @@ def _new_AnalogSignalArray(cls, signal, units=None, dtype=None, copy=True,
     return obj
 
 
-class AnalogSignal(BaseNeo, pq.Quantity):
+class AnalogSignal(basesignal.BaseSignal):
     '''
     Array of one or more continuous analog signals.
 
@@ -227,6 +230,20 @@ class AnalogSignal(BaseNeo, pq.Quantity):
                                         self.annotations,
                                         self.channel_index,
                                         self.segment)
+    def __deepcopy__(self, memo):
+        cls = self.__class__
+        new_AS = cls(np.array(self), units=self.units, dtype=self.dtype,
+               t_start=self.t_start, sampling_rate=self.sampling_rate,
+               sampling_period=self.sampling_period, name=self.name,
+               file_origin=self.file_origin, description=self.description)
+        new_AS.__dict__.update(self.__dict__)
+        memo[id(self)] = new_AS
+        for k, v in self.__dict__.items():
+            try:
+                setattr(new_AS, k, deepcopy(v, memo))
+            except:
+                setattr(new_AS, k, v)
+        return new_AS
 
     def __array_finalize__(self, obj):
         '''
@@ -251,7 +268,7 @@ class AnalogSignal(BaseNeo, pq.Quantity):
         self.file_origin = getattr(obj, 'file_origin', None)
         self.description = getattr(obj, 'description', None)
 
-        # Parents objects
+        # Parent objects
         self.segment = getattr(obj, 'segment', None)
         self.channel_index = getattr(obj, 'channel_index', None)
 
@@ -285,11 +302,11 @@ class AnalogSignal(BaseNeo, pq.Quantity):
         Get the item or slice :attr:`i`.
         '''
         obj = super(AnalogSignal, self).__getitem__(i)
-        if isinstance(i, int):  # a single point in time across all channels
+        if isinstance(i, (int, np.integer)):  # a single point in time across all channels
             obj = pq.Quantity(obj.magnitude, units=obj.units)
         elif isinstance(i, tuple):
             j, k = i
-            if isinstance(j, int):  # extract a quantity array
+            if isinstance(j, (int, np.integer)):  # extract a quantity array
                 obj = pq.Quantity(obj.magnitude, units=obj.units)
             else:
                 if isinstance(j, slice):
@@ -303,7 +320,7 @@ class AnalogSignal(BaseNeo, pq.Quantity):
                     # in the general case, would need to return IrregularlySampledSignal(Array)
                 else:
                     raise TypeError("%s not supported" % type(j))
-                if isinstance(k, int):
+                if isinstance(k, (int, np.integer)):
                     obj = obj.reshape(-1, 1)
                 if self.channel_index:
                     obj.channel_index = self.channel_index.__getitem__(k)
@@ -437,7 +454,10 @@ class AnalogSignal(BaseNeo, pq.Quantity):
         new = self.__class__(signal=signal, units=to_u,
                              sampling_rate=self.sampling_rate)
         new._copy_data_complement(self)
+        new.channel_index = self.channel_index
+        new.segment = self.segment
         new.annotations.update(self.annotations)
+
         return new
 
     def duplicate_with_new_array(self, signal):
@@ -461,12 +481,6 @@ class AnalogSignal(BaseNeo, pq.Quantity):
             return False
         return super(AnalogSignal, self).__eq__(other)
 
-    def __ne__(self, other):
-        '''
-        Non-equality test (!=)
-        '''
-        return not self.__eq__(other)
-
     def _check_consistency(self, other):
         '''
         Check if the attributes of another :class:`AnalogSignal`
@@ -486,50 +500,6 @@ class AnalogSignal(BaseNeo, pq.Quantity):
                      "description", "annotations"):
             setattr(self, attr, getattr(other, attr, None))
 
-    def _apply_operator(self, other, op, *args):
-        '''
-        Handle copying metadata to the new :class:`AnalogSignal`
-        after a mathematical operation.
-        '''
-        self._check_consistency(other)
-        f = getattr(super(AnalogSignal, self), op)
-        new_signal = f(other, *args)
-        new_signal._copy_data_complement(self)
-        return new_signal
-
-    def __add__(self, other, *args):
-        '''
-        Addition (+)
-        '''
-        return self._apply_operator(other, "__add__", *args)
-
-    def __sub__(self, other, *args):
-        '''
-        Subtraction (-)
-        '''
-        return self._apply_operator(other, "__sub__", *args)
-
-    def __mul__(self, other, *args):
-        '''
-        Multiplication (*)
-        '''
-        return self._apply_operator(other, "__mul__", *args)
-
-    def __truediv__(self, other, *args):
-        '''
-        Float division (/)
-        '''
-        return self._apply_operator(other, "__truediv__", *args)
-
-    def __div__(self, other, *args):
-        '''
-        Integer division (//)
-        '''
-        return self._apply_operator(other, "__div__", *args)
-
-    __radd__ = __add__
-    __rmul__ = __sub__
-
     def __rsub__(self, other, *args):
         '''
         Backwards subtraction (other-self)
@@ -653,20 +623,3 @@ class AnalogSignal(BaseNeo, pq.Quantity):
         if hasattr(self, "lazy_shape"):
             signal.lazy_shape = merged_lazy_shape
         return signal
-
-    def as_array(self, units=None):
-        """
-        Return the signal as a plain NumPy array.
-
-        If `units` is specified, first rescale to those units.
-        """
-        if units:
-            return self.rescale(units).magnitude
-        else:
-            return self.magnitude
-
-    def as_quantity(self):
-        """
-        Return the signal as a quantities array.
-        """
-        return self.view(pq.Quantity)

+ 99 - 0
code/python-neo/neo/core/basesignal.py

@@ -0,0 +1,99 @@
+# -*- coding: utf-8 -*-
+'''
+This module implements :class:`BaseSignal`, an array of signals.
+
+:class:`BaseSignal` inherits from :class:`quantities.Quantity`, which
+inherits from :class:`numpy.array`.
+Inheritance from :class:`numpy.array` is explained here:
+http://docs.scipy.org/doc/numpy/user/basics.subclassing.html
+
+In brief:
+* Initialization of a new object from constructor happens in :meth:`__new__`.
+This is where user-specified attributes are set.
+
+* :meth:`__array_finalize__` is called for all new objects, including those
+created by slicing. This is where attributes are copied over from
+the old object.
+'''
+
+# needed for python 3 compatibility
+from __future__ import absolute_import, division, print_function
+
+import logging
+
+import numpy as np
+import quantities as pq
+
+from neo.core.baseneo import BaseNeo, MergeError, merge_annotations
+from neo.core.channelindex import ChannelIndex
+
+logger = logging.getLogger("Neo")
+
+class BaseSignal(BaseNeo, pq.Quantity):    
+    
+    def __ne__(self, other):
+        '''
+        Non-equality test (!=)
+        '''
+        return not self.__eq__(other)
+
+    def _apply_operator(self, other, op, *args):
+        '''
+        Handle copying metadata to the new :class:`BaseSignal`
+        after a mathematical operation.
+        '''
+        self._check_consistency(other)
+        f = getattr(super(BaseSignal, self), op)
+        new_signal = f(other, *args)
+        new_signal._copy_data_complement(self)
+        return new_signal
+
+    def __add__(self, other, *args):
+        '''
+        Addition (+)
+        '''
+        return self._apply_operator(other, "__add__", *args)
+
+    def __sub__(self, other, *args):
+        '''
+        Subtraction (-)
+        '''
+        return self._apply_operator(other, "__sub__", *args)
+
+    def __mul__(self, other, *args):
+        '''
+        Multiplication (*)
+        '''
+        return self._apply_operator(other, "__mul__", *args)
+
+    def __truediv__(self, other, *args):
+        '''
+        Float division (/)
+        '''
+        return self._apply_operator(other, "__truediv__", *args)
+
+    def __div__(self, other, *args):
+        '''
+        Integer division (//)
+        '''
+        return self._apply_operator(other, "__div__", *args)
+
+    __radd__ = __add__
+    __rmul__ = __sub__
+
+    def as_array(self, units=None):
+        """
+        Return the signal as a plain NumPy array.
+
+        If `units` is specified, first rescale to those units.
+        """
+        if units:
+            return self.rescale(units).magnitude
+        else:
+            return self.magnitude
+
+    def as_quantity(self):
+        """
+        Return the signal as a quantities array.
+        """
+        return self.view(pq.Quantity)

+ 68 - 27
code/python-neo/neo/core/channelindex.py

@@ -22,79 +22,114 @@ class ChannelIndex(Container):
 
     This container has several purposes:
 
-      * Grouping all :class:`AnalogSignal`\s inside a :class:`Block`
-        across :class:`Segment`\s;
-      * Indexing a subset of the channels within an :class:`AnalogSignal`;
-      * Container of :class:`Unit`\s. A neuron discharge (:class:`Unit`)
-        can be seen by several electrodes (e.g. 4 for tetrodes).
-
-    *Usage 1* multi :class:`Segment` recording with 2 electrode arrays::
+      * Grouping all :class:`AnalogSignal`\s and
+        :class:`IrregularlySampledSignal`\s inside a :class:`Block` across
+        :class:`Segment`\s;
+      * Indexing a subset of the channels within an :class:`AnalogSignal` and
+        :class:`IrregularlySampledSignal`\s;
+      * Container of :class:`Unit`\s. Discharges of multiple neurons
+        (:class:`Unit`\'s) can be seen on the same channel.
+
+    *Usage 1* providing channel IDs across multiple :class:`Segment`::
+        * Recording with 2 electrode arrays across 3 segments
+        * Each array has 64 channels and is data is represented in a single
+          :class:`AnalogSignal` object per electrode array
+        * channel ids range from 0 to 127 with the first half covering
+          electrode 0 and second half covering electrode 1
 
         >>> from neo.core import (Block, Segment, ChannelIndex,
         ...                       AnalogSignal)
         >>> from quantities import nA, kHz
         >>> import numpy as np
-        >>>
+        ...
         >>> # create a Block with 3 Segment and 2 ChannelIndex objects
-        ... blk = Block()
+        >>> blk = Block()
         >>> for ind in range(3):
         ...     seg = Segment(name='segment %d' % ind, index=ind)
         ...     blk.segments.append(seg)
         ...
         >>> for ind in range(2):
+        ...     channel_ids=np.arange(64)+ind
         ...     chx = ChannelIndex(name='Array probe %d' % ind,
-        ...                        index=np.arange(64))
+        ...                        index=np.arange(64),
+        ...                        channel_ids=channel_ids,
+        ...                        channel_names=['Channel %i' % chid
+        ...                                       for chid in channel_ids])
         ...     blk.channel_indexes.append(chx)
         ...
         >>> # Populate the Block with AnalogSignal objects
-        ... for seg in blk.segments:
+        >>> for seg in blk.segments:
         ...     for chx in blk.channel_indexes:
         ...         a = AnalogSignal(np.random.randn(10000, 64)*nA,
         ...                          sampling_rate=10*kHz)
+        ...         # link AnalogSignal and ID providing channel_index
+        ...         a.channel_index = chx
         ...         chx.analogsignals.append(a)
         ...         seg.analogsignals.append(a)
 
     *Usage 2* grouping channels::
+        * Recording with a single probe with 8 channels, 4 of which belong to a
+          Tetrode
+        * Global channel IDs range from 0 to 8
+        * An additional ChannelIndex is used to group subset of Tetrode channels
 
         >>> from neo.core import Block, ChannelIndex
         >>> import numpy as np
         >>> from quantities import mV, kHz
-        >>>
+        ...
         >>> # Create a Block
-        ... blk = Block()
+        >>> blk = Block()
         >>> blk.segments.append(Segment())
-        >>>
-        >>> # Create a signal with 8 channels
-        ... sig = AnalogSignal(np.random.randn(1000, 8)*mV, sampling_rate=10*kHz)
-        ... blk.segments[0].append(sig)
         ...
-        >>> # Create a new ChannelIndex which groups three channels from the signal
-        ... chx = ChannelIndex(channel_names=np.array(['ch1', 'ch4', 'ch6']),
-        ...                    index=np.array([0, 3, 5])
+        >>> # Create a signal with 8 channels and a ChannelIndex handling the
+        >>> # channel IDs (see usage case 1)
+        >>> sig = AnalogSignal(np.random.randn(1000, 8)*mV, sampling_rate=10*kHz)
+        >>> chx = ChannelIndex(name='Probe 0', index=range(8),
+        ...                    channel_ids=range(8),
+        ...                    channel_names=['Channel %i' % chid
+        ...                                   for chid in range(8)])
+        >>> chx.analogsignals.append(sig)
+        >>> sig.channel_index=chx
+        >>> blk.segments[0].analogsignals.append(sig)
+        ...
+        >>> # Create a new ChannelIndex which groups four channels from the
+        >>> # analogsignal and provides a second ID scheme
+        >>> chx = ChannelIndex(name='Tetrode 0',
+        ...                    channel_names=np.array(['Tetrode ch1',
+        ...                                            'Tetrode ch4',
+        ...                                            'Tetrode ch6',
+        ...                                            'Tetrode ch7']),
+        ...                    index=np.array([0, 3, 5, 6]))
+        >>> # Attach the ChannelIndex to the the Block,
+        >>> # but not the to the AnalogSignal, since sig.channel_index is
+        >>> # already linked to the global ChannelIndex of Probe 0 created above
         >>> chx.analogsignals.append(sig)
         >>> blk.channel_indexes.append(chx)
 
     *Usage 3* dealing with :class:`Unit` objects::
+        * Group 5 unit objects in a single :class:`ChannelIndex` object
 
         >>> from neo.core import Block, ChannelIndex, Unit
-        >>>
+        ...
         >>> # Create a Block
         >>> blk = Block()
-        >>>
+        ...
         >>> # Create a new ChannelIndex and add it to the Block
-        >>> chx = ChannelIndex(name='octotrode A')
+        >>> chx = ChannelIndex(index=None, name='octotrode A')
         >>> blk.channel_indexes.append(chx)
-        >>>
+        ...
         >>> # create several Unit objects and add them to the
         >>> # ChannelIndex
-        ... for ind in range(5):
+        >>> for ind in range(5):
         ...     unit = Unit(name = 'unit %d' % ind,
         ...                 description='after a long and hard spike sorting')
         ...     chx.units.append(unit)
 
     *Required attributes/properties*:
-        :channel_indexes: (numpy.array 1D dtype='i')
-            Index of each channel in the attached signals.
+        :index: (numpy.array 1D dtype='i')
+            Index of each channel in the attached signals (AnalogSignals and
+            IrregularlySampledSignals). The order of the channel IDs needs to
+            be consistent across attached signals.
 
     *Recommended attributes/properties*:
         :name: (str) A label for the dataset.
@@ -102,6 +137,8 @@ class ChannelIndex(Container):
         :file_origin: (str) Filesystem path or URL of the original data file.
         :channel_names: (numpy.array 1D dtype='S')
             Names for each recording channel.
+        :channel_ids: (numpy.array 1D dtype='int')
+            IDs of the corresponding channels referenced by 'index'.
         :coordinates: (quantity array 2D (x, y, z))
             Physical or logical coordinates of all channels.
 
@@ -171,4 +208,8 @@ class ChannelIndex(Container):
                            channel_names=channel_names,
                            channel_ids=channel_ids)
         obj.block = self.block
+        obj.analogsignals = self.analogsignals
+        obj.irregularlysampledsignals = self.irregularlysampledsignals
+        # we do not copy the list of units, since these are related to
+        # the entire set of channels in the parent ChannelIndex
         return obj

+ 1 - 2
code/python-neo/neo/core/container.py

@@ -3,8 +3,7 @@
 This module implements generic container base class that all neo container
 object inherit from.  It provides shared methods for all container types.
 
-:class:`Container` is derived from :class:`BaseNeo` but is
-defined in :module:`neo.core.analogsignalarray`.
+:class:`Container` is derived from :class:`BaseNeo`
 """
 
 # needed for python 3 compatibility

+ 15 - 75
code/python-neo/neo/core/irregularlysampledsignal.py

@@ -3,9 +3,9 @@
 This module implements :class:`IrregularlySampledSignal`, an array of analog
 signals with samples taken at arbitrary time points.
 
-:class:`IrregularlySampledSignal` derives from :class:`BaseNeo`, from
-:module:`neo.core.baseneo`, and from :class:`quantites.Quantity`, which
-inherits from :class:`numpy.array`.
+:class:`IrregularlySampledSignal` inherits from :class:`basesignal.BaseSignal`
+and  derives from :class:`BaseNeo`, from :module:`neo.core.baseneo`, 
+and from :class:`quantities.Quantity`, which inherits from :class:`numpy.array`.
 
 Inheritance from :class:`numpy.array` is explained here:
 http://docs.scipy.org/doc/numpy/user/basics.subclassing.html
@@ -27,6 +27,7 @@ import quantities as pq
 
 from neo.core.baseneo import BaseNeo, MergeError, merge_annotations
 
+from neo.core import basesignal
 
 def _new_IrregularlySampledSignal(cls, times, signal, units=None, time_units=None, dtype=None,
                                   copy=True, name=None, file_origin=None, description=None,
@@ -43,7 +44,7 @@ def _new_IrregularlySampledSignal(cls, times, signal, units=None, time_units=Non
     return iss
 
 
-class IrregularlySampledSignal(BaseNeo, pq.Quantity):
+class IrregularlySampledSignal(basesignal.BaseSignal):
     '''
     An array of one or more analog signals with samples taken at arbitrary time points.
 
@@ -208,6 +209,10 @@ class IrregularlySampledSignal(BaseNeo, pq.Quantity):
         self.file_origin = getattr(obj, 'file_origin', None)
         self.description = getattr(obj, 'description', None)
 
+        # Parent objects
+        self.segment = getattr(obj, 'segment', None)
+        self.channel_index = getattr(obj, 'channel_index', None)
+
     def __repr__(self):
         '''
         Returns a string representing the :class:`IrregularlySampledSignal`.
@@ -230,11 +235,11 @@ class IrregularlySampledSignal(BaseNeo, pq.Quantity):
         Get the item or slice :attr:`i`.
         '''
         obj = super(IrregularlySampledSignal, self).__getitem__(i)
-        if isinstance(i, int):  # a single point in time across all channels
+        if isinstance(i, (int, np.integer)):  # a single point in time across all channels
             obj = pq.Quantity(obj.magnitude, units=obj.units)
         elif isinstance(i, tuple):
             j, k = i
-            if isinstance(j, int):  # a single point in time across some channels
+            if isinstance(j, (int, np.integer)):  # a single point in time across some channels
                 obj = pq.Quantity(obj.magnitude, units=obj.units)
             else:
                 if isinstance(j, slice):
@@ -243,7 +248,7 @@ class IrregularlySampledSignal(BaseNeo, pq.Quantity):
                     raise NotImplementedError("Arrays not yet supported")
                 else:
                     raise TypeError("%s not supported" % type(j))
-                if isinstance(k, int):
+                if isinstance(k, (int, np.integer)):
                     obj = obj.reshape(-1, 1)
         elif isinstance(i, slice):
             obj.times = self.times.__getitem__(i)
@@ -286,23 +291,6 @@ class IrregularlySampledSignal(BaseNeo, pq.Quantity):
         return (super(IrregularlySampledSignal, self).__eq__(other).all() and
                 (self.times == other.times).all())
 
-    def __ne__(self, other):
-        '''
-        Non-equality test (!=)
-        '''
-        return not self.__eq__(other)
-
-    def _apply_operator(self, other, op, *args):
-        '''
-        Handle copying metadata to the new :class:`IrregularlySampledSignal`
-        after a mathematical operation.
-        '''
-        self._check_consistency(other)
-        f = getattr(super(IrregularlySampledSignal, self), op)
-        new_signal = f(other, *args)
-        new_signal._copy_data_complement(self)
-        return new_signal
-
     def _check_consistency(self, other):
         '''
         Check if the attributes of another :class:`IrregularlySampledSignal`
@@ -336,39 +324,6 @@ class IrregularlySampledSignal(BaseNeo, pq.Quantity):
                      "description", "annotations"):
             setattr(self, attr, getattr(other, attr, None))
 
-    def __add__(self, other, *args):
-        '''
-        Addition (+)
-        '''
-        return self._apply_operator(other, "__add__", *args)
-
-    def __sub__(self, other, *args):
-        '''
-        Subtraction (-)
-        '''
-        return self._apply_operator(other, "__sub__", *args)
-
-    def __mul__(self, other, *args):
-        '''
-        Multiplication (*)
-        '''
-        return self._apply_operator(other, "__mul__", *args)
-
-    def __truediv__(self, other, *args):
-        '''
-        Float division (/)
-        '''
-        return self._apply_operator(other, "__truediv__", *args)
-
-    def __div__(self, other, *args):
-        '''
-        Integer division (//)
-        '''
-        return self._apply_operator(other, "__div__", *args)
-
-    __radd__ = __add__
-    __rmul__ = __sub__
-
     def __rsub__(self, other, *args):
         '''
         Backwards subtraction (other-self)
@@ -456,6 +411,8 @@ class IrregularlySampledSignal(BaseNeo, pq.Quantity):
             signal = cf * self.magnitude
         new = self.__class__(times=self.times, signal=signal, units=to_u)
         new._copy_data_complement(self)
+        new.channel_index = self.channel_index
+        new.segment = self.segment
         new.annotations.update(self.annotations)
         return new
 
@@ -534,21 +491,4 @@ class IrregularlySampledSignal(BaseNeo, pq.Quantity):
         
         new_st = self[id_start:id_stop]
 
-        return new_st
-
-    def as_array(self, units=None):
-        """
-        Return the signal as a plain NumPy array.
-
-        If `units` is specified, first rescale to those units.
-        """
-        if units:
-            return self.rescale(units).magnitude
-        else:
-            return self.magnitude
-
-    def as_quantity(self):
-        """
-        Return the signal as a quantities array.
-        """
-        return self.view(pq.Quantity)
+        return new_st

+ 96 - 30
code/python-neo/neo/core/spiketrain.py

@@ -20,11 +20,12 @@ the old object.
 
 # needed for python 3 compatibility
 from __future__ import absolute_import, division, print_function
+import sys
 
+import copy
 import numpy as np
 import quantities as pq
-
-from neo.core.baseneo import BaseNeo
+from neo.core.baseneo import BaseNeo, MergeError, merge_annotations
 
 
 def check_has_dimensions_time(*values):
@@ -68,6 +69,7 @@ def _check_time_in_range(value, t_start, t_stop, view=False):
         raise ValueError("The last spike (%s) is after t_stop (%s)" %
                          (value, t_stop))
 
+
 def _check_waveform_dimensions(spiketrain):
     '''
     Verify that waveform is compliant with the waveform definition as
@@ -198,8 +200,8 @@ class SpikeTrain(BaseNeo, pq.Quantity):
     _single_parent_objects = ('Segment', 'Unit')
     _quantity_attr = 'times'
     _necessary_attrs = (('times', pq.Quantity, 1),
-                       ('t_start', pq.Quantity, 0),
-                       ('t_stop', pq.Quantity, 0))
+                        ('t_start', pq.Quantity, 0),
+                        ('t_stop', pq.Quantity, 0))
     _recommended_attrs = ((('waveforms', pq.Quantity, 3),
                            ('left_sweep', pq.Quantity, 0),
                            ('sampling_rate', pq.Quantity, 0)) +
@@ -215,9 +217,12 @@ class SpikeTrain(BaseNeo, pq.Quantity):
         This is called whenever a new :class:`SpikeTrain` is created from the
         constructor, but not when slicing.
         '''
-        if  len(times)!=0 and waveforms is not None and len(times) != waveforms.shape[0]: #len(times)!=0 has been used to workaround a bug occuring during neo import)
-            raise ValueError("the number of waveforms should be equal to the number of spikes")
-        
+        if len(times) != 0 and waveforms is not None and len(times) != \
+                waveforms.shape[
+                    0]:  # len(times)!=0 has been used to work around a bug occurring during neo import)
+            raise ValueError(
+                "the number of waveforms should be equal to the number of spikes")
+
         # Make sure units are consistent
         # also get the dimensionality now since it is much faster to feed
         # that to Quantity rather than a unit
@@ -277,14 +282,14 @@ class SpikeTrain(BaseNeo, pq.Quantity):
         # using items() is orders of magnitude faster
         if (hasattr(t_start, 'dtype') and t_start.dtype == obj.dtype and
                 hasattr(t_start, 'dimensionality') and
-                t_start.dimensionality.items() == dim.items()):
+                    t_start.dimensionality.items() == dim.items()):
             obj.t_start = t_start.copy()
         else:
             obj.t_start = pq.Quantity(t_start, units=dim, dtype=obj.dtype)
 
         if (hasattr(t_stop, 'dtype') and t_stop.dtype == obj.dtype and
                 hasattr(t_stop, 'dimensionality') and
-                t_stop.dimensionality.items() == dim.items()):
+                    t_stop.dimensionality.items() == dim.items()):
             obj.t_stop = t_stop.copy()
         else:
             obj.t_stop = pq.Quantity(t_stop, units=dim, dtype=obj.dtype)
@@ -303,7 +308,7 @@ class SpikeTrain(BaseNeo, pq.Quantity):
 
         return obj
 
-    def __init__(self, times, t_stop, units=None,  dtype=np.float,
+    def __init__(self, times, t_stop, units=None, dtype=np.float,
                  copy=True, sampling_rate=1.0 * pq.Hz, t_start=0.0 * pq.s,
                  waveforms=None, left_sweep=None, name=None, file_origin=None,
                  description=None, **annotations):
@@ -328,12 +333,15 @@ class SpikeTrain(BaseNeo, pq.Quantity):
         if self.dimensionality == pq.quantity.validate_dimensionality(units):
             return self.copy()
         spikes = self.view(pq.Quantity)
-        return SpikeTrain(times=spikes, t_stop=self.t_stop, units=units,
-                          sampling_rate=self.sampling_rate,
-                          t_start=self.t_start, waveforms=self.waveforms,
-                          left_sweep=self.left_sweep, name=self.name,
-                          file_origin=self.file_origin,
-                          description=self.description, **self.annotations)
+        obj = SpikeTrain(times=spikes, t_stop=self.t_stop, units=units,
+                         sampling_rate=self.sampling_rate,
+                         t_start=self.t_start, waveforms=self.waveforms,
+                         left_sweep=self.left_sweep, name=self.name,
+                         file_origin=self.file_origin,
+                         description=self.description, **self.annotations)
+        obj.segment = self.segment
+        obj.unit = self.unit
+        return obj
 
     def __reduce__(self):
         '''
@@ -422,16 +430,7 @@ class SpikeTrain(BaseNeo, pq.Quantity):
 
         Doesn't get called in Python 3, :meth:`__getitem__` is called instead
         '''
-        # first slice the Quantity array
-        obj = super(SpikeTrain, self).__getslice__(i, j)
-        # somehow this knows to call SpikeTrain.__array_finalize__, though
-        # I'm not sure how. (If you know, please add an explanatory comment
-        # here.) That copies over all of the metadata.
-
-        # update waveforms
-        if obj.waveforms is not None:
-            obj.waveforms = obj.waveforms[i:j]
-        return obj
+        return self.__getitem__(slice(i, j))
 
     def __add__(self, time):
         '''
@@ -496,16 +495,19 @@ class SpikeTrain(BaseNeo, pq.Quantity):
         _check_time_in_range(value, self.t_start, self.t_stop)
         super(SpikeTrain, self).__setslice__(i, j, value)
 
-    def _copy_data_complement(self, other):
+    def _copy_data_complement(self, other, deep_copy=False):
         '''
         Copy the metadata from another :class:`SpikeTrain`.
         '''
         for attr in ("left_sweep", "sampling_rate", "name", "file_origin",
                      "description", "annotations"):
-            setattr(self, attr, getattr(other, attr, None))
+            attr_value = getattr(other, attr, None)
+            if deep_copy:
+                attr_value = copy.deepcopy(attr_value)
+            setattr(self, attr, attr_value)
 
     def duplicate_with_new_data(self, signal, t_start=None, t_stop=None,
-                                waveforms=None):
+                                waveforms=None, deep_copy=True):
         '''
         Create a new :class:`SpikeTrain` with the same metadata
         but different data (times, t_start, t_stop)
@@ -520,7 +522,7 @@ class SpikeTrain(BaseNeo, pq.Quantity):
 
         new_st = self.__class__(signal, t_start=t_start, t_stop=t_stop,
                                 waveforms=waveforms, units=self.units)
-        new_st._copy_data_complement(self)
+        new_st._copy_data_complement(self, deep_copy=deep_copy)
 
         # overwriting t_start and t_stop with new values
         new_st.t_start = t_start
@@ -554,6 +556,70 @@ class SpikeTrain(BaseNeo, pq.Quantity):
 
         return new_st
 
+    def merge(self, other):
+        '''
+        Merge another :class:`SpikeTrain` into this one.
+
+        The times of the :class:`SpikeTrain` objects are combined in one array
+        and sorted.
+
+        If the attributes of the two :class:`SpikeTrain` are not
+        compatible, an Exception is raised.
+        '''
+        if self.sampling_rate != other.sampling_rate:
+            raise MergeError("Cannot merge, different sampling rates")
+        if self.t_start != other.t_start:
+            raise MergeError("Cannot merge, different t_start")
+        if self.t_stop != other.t_stop:
+            raise MergeError("Cannot merge, different t_stop")
+        if self.left_sweep != other.left_sweep:
+            raise MergeError("Cannot merge, different left_sweep")
+        if self.segment != other.segment:
+            raise MergeError("Cannot merge these two signals as they belong to"
+                             " different segments.")
+        if hasattr(self, "lazy_shape"):
+            if hasattr(other, "lazy_shape"):
+                merged_lazy_shape = (self.lazy_shape[0] + other.lazy_shape[0])
+            else:
+                raise MergeError("Cannot merge a lazy object with a real"
+                                 " object.")
+        if other.units != self.units:
+            other = other.rescale(self.units)
+        wfs = [self.waveforms is not None, other.waveforms is not None]
+        if any(wfs) and not all(wfs):
+            raise MergeError("Cannot merge signal with waveform and signal "
+                             "without waveform.")
+        stack = np.concatenate((np.asarray(self), np.asarray(other)))
+        sorting = np.argsort(stack)
+        stack = stack[sorting]
+        kwargs = {}
+        for name in ("name", "description", "file_origin"):
+            attr_self = getattr(self, name)
+            attr_other = getattr(other, name)
+            if attr_self == attr_other:
+                kwargs[name] = attr_self
+            else:
+                kwargs[name] = "merge(%s, %s)" % (attr_self, attr_other)
+        merged_annotations = merge_annotations(self.annotations,
+                                               other.annotations)
+        kwargs.update(merged_annotations)
+        train = SpikeTrain(stack, units=self.units, dtype=self.dtype,
+                           copy=False, t_start=self.t_start,
+                           t_stop=self.t_stop,
+                           sampling_rate=self.sampling_rate,
+                           left_sweep=self.left_sweep, **kwargs)
+        if all(wfs):
+            wfs_stack = np.vstack((self.waveforms, other.waveforms))
+            wfs_stack = wfs_stack[sorting]
+            train.waveforms = wfs_stack
+        train.segment = self.segment
+        if train.segment is not None:
+            self.segment.spiketrains.append(train)
+
+        if hasattr(self, "lazy_shape"):
+            train.lazy_shape = merged_lazy_shape
+        return train
+
     @property
     def times(self):
         '''

+ 2 - 1
code/python-neo/neo/io/__init__.py

@@ -30,7 +30,8 @@ Classes:
 
 .. autoclass:: neo.io.ElanIO
 
-.. autoclass:: neo.io.ElphyIO
+..
+  .. autoclass:: neo.io.ElphyIO
 
 .. autoclass:: neo.io.IgorIO
 

+ 4 - 4
code/python-neo/neo/io/axonio.py

@@ -46,7 +46,6 @@ import quantities as pq
 
 from neo.io.baseio import BaseIO
 from neo.core import Block, Segment, AnalogSignal, Event
-from neo.io.tools import iteritems
 
 
 class StructFile(BufferedReader):
@@ -302,7 +301,7 @@ class AxonIO(BaseIO):
 
                     anaSig = AnalogSignal(signal, sampling_rate=sampling_rate,
                                           t_start=t_start,
-                                          name=name.decode("utf-8"),
+                                          name=str(name.decode("utf-8")),
                                           channel_index=int(num))
                     if lazy:
                         anaSig.lazy_shape = length / nbchannel
@@ -576,7 +575,7 @@ class AxonIO(BaseIO):
                     # Go over EpochInfoPerDAC and change the analog signal
                     # according to the epochs
                     epochInfo = header['dictEpochInfoPerDAC'][DACNum]
-                    for epochNum, epoch in iteritems(epochInfo):
+                    for epochNum, epoch in epochInfo.items():
                         i_begin = i_last
                         i_end = i_last + epoch['lEpochInitDuration'] +\
                             epoch['lEpochDurationInc'] * epiNum
@@ -585,7 +584,8 @@ class AxonIO(BaseIO):
                             pq.Quantity(1, unit) * (epoch['fEpochInitLevel'] +
                                                     epoch['fEpochLevelInc'] *
                                                     epiNum)
-                        i_last += epoch['lEpochInitDuration']
+                        i_last += epoch['lEpochInitDuration'] +\
+                            epoch['lEpochDurationInc'] * epiNum
                 seg.analogsignals.append(ana_sig)
             segments.append(seg)
 

+ 57 - 28
code/python-neo/neo/io/blackrockio.py

@@ -249,6 +249,7 @@ class BlackrockIO(BaseIO):
 
         # check which files are available
         self._avail_files = dict.fromkeys(self.extensions, False)
+
         self._avail_nsx = []
         for ext in self.extensions:
             if ext.startswith('ns'):
@@ -264,6 +265,20 @@ class BlackrockIO(BaseIO):
                 if ext.startswith('ns'):
                     self._avail_nsx.append(int(ext[-1]))
 
+        # check if there are any files present
+        if not any(list(self._avail_files.values())):
+            raise IOError(
+                'No Blackrock files present at {}'.format(filename))
+
+        # check if manually specified files were found
+        exts = ['nsx', 'nev', 'sif', 'ccf']
+        ext_overrides = [nsx_override, nev_override, sif_override, ccf_override]
+        for ext, ext_override in zip(exts, ext_overrides):
+            if ext_override is not None and self._avail_files[ext] == False:
+                raise ValueError('Specified {} file {} could not be '
+                                 'found.'.format(ext, ext_override))
+
+
         # These dictionaries are used internally to map the file specification
         # revision of the nsx and nev files to one of the reading routines
         self.__nsx_header_reader = {
@@ -421,7 +436,7 @@ class BlackrockIO(BaseIO):
         dt1 = [('electrode_id', 'uint32')]
 
         nsx_ext_header = np.memmap(
-            filename, shape=shape, offset=offset_dt0, dtype=dt1)
+            filename, mode='r', shape=shape, offset=offset_dt0, dtype=dt1)
 
         return nsx_basic_header, nsx_ext_header
 
@@ -485,7 +500,7 @@ class BlackrockIO(BaseIO):
             ('lo_freq_type', 'uint16')]  # 0=None, 1=Butterworth
 
         nsx_ext_header = np.memmap(
-            filename, shape=shape, offset=offset_dt0, dtype=dt1)
+            filename, mode='r', shape=shape, offset=offset_dt0, dtype=dt1)
 
         return nsx_basic_header, nsx_ext_header
 
@@ -501,7 +516,8 @@ class BlackrockIO(BaseIO):
             ('timestamp', 'uint32'),
             ('nb_data_points', 'uint32')]
 
-        return np.memmap(filename, dtype=dt2, shape=1, offset=offset)[0]
+        return np.memmap(
+            filename, mode='r', dtype=dt2, shape=1, offset=offset)[0]
 
     def __read_nsx_dataheader_variant_a(
             self, nsx_nb, filesize=None, offset=None):
@@ -562,7 +578,7 @@ class BlackrockIO(BaseIO):
         # read nsx data
         # store as dict for compatibility with higher file specs
         data = {1: np.memmap(
-            filename, dtype='int16', shape=shape, offset=offset)}
+            filename, mode='r', dtype='int16', shape=shape, offset=offset)}
 
         return data
 
@@ -584,7 +600,7 @@ class BlackrockIO(BaseIO):
 
             # read data
             data[data_bl] = np.memmap(
-                filename, dtype='int16', shape=shape, offset=offset)
+                filename, mode='r', dtype='int16', shape=shape, offset=offset)
 
         return data
 
@@ -638,7 +654,7 @@ class BlackrockIO(BaseIO):
             ('info_field', 'S24')]
 
         raw_ext_header = np.memmap(
-            filename, offset=offset_dt0, dtype=dt1, shape=shape)
+            filename, mode='r', offset=offset_dt0, dtype=dt1, shape=shape)
 
         nev_ext_header = {}
         for packet_id in ext_header_variants.keys():
@@ -715,7 +731,7 @@ class BlackrockIO(BaseIO):
             ('packet_id', 'uint16'),
             ('value', 'S{0}'.format(data_size - 6))]
 
-        raw_data = np.memmap(filename, offset=header_size, dtype=dt0)
+        raw_data = np.memmap(filename, mode='r', offset=header_size, dtype=dt0)
 
         masks = self.__nev_data_masks(raw_data['packet_id'])
         types = self.__nev_data_types(data_size)
@@ -1067,7 +1083,8 @@ class BlackrockIO(BaseIO):
         offset = \
             self.__get_file_size(filename) - \
             self.__nev_params('bytes_in_data_packets')
-        last_data_packet = np.memmap(filename, offset=offset, dtype=dt)[0]
+        last_data_packet = np.memmap(
+            filename, mode='r', offset=offset, dtype=dt)[0]
 
         n_starts = [0 * self.__nev_params('event_unit')]
         n_stops = [
@@ -1609,33 +1626,45 @@ class BlackrockIO(BaseIO):
 
         # define the higest time resolution
         # (for accurate manipulations of the time settings)
+        max_time = self.__get_max_time()
+        min_time = self.__get_min_time()
         highest_res = self.__nev_params('event_unit')
         user_n_starts = self.__transform_times(
-            user_n_starts, self.__get_min_time())
+            user_n_starts, min_time)
         user_n_stops = self.__transform_times(
-            user_n_stops, self.__get_max_time())
+            user_n_stops, max_time)
 
         # check if user provided as many n_starts as n_stops
         if len(user_n_starts) != len(user_n_stops):
             raise ValueError("n_starts and n_stops must be of equal length")
 
         # if necessary reset max n_stop to max time of file set
-        if user_n_starts[0] < self.__get_min_time():
-            user_n_starts[0] = self.__get_min_time()
-            self._print_verbose(
-                "First entry of n_start is smaller than min time of the file "
-                "set: n_start[0] set to min time of file set")
-        if user_n_starts[-1] > self.__get_max_time():
-            user_n_starts = user_n_starts[:-1]
-            user_n_stops = user_n_stops[:-1]
-            self._print_verbose(
-                "Last entry of n_start is larger than max time of the file "
-                "set: last n_start and n_stop entry are excluded")
-        if user_n_stops[-1] > self.__get_max_time():
-            user_n_stops[-1] = self.__get_max_time()
-            self._print_verbose(
-                "Last entry of n_stop is larger than max time of the file "
-                "set: n_stop[-1] set to max time of file set")
+        start_stop_id = 0
+        while start_stop_id < len(user_n_starts):
+            if user_n_starts[start_stop_id] < min_time:
+                user_n_starts[start_stop_id] = min_time
+                self._print_verbose(
+                    "Entry of n_start '{}' is smaller than min time of the file "
+                    "set: n_start set to min time of file set"
+                    "".format(user_n_starts[start_stop_id]))
+            if user_n_stops[start_stop_id] > max_time:
+                user_n_stops[start_stop_id] = max_time
+                self._print_verbose(
+                    "Entry of n_stop '{}' is larger than max time of the file "
+                    "set: n_stop set to max time of file set"
+                    "".format(user_n_stops[start_stop_id]))
+
+            if (user_n_stops[start_stop_id] < min_time
+                or user_n_starts[start_stop_id] > max_time):
+                user_n_stops.pop(start_stop_id)
+                user_n_starts.pop(start_stop_id)
+                self._print_verbose(
+                    "Entry of n_start is larger than max time or entry of "
+                    "n_stop is smaller than min time of the "
+                    "file set: n_start and n_stop are ignored")
+                continue
+            start_stop_id += 1
+
 
         # get intrinsic time settings of nsx files (incl. rec pauses)
         n_starts_files = []
@@ -1769,7 +1798,7 @@ class BlackrockIO(BaseIO):
         # get spike times for given time interval
         if not lazy:
             times = spikes['timestamp'] * event_unit
-            mask = (times >= n_start) & (times < n_stop)
+            mask = (times >= n_start) & (times <= n_stop)
             times = times[mask].astype(float)
         else:
             times = np.array([]) * event_unit
@@ -2014,7 +2043,7 @@ class BlackrockIO(BaseIO):
         # additional information about the LFP signal
         if self.__nev_spec in ['2.2', '2.3'] and self.__nsx_ext_header:
             # It does not matter which nsX file to ask for this info
-            k = self.__nsx_ext_header.keys()[0]
+            k = list(self.__nsx_ext_header.keys())[0]
             if channel_id in self.__nsx_ext_header[k]['electrode_id']:
                 get_idx = list(
                     self.__nsx_ext_header[k]['electrode_id']).index(

+ 6 - 4
code/python-neo/neo/io/elanio.py

@@ -38,6 +38,7 @@ class VersionError(Exception):
     def __str__(self):
         return repr(self.value)
 
+import io
 
 class ElanIO(BaseIO):
     """
@@ -92,7 +93,7 @@ class ElanIO(BaseIO):
 
         # # Read header file
 
-        f = open(self.filename + '.ent', 'rU')
+        f = io.open(self.filename + '.ent', mode='rt', encoding='ascii')
         #version
         version = f.readline()
         if version[:2] != 'V2' and version[:2] != 'V3':
@@ -142,6 +143,7 @@ class ElanIO(BaseIO):
                       rec_datetime=fulldatetime)
 
         if not cascade:
+            f.close()
             return seg
 
         l = f.readline()
@@ -197,7 +199,7 @@ class ElanIO(BaseIO):
         n = int(round(np.log(max_logic[0] - min_logic[0]) / np.log(2)) / 8)
         data = np.fromfile(self.filename, dtype='i' + str(n))
         data = data.byteswap().reshape(
-            (data.size / (nbchannel + 2), nbchannel + 2)).astype('f4')
+            (data.size // (nbchannel + 2), nbchannel + 2)).astype('float32')
         for c in range(nbchannel):
             if lazy:
                 sig = []
@@ -213,7 +215,7 @@ class ElanIO(BaseIO):
 
             ana_sig = AnalogSignal(
                 sig * unit, sampling_rate=sampling_rate,
-                t_start=0. * pq.s, name=labels[c], channel_index=c)
+                t_start=0. * pq.s, name=str(labels[c]), channel_index=c)
             if lazy:
                 ana_sig.lazy_shape = data.shape[0]
             ana_sig.annotate(channel_name=labels[c])
@@ -235,7 +237,7 @@ class ElanIO(BaseIO):
             reject_codes = []
         else:
             times = np.array(times) * pq.s
-            labels = np.array(labels)
+            labels = np.array(labels, dtype='S')
             reject_codes = np.array(reject_codes)
         ea = Event(times=times, labels=labels, reject_codes=reject_codes)
         if lazy:

+ 20 - 2
code/python-neo/neo/io/hdf5io.py

@@ -6,6 +6,7 @@
 
 from __future__ import absolute_import
 
+import sys
 import logging
 import pickle
 import numpy as np
@@ -88,6 +89,8 @@ class NeoHdf5IO(BaseIO):
 
     def _read_block(self, node):
         attributes = self._get_standard_attributes(node)
+        if "index" in attributes:
+            attributes["index"] = int(attributes["index"])
         block = Block(**attributes)
 
         if self._cascade:
@@ -340,8 +343,23 @@ class NeoHdf5IO(BaseIO):
                 attributes[name] = node.attrs[name]
         for name in ('rec_datetime', 'file_datetime'):
             if name in node.attrs:
-                attributes[name] = pickle.loads(node.attrs[name])
-        attributes.update(pickle.loads(node.attrs['annotations']))
+                if sys.version_info.major > 2:
+                    attributes[name] = pickle.loads(node.attrs[name], encoding='bytes')
+                else:  # Python 2 doesn't have the encoding argument
+                    attributes[name] = pickle.loads(node.attrs[name])
+        if sys.version_info.major > 2:
+            annotations = pickle.loads(node.attrs['annotations'], encoding='bytes')
+        else:
+            annotations = pickle.loads(node.attrs['annotations'])
+        attributes.update(annotations)
+        attribute_names = list(attributes.keys())  # avoid "dictionary changed size during iteration" error
+        if sys.version_info.major > 2:
+            for name in attribute_names:
+                if isinstance(attributes[name], (bytes, np.bytes_)):
+                    attributes[name] = attributes[name].decode('utf-8')
+                if isinstance(name, bytes):
+                    attributes[name.decode('utf-8')] = attributes[name]
+                    attributes.pop(name)
         return attributes
 
     def _resolve_channel_indexes(self, block):

+ 49 - 18
code/python-neo/neo/io/igorproio.py

@@ -1,12 +1,14 @@
 # -*- coding: utf-8 -*-
 """
-Class for reading data created by IGOR Pro (WaveMetrics, Inc., Portland, OR, USA)
+Class for reading data created by IGOR Pro 
+(WaveMetrics, Inc., Portland, OR, USA)
 
 Depends on: igor (https://pypi.python.org/pypi/igor/)
 
 Supported: Read
 
 Author: Andrew Davison
+Also contributing: Rick Gerkin
 
 """
 
@@ -18,6 +20,7 @@ from neo.io.baseio import BaseIO
 from neo.core import Block, Segment, AnalogSignal
 try:
     import igor.binarywave as bw
+    import igor.packed as pxp
     HAVE_IGOR = True
 except ImportError:
     HAVE_IGOR = False
@@ -25,7 +28,8 @@ except ImportError:
 
 class IgorIO(BaseIO):
     """
-    Class for reading Igor Binary Waves (.ibw) written by WaveMetrics’ IGOR Pro software.
+    Class for reading Igor Binary Waves (.ibw) written by WaveMetrics’ 
+    IGOR Pro software.
 
     Support for Packed Experiment (.pxp) files is planned.
 
@@ -47,7 +51,7 @@ class IgorIO(BaseIO):
     has_header = False
     is_streameable = False
     name = 'igorpro'
-    extensions = ['ibw'] #, 'pxp']
+    extensions = ['ibw', 'pxp']
     mode = 'file'
 
     def __init__(self, filename=None, parse_notes=None) :
@@ -63,7 +67,10 @@ class IgorIO(BaseIO):
 
         """
         BaseIO.__init__(self)
+        assert any([filename.endswith('.%s' % x) for x in self.extensions]), \
+            "Only the following extensions are supported: %s" % self.extensions
         self.filename = filename
+        self.extension = filename.split('.')[-1]
         self.parse_notes = parse_notes
 
     def read_block(self, lazy=False, cascade=True):
@@ -76,20 +83,35 @@ class IgorIO(BaseIO):
     def read_segment(self, lazy=False, cascade=True):
         segment = Segment(file_origin=self.filename)
         if cascade:
-            segment.analogsignals.append(self.read_analogsignal(lazy=lazy, cascade=cascade))
+            segment.analogsignals.append(
+                self.read_analogsignal(lazy=lazy, cascade=cascade))
             segment.analogsignals[-1].segment = segment
         return segment
 
-    def read_analogsignal(self, lazy=False, cascade=True):
+    def read_analogsignal(self, path=None, lazy=False, cascade=True):
         if not HAVE_IGOR:
-            raise Exception("igor package not installed. Try `pip install igor`")
-        data = bw.load(self.filename)
-        version = data['version']
-        if version > 3:
-            raise IOError("Igor binary wave file format version {0} is not supported.".format(version))
+            raise Exception(("`igor` package not installed. "
+                             "Try `pip install igor`"))
+        if self.extension == 'ibw':
+            data = bw.load(self.filename)
+            version = data['version']
+            if version > 5:
+                raise IOError(("Igor binary wave file format version {0} "
+                               "is not supported.".format(version)))
+        elif self.extension == 'pxp':
+            assert type(path) is str, \
+                "A colon-separated Igor-style path must be provided."
+            _,filesystem = pxp.load(self.filename)
+            path = path.split(':')
+            location = filesystem['root']
+            for element in path:
+                if element != 'root':
+                    location = location[element.encode('utf8')]
+            data = location.wave
         content = data['wave']
         if "padding" in content:
-            assert content['padding'].size == 0, "Cannot handle non-empty padding"
+            assert content['padding'].size == 0, \
+                "Cannot handle non-empty padding"
         if lazy:
             # not really lazy, since the `igor` module loads the data anyway
             signal = np.array((), dtype=content['wData'].dtype)
@@ -98,12 +120,20 @@ class IgorIO(BaseIO):
         note = content['note']
         header = content['wave_header']
         name = header['bname']
-        assert header['botFullScale'] == 0
-        assert header['topFullScale'] == 0
-        units = "".join(header['dataUnits'])
-        time_units = "".join(header['xUnits']) or "s"
-        t_start = pq.Quantity(header['hsB'], time_units)
-        sampling_period = pq.Quantity(header['hsA'], time_units)
+        units = "".join([x.decode() for x in header['dataUnits']])
+        try:
+            time_units = "".join([x.decode() for x in header['xUnits']])
+            assert len(time_units)
+        except:
+            time_units = "s"
+        try:
+            t_start = pq.Quantity(header['hsB'], time_units)
+        except KeyError:
+            t_start = pq.Quantity(header['sfB'][0], time_units)
+        try:
+            sampling_period = pq.Quantity(header['hsA'], time_units)
+        except:
+            sampling_period = pq.Quantity(header['sfA'][0], time_units)
         if self.parse_notes:
             try:
                 annotations = self.parse_notes(note)
@@ -132,7 +162,8 @@ def key_value_string_parser(itemsep=";", kvsep=":"):
         kvsep - character which separates the key and value within an item
 
     Returns:
-        a function which takes the string to be parsed as the sole argument and returns a dict.
+        a function which takes the string to be parsed as the sole argument 
+        and returns a dict.
 
     Example:
 

+ 13 - 16
code/python-neo/neo/io/micromedio.py

@@ -14,12 +14,7 @@ import datetime
 import os
 import struct
 
-# file no longer exists in Python3
-try:
-    file
-except NameError:
-    import io
-    file = io.BufferedReader
+from io import open, BufferedReader
 
 import numpy as np
 import quantities as pq
@@ -28,7 +23,7 @@ from neo.io.baseio import BaseIO
 from neo.core import Segment, AnalogSignal, Epoch, Event
 
 
-class StructFile(file):
+class StructFile(BufferedReader):
     def read_f(self, fmt):
         return struct.unpack(fmt, self.read(struct.calcsize(fmt)))
 
@@ -76,16 +71,16 @@ class MicromedIO(BaseIO):
         """
         Arguments:
         """
-        f = StructFile(self.filename, 'rb')
+        f = StructFile(open(self.filename, 'rb'))
 
         # Name
         f.seek(64, 0)
-        surname = f.read(22)
+        surname = f.read(22).decode('ascii')
         while surname[-1] == ' ':
             if len(surname) == 0:
                 break
             surname = surname[:-1]
-        firstname = f.read(20)
+        firstname = f.read(20).decode('ascii')
         while firstname[-1] == ' ':
             if len(firstname) == 0:
                 break
@@ -107,13 +102,14 @@ class MicromedIO(BaseIO):
         header_version, = f.read_f('b')
         assert header_version == 4
 
-        seg = Segment(name=firstname + ' ' + surname,
+        seg = Segment(name=str(firstname + ' ' + surname),
                       file_origin=os.path.basename(self.filename))
         seg.annotate(surname=surname)
         seg.annotate(firstname=firstname)
         seg.annotate(rec_datetime=rec_datetime)
 
         if not cascade:
+            f.close()
             return seg
 
         # area
@@ -132,12 +128,12 @@ class MicromedIO(BaseIO):
         if not lazy:
             f.seek(Data_Start_Offset, 0)
             rawdata = np.fromstring(f.read(), dtype='u' + str(Bytes))
-            rawdata = rawdata.reshape((rawdata.size / Num_Chan, Num_Chan))
+            rawdata = rawdata.reshape((-1, Num_Chan))
 
         # Reading Code Info
         zname2, pos, length = zones['ORDER']
         f.seek(pos, 0)
-        code = np.fromfile(f, dtype='u2', count=Num_Chan)
+        code = np.fromstring(f.read(Num_Chan*2), dtype='u2', count=Num_Chan)
 
         units = {-1: pq.nano * pq.V, 0: pq.uV, 1: pq.mV, 2: 1, 100: pq.percent,
                  101: pq.dimensionless, 102: pq.dimensionless}
@@ -146,8 +142,8 @@ class MicromedIO(BaseIO):
             zname2, pos, length = zones['LABCOD']
             f.seek(pos + code[c] * 128 + 2, 0)
 
-            label = f.read(6).strip("\x00")
-            ground = f.read(6).strip("\x00")
+            label = f.read(6).strip(b"\x00").decode('ascii')
+            ground = f.read(6).strip(b"\x00").decode('ascii')
             (logical_min, logical_max, logical_ground, physical_min,
              physical_max) = f.read_f('iiiii')
             k, = f.read_f('h')
@@ -169,7 +165,7 @@ class MicromedIO(BaseIO):
                     'f') - logical_ground) * factor * unit
 
             ana_sig = AnalogSignal(signal, sampling_rate=sampling_rate,
-                                   name=label, channel_index=c)
+                                   name=str(label), channel_index=c)
             if lazy:
                 ana_sig.lazy_shape = None
             ana_sig.annotate(ground=ground)
@@ -222,4 +218,5 @@ class MicromedIO(BaseIO):
             seg.epochs.append(ep)
 
         seg.create_many_to_one_relationship()
+        f.close()
         return seg

+ 4 - 4
code/python-neo/neo/io/nestio.py

@@ -36,12 +36,12 @@ class NestIO(BaseIO):
     files for analog signals are possible.
 
     Usage:
-        from neo.io.nestio import NestIO
+        >>> from neo.io.nestio import NestIO
 
-        files = ['membrane_voltages-1261-0.dat',
+        >>> files = ['membrane_voltages-1261-0.dat',
                  'spikes-1258-0.gdf']
-        r = NestIO(filenames=files)
-        seg = r.read_segment(gid_list=[], t_start=400 * pq.ms,
+        >>> r = NestIO(filenames=files)
+        >>> seg = r.read_segment(gid_list=[], t_start=400 * pq.ms,
                              t_stop=600 * pq.ms,
                              id_column_gdf=0, time_column_gdf=1,
                              id_column_dat=0, time_column_dat=1,

+ 2 - 2
code/python-neo/neo/io/neuralynxio.py

@@ -1634,7 +1634,7 @@ class NeuralynxIO(BaseIO):
         if filesize > 16384:
             data = np.memmap(self.sessiondir + sep + filename,
                              dtype='<u2',
-                             shape=((filesize - 16384) / 2 / 56, 56),
+                             shape=((filesize - 16384) // 2 // 56, 56),
                              mode='r', offset=16384)
 
             # reconstructing original data
@@ -1679,7 +1679,7 @@ class NeuralynxIO(BaseIO):
         if filesize > 16384:
             data = np.memmap(self.sessiondir + sep + filename,
                              dtype='<u4',
-                             shape=((filesize - 16384) / 4 / 261, 261),
+                             shape=((filesize - 16384) // 4 // 261, 261),
                              mode='r', offset=16384)
 
             ts = data[:, 0:2]

+ 57 - 48
code/python-neo/neo/io/nixio.py

@@ -113,7 +113,8 @@ class NixIO(BaseIO):
                              "Valid modes: 'ro' (ReadOnly)', 'rw' (ReadWrite),"
                              " 'ow' (Overwrite).".format(mode))
         self.nix_file = nix.File.open(self.filename, filemode, backend="h5py")
-        self._object_map = dict()
+        self._neo_map = dict()
+        self._nix_map = dict()
         self._lazy_loaded = list()
         self._object_hashes = dict()
         self._block_read_counter = 0
@@ -156,7 +157,7 @@ class NixIO(BaseIO):
             self._read_cascade(nix_group, path, cascade, lazy)
         self._update_maps(neo_segment, lazy)
         nix_parent = self._get_parent(path)
-        neo_parent = self._get_mapped_object(nix_parent)
+        neo_parent = self._neo_map.get(nix_parent.name)
         if neo_parent:
             neo_segment.block = neo_parent
         return neo_segment
@@ -169,7 +170,7 @@ class NixIO(BaseIO):
             self._read_cascade(nix_source, path, cascade, lazy)
         self._update_maps(neo_rcg, lazy)
         nix_parent = self._get_parent(path)
-        neo_parent = self._get_mapped_object(nix_parent)
+        neo_parent = self._neo_map.get(nix_parent.name)
         neo_rcg.block = neo_parent
         return neo_rcg
 
@@ -196,7 +197,7 @@ class NixIO(BaseIO):
         if self._find_lazy_loaded(neo_signal) is None:
             self._update_maps(neo_signal, lazy)
             nix_parent = self._get_parent(path)
-            neo_parent = self._get_mapped_object(nix_parent)
+            neo_parent = self._neo_map.get(nix_parent.name)
             neo_signal.segment = neo_parent
         return neo_signal
 
@@ -212,7 +213,7 @@ class NixIO(BaseIO):
         neo_eest.path = path
         self._update_maps(neo_eest, lazy)
         nix_parent = self._get_parent(path)
-        neo_parent = self._get_mapped_object(nix_parent)
+        neo_parent = self._neo_map.get(nix_parent.name)
         neo_eest.segment = neo_parent
         return neo_eest
 
@@ -233,7 +234,7 @@ class NixIO(BaseIO):
             self._read_cascade(nix_source, path, cascade, lazy)
         self._update_maps(neo_unit, lazy)
         nix_parent = self._get_parent(path)
-        neo_parent = self._get_mapped_object(nix_parent)
+        neo_parent = self._neo_map.get(nix_parent.name)
         neo_unit.channel_index = neo_parent
         return neo_unit
 
@@ -243,7 +244,7 @@ class NixIO(BaseIO):
         neo_block.rec_datetime = datetime.fromtimestamp(
             nix_block.created_at
         )
-        self._object_map[nix_block.id] = neo_block
+        self._neo_map[nix_block.name] = neo_block
         return neo_block
 
     def _group_to_neo(self, nix_group):
@@ -252,7 +253,7 @@ class NixIO(BaseIO):
         neo_segment.rec_datetime = datetime.fromtimestamp(
             nix_group.created_at
         )
-        self._object_map[nix_group.id] = neo_segment
+        self._neo_map[nix_group.name] = neo_segment
         return neo_segment
 
     def _source_chx_to_neo(self, nix_source):
@@ -261,21 +262,24 @@ class NixIO(BaseIO):
                    for c in nix_source.sources
                    if c.type == "neo.channelindex")
         chan_names = list(c["neo_name"] for c in chx if "neo_name" in c)
+        chan_ids = list(c["channel_id"] for c in chx if "channel_id" in c)
         if chan_names:
             neo_attrs["channel_names"] = chan_names
+        if chan_ids:
+            neo_attrs["channel_ids"] = chan_ids
         neo_attrs["index"] = np.array([c["index"] for c in chx])
         if "coordinates" in chx[0]:
             coord_units = chx[0]["coordinates.units"]
             coord_values = list(c["coordinates"] for c in chx)
             neo_attrs["coordinates"] = pq.Quantity(coord_values, coord_units)
         rcg = ChannelIndex(**neo_attrs)
-        self._object_map[nix_source.id] = rcg
+        self._neo_map[nix_source.name] = rcg
         return rcg
 
     def _source_unit_to_neo(self, nix_unit):
         neo_attrs = self._nix_attr_to_neo(nix_unit)
         neo_unit = Unit(**neo_attrs)
-        self._object_map[nix_unit.id] = neo_unit
+        self._neo_map[nix_unit.name] = neo_unit
         return neo_unit
 
     def _signal_da_to_neo(self, nix_da_group, lazy):
@@ -336,7 +340,7 @@ class NixIO(BaseIO):
         else:
             return None
         for da in nix_da_group:
-            self._object_map[da.id] = neo_signal
+            self._neo_map[da.name] = neo_signal
         if lazy_shape:
             neo_signal.lazy_shape = lazy_shape
         return neo_signal
@@ -426,13 +430,13 @@ class NixIO(BaseIO):
                         )
         else:
             return None
-        self._object_map[nix_mtag.id] = eest
+        self._neo_map[nix_mtag.name] = eest
         if lazy_shape:
             eest.lazy_shape = lazy_shape
         return eest
 
     def _read_cascade(self, nix_obj, path, cascade, lazy):
-        neo_obj = self._object_map[nix_obj.id]
+        neo_obj = self._neo_map[nix_obj.name]
         for neocontainer in getattr(neo_obj, "_child_containers", []):
             nixcontainer = self._container_map[neocontainer]
             if not hasattr(nix_obj, nixcontainer):
@@ -460,7 +464,7 @@ class NixIO(BaseIO):
             parent_block_path = "/" + path.split("/")[1]
             parent_block = self._get_object_at(parent_block_path)
             ref_das = self._get_referers(nix_obj, parent_block.data_arrays)
-            ref_signals = self._get_mapped_objects(ref_das)
+            ref_signals = list(self._neo_map[da.name] for da in ref_das)
             # deduplicate by name
             ref_signals = list(dict((s.annotations["nix_name"], s)
                                     for s in ref_signals).values())
@@ -476,7 +480,7 @@ class NixIO(BaseIO):
             parent_block_path = "/" + path.split("/")[1]
             parent_block = self._get_object_at(parent_block_path)
             ref_mtags = self._get_referers(nix_obj, parent_block.multi_tags)
-            ref_sts = self._get_mapped_objects(ref_mtags)
+            ref_sts = list(self._neo_map[mt.name] for mt in ref_mtags)
             for st in ref_sts:
                 neo_obj.spiketrains.append(st)
                 st.unit = neo_obj
@@ -533,7 +537,7 @@ class NixIO(BaseIO):
             nix_name = "neo.{}.{}".format(objtype, self._generate_nix_name())
             obj.annotate(nix_name=nix_name)
         objpath = loc + containerstr + nix_name
-        oldhash = self._object_hashes.get(objpath)
+        oldhash = self._object_hashes.get(nix_name)
         if oldhash is None:
             try:
                 oldobj = self.get(objpath, cascade=False, lazy=False)
@@ -554,9 +558,16 @@ class NixIO(BaseIO):
             if isinstance(obj, pq.Quantity):
                 self._write_data(nixobj, attr, objpath)
         else:
-            nixobj = self._get_object_at(objpath)
-        self._object_map[id(obj)] = nixobj
-        self._object_hashes[objpath] = newhash
+            nixobj = self._nix_map.get(nix_name)
+            if nixobj is None:
+                nixobj = self._get_object_at(objpath)
+            else:
+                # object is already in file but may not be linked at objpath
+                objat = self._get_object_at(objpath)
+                if not objat:
+                    self._link_nix_obj(nixobj, loc, containerstr)
+        self._nix_map[nix_name] = nixobj
+        self._object_hashes[nix_name] = newhash
         self._write_cascade(obj, objpath)
 
     def _create_nix_obj(self, loc, attr):
@@ -615,6 +626,15 @@ class NixIO(BaseIO):
             raise ValueError("Unable to create NIX object. Invalid type.")
         return nixobj
 
+    def _link_nix_obj(self, obj, loc, neocontainer):
+        parentobj = self._get_object_at(loc)
+        container = getattr(parentobj,
+                            self._container_map[neocontainer.strip("/")])
+        if isinstance(obj, list):
+            container.extend(obj)
+        else:
+            container.append(obj)
+
     def write_block(self, bl, loc=""):
         """
         Convert ``bl`` to the NIX equivalent and write it to the file.
@@ -654,7 +674,7 @@ class NixIO(BaseIO):
         :param chx: The Neo ChannelIndex
         :param loc: Path to the CHX
         """
-        nixsource = self._get_mapped_object(chx)
+        nixsource = self._nix_map[chx.annotations["nix_name"]]
         for idx, channel in enumerate(chx.index):
             channame = "{}.ChannelIndex{}".format(chx.annotations["nix_name"],
                                                   idx)
@@ -668,10 +688,13 @@ class NixIO(BaseIO):
                 )
             nixchan.definition = nixsource.definition
             chanmd = nixchan.metadata
+            chanmd["index"] = nix.Value(int(channel))
             if len(chx.channel_names):
                 neochanname = stringify(chx.channel_names[idx])
                 chanmd["neo_name"] = nix.Value(neochanname)
-            chanmd["index"] = nix.Value(int(channel))
+            if len(chx.channel_ids):
+                chanid = chx.channel_ids[idx]
+                chanmd["channel_id"] = nix.Value(chanid)
             if chx.coordinates is not None:
                 coords = chx.coordinates[idx]
                 coordunits = stringify(coords[0].dimensionality)
@@ -782,27 +805,28 @@ class NixIO(BaseIO):
          NIX objects.
         """
         for seg in block.segments:
-            group = self._get_mapped_object(seg)
+            group = self._nix_map[seg.annotations["nix_name"]]
             group_signals = self._get_contained_signals(group)
             for mtag in group.multi_tags:
                 if mtag.type in ("neo.epoch", "neo.event"):
                     mtag.references.extend([sig for sig in group_signals
                                             if sig not in mtag.references])
         for rcg in block.channel_indexes:
-            rcgsource = self._get_mapped_object(rcg)
-            das = self._get_mapped_objects(rcg.analogsignals +
-                                           rcg.irregularlysampledsignals)
+            chidxsrc = self._nix_map[rcg.annotations["nix_name"]]
+            das = list(self._nix_map[sig.annotations["nix_name"]]
+                       for sig in rcg.analogsignals +
+                       rcg.irregularlysampledsignals)
             # flatten nested lists
             das = [da for dalist in das for da in dalist]
             for da in das:
-                if rcgsource not in da.sources:
-                    da.sources.append(rcgsource)
+                if chidxsrc not in da.sources:
+                    da.sources.append(chidxsrc)
             for unit in rcg.units:
-                unitsource = self._get_mapped_object(unit)
+                unitsource = self._nix_map[unit.annotations["nix_name"]]
                 for st in unit.spiketrains:
-                    stmtag = self._get_mapped_object(st)
-                    if rcgsource not in stmtag.sources:
-                        stmtag.sources.append(rcgsource)
+                    stmtag = self._nix_map[st.annotations["nix_name"]]
+                    if chidxsrc not in stmtag.sources:
+                        stmtag.sources.append(chidxsrc)
                     if unitsource not in stmtag.sources:
                         stmtag.sources.append(unitsource)
 
@@ -853,21 +877,6 @@ class NixIO(BaseIO):
         parent_obj = self._get_object_at(parent_path)
         return parent_obj
 
-    def _get_mapped_objects(self, object_list):
-        return list(map(self._get_mapped_object, object_list))
-
-    def _get_mapped_object(self, obj):
-        # We could use paths here instead
-        try:
-            if hasattr(obj, "id"):
-                return self._object_map[obj.id]
-            else:
-                return self._object_map[id(obj)]
-        except KeyError:
-            # raise KeyError("Failed to find mapped object for {}. "
-            #                "Object not yet converted.".format(obj))
-            return None
-
     def _write_attr_annotations(self, nixobj, attr, path):
         if isinstance(nixobj, list):
             metadata = nixobj[0].metadata
@@ -970,7 +979,8 @@ class NixIO(BaseIO):
         elif not lazy and objidx is not None:
             self._lazy_loaded.pop(objidx)
         if not lazy:
-            self._object_hashes[obj.path] = self._hash_object(obj)
+            nix_name = obj.annotations["nix_name"]
+            self._object_hashes[nix_name] = self._hash_object(obj)
 
     def _find_lazy_loaded(self, obj):
         """
@@ -1260,7 +1270,6 @@ class NixIO(BaseIO):
                 self.nix_file and self.nix_file.is_open()):
             self.nix_file.close()
             self.nix_file = None
-            self._object_map = None
             self._lazy_loaded = None
             self._object_hashes = None
             self._block_read_counter = None

+ 159 - 35
code/python-neo/neo/io/nsdfio.py

@@ -29,24 +29,19 @@ else:
     NSDF_ERR = None
 
 from neo.io.baseio import BaseIO
-from neo.core import Block, Segment, AnalogSignal
+from neo.core import Block, Segment, AnalogSignal, ChannelIndex
 
 
 class NSDFIO(BaseIO):
     """
     Class for reading and writing files in NSDF Format.
 
-    It supports reading and writing:
-    - :class:'Block'
-    - :class:'Segment'
-    - :class:'AnalogSignal'
-
-    with all relationships and metadata.
+    It supports reading and writing: Block, Segment, AnalogSignal, ChannelIndex, with all relationships and metadata.
     """
     is_readable = True
     is_writable = True
 
-    supported_objects = [Block, Segment, AnalogSignal]
+    supported_objects = [Block, Segment, AnalogSignal, ChannelIndex]
 
     readable_objects = [Block, Segment]
     writeable_objects = [Block, Segment]
@@ -83,14 +78,12 @@ class NSDFIO(BaseIO):
         :param blocks: List of blocks to be written
         """
         writer = self._init_writing()
-        neo_model, blocks_model, segments_model = self._prepare_model_tree()
+        neo_model, blocks_model, segments_model = self._prepare_model_tree(writer)
 
         name_pattern = self._name_pattern(len(blocks))
         for i, block in enumerate(blocks):
             self.write_block(block, name_pattern.format(i), writer, blocks_model)
 
-        writer.add_modeltree(neo_model)
-
     def write_block(self, block = None, name='0', writer=None, parent=None):
         """
         Write a Block to the file
@@ -106,26 +99,34 @@ class NSDFIO(BaseIO):
         if writer is None:
             writer = self._init_writing()
 
-        single_block = False
         if parent is None:
-            neo_model, parent, segments_model = self._prepare_model_tree()
-            single_block = True
+            neo_model, parent, segments_model = self._prepare_model_tree(writer)
 
         block_model = nsdf.ModelComponent(name, uid=uuid1().hex, parent=parent)
+        self._write_container_metadata(block, block_model)
+        self._write_model_component(block_model, writer)
 
         self._write_block_children(block, block_model, writer)
-        self._write_container_metadata(block, block_model)
 
-        if single_block:
-            writer.add_modeltree(neo_model)
+        self._clean_nsdfio_annotations(block)
+
 
     def _write_block_children(self, block, block_model, writer):
         segments_model = nsdf.ModelComponent(name='segments', uid=uuid1().hex, parent=block_model)
-        name_pattern = '{{:0{}d}}'.format(self._number_of_digits(max(len(block.segments) - 1, 0)))
+        self._write_model_component(segments_model, writer)
+        name_pattern = self._name_pattern(len(block.segments))
         for i, segment in enumerate(block.segments):
             self.write_segment(segment=segment, name=name_pattern.format(i),
                                writer=writer, parent=segments_model)
 
+        channel_indexes_model = nsdf.ModelComponent(name='channel_indexes', uid=uuid1().hex, parent=block_model)
+        self._write_model_component(channel_indexes_model, writer)
+        name_pattern = self._name_pattern(len(block.channel_indexes))
+        for i, channelindex in enumerate(block.channel_indexes):
+            self.write_channelindex(channelindex=channelindex, name=name_pattern.format(i),
+                                    writer=writer, parent=channel_indexes_model)
+
+
     def write_segment(self, segment = None, name='0', writer=None, parent=None):
         """
         Write a Segment to the file
@@ -143,26 +144,27 @@ class NSDFIO(BaseIO):
 
         single_segment = False
         if parent is None:
-            neo_model, blocks_model, parent = self._prepare_model_tree()
+            neo_model, blocks_model, parent = self._prepare_model_tree(writer)
             single_segment = True
 
         model = nsdf.ModelComponent(name, uid=uuid1().hex, parent=parent)
+        self._write_container_metadata(segment, model)
+        self._write_model_component(model, writer)
 
         self._write_segment_children(model, segment, writer)
-        self._write_container_metadata(segment, model)
 
         if single_segment:
-            writer.add_modeltree(neo_model)
+            self._clean_nsdfio_annotations(segment)
 
     def _write_segment_children(self, model, segment, writer):
         analogsignals_model = nsdf.ModelComponent(name='analogsignals', uid=uuid1().hex, parent=model)
-        name_pattern = '{{:0{}d}}'.format(self._number_of_digits(max(len(segment.analogsignals) - 1, 0)))
+        self._write_model_component(analogsignals_model, writer)
+        name_pattern = self._name_pattern(len(segment.analogsignals))
         for i, signal in enumerate(segment.analogsignals):
-            self.write_analogsignal(signal=signal,
-                                    name=name_pattern.format(i),
+            self.write_analogsignal(signal=signal, name=name_pattern.format(i),
                                     parent=analogsignals_model, writer=writer)
 
-    def write_analogsignal(self, signal, name='0', writer=None, parent=None):
+    def write_analogsignal(self, signal, name, writer, parent):
         """
         Write an AnalogSignal to the file
 
@@ -174,19 +176,63 @@ class NSDFIO(BaseIO):
         uid = uuid1().hex
         model = nsdf.ModelComponent(name, uid=uid, parent=parent)
 
+        if signal.annotations.get('nsdfio_uid') is not None:
+            model.attrs['reference_to'] = signal.annotations['nsdfio_uid']
+            self._write_model_component(model, writer)
+            return
+
+        self._write_basic_metadata(model, signal)
+        signal.annotations['nsdfio_uid'] = uid
+
         r_signal = np.swapaxes(signal, 0, 1)
-        channels, source_ds = self._create_signal_data_sources(model, r_signal, uid, writer)
+        channels_model, channels, source_ds = self._create_signal_data_sources(model, r_signal, uid, writer)
         self._write_signal_data(model, channels, r_signal, signal, source_ds, writer)
 
-        self._write_basic_metadata(model, signal)
+        self._write_model_component(model, writer)
+        self._write_model_component(channels_model, writer)
+        for channel_model in channels:
+            self._write_model_component(channel_model, writer)
+
+    def write_channelindex(self, channelindex, name, writer, parent):
+        """
+        Write a ChannelIndex to the file
+
+        :param channelindex: ChannelIndex to be written
+        :param name: Name for channelindex representation in NSDF model tree
+        :param writer: NSDFWriter instance
+        :param parent: NSDF ModelComponent which will be the parent of channelindex NSDF representation
+        """
+        uid = uuid1().hex
+        model = nsdf.ModelComponent(name, uid=uid, parent=parent)
+
+        self._write_basic_metadata(model, channelindex)
+        self._write_model_component(model, writer)
+
+        self._write_channelindex_arrays(model, channelindex, writer)
+
+        self._write_channelindex_children(channelindex, model, writer)
+
+    def _write_channelindex_children(self, channelindex, model, writer):
+        analogsignals_model = nsdf.ModelComponent(name='analogsignals', uid=uuid1().hex, parent=model)
+        self._write_model_component(analogsignals_model, writer)
+        name_pattern = self._name_pattern(len(channelindex.analogsignals))
+        for i, signal in enumerate(channelindex.analogsignals):
+            self.write_analogsignal(signal=signal, name=name_pattern.format(i),
+                                    parent=analogsignals_model, writer=writer)
 
     def _init_writing(self):
         return nsdf.NSDFWriter(self.filename, mode='w')
 
-    def _prepare_model_tree(self):
+    def _prepare_model_tree(self, writer):
         neo_model = nsdf.ModelComponent('neo', uid=uuid1().hex)
+        self._write_model_component(neo_model, writer)
+
         blocks_model = nsdf.ModelComponent('blocks', uid=uuid1().hex, parent=neo_model)
+        self._write_model_component(blocks_model, writer)
+
         segments_model = nsdf.ModelComponent('segments', uid=uuid1().hex, parent=neo_model)
+        self._write_model_component(segments_model, writer)
+
         return neo_model, blocks_model, segments_model
 
     def _number_of_digits(self, n):
@@ -195,6 +241,22 @@ class NSDFIO(BaseIO):
     def _name_pattern(self, how_many_items):
         return '{{:0{}d}}'.format(self._number_of_digits(max(how_many_items - 1, 0)))
 
+    def _clean_nsdfio_annotations(self, object):
+        nsdfio_annotations = ('nsdfio_uid', )
+
+        for key in nsdfio_annotations:
+            object.annotations.pop(key, None)
+
+        if hasattr(object, 'children'):
+            for child in object.children:
+                self._clean_nsdfio_annotations(child)
+
+    def _write_model_component(self, model, writer):
+        if model.parent is None:
+            nsdf.add_model_component(model, writer.model['modeltree/'])
+        else:
+            nsdf.add_model_component(model, model.parent.hdfgroup)
+
     def _write_container_metadata(self, container, container_model):
         self._write_basic_metadata(container_model, container)
 
@@ -232,9 +294,6 @@ class NSDFIO(BaseIO):
         dataobj.set_dt(float(signal.sampling_period.magnitude),
                        str(signal.sampling_period.dimensionality))
 
-        self._write_analogsignal_t_start(dataobj, model, signal, source_ds, writer)
-
-    def _write_analogsignal_t_start(self, dataobj, model, signal, source_ds, writer):
         rescaled_tstart = signal.t_start.rescale(signal.sampling_period.dimensionality)
         writer.add_uniform_data(source_ds, dataobj,
                                 tstart=float(rescaled_tstart.magnitude))
@@ -244,13 +303,31 @@ class NSDFIO(BaseIO):
         channels = []
         channels_model = nsdf.ModelComponent(name='channels', uid=uuid1().hex, parent=model)
         name_pattern = '{{:0{}d}}'.format(self._number_of_digits(max(len(r_signal) - 1, 0)))
-        for i, channel in enumerate(r_signal):
+        for i in range(len(r_signal)):
             channels.append(nsdf.ModelComponent(name_pattern.format(i),
                                                 uid=uuid1().hex,
                                                 parent=channels_model))
 
         source_ds = writer.add_uniform_ds(uid, [channel.uid for channel in channels])
-        return channels, source_ds
+        return channels_model, channels, source_ds
+
+    def _write_channelindex_arrays(self, model, channelindex, writer):
+        group = model.hdfgroup
+
+        self._write_array(group, 'index', channelindex.index)
+        if channelindex.channel_names is not None:
+            self._write_array(group, 'channel_names', channelindex.channel_names)
+        if channelindex.channel_ids is not None:
+            self._write_array(group, 'channel_ids', channelindex.channel_ids)
+        if channelindex.coordinates is not None:
+            self._write_array(group, 'coordinates', channelindex.coordinates)
+
+    def _write_array(self, group, name, array):
+        if isinstance(array, pq.Quantity):
+            group.create_dataset(name, data=array.magnitude)
+            group[name].attrs['dimensionality'] = str(array.dimensionality)
+        else:
+            group.create_dataset(name, data=array)
 
     def read_all_blocks(self, lazy=False, cascade=True):
         """
@@ -298,6 +375,8 @@ class NSDFIO(BaseIO):
     def _read_block_children(self, lazy, block, group, reader):
         for child in group['segments/'].values():
             block.segments.append(self.read_segment(lazy=lazy, group=child, reader=reader))
+        for child in group['channel_indexes/'].values():
+            block.channel_indexes.append(self.read_channelindex(lazy=lazy, group=child, reader=reader))
 
     def read_segment(self, lazy=False, cascade=True, group=None, reader=None):
         """
@@ -305,7 +384,7 @@ class NSDFIO(BaseIO):
 
         :param lazy: Enables lazy reading
         :param cascade: Read nested objects or not?
-        :param group: HDF5 Group representing the block in NSDF model tree (optional)
+        :param group: HDF5 Group representing the segment in NSDF model tree (optional)
         :param reader: NSDFReader instance (optional)
         :return: Read segment
         """
@@ -334,11 +413,15 @@ class NSDFIO(BaseIO):
 
         :param lazy: Enables lazy reading
         :param cascade: Read nested objects or not?
-        :param group: HDF5 Group representing the block in NSDF model tree
+        :param group: HDF5 Group representing the analogsignal in NSDF model tree
         :param reader: NSDFReader instance
         :return: Read AnalogSignal
         """
         attrs = group.attrs
+
+        if attrs.get('reference_to') is not None:
+            return self.objects_dict[attrs['reference_to']]
+
         uid = attrs['uid']
         data_group = reader.data['uniform/{}/signal'.format(uid)]
 
@@ -347,11 +430,37 @@ class NSDFIO(BaseIO):
 
         self._read_basic_metadata(attrs, signal)
 
+        self.objects_dict[uid] = signal
         return signal
 
+    def read_channelindex(self, lazy=False, cascade=True, group=None, reader=None):
+        """
+        Read a ChannelIndex from the file (must be child of a Block)
+
+        :param lazy: Enables lazy reading
+        :param cascade: Read nested objects or not?
+        :param group: HDF5 Group representing the channelindex in NSDF model tree
+        :param reader: NSDFReader instance
+        :return: Read ChannelIndex
+        """
+        attrs = group.attrs
+
+        channelindex = self._create_channelindex(group)
+        if cascade:
+            self._read_channelindex_children(lazy, group, reader, channelindex)
+
+        self._read_basic_metadata(attrs, channelindex)
+
+        return channelindex
+
+    def _read_channelindex_children(self, lazy, group, reader, channelindex):
+        for child in group['analogsignals/'].values():
+            channelindex.analogsignals.append(self.read_analogsignal(lazy=lazy, group=child, reader=reader))
+
     def _init_reading(self):
         reader = nsdf.NSDFReader(self.filename)
         self.file_datetime = datetime.fromtimestamp(os.stat(self.filename).st_mtime)
+        self.objects_dict = {}
         return reader
 
     def _select_first_container(self, group, reader, name):
@@ -428,3 +537,18 @@ class NSDFIO(BaseIO):
                               t_start=t_start, sampling_period=pq.Quantity(attrs['dt'], attrs['tunit']))
         signal.lazy_shape = shape
         return signal
+
+    def _create_channelindex(self, group):
+        return ChannelIndex(index=self._read_array(group, 'index'),
+                            channel_names=self._read_array(group, 'channel_names'),
+                            channel_ids=self._read_array(group, 'channel_ids'),
+                            coordinates=self._read_array(group, 'coordinates'))
+
+    def _read_array(self, group, name):
+        if group.__contains__(name) == False:
+            return None
+        array = group[name][:]
+
+        if group[name].attrs.get('dimensionality') is not None:
+            return pq.Quantity(array, group[name].attrs['dimensionality'])
+        return array

+ 32 - 23
code/python-neo/neo/io/plexonio.py

@@ -24,7 +24,6 @@ import quantities as pq
 
 from neo.io.baseio import BaseIO
 from neo.core import Segment, AnalogSignal, SpikeTrain, Event
-from neo.io.tools import iteritems
 
 
 class PlexonIO(BaseIO):
@@ -106,8 +105,9 @@ class PlexonIO(BaseIO):
             seg.annotate(**{key: val})
 
         if not cascade:
+            fid.close()
             return seg
-
+        
         ## Step 1 : read headers
         # dsp channels header = spikes and waveforms
         dspChannelHeaders = {}
@@ -121,8 +121,8 @@ class PlexonIO(BaseIO):
             dspChannelHeaders[channelHeader['Channel']] = channelHeader
             maxunit = max(channelHeader['NUnits'], maxunit)
             maxchan = max(channelHeader['Channel'], maxchan)
-
-            # event channel header
+        
+        # event channel header
         eventHeaders = {}
         for _ in range(global_header['NumEventChannels']):
             eventHeader = HeaderReader(fid, EventHeader).read_f(offset=None)
@@ -135,12 +135,12 @@ class PlexonIO(BaseIO):
                 offset=None)
             slowChannelHeaders[slowChannelHeader['Channel']] = \
                 slowChannelHeader
-
+        
         ## Step 2 : a first loop for counting size
         # signal
-        nb_samples = np.zeros(len(slowChannelHeaders))
-        sample_positions = np.zeros(len(slowChannelHeaders))
-        t_starts = np.zeros(len(slowChannelHeaders), dtype='f')
+        nb_samples = np.zeros(len(slowChannelHeaders), dtype='int64')
+        sample_positions = np.zeros(len(slowChannelHeaders), dtype='int64')
+        t_starts = np.zeros(len(slowChannelHeaders), dtype='float64')
 
         #spiketimes and waveform
         nb_spikes = np.zeros((maxchan + 1, maxunit + 1), dtype='i')
@@ -149,7 +149,7 @@ class PlexonIO(BaseIO):
         # eventarrays
         nb_events = {}
         #maxstrsizeperchannel = { }
-        for chan, h in iteritems(eventHeaders):
+        for chan, h in eventHeaders.items():
             nb_events[chan] = 0
             #maxstrsizeperchannel[chan] = 0
 
@@ -186,7 +186,7 @@ class PlexonIO(BaseIO):
         if not lazy:
             # allocating mem for signal
             sigarrays = {}
-            for chan, h in iteritems(slowChannelHeaders):
+            for chan, h in slowChannelHeaders.items():
                 sigarrays[chan] = np.zeros(nb_samples[chan])
 
             # allocating mem for SpikeTrain
@@ -204,7 +204,7 @@ class PlexonIO(BaseIO):
             # allocating mem for event
             eventpositions = {}
             evarrays = {}
-            for chan, nb in iteritems(nb_events):
+            for chan, nb in nb_events.items():
                 evarrays[chan] = {
                     'times': np.zeros(nb, dtype='f'),
                     'labels': np.zeros(nb, dtype='S4')
@@ -256,7 +256,7 @@ class PlexonIO(BaseIO):
 
 
         ## Step 4: create neo object
-        for chan, h in iteritems(eventHeaders):
+        for chan, h in eventHeaders.items():
             if lazy:
                 times = []
                 labels = None
@@ -273,7 +273,7 @@ class PlexonIO(BaseIO):
                 ea.lazy_shape = nb_events[chan]
             seg.events.append(ea)
 
-        for chan, h in iteritems(slowChannelHeaders):
+        for chan, h in slowChannelHeaders.items():
             if lazy:
                 signal = []
             else:
@@ -291,9 +291,10 @@ class PlexonIO(BaseIO):
                         slowChannelHeaders[chan]['PreampGain'])
                 signal = sigarrays[chan] * gain
             anasig = AnalogSignal(
-                signal * pq.V,
+                signal,
                 sampling_rate=float(
                     slowChannelHeaders[chan]['ADFreq']) * pq.Hz,
+                units='mV',
                 t_start=t_starts[chan] * pq.s,
                 channel_index=slowChannelHeaders[chan]['Channel'],
                 channel_name=slowChannelHeaders[chan]['Name'])
@@ -319,21 +320,28 @@ class PlexonIO(BaseIO):
                             'Version'] < 105:
                         gain = global_header['SpikeMaxMagnitudeMV'] / (
                             .5 * 2. ** (global_header['BitsPerSpikeSample']) *
+                            dspChannelHeaders[chan]['Gain'] *
                             1000.)
                     elif global_header['Version'] > 105:
                         gain = global_header['SpikeMaxMagnitudeMV'] / (
                             .5 * 2. ** (global_header['BitsPerSpikeSample']) *
+                            dspChannelHeaders[chan]['Gain'] *
                             global_header['SpikePreAmpGain'])
-                    waveforms = swfarrays[chan, unit] * gain * pq.V
+                    waveforms = swfarrays[chan, unit] * gain * pq.mV
                 else:
                     waveforms = None
+            
+            if global_header['WaveformFreq']>0:
+                wf_sampling_rate= float(global_header['WaveformFreq']) * pq.Hz
+            else:
+                wf_sampling_rate = float(global_header['ADFrequency']) * pq.Hz
+            
             sptr = SpikeTrain(
                 times,
                 units='s',
                 t_stop=t_stop*pq.s,
                 waveforms=waveforms,
-                sampling_rate=float(
-                    global_header['WaveformFreq']) * pq.Hz,
+                sampling_rate=wf_sampling_rate,
             )
             sptr.annotate(unit_name = dspChannelHeaders[chan]['Name'])
             sptr.annotate(channel_index = chan)
@@ -343,8 +351,10 @@ class PlexonIO(BaseIO):
             if lazy:
                 sptr.lazy_shape = nb_spikes[chan, unit]
             seg.spiketrains.append(sptr)
-
+        
         seg.create_many_to_one_relationship()
+        
+        fid.close()
         return seg
 
 
@@ -352,7 +362,7 @@ GlobalHeader = [
     ('MagicNumber', 'I'),
     ('Version', 'i'),
     ('Comment', '128s'),
-    ('ADFrequency', 'i'),
+    ('ADFrequency', 'i'), #this rate is only for events
     ('NumDSPChannels', 'i'),
     ('NumEventChannels', 'i'),
     ('NumSlowChannels', 'i'),
@@ -396,7 +406,7 @@ ChannelHeader = [
     ('Name', '32s'),
     ('SIGName', '32s'),
     ('Channel', 'i'),
-    ('WFRate', 'i'),
+    ('WFRate', 'i'), #this is not the waveform sampling rate!!!
     ('SIG', 'i'),
     ('Ref', 'i'),
     ('Gain', 'i'),
@@ -463,7 +473,6 @@ DataBlockHeader = [
 
 
 class HeaderReader():
-
     def __init__(self, fid, description):
         self.fid = fid
         self.description = description
@@ -479,8 +488,8 @@ class HeaderReader():
             val = list(struct.unpack(fmt, buf))
             for i, ival in enumerate(val):
                 if hasattr(ival, 'replace'):
-                    ival = ival.replace(str.encode('\x03'), str.encode(''))
-                    ival = ival.replace(str.encode('\x00'), str.encode(''))
+                    ival = ival.replace(b'\x03', b'')
+                    ival = ival.replace(b'\x00', b'')
                     val[i] = ival.decode("utf-8")
             if len(val) == 1:
                 val = val[0]

+ 1 - 1
code/python-neo/neo/io/rawbinarysignalio.py

@@ -128,7 +128,7 @@ class RawBinarySignalIO(BaseIO):
             sig = np.memmap(self.filename, dtype = dtype, mode = 'r', offset = bytesoffset)
             if sig.size % nbchannel != 0 :
                 sig = sig[:- sig.size%nbchannel]
-            sig = sig.reshape((sig.size/nbchannel,nbchannel))
+            sig = sig.reshape((sig.size//nbchannel,nbchannel))
             if dtype.kind == 'i' :
                 sig = sig.astype('f')
                 sig /= 2**(8*dtype.itemsize)

+ 1 - 0
code/python-neo/neo/io/spike2io.py

@@ -100,6 +100,7 @@ class Spike2IO(BaseIO):
         )
 
         if not cascade:
+            fid.close()
             return seg
 
         def addannotations(ob, channelHeader):

+ 6 - 4
code/python-neo/neo/io/tdtio.py

@@ -28,17 +28,19 @@ import itertools
 
 from neo.io.baseio import BaseIO
 from neo.core import Block, Segment, AnalogSignal, SpikeTrain, Event
-from neo.io.tools import iteritems
+
 
 PY3K = (sys.version_info[0] == 3)
 
+if not PY3K:
+    zip = itertools.izip
 
 def get_chunks(sizes, offsets, big_array):
    # offsets are octet count
     # sizes are not!!
    # so need this (I really do not know why...):
     sizes = (sizes -10)  * 4 #
-    all = np.concatenate([ big_array[o:o+s] for s, o in itertools.izip(sizes, offsets) ])
+    all = np.concatenate([ big_array[o:o+s] for s, o in zip(sizes, offsets) ])
     return all
 
 class TdtIO(BaseIO):
@@ -175,12 +177,12 @@ class TdtIO(BaseIO):
                     if type_label in ['EVTYPE_STRON', 'EVTYPE_STROFF']:
                         if lazy:
                             times = [ ]*pq.s
-                            labels = np.array([ ], dtype=str)
+                            labels = np.array([ ], dtype='S')
                         else:
                             times = (tsq[mask3]['timestamp'] - global_t_start) * pq.s
                             labels = tsq[mask3]['eventoffset'].view('float64').astype('S')
                         ea = Event(times=times,
-                                   name=code,
+                                   name=str(code),
                                    channel_index=int(channel),
                                    labels=labels)
                         if lazy:

+ 5 - 5
code/python-neo/neo/io/tools.py

@@ -72,11 +72,11 @@ from neo.core import (AnalogSignal, Block,
 #         recordingchannels[ind].channel_indexes.append(chx)
 
 
-def iteritems(D):
-    try:
-        return D.iteritems()  # Python 2
-    except AttributeError:
-        return D.items()  # Python 3
+#def iteritems(D):
+#    try:
+#        return D.iteritems()  # Python 2
+#    except AttributeError:
+#        return D.items()  # Python 3
 
 
 class LazyList(collections.MutableSequence):

+ 4 - 2
code/python-neo/neo/io/winedrio.py

@@ -100,7 +100,7 @@ class WinEdrIO(BaseIO):
         if not lazy:
             data = np.memmap(self.filename , np.dtype('i2')  , 'r',
                   #shape = (header['NC'], header['NP']) ,
-                  shape = (header['NP']/header['NC'],header['NC'], ) ,
+                  shape = (header['NP']//header['NC'],header['NC'], ) ,
                   offset = header['NBH'])
 
         for c in range(header['NC']):
@@ -126,7 +126,8 @@ class WinEdrIO(BaseIO):
             if lazy:
                 signal = [ ] * unit
             else:
-                signal = (data[:,header['YO%d'%c]].astype('f4')-YZ) *AD/( YCF*YAG*(ADCMAX+1)) * unit
+                chan = int(header['YO%d'%c])
+                signal = (data[:,chan].astype('float32')-YZ) *AD/( YCF*YAG*(ADCMAX+1)) * unit
 
             ana = AnalogSignal(signal,
                                sampling_rate=pq.Hz / DT,
@@ -139,6 +140,7 @@ class WinEdrIO(BaseIO):
             seg.analogsignals.append(ana)
 
         seg.create_many_to_one_relationship()
+        fid.close()
         return seg
 
 

+ 6 - 6
code/python-neo/neo/io/winwcpio.py

@@ -110,12 +110,11 @@ class WinWcpIO(BaseIO):
             # read data
             NP = (SECTORSIZE*header['NBD'])/2
             NP = NP - NP%header['NC']
-            NP = NP/header['NC']
+            NP = int(NP//header['NC'])
             if not lazy:
-                data = np.memmap(self.filename , np.dtype('i2')  , 'r',
-                              #shape = (header['NC'], header['NP']) ,
-                              shape = (NP,header['NC'], ) ,
-                              offset = offset+header['NBA']*SECTORSIZE)
+                data = np.memmap(self.filename , np.dtype('int16')  , mode='r',
+                              shape=(NP,header['NC'], ) ,
+                              offset=offset+header['NBA']*SECTORSIZE)
 
             # create a segment
             seg = Segment()
@@ -135,7 +134,8 @@ class WinWcpIO(BaseIO):
                     YG = float(header['YG%d'%c].replace(',','.'))
                     ADCMAX = header['ADCMAX']
                     VMax = analysisHeader['VMax'][c]
-                    signal = data[:,header['YO%d'%c]].astype('f4')*VMax/ADCMAX/YG * unit
+                    chan = int(header['YO%d'%c])
+                    signal = data[:,chan].astype('f4')*VMax/ADCMAX/YG * unit
                 anaSig = AnalogSignal(signal,
                                       sampling_rate=
                                       pq.Hz /

+ 52 - 35
code/python-neo/neo/test/coretest/test_analogsignal.py

@@ -8,11 +8,9 @@ from __future__ import division
 
 import os
 import pickle
+import copy
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 import numpy as np
 import quantities as pq
@@ -30,6 +28,9 @@ from neo.core.channelindex import ChannelIndex
 from neo.core import Segment
 from neo.test.tools import (assert_arrays_almost_equal,
                             assert_neo_object_is_compliant,
+                            assert_same_sub_schema,
+                            assert_objects_equivalent,
+                            assert_same_attributes,
                             assert_same_sub_schema)
 from neo.test.generate_datasets import (get_fake_value, get_fake_values,
                                         fake_neo, TEST_ANNOTATIONS)
@@ -305,7 +306,7 @@ class TestAnalogSignalArrayMethods(unittest.TestCase):
         self.signal1 = AnalogSignal(self.data1quant, sampling_rate=1*pq.kHz,
                                          name='spam', description='eggs',
                                          file_origin='testfile.txt', arg1='test')
-        self.signal1.segment = 1
+        self.signal1.segment = Segment()
         self.signal1.channel_index = ChannelIndex(index=[0])
 
     def test__compliant(self):
@@ -313,28 +314,29 @@ class TestAnalogSignalArrayMethods(unittest.TestCase):
 
     def test__slice_should_return_AnalogSignalArray(self):
         # slice
-        result = self.signal1[3:8, 0]
-        self.assertIsInstance(result, AnalogSignal)
-        assert_neo_object_is_compliant(result)
-        self.assertEqual(result.name, 'spam')         # should slicing really preserve name and description?
-        self.assertEqual(result.description, 'eggs')  # perhaps these should be modified to indicate the slice?
-        self.assertEqual(result.file_origin, 'testfile.txt')
-        self.assertEqual(result.annotations, {'arg1': 'test'})
-
-        self.assertEqual(result.size, 5)
-        self.assertEqual(result.sampling_period, self.signal1.sampling_period)
-        self.assertEqual(result.sampling_rate, self.signal1.sampling_rate)
-        self.assertEqual(result.t_start,
-                         self.signal1.t_start+3*result.sampling_period)
-        self.assertEqual(result.t_stop,
-                         result.t_start + 5*result.sampling_period)
-        assert_array_equal(result.magnitude, self.data1[3:8].reshape(-1, 1))
-
-        # Test other attributes were copied over (in this case, defaults)
-        self.assertEqual(result.file_origin, self.signal1.file_origin)
-        self.assertEqual(result.name, self.signal1.name)
-        self.assertEqual(result.description, self.signal1.description)
-        self.assertEqual(result.annotations, self.signal1.annotations)
+        for index in (0, np.int64(0)):
+            result = self.signal1[3:8, index]
+            self.assertIsInstance(result, AnalogSignal)
+            assert_neo_object_is_compliant(result)
+            self.assertEqual(result.name, 'spam')         # should slicing really preserve name and description?
+            self.assertEqual(result.description, 'eggs')  # perhaps these should be modified to indicate the slice?
+            self.assertEqual(result.file_origin, 'testfile.txt')
+            self.assertEqual(result.annotations, {'arg1': 'test'})
+
+            self.assertEqual(result.size, 5)
+            self.assertEqual(result.sampling_period, self.signal1.sampling_period)
+            self.assertEqual(result.sampling_rate, self.signal1.sampling_rate)
+            self.assertEqual(result.t_start,
+                             self.signal1.t_start+3*result.sampling_period)
+            self.assertEqual(result.t_stop,
+                             result.t_start + 5*result.sampling_period)
+            assert_array_equal(result.magnitude, self.data1[3:8].reshape(-1, 1))
+
+            # Test other attributes were copied over (in this case, defaults)
+            self.assertEqual(result.file_origin, self.signal1.file_origin)
+            self.assertEqual(result.name, self.signal1.name)
+            self.assertEqual(result.description, self.signal1.description)
+            self.assertEqual(result.annotations, self.signal1.annotations)
 
     def test__slice_should_let_access_to_parents_objects(self):
         result =  self.signal1.time_slice(1*pq.ms,3*pq.ms)
@@ -381,25 +383,30 @@ class TestAnalogSignalArrayMethods(unittest.TestCase):
         n = 8  # number of channels
         signal = AnalogSignal(np.arange(n * 100.0).reshape(100, n),
                               sampling_rate=1*pq.kHz,
-                              units="mV")
+                              units="mV",
+                              name="test")
         self.assertEqual(signal.shape, (100, n))
         signal.channel_index = ChannelIndex(index=np.arange(n, dtype=int),
                                             channel_names=["channel{0}".format(i) for i in range(n)])
+        signal.channel_index.analogsignals.append(signal)
         odd_channels = signal[:, 1::2]
         self.assertEqual(odd_channels.shape, (100, n//2))
         assert_array_equal(odd_channels.channel_index.index, np.arange(n//2, dtype=int))
         assert_array_equal(odd_channels.channel_index.channel_names, ["channel{0}".format(i) for i in range(1, n, 2)])
         assert_array_equal(signal.channel_index.channel_names, ["channel{0}".format(i) for i in range(n)])
+        self.assertEqual(odd_channels.channel_index.analogsignals[0].name, signal.name)
 
     def test__copy_should_let_access_to_parents_objects(self):
-        ##copy
         result =  self.signal1.copy()
-        self.assertEqual(result.segment, self.signal1.segment)
-        self.assertEqual(result.channel_index, self.signal1.channel_index)
-        ## deep copy (not fixed yet)
-        #result = copy.deepcopy(self.signal1)
-        #self.assertEqual(result.segment, self.signal1.segment)
-        #self.assertEqual(result.channel_index, self.signal1.channel_index)
+        self.assertIs(result.segment, self.signal1.segment)
+        self.assertIs(result.channel_index, self.signal1.channel_index)
+
+    def test__deepcopy_should_let_access_to_parents_objects(self):
+        result = copy.deepcopy(self.signal1)
+        self.assertIsInstance(result.segment, Segment)
+        self.assertIsInstance(result.channel_index, ChannelIndex)
+        assert_same_sub_schema(result.segment, self.signal1.segment)
+        assert_same_sub_schema(result.channel_index, self.signal1.channel_index)
 
     def test__getitem_should_return_single_quantity(self):
         result1 = self.signal1[0, 0]
@@ -454,6 +461,11 @@ class TestAnalogSignalArrayMethods(unittest.TestCase):
         assert_array_equal(result.magnitude, self.data1.reshape(-1, 1))
         assert_same_sub_schema(result, self.signal1)
 
+        self.assertIsInstance(result.channel_index, ChannelIndex)
+        self.assertIsInstance(result.segment, Segment)
+        self.assertIs(result.channel_index, self.signal1.channel_index)
+        self.assertIs(result.segment, self.signal1.segment)
+
     def test__rescale_new(self):
         result = self.signal1.copy()
         result = result.rescale(pq.pA)
@@ -468,6 +480,11 @@ class TestAnalogSignalArrayMethods(unittest.TestCase):
         self.assertEqual(result.units, 1*pq.pA)
         assert_arrays_almost_equal(np.array(result), self.data1.reshape(-1, 1)*1000., 1e-10)
 
+        self.assertIsInstance(result.channel_index, ChannelIndex)
+        self.assertIsInstance(result.segment, Segment)
+        self.assertIs(result.channel_index, self.signal1.channel_index)
+        self.assertIs(result.segment, self.signal1.segment)
+
     def test__rescale_new_incompatible_ValueError(self):
         self.assertRaises(ValueError, self.signal1.rescale, pq.mV)
 

+ 1 - 4
code/python-neo/neo/test/coretest/test_analogsignalarray.py

@@ -6,10 +6,7 @@ Tests of the neo.core.analogsignalarray.AnalogSignalArrayArray class
 import os
 import pickle
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 import numpy as np
 import quantities as pq

+ 1 - 4
code/python-neo/neo/test/coretest/test_base.py

@@ -8,10 +8,7 @@ from decimal import Decimal
 from fractions import Fraction
 import sys
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 import numpy as np
 import quantities as pq

+ 1 - 4
code/python-neo/neo/test/coretest/test_block.py

@@ -8,10 +8,7 @@ from __future__ import absolute_import, division, print_function
 
 from datetime import datetime
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 import numpy as np
 

+ 1 - 4
code/python-neo/neo/test/coretest/test_channelindex.py

@@ -6,10 +6,7 @@ Tests of the neo.core.channelindex.ChannelIndex class
 # needed for python 3 compatibility
 from __future__ import absolute_import, division, print_function
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 import numpy as np
 

+ 1 - 4
code/python-neo/neo/test/coretest/test_container.py

@@ -3,10 +3,7 @@
 Tests of the neo.core.container.Container class
 """
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 import numpy as np
 

+ 1 - 4
code/python-neo/neo/test/coretest/test_epoch.py

@@ -3,10 +3,7 @@
 Tests of the neo.core.epoch.Epoch class
 """
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 import numpy as np
 import quantities as pq

+ 1 - 4
code/python-neo/neo/test/coretest/test_event.py

@@ -3,10 +3,7 @@
 Tests of the neo.core.event.Event class
 """
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 import numpy as np
 import quantities as pq

+ 1 - 4
code/python-neo/neo/test/coretest/test_generate_datasets.py

@@ -6,10 +6,7 @@ Test to  make sure generated datasets are sane.
 # needed for python 3 compatibility
 from __future__ import absolute_import, division
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 from datetime import datetime
 

+ 19 - 5
code/python-neo/neo/test/coretest/test_irregularysampledsignal.py

@@ -3,10 +3,7 @@
 Tests of the neo.core.irregularlysampledsignal.IrregularlySampledSignal class
 """
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 import os
 import pickle
@@ -22,7 +19,7 @@ else:
     HAVE_IPYTHON = True
 
 from neo.core.irregularlysampledsignal import IrregularlySampledSignal
-from neo.core import Segment
+from neo.core import Segment, ChannelIndex
 from neo.test.tools import (assert_arrays_almost_equal, assert_arrays_equal,
                             assert_neo_object_is_compliant,
                             assert_same_sub_schema)
@@ -274,6 +271,8 @@ class TestIrregularlySampledSignalArrayMethods(unittest.TestCase):
                                                 description='eggs',
                                                 file_origin='testfile.txt',
                                                 arg1='test')
+        self.signal1.segment = Segment()
+        self.signal1.channel_index = ChannelIndex([0])
 
     def test__compliant(self):
         assert_neo_object_is_compliant(self.signal1)
@@ -348,6 +347,11 @@ class TestIrregularlySampledSignalArrayMethods(unittest.TestCase):
         assert_array_equal(result.times, self.time1quant)
         assert_same_sub_schema(result, self.signal1)
 
+        self.assertIsInstance(result.channel_index, ChannelIndex)
+        self.assertIsInstance(result.segment, Segment)
+        self.assertIs(result.channel_index, self.signal1.channel_index)
+        self.assertIs(result.segment, self.signal1.segment)
+
     def test__rescale_new(self):
         result = self.signal1.copy()
         result = result.rescale(pq.uV)
@@ -363,6 +367,11 @@ class TestIrregularlySampledSignalArrayMethods(unittest.TestCase):
         assert_arrays_almost_equal(np.array(result), self.data1.reshape(-1, 1)*1000., 1e-10)
         assert_array_equal(result.times, self.time1quant)
 
+        self.assertIsInstance(result.channel_index, ChannelIndex)
+        self.assertIsInstance(result.segment, Segment)
+        self.assertIs(result.channel_index, self.signal1.channel_index)
+        self.assertIs(result.segment, self.signal1.segment)
+
     def test__rescale_new_incompatible_ValueError(self):
         self.assertRaises(ValueError, self.signal1.rescale, pq.nA)
 
@@ -553,6 +562,11 @@ class TestIrregularlySampledSignalArrayMethods(unittest.TestCase):
         self.assertIsInstance(sig_as_q, pq.Quantity)
         assert_array_equal(self.data1, sig_as_q.magnitude.flat)
 
+    def test__copy_should_preserve_parent_objects(self):
+        result = self.signal1.copy()
+        self.assertIs(result.segment, self.signal1.segment)
+        self.assertIs(result.channel_index, self.signal1.channel_index)
+
 
 class TestIrregularlySampledSignalCombination(unittest.TestCase):
     def setUp(self):

+ 1 - 4
code/python-neo/neo/test/coretest/test_segment.py

@@ -8,10 +8,7 @@ from __future__ import absolute_import, division, print_function
 
 from datetime import datetime
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 import numpy as np
 import quantities as pq

+ 302 - 176
code/python-neo/neo/test/coretest/test_spiketrain.py

@@ -8,10 +8,7 @@ from __future__ import absolute_import
 
 import sys
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 import numpy as np
 from numpy.testing import assert_array_equal
@@ -27,9 +24,10 @@ else:
 from neo.core.spiketrain import (check_has_dimensions_time, SpikeTrain,
                                  _check_time_in_range, _new_spiketrain)
 from neo.core import Segment, Unit
-from neo.test.tools import  (assert_arrays_equal,
-                             assert_arrays_almost_equal,
-                             assert_neo_object_is_compliant)
+from neo.core.baseneo import MergeError
+from neo.test.tools import (assert_arrays_equal,
+                            assert_arrays_almost_equal,
+                            assert_neo_object_is_compliant)
 from neo.test.generate_datasets import (get_fake_value, get_fake_values,
                                         fake_neo, TEST_ANNOTATIONS)
 
@@ -44,7 +42,8 @@ class Test__generate_datasets(unittest.TestCase):
         self.annotations['seed'] = 0
         waveforms = get_fake_value('waveforms', pq.Quantity, seed=3, dim=3)
         shape = waveforms.shape[0]
-        times = get_fake_value('times', pq.Quantity, seed=0, dim=1, shape=waveforms.shape[0])
+        times = get_fake_value('times', pq.Quantity, seed=0, dim=1,
+                               shape=waveforms.shape[0])
         t_start = get_fake_value('t_start', pq.Quantity, seed=1, dim=0)
         t_stop = get_fake_value('t_stop', pq.Quantity, seed=2, dim=0)
         left_sweep = get_fake_value('left_sweep', pq.Quantity, seed=4, dim=0)
@@ -137,39 +136,39 @@ class Testcheck_has_dimensions_time(unittest.TestCase):
 class Testcheck_time_in_range(unittest.TestCase):
     def test__check_time_in_range_empty_array(self):
         value = np.array([])
-        t_start = 0*pq.s
-        t_stop = 10*pq.s
+        t_start = 0 * pq.s
+        t_stop = 10 * pq.s
         _check_time_in_range(value, t_start=t_start, t_stop=t_stop)
         _check_time_in_range(value, t_start=t_start, t_stop=t_stop, view=False)
         _check_time_in_range(value, t_start=t_start, t_stop=t_stop, view=True)
 
     def test__check_time_in_range_exact(self):
-        value = np.array([0., 5., 10.])*pq.s
-        t_start = 0.*pq.s
-        t_stop = 10.*pq.s
+        value = np.array([0., 5., 10.]) * pq.s
+        t_start = 0. * pq.s
+        t_stop = 10. * pq.s
         _check_time_in_range(value, t_start=t_start, t_stop=t_stop)
         _check_time_in_range(value, t_start=t_start, t_stop=t_stop, view=False)
         _check_time_in_range(value, t_start=t_start, t_stop=t_stop, view=True)
 
     def test__check_time_in_range_scale(self):
-        value = np.array([0., 5000., 10000.])*pq.ms
-        t_start = 0.*pq.s
-        t_stop = 10.*pq.s
+        value = np.array([0., 5000., 10000.]) * pq.ms
+        t_start = 0. * pq.s
+        t_stop = 10. * pq.s
         _check_time_in_range(value, t_start=t_start, t_stop=t_stop)
         _check_time_in_range(value, t_start=t_start, t_stop=t_stop, view=False)
 
     def test__check_time_in_range_inside(self):
-        value = np.array([0.1, 5., 9.9])*pq.s
-        t_start = 0.*pq.s
-        t_stop = 10.*pq.s
+        value = np.array([0.1, 5., 9.9]) * pq.s
+        t_start = 0. * pq.s
+        t_stop = 10. * pq.s
         _check_time_in_range(value, t_start=t_start, t_stop=t_stop)
         _check_time_in_range(value, t_start=t_start, t_stop=t_stop, view=False)
         _check_time_in_range(value, t_start=t_start, t_stop=t_stop, view=True)
 
     def test__check_time_in_range_below(self):
-        value = np.array([-0.1, 5., 10.])*pq.s
-        t_start = 0.*pq.s
-        t_stop = 10.*pq.s
+        value = np.array([-0.1, 5., 10.]) * pq.s
+        t_start = 0. * pq.s
+        t_stop = 10. * pq.s
         self.assertRaises(ValueError, _check_time_in_range, value,
                           t_start=t_start, t_stop=t_stop)
         self.assertRaises(ValueError, _check_time_in_range, value,
@@ -178,18 +177,18 @@ class Testcheck_time_in_range(unittest.TestCase):
                           t_start=t_start, t_stop=t_stop, view=True)
 
     def test__check_time_in_range_below_scale(self):
-        value = np.array([-1., 5000., 10000.])*pq.ms
-        t_start = 0.*pq.s
-        t_stop = 10.*pq.s
+        value = np.array([-1., 5000., 10000.]) * pq.ms
+        t_start = 0. * pq.s
+        t_stop = 10. * pq.s
         self.assertRaises(ValueError, _check_time_in_range, value,
                           t_start=t_start, t_stop=t_stop)
         self.assertRaises(ValueError, _check_time_in_range, value,
                           t_start=t_start, t_stop=t_stop, view=False)
 
     def test__check_time_in_range_above(self):
-        value = np.array([0., 5., 10.1])*pq.s
-        t_start = 0.*pq.s
-        t_stop = 10.*pq.s
+        value = np.array([0., 5., 10.1]) * pq.s
+        t_start = 0. * pq.s
+        t_stop = 10. * pq.s
         self.assertRaises(ValueError, _check_time_in_range, value,
                           t_start=t_start, t_stop=t_stop)
         self.assertRaises(ValueError, _check_time_in_range, value,
@@ -198,18 +197,18 @@ class Testcheck_time_in_range(unittest.TestCase):
                           t_start=t_start, t_stop=t_stop, view=True)
 
     def test__check_time_in_range_above_scale(self):
-        value = np.array([0., 5000., 10001.])*pq.ms
-        t_start = 0.*pq.s
-        t_stop = 10.*pq.s
+        value = np.array([0., 5000., 10001.]) * pq.ms
+        t_start = 0. * pq.s
+        t_stop = 10. * pq.s
         self.assertRaises(ValueError, _check_time_in_range, value,
                           t_start=t_start, t_stop=t_stop)
         self.assertRaises(ValueError, _check_time_in_range, value,
                           t_start=t_start, t_stop=t_stop, view=False)
 
     def test__check_time_in_range_above_below(self):
-        value = np.array([-0.1, 5., 10.1])*pq.s
-        t_start = 0.*pq.s
-        t_stop = 10.*pq.s
+        value = np.array([-0.1, 5., 10.1]) * pq.s
+        t_start = 0. * pq.s
+        t_stop = 10. * pq.s
         self.assertRaises(ValueError, _check_time_in_range, value,
                           t_start=t_start, t_stop=t_stop)
         self.assertRaises(ValueError, _check_time_in_range, value,
@@ -218,9 +217,9 @@ class Testcheck_time_in_range(unittest.TestCase):
                           t_start=t_start, t_stop=t_stop, view=True)
 
     def test__check_time_in_range_above_below_scale(self):
-        value = np.array([-1., 5000., 10001.])*pq.ms
-        t_start = 0.*pq.s
-        t_stop = 10.*pq.s
+        value = np.array([-1., 5000., 10001.]) * pq.ms
+        t_start = 0. * pq.s
+        t_stop = 10. * pq.s
         self.assertRaises(ValueError, _check_time_in_range, value,
                           t_start=t_start, t_stop=t_stop)
         self.assertRaises(ValueError, _check_time_in_range, value,
@@ -256,8 +255,8 @@ class TestConstructor(unittest.TestCase):
     def test__create_minimal(self):
         t_start = 0.0
         t_stop = 10.0
-        train1 = SpikeTrain([]*pq.s, t_stop)
-        train2 = _new_spiketrain(SpikeTrain, []*pq.s, t_stop)
+        train1 = SpikeTrain([] * pq.s, t_stop)
+        train2 = _new_spiketrain(SpikeTrain, [] * pq.s, t_stop)
 
         dtype = np.float64
         units = 1 * pq.s
@@ -304,8 +303,8 @@ class TestConstructor(unittest.TestCase):
 
     def test__create_from_list(self):
         times = range(10)
-        t_start = 0.0*pq.s
-        t_stop = 10000.0*pq.ms
+        t_start = 0.0 * pq.s
+        t_stop = 10000.0 * pq.ms
         train1 = SpikeTrain(times, t_start=t_start, t_stop=t_stop, units="ms")
         train2 = _new_spiketrain(SpikeTrain, times,
                                  t_start=t_start, t_stop=t_stop, units="ms")
@@ -322,8 +321,8 @@ class TestConstructor(unittest.TestCase):
 
     def test__create_from_list_set_dtype(self):
         times = range(10)
-        t_start = 0.0*pq.s
-        t_stop = 10000.0*pq.ms
+        t_start = 0.0 * pq.s
+        t_stop = 10000.0 * pq.ms
         train1 = SpikeTrain(times, t_start=t_start, t_stop=t_stop,
                             units="ms", dtype='f4')
         train2 = _new_spiketrain(SpikeTrain, times,
@@ -380,8 +379,8 @@ class TestConstructor(unittest.TestCase):
 
     def test__create_from_array(self):
         times = np.arange(10)
-        t_start = 0.0*pq.s
-        t_stop = 10000.0*pq.ms
+        t_start = 0.0 * pq.s
+        t_stop = 10000.0 * pq.ms
         train1 = SpikeTrain(times, t_start=t_start, t_stop=t_stop, units="s")
         train2 = _new_spiketrain(SpikeTrain, times,
                                  t_start=t_start, t_stop=t_stop, units="s")
@@ -398,8 +397,8 @@ class TestConstructor(unittest.TestCase):
 
     def test__create_from_array_with_dtype(self):
         times = np.arange(10, dtype='f4')
-        t_start = 0.0*pq.s
-        t_stop = 10000.0*pq.ms
+        t_start = 0.0 * pq.s
+        t_stop = 10000.0 * pq.ms
         train1 = SpikeTrain(times, t_start=t_start, t_stop=t_stop, units="s")
         train2 = _new_spiketrain(SpikeTrain, times,
                                  t_start=t_start, t_stop=t_stop, units="s")
@@ -416,8 +415,8 @@ class TestConstructor(unittest.TestCase):
 
     def test__create_from_array_set_dtype(self):
         times = np.arange(10)
-        t_start = 0.0*pq.s
-        t_stop = 10000.0*pq.ms
+        t_start = 0.0 * pq.s
+        t_stop = 10000.0 * pq.ms
         train1 = SpikeTrain(times, t_start=t_start, t_stop=t_stop,
                             units="s", dtype='f4')
         train2 = _new_spiketrain(SpikeTrain, times,
@@ -492,8 +491,8 @@ class TestConstructor(unittest.TestCase):
 
     def test__create_from_quantity_array(self):
         times = np.arange(10) * pq.ms
-        t_start = 0.0*pq.s
-        t_stop = 12.0*pq.ms
+        t_start = 0.0 * pq.s
+        t_stop = 12.0 * pq.ms
         train1 = SpikeTrain(times, t_start=t_start, t_stop=t_stop)
         train2 = _new_spiketrain(SpikeTrain, times,
                                  t_start=t_start, t_stop=t_stop)
@@ -510,8 +509,8 @@ class TestConstructor(unittest.TestCase):
 
     def test__create_from_quantity_array_with_dtype(self):
         times = np.arange(10, dtype='f4') * pq.ms
-        t_start = 0.0*pq.s
-        t_stop = 12.0*pq.ms
+        t_start = 0.0 * pq.s
+        t_stop = 12.0 * pq.ms
         train1 = SpikeTrain(times, t_start=t_start, t_stop=t_stop)
         train2 = _new_spiketrain(SpikeTrain, times,
                                  t_start=t_start, t_stop=t_stop)
@@ -528,8 +527,8 @@ class TestConstructor(unittest.TestCase):
 
     def test__create_from_quantity_array_set_dtype(self):
         times = np.arange(10) * pq.ms
-        t_start = 0.0*pq.s
-        t_stop = 12.0*pq.ms
+        t_start = 0.0 * pq.s
+        t_stop = 12.0 * pq.ms
         train1 = SpikeTrain(times, t_start=t_start, t_stop=t_stop,
                             dtype='f4')
         train2 = _new_spiketrain(SpikeTrain, times,
@@ -604,8 +603,8 @@ class TestConstructor(unittest.TestCase):
 
     def test__create_from_quantity_array_units(self):
         times = np.arange(10) * pq.ms
-        t_start = 0.0*pq.s
-        t_stop = 12.0*pq.ms
+        t_start = 0.0 * pq.s
+        t_stop = 12.0 * pq.ms
         train1 = SpikeTrain(times, t_start=t_start, t_stop=t_stop, units='s')
         train2 = _new_spiketrain(SpikeTrain, times,
                                  t_start=t_start, t_stop=t_stop, units='s')
@@ -622,8 +621,8 @@ class TestConstructor(unittest.TestCase):
 
     def test__create_from_quantity_array_units_with_dtype(self):
         times = np.arange(10, dtype='f4') * pq.ms
-        t_start = 0.0*pq.s
-        t_stop = 12.0*pq.ms
+        t_start = 0.0 * pq.s
+        t_stop = 12.0 * pq.ms
         train1 = SpikeTrain(times, t_start=t_start, t_stop=t_stop,
                             units='s')
         train2 = _new_spiketrain(SpikeTrain, times,
@@ -641,8 +640,8 @@ class TestConstructor(unittest.TestCase):
 
     def test__create_from_quantity_array_units_set_dtype(self):
         times = np.arange(10) * pq.ms
-        t_start = 0.0*pq.s
-        t_stop = 12.0*pq.ms
+        t_start = 0.0 * pq.s
+        t_stop = 12.0 * pq.ms
         train1 = SpikeTrain(times, t_start=t_start, t_stop=t_stop,
                             units='s', dtype='f4')
         train2 = _new_spiketrain(SpikeTrain, times,
@@ -699,8 +698,8 @@ class TestConstructor(unittest.TestCase):
 
     def test__create_from_list_without_units_should_raise_ValueError(self):
         times = range(10)
-        t_start = 0.0*pq.s
-        t_stop = 10000.0*pq.ms
+        t_start = 0.0 * pq.s
+        t_stop = 10000.0 * pq.ms
         self.assertRaises(ValueError, SpikeTrain, times,
                           t_start=t_start, t_stop=t_stop)
         self.assertRaises(ValueError, _new_spiketrain, SpikeTrain, times,
@@ -708,8 +707,8 @@ class TestConstructor(unittest.TestCase):
 
     def test__create_from_array_without_units_should_raise_ValueError(self):
         times = np.arange(10)
-        t_start = 0.0*pq.s
-        t_stop = 10000.0*pq.ms
+        t_start = 0.0 * pq.s
+        t_stop = 10000.0 * pq.ms
         self.assertRaises(ValueError, SpikeTrain, times,
                           t_start=t_start, t_stop=t_stop)
         self.assertRaises(ValueError, _new_spiketrain, SpikeTrain, times,
@@ -717,8 +716,8 @@ class TestConstructor(unittest.TestCase):
 
     def test__create_from_array_with_incompatible_units_ValueError(self):
         times = np.arange(10) * pq.km
-        t_start = 0.0*pq.s
-        t_stop = 10000.0*pq.ms
+        t_start = 0.0 * pq.s
+        t_stop = 10000.0 * pq.ms
         self.assertRaises(ValueError, SpikeTrain, times,
                           t_start=t_start, t_stop=t_stop)
         self.assertRaises(ValueError, _new_spiketrain, SpikeTrain, times,
@@ -735,22 +734,23 @@ class TestConstructor(unittest.TestCase):
         assert_neo_object_is_compliant(train1)
         assert_neo_object_is_compliant(train2)
         self.assertRaises(ValueError, SpikeTrain,
-                          np.arange(t_start-5, t_stop), units='ms',
+                          np.arange(t_start - 5, t_stop), units='ms',
                           t_start=t_start, t_stop=t_stop)
         self.assertRaises(ValueError, _new_spiketrain, SpikeTrain,
-                          np.arange(t_start-5, t_stop), units='ms',
+                          np.arange(t_start - 5, t_stop), units='ms',
                           t_start=t_start, t_stop=t_stop)
         self.assertRaises(ValueError, SpikeTrain,
-                          np.arange(t_start, t_stop+5), units='ms',
+                          np.arange(t_start, t_stop + 5), units='ms',
                           t_start=t_start, t_stop=t_stop)
         self.assertRaises(ValueError, _new_spiketrain, SpikeTrain,
-                          np.arange(t_start, t_stop+5), units='ms',
+                          np.arange(t_start, t_stop + 5), units='ms',
                           t_start=t_start, t_stop=t_stop)
 
-    def test__create_with_len_times_different_size_than_waveform_shape1_ValueError(self):
+    def test__create_with_len_times_different_size_than_waveform_shape1_ValueError(
+            self):
         self.assertRaises(ValueError, SpikeTrain,
                           times=np.arange(10), units='s',
-                          t_stop=4, waveforms=np.ones((10,6,50)))
+                          t_stop=4, waveforms=np.ones((10, 6, 50)))
 
     def test_defaults(self):
         # default recommended attributes
@@ -770,67 +770,67 @@ class TestConstructor(unittest.TestCase):
 
     def test_default_tstart(self):
         # t start defaults to zero
-        train11 = SpikeTrain([3, 4, 5]*pq.s, t_stop=8000*pq.ms)
-        train21 = _new_spiketrain(SpikeTrain, [3, 4, 5]*pq.s,
-                                  t_stop=8000*pq.ms)
+        train11 = SpikeTrain([3, 4, 5] * pq.s, t_stop=8000 * pq.ms)
+        train21 = _new_spiketrain(SpikeTrain, [3, 4, 5] * pq.s,
+                                  t_stop=8000 * pq.ms)
         assert_neo_object_is_compliant(train11)
         assert_neo_object_is_compliant(train21)
-        self.assertEqual(train11.t_start, 0.*pq.s)
-        self.assertEqual(train21.t_start, 0.*pq.s)
+        self.assertEqual(train11.t_start, 0. * pq.s)
+        self.assertEqual(train21.t_start, 0. * pq.s)
 
         # unless otherwise specified
-        train12 = SpikeTrain([3, 4, 5]*pq.s, t_start=2.0, t_stop=8)
-        train22 = _new_spiketrain(SpikeTrain, [3, 4, 5]*pq.s,
+        train12 = SpikeTrain([3, 4, 5] * pq.s, t_start=2.0, t_stop=8)
+        train22 = _new_spiketrain(SpikeTrain, [3, 4, 5] * pq.s,
                                   t_start=2.0, t_stop=8)
         assert_neo_object_is_compliant(train12)
         assert_neo_object_is_compliant(train22)
-        self.assertEqual(train12.t_start, 2.*pq.s)
-        self.assertEqual(train22.t_start, 2.*pq.s)
+        self.assertEqual(train12.t_start, 2. * pq.s)
+        self.assertEqual(train22.t_start, 2. * pq.s)
 
     def test_tstop_units_conversion(self):
-        train11 = SpikeTrain([3, 5, 4]*pq.s, t_stop=10)
-        train21 = _new_spiketrain(SpikeTrain, [3, 5, 4]*pq.s, t_stop=10)
+        train11 = SpikeTrain([3, 5, 4] * pq.s, t_stop=10)
+        train21 = _new_spiketrain(SpikeTrain, [3, 5, 4] * pq.s, t_stop=10)
         assert_neo_object_is_compliant(train11)
         assert_neo_object_is_compliant(train21)
-        self.assertEqual(train11.t_stop, 10.*pq.s)
-        self.assertEqual(train21.t_stop, 10.*pq.s)
+        self.assertEqual(train11.t_stop, 10. * pq.s)
+        self.assertEqual(train21.t_stop, 10. * pq.s)
 
-        train12 = SpikeTrain([3, 5, 4]*pq.s, t_stop=10000.*pq.ms)
-        train22 = _new_spiketrain(SpikeTrain, [3, 5, 4]*pq.s,
-                                  t_stop=10000.*pq.ms)
+        train12 = SpikeTrain([3, 5, 4] * pq.s, t_stop=10000. * pq.ms)
+        train22 = _new_spiketrain(SpikeTrain, [3, 5, 4] * pq.s,
+                                  t_stop=10000. * pq.ms)
         assert_neo_object_is_compliant(train12)
         assert_neo_object_is_compliant(train22)
-        self.assertEqual(train12.t_stop, 10.*pq.s)
-        self.assertEqual(train22.t_stop, 10.*pq.s)
+        self.assertEqual(train12.t_stop, 10. * pq.s)
+        self.assertEqual(train22.t_stop, 10. * pq.s)
 
-        train13 = SpikeTrain([3, 5, 4], units='sec', t_stop=10000.*pq.ms)
+        train13 = SpikeTrain([3, 5, 4], units='sec', t_stop=10000. * pq.ms)
         train23 = _new_spiketrain(SpikeTrain, [3, 5, 4],
-                                  units='sec', t_stop=10000.*pq.ms)
+                                  units='sec', t_stop=10000. * pq.ms)
         assert_neo_object_is_compliant(train13)
         assert_neo_object_is_compliant(train23)
-        self.assertEqual(train13.t_stop, 10.*pq.s)
-        self.assertEqual(train23.t_stop, 10.*pq.s)
+        self.assertEqual(train13.t_stop, 10. * pq.s)
+        self.assertEqual(train23.t_stop, 10. * pq.s)
 
 
 class TestSorting(unittest.TestCase):
     def test_sort(self):
         waveforms = np.array([[[0., 1.]], [[2., 3.]], [[4., 5.]]]) * pq.mV
-        train = SpikeTrain([3, 4, 5]*pq.s, waveforms=waveforms, name='n',
+        train = SpikeTrain([3, 4, 5] * pq.s, waveforms=waveforms, name='n',
                            t_stop=10.0)
         assert_neo_object_is_compliant(train)
         train.sort()
         assert_neo_object_is_compliant(train)
-        assert_arrays_equal(train, [3, 4, 5]*pq.s)
+        assert_arrays_equal(train, [3, 4, 5] * pq.s)
         assert_arrays_equal(train.waveforms, waveforms)
         self.assertEqual(train.name, 'n')
         self.assertEqual(train.t_stop, 10.0 * pq.s)
 
-        train = SpikeTrain([3, 5, 4]*pq.s, waveforms=waveforms, name='n',
+        train = SpikeTrain([3, 5, 4] * pq.s, waveforms=waveforms, name='n',
                            t_stop=10.0)
         assert_neo_object_is_compliant(train)
         train.sort()
         assert_neo_object_is_compliant(train)
-        assert_arrays_equal(train, [3, 4, 5]*pq.s)
+        assert_arrays_equal(train, [3, 4, 5] * pq.s)
         assert_arrays_equal(train.waveforms, waveforms[[0, 2, 1]])
         self.assertEqual(train.name, 'n')
         self.assertEqual(train.t_start, 0.0 * pq.s)
@@ -846,7 +846,7 @@ class TestSlice(unittest.TestCase):
                                     [[4., 5.],
                                      [4.1, 5.1]]]) * pq.mV
         self.data1 = np.array([3, 4, 5])
-        self.data1quant = self.data1*pq.s
+        self.data1quant = self.data1 * pq.s
         self.train1 = SpikeTrain(self.data1quant, waveforms=self.waveforms1,
                                  name='n', arb='arbb', t_stop=10.0)
 
@@ -858,7 +858,7 @@ class TestSlice(unittest.TestCase):
         result = self.train1[1:2]
         assert_arrays_equal(self.train1[1:2], result)
         targwaveforms = np.array([[[2., 3.],
-                                   [2.1, 3.1]]])
+                                   [2.1, 3.1]]]) * pq.mV
 
         # but keep everything else pristine
         assert_neo_object_is_compliant(result)
@@ -879,9 +879,9 @@ class TestSlice(unittest.TestCase):
         result = self.train1[1:]
         assert_arrays_equal(self.train1[1:], result)
         targwaveforms = np.array([[[2., 3.],
-                                  [2.1, 3.1]],
-                                 [[4., 5.],
-                                  [4.1, 5.1]]]) * pq.mV
+                                   [2.1, 3.1]],
+                                  [[4., 5.],
+                                   [4.1, 5.1]]]) * pq.mV
 
         # but keep everything else pristine
         assert_neo_object_is_compliant(result)
@@ -902,9 +902,9 @@ class TestSlice(unittest.TestCase):
         result = self.train1[:2]
         assert_arrays_equal(self.train1[:2], result)
         targwaveforms = np.array([[[0., 1.],
-                                  [0.1, 1.1]],
-                                 [[2., 3.],
-                                  [2.1, 3.1]]]) * pq.mV
+                                   [0.1, 1.1]],
+                                  [[2., 3.],
+                                   [2.1, 3.1]]]) * pq.mV
 
         # but keep everything else pristine
         assert_neo_object_is_compliant(result)
@@ -925,9 +925,9 @@ class TestSlice(unittest.TestCase):
         result = self.train1[:-1]
         assert_arrays_equal(self.train1[:-1], result)
         targwaveforms = np.array([[[0., 1.],
-                                  [0.1, 1.1]],
-                                 [[2., 3.],
-                                  [2.1, 3.1]]]) * pq.mV
+                                   [0.1, 1.1]],
+                                  [[2., 3.],
+                                   [2.1, 3.1]]]) * pq.mV
 
         # but keep everything else pristine
         assert_neo_object_is_compliant(result)
@@ -947,20 +947,20 @@ class TestSlice(unittest.TestCase):
 class TestTimeSlice(unittest.TestCase):
     def setUp(self):
         self.waveforms1 = np.array([[[0., 1.],
-                                    [0.1, 1.1]],
-                                   [[2., 3.],
-                                    [2.1, 3.1]],
-                                   [[4., 5.],
-                                    [4.1, 5.1]],
-                                   [[6., 7.],
-                                    [6.1, 7.1]],
-                                   [[8., 9.],
-                                    [8.1, 9.1]],
-                                   [[10., 11.],
-                                    [10.1, 11.1]]]) * pq.mV
+                                     [0.1, 1.1]],
+                                    [[2., 3.],
+                                     [2.1, 3.1]],
+                                    [[4., 5.],
+                                     [4.1, 5.1]],
+                                    [[6., 7.],
+                                     [6.1, 7.1]],
+                                    [[8., 9.],
+                                     [8.1, 9.1]],
+                                    [[10., 11.],
+                                     [10.1, 11.1]]]) * pq.mV
         self.data1 = np.array([0.1, 0.5, 1.2, 3.3, 6.4, 7])
-        self.data1quant = self.data1*pq.ms
-        self.train1 = SpikeTrain(self.data1quant, t_stop=10.0*pq.ms,
+        self.data1quant = self.data1 * pq.ms
+        self.train1 = SpikeTrain(self.data1quant, t_stop=10.0 * pq.ms,
                                  waveforms=self.waveforms1)
 
     def test_compliant(self):
@@ -976,11 +976,11 @@ class TestTimeSlice(unittest.TestCase):
         targ = SpikeTrain([0.5, 1.2, 3.3] * pq.ms, t_stop=3.3)
         assert_arrays_equal(result, targ)
         targwaveforms = np.array([[[2., 3.],
-                                  [2.1, 3.1]],
-                                 [[4., 5.],
-                                  [4.1, 5.1]],
-                                 [[6., 7.],
-                                  [6.1, 7.1]]]) * pq.mV
+                                   [2.1, 3.1]],
+                                  [[4., 5.],
+                                   [4.1, 5.1]],
+                                  [[6., 7.],
+                                   [6.1, 7.1]]]) * pq.mV
         assert_arrays_equal(targwaveforms, result.waveforms)
 
         # but keep everything else pristine
@@ -1037,7 +1037,7 @@ class TestTimeSlice(unittest.TestCase):
         self.assertEqual(t_stop, result.t_stop)
 
     def test_time_slice_out_of_boundries(self):
-        self.train1.t_start = 0.1*pq.ms
+        self.train1.t_start = 0.1 * pq.ms
         assert_neo_object_is_compliant(self.train1)
 
         # time_slice spike train, keep sliced spike times
@@ -1126,7 +1126,7 @@ class TestTimeSlice(unittest.TestCase):
         self.assertEqual(t_stop, result.t_stop)
 
     def test_time_slice_none_both(self):
-        self.train1.t_start = 0.1*pq.ms
+        self.train1.t_start = 0.1 * pq.ms
         assert_neo_object_is_compliant(self.train1)
 
         # time_slice spike train, keep sliced spike times
@@ -1144,6 +1144,113 @@ class TestTimeSlice(unittest.TestCase):
         self.assertEqual(self.train1.t_start, result.t_start)
         self.assertEqual(self.train1.t_stop, result.t_stop)
 
+
+class TestMerge(unittest.TestCase):
+    def setUp(self):
+        self.waveforms1 = np.array([[[0., 1.],
+                                     [0.1, 1.1]],
+                                    [[2., 3.],
+                                     [2.1, 3.1]],
+                                    [[4., 5.],
+                                     [4.1, 5.1]],
+                                    [[6., 7.],
+                                     [6.1, 7.1]],
+                                    [[8., 9.],
+                                     [8.1, 9.1]],
+                                    [[10., 11.],
+                                     [10.1, 11.1]]]) * pq.mV
+        self.data1 = np.array([0.1, 0.5, 1.2, 3.3, 6.4, 7])
+        self.data1quant = self.data1 * pq.ms
+        self.train1 = SpikeTrain(self.data1quant, t_stop=10.0 * pq.ms,
+                                 waveforms=self.waveforms1)
+
+        self.waveforms2 = np.array([[[0., 1.],
+                                     [0.1, 1.1]],
+                                    [[2., 3.],
+                                     [2.1, 3.1]],
+                                    [[4., 5.],
+                                     [4.1, 5.1]],
+                                    [[6., 7.],
+                                     [6.1, 7.1]],
+                                    [[8., 9.],
+                                     [8.1, 9.1]],
+                                    [[10., 11.],
+                                     [10.1, 11.1]]]) * pq.mV
+        self.data2 = np.array([0.1, 0.5, 1.2, 3.3, 6.4, 7])
+        self.data2quant = self.data2 * pq.ms
+        self.train2 = SpikeTrain(self.data2quant, t_stop=10.0 * pq.ms,
+                                 waveforms=self.waveforms2)
+
+        self.segment = Segment()
+        self.segment.spiketrains.extend([self.train1, self.train2])
+        self.train1.segment = self.segment
+        self.train2.segment = self.segment
+
+    def test_compliant(self):
+        assert_neo_object_is_compliant(self.train1)
+        assert_neo_object_is_compliant(self.train2)
+
+    def test_merge_typical(self):
+        self.train1.waveforms = None
+        self.train2.waveforms = None
+
+        result = self.train1.merge(self.train2)
+        assert_neo_object_is_compliant(result)
+
+    def test_merge_with_waveforms(self):
+        result = self.train1.merge(self.train2)
+        assert_neo_object_is_compliant(result)
+
+    def test_correct_shape(self):
+        result = self.train1.merge(self.train2)
+        self.assertEqual(len(result.shape), 1)
+        self.assertEqual(result.shape[0],
+                         self.train1.shape[0] + self.train2.shape[0])
+
+    def test_correct_times(self):
+        result = self.train1.merge(self.train2)
+        expected = sorted(np.concatenate((self.train1.times,
+                                          self.train2.times)))
+        np.testing.assert_array_equal(result, expected)
+
+    def test_rescaling_units(self):
+        train3 = self.train1.duplicate_with_new_data(
+            self.train1.times.magnitude * pq.microsecond)
+        train3.segment = self.train1.segment
+        result = train3.merge(self.train2)
+        time_unit = result.units
+        expected = sorted(np.concatenate((train3.rescale(time_unit).times,
+                                          self.train2.rescale(
+                                              time_unit).times)))
+        expected = expected * time_unit
+        np.testing.assert_array_equal(result.rescale(time_unit), expected)
+
+    def test_sampling_rate(self):
+        result = self.train1.merge(self.train2)
+        self.assertEqual(result.sampling_rate, self.train1.sampling_rate)
+
+    def test_neo_relations(self):
+        result = self.train1.merge(self.train2)
+        self.assertEqual(self.train1.segment, result.segment)
+        self.assertTrue(result in result.segment.spiketrains)
+
+    def test_missing_waveforms_error(self):
+        self.train1.waveforms = None
+        with self.assertRaises(MergeError):
+            self.train1.merge(self.train2)
+        with self.assertRaises(MergeError):
+            self.train2.merge(self.train1)
+
+    def test_incompatible_t_start(self):
+        train3 = self.train1.duplicate_with_new_data(self.train1,
+                                                     t_start=-1 * pq.s)
+        train3.segment = self.train1.segment
+        with self.assertRaises(MergeError):
+            train3.merge(self.train2)
+        with self.assertRaises(MergeError):
+            self.train2.merge(train3)
+
+
 class TestDuplicateWithNewData(unittest.TestCase):
     def setUp(self):
         self.waveforms = np.array([[[0., 1.],
@@ -1159,14 +1266,14 @@ class TestDuplicateWithNewData(unittest.TestCase):
                                    [[10., 11.],
                                     [10.1, 11.1]]]) * pq.mV
         self.data = np.array([0.1, 0.5, 1.2, 3.3, 6.4, 7])
-        self.dataquant = self.data*pq.ms
-        self.train = SpikeTrain(self.dataquant, t_stop=10.0*pq.ms,
+        self.dataquant = self.data * pq.ms
+        self.train = SpikeTrain(self.dataquant, t_stop=10.0 * pq.ms,
                                 waveforms=self.waveforms)
 
     def test_duplicate_with_new_data(self):
         signal1 = self.train
-        new_t_start = -10*pq.s
-        new_t_stop = 10*pq.s
+        new_t_start = -10 * pq.s
+        new_t_stop = 10 * pq.s
         new_data = np.sort(np.random.uniform(new_t_start.magnitude,
                                              new_t_stop.magnitude,
                                              len(self.train))) * pq.ms
@@ -1180,6 +1287,20 @@ class TestDuplicateWithNewData(unittest.TestCase):
         self.assertEqual(signal1b.t_stop, new_t_stop)
         self.assertEqual(signal1b.sampling_rate, signal1.sampling_rate)
 
+    def test_deep_copy_attributes(self):
+        signal1 = self.train
+        new_t_start = -10 * pq.s
+        new_t_stop = 10 * pq.s
+        new_data = np.sort(np.random.uniform(new_t_start.magnitude,
+                                             new_t_stop.magnitude,
+                                             len(self.train))) * pq.ms
+
+        signal1b = signal1.duplicate_with_new_data(new_data,
+                                                   t_start=new_t_start,
+                                                   t_stop=new_t_stop)
+        signal1.annotate(new_annotation='for signal 1')
+        self.assertTrue('new_annotation' not in signal1b.annotations)
+
 class TestAttributesAnnotations(unittest.TestCase):
     def test_set_universally_recommended_attributes(self):
         train = SpikeTrain([3, 4, 5], units='sec', name='Name',
@@ -1191,18 +1312,18 @@ class TestAttributesAnnotations(unittest.TestCase):
         self.assertEqual(train.file_origin, 'crack.txt')
 
     def test_autoset_universally_recommended_attributes(self):
-        train = SpikeTrain([3, 4, 5]*pq.s, t_stop=10.0)
+        train = SpikeTrain([3, 4, 5] * pq.s, t_stop=10.0)
         assert_neo_object_is_compliant(train)
         self.assertEqual(train.name, None)
         self.assertEqual(train.description, None)
         self.assertEqual(train.file_origin, None)
 
     def test_annotations(self):
-        train = SpikeTrain([3, 4, 5]*pq.s, t_stop=11.1)
+        train = SpikeTrain([3, 4, 5] * pq.s, t_stop=11.1)
         assert_neo_object_is_compliant(train)
         self.assertEqual(train.annotations, {})
 
-        train = SpikeTrain([3, 4, 5]*pq.s, t_stop=11.1, ratname='Phillippe')
+        train = SpikeTrain([3, 4, 5] * pq.s, t_stop=11.1, ratname='Phillippe')
         assert_neo_object_is_compliant(train)
         self.assertEqual(train.annotations, {'ratname': 'Phillippe'})
 
@@ -1216,8 +1337,8 @@ class TestChanging(unittest.TestCase):
         train = SpikeTrain(data, t_stop=100.0)
         train[0] = 99 * pq.s
         assert_neo_object_is_compliant(train)
-        self.assertEqual(train[0], 99*pq.s)
-        self.assertEqual(data[0], 3*pq.s)
+        self.assertEqual(train[0], 99 * pq.s)
+        self.assertEqual(data[0], 3 * pq.s)
 
     def test_change_with_copy_false(self):
         # Changing spike train also changes data, because it is a view
@@ -1226,8 +1347,8 @@ class TestChanging(unittest.TestCase):
         train = SpikeTrain(data, copy=False, t_stop=100.0)
         train[0] = 99 * pq.s
         assert_neo_object_is_compliant(train)
-        self.assertEqual(train[0], 99*pq.s)
-        self.assertEqual(data[0], 99*pq.s)
+        self.assertEqual(train[0], 99 * pq.s)
+        self.assertEqual(data[0], 99 * pq.s)
 
     def test_change_with_copy_false_and_fake_rescale(self):
         # Changing spike train also changes data, because it is a view
@@ -1237,8 +1358,8 @@ class TestChanging(unittest.TestCase):
         train = SpikeTrain(data, units='ms', copy=False, t_stop=100000)
         train[0] = 99000 * pq.ms
         assert_neo_object_is_compliant(train)
-        self.assertEqual(train[0], 99000*pq.ms)
-        self.assertEqual(data[0], 99000*pq.ms)
+        self.assertEqual(train[0], 99000 * pq.ms)
+        self.assertEqual(data[0], 99000 * pq.ms)
 
     def test_change_with_copy_false_and_rescale_true(self):
         # When rescaling, a view cannot be returned
@@ -1251,9 +1372,9 @@ class TestChanging(unittest.TestCase):
         data = [3, 4, 5] * pq.s
         train = SpikeTrain(data, units='ms', t_stop=6000)
         assert_neo_object_is_compliant(train)
-        self.assertEqual(train[0], 3000*pq.ms)
+        self.assertEqual(train[0], 3000 * pq.ms)
         self.assertEqual(train._dimensionality, pq.ms._dimensionality)
-        self.assertEqual(train.t_stop, 6000*pq.ms)
+        self.assertEqual(train.t_stop, 6000 * pq.ms)
 
     def test_change_with_copy_true(self):
         # Changing spike train does not change data
@@ -1262,8 +1383,8 @@ class TestChanging(unittest.TestCase):
         train = SpikeTrain(data, copy=True, t_stop=100)
         train[0] = 99 * pq.s
         assert_neo_object_is_compliant(train)
-        self.assertEqual(train[0], 99*pq.s)
-        self.assertEqual(data[0], 3*pq.s)
+        self.assertEqual(train[0], 99 * pq.s)
+        self.assertEqual(data[0], 3 * pq.s)
 
     def test_change_with_copy_default_and_data_not_quantity(self):
         # Default is copy = True
@@ -1275,8 +1396,8 @@ class TestChanging(unittest.TestCase):
         train = SpikeTrain(data, units='sec', t_stop=100)
         train[0] = 99 * pq.s
         assert_neo_object_is_compliant(train)
-        self.assertEqual(train[0], 99*pq.s)
-        self.assertEqual(data[0], 3*pq.s)
+        self.assertEqual(train[0], 99 * pq.s)
+        self.assertEqual(data[0], 3 * pq.s)
 
     def test_change_with_copy_false_and_data_not_quantity(self):
         # Changing spike train also changes data, because it is a view
@@ -1288,7 +1409,7 @@ class TestChanging(unittest.TestCase):
                            t_stop=101)
         train[0] = 99 * pq.s
         assert_neo_object_is_compliant(train)
-        self.assertEqual(train[0], 99*pq.s)
+        self.assertEqual(train[0], 99 * pq.s)
         self.assertEqual(data[0], 99)
 
     def test_change_with_copy_false_and_dtype_change(self):
@@ -1306,7 +1427,7 @@ class TestChanging(unittest.TestCase):
         train = SpikeTrain(data, units='sec', copy=True, t_stop=123.4)
         train[0] = 99 * pq.s
         assert_neo_object_is_compliant(train)
-        self.assertEqual(train[0], 99*pq.s)
+        self.assertEqual(train[0], 99 * pq.s)
         self.assertEqual(data[0], 3)
 
     def test_changing_slice_changes_original_spiketrain(self):
@@ -1320,9 +1441,9 @@ class TestChanging(unittest.TestCase):
         result = train[1:3]
         result[0] = 99 * pq.s
         assert_neo_object_is_compliant(train)
-        self.assertEqual(train[1], 99*pq.s)
-        self.assertEqual(result[0], 99*pq.s)
-        self.assertEqual(data[1], 4*pq.s)
+        self.assertEqual(train[1], 99 * pq.s)
+        self.assertEqual(result[0], 99 * pq.s)
+        self.assertEqual(data[1], 4 * pq.s)
 
     def test_changing_slice_changes_original_spiketrain_with_copy_false(self):
         # If we slice a spiketrain and then change the slice, the
@@ -1336,17 +1457,17 @@ class TestChanging(unittest.TestCase):
         result[0] = 99 * pq.s
         assert_neo_object_is_compliant(train)
         assert_neo_object_is_compliant(result)
-        self.assertEqual(train[1], 99*pq.s)
-        self.assertEqual(result[0], 99*pq.s)
-        self.assertEqual(data[1], 99*pq.s)
+        self.assertEqual(train[1], 99 * pq.s)
+        self.assertEqual(result[0], 99 * pq.s)
+        self.assertEqual(data[1], 99 * pq.s)
 
     def test__changing_spiketime_should_check_time_in_range(self):
         data = [3, 4, 5] * pq.ms
         train = SpikeTrain(data, copy=False, t_start=0.5, t_stop=10.0)
         assert_neo_object_is_compliant(train)
-        self.assertRaises(ValueError, train.__setitem__, 0, 10.1*pq.ms)
-        self.assertRaises(ValueError, train.__setitem__, 1, 5.0*pq.s)
-        self.assertRaises(ValueError, train.__setitem__, 2, 5.0*pq.s)
+        self.assertRaises(ValueError, train.__setitem__, 0, 10.1 * pq.ms)
+        self.assertRaises(ValueError, train.__setitem__, 1, 5.0 * pq.s)
+        self.assertRaises(ValueError, train.__setitem__, 2, 5.0 * pq.s)
         self.assertRaises(ValueError, train.__setitem__, 0, 0)
 
     def test__changing_multiple_spiketimes(self):
@@ -1362,7 +1483,7 @@ class TestChanging(unittest.TestCase):
         assert_neo_object_is_compliant(train)
         if sys.version_info[0] == 2:
             self.assertRaises(ValueError, train.__setslice__,
-                              0, 3,  [3, 4, 11] * pq.ms)
+                              0, 3, [3, 4, 11] * pq.ms)
             self.assertRaises(ValueError, train.__setslice__,
                               0, 3, [0, 4, 5] * pq.ms)
 
@@ -1383,11 +1504,16 @@ class TestChanging(unittest.TestCase):
     def test__rescale(self):
         data = [3, 4, 5] * pq.ms
         train = SpikeTrain(data, t_start=0.5, t_stop=10.0)
+        train.segment = Segment()
+        train.unit = Unit()
         result = train.rescale(pq.s)
         assert_neo_object_is_compliant(train)
         assert_neo_object_is_compliant(result)
         assert_arrays_equal(train, result)
         self.assertEqual(result.units, 1 * pq.s)
+        self.assertIs(result.segment, train.segment)
+        self.assertIs(result.unit, train.unit)
+
 
     def test__rescale_same_units(self):
         data = [3, 4, 5] * pq.ms
@@ -1418,8 +1544,8 @@ class TestPropertiesMethods(unittest.TestCase):
         self.t_stop1 = 10.0
         self.t_start1quant = self.t_start1 * pq.ms
         self.t_stop1quant = self.t_stop1 * pq.ms
-        self.sampling_rate1 = .1*pq.Hz
-        self.left_sweep1 = 2.*pq.s
+        self.sampling_rate1 = .1 * pq.Hz
+        self.left_sweep1 = 2. * pq.s
         self.name1 = 'train 1'
         self.description1 = 'a test object'
         self.ann1 = {'targ0': [1, 2], 'targ1': 1.1}
@@ -1469,8 +1595,8 @@ class TestPropertiesMethods(unittest.TestCase):
         assert_neo_object_is_compliant(self.train1)
         result3 = self.train1.spike_duration
 
-        self.assertEqual(result1, 20./pq.Hz)
-        self.assertEqual(result1.units, 1./pq.Hz)
+        self.assertEqual(result1, 20. / pq.Hz)
+        self.assertEqual(result1.units, 1. / pq.Hz)
         self.assertEqual(result2, None)
         self.assertEqual(result3, None)
 
@@ -1482,7 +1608,7 @@ class TestPropertiesMethods(unittest.TestCase):
         result2 = self.train1.sampling_period
 
         self.train1.sampling_rate = self.sampling_rate1
-        self.train1.sampling_period = 10.*pq.ms
+        self.train1.sampling_period = 10. * pq.ms
         assert_neo_object_is_compliant(self.train1)
         result3a = self.train1.sampling_period
         result3b = self.train1.sampling_rate
@@ -1491,13 +1617,13 @@ class TestPropertiesMethods(unittest.TestCase):
         result4a = self.train1.sampling_period
         result4b = self.train1.sampling_rate
 
-        self.assertEqual(result1, 10./pq.Hz)
-        self.assertEqual(result1.units, 1./pq.Hz)
+        self.assertEqual(result1, 10. / pq.Hz)
+        self.assertEqual(result1.units, 1. / pq.Hz)
         self.assertEqual(result2, None)
-        self.assertEqual(result3a, 10.*pq.ms)
-        self.assertEqual(result3a.units, 1.*pq.ms)
-        self.assertEqual(result3b, .1/pq.ms)
-        self.assertEqual(result3b.units, 1./pq.ms)
+        self.assertEqual(result3a, 10. * pq.ms)
+        self.assertEqual(result3a.units, 1. * pq.ms)
+        self.assertEqual(result3b, .1 / pq.ms)
+        self.assertEqual(result3b.units, 1. / pq.ms)
         self.assertEqual(result4a, None)
         self.assertEqual(result4b, None)
 
@@ -1518,8 +1644,8 @@ class TestPropertiesMethods(unittest.TestCase):
         assert_neo_object_is_compliant(self.train1)
         result4 = self.train1.right_sweep
 
-        self.assertEqual(result1, 22.*pq.s)
-        self.assertEqual(result1.units, 1.*pq.s)
+        self.assertEqual(result1, 22. * pq.s)
+        self.assertEqual(result1.units, 1. * pq.s)
         self.assertEqual(result2, None)
         self.assertEqual(result3, None)
         self.assertEqual(result4, None)
@@ -1584,7 +1710,7 @@ class TestMiscellaneous(unittest.TestCase):
         t_start_custom64 = np.array(t_start_custom, dtype=np.float64)
         t_stop_custom64 = np.array(t_stop_custom, dtype=np.float64)
 
-        #This is OK.
+        # This is OK.
         train = SpikeTrain(data64, copy=True, t_start=t_start, t_stop=t_stop)
         assert_neo_object_is_compliant(train)
 
@@ -1739,7 +1865,7 @@ class TestMiscellaneous(unittest.TestCase):
                            dtype=np.float64)
         assert_neo_object_is_compliant(train)
 
-        #This use to bug - see ticket #38
+        # This use to bug - see ticket #38
         train = SpikeTrain(data16, copy=True, t_start=t_start, t_stop=t_stop)
         assert_neo_object_is_compliant(train)
         train = SpikeTrain(data16, copy=True, t_start=t_start, t_stop=t_stop,

+ 1 - 4
code/python-neo/neo/test/coretest/test_unit.py

@@ -6,10 +6,7 @@ Tests of the neo.core.unit.Unit class
 # needed for python 3 compatibility
 from __future__ import absolute_import, division, print_function
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 import numpy as np
 

+ 3 - 3
code/python-neo/neo/test/generate_datasets.py

@@ -20,7 +20,7 @@ from neo.core import (AnalogSignal,
                       Segment, SpikeTrain,
                       Unit,
                       class_by_name)
-from neo.io.tools import iteritems
+
 from neo.core.baseneo import _container_name
 
 
@@ -104,7 +104,7 @@ def generate_one_simple_segment(seg_name='segment 0',
             seg.spiketrains.append(sptr)
 
     if Event in supported_objects:
-        for name, labels in iteritems(event_types):
+        for name, labels in event_types.items():
             evt_size = rand()*np.diff(event_size_range)
             evt_size += event_size_range[0]
             evt_size = int(evt_size)
@@ -114,7 +114,7 @@ def generate_one_simple_segment(seg_name='segment 0',
             seg.events.append(evt)
 
     if Epoch in supported_objects:
-        for name, labels in iteritems(epoch_types):
+        for name, labels in epoch_types.items():
             t = 0
             times = []
             durations = []

+ 2 - 0
code/python-neo/neo/test/iotest/common_io_test.py

@@ -23,6 +23,7 @@ __test__ = False
 url_for_tests = "https://portal.g-node.org/neo/"
 
 import os
+from copy import copy
 
 try:
     import unittest2 as unittest
@@ -84,6 +85,7 @@ class BaseTestIO(object):
         '''
         Set up the test fixture.  This is run for every test
         '''
+        self.files_to_test = copy(self.__class__.files_to_test)
         self.higher = self.ioclass.supported_objects[0]
         self.shortname = self.ioclass.__name__.lower().strip('io')
         # these objects can both be written and read

+ 1 - 4
code/python-neo/neo/test/iotest/test_alphaomegaio.py

@@ -6,10 +6,7 @@ Tests of neo.io.alphaomegaio
 # needed for python 3 compatibility
 from __future__ import absolute_import, division
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 from neo.io import AlphaOmegaIO
 from neo.test.iotest.common_io_test import BaseTestIO

+ 1 - 4
code/python-neo/neo/test/iotest/test_asciisignalio.py

@@ -6,10 +6,7 @@ Tests of neo.io.asciisignalio
 # needed for python 3 compatibility
 from __future__ import absolute_import, division
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 from neo.io import AsciiSignalIO
 from neo.test.iotest.common_io_test import BaseTestIO

+ 1 - 4
code/python-neo/neo/test/iotest/test_asciispiketrainio.py

@@ -6,10 +6,7 @@ Tests of neo.io.asciispiketrainio
 # needed for python 3 compatibility
 from __future__ import absolute_import, division
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 from neo.io import AsciiSpikeTrainIO
 from neo.test.iotest.common_io_test import BaseTestIO

+ 10 - 5
code/python-neo/neo/test/iotest/test_axonio.py

@@ -8,10 +8,7 @@ from __future__ import absolute_import
 
 import sys
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 from neo.io import AxonIO
 from neo.test.iotest.common_io_test import BaseTestIO
@@ -29,7 +26,15 @@ class TestAxonIO(BaseTestIO, unittest.TestCase):
                      ]
     files_to_download = files_to_test
     ioclass = AxonIO
-
+    
+    def test_read_protocol(self):
+        for f in self.files_to_test:
+            filename = self.get_filename_path(f)
+            reader = AxonIO(filename=filename)
+            bl = reader.read_block(lazy=True)
+            if bl.annotations['abf_version'].startswith('2'):
+                reader.read_protocol()
+        
 
 if __name__ == "__main__":
     unittest.main()

+ 1 - 4
code/python-neo/neo/test/iotest/test_baseio.py

@@ -6,10 +6,7 @@ Tests of neo.io.baseio
 # needed for python 3 compatibility
 from __future__ import absolute_import, division
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 from neo.core import objectlist
 from neo.io.baseio import BaseIO

+ 2 - 5
code/python-neo/neo/test/iotest/test_blackrockio.py

@@ -6,10 +6,7 @@ Tests of neo.io.blackrockio
 # needed for python 3 compatibility
 from __future__ import absolute_import
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 from numpy.testing import assert_equal
 
@@ -146,7 +143,7 @@ class CommonTests(BaseTestIO, unittest.TestCase):
     @unittest.skipUnless(HAVE_SCIPY, "requires scipy")
     def test_compare_blackrockio_with_matlabloader(self):
         """
-        This test compares the output of ReachGraspIO.read_block() with the
+        This test compares the output of BlackRockIO.read_block() with the
         output generated by a Matlab implementation of a Blackrock file reader
         provided by the company. The output for comparison is provided in a
         .mat file created by the script create_data_matlab_blackrock.m.

+ 1 - 4
code/python-neo/neo/test/iotest/test_brainvisionio.py

@@ -6,10 +6,7 @@ Tests of neo.io.brainvisionio
 # needed for python 3 compatibility
 from __future__ import absolute_import, division
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 from neo.io import BrainVisionIO
 

+ 1 - 4
code/python-neo/neo/test/iotest/test_brainwaredamio.py

@@ -9,10 +9,7 @@ from __future__ import absolute_import, division, print_function
 import os.path
 import sys
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 import numpy as np
 import quantities as pq

+ 1 - 4
code/python-neo/neo/test/iotest/test_brainwaref32io.py

@@ -9,10 +9,7 @@ from __future__ import absolute_import, division, print_function
 import os.path
 import sys
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 import numpy as np
 import quantities as pq

+ 1 - 4
code/python-neo/neo/test/iotest/test_brainwaresrcio.py

@@ -10,10 +10,7 @@ import logging
 import os.path
 import sys
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 import numpy as np
 import quantities as pq

+ 1 - 5
code/python-neo/neo/test/iotest/test_elanio.py

@@ -8,16 +8,12 @@ from __future__ import absolute_import, division
 
 import sys
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 from neo.io import ElanIO
 from neo.test.iotest.common_io_test import BaseTestIO
 
 
-@unittest.skipIf(sys.version_info[0] > 2, "not Python 3 compatible")
 class TestElanIO(BaseTestIO, unittest.TestCase, ):
     ioclass = ElanIO
     files_to_test = ['File_elan_1.eeg']

+ 1 - 4
code/python-neo/neo/test/iotest/test_elphyio.py

@@ -8,10 +8,7 @@ from __future__ import division
 
 import sys
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 try:
     from neo.io import ElphyIO

+ 1 - 4
code/python-neo/neo/test/iotest/test_exampleio.py

@@ -6,10 +6,7 @@ Tests of neo.io.exampleio
 # needed for python 3 compatibility
 from __future__ import absolute_import, division
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 from neo.io.exampleio import ExampleIO, HAVE_SCIPY
 from neo.test.iotest.common_io_test import BaseTestIO

+ 3 - 6
code/python-neo/neo/test/iotest/test_hdf5io.py

@@ -5,10 +5,7 @@ Tests of neo.io.hdf5io_new
 """
 
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 import numpy as np
 from numpy.testing import assert_array_equal
 from quantities import kHz, mV, ms, second, nA
@@ -69,7 +66,7 @@ class ReadOldNeoHdf5IOTest(BaseTestIO, unittest.TestCase):
             self.assertEqual(len(segment.events), 1)
             ev = segment.events[0]
             assert_array_equal(ev.labels,
-                               np.array(['trig0', 'trig1', 'trig2']))
+                               np.array(['trig0', 'trig1', 'trig2'], dtype='|S5'))
             self.assertEqual(ev.units, second)
             assert_array_equal(ev.magnitude, np.arange(0, 30, 10))
             self.assertEqual(ev.segment, segment)
@@ -77,7 +74,7 @@ class ReadOldNeoHdf5IOTest(BaseTestIO, unittest.TestCase):
             self.assertEqual(len(segment.epochs), 1)
             ep = segment.epochs[0]
             assert_array_equal(ep.labels,
-                               np.array(['btn0', 'btn1', 'btn2']))
+                               np.array(['btn0', 'btn1', 'btn2'], dtype='|S4'))
             assert_array_equal(ep.durations.magnitude,
                                np.array([10, 5, 7]))
             self.assertEqual(ep.units, second)

+ 2 - 4
code/python-neo/neo/test/iotest/test_igorio.py

@@ -3,10 +3,8 @@
 Tests of neo.io.igorproio
 """
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
+
 try:
     import igor
     HAVE_IGOR = True

+ 1 - 4
code/python-neo/neo/test/iotest/test_klustakwikio.py

@@ -11,10 +11,7 @@ import os.path
 import sys
 import tempfile
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 import numpy as np
 import quantities as pq

+ 4 - 6
code/python-neo/neo/test/iotest/test_kwikio.py

@@ -7,22 +7,20 @@ Tests of neo.io.kwikio
 from __future__ import division
 
 import sys
+import unittest
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
 try:
     import h5py
     HAVE_H5PY = True
 except ImportError:
     HAVE_H5PY = False
-from neo.io import KwikIO
+from neo.io import kwikio
 from neo.test.iotest.common_io_test import BaseTestIO
 
 @unittest.skipUnless(HAVE_H5PY, "requires h5py")
+@unittest.skipUnless(kwikio.HAVE_KWIK, "requires klusta")
 class TestKwikIO(BaseTestIO, unittest.TestCase):
-    ioclass = KwikIO
+    ioclass = kwikio.KwikIO
     files_to_test = ['experiment1.kwik']
     files_to_download =  ['experiment1.kwik',
                           'experiment1.kwx',

+ 1 - 5
code/python-neo/neo/test/iotest/test_micromedio.py

@@ -8,16 +8,12 @@ from __future__ import absolute_import, division
 
 import sys
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 from neo.io import MicromedIO
 from neo.test.iotest.common_io_test import BaseTestIO
 
 
-@unittest.skipIf(sys.version_info[0] > 2, "not Python 3 compatible")
 class TestMicromedIO(BaseTestIO, unittest.TestCase, ):
     ioclass = MicromedIO
     files_to_test = ['File_micromed_1.TRC']

+ 1 - 4
code/python-neo/neo/test/iotest/test_neomatlabio.py

@@ -6,10 +6,7 @@ Tests of neo.io.neomatlabio
 # needed for python 3 compatibility
 from __future__ import absolute_import, division
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 from neo.test.iotest.common_io_test import BaseTestIO
 from neo.io.neomatlabio import NeoMatlabIO, HAVE_SCIPY

+ 1 - 4
code/python-neo/neo/test/iotest/test_nestio.py

@@ -7,10 +7,7 @@ Tests of neo.io.exampleio
 from __future__ import absolute_import, division
 import warnings
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 import quantities as pq
 import numpy as np

+ 1 - 4
code/python-neo/neo/test/iotest/test_neuralynxio.py

@@ -11,10 +11,7 @@ import sys
 import re
 import warnings
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 import numpy as np
 import quantities as pq

+ 1 - 4
code/python-neo/neo/test/iotest/test_neuroexplorerio.py

@@ -8,10 +8,7 @@ from __future__ import absolute_import, division
 
 import sys
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 from neo.io import NeuroExplorerIO
 from neo.test.iotest.common_io_test import BaseTestIO

+ 1 - 4
code/python-neo/neo/test/iotest/test_neuroscopeio.py

@@ -6,10 +6,7 @@ Tests of neo.io.neuroscopeio
 # needed for python 3 compatibility
 from __future__ import absolute_import, division
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 from neo.io import NeuroScopeIO
 from neo.test.iotest.common_io_test import BaseTestIO

+ 1 - 4
code/python-neo/neo/test/iotest/test_neuroshareio.py

@@ -13,10 +13,7 @@ import zipfile
 import tempfile
 import platform
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 try:
     from urllib import urlretrieve  # Py2

+ 55 - 7
code/python-neo/neo/test/iotest/test_nixio.py

@@ -14,10 +14,7 @@ Tests for neo.io.nixio
 import os
 from datetime import datetime
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 try:
     from unittest import mock
@@ -65,6 +62,13 @@ class NixIOTest(unittest.TestCase):
         nix_channels = list(src for src in nixsrc.sources
                             if src.type == "neo.channelindex")
         self.assertEqual(len(neochx.index), len(nix_channels))
+
+        if len(neochx.channel_ids):
+            nix_chanids = list(src.metadata["channel_id"] for src
+                               in nixsrc.sources
+                               if src.type == "neo.channelindex")
+            self.assertEqual(len(neochx.channel_ids), len(nix_chanids))
+
         for nixchan in nix_channels:
             nixchanidx = nixchan.metadata["index"]
             try:
@@ -78,6 +82,12 @@ class NixIOTest(unittest.TestCase):
                     neochanname = neochanname.decode()
                 nixchanname = nixchan.metadata["neo_name"]
                 self.assertEqual(neochanname, nixchanname)
+            if len(neochx.channel_ids):
+                neochanid = neochx.channel_ids[neochanpos]
+                nixchanid = nixchan.metadata["channel_id"]
+                self.assertEqual(neochanid, nixchanid)
+            elif "channel_id" in nixchan.metadata:
+                self.fail("Channel ID not loaded")
         nix_units = list(src for src in nixsrc.sources
                          if src.type == "neo.unit")
         self.assertEqual(len(neochx.units), len(nix_units))
@@ -494,6 +504,7 @@ class NixIOTest(unittest.TestCase):
                 nixrc.name, "neo.channelindex.metadata"
             )
             nixrc.metadata.create_property("index", nix.Value(chan))
+            nixrc.metadata.create_property("channel_id", nix.Value(chan+1))
             dims = tuple(map(nix.Value, cls.rquant(3, 1)))
             nixrc.metadata.create_property("coordinates", dims)
             nixrc.metadata.create_property("coordinates.units",
@@ -600,7 +611,7 @@ class NixIOTest(unittest.TestCase):
         spiketrain.annotate(**d)
         seg.spiketrains.append(spiketrain)
 
-        chx = ChannelIndex(name="achx", index=[1, 2])
+        chx = ChannelIndex(name="achx", index=[1, 2], channel_ids=[0, 10])
         chx.annotate(**cls.rdict(5))
         blk.channel_indexes.append(chx)
 
@@ -611,6 +622,7 @@ class NixIOTest(unittest.TestCase):
         return blk
 
 
+@unittest.skipUnless(HAVE_NIX, "Requires NIX")
 class NixIOWriteTest(NixIOTest):
 
     def setUp(self):
@@ -629,6 +641,7 @@ class NixIOWriteTest(NixIOTest):
     def write_and_compare(self, blocks):
         self.writer.write_all_blocks(blocks)
         self.compare_blocks(self.writer.read_all_blocks(), self.reader.blocks)
+        self.compare_blocks(blocks, self.reader.blocks)
 
     def test_block_write(self):
         block = Block(name=self.rword(),
@@ -651,6 +664,7 @@ class NixIOWriteTest(NixIOTest):
         block = Block(name=self.rword())
         chx = ChannelIndex(name=self.rword(),
                            description=self.rsentence(),
+                           channel_ids=[10, 20, 30, 50, 80, 130],
                            index=[1, 2, 3, 5, 8, 13])
         block.channel_indexes.append(chx)
         self.write_and_compare([block])
@@ -681,7 +695,7 @@ class NixIOWriteTest(NixIOTest):
             units=pq.A
         )
         seg.irregularlysampledsignals.append(irsig)
-        self.write_and_compare([anotherblock])
+        self.write_and_compare([block, anotherblock])
 
         block.segments[0].analogsignals.append(
             AnalogSignal(signal=[10.0, 1.0, 3.0], units=pq.S,
@@ -813,7 +827,8 @@ class NixIOWriteTest(NixIOTest):
                                                       units=pq.s))
             for chidx in range(nchx):
                 chx = ChannelIndex(name="chx{}".format(chidx),
-                                   index=[1, 2])
+                                   index=[1, 2],
+                                   channel_ids=[11, 22])
                 blk.channel_indexes.append(chx)
                 for unidx in range(nunits):
                     unit = Unit()
@@ -821,6 +836,31 @@ class NixIOWriteTest(NixIOTest):
         self.writer.write_all_blocks(blocks)
         self.compare_blocks(blocks, self.reader.blocks)
 
+    def test_multiref_write(self):
+        blk = Block("blk1")
+        signal = AnalogSignal(name="sig1", signal=[0, 1, 2], units="mV",
+                              sampling_period=pq.Quantity(1, "ms"))
+
+        for idx in range(3):
+            segname = "seg" + str(idx)
+            seg = Segment(segname)
+            blk.segments.append(seg)
+            seg.analogsignals.append(signal)
+
+        chidx = ChannelIndex([10, 20, 29])
+        seg = blk.segments[0]
+        st = SpikeTrain(name="choochoo", times=[10, 11, 80], t_stop=1000,
+                        units="s")
+        seg.spiketrains.append(st)
+        blk.channel_indexes.append(chidx)
+        for idx in range(6):
+            unit = Unit("unit" + str(idx))
+            chidx.units.append(unit)
+            unit.spiketrains.append(st)
+
+        self.writer.write_block(blk)
+        self.compare_blocks([blk], self.reader.blocks)
+
     def test_to_value(self):
         section = self.io.nix_file.create_section("Metadata value test",
                                                   "Test")
@@ -867,6 +907,7 @@ class NixIOWriteTest(NixIOTest):
         self.assertEqual(val, section["val"])
 
 
+@unittest.skipUnless(HAVE_NIX, "Requires NIX")
 class NixIOReadTest(NixIOTest):
 
     filename = "testfile_readtest.h5"
@@ -888,6 +929,7 @@ class NixIOReadTest(NixIOTest):
     def tearDownClass(cls):
         if HAVE_NIX:
             cls.nixfile.close()
+            os.remove(cls.filename)
 
     def tearDown(self):
         self.io.close()
@@ -974,6 +1016,7 @@ class NixIOReadTest(NixIOTest):
         self.assertEqual(np.shape(segment.analogsignals[0]), (100, 3))
 
 
+@unittest.skipUnless(HAVE_NIX, "Requires NIX")
 class NixIOHashTest(NixIOTest):
 
     def setUp(self):
@@ -1072,6 +1115,7 @@ class NixIOHashTest(NixIOTest):
         self._hash_test(SpikeTrain, argfuncs)
 
 
+@unittest.skipUnless(HAVE_NIX, "Requires NIX")
 class NixIOPartialWriteTest(NixIOTest):
 
     filename = "testfile_partialwrite.h5"
@@ -1094,6 +1138,7 @@ class NixIOPartialWriteTest(NixIOTest):
     def tearDownClass(cls):
         if HAVE_NIX:
             cls.nixfile.close()
+            os.remove(cls.filename)
 
     def tearDown(self):
         self.restore_methods()
@@ -1163,6 +1208,7 @@ class NixIOPartialWriteTest(NixIOTest):
         self.compare_blocks(self.neo_blocks, self.io.nix_file.blocks)
 
 
+@unittest.skipUnless(HAVE_NIX, "Requires NIX")
 class NixIOContextTests(NixIOTest):
 
     filename = "context_test.h5"
@@ -1184,6 +1230,7 @@ class NixIOContextTests(NixIOTest):
                                 backend="h5py")
         self.compare_blocks([neoblock], nixfile.blocks)
         nixfile.close()
+        os.remove(self.filename)
 
     def test_context_read(self):
         nixfile = nix.File.open(self.filename, nix.FileMode.Overwrite,
@@ -1199,6 +1246,7 @@ class NixIOContextTests(NixIOTest):
 
         self.assertEqual(blocks[0].annotations["nix_name"], name_one)
         self.assertEqual(blocks[1].annotations["nix_name"], name_two)
+        os.remove(self.filename)
 
 
 @unittest.skipUnless(HAVE_NIX, "Requires NIX")

+ 22 - 10
code/python-neo/neo/test/iotest/test_nsdfio.py

@@ -11,14 +11,11 @@ import quantities as pq
 from datetime import datetime
 import os
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 from neo.io.nsdfio import HAVE_NSDF, NSDFIO
 from neo.test.iotest.common_io_test import BaseTestIO
-from neo.core import AnalogSignal, Segment, Block
+from neo.core import AnalogSignal, Segment, Block, ChannelIndex
 from neo.test.tools import assert_same_attributes, assert_same_annotations, assert_neo_object_is_compliant
 
 
@@ -68,6 +65,9 @@ class NSDFIOTest(unittest.TestCase):
     def _create_block_children(self, block):
         for i in range(3):
             block.segments.append(self.create_segment(block, name='Segment #{}'.format(i)))
+        for i in range(3):
+            block.channel_indexes.append(self.create_channelindex(block, name='ChannelIndex #{}'.format(i),
+                                            analogsignals=[seg.analogsignals[i] for seg in block.segments]))
 
     def create_segment(self, parent=None, name='Segment'):
         segment = Segment()
@@ -95,9 +95,7 @@ class NSDFIOTest(unittest.TestCase):
                               sampling_rate=100 * pq.Hz, t_start=2 * pq.min)
 
         signal.segment = parent
-
         self._assign_basic_attributes(signal, name=name)
-
         self._assign_annotations(signal)
 
         return signal
@@ -107,7 +105,6 @@ class NSDFIOTest(unittest.TestCase):
                               sampling_period=0.5 * pq.ms)
 
         signal.segment = parent
-
         self._assign_annotations(signal)
 
         return signal
@@ -117,11 +114,26 @@ class NSDFIOTest(unittest.TestCase):
                               sampling_rate=2 * pq.kHz, t_start=100 * pq.s)
 
         signal.segment = parent
-
         self._assign_basic_attributes(signal, name=name)
 
         return signal
 
+    def create_channelindex(self, parent=None, name='ChannelIndex', analogsignals=None):
+        channels_num = min([signal.shape[1] for signal in analogsignals])
+
+        channelindex = ChannelIndex(index=np.arange(channels_num),
+                                    channel_names=['Channel{}'.format(i) for i in range(channels_num)],
+                                    channel_ids=np.arange(channels_num),
+                                    coordinates=([[1.87, -5.2, 4.0]] * channels_num) * pq.cm)
+
+        for signal in analogsignals:
+            channelindex.analogsignals.append(signal)
+
+        self._assign_basic_attributes(channelindex, name)
+        self._assign_annotations(channelindex)
+
+        return channelindex
+
     def _assign_basic_attributes(self, object, name=None):
         if name is None:
             object.name = 'neo object'
@@ -219,7 +231,7 @@ class NSDFIOTestWriteThenRead(NSDFIOTest):
         else:
             self._compare_objects(signal1, signal2, exclude_attr=['shape', 'signal'])
             assert signal2.lazy_shape == signal1.shape
-            assert signal2.dtype == signal1.dtype
+        assert signal2.dtype == signal1.dtype
 
     def _compare_objects(self, object1, object2, exclude_attr=[]):
         assert object1.__class__.__name__ == object2.__class__.__name__

+ 1 - 4
code/python-neo/neo/test/iotest/test_pickleio.py

@@ -8,10 +8,7 @@ from __future__ import absolute_import, division
 
 import os
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 import numpy as np
 import quantities as pq

+ 5 - 8
code/python-neo/neo/test/iotest/test_plexonio.py

@@ -8,21 +8,18 @@ from __future__ import absolute_import, division
 
 import sys
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 from neo.io import PlexonIO
 from neo.test.iotest.common_io_test import BaseTestIO
 
 
-@unittest.skipIf(sys.version_info[0] > 2, "not Python 3 compatible")
 class TestPlexonIO(BaseTestIO, unittest.TestCase, ):
     ioclass = PlexonIO
-    files_to_test = ['File_plexon_1.plx',
-                     'File_plexon_2.plx',
-                     'File_plexon_3.plx',
+    files_to_test = [
+                    'File_plexon_1.plx',
+                    'File_plexon_2.plx',
+                    'File_plexon_3.plx',
                      ]
     files_to_download = files_to_test
 

+ 1 - 4
code/python-neo/neo/test/iotest/test_pynnio.py

@@ -8,10 +8,7 @@ from __future__ import absolute_import, division
 
 import os
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 import numpy as np
 import quantities as pq

+ 1 - 4
code/python-neo/neo/test/iotest/test_rawbinarysignalio.py

@@ -6,10 +6,7 @@ Tests of io.rawbinarysignal
 # needed for python 3 compatibility
 from __future__ import absolute_import, division
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 from neo.io import RawBinarySignalIO
 from neo.test.iotest.common_io_test import BaseTestIO

+ 1 - 4
code/python-neo/neo/test/iotest/test_spike2io.py

@@ -6,10 +6,7 @@ Tests of neo.io.spike2io
 # needed for python 3 compatibility
 from __future__ import absolute_import, division
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 from neo.io import Spike2IO
 from neo.test.iotest.common_io_test import BaseTestIO

+ 1 - 4
code/python-neo/neo/test/iotest/test_stimfitio.py

@@ -8,10 +8,7 @@ from __future__ import absolute_import
 
 import sys
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 from neo.io import StimfitIO
 from neo.io.stimfitio import HAS_STFIO

+ 1 - 5
code/python-neo/neo/test/iotest/test_tdtio.py

@@ -8,16 +8,12 @@ from __future__ import absolute_import, division
 
 import sys
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 from neo.io import TdtIO
 from neo.test.iotest.common_io_test import BaseTestIO
 
 
-@unittest.skipIf(sys.version_info[0] > 2, "not Python 3 compatible")
 class TestTdtIOIO(BaseTestIO, unittest.TestCase, ):
     ioclass = TdtIO
     files_to_test = ['aep_05']

+ 1 - 4
code/python-neo/neo/test/iotest/test_winedrio.py

@@ -6,10 +6,7 @@ Tests of neo.io.wineedrio
 # needed for python 3 compatibility
 from __future__ import absolute_import, division
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 from neo.io import WinEdrIO
 from neo.test.iotest.common_io_test import BaseTestIO

+ 1 - 4
code/python-neo/neo/test/iotest/test_winwcpio.py

@@ -6,10 +6,7 @@ Tests of neo.io.winwcpio
 # needed for python 3 compatibility
 from __future__ import absolute_import, division
 
-try:
-    import unittest2 as unittest
-except ImportError:
-    import unittest
+import unittest
 
 from neo.io import WinWcpIO
 from neo.test.iotest.common_io_test import BaseTestIO

+ 1 - 1
code/python-neo/neo/version.py

@@ -1,3 +1,3 @@
 # -*- coding: utf-8 -*-
+version = '0.5.2'
 
-version = '0.5.1'

+ 1 - 2
code/python-neo/setup.py

@@ -22,7 +22,7 @@ if os.environ.get('TRAVIS') == 'true' and \
 
 setup(
     name = "neo",
-    version = '0.5.1',
+    version = '0.5.2',
     packages = ['neo', 'neo.core', 'neo.io', 'neo.test', 'neo.test.iotest'],
     install_requires=install_requires,
     extras_require=extras_require,
@@ -39,7 +39,6 @@ setup(
         'Natural Language :: English',
         'Operating System :: OS Independent',
         'Programming Language :: Python :: 2',
-        'Programming Language :: Python :: 2.6',
         'Programming Language :: Python :: 2.7',
         'Programming Language :: Python :: 3',
         'Programming Language :: Python :: 3.3',