Procházet zdrojové kódy

Merge branch 'synced/add/utils' of NeuralEnsemble/ephy_testing_data into master

It is ok for me.
Do you want me to merge it now?
Samuel Garcia před 2 roky
rodič
revize
793fa1fa8c
3 změnil soubory, kde provedl 68 přidání a 1 odebrání
  1. 1 0
      .gitattributes
  2. 1 1
      config.yml
  3. 66 0
      utils.py

+ 1 - 0
.gitattributes

@@ -1,6 +1,7 @@
 LICENSE* annex.largefiles=nothing
 README* annex.largefiles=nothing
 config* annex.largefiles=nothing
+*.py annex.largefiles=nothing
 
 * annex.backend=MD5E
 **/.git* annex.largefiles=nothing

+ 1 - 1
config.yml

@@ -1,3 +1,3 @@
 annex:
     minsize: 0
-    exclude: ["*README*", "*LICENSE*", "config*", "**/.git*"]
+    exclude: ["*README*", "*LICENSE*", "config*", "*.py", "**/.git*"]

+ 66 - 0
utils.py

@@ -0,0 +1,66 @@
+import sys
+import argparse
+
+
def reduce_file_size(filename, new_filename, n_packages, header_size=0, packet_size=32):
    """
    Write a truncated copy of a binary file keeping only the first data packages.

    The source file is assumed to consist of a fixed-size header followed by a
    sequence of fixed-size data packages. The header and the first ``n_packages``
    packages are copied verbatim; everything after that is dropped.

    Parameters
    ----------
    filename:
        Path of the source file to read.
    new_filename:
        Path of the truncated copy to write. Overwritten if it already exists.
    n_packages:
        Number of data packages to keep.
    header_size:
        Header length in bytes. Default 0.
    packet_size:
        Length of one data package in bytes. Default 32.
    Returns
    -------

    """
    # Total number of bytes to preserve: the header plus the requested packages.
    keep = header_size + n_packages * packet_size
    with open(filename, 'rb') as source:
        head = source.read(keep)
    with open(new_filename, 'wb') as target:
        target.write(head)
+
+
# Suggested --header_size / --packet_size values (in bytes) for common
# electrophysiology file formats; printed when --parameter_help is given.
# Fixed two typos in the original text: the missing opening quote on
# 'tdt-tev' and the missing '*' in the edf header-size formula.
param_help = """
'tdt-tev': {'header_size': 0, 'packet_size': 8},  # uint8
'tdt-tsq': {'header_size': 0, 'packet_size': 40*8},
'tdt-sev': {'header_size': 0, 'packet_size': 8},  # uint8
'edf': {'header_size': 256 * (number_of_signals + 1),  # 256 byte per channel and header
        'packet_size': number_of_signals * samples_in_datarecord * 2}}  # int16
"""
+
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Bit-based file manipulation')
    subparsers = parser.add_subparsers(help='cut file', dest='subparser_name')

    # create the parser for the "cut" command
    parser_cut = subparsers.add_parser('cut', help='shorten file to limited bits')
    parser_cut.add_argument('filename', type=str, help='name of the file to load')
    parser_cut.add_argument('new_filename', type=str, help='name of the file to save')
    parser_cut.add_argument('n_packages', type=int, help='number of packages to preserve')
    # Units are bytes (reduce_file_size reads header_size + n_packages *
    # packet_size bytes); the original help text wrongly said "bits".
    # Defaults mirror reduce_file_size's own defaults. The original used
    # nargs=1 with no default, so omitting either option left the attribute
    # as None and getattr(args, k)[0] crashed with a TypeError; scalar
    # defaults keep the CLI invocation identical while fixing that crash.
    parser_cut.add_argument('--header_size', metavar='header_size', type=int, default=0,
                            help='the size of the file header in bytes (default: 0)')
    parser_cut.add_argument('--packet_size', metavar='packet_size', type=int, default=32,
                            help='the size of the data packets in bytes (default: 32)')

    # add parser for printing help options for parameters
    parser.add_argument('--parameter_help', help='show suggested parameters', action='store_true')

    args = parser.parse_args()

    if args.parameter_help:
        print(param_help)
    elif args.subparser_name == 'cut':
        reduce_file_size(filename=args.filename, new_filename=args.new_filename,
                         n_packages=args.n_packages, header_size=args.header_size,
                         packet_size=args.packet_size)