
fix & test parsing of `root_dir_path` argument

Fabian Peter Hammerle, 5 years ago
parent commit: 408ab97723

+ 5 - 4
freesurfer_volume_reader/__init__.py

@@ -65,13 +65,14 @@ def main():
                            help='default: %(default)s')
     argparser.add_argument('--output-format', choices=['csv'], default='csv',
                            help='default: %(default)s')
+    subjects_dir_path = os.environ.get('SUBJECTS_DIR', None)
     argparser.add_argument('root_dir_path',
-                           nargs='?' if 'SUBJECTS_DIR' in os.environ else 1,
-                           default=os.environ.get('SUBJECTS_DIR', None),
-                           help='default: $SUBJECTS_DIR (%(default)s)')
+                           nargs='?' if subjects_dir_path else 1,
+                           default=[subjects_dir_path],
+                           help='default: $SUBJECTS_DIR ({})'.format(subjects_dir_path))
     args = argparser.parse_args()
     volume_file_paths = find_hippocampal_volume_files(
-        root_dir_path=args.root_dir_path,
+        root_dir_path=args.root_dir_path[0],
         filename_regex=re.compile(args.filename_pattern))
     volume_frames = []
     for volume_file_path in volume_file_paths:
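The change works because of how argparse handles nargs: nargs=1 always parses the positional argument into a one-element list, while nargs='?' substitutes the default unchanged when the argument is omitted. Wrapping the default in a list therefore keeps args.root_dir_path[0] valid for both paths the new tests cover. A minimal, stand-alone sketch of that behaviour (not part of the commit; build_parser is a hypothetical helper mirroring the patched add_argument call):

    import argparse

    def build_parser(subjects_dir_path):
        # Mirrors the patched add_argument call in isolation.
        parser = argparse.ArgumentParser()
        parser.add_argument('root_dir_path',
                            nargs='?' if subjects_dir_path else 1,
                            default=[subjects_dir_path])
        return parser

    # $SUBJECTS_DIR unset: the positional argument is required and argparse wraps it in a list.
    print(build_parser(None).parse_args(['/data/subjects']).root_dir_path)   # ['/data/subjects']
    # $SUBJECTS_DIR set and the argument omitted: the list-wrapped default is used as-is.
    print(build_parser('/data/subjects').parse_args([]).root_dir_path)       # ['/data/subjects']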

+ 31 - 0
tests/hippocampus_test.py

@@ -1,5 +1,7 @@
+import io
 import os
 import re
+import unittest.mock
 
 import pandas.util.testing
 import pytest
@@ -128,3 +130,32 @@ def test_read_hippocampal_volume_file_dataframe(volume_file_path, expected_dataf
         right=freesurfer_volume_reader.read_hippocampal_volume_file_dataframe(
             volume_file_path=volume_file_path),
     )
+
+
+@pytest.mark.parametrize(('root_dir_path', 'expected_csv_path'), [
+    (os.path.join(SUBJECTS_DIR, 'bert'),
+     os.path.join(SUBJECTS_DIR, 'bert', 'hippocampal-volumes.csv')),
+])
+def test_main_root_dir_param(capsys, root_dir_path, expected_csv_path):
+    with unittest.mock.patch('sys.argv', ['', root_dir_path]):
+        freesurfer_volume_reader.main()
+    out, _ = capsys.readouterr()
+    pandas.util.testing.assert_frame_equal(
+        left=pandas.read_csv(expected_csv_path),
+        right=pandas.read_csv(io.StringIO(out)).drop(columns=['source_path']),
+    )
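The comparison pattern used in the test above is plain pandas: parse the CSV text captured from stdout with io.StringIO, drop the column that has no counterpart in the fixture file, and compare the frames. A self-contained sketch with made-up data (independent of the project):

    import io

    import pandas
    import pandas.util.testing

    captured_stdout = 'subfield,volume,source_path\nCA1,34.5,/tmp/x.txt\n'
    expected = pandas.DataFrame({'subfield': ['CA1'], 'volume': [34.5]})
    # Drop the extra column before comparing, as the test does with 'source_path'.
    pandas.util.testing.assert_frame_equal(
        left=expected,
        right=pandas.read_csv(io.StringIO(captured_stdout)).drop(columns=['source_path']),
    )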
+
+
+@pytest.mark.parametrize(('root_dir_path', 'expected_csv_path'), [
+    (os.path.join(SUBJECTS_DIR, 'bert'),
+     os.path.join(SUBJECTS_DIR, 'bert', 'hippocampal-volumes.csv')),
+])
+def test_main_root_dir_env(capsys, root_dir_path, expected_csv_path):
+    os.environ['SUBJECTS_DIR'] = root_dir_path
+    with unittest.mock.patch('sys.argv', ['']):
+        freesurfer_volume_reader.main()
+    out, _ = capsys.readouterr()
+    pandas.util.testing.assert_frame_equal(
+        left=pandas.read_csv(expected_csv_path),
+        right=pandas.read_csv(io.StringIO(out)).drop(columns=['source_path']),
+    )
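One design note on test_main_root_dir_env: it assigns os.environ['SUBJECTS_DIR'] directly, so the variable stays set for tests that run afterwards. A possible variant (not part of the commit, with an assumed SUBJECTS_DIR fixture path) that restores the environment automatically via unittest.mock.patch.dict:

    import io
    import os
    import unittest.mock

    import pandas
    import pandas.util.testing
    import pytest

    import freesurfer_volume_reader

    # Assumption: points at the tests/subjects fixture directory, like the
    # module-level constant in tests/hippocampus_test.py.
    SUBJECTS_DIR = os.path.join(os.path.dirname(__file__), 'subjects')

    @pytest.mark.parametrize(('root_dir_path', 'expected_csv_path'), [
        (os.path.join(SUBJECTS_DIR, 'bert'),
         os.path.join(SUBJECTS_DIR, 'bert', 'hippocampal-volumes.csv')),
    ])
    def test_main_root_dir_env_patched(capsys, root_dir_path, expected_csv_path):
        # patch.dict reverts os.environ when the with-block exits,
        # so SUBJECTS_DIR does not leak into later tests.
        with unittest.mock.patch.dict(os.environ, {'SUBJECTS_DIR': root_dir_path}), \
                unittest.mock.patch('sys.argv', ['']):
            freesurfer_volume_reader.main()
        out, _ = capsys.readouterr()
        pandas.util.testing.assert_frame_equal(
            left=pandas.read_csv(expected_csv_path),
            right=pandas.read_csv(io.StringIO(out)).drop(columns=['source_path']),
        )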

+ 28 - 0
tests/subjects/bert/hippocampal-volumes.csv

@@ -0,0 +1,28 @@
+subfield,volume,subject,hemisphere,T1_input,analysis_id
+Hippocampal_tail,123.456789,bert,left,True,
+subiculum,234.567891,bert,left,True,
+CA1,34.567891,bert,left,True,
+hippocampal-fissure,345.678912,bert,left,True,
+presubiculum,456.789123,bert,left,True,
+parasubiculum,45.678912,bert,left,True,
+molecular_layer_HP,56.789123,bert,left,True,
+GC-ML-DG,567.891234,bert,left,True,
+CA3,678.912345,bert,left,True,
+CA4,789.123456,bert,left,True,
+fimbria,89.123456,bert,left,True,
+HATA,91.234567,bert,left,True,
+Whole_hippocampus,1234.567899,bert,left,True,
+Hippocampal_tail,124.456789,bert,left,True,T2
+subiculum,244.567891,bert,left,True,T2
+CA1,44.567891,bert,left,True,T2
+hippocampal-fissure,445.678912,bert,left,True,T2
+presubiculum,456.789124,bert,left,True,T2
+parasubiculum,45.678912,bert,left,True,T2
+molecular_layer_HP,56.789124,bert,left,True,T2
+GC-ML-DG,567.891244,bert,left,True,T2
+CA3,678.912445,bert,left,True,T2
+CA4,789.124456,bert,left,True,T2
+fimbria,89.124456,bert,left,True,T2
+HATA,91.244567,bert,left,True,T2
+Whole_hippocampus,1244.567899,bert,left,True,T2
+