hippocampus_test.py

import io
import os
import typing
import unittest.mock

import pandas
import pandas.util.testing
import pytest

import freesurfer_volume_reader

SUBJECTS_DIR = os.path.join(os.path.dirname(__file__), 'subjects')


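# remove_group_names_from_regex() is expected to turn named groups such as
# (?P<h>...) into plain groups (...), judging from the parameter pairs below.
# The motivation (e.g. reusing user-supplied patterns where group names would
# collide) lives in the module under test and is not asserted here.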
@pytest.mark.parametrize(('source_pattern', 'expected_pattern'), [
    (r'^(?P<h>[lr])h\.hippoSfVolumes', r'^([lr])h\.hippoSfVolumes'),
    (r'(?P<a>a(?P<b>b))', r'(a(b))'),
])
def test_remove_group_names_from_regex(source_pattern, expected_pattern):
    assert expected_pattern == freesurfer_volume_reader.remove_group_names_from_regex(
        regex_pattern=source_pattern,
    )


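# Reading a single `*h.hippoSfVolumes*.txt` fixture file should yield a plain
# dict mapping each hippocampal subfield name to its volume in mm^3. The
# fixture lives under the tests' `subjects/bert/mri/` directory; the values
# are synthetic test data, not real segmentation output.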
@pytest.mark.parametrize(('volume_file_path', 'expected_volumes'), [
    (os.path.join(SUBJECTS_DIR, 'bert/mri/lh.hippoSfVolumes-T1.v10.txt'),
     {'Hippocampal_tail': 123.456789,
      'subiculum': 234.567891,
      'CA1': 34.567891,
      'hippocampal-fissure': 345.678912,
      'presubiculum': 456.789123,
      'parasubiculum': 45.678912,
      'molecular_layer_HP': 56.789123,
      'GC-ML-DG': 567.891234,
      'CA3': 678.912345,
      'CA4': 789.123456,
      'fimbria': 89.123456,
      'HATA': 91.234567,
      'Whole_hippocampus': 1234.567899}),
])
def test_read_hippocampal_volumes_mm3(volume_file_path, expected_volumes):
    assert expected_volumes == freesurfer_volume_reader.read_hippocampal_volumes_mm3(
        volume_file_path)


def test_read_hippocampal_volumes_mm3_not_found():
    with pytest.raises(FileNotFoundError):
        freesurfer_volume_reader.read_hippocampal_volumes_mm3(
            os.path.join(SUBJECTS_DIR, 'non-existing', 'lh.hippoSfVolumes-T1.v10.txt'))


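# parse_hippocampal_volume_file_path() derives metadata from the file name
# alone. Judging from the cases below, the convention is roughly
# `<l|r>h.hippoSfVolumes[-T1][-<analysis_id>].v10.txt`: the prefix selects the
# hemisphere, a leading `T1` marks T1-weighted input, and any remaining suffix
# becomes `analysis_id`. Leading directories (nested or relative) only
# contribute the subject name, taken from the directory above `mri`.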
@pytest.mark.parametrize(('volume_file_path', 'expected_attrs'), [
    ('bert/mri/lh.hippoSfVolumes-T1.v10.txt',
     {'subject': 'bert', 'hemisphere': 'left', 'T1_input': True, 'analysis_id': None}),
    ('bert/mri/lh.hippoSfVolumes-T1-T2.v10.txt',
     {'subject': 'bert', 'hemisphere': 'left', 'T1_input': True, 'analysis_id': 'T2'}),
    ('bert/mri/lh.hippoSfVolumes-T2.v10.txt',
     {'subject': 'bert', 'hemisphere': 'left', 'T1_input': False, 'analysis_id': 'T2'}),
    ('bert/mri/lh.hippoSfVolumes-T1-T2-high-res.v10.txt',
     {'subject': 'bert', 'hemisphere': 'left', 'T1_input': True, 'analysis_id': 'T2-high-res'}),
    ('bert/mri/lh.hippoSfVolumes-T2-high-res.v10.txt',
     {'subject': 'bert', 'hemisphere': 'left', 'T1_input': False, 'analysis_id': 'T2-high-res'}),
    ('bert/mri/lh.hippoSfVolumes-PD.v10.txt',
     {'subject': 'bert', 'hemisphere': 'left', 'T1_input': False, 'analysis_id': 'PD'}),
    ('bert/mri/rh.hippoSfVolumes-T1.v10.txt',
     {'subject': 'bert', 'hemisphere': 'right', 'T1_input': True, 'analysis_id': None}),
    ('bert/mri/rh.hippoSfVolumes-T1-T2.v10.txt',
     {'subject': 'bert', 'hemisphere': 'right', 'T1_input': True, 'analysis_id': 'T2'}),
    ('freesurfer/subjects/bert/mri/lh.hippoSfVolumes-T1.v10.txt',
     {'subject': 'bert', 'hemisphere': 'left', 'T1_input': True, 'analysis_id': None}),
    ('../../bert/mri/lh.hippoSfVolumes-T1.v10.txt',
     {'subject': 'bert', 'hemisphere': 'left', 'T1_input': True, 'analysis_id': None}),
])
def test_parse_hippocampal_volume_file_path(volume_file_path, expected_attrs):
    assert expected_attrs == freesurfer_volume_reader.parse_hippocampal_volume_file_path(
        volume_file_path=volume_file_path)


@pytest.mark.parametrize('volume_file_path', [
    'bert/mri/lh.hippoSfLabels-T1.v10.mgz',
    'bert/mri/lh.hippoSfVolumes-T1.v9.txt',
    'bert/mri/lh.hippoSfVolumes.v10.txt',
    'bert/mri/mh.hippoSfVolumes-T1.v10.txt',
])
def test_parse_hippocampal_volume_file_path_invalid(volume_file_path):
    with pytest.raises(Exception):
        freesurfer_volume_reader.parse_hippocampal_volume_file_path(
            volume_file_path=volume_file_path)


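# read_hippocampal_volume_file_dataframe() is expected to combine both steps
# above: one row per subfield (long format), plus constant metadata columns
# (subject, hemisphere, T1_input, analysis_id) parsed from the file path.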
@pytest.mark.parametrize(('volume_file_path', 'expected_dataframe'), [
    (os.path.join(SUBJECTS_DIR, 'alice', 'mri', 'lh.hippoSfVolumes-T1.v10.txt'),
     pandas.DataFrame({
         'subfield': ['Hippocampal_tail', 'subiculum', 'CA1', 'hippocampal-fissure',
                      'presubiculum', 'parasubiculum', 'molecular_layer_HP', 'GC-ML-DG',
                      'CA3', 'CA4', 'fimbria', 'HATA', 'Whole_hippocampus'],
         'volume_mm^3': [173.456789, 734.567891, 34.567891, 345.678917, 456.789173, 45.678917,
                         56.789173, 567.891734, 678.917345, 789.173456, 89.173456, 91.734567,
                         1734.567899],
         'subject': 'alice',
         'hemisphere': 'left',
         'T1_input': True,
         'analysis_id': None,
     })),
])
def test_read_hippocampal_volume_file_dataframe(volume_file_path, expected_dataframe):
    assert_volume_frames_equal(
        left=expected_dataframe,
        right=freesurfer_volume_reader.read_hippocampal_volume_file_dataframe(
            volume_file_path=volume_file_path),
    )


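# Helper: compare two volume frames while ignoring row order, index labels and
# column order. It sorts both frames in place, which is acceptable here because
# callers pass throwaway frames. Note that `pandas.util.testing` matches the
# pandas versions this file was written against; in pandas >= 1.0 the public
# equivalent is `pandas.testing.assert_frame_equal`.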
def assert_volume_frames_equal(left: pandas.DataFrame, right: pandas.DataFrame):
    sort_by = ['volume_mm^3', 'analysis_id']
    left.sort_values(sort_by, inplace=True)
    right.sort_values(sort_by, inplace=True)
    left.reset_index(inplace=True, drop=True)
    right.reset_index(inplace=True, drop=True)
    pandas.util.testing.assert_frame_equal(
        left=left,
        right=right,
        # ignore the order of index & columns
        check_like=True,
    )


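# Helper: run the command-line entry point end to end. SUBJECTS_DIR is set or
# cleared in the environment, sys.argv is patched, main() writes CSV to stdout,
# and capsys captures it so the output can be re-read with pandas. The
# `source_path` column, which depends on the local checkout path, is dropped
# before comparing.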
def assert_main_volume_frame_equals(capsys, argv: list, expected_frame: pandas.DataFrame,
                                    subjects_dir: typing.Optional[str] = None):
    if subjects_dir:
        os.environ['SUBJECTS_DIR'] = subjects_dir
    elif 'SUBJECTS_DIR' in os.environ:
        del os.environ['SUBJECTS_DIR']
    with unittest.mock.patch('sys.argv', [''] + argv):
        freesurfer_volume_reader.main()
    out, _ = capsys.readouterr()
    assert_volume_frames_equal(
        left=expected_frame,
        # pandas.DataFrame.drop(columns=[...]) requires pandas >= 0.21.0
        right=pandas.read_csv(io.StringIO(out)).drop(columns=['source_path']),
    )


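# The expected CSV files referenced below (e.g. `subjects/bert/hippocampal-volumes.csv`,
# `subjects/all-hippocampal-volumes.csv`) are presumably pre-computed fixtures
# stored alongside the fake subject directories.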
@pytest.mark.parametrize(('root_dir_paths', 'expected_csv_path'), [
    ([os.path.join(SUBJECTS_DIR, 'alice')],
     os.path.join(SUBJECTS_DIR, 'alice', 'hippocampal-volumes.csv')),
    ([os.path.join(SUBJECTS_DIR, 'bert')],
     os.path.join(SUBJECTS_DIR, 'bert', 'hippocampal-volumes.csv')),
    ([os.path.join(SUBJECTS_DIR, 'alice'),
      os.path.join(SUBJECTS_DIR, 'bert')],
     os.path.join(SUBJECTS_DIR, 'all-hippocampal-volumes.csv')),
])
def test_main_root_dir_param(capsys, root_dir_paths: list, expected_csv_path):
    assert_main_volume_frame_equals(
        argv=root_dir_paths,
        expected_frame=pandas.read_csv(expected_csv_path),
        capsys=capsys,
    )


@pytest.mark.parametrize(('root_dir_path', 'expected_csv_path'), [
    (os.path.join(SUBJECTS_DIR, 'bert'),
     os.path.join(SUBJECTS_DIR, 'bert', 'hippocampal-volumes.csv')),
])
def test_main_root_dir_env(capsys, root_dir_path, expected_csv_path):
    assert_main_volume_frame_equals(
        argv=[],
        subjects_dir=root_dir_path,
        expected_frame=pandas.read_csv(expected_csv_path),
        capsys=capsys,
    )


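# Positional root-dir arguments should take precedence over the SUBJECTS_DIR
# environment variable. The second case sets SUBJECTS_DIR to the filesystem
# root; the timeout(8) marker presumably guards against a regression where
# main() would start scanning the whole filesystem instead of the given dir.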
@pytest.mark.timeout(8)
@pytest.mark.parametrize(('root_dir_path', 'subjects_dir', 'expected_csv_path'), [
    (os.path.join(SUBJECTS_DIR, 'bert'),
     os.path.join(SUBJECTS_DIR, 'alice'),
     os.path.join(SUBJECTS_DIR, 'bert', 'hippocampal-volumes.csv')),
    (os.path.join(SUBJECTS_DIR, 'bert'),
     os.path.abspath(os.sep),
     os.path.join(SUBJECTS_DIR, 'bert', 'hippocampal-volumes.csv')),
])
def test_main_root_dir_overwrite_env(capsys, root_dir_path, subjects_dir, expected_csv_path):
    assert_main_volume_frame_equals(
        argv=[root_dir_path],
        subjects_dir=subjects_dir,
        expected_frame=pandas.read_csv(expected_csv_path),
        capsys=capsys,
    )


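# --filename-regex restricts which volume files are picked up; with a pattern
# matching only `*-T1-T2.v10.txt`, the output should contain exactly the rows
# of the expected CSV whose analysis_id is 'T2'.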
def test_main_root_dir_filename_regex(capsys):
    expected_volume_frame = pandas.read_csv(
        os.path.join(SUBJECTS_DIR, 'bert', 'hippocampal-volumes.csv'))
    assert_main_volume_frame_equals(
        argv=['--filename-regex', r'^.*-T1-T2\.v10\.txt$',
              os.path.join(SUBJECTS_DIR, 'bert')],
        expected_frame=expected_volume_frame[expected_volume_frame['analysis_id'] == 'T2'].copy(),
        capsys=capsys,
    )