# hippocampus_test.py — tests for the freesurfer_volume_reader module
  1. import io
  2. import os
  3. import re
  4. import unittest.mock
  5. import pandas
  6. import pandas.util.testing
  7. import pytest
  8. import freesurfer_volume_reader
  9. SUBJECTS_DIR = os.path.join(os.path.dirname(__file__), 'subjects')
  10. @pytest.mark.parametrize(('root_dir_path', 'expected_file_paths'), [
  11. (SUBJECTS_DIR,
  12. {os.path.join(SUBJECTS_DIR, 'alice', 'mri', 'lh.hippoSfVolumes-T1.v10.txt'),
  13. os.path.join(SUBJECTS_DIR, 'bert', 'mri', 'lh.hippoSfVolumes-T1-T2.v10.txt'),
  14. os.path.join(SUBJECTS_DIR, 'bert', 'mri', 'lh.hippoSfVolumes-T1.v10.txt')}),
  15. (os.path.join(SUBJECTS_DIR, 'bert'),
  16. {os.path.join(SUBJECTS_DIR, 'bert', 'mri', 'lh.hippoSfVolumes-T1-T2.v10.txt'),
  17. os.path.join(SUBJECTS_DIR, 'bert', 'mri', 'lh.hippoSfVolumes-T1.v10.txt')}),
  18. (os.path.join(SUBJECTS_DIR, 'bert', 'mri'),
  19. {os.path.join(SUBJECTS_DIR, 'bert', 'mri', 'lh.hippoSfVolumes-T1-T2.v10.txt'),
  20. os.path.join(SUBJECTS_DIR, 'bert', 'mri', 'lh.hippoSfVolumes-T1.v10.txt')}),
  21. ])
  22. def test_find_hippocampal_volume_files(root_dir_path, expected_file_paths):
  23. assert expected_file_paths == set(
  24. freesurfer_volume_reader.find_hippocampal_volume_files(root_dir_path=root_dir_path))
  25. @pytest.mark.parametrize(('root_dir_path', 'filename_pattern', 'expected_file_paths'), [
  26. (SUBJECTS_DIR,
  27. r'hippoSfVolumes-T1\.v10',
  28. {os.path.join(SUBJECTS_DIR, 'alice', 'mri', 'lh.hippoSfVolumes-T1.v10.txt'),
  29. os.path.join(SUBJECTS_DIR, 'bert', 'mri', 'lh.hippoSfVolumes-T1.v10.txt')}),
  30. (os.path.join(SUBJECTS_DIR, 'bert'),
  31. r'hippoSfVolumes-T1-T2',
  32. {os.path.join(SUBJECTS_DIR, 'bert', 'mri', 'lh.hippoSfVolumes-T1-T2.v10.txt')}),
  33. ])
  34. def test_find_hippocampal_volume_files_pattern(root_dir_path, filename_pattern,
  35. expected_file_paths):
  36. assert expected_file_paths == set(freesurfer_volume_reader.find_hippocampal_volume_files(
  37. root_dir_path=root_dir_path, filename_regex=re.compile(filename_pattern)))
  38. @pytest.mark.parametrize(('volume_file_path', 'expected_volumes'), [
  39. (os.path.join(SUBJECTS_DIR, 'bert/mri/lh.hippoSfVolumes-T1.v10.txt'),
  40. {'Hippocampal_tail': 123.456789,
  41. 'subiculum': 234.567891,
  42. 'CA1': 34.567891,
  43. 'hippocampal-fissure': 345.678912,
  44. 'presubiculum': 456.789123,
  45. 'parasubiculum': 45.678912,
  46. 'molecular_layer_HP': 56.789123,
  47. 'GC-ML-DG': 567.891234,
  48. 'CA3': 678.912345,
  49. 'CA4': 789.123456,
  50. 'fimbria': 89.123456,
  51. 'HATA': 91.234567,
  52. 'Whole_hippocampus': 1234.567899}),
  53. ])
  54. def test_read_hippocampal_volumes(volume_file_path, expected_volumes):
  55. assert expected_volumes == freesurfer_volume_reader.read_hippocampal_volumes(
  56. volume_file_path)
  57. def test_read_hippocampal_volumes_not_found():
  58. with pytest.raises(FileNotFoundError):
  59. freesurfer_volume_reader.read_hippocampal_volumes(
  60. os.path.join(SUBJECTS_DIR, 'non-existing', 'lh.hippoSfVolumes-T1.v10.txt'))
  61. @pytest.mark.parametrize(('volume_file_path', 'expected_attrs'), [
  62. ('bert/mri/lh.hippoSfVolumes-T1.v10.txt',
  63. {'subject': 'bert', 'hemisphere': 'left', 'T1_input': True, 'analysis_id': None}),
  64. ('bert/mri/lh.hippoSfVolumes-T1-T2.v10.txt',
  65. {'subject': 'bert', 'hemisphere': 'left', 'T1_input': True, 'analysis_id': 'T2'}),
  66. ('bert/mri/lh.hippoSfVolumes-T2.v10.txt',
  67. {'subject': 'bert', 'hemisphere': 'left', 'T1_input': False, 'analysis_id': 'T2'}),
  68. ('bert/mri/lh.hippoSfVolumes-T1-T2-high-res.v10.txt',
  69. {'subject': 'bert', 'hemisphere': 'left', 'T1_input': True, 'analysis_id': 'T2-high-res'}),
  70. ('bert/mri/lh.hippoSfVolumes-T2-high-res.v10.txt',
  71. {'subject': 'bert', 'hemisphere': 'left', 'T1_input': False, 'analysis_id': 'T2-high-res'}),
  72. ('bert/mri/lh.hippoSfVolumes-PD.v10.txt',
  73. {'subject': 'bert', 'hemisphere': 'left', 'T1_input': False, 'analysis_id': 'PD'}),
  74. ('bert/mri/rh.hippoSfVolumes-T1.v10.txt',
  75. {'subject': 'bert', 'hemisphere': 'right', 'T1_input': True, 'analysis_id': None}),
  76. ('bert/mri/rh.hippoSfVolumes-T1-T2.v10.txt',
  77. {'subject': 'bert', 'hemisphere': 'right', 'T1_input': True, 'analysis_id': 'T2'}),
  78. ('freesurfer/subjects/bert/mri/lh.hippoSfVolumes-T1.v10.txt',
  79. {'subject': 'bert', 'hemisphere': 'left', 'T1_input': True, 'analysis_id': None}),
  80. ('../../bert/mri/lh.hippoSfVolumes-T1.v10.txt',
  81. {'subject': 'bert', 'hemisphere': 'left', 'T1_input': True, 'analysis_id': None}),
  82. ])
  83. def test_parse_hippocampal_volume_file_path(volume_file_path, expected_attrs):
  84. assert expected_attrs == freesurfer_volume_reader.parse_hippocampal_volume_file_path(
  85. volume_file_path=volume_file_path)
  86. @pytest.mark.parametrize('volume_file_path', [
  87. 'bert/mri/lh.hippoSfLabels-T1.v10.mgz',
  88. 'bert/mri/lh.hippoSfVolumes-T1.v9.txt',
  89. 'bert/mri/lh.hippoSfVolumes.v10.txt',
  90. 'bert/mri/mh.hippoSfVolumes-T1.v10.txt',
  91. ])
  92. def test_parse_hippocampal_volume_file_path_invalid(volume_file_path):
  93. with pytest.raises(Exception):
  94. freesurfer_volume_reader.parse_hippocampal_volume_file_path(
  95. volume_file_path=volume_file_path)
  96. @pytest.mark.parametrize(('volume_file_path', 'expected_dataframe'), [
  97. (os.path.join(SUBJECTS_DIR, 'alice', 'mri', 'lh.hippoSfVolumes-T1.v10.txt'),
  98. pandas.DataFrame({
  99. 'subfield': ['Hippocampal_tail', 'subiculum', 'CA1', 'hippocampal-fissure',
  100. 'presubiculum', 'parasubiculum', 'molecular_layer_HP', 'GC-ML-DG',
  101. 'CA3', 'CA4', 'fimbria', 'HATA', 'Whole_hippocampus'],
  102. 'volume': [173.456789, 734.567891, 34.567891, 345.678917, 456.789173, 45.678917,
  103. 56.789173, 567.891734, 678.917345, 789.173456, 89.173456, 91.734567,
  104. 1734.567899],
  105. 'subject': 'alice',
  106. 'hemisphere': 'left',
  107. 'T1_input': True,
  108. 'analysis_id': None,
  109. })),
  110. ])
  111. def test_read_hippocampal_volume_file_dataframe(volume_file_path, expected_dataframe):
  112. pandas.util.testing.assert_frame_equal(
  113. left=expected_dataframe,
  114. right=freesurfer_volume_reader.read_hippocampal_volume_file_dataframe(
  115. volume_file_path=volume_file_path),
  116. )
  117. def assert_volume_frames_equal(left: pandas.DataFrame, right: pandas.DataFrame):
  118. sort_by = ['volume', 'analysis_id']
  119. left.sort_values(sort_by, inplace=True)
  120. right.sort_values(sort_by, inplace=True)
  121. left.reset_index(inplace=True, drop=True)
  122. right.reset_index(inplace=True, drop=True)
  123. pandas.util.testing.assert_frame_equal(
  124. left=left,
  125. right=right,
  126. # ignore the order of index & columns
  127. check_like=True,
  128. )
  129. @pytest.mark.parametrize(('root_dir_path', 'expected_csv_path'), [
  130. (os.path.join(SUBJECTS_DIR, 'bert'),
  131. os.path.join(SUBJECTS_DIR, 'bert', 'hippocampal-volumes.csv')),
  132. ])
  133. def test_main_root_dir_param(capsys, root_dir_path, expected_csv_path):
  134. with unittest.mock.patch('sys.argv', ['', root_dir_path]):
  135. freesurfer_volume_reader.main()
  136. out, _ = capsys.readouterr()
  137. assert_volume_frames_equal(
  138. left=pandas.read_csv(expected_csv_path),
  139. right=pandas.read_csv(io.StringIO(out)).drop(columns=['source_path']),
  140. )
  141. @pytest.mark.parametrize(('root_dir_path', 'expected_csv_path'), [
  142. (os.path.join(SUBJECTS_DIR, 'bert'),
  143. os.path.join(SUBJECTS_DIR, 'bert', 'hippocampal-volumes.csv')),
  144. ])
  145. def test_main_root_dir_env(capsys, root_dir_path, expected_csv_path):
  146. os.environ['SUBJECTS_DIR'] = root_dir_path
  147. with unittest.mock.patch('sys.argv', ['']):
  148. freesurfer_volume_reader.main()
  149. out, _ = capsys.readouterr()
  150. assert_volume_frames_equal(
  151. left=pandas.read_csv(expected_csv_path),
  152. right=pandas.read_csv(io.StringIO(out)).drop(columns=['source_path']),
  153. )