freesurfer_test.py

# pylint: disable=missing-module-docstring

import os
import re

import pandas
import pytest

from freesurfer_volume_reader.freesurfer import HippocampalSubfieldsVolumeFile

# pylint: disable=wrong-import-order; false positive
from conftest import SUBJECTS_DIR, assert_volume_frames_equal

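# Parsing a hippoSfVolumes file path should yield the subject, hemisphere,
# T1 input flag, and optional analysis ID, whether the path is relative,
# nested under a subjects directory, or prefixed with "../".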
@pytest.mark.parametrize(
    ("volume_file_path", "expected_attrs"),
    [
        (
            "bert/mri/lh.hippoSfVolumes-T1.v10.txt",
            {
                "subject": "bert",
                "hemisphere": "left",
                "t1_input": True,
                "analysis_id": None,
            },
        ),
        (
            "bert/mri/lh.hippoSfVolumes-T1-T2.v10.txt",
            {
                "subject": "bert",
                "hemisphere": "left",
                "t1_input": True,
                "analysis_id": "T2",
            },
        ),
        (
            "bert/mri/lh.hippoSfVolumes-T2.v10.txt",
            {
                "subject": "bert",
                "hemisphere": "left",
                "t1_input": False,
                "analysis_id": "T2",
            },
        ),
        (
            "bert/mri/lh.hippoSfVolumes-T1-T2-high-res.v10.txt",
            {
                "subject": "bert",
                "hemisphere": "left",
                "t1_input": True,
                "analysis_id": "T2-high-res",
            },
        ),
        (
            "bert/mri/lh.hippoSfVolumes-T2-high-res.v10.txt",
            {
                "subject": "bert",
                "hemisphere": "left",
                "t1_input": False,
                "analysis_id": "T2-high-res",
            },
        ),
        (
            "bert/mri/lh.hippoSfVolumes-PD.v10.txt",
            {
                "subject": "bert",
                "hemisphere": "left",
                "t1_input": False,
                "analysis_id": "PD",
            },
        ),
        (
            "bert/mri/rh.hippoSfVolumes-T1.v10.txt",
            {
                "subject": "bert",
                "hemisphere": "right",
                "t1_input": True,
                "analysis_id": None,
            },
        ),
        (
            "bert/mri/rh.hippoSfVolumes-T1-T2.v10.txt",
            {
                "subject": "bert",
                "hemisphere": "right",
                "t1_input": True,
                "analysis_id": "T2",
            },
        ),
        (
            "freesurfer/subjects/bert/mri/lh.hippoSfVolumes-T1.v10.txt",
            {
                "subject": "bert",
                "hemisphere": "left",
                "t1_input": True,
                "analysis_id": None,
            },
        ),
        (
            "../../bert/mri/lh.hippoSfVolumes-T1.v10.txt",
            {
                "subject": "bert",
                "hemisphere": "left",
                "t1_input": True,
                "analysis_id": None,
            },
        ),
    ],
)
def test_hippocampal_subfields_volume_file_init(volume_file_path, expected_attrs):
    volume_file = HippocampalSubfieldsVolumeFile(path=volume_file_path)
    assert os.path.basename(volume_file_path) == os.path.basename(
        volume_file.absolute_path
    )
    for attr, value in expected_attrs.items():
        assert value == getattr(volume_file, attr)

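# Paths that do not match the hippoSfVolumes naming scheme (label files,
# unsupported versions, missing analysis suffix, unknown hemisphere prefix,
# ASHS-style filenames) should be rejected on construction.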
@pytest.mark.parametrize(
    "volume_file_path",
    [
        "bert/mri/lh.hippoSfLabels-T1.v10.mgz",
        "bert/mri/lh.hippoSfVolumes-T1.v9.txt",
        "bert/mri/lh.hippoSfVolumes.v10.txt",
        "bert/mri/mh.hippoSfVolumes-T1.v10.txt",
        "bert_left_corr_nogray_volumes.txt",
    ],
)
def test_hippocampal_subfields_volume_file_init_invalid_path(volume_file_path):
    with pytest.raises(Exception):
        HippocampalSubfieldsVolumeFile(path=volume_file_path)

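# read_volumes_mm3() should return a dict mapping subfield names to volumes
# in mm^3, as listed in the fixture file under SUBJECTS_DIR.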
@pytest.mark.parametrize(
    ("volume_file_path", "expected_volumes"),
    [
        (
            os.path.join(SUBJECTS_DIR, "bert/mri/lh.hippoSfVolumes-T1.v10.txt"),
            {
                "Hippocampal_tail": 123.456789,
                "subiculum": 234.567891,
                "CA1": 34.567891,
                "hippocampal-fissure": 345.678912,
                "presubiculum": 456.789123,
                "parasubiculum": 45.678912,
                "molecular_layer_HP": 56.789123,
                "GC-ML-DG": 567.891234,
                "CA3": 678.912345,
                "CA4": 789.123456,
                "fimbria": 89.123456,
                "HATA": 91.234567,
                "Whole_hippocampus": 1234.567899,
            },
        )
    ],
)
def test_hippocampal_subfields_volume_file_read_volumes_mm3(
    volume_file_path, expected_volumes
):
    volume_file = HippocampalSubfieldsVolumeFile(path=volume_file_path)
    assert volume_file.t1_input
    assert expected_volumes == volume_file.read_volumes_mm3()

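# Reading a volume file whose path does not exist should raise FileNotFoundError.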
def test_hippocampal_subfields_volume_file_read_volumes_mm3_not_found():
    volume_file = HippocampalSubfieldsVolumeFile(
        path=os.path.join(SUBJECTS_DIR, "non-existing", "lh.hippoSfVolumes-T1.v10.txt")
    )
    with pytest.raises(FileNotFoundError):
        volume_file.read_volumes_mm3()

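# read_volumes_dataframe() should return a pandas DataFrame with one row per
# subfield and metadata columns (subject, hemisphere, T1_input, analysis_id).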
@pytest.mark.parametrize(
    ("volume_file_path", "expected_dataframe"),
    [
        (
            os.path.join(SUBJECTS_DIR, "alice", "mri", "lh.hippoSfVolumes-T1.v10.txt"),
            pandas.DataFrame(
                {
                    "subfield": [
                        "Hippocampal_tail",
                        "subiculum",
                        "CA1",
                        "hippocampal-fissure",
                        "presubiculum",
                        "parasubiculum",
                        "molecular_layer_HP",
                        "GC-ML-DG",
                        "CA3",
                        "CA4",
                        "fimbria",
                        "HATA",
                        "Whole_hippocampus",
                    ],
                    "volume_mm^3": [
                        173.456789,
                        734.567891,
                        34.567891,
                        345.678917,
                        456.789173,
                        45.678917,
                        56.789173,
                        567.891734,
                        678.917345,
                        789.173456,
                        89.173456,
                        91.734567,
                        1734.567899,
                    ],
                    "subject": "alice",
                    "hemisphere": "left",
                    "T1_input": True,
                    "analysis_id": None,
                }
            ),
        )
    ],
)
def test_hippocampal_subfields_volume_file_read_volumes_dataframe(
    volume_file_path: str, expected_dataframe: pandas.DataFrame
):
    assert_volume_frames_equal(
        left=expected_dataframe,
        right=HippocampalSubfieldsVolumeFile(
            path=volume_file_path
        ).read_volumes_dataframe(),
    )

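# The dataframe reader should also raise FileNotFoundError for missing files.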
def test_hippocampal_subfields_volume_file_read_volumes_dataframe_not_found():
    volume_file = HippocampalSubfieldsVolumeFile(
        path=os.path.join(SUBJECTS_DIR, "non-existing", "lh.hippoSfVolumes-T1.v10.txt")
    )
    with pytest.raises(FileNotFoundError):
        volume_file.read_volumes_dataframe()

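# find() should recursively discover every hippoSfVolumes file below the given
# root directory and expose it via the absolute_path attribute.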
@pytest.mark.parametrize(
    ("root_dir_path", "expected_file_paths"),
    [
        (
            SUBJECTS_DIR,
            {
                os.path.join(
                    SUBJECTS_DIR, "alice", "mri", "lh.hippoSfVolumes-T1.v10.txt"
                ),
                os.path.join(
                    SUBJECTS_DIR, "bert", "mri", "lh.hippoSfVolumes-T1-T2.v10.txt"
                ),
                os.path.join(
                    SUBJECTS_DIR, "bert", "mri", "lh.hippoSfVolumes-T1.v10.txt"
                ),
            },
        ),
        (
            os.path.join(SUBJECTS_DIR, "bert"),
            {
                os.path.join(
                    SUBJECTS_DIR, "bert", "mri", "lh.hippoSfVolumes-T1-T2.v10.txt"
                ),
                os.path.join(
                    SUBJECTS_DIR, "bert", "mri", "lh.hippoSfVolumes-T1.v10.txt"
                ),
            },
        ),
        (
            os.path.join(SUBJECTS_DIR, "bert", "mri"),
            {
                os.path.join(
                    SUBJECTS_DIR, "bert", "mri", "lh.hippoSfVolumes-T1-T2.v10.txt"
                ),
                os.path.join(
                    SUBJECTS_DIR, "bert", "mri", "lh.hippoSfVolumes-T1.v10.txt"
                ),
            },
        ),
    ],
)
def test_hippocampal_subfields_volume_file_find(root_dir_path, expected_file_paths):
    volume_files = list(
        HippocampalSubfieldsVolumeFile.find(root_dir_path=root_dir_path)
    )
    assert all(
        "hippoSfVolumes" in os.path.basename(f.absolute_path) for f in volume_files
    )
    assert expected_file_paths == set(f.absolute_path for f in volume_files)

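# Passing filename_regex to find() should restrict the result to files whose
# names match the given pattern.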
@pytest.mark.parametrize(
    ("root_dir_path", "filename_pattern", "expected_file_paths"),
    [
        (
            SUBJECTS_DIR,
            r"hippoSfVolumes-T1\.v10",
            {
                os.path.join(
                    SUBJECTS_DIR, "alice", "mri", "lh.hippoSfVolumes-T1.v10.txt"
                ),
                os.path.join(
                    SUBJECTS_DIR, "bert", "mri", "lh.hippoSfVolumes-T1.v10.txt"
                ),
            },
        ),
        (
            os.path.join(SUBJECTS_DIR, "bert"),
            r"hippoSfVolumes-T1-T2",
            {
                os.path.join(
                    SUBJECTS_DIR, "bert", "mri", "lh.hippoSfVolumes-T1-T2.v10.txt"
                )
            },
        ),
    ],
)
def test_hippocampal_subfields_volume_file_find_pattern(
    root_dir_path, filename_pattern, expected_file_paths
):
    assert expected_file_paths == set(
        f.absolute_path
        for f in HippocampalSubfieldsVolumeFile.find(
            root_dir_path=root_dir_path, filename_regex=re.compile(filename_pattern)
        )
    )