__main__.py

  1. """
  2. Read hippocampal subfield volumes computed by Freesurfer and/or ASHS
  3. and export collected data as CSV.
  4. """
  5. import argparse
  6. import os
  7. import re
  8. import sys
  9. import typing
  10. import pandas
  11. from freesurfer_volume_reader import (
  12. __version__,
  13. ashs,
  14. freesurfer,
  15. parse_version_string,
  16. remove_group_names_from_regex,
  17. )
def concat_dataframes(
    dataframes: typing.Iterable[pandas.DataFrame],
) -> pandas.DataFrame:  # pragma: no cover
    # pylint: disable=unexpected-keyword-arg
    if parse_version_string(pandas.__version__) < (0, 23):
        return pandas.concat(dataframes, ignore_index=True)
    return pandas.concat(dataframes, ignore_index=True, sort=False)

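# Map each supported --source-types value to the class that locates and
# parses the corresponding volume files.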
VOLUME_FILE_FINDERS = {
    "ashs": ashs.HippocampalSubfieldsVolumeFile,
    # https://github.com/freesurfer/freesurfer/tree/release_6_0_0/HippoSF
    "freesurfer-hipposf": freesurfer.HippocampalSubfieldsVolumeFile,
}

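# Example invocation (illustrative; the subjects directory path and output
# filename are placeholders, not part of this module):
#   python -m freesurfer_volume_reader --source-types freesurfer-hipposf ashs \
#       /my/subjects/dir > hippocampal_volumes.csv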
def main():
    argparser = argparse.ArgumentParser(
        description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter
    )
    argparser.add_argument(
        "--source-types",
        nargs="+",
        default=["freesurfer-hipposf"],
        choices=VOLUME_FILE_FINDERS.keys(),
        help="default: [freesurfer-hipposf]",
    )
    for source_type, file_class in VOLUME_FILE_FINDERS.items():
        argparser.add_argument(
            f"--{source_type}-filename-regex",
            dest=f"filename_regex.{source_type}",
            metavar="REGULAR_EXPRESSION",
            type=re.compile,
            default=remove_group_names_from_regex(file_class.FILENAME_PATTERN),
            help="default: %(default)s",
        )
    argparser.add_argument(
        "--output-format", choices=["csv"], default="csv", help="default: %(default)s"
    )
    subjects_dir_path = os.environ.get("SUBJECTS_DIR", None)
    argparser.add_argument(
        "root_dir_paths",
        metavar="ROOT_DIR",
        nargs="*" if subjects_dir_path else "+",
        default=[subjects_dir_path],
        help=f"default: $SUBJECTS_DIR ({subjects_dir_path})",
    )
    argparser.add_argument("--version", action="version", version=__version__)
    args = argparser.parse_args()
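    # Collect the per-source-type "--*-filename-regex" options back into a
    # dict keyed by source type (their argparse dests are prefixed with
    # "filename_regex.").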
    filename_regexs = {
        k[len("filename_regex.") :]: v
        for k, v in vars(args).items()
        if k.startswith("filename_regex.")
    }
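    # Read every matching volume file into its own DataFrame and tag each one
    # with the source type and file path it came from.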
    volume_frames = []
    for source_type in args.source_types:
        finder = VOLUME_FILE_FINDERS[source_type]
        for root_dir_path in args.root_dir_paths:
            for volume_file in finder.find(
                root_dir_path=root_dir_path, filename_regex=filename_regexs[source_type]
            ):
                volume_frame = volume_file.read_volumes_dataframe()
                volume_frame["source_type"] = source_type
                volume_frame["source_path"] = volume_file.absolute_path
                volume_frames.append(volume_frame)
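    # Exit with EX_NOINPUT when nothing matched; otherwise print one combined
    # CSV table to stdout.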
    if not volume_frames:
        print(
            "Did not find any volume files matching the specified criteria.",
            file=sys.stderr,
        )
        return os.EX_NOINPUT
    united_volume_frame = concat_dataframes(volume_frames)
    print(united_volume_frame.to_csv(index=False))
    return os.EX_OK

  88. if __name__ == "__main__":
  89. sys.exit(main())