# __init__.py — wrapper around the `duplicity` CLI plus parsers for its
# `collection-status` output.
  1. import datetime
  2. import re
  3. import subprocess
  4. import sys
  5. import time
  6. def _duplicity(params):
  7. stdout = subprocess.check_output(
  8. ['duplicity'] + params,
  9. )
  10. return stdout.decode(sys.stdout.encoding)
  11. def _parse_duplicity_timestamp(timestamp):
  12. return datetime.datetime.fromtimestamp(
  13. time.mktime(time.strptime(timestamp))
  14. )
  15. class Collection(object):
  16. def __init__(self, url):
  17. self.url = url
  18. def request_status(self):
  19. return _CollectionStatus._parse(
  20. text=_duplicity(['collection-status', self.url])
  21. )
  22. class _Status(object):
  23. def __eq__(self, other):
  24. return isinstance(self, type(other)) and vars(self) == vars(other)
  25. def __neq__(self, other):
  26. return not (self == other)
  27. class _CollectionStatus(_Status):
  28. chain_separator_regex = r'-{25}\s'
  29. def __init__(self, archive_dir_path, primary_chain):
  30. self.archive_dir_path = archive_dir_path
  31. self.primary_chain = primary_chain
  32. @property
  33. def last_full_backup_time(self):
  34. return self.primary_chain.first_backup_time if self.primary_chain else None
  35. @property
  36. def last_incremental_backup_time(self):
  37. return self.primary_chain.last_incremental_backup_time if self.primary_chain else None
  38. @classmethod
  39. def _parse(cls, text):
  40. if 'No backup chains with active signatures found' in text:
  41. primary_chain = None
  42. else:
  43. primary_chain_match = re.search(
  44. '^Found primary backup chain.*\s{sep}([\w\W]*?)\s{sep}'.format(
  45. sep=_CollectionStatus.chain_separator_regex,
  46. ),
  47. text,
  48. re.MULTILINE,
  49. )
  50. primary_chain = _ChainStatus._parse(
  51. text=primary_chain_match.group(1),
  52. )
  53. return cls(
  54. archive_dir_path=re.search(r'Archive dir: (.*)', text).group(1),
  55. primary_chain=primary_chain,
  56. )
  57. class _ChainStatus(_Status):
  58. def __init__(self, sets):
  59. self.sets = sets
  60. @property
  61. def first_backup_time(self):
  62. return min([s.backup_time for s in self.sets])
  63. @property
  64. def last_backup_time(self):
  65. return max([s.backup_time for s in self.sets])
  66. @property
  67. def last_incremental_backup_time(self):
  68. return self.last_backup_time if len(self.sets) > 1 else None
  69. @classmethod
  70. def _parse(cls, text):
  71. sets = []
  72. set_lines = re.split(r'Num volumes: *\r?\n', text)[1]
  73. for set_line in re.split(r'\r?\n', set_lines):
  74. set_attr = re.match(
  75. r'\s*(?P<mode>\w+) {2,}(?P<ts>.+?) {2,} (?P<vol>\d+)',
  76. set_line,
  77. ).groupdict()
  78. # duplicity uses time.asctime().
  79. # time.strptime() without format inverts time.asctime().
  80. sets.append(_SetStatus(
  81. backup_time=_parse_duplicity_timestamp(set_attr['ts']),
  82. ))
  83. return cls(sets=sets)
  84. class _SetStatus(_Status):
  85. def __init__(self, backup_time):
  86. self.backup_time = backup_time