# backup.py — scheduler that queues and runs rdiff-backup jobs described
# by YAML source definitions under etc/backup.d.
  1. import contextlib
  2. import os
  3. import platform
  4. import subprocess
  5. import sys
  6. import time
  7. import yaml
  8. from glob import glob
  9. from slugify import slugify
  10. def term():
  11. """Get the Terminal reference to make output pretty
  12. Returns:
  13. (blessings.Terminal): Returns
  14. a `blessings <https://blessings.readthedocs.io/en/latest>`_ terminal
  15. instance. If running in windows and not cygwin it will return an
  16. `intercessions <https://pypi.org/project/intercessions>`_ terminal
  17. instance instead
  18. """
  19. if not hasattr(term, '_handle'):
  20. if sys.platform != "cygwin" and platform.system() == 'Windows':
  21. from intercessions import Terminal
  22. else:
  23. from blessings import Terminal
  24. term._handle = Terminal()
  25. return term._handle
  26. @contextlib.contextmanager
  27. def pushd(newDir):
  28. previousDir = os.getcwd()
  29. os.chdir(newDir)
  30. try:
  31. yield
  32. finally:
  33. os.chdir(previousDir)
  34. class StateException(Exception):
  35. pass
  36. class BackupException(Exception):
  37. pass
  38. class Job(object):
  39. pool = []
  40. maxthreads = 4
  41. READY = 0
  42. ACTIVE = 1
  43. RUNNING = 2
  44. FAILED = 3
  45. SUCCESSFUL = 4
  46. @staticmethod
  47. def initPool():
  48. if Job.pool:
  49. maxthreads = Job.maxthreads
  50. if len(Job.pool) < maxthreads:
  51. maxthreads = len(Job.pool)
  52. for i in range(0, maxthreads):
  53. Job.pool.pop(0).start()
  54. @staticmethod
  55. def finished():
  56. return not [x.state for x in Job.pool]
  57. def __init__(self, backup, config):
  58. self._config = config
  59. self._backup = backup
  60. self._state = Job.READY
  61. def __str__(self):
  62. return 'Backup {0} Job #{1} ({2})'.format(
  63. self._backup.name, self._backup.jobs.index(self), {
  64. Job.READY: 'ready',
  65. Job.ACTIVE: 'active',
  66. Job.RUNNING: 'running',
  67. Job.FAILED: 'failed',
  68. Job.SUCCESSFUL: 'successful',
  69. }[self.state])
  70. @property
  71. def config(self):
  72. return self._config
  73. @property
  74. def state(self):
  75. if self._state is Job.RUNNING:
  76. code = self._process.poll()
  77. if code is not None:
  78. self._state = Job.FAILED if code else Job.SUCCESSFUL
  79. Job.initPool()
  80. return self._state
  81. def queue(self):
  82. if self.state is not Job.READY:
  83. raise StateException('{} not in state to queue'.format(self))
  84. if self in Job.pool:
  85. raise StateException('{} already in queued pool'.format(self))
  86. self._state = Job.ACTIVE
  87. Job.pool.append(self)
  88. @property
  89. def args(self):
  90. return Backup.args + []
  91. @property
  92. def logfile(self):
  93. return self._backup.logfile
  94. @property
  95. def process(self):
  96. return self._process
  97. def start(self):
  98. if self.state is not Job.ACTIVE:
  99. raise StateException('Invalid state to start {}'.format(self))
  100. self._process = subprocess.Popen(
  101. self.args, stdout=self.logfile, stderr=self.logfile,
  102. stdin=subprocess.DEVNULL, universal_newlines=True)
  103. self._state = Job.RUNNING
  104. self._backup._state = Backup.RUNNING
class Backup(object):
    """One backup definition (a YAML source file) and its jobs.

    Class-level registries drive a cooperative scheduler: ``instances``
    maps source path -> Backup, ``_queue`` holds backups queued by
    :meth:`queue` until :attr:`status` observes them finished. The
    ``status`` property is deliberately side-effectful — reading it
    advances the state machine (queues ready jobs, dequeues finished
    backups), and :meth:`wait` polls it in a loop.
    """
    # All loaded backups, keyed by absolute source-file path.
    instances = {}
    # Backups currently queued for execution.
    _queue = []
    # Base command line for every job; main() may replace the engine.
    args = ['rdiff-backup']
    # Default log directory; main() may override from configuration.
    logdir = '/var/log/backup.d'
    # Status values.
    BLOCKED = 0     # waiting on unfinished dependencies
    READY = 1
    QUEUED = 2
    RUNNING = 3
    FAILED = 4
    SUCCESSFUL = 5

    @staticmethod
    def _log():
        # Print a status line per backup plus one per unfinished job.
        print('Backup status:')
        for backup in Backup.instances.values():
            print(' {}'.format(backup))
            for job in backup.pending:
                print(' {}'.format(job))

    @staticmethod
    def load(paths):
        """Instantiate (via get) a Backup for every source path."""
        for path in paths:
            Backup.get(path)

    @staticmethod
    def blocked():
        """Return all backups currently blocked on dependencies."""
        return [x for x in Backup.instances.values()
                if x.status is Backup.BLOCKED]

    @staticmethod
    def get(path):
        """Return the Backup for *path*, creating it from sources() on
        first use (the constructor registers itself in ``instances``)."""
        if path not in Backup.instances:
            Backup(sources()[path])
        return Backup.instances[path]

    @staticmethod
    def start():
        """Queue every loaded backup, then kick off the job pool."""
        for backup in Backup.instances.values():
            backup.queue()
        Job.initPool()

    @staticmethod
    def wait(log=False):
        """Poll once per second until the queue drains.

        Reading ``status`` drives the state machine; blocked backups get
        their READY dependencies queued here.

        Args:
            log (bool): print a status report on every iteration.
        """
        while Backup._queue:
            for backup in Backup._queue:
                if backup.status is Backup.BLOCKED:
                    for dependency in backup.blocking:
                        if dependency.status is Backup.READY:
                            dependency.queue()
            if log:
                Backup._log()
            time.sleep(1)

    def __init__(self, config):
        self._config = config
        self._path = self._config['path']
        self._name = os.path.basename(self._path)
        # One log file per backup, named from a slug of its source path.
        self._logpath = os.path.realpath(os.path.join(
            Backup.logdir, '{}.log'.format(slugify(self._path))))
        self._status = Backup.BLOCKED if self.blocking else Backup.READY
        Backup.instances[self._path] = self

    def __str__(self):
        return 'Backup {0} ({1}, {2} jobs)'.format(
            self.name, {
                Backup.BLOCKED: 'blocked',
                Backup.READY: 'ready',
                Backup.QUEUED: 'queued',
                Backup.RUNNING: 'running',
                Backup.FAILED: 'failed',
                Backup.SUCCESSFUL: 'successful'
            }[self.status], len([x for x in self.jobs
                if x.state not in (Job.FAILED, Job.SUCCESSFUL)]))

    @property
    def config(self):
        return self._config

    @property
    def name(self):
        # Base name of the source YAML file.
        return self._name

    @property
    def path(self):
        return self._path

    @property
    def logpath(self):
        return self._logpath

    @property
    def logfile(self):
        # Opened lazily in write mode and kept open for the process
        # lifetime; jobs pass this handle to their subprocesses.
        if not hasattr(self, '_logfile'):
            self._logfile = open(self.logpath, 'w')
        return self._logfile

    def log(self, text):
        # NOTE(review): uses self._logfile directly — raises AttributeError
        # if called before the ``logfile`` property has been accessed;
        # confirm callers always touch ``logfile`` first.
        self._logfile.write(text)

    @property
    def status(self):
        """Current status; reading it ADVANCES the state machine:

        - blocked + any failed dependency  -> FAILED
        - blocked deps present             -> BLOCKED
        - unblocked while queued           -> QUEUED (and queue ready jobs)
        - RUNNING/QUEUED with no pending jobs -> FAILED or SUCCESSFUL
        - terminal statuses are removed from the class queue
        """
        if self.blocking:
            if [x for x in self.blocking if x.status is Backup.FAILED]:
                self._status = Backup.FAILED
            elif self._status is not Backup.BLOCKED:
                self._status = Backup.BLOCKED
        elif self._status is Backup.BLOCKED:
            if self not in Backup._queue:
                self._status = Backup.READY
            else:
                self._status = Backup.QUEUED
                for job in self.ready:
                    job.queue()
        elif self._status is Backup.QUEUED and self not in Backup._queue:
            self._status = Backup.READY
        if self._status in (Backup.RUNNING, Backup.QUEUED) and not self.pending:
            self._status = Backup.FAILED if self.failed else Backup.SUCCESSFUL
        if self._status in (Backup.FAILED, Backup.SUCCESSFUL) \
                and self in Backup._queue:
            Backup._queue.remove(self)
        return self._status

    @property
    def blocking(self):
        """Dependencies that have not yet completed successfully."""
        return [x for x in self.depends
                if x.status is not Backup.SUCCESSFUL]

    @property
    def depends(self):
        # Lazily resolve dependency paths into Backup instances,
        # constructing them from sources() as needed.
        if not hasattr(self, '_depends'):
            self._depends = []
            for path in self.config["depends"]:
                if path not in Backup.instances:
                    Backup(sources()[path])
                self._depends.append(Backup.instances[path])
        return self._depends

    @property
    def jobs(self):
        # One Job per entry in this source's 'jobs' list, built lazily.
        if not hasattr(self, '_jobs'):
            self._jobs = []
            for job in self.config['jobs']:
                self._jobs.append(Job(self, job))
        return self._jobs

    @property
    def pending(self):
        """Jobs that have not reached a terminal state."""
        return [x for x in self.jobs if x.state not in (Job.FAILED, Job.SUCCESSFUL)]

    @property
    def ready(self):
        return [x for x in self.jobs if x.state is Job.READY]

    @property
    def failed(self):
        return [x for x in self.jobs if x.state is Job.FAILED]

    def setStatus(self, state):
        # NOTE(review): only accepts None/False/True — looks like a leftover
        # from an earlier boolean status scheme (False/True collide with the
        # BLOCKED/READY integer constants); confirm before relying on it.
        if state in (None, False, True):
            self._status = state

    def queue(self):
        """Add this backup to the class queue (status -> QUEUED) and queue
        its ready jobs unless it is blocked.

        Raises:
            StateException: if the backup is already queued.
        """
        if self in Backup._queue:
            raise StateException('Backup already queued')
        Backup._queue.append(self)
        self._status = Backup.QUEUED
        if self.status is not Backup.BLOCKED:
            for job in self.ready:
                job.queue()
  252. def config():
  253. if hasattr(config, '_handle'):
  254. return config._handle
  255. with pushd('etc/backup.d'):
  256. with open("backup.yml") as f:
  257. config._handle = yaml.load(f, Loader=yaml.SafeLoader)
  258. return config._handle
  259. def sources():
  260. if hasattr(sources, '_handle'):
  261. return sources._handle
  262. sources._handle = {}
  263. with pushd('etc/backup.d'):
  264. for source in config()['sources']:
  265. source = os.path.realpath(source)
  266. for path in glob('{}/*.yml'.format(source)):
  267. path = os.path.realpath(path)
  268. with pushd(os.path.dirname(path)), open(path) as f:
  269. data = yaml.load(f, Loader=yaml.SafeLoader)
  270. if "active" in data and data["active"]:
  271. data['path'] = path
  272. if "depends" not in data:
  273. data["depends"] = []
  274. for i in range(0, len(data["depends"])):
  275. data["depends"][i] = os.path.realpath(
  276. '{}.yml'.format(data["depends"][i]))
  277. sources._handle[path] = data
  278. return sources._handle
  279. def main(args):
  280. if 'engine' in config():
  281. engine = config()["engine"]
  282. if engine not in ("rdiff-backup"):
  283. raise BackupException('Unknown backup engine: {}'.format(engine))
  284. Backup.args = [engine]
  285. if 'logdir' in config():
  286. logdir = config()['logdir']
  287. os.makedirs(logdir, exist_ok=True)
  288. if not os.path.exists(logdir):
  289. raise BackupException(
  290. 'Unable to create logging directory: {}'.format(logdir))
  291. Backup.logdir = logdir
  292. if 'maxthreads' in config():
  293. Job.maxthreads = config()['maxthreads']
  294. Backup.load(sources().keys())
  295. Backup._log()
  296. Backup.start()
  297. Backup._log()
  298. Backup.wait(True)
  299. if __name__ == '__main__':
  300. try:
  301. main(sys.argv[1:])
  302. except BackupException as ex:
  303. print(ex)
  304. sys.exit(1)
  305. except Exception:
  306. from traceback import format_exc
  307. msg = "Error encountered:\n" + format_exc().strip()
  308. print(msg)
  309. sys.exit(1)