__init__.py

import contextlib
import os
import platform
import psutil
import shlex
import subprocess
import sys
import time
import yaml
from datetime import datetime
from glob import glob
from slugify import slugify


def term():
    """Get the Terminal reference to make output pretty

    Returns:
        (blessings.Terminal): Returns a
            `blessings <https://blessings.readthedocs.io/en/latest>`_ terminal
            instance. If running on Windows (and not under Cygwin) an
            `intercessions <https://pypi.org/project/intercessions>`_ terminal
            instance is returned instead.
    """
    if not hasattr(term, '_handle'):
        if sys.platform != "cygwin" and platform.system() == 'Windows':
            from intercessions import Terminal
        else:
            from blessings import Terminal
        term._handle = Terminal()
    return term._handle


@contextlib.contextmanager
def pushd(newDir):
    """Temporarily change the working directory, restoring it on exit."""
    previousDir = os.getcwd()
    os.chdir(newDir)
    try:
        yield
    finally:
        os.chdir(previousDir)
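
# Usage sketch (illustrative only, not part of the original module): the previous
# working directory is restored even if the body raises.
#
#     with pushd('/tmp'):
#         ...  # os.getcwd() == '/tmp' here
#     # original working directory restored here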


class StateException(Exception):
    pass


class BackupException(Exception):
    pass


class Job(object):
    # A Job is a single task belonging to a Backup. It moves through the numeric
    # states below; QUEUED jobs wait in the class-wide pool until initPool()
    # starts them, up to maxthreads at a time.
    pool = []
    maxthreads = 4
    verbosity = 3
    logTransitions = False
    READY = 0
    QUEUED = 1
    STARTING = 2
    RUNNING = 3
    ENDING = 4
    FAILED = 5
    SUCCESSFUL = 6

    @staticmethod
    def initPool():
        # Start queued jobs, keeping at most Job.maxthreads jobs running.
        if Job.pool:
            maxthreads = Job.maxthreads
            running = len(Job.running())
            if maxthreads > running:
                queued = Job.queued()
                # Never start more jobs than there are free slots.
                for job in queued[:maxthreads - running]:
                    job.start()

    @staticmethod
    def finished():
        return [x for x in Job.pool if x.state in (Job.FAILED, Job.SUCCESSFUL)]

    @staticmethod
    def pending():
        return [x for x in Job.pool
                if x.state not in (Job.FAILED, Job.SUCCESSFUL)]

    @staticmethod
    def running():
        return [x for x in Job.pool
                if x.state in (Job.STARTING, Job.RUNNING, Job.ENDING)]

    @staticmethod
    def queued():
        return [x for x in Job.pending() if x.state is Job.QUEUED]

    def __init__(self, backup, config):
        self._config = config
        self._backup = backup
        self._state = Job.READY

    def __str__(self):
        return 'Backup {0} Job #{1} ({2})'.format(
            self._backup.name, self.index, self.getState())

    @property
    def config(self):
        return self._config

    @property
    def state(self):
        if self._state is Job.READY and self in Job.pool:
            self.setState(Job.QUEUED)
        elif self._state in (Job.STARTING, Job.RUNNING, Job.ENDING):
            # Poll the child process; a vanished pid counts as a failure.
            code = self._process.poll() or self._process.returncode
            if code is None and not psutil.pid_exists(self._process.pid):
                code = -1
            if code is not None:
                if code:
                    self.setState(Job.FAILED)
                    Job.initPool()
                elif self._state is Job.ENDING:
                    self.setState(Job.SUCCESSFUL)
                    Job.initPool()
                else:
                    # The current stage finished cleanly; advance to the next one.
                    self.start()
        return self._state

    def queue(self):
        if self.state is not Job.READY:
            raise StateException('{} not in state to queue'.format(self))
        if self in Job.pool:
            raise StateException('{} already in queued pool'.format(self))
        self.setState(Job.QUEUED)
        Job.pool.append(self)

    @property
    def args(self):
        # Command line for the current stage: pre, the backup itself, or post.
        if self._state is Job.STARTING:
            return shlex.split(self.pre)
        elif self._state is Job.RUNNING:
            if Backup.engine == "rdiff-backup":
                args = ['rdiff-backup', '-v{}'.format(Job.verbosity)]
                if 'filters' in self.config:
                    for item in self.config['filters']:
                        if 'include' in item:
                            args += ['--include', item['include']]
                        elif 'exclude' in item:
                            args += ['--exclude', item['exclude']]
                        else:
                            raise BackupException(
                                '{0} has an invalid filter {1}'.format(self, item))
                return args + [self.fromPath, self.toPath]
            else:
                raise StateException(
                    'Invalid backup engine {}'.format(Backup.engine))
        elif self._state is Job.ENDING:
            return shlex.split(self.post)
        else:
            raise StateException('Invalid state {}'.format(self.getState()))

    @property
    def logfile(self):
        if not hasattr(self, '_logfile'):
            path = os.path.dirname(self.logpath)
            if not os.path.exists(path):
                os.makedirs(path, exist_ok=True)
            self._logfile = open(self.logpath, 'w' if Backup.truncateLogs else 'a')
        return self._logfile

    @property
    def logpath(self):
        if not hasattr(self, '_logpath'):
            self._logpath = os.path.join(os.path.dirname(
                self._backup.logpath), 'job{}.log'.format(self.index))
        return self._logpath

    @property
    def fromPath(self):
        if not hasattr(self, '_fromPath'):
            fromPath = self.config['from']
            if 'roots' in self._backup.config:
                roots = self._backup.config['roots']
                if 'from' in roots:
                    # A root containing '::' is an rdiff-backup remote spec
                    # (host::path), so it is joined textually rather than with
                    # os.path.join.
                    if '::' in roots['from']:
                        if roots['from'].endswith('::'):
                            fromPath = roots['from'] + fromPath
                        else:
                            fromPath = roots['from'] + os.sep + fromPath
                    else:
                        fromPath = os.path.join(roots['from'], fromPath)
            self._fromPath = fromPath
        return self._fromPath

    @property
    def toPath(self):
        if not hasattr(self, '_toPath'):
            toPath = self.config['to']
            if 'roots' in self._backup.config:
                roots = self._backup.config['roots']
                if 'to' in roots:
                    if '::' in roots['to']:
                        if roots['to'].endswith('::'):
                            toPath = roots['to'] + toPath
                        else:
                            toPath = roots['to'] + os.sep + toPath
                    else:
                        toPath = os.path.join(roots['to'], toPath)
            self._toPath = toPath
        return self._toPath

    @property
    def process(self):
        return self._process

    @property
    def index(self):
        if not hasattr(self, '_index'):
            self._index = self._backup.jobs.index(self)
        return self._index

    @property
    def pre(self):
        if not hasattr(self, '_pre'):
            self._pre = self.config['pre'] if 'pre' in self.config else None
        return self._pre

    @property
    def post(self):
        if not hasattr(self, '_post'):
            self._post = self.config['post'] if 'post' in self.config else None
        return self._post

    def log(self, text):
        text = '[{0}] (Backup {1} Job #{2}) {3}\n'.format(
            datetime.now().isoformat(), self._backup.name, self.index, text)
        print(text, end='')
        self._backup.logfile.write(text)
        self._backup.logfile.flush()

    def start(self):
        # Advance to the next stage and launch its command, skipping stages
        # that have no command configured.
        if self._state is self.QUEUED:
            self._backup.setStatus(Backup.RUNNING)
            self.setState(Job.STARTING)
            if self.pre is None:
                self.setState(Job.RUNNING)
        elif self._state is self.STARTING:
            self.setState(Job.RUNNING)
        elif self._state is self.RUNNING:
            self.setState(Job.ENDING)
            if self.post is None:
                self.setState(Job.SUCCESSFUL)
                return
        else:
            raise StateException('Invalid state to start {}'.format(self))
        args = self.args
        self.logfile.write(
            "[{0}] {1} &\n".format(
                datetime.now().isoformat(),
                ' '.join([shlex.quote(x) for x in args])))
        self.logfile.flush()
        self._process = subprocess.Popen(
            args, stdout=self.logfile, stderr=subprocess.STDOUT,
            stdin=subprocess.DEVNULL, universal_newlines=True, bufsize=1)

    def setState(self, state):
        if self._state != state:
            self.log('{0} -> {1}'.format(
                self.getState(self._state), self.getState(state)))
            self._state = state
            if state in (Job.SUCCESSFUL, Job.FAILED):
                self.logfile.close()

    def getState(self, state=None):
        return {
            Job.READY: 'ready',
            Job.QUEUED: 'queued',
            Job.STARTING: 'starting',
            Job.RUNNING: 'running',
            Job.ENDING: 'ending',
            Job.FAILED: 'failed',
            Job.SUCCESSFUL: 'successful',
        }[self.state if state is None else state]
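
# Illustration for Job.args (hypothetical job config, not taken from the original
# source): with {'from': 'etc', 'to': 'etc', 'filters': [{'exclude': '**/cache'}]}
# and the default rdiff-backup engine, the RUNNING stage would run roughly
#
#     rdiff-backup -v3 --exclude **/cache <fromPath> <toPath>
#
# while STARTING and ENDING run shlex.split(pre) and shlex.split(post) when those
# keys are present.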


class Backup(object):
    # A Backup groups the jobs defined by one source YAML file and tracks its own
    # status; backups listed in its 'depends' must succeed before it is unblocked.
    instances = {}
    _queue = []
    logTransitions = False
    truncateLogs = True
    engine = 'rdiff-backup'
    logdir = '/var/log/backup.d'
    BLOCKED = 0
    READY = 1
    QUEUED = 2
    RUNNING = 3
    FAILED = 4
    SUCCESSFUL = 5

    @staticmethod
    def _log():
        print('Backup status:')
        for backup in Backup.instances.values():
            print(' {}'.format(backup))
            for job in backup.pending:
                print(' {}'.format(job))

    @staticmethod
    def load(paths):
        for path in paths:
            Backup.get(path)

    @staticmethod
    def blocked():
        return [x for x in Backup.instances.values()
                if x.status is Backup.BLOCKED]

    @staticmethod
    def get(path):
        if path not in Backup.instances:
            Backup(sources()[path])
        return Backup.instances[path]

    @staticmethod
    def start():
        for backup in Backup.instances.values():
            backup.queue()
        Job.initPool()

    @staticmethod
    def wait(log=False):
        while Backup._queue:
            # Iterate over a copy: polling status may dequeue finished backups.
            for backup in list(Backup._queue):
                if backup.status is Backup.BLOCKED:
                    for dependency in backup.blocking:
                        if dependency.status is Backup.READY:
                            dependency.queue()
            if log:
                Backup._log()
            time.sleep(1)

    def __init__(self, config):
        self._config = config
        self._path = self._config['path']
        self._name = slugify(self._path, max_length=255)
        self._logpath = os.path.realpath(os.path.join(
            Backup.logdir, self.name, 'backup.log'))
        self._status = Backup.READY
        if self.blocking:
            self.setStatus(Backup.BLOCKED)
        Backup.instances[self._path] = self

    def __str__(self):
        return 'Backup {0} ({1}, {2} jobs)'.format(
            self.name, self.getStatus(), len([x for x in self.jobs
                if x.state not in (Job.FAILED, Job.SUCCESSFUL)]))

    @property
    def config(self):
        return self._config

    @property
    def name(self):
        return self._name

    @property
    def path(self):
        return self._path

    @property
    def logpath(self):
        return self._logpath

    @property
    def logfile(self):
        if not hasattr(self, '_logfile'):
            path = os.path.dirname(self.logpath)
            if not os.path.exists(path):
                os.makedirs(path, exist_ok=True)
            self._logfile = open(self.logpath, 'w' if Backup.truncateLogs else 'a')
        return self._logfile

    def log(self, text):
        text = '[{0}] (Backup {1}) {2}\n'.format(
            datetime.now().isoformat(), self.name, text)
        print(text, end='')
        self.logfile.write(text)
        self.logfile.flush()

    @property
    def status(self):
        if self.blocking:
            # Fail fast if any dependency failed; otherwise stay blocked.
            if [x for x in self.blocking if x.status is Backup.FAILED]:
                self.setStatus(Backup.FAILED)
            elif self._status is not Backup.BLOCKED:
                self.setStatus(Backup.BLOCKED)
        elif self._status is Backup.BLOCKED:
            # Dependencies finished: become ready, or start queueing jobs if the
            # backup is already in the queue.
            if self not in Backup._queue:
                self.setStatus(Backup.READY)
            else:
                self.setStatus(Backup.QUEUED)
                for job in self.ready:
                    job.queue()
                Job.initPool()
        elif self._status is Backup.QUEUED and self not in Backup._queue:
            self.setStatus(Backup.READY)
        if self._status in (Backup.RUNNING, Backup.QUEUED) and not self.pending:
            self.setStatus(Backup.FAILED if self.failed else Backup.SUCCESSFUL)
        if self._status in (Backup.FAILED, Backup.SUCCESSFUL) \
                and self in Backup._queue:
            Backup._queue.remove(self)
        return self._status

    @property
    def blocking(self):
        return [x for x in self.depends
                if x.status is not Backup.SUCCESSFUL]

    @property
    def depends(self):
        if not hasattr(self, '_depends'):
            self._depends = []
            for path in self.config["depends"]:
                if path not in Backup.instances:
                    Backup(sources()[path])
                self._depends.append(Backup.instances[path])
        return self._depends

    @property
    def jobs(self):
        if not hasattr(self, '_jobs'):
            self._jobs = []
            if 'jobs' in self.config:
                for job in self.config['jobs']:
                    self._jobs.append(Job(self, job))
        return self._jobs

    @property
    def pending(self):
        return [x for x in self.jobs if x.state not in (Job.FAILED, Job.SUCCESSFUL)]

    @property
    def ready(self):
        return [x for x in self.jobs if x.state is Job.READY]

    @property
    def failed(self):
        return [x for x in self.jobs if x.state is Job.FAILED]

    def setStatus(self, status):
        if self._status != status:
            self.log('{0} -> {1}'.format(
                self.getStatus(self._status), self.getStatus(status)))
            self._status = status
            if status in (Backup.SUCCESSFUL, Backup.FAILED):
                self.logfile.close()

    def getStatus(self, status=None):
        return {
            Backup.BLOCKED: 'blocked',
            Backup.READY: 'ready',
            Backup.QUEUED: 'queued',
            Backup.RUNNING: 'running',
            Backup.FAILED: 'failed',
            Backup.SUCCESSFUL: 'successful'
        }[self.status if status is None else status]

    def queue(self):
        if self in Backup._queue:
            raise StateException('Backup already queued')
        Backup._queue.append(self)
        self.setStatus(Backup.QUEUED)
        if self.status is not Backup.BLOCKED:
            for job in self.ready:
                job.queue()
            Job.initPool()


def config():
    # Lazily load and cache backup.yml from the configuration root.
    if hasattr(config, '_handle'):
        return config._handle
    with pushd(config._root):
        with open("backup.yml") as f:
            config._handle = yaml.load(f, Loader=yaml.SafeLoader)
    return config._handle


def sources():
    # Lazily scan the configured source directories for *.yml/*.yaml files and
    # cache the parsed, active source definitions keyed by their real path.
    if hasattr(sources, '_handle'):
        return sources._handle
    sources._handle = {}
    with pushd(config._root):
        for source in config()['sources']:
            source = os.path.realpath(source)
            for path in glob('{}/*.yml'.format(source)) + \
                    glob('{}/*.yaml'.format(source)):
                path = os.path.realpath(path)
                with pushd(os.path.dirname(path)), open(path) as f:
                    data = yaml.load(f, Loader=yaml.SafeLoader)
                    if "active" in data and data["active"]:
                        data['path'] = path
                        if "depends" not in data:
                            data["depends"] = []
                        for i in range(0, len(data["depends"])):
                            data["depends"][i] = os.path.realpath(
                                '{}.yml'.format(data["depends"][i]))
                        sources._handle[path] = data
    return sources._handle
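
# Illustrative configuration layout (keys taken from the code above; the paths
# and values are hypothetical):
#
#     /etc/backup.d/backup.yml:
#         engine: rdiff-backup
#         logdir: /var/log/backup.d
#         maxthreads: 4
#         sources:
#           - sources.d
#
#     /etc/backup.d/sources.d/example.yml:
#         active: true
#         depends: []
#         roots:
#           from: /
#           to: /srv/backup/example
#         jobs:
#           - from: etc
#             to: etc
#             filters:
#               - exclude: '**/cache'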


def main(args):
    try:
        config._root = args[0] if len(args) else '/etc/backup.d'
        if not os.path.exists(config._root):
            raise BackupException(
                'Configuration files missing from {}'.format(config._root))
        if 'engine' in config():
            engine = config()["engine"]
            if engine not in ("rdiff-backup",):
                raise BackupException('Unknown backup engine: {}'.format(engine))
            Backup.engine = engine
        if 'logdir' in config():
            logdir = config()['logdir']
            os.makedirs(logdir, exist_ok=True)
            if not os.path.exists(logdir):
                raise BackupException(
                    'Unable to create logging directory: {}'.format(logdir))
            Backup.logdir = logdir
        if 'maxthreads' in config():
            Job.maxthreads = config()['maxthreads']
        if 'verbosity' in config():
            Job.verbosity = config()['verbosity']
        Backup.logTransitions = Job.logTransitions = True
        Backup.truncateLogs = "truncateLogs" in config() and config()["truncateLogs"]
        Backup.load(sources().keys())
        Backup.start()
        Backup.wait()
    except BackupException as ex:
        print(ex)
        sys.exit(1)
    except Exception:
        from traceback import format_exc
        msg = "Error encountered:\n" + format_exc().strip()
        print(msg)
        sys.exit(1)


if __name__ == '__main__':
    main(sys.argv[1:])
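
# Typical invocation (a sketch; the packaging and entry point are not shown in
# this file): run the module directly and point it at a configuration root, or
# let it fall back to the /etc/backup.d default used by main().
#
#     python __init__.py /etc/backup.d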