Initial, naive implementation
This commit is contained in:
parent 92eeb06ebb
commit 3d719f07d6
1 changed file with 55 additions and 5 deletions
@@ -1,9 +1,25 @@
 #!/usr/bin/env python
 
 from prometheus_client import start_http_server, Gauge
-from time import sleep
+from time import sleep, mktime
+from dateutil import parser
+
+import os
+import json
+import subprocess
 
 HTTP_PORT = 9401
+BACKUP_DIRECTORY = '/tmp/backup'
+INTERVAL = 30 * 60  # seconds
+
+total_chunks = Gauge('borg_repository_total_chunks', 'Number of chunks', ['name'])
+total_csize = Gauge('borg_repository_total_csize', 'Total compressed and encrypted size of all chunks multiplied with their reference counts', ['name'])
+total_size = Gauge('borg_repository_total_size', 'Total uncompressed size of all chunks multiplied with their reference counts', ['name'])
+total_unique_chunks = Gauge('borg_repository_total_unique_chunks', 'Number of unique chunks', ['name'])
+unique_csize = Gauge('borg_repository_unique_csize', 'Compressed and encrypted size of all chunks', ['name'])
+unique_size = Gauge('borg_repository_unique_size', 'Uncompressed size of all chunks', ['name'])
+last_modified = Gauge('borg_repository_last_modified', 'Last modified UNIX timestamp', ['name'])
+
 
 if __name__ == '__main__':
     # Serve metrics over HTTP.
@@ -13,7 +29,41 @@ if __name__ == '__main__':
     print(" OK.")
 
     while True:
-        # TODO: for every repository in $, use borg's JSON interface and export
-        # its content - or use the python bindings if they're stable enough.
-        print("Not implemented yet.")
-        sleep(1)
+        print('=' * 72)
+        entries = []
+        try:
+            print("> Scanning {} for borg repositories...".format(BACKUP_DIRECTORY), end='')
+            entries = os.scandir(BACKUP_DIRECTORY)
+            print(" OK")
+        except Exception as e:
+            print(" Error: {}".format(e))
+
+        for entry in entries:
+            if not entry.is_dir():
+                print(">> Ignoring {} since it is not a directory.".format(entry.name))
+            else:
+                repository_name = entry.name
+                repository_path = entry.path
+
+                try:
+                    print(">> Querying borg for {}...".format(repository_name), end='')
+                    raw_borg_json = subprocess.check_output(['borg', 'info', '--json', repository_path])
+                    info = json.loads(raw_borg_json)
+
+                    stats = info['cache']['stats']
+                    total_chunks.labels(repository_name).set(stats['total_chunks'])
+                    total_csize.labels(repository_name).set(stats['total_csize'])
+                    total_size.labels(repository_name).set(stats['total_size'])
+                    total_unique_chunks.labels(repository_name).set(stats['total_unique_chunks'])
+                    unique_csize.labels(repository_name).set(stats['unique_csize'])
+                    unique_size.labels(repository_name).set(stats['unique_size'])
+                    last_modified.labels(repository_name).set(
+                        mktime(parser.parse(info['repository']['last_modified']).timetuple()))
+
+
+                    print(" OK")
+                except Exception as e:
+                    print(" Error: {}".format(e))
+
+        print("> Waiting for next iteration - sleeping for {} seconds.".format(INTERVAL))
+        sleep(INTERVAL)
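For orientation, the nested keys this loop reads from `borg info --json` (`cache.stats` and `repository.last_modified`) and the timestamp conversion can be exercised in isolation. The sketch below uses made-up placeholder values, not real borg output; it only illustrates the shape the loop expects and the conversion it performs.

# Minimal sketch of the structure the while-loop expects from `borg info --json`.
# The numbers and the timestamp are illustrative placeholders, not real output.
from time import mktime
from dateutil import parser

info = {
    'cache': {
        'stats': {
            'total_chunks': 1000,
            'total_csize': 2048,
            'total_size': 4096,
            'total_unique_chunks': 900,
            'unique_csize': 1024,
            'unique_size': 2048,
        }
    },
    'repository': {
        'last_modified': '2017-10-01T12:00:00.000000',
    },
}

stats = info['cache']['stats']
print(stats['unique_csize'])  # the value a gauge such as unique_csize would be set to

# Same conversion the exporter uses: ISO 8601 string -> UNIX timestamp.
print(mktime(parser.parse(info['repository']['last_modified']).timetuple()))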
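Assuming the elided part of the script calls start_http_server(HTTP_PORT) as the import and the "Serve metrics over HTTP" comment suggest, prometheus_client exposes the gauges in the standard text format, so a quick manual check against a locally running exporter could look like the following (localhost and port 9401, per HTTP_PORT above, are assumptions):

# Quick manual check of the exposed metrics; assumes the exporter from this
# commit is running on localhost with HTTP_PORT = 9401.
from urllib.request import urlopen

body = urlopen('http://localhost:9401/metrics').read().decode('utf-8')
for line in body.splitlines():
    if line.startswith('borg_repository_'):
        print(line)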