// prometheus-borg-exporter/main.go
// (107 lines, 2.4 KiB, Go)

package main
import (
"flag"
"fmt"
"log"
"net/http"
"os"
"path/filepath"
"strings"
"time"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promhttp"
)
const VERSION = "1.0.0"
const LISTEN_ADDR = ":9403"
const INTERVAL = 30 * time.Minute
var backupDir = flag.String("backup-dir", "/srv/backups", "Directory where the backups are located")
var version_flag = flag.Bool("version", false, "Shows the program version")
var logfile = flag.String("logfile", "-", "Where to write the logs")
func main() {
flag.Parse()
if *logfile != "-" {
f, err := os.OpenFile(*logfile, os.O_CREATE|os.O_RDWR|os.O_APPEND, 0664)
if err != nil {
log.Fatalf("Could not open logfile: %v\n", err)
}
defer f.Close()
log.SetOutput(f)
}
if *version_flag {
fmt.Printf("%v\n", VERSION)
os.Exit(0)
}
reg := prometheus.NewRegistry()
log.Printf("Backup directory is: %v\n", *backupDir)
m := NewMetrics(reg)
go RecordMetrics(*m)
http.Handle("/metrics", promhttp.HandlerFor(reg, promhttp.HandlerOpts{Registry: reg}))
log.Printf("Listening on %v ...\n", LISTEN_ADDR)
log.Fatal(http.ListenAndServe(LISTEN_ADDR, nil))
}
// RecordMetrics runs forever, scanning the backup directory every INTERVAL
// and pushing the collected per-repository statistics into m. It is meant
// to be started as a goroutine from main.
//
// Fix: a failure to read the backup directory used to call log.Fatalln,
// which terminated the whole exporter on a transient error (e.g. an NFS
// hiccup). Now the error is logged and the scan is retried next cycle.
func RecordMetrics(m Metrics) {
	for {
		buffer, err := scanBackups(*backupDir)
		if err != nil {
			log.Printf(">> Could not read backup directory %v: %v\n", *backupDir, err)
		} else {
			m.Update(buffer)
		}
		log.Printf("> Waiting %v\n", INTERVAL)
		time.Sleep(INTERVAL)
	}
}

// scanBackups walks the immediate children of dir, treating each visible
// subdirectory as a borg repository, and returns one MetricsBuffer entry
// per repository whose info and archive list could be fetched. Repositories
// that fail are logged and skipped; only the directory listing itself can
// make the whole scan fail.
func scanBackups(dir string) ([]MetricsBuffer, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return nil, err
	}
	buffer := []MetricsBuffer{}
	for _, entry := range entries {
		// Plain files and dot-directories are not repositories.
		if !entry.IsDir() || strings.HasPrefix(entry.Name(), ".") {
			log.Printf(">> Ignoring %v\n", entry.Name())
			continue
		}
		path := filepath.Join(dir, entry.Name())
		info, err := GetInfo(path)
		if err != nil {
			log.Printf(">> Could not get info about %v: %v\n", path, err)
			continue
		}
		list, err := GetList(path)
		if err != nil {
			log.Printf(">> Could not get archive list from %v: %v\n", path, err)
			continue
		}
		stats := info.Cache.Stats
		log.Printf("> Got info for: %v\n", path)
		buffer = append(buffer, MetricsBuffer{
			RepoName:          entry.Name(),
			ArchiveCount:      float64(len(list.Archives)),
			LastArchiveTime:   list.LastArchiveUnix(),
			LastModified:      info.LastmodUnix(),
			TotalChunks:       stats.Total_chunks,
			TotalCsize:        stats.Total_csize,
			TotalSize:         stats.Total_size,
			TotalUniqueChunks: stats.Total_unique_chunks,
			UniqueCsize:       stats.Unique_csize,
			UniqueSize:        stats.Unique_size,
		})
	}
	return buffer, nil
}