genesis commit

main
Hanjo Meinhardt 2 years ago
commit 9c67bc06d9

.drone.yml
@@ -0,0 +1,41 @@
---
kind: pipeline
type: docker
name: default

steps:
- name: validate
  image: bunix42/pre-commit

- name: docker-latest
  image: plugins/docker
  settings:
    repo: bunix42/docker-borgbackup-exporter
    build_args:
    - BORG_VERSION=1.2.0-r0
    username:
      from_secret: docker_username
    password:
      from_secret: docker_password
    tags:
    - latest
    - 1.2.0

- name: slack
  image: plugins/slack
  settings:
    webhook:
      from_secret: slack_webhook
    channel: drone
    username: drone
  when:
    status:
    - success
    - failure

trigger:
  branch:
  - main
  event:
  - push
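
The `from_secret` references expect repository secrets in Drone; a minimal sketch of adding them with the Drone CLI (repository slug taken from the build badge, values are placeholders):

```
drone secret add --repository bunix42/docker-borgbackup-exporter \
  --name docker_username --data <docker hub user>
drone secret add --repository bunix42/docker-borgbackup-exporter \
  --name docker_password --data <docker hub token>
drone secret add --repository bunix42/docker-borgbackup-exporter \
  --name slack_webhook --data <slack webhook url>
```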

.gitignore
@@ -0,0 +1 @@
example

.pre-commit-config.yaml
@@ -0,0 +1,5 @@
repos:
- repo: https://github.com/hadolint/hadolint
  rev: v2.10.0
  hooks:
  - id: hadolint
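
The same check can be run locally before pushing (assuming pre-commit and hadolint are installed on the host):

```
pre-commit install          # lint the Dockerfile on every commit
pre-commit run --all-files  # or run all hooks once against the whole repo
```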

Dockerfile
@@ -0,0 +1,14 @@
FROM alpine:3.16

# package versions are pinned; keep them in sync with the build_args in .drone.yml
ARG BORG_VERSION=1.2.0-r0
ARG OPENSSH_VERSION=9.0_p1-r1
RUN apk add --no-cache borgbackup=${BORG_VERSION} openssh=${OPENSSH_VERSION}

COPY assets/*.sh /usr/bin/

# default schedule: run the exporter once per hour
ENV CRON_SCHEDULE="0 */1 * * *"

VOLUME ["/root/.ssh", "/output"]
ENTRYPOINT ["/usr/bin/entrypoint.sh"]
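
Outside of CI the image can also be built locally; a minimal sketch mirroring the plugins/docker settings from .drone.yml (the tag is chosen here for illustration):

```
docker build \
  --build-arg BORG_VERSION=1.2.0-r0 \
  -t bunix42/docker-borgbackup-exporter:1.2.0 .
```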

README.md
@@ -0,0 +1,35 @@
# docker-borgbackup-exporter
[![Build Status](https://ci.bunix.de/api/badges/bunix42/docker-borgbackup-exporter/status.svg)](https://ci.bunix.de/bunix42/docker-borgbackup-exporter)
[![hadolint](https://img.shields.io/badge/hadolint-passing-brightgreen)](https://github.com/hadolint/hadolint)
[![pre-commit](https://img.shields.io/badge/pre--commit-enabled-brightgreen?logo=pre-commit&logoColor=white)](https://github.com/pre-commit/pre-commit)
This is a simple cron-based exporter for Borg backup metrics. The generated metrics file can be delivered by any means, for example a small nginx container, node_exporter's textfile collector, or a push to a Pushgateway. A sample of the generated file is shown below the compose example.
## example usage
```
version: '3'
services:
  backup:
    build: .
    environment:
      - BORG_PASSPHRASE=ThisIsTheAbsolutlyAlmightySecretCredential
      - BORG_REPO=ssh://<user>@<host>/<root folder>
      - CRON_SCHEDULE=0 */1 * * *
    volumes:
      - ./conf/.ssh:/root/.ssh # this volume should contain a private SSH key and a known_hosts file
      - exporter-data:/output  # this volume is used to share the metrics file
  borg-exporter-front:
    image: nginx:1.21
    volumes:
      - exporter-data:/usr/share/nginx/html/
    ports:
      - 8080:80
volumes:
  exporter-data:
```
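
For reference, the metrics file written to `/output/metrics` by `assets/borg-exporter.sh` looks roughly like this (metric names are taken from the script; the label value and numbers are illustrative):

```
backup_count{archive="myhost"} 42
backup_files{archive="myhost"} 12345
backup_chunks_unique{archive="myhost"} 123456
backup_chunks_total{archive="myhost"} 7890123
backup_last_start{archive="myhost"} 1654480801
backup_last_end{archive="myhost"} 1654480992
backup_last_size{archive="myhost"} 10995116277
backup_last_size_compressed{archive="myhost"} 8719818490
backup_last_size_dedup{archive="myhost"} 12939427
backup_total_size{archive="myhost"} 549755813888
backup_total_size_compressed{archive="myhost"} 439804651110
backup_total_size_dedup{archive="myhost"} 10854866944
```

With the compose file above, nginx serves this file at http://localhost:8080/metrics, where Prometheus or node_exporter's textfile collector can pick it up.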

assets/borg-exporter.sh
@@ -0,0 +1,77 @@
#!/bin/sh
set -eu
# write to a temporary file first; it is moved over the real metrics file at the end
PROM_FILE=/output/metrics
TMP_FILE=$PROM_FILE.$$
LIST=$(borg list | awk '{print $1}')

# count the archives; after the loop, $i holds the most recently listed archive name
COUNTER=0
for i in $LIST; do
  COUNTER=$((COUNTER+1))
done

BORG_INFO=$(borg info "::$i")
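# for orientation, the relevant part of `borg info ::<archive>` looks roughly
# like this (borg 1.2 layout, values illustrative):
#   Archive name: myhost-2022-06-06
#   Time (start): Mon, 2022-06-06 02:00:01
#   Time (end):   Mon, 2022-06-06 02:03:12
#   Number of files: 12345
#   This archive:   10.24 GB   8.12 GB   12.34 MB
#   All archives:  512.00 GB 409.60 GB   10.11 GB
#   Chunk index:    123456  7890123
# the grep/awk pipelines below pick single columns out of these lines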
ARCHIVE_PREFIX=$(echo "$BORG_INFO" | grep "Archive name" | awk '{print $3}' | awk -F- '{print $1}')
echo "backup_count{archive=\"${ARCHIVE_PREFIX}\"} $COUNTER" > "$TMP_FILE"
{
echo "backup_files{archive=\"${ARCHIVE_PREFIX}\"} $(echo "$BORG_INFO" | grep "Number of files" | awk '{print $4}')"
echo "backup_chunks_unique{archive=\"${ARCHIVE_PREFIX}\"} $(echo "$BORG_INFO" | grep "Chunk index" | awk '{print $3}')"
echo "backup_chunks_total{archive=\"${ARCHIVE_PREFIX}\"} $(echo "$BORG_INFO" | grep "Chunk index" | awk '{print $4}')"
} >> "$TMP_FILE"
# convert a "<number> <unit>" pair as printed by borg into a byte count
calc_bytes() {
  NUM=$1
  UNIT=$2
  case "$UNIT" in
    B)
      echo "$NUM" | awk '{ print $1 }'
      ;;
    kB)
      echo "$NUM" | awk '{ print $1 * 1024 }'
      ;;
    MB)
      echo "$NUM" | awk '{ print $1 * 1024 * 1024 }'
      ;;
    GB)
      echo "$NUM" | awk '{ print $1 * 1024 * 1024 * 1024 }'
      ;;
    TB)
      echo "$NUM" | awk '{ print $1 * 1024 * 1024 * 1024 * 1024 }'
      ;;
  esac
}
# timings
LAST_START=$(echo "$BORG_INFO" | grep "Time (start)" | awk '{print $4" "$5}')
LAST_END=$(echo "$BORG_INFO" | grep "Time (end)" | awk '{print $4" "$5}')
LAST_START_TIMESTAMP=$(date -d "$LAST_START" +"%s")
LAST_END_TIMESTAMP=$(date -d "$LAST_END" +"%s")
# byte sizes
LAST_SIZE=$(calc_bytes $(echo "$BORG_INFO" | grep "This archive" | awk '{print $3}') $(echo "$BORG_INFO" | grep "This archive" | awk '{print $4}'))
LAST_SIZE_COMPRESSED=$(calc_bytes $(echo "$BORG_INFO" | grep "This archive" | awk '{print $5}') $(echo "$BORG_INFO" | grep "This archive" | awk '{print $6}'))
LAST_SIZE_DEDUP=$(calc_bytes $(echo "$BORG_INFO" | grep "This archive" | awk '{print $7}') $(echo "$BORG_INFO" | grep "This archive" | awk '{print $8}'))
TOTAL_SIZE=$(calc_bytes $(echo "$BORG_INFO" | grep "All archives" | awk '{print $3}') $(echo "$BORG_INFO" | grep "All archives" | awk '{print $4}'))
TOTAL_SIZE_COMPRESSED=$(calc_bytes $(echo "$BORG_INFO" | grep "All archives" | awk '{print $5}') $(echo "$BORG_INFO" | grep "All archives" | awk '{print $6}'))
TOTAL_SIZE_DEDUP=$(calc_bytes $(echo "$BORG_INFO" | grep "All archives" | awk '{print $7}') $(echo "$BORG_INFO" | grep "All archives" | awk '{print $8}'))
{
echo "backup_last_start{archive=\"${ARCHIVE_PREFIX}\"} $LAST_START_TIMESTAMP"
echo "backup_last_end{archive=\"${ARCHIVE_PREFIX}\"} $LAST_END_TIMESTAMP"
echo "backup_last_size{archive=\"${ARCHIVE_PREFIX}\"} $LAST_SIZE"
echo "backup_last_size_compressed{archive=\"${ARCHIVE_PREFIX}\"} $LAST_SIZE_COMPRESSED"
echo "backup_last_size_dedup{archive=\"${ARCHIVE_PREFIX}\"} $LAST_SIZE_DEDUP"
echo "backup_total_size{archive=\"${ARCHIVE_PREFIX}\"} $TOTAL_SIZE"
echo "backup_total_size_compressed{archive=\"${ARCHIVE_PREFIX}\"} $TOTAL_SIZE_COMPRESSED"
echo "backup_total_size_dedup{archive=\"${ARCHIVE_PREFIX}\"} $TOTAL_SIZE_DEDUP"
} >> "$TMP_FILE"
mv "$TMP_FILE" "$PROM_FILE"

assets/entrypoint.sh
@@ -0,0 +1,6 @@
#!/bin/sh
set -eu
# register the exporter with the configured schedule; for the default
# CRON_SCHEDULE this appends "0 */1 * * * /usr/bin/borg-exporter.sh"
echo "$CRON_SCHEDULE /usr/bin/borg-exporter.sh" >> "/var/spool/cron/crontabs/$(whoami)"
# run crond in the foreground so it stays the container's main process
crond -f