Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
57 changes: 57 additions & 0 deletions .github/workflows/image-publish.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
---
name: Publish a container image

# Build on every push (image tagged with the commit SHA). When a release
# is published or edited, additionally retag and push ":latest" and the
# release tag.
on:
  push:
  release:
    types: [published, edited]

jobs:
  build:
    runs-on: ubuntu-24.04
    permissions:
      packages: write
    steps:
      - uses: actions/checkout@v6
      # QEMU is required to build the non-native linux/arm64 platform.
      - uses: docker/setup-qemu-action@v3
      # Exports TAGS and ANNOTATIONS into $GITHUB_ENV for the build step.
      - run: ./build/set_image_metadata
      - uses: docker/setup-compose-action@v1

      - name: Build an image
        id: build-image
        uses: redhat-actions/buildah-build@v2
        with:
          image: ${{ github.event.repository.name }}-dev
          tags: ${{ env.TAGS }}
          platforms: linux/amd64, linux/arm64/v8
          containerfiles: |
            ./Containerfile
          labels: ${{ env.ANNOTATIONS }}

      - name: Push the image to GHCR
        id: push-to-ghcr
        uses: redhat-actions/push-to-registry@v2
        with:
          image: ${{ steps.build-image.outputs.image }}
          tags: ${{ steps.build-image.outputs.tags }}
          registry: ghcr.io/${{ github.repository_owner }}
          username: ${{ github.actor }}
          password: ${{ github.token }}

      # Only 'published'/'edited' release events reach this workflow
      # (see on.release.types), so checking the event name is sufficient.
      # NOTE(review): multi-platform buildah-build produces a manifest
      # list; confirm 'buildah tag' retags the list as expected.
      - name: Set the version tag (only if released)
        id: retag-version
        if: github.event_name == 'release'
        run: |
          buildah tag \
            ${{ steps.build-image.outputs.image }}:${{ github.sha }} \
            ghcr.io/${{ github.repository_owner }}/${{ github.event.repository.name }}:latest
          buildah tag \
            ${{ steps.build-image.outputs.image }}:${{ github.sha }} \
            ghcr.io/${{ github.repository_owner }}/${{ github.event.repository.name }}:${{ github.event.release.tag_name }}

      - name: Push the release image to GHCR
        if: github.event_name == 'release'
        uses: redhat-actions/push-to-registry@v2
        with:
          # Fully-qualified refs, so no separate image/registry inputs are needed.
          tags: >-
            ghcr.io/${{ github.repository_owner }}/${{ github.event.repository.name }}:latest
            ghcr.io/${{ github.repository_owner }}/${{ github.event.repository.name }}:${{ github.event.release.tag_name }}
          username: ${{ github.actor }}
          password: ${{ github.token }}
12 changes: 12 additions & 0 deletions Containerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
FROM postgres:18-alpine

# curl   -> pushes metrics to the Prometheus push gateway
# bash   -> entrypoint.sh uses bash features ([[ ]], $(( )))
# rclone -> uploads the dump to external storage
RUN apk --no-cache add \
    curl \
    bash \
    rclone

# Set the executable bit at copy time instead of an extra chmod layer.
COPY --chmod=755 entrypoint.sh /

WORKDIR /backup
# Exec form: the script runs as PID 1 and receives SIGTERM on stop
# (the previous shell form wrapped it in "/bin/sh -c", swallowing signals).
ENTRYPOINT ["/entrypoint.sh"]
20 changes: 20 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,20 @@
# pgdump-gcs

Small Docker container that creates a backup of a PostgreSQL database and uploads the dump to external storage using rclone.

## How to use
TO BE DONE ...

## Monitoring

A simple curl call pushes some basic parameters to a Prometheus push gateway.

### Metrics
* timestamp
* duration
* size

### Labels
* job = pgdump-gcs
* source_type = postgresql
* source_name = `${DB_NAME}`
35 changes: 35 additions & 0 deletions build/set_image_metadata
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
#! /usr/bin/env bash
# --------------------------------------------
# -- Should be used in Github Actions
# --
# -- Exports via $GITHUB_ENV:
# --   ANNOTATIONS : OCI image labels (multi-line value)
# --   TAGS        : the image tag (commit SHA)
# --------------------------------------------
set -euo pipefail
#
# --------------------------------------------
# -- To have a multi-line env var in github,
# -- we must define the EOF (a random delimiter
# -- prevents value injection via crafted content)
# --------------------------------------------
EOF=$(dd if=/dev/urandom bs=15 count=1 status=none | base64)
echo "ANNOTATIONS<<$EOF" >> "$GITHUB_ENV"

# Fixes vs. the first draft:
#  * created: OCI requires RFC 3339 date-time, so use UTC "%Y-%m-%dT%H:%M:%SZ"
#  * url: points at the project page (the Actions-run URL is not the
#    "find more information" URL the annotation is defined for)
#  * licenses: correct (plural) OCI key, SPDX expression as the value
#    -- TODO confirm GPL-3.0-only vs GPL-3.0-or-later against the LICENSE file
ANNOTATIONS=$(cat << EOF
org.opencontainers.image.created=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
org.opencontainers.image.authors=$GITHUB_ACTOR
org.opencontainers.image.url=$GITHUB_SERVER_URL/$GITHUB_REPOSITORY
org.opencontainers.image.documentation=$GITHUB_SERVER_URL/$GITHUB_REPOSITORY/blob/main/README.md
org.opencontainers.image.source=$GITHUB_SERVER_URL/$GITHUB_REPOSITORY
org.opencontainers.image.version=$GITHUB_SHA
org.opencontainers.image.revision=$GITHUB_SHA
org.opencontainers.image.vendor=$GITHUB_REPOSITORY_OWNER
org.opencontainers.image.licenses=GPL-3.0-only
org.opencontainers.image.title=$GITHUB_REPOSITORY
org.opencontainers.image.description=Backup databases using pg_dump and upload backups using rclone
EOF
)

echo "${ANNOTATIONS}" >> "${GITHUB_ENV}"
echo "$EOF" >> "$GITHUB_ENV"
# --------------------------------------------
# -- Set the image tag by commit sha
# --------------------------------------------
echo "TAGS=${GITHUB_SHA}" >> "${GITHUB_ENV}"
48 changes: 48 additions & 0 deletions entrypoint.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
#!/bin/bash
# Dump a PostgreSQL database, upload the compressed dump with rclone, and
# optionally push run metrics to a Prometheus push gateway.
#
# Required env: DB_NAME, DB_USERNAME_FILE, DB_PASSWORD_FILE, STORAGE_BUCKET
# Optional env: DB_HOST (default localhost), PROM_NAMESPACE (default
#               dboperator), PROMETHEUS_PUSH_GATEWAY (metrics skipped if unset)
set -e
set -o pipefail  # the dump is piped through gzip; fail when pg_dump fails

echo "Prepare configuration for script"
TIMESTAMP=$(date +%F_%R)
START_TIMESTAMP=$(date +%s)
BACKUP_FILE="${DB_NAME}-${TIMESTAMP}.sql.gz"
BACKUP_FILE_LATEST="${DB_NAME}-latest.sql.gz"
DB_HOST=${DB_HOST:-localhost}
DB_PASSWORD=$(cat "${DB_PASSWORD_FILE}")
DB_USER=$(cat "${DB_USERNAME_FILE}")
PROM_NAMESPACE=${PROM_NAMESPACE:-dboperator}

if [[ -z "${STORAGE_BUCKET}" ]]; then
  echo "Variable STORAGE_BUCKET must be set"
  exit 1
fi

if [[ -z "${DB_NAME}" ]]; then
  echo "Variable DB_NAME must be set"
  exit 1
fi

# create login credential file (mode 0400; libpq reads ~/.pgpass by default)
(umask 377 && echo "*:5432:*:${DB_USER}:${DB_PASSWORD}" >> ~/.pgpass)

echo "Start create backup"
# Plain-format dump piped through gzip so the file really is a .sql.gz
# (the previous "-F c" custom format did not match the file name and was
# already compressed by -Z 9, so gzipping it again would be pointless).
pg_dump -h "${DB_HOST}" -p 5432 -U "${DB_USER}" "${DB_NAME}" | gzip -9 > "${BACKUP_FILE}"
# Size in bytes: the exported metric is documented as bytes, but `du`
# reported 1K blocks.
BACKUP_SIZE=$(wc -c < "${BACKUP_FILE}")
echo "End backup"

## copy to destination
echo "Copy to gcs"
rclone copyto "./${BACKUP_FILE}" "storage://${STORAGE_BUCKET}/${DB_NAME}/${BACKUP_FILE}"
rclone copyto "./${BACKUP_FILE}" "storage://${STORAGE_BUCKET}/${DB_NAME}/${BACKUP_FILE_LATEST}"

END_TIMESTAMP=$(date +%s)
BACKUP_DURATION=$((END_TIMESTAMP - START_TIMESTAMP))
if [[ -n "${PROMETHEUS_PUSH_GATEWAY}" ]]; then
  echo "sending monitoring metrics to ${PROMETHEUS_PUSH_GATEWAY}"
  # job label "pgdump-gcs" matches the README (was "pgdump-rclone")
  cat <<EOF | curl -s --data-binary @- http://${PROMETHEUS_PUSH_GATEWAY}/metrics/job/pgdump-gcs/source_type/postgresql/source_name/${DB_NAME}
# TYPE ${PROM_NAMESPACE}_backup_timestamp counter
# HELP ${PROM_NAMESPACE}_backup_timestamp Timestamp of last backup run
${PROM_NAMESPACE}_backup_timestamp $END_TIMESTAMP
# TYPE ${PROM_NAMESPACE}_backup_duration gauge
# HELP ${PROM_NAMESPACE}_backup_duration Time the backup run take until finished
${PROM_NAMESPACE}_backup_duration $BACKUP_DURATION
# TYPE ${PROM_NAMESPACE}_backup_size gauge
# HELP ${PROM_NAMESPACE}_backup_size Backup Size in bytes
${PROM_NAMESPACE}_backup_size $BACKUP_SIZE
EOF
fi
9 changes: 9 additions & 0 deletions test/docker-compose.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
---
# Local PostgreSQL instance for testing the backup container.
# (The obsolete top-level "version" key was dropped; Compose v2 ignores it.)
services:
  postgres:
    image: postgres:18
    ports:
      # quoted so YAML cannot misparse the "host:container" mapping
      - "5432:5432"
    environment:
      # fixed: the closing quote was missing, making the file invalid YAML
      POSTGRES_PASSWORD: "test1234"

123 changes: 123 additions & 0 deletions test/init.sql
Original file line number Diff line number Diff line change
@@ -0,0 +1,123 @@
-- =========================================================
-- Random Test Data Generator for PostgreSQL Backup Testing
-- =========================================================

-- Optional: speed up bulk inserts
-- (synchronous_commit OFF is safe here: losing test data on crash is fine)
SET synchronous_commit = OFF;
SET maintenance_work_mem = '512MB';

-- =========================================================
-- Drop existing tables (safe reset)
-- NOTE: dropped child-first so the FK constraints do not block the drops
-- =========================================================

DROP TABLE IF EXISTS order_items;
DROP TABLE IF EXISTS orders;
DROP TABLE IF EXISTS products;
DROP TABLE IF EXISTS customers;

-- =========================================================
-- Create tables
-- =========================================================

CREATE TABLE customers (
id BIGSERIAL PRIMARY KEY,
first_name TEXT NOT NULL,
last_name TEXT NOT NULL,
email TEXT NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

CREATE TABLE products (
id BIGSERIAL PRIMARY KEY,
name TEXT NOT NULL,
price NUMERIC(10,2) NOT NULL,
created_at TIMESTAMP NOT NULL DEFAULT now()
);

CREATE TABLE orders (
id BIGSERIAL PRIMARY KEY,
customer_id BIGINT NOT NULL REFERENCES customers(id),
order_date TIMESTAMP NOT NULL,
status TEXT NOT NULL
);

CREATE TABLE order_items (
id BIGSERIAL PRIMARY KEY,
order_id BIGINT NOT NULL REFERENCES orders(id),
product_id BIGINT NOT NULL REFERENCES products(id),
quantity INT NOT NULL,
unit_price NUMERIC(10,2) NOT NULL
);

-- =========================================================
-- Insert customers
-- Adjust the number in generate_series for scale
-- =========================================================

INSERT INTO customers (first_name, last_name, email, created_at)
SELECT
'First' || gs,
'Last' || gs,
'user' || gs || '@example.com',
NOW() - (random() * interval '365 days')
FROM generate_series(1, 100000) AS gs;

-- =========================================================
-- Insert products
-- =========================================================

INSERT INTO products (name, price, created_at)
SELECT
'Product ' || gs,
ROUND((random() * 500 + 5)::numeric, 2),  -- price in [5.00, 505.00)
NOW() - (random() * interval '365 days')
FROM generate_series(1, 5000) AS gs;

-- =========================================================
-- Insert orders
-- =========================================================

INSERT INTO orders (customer_id, order_date, status)
SELECT
(random() * 99999 + 1)::BIGINT,  -- rounds into [1, 100000]: always a valid customers.id
NOW() - (random() * interval '365 days'),
(ARRAY['pending','shipped','delivered','cancelled'])[floor(random()*4)+1]
FROM generate_series(1, 300000);

-- =========================================================
-- Insert order items
-- Each order gets 1–5 items
-- =========================================================

INSERT INTO order_items (order_id, product_id, quantity, unit_price)
SELECT
o.id,
(random() * 4999 + 1)::BIGINT,  -- rounds into [1, 5000]: always a valid products.id
(random() * 4 + 1)::INT,
ROUND((random() * 500 + 5)::numeric, 2)
FROM orders o
-- LATERAL re-evaluates random() per order, so item counts vary per row
CROSS JOIN LATERAL generate_series(1, (random()*4 + 1)::INT);

-- =========================================================
-- Indexes (important for realistic backup size)
-- =========================================================

CREATE INDEX idx_orders_customer ON orders(customer_id);
CREATE INDEX idx_order_items_order ON order_items(order_id);
CREATE INDEX idx_order_items_product ON order_items(product_id);

-- =========================================================
-- Analyze for realistic planner stats
-- =========================================================

ANALYZE;

-- =========================================================
-- Summary
-- =========================================================

SELECT
(SELECT count(*) FROM customers) AS customers,
(SELECT count(*) FROM products) AS products,
(SELECT count(*) FROM orders) AS orders,
(SELECT count(*) FROM order_items) AS order_items;
9 changes: 9 additions & 0 deletions test/init_database.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
#!/usr/bin/env bash
# Seed the local test database (started via test/docker-compose.yaml) with
# the random data set from test/init.sql. Run from the repository root.
set -e

export PGHOST=localhost
export PGPORT=5432  # fixed: was misspelled "PHGPORT", so psql silently used the default port
export PGUSER=postgres
export PGPASSWORD=test1234
export PGDATABASE=postgres

psql -f ./test/init.sql