How to Backup Docker Data to a Different Location in Your LAN
Prerequisites
- Docker data located at /var/lib/docker/volumes.
- SSH access to the target backup system.
Passwordless SSH Login
First, set up passwordless SSH login:
ssh-keygen -t rsa
ssh-copy-id root@192.168.0.225
ssh root@192.168.0.225
Docker Volume Backup Script
Create a backup script named docker_backup.sh:
#!/bin/bash
# docker_backup.sh — archive the local Docker volume data, compress it,
# copy it to a remote host over SSH, and prune old archives on both ends.
# Requires: passwordless SSH to $remote_user@$remote_server, root privileges.
set -euo pipefail

# --- Configuration -----------------------------------------------------------
source_dir="/var/lib/docker/volumes"   # Docker volume data to back up
backup_dir="/opt/docker_backups"       # local staging directory for archives
keep_days=10                           # delete archives older than this many DAYS
current_datetime=$(date +"%Y-%m-%d_%H-%M-%S")
backup_filename="$current_datetime-backup.tar"
remote_user="root"
remote_server="192.168.0.225"
remote_dir="/opt/remote_docker_backups"

# --- Sanity checks -----------------------------------------------------------
if [ ! -d "$source_dir" ]; then
  echo "Source directory does not exist." >&2
  exit 1
fi
if [ ! -d "$backup_dir" ]; then
  echo "Backup directory does not exist." >&2
  exit 1
fi

# --- Stop containers ---------------------------------------------------------
# Remember which containers are RUNNING so we restart only those afterwards.
# (Restarting everything with `docker ps -a -q` would also start containers
# that were deliberately stopped before the backup.)
mapfile -t running_containers < <(docker ps -q)
if [ "${#running_containers[@]}" -gt 0 ]; then
  docker stop "${running_containers[@]}"
fi

# Make sure the containers come back up even if tar/gzip/scp fails under -e,
# so services are never left down by a failed backup run.
restart_containers() {
  if [ "${#running_containers[@]}" -gt 0 ]; then
    docker start "${running_containers[@]}"
  fi
}
trap restart_containers EXIT

# --- Create the backup -------------------------------------------------------
tar -cpf "$backup_dir/$backup_filename" "$source_dir"

# Data is safely archived; bring services back up now rather than at exit.
restart_containers
trap - EXIT

# --- Compress and transfer ---------------------------------------------------
gzip "$backup_dir/$backup_filename"
backup_filename="$current_datetime-backup.tar.gz"
scp "$backup_dir/$backup_filename" "$remote_user@$remote_server:$remote_dir"

# --- Prune archives older than $keep_days days, locally and remotely ---------
find "$backup_dir" -type f -name "*-backup.tar.gz" -mtime +"$keep_days" -exec rm -- {} \;
ssh "$remote_user@$remote_server" \
  "find '$remote_dir' -type f -name '*-backup.tar.gz' -mtime +$keep_days -exec rm -- {} \;"

echo "Backup was created: $backup_dir/$backup_filename and copied to $remote_server:$remote_dir."
Run the script:
sudo su
chmod +x docker_backup.sh
./docker_backup.sh
Ansible Alternative
Create an Ansible playbook named docker_backup.yml:
---
# Docker backup playbook: archive /var/lib/docker/volumes on the target host,
# push the compressed archive to a remote backup server, and prune old copies.
- name: Docker Backup Playbook
  hosts: rpidocker
  become: yes
  vars:
    source_dir: "/var/lib/docker/volumes"
    backup_dir: "/opt/docker_backups"
    # Archives older than this many DAYS are deleted (it is an age, not a count).
    keep_backups: 10
    current_datetime: "{{ lookup('pipe', 'date +%Y-%m-%d_%H-%M-%S') }}"
    backup_filename: "{{ current_datetime }}-backup.tar"
    remote_user: "root"
    remote_server: "192.168.0.225"
    remote_dir: "/opt/remote_docker_backups"
  tasks:
    - name: Check if source directory exists
      stat:
        path: "{{ source_dir }}"
      register: source_dir_stat

    - name: Fail if source directory does not exist
      fail:
        msg: "Source directory does not exist."
      when: not source_dir_stat.stat.exists

    - name: Check if backup directory exists
      stat:
        path: "{{ backup_dir }}"
      register: backup_dir_stat

    - name: Fail if backup directory does not exist
      fail:
        msg: "Backup directory does not exist."
      when: not backup_dir_stat.stat.exists

    # Record which containers are RUNNING so we restart only those afterwards,
    # instead of starting every container with `docker ps -a -q`.
    - name: Record running Docker containers
      command: docker ps -q
      register: running_containers
      changed_when: false

    # NOTE: the `command` module does not invoke a shell, so a literal
    # `docker stop $(docker ps -q)` would never expand the $(...) and the
    # task would silently do nothing. Build the argument list from the
    # registered output instead.
    - name: Stop running Docker containers
      command: docker stop {{ running_containers.stdout_lines | join(' ') }}
      when: running_containers.stdout_lines | length > 0

    - name: Create backup archive
      command: tar -cpf "{{ backup_dir }}/{{ backup_filename }}" "{{ source_dir }}"

    - name: Restart previously running Docker containers
      command: docker start {{ running_containers.stdout_lines | join(' ') }}
      when: running_containers.stdout_lines | length > 0

    - name: Compress the backup archive
      command: gzip "{{ backup_dir }}/{{ backup_filename }}"
      args:
        chdir: "{{ backup_dir }}"

    - name: Copy backup to remote server
      synchronize:
        src: "{{ backup_dir }}/{{ backup_filename }}.gz"
        dest: "{{ remote_user }}@{{ remote_server }}:{{ remote_dir }}"
        mode: push

    - name: Delete older backups locally
      shell: find "{{ backup_dir }}" -type f -name "*-backup.tar.gz" -mtime +{{ keep_backups }} -exec rm {} \;

    - name: Delete older backups on remote server
      shell: ssh "{{ remote_user }}@{{ remote_server }}" "find {{ remote_dir }} -type f -name '*-backup.tar.gz' -mtime +{{ keep_backups }} -exec rm {} \;"
Run the playbook:
ansible-playbook -i inventory.ini docker_backup.yml
Your inventory.ini file should look like this:
[rpidocker]
192.168.0.224 ansible_user=root ansible_ssh_private_key_file=/path/to/your/private/key
Conclusion
You now have two methods to back up your Docker data securely to another location within your LAN. Choose the one that best fits your needs.