backup.sh
#!/bin/bash
#
# Rclone script to archive Jeff Geerling's most important data to an Amazon S3
# Glacier Deep Archive-backed bucket.
#
# Basic usage:
#   ./backup.sh
#
# Script requires valid credentials - set up with `rclone config`.
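#
# For reference, a Deep Archive-backed S3 remote in rclone's config file
# (~/.config/rclone/rclone.conf) looks roughly like the stanza below, with
# placeholder values rather than real credentials:
#
#   [personal]
#   type = s3
#   provider = AWS
#   access_key_id = AKIAXXXXXXXXXXXXXXXX
#   secret_access_key = xxxxxxxxxxxxxxxxxxxxxxxxxxxx
#   region = us-east-1
#   storage_class = DEEP_ARCHIVE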

RCLONE=/usr/local/bin/rclone

# Check if rclone is installed.
if ! [ -x "$(command -v $RCLONE)" ]; then
  echo 'Error: rclone is not installed.' >&2
  exit 1
fi

# Don't run if an instance of rclone is already running.
if ps -ef | grep -v grep | grep rclone > /dev/null ; then
  exit 0
fi

# Variables.
rclone_remote=personal
rclone_s3_bucket=jg-archive
# Passed to rclone's --bwlimit; 100M caps transfers at 100 MiB/s.
bandwidth_limit=100M

# Make sure bucket exists.
$RCLONE mkdir $rclone_remote:$rclone_s3_bucket

# List of directories to clone. MUST be absolute path, beginning with /.
declare -a dirs=(
  "/Volumes/Brachiosaur/App Data"
  "/Volumes/Brachiosaur/Presentation Recordings"
  "/Volumes/Brachiosaur/Timelapses"
  "/Volumes/Brachiosaur/Old School Files"
  "/Volumes/Brachiosaur/Old Websites"
  "/Volumes/Media/Movies"
  "/Volumes/Media/TV Shows"
)
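
# With the values above, a directory like "/Volumes/Media/TV Shows" lands at
# personal:jg-archive/Volumes/Media/TV_Shows once the loop below swaps spaces
# for underscores.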

# Clone each directory. Add `--progress` for nicer (but more verbose) output.
for i in "${dirs[@]}"
do
  echo "Syncing Directory: $i"
  # Swap spaces for underscores to build the destination path.
  despaced="${i// /_}"
  $RCLONE sync "$i" "$rclone_remote:$rclone_s3_bucket$despaced" --skip-links --bwlimit $bandwidth_limit
done
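
# To run this unattended, one option (not part of the original script) is a
# nightly cron job; an illustrative crontab entry:
#
#   0 3 * * * /path/to/backup.sh >> $HOME/backup-rclone.log 2>&1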