commit
d5ff35e361
4 changed files with 249 additions and 0 deletions
@ -0,0 +1 @@ |
|||||
|
backup-manager |
@ -0,0 +1,33 @@ |
|||||
|
{ |
||||
|
"timeHour": 2, |
||||
|
"timeMinute": 0, |
||||
|
"workingDir": "/mnt/backups/", |
||||
|
"backupPeriods": [ |
||||
|
{ |
||||
|
"interval": 1, |
||||
|
"count": 3 |
||||
|
}, |
||||
|
{ |
||||
|
"interval": 30, |
||||
|
"count": 2 |
||||
|
}, |
||||
|
{ |
||||
|
"interval": 180, |
||||
|
"count": 1 |
||||
|
} |
||||
|
], |
||||
|
"hosts": [ |
||||
|
{ |
||||
|
"name": "example", |
||||
|
"ipAddr": "192.168.1.100", |
||||
|
"port": 2222, |
||||
|
"username": "root", |
||||
|
"privKey": "/id_rsa", |
||||
|
"remoteDir": "/opt/remote/dir", |
||||
|
"exclude": [ |
||||
|
"/opt/remote/dir/exclude1", |
||||
|
"/opt/remote/dir/exclude2" |
||||
|
] |
||||
|
} |
||||
|
] |
||||
|
} |
@ -0,0 +1,3 @@ |
|||||
|
module git.samuelpua.com/backup-manager |
||||
|
|
||||
|
go 1.18 |
@ -0,0 +1,212 @@ |
|||||
|
package main |
||||
|
|
||||
|
import ( |
||||
|
"bytes" |
||||
|
"encoding/json" |
||||
|
"fmt" |
||||
|
"io/ioutil" |
||||
|
"log" |
||||
|
"os" |
||||
|
"os/exec" |
||||
|
"path/filepath" |
||||
|
"strings" |
||||
|
"time" |
||||
|
) |
||||
|
|
||||
|
var (
	// configFile is the name of the JSON configuration file; main resolves
	// it relative to the executable's directory.
	configFile = "config.json"
)
||||
|
|
||||
|
// Config -- General config
//
// Top-level configuration loaded from config.json: the daily run time,
// the local backup destination, the retention periods and the hosts to
// back up.
type Config struct {
	TimeHour      int            `json:"timeHour"`      // hour of day the daily run fires (used by getNDay)
	TimeMinute    int            `json:"timeMinute"`    // minute of the hour the daily run fires
	WorkingDir    string         `json:"workingDir"`    // local directory backups and backup.log are written to
	BackupPeriods []BackupPeriod `json:"backupPeriods"` // retention slots; main walks them in order, cumulatively
	Hosts         []Host         `json:"hosts"`         // remote hosts to back up each run
}
||||
|
|
||||
|
// Host -- Struct for each host in the config
//
// Connection and path details for one remote machine that is backed up
// via rsync over ssh.
type Host struct {
	Name      string   `json:"name"`      // label; prefixes the dated local backup directory names
	IPAddr    string   `json:"ipAddr"`    // ssh target address
	Port      int      `json:"port"`      // ssh port (rsync -e "ssh ... -p PORT")
	Username  string   `json:"username"`  // ssh user
	PrivKey   string   `json:"privKey"`   // path to the ssh private key (ssh -i)
	RemoteDir string   `json:"remoteDir"` // remote directory to pull
	Exclude   []string `json:"exclude"`   // paths appended as rsync --exclude arguments
}
||||
|
|
||||
|
// BackupPeriod -- Struct for individual backup period
//
// One retention slot: main steps back Interval days (cumulatively across
// the slice) and considers pruning the backup found on that date.
type BackupPeriod struct {
	Interval int `json:"interval"` // days stepped back from the previous slot's date
	Count    int `json:"count"`    // NOTE(review): not referenced by the visible code — confirm intended use
}
||||
|
|
||||
|
func loadConfig(configFile string) Config { |
||||
|
var config Config |
||||
|
|
||||
|
fileBytes, err := ioutil.ReadFile(configFile) |
||||
|
if err != nil { |
||||
|
log.Println(err) |
||||
|
} |
||||
|
|
||||
|
err = json.Unmarshal(fileBytes, &config) |
||||
|
if err != nil { |
||||
|
log.Println(err) |
||||
|
} |
||||
|
|
||||
|
return config |
||||
|
} |
||||
|
|
||||
|
// runCommand executes cmdline through "/bin/bash -c", capturing stdout
// and stderr, and logs the command line, the resulting error (nil on
// success) and both captured streams.
func runCommand(cmdline string) {
	var errBuff bytes.Buffer
	var outBuff bytes.Buffer
	cmd := exec.Command("/bin/bash", "-c", cmdline)
	cmd.Stderr = &errBuff
	cmd.Stdout = &outBuff
	// BUG FIX: the original passed cmdline as the Printf format string;
	// any '%' in a path or command would be misinterpreted as a verb.
	log.Printf("Executing: %s", cmdline)
	err := cmd.Run()
	log.Printf("Command finished with error: %v", err)
	log.Printf("Std Out: %s", outBuff.String())
	log.Printf("Std Err: %s", errBuff.String())
}
||||
|
|
||||
|
// getNDay returns the instant at hour:min on today's calendar date
// shifted by offset days, expressed in a fixed UTC+8 zone.
// NOTE(review): the zone is hard-coded to UTC+8 while the rest of the
// program uses time.Now()'s location — confirm this is intentional.
func getNDay(hour int, min int, offset int) time.Time {
	now := time.Now()
	tz := time.FixedZone("UTC+8", 8*60*60)
	return time.Date(now.Year(), now.Month(), now.Day()+offset, hour, min, 0, 0, tz)
}
||||
|
|
||||
|
// getDir scans path for entries named "<filter>-YYYY-MM-DD" and returns
// the matching names alongside their parsed dates (index-aligned).
// Entries whose suffix is not a valid date are logged and skipped; an
// unreadable path is fatal.
func getDir(path string, filter string) ([]string, []time.Time) {
	var names []string
	var dates []time.Time

	files, err := ioutil.ReadDir(path)
	if err != nil {
		log.Fatal(err)
	}

	for _, file := range files {
		name := file.Name()
		// BUG FIX: the original used strings.Contains and then sliced
		// name[len(filter)+1:], which panics when a name equals the filter
		// exactly. Requiring a longer, prefixed name guards the slice; only
		// prefixed names could ever yield a parseable date anyway.
		if !strings.HasPrefix(name, filter) || len(name) <= len(filter) {
			continue
		}
		workingDateStr := name[len(filter)+1:]
		workingDate, err := time.Parse("2006-01-02", workingDateStr)
		if err != nil {
			log.Println(err)
			continue
		}
		names = append(names, name)
		dates = append(dates, workingDate)
	}

	return names, dates
}
||||
|
|
||||
|
func backupToday(timeToday time.Time, config Config, host Host) { |
||||
|
dirDateStr := fmt.Sprintf("%04d-%02d-%02d", timeToday.Year(), timeToday.Month(), timeToday.Day()) |
||||
|
dirCommand := fmt.Sprintf("mkdir \"%s/%s-%s\"", config.WorkingDir, host.Name, dirDateStr) |
||||
|
runCommand(dirCommand) |
||||
|
|
||||
|
log.Printf("Current Host: %s", host.Name) |
||||
|
dateStr := fmt.Sprintf("%04d-%02d-%02d", timeToday.Year(), timeToday.Month(), timeToday.Day()) |
||||
|
command := fmt.Sprintf("rsync -azvhe \"ssh -i %s -p %d\" %s@%s:\"%s\" \"%s/%s-%s/\"", |
||||
|
host.PrivKey, host.Port, host.Username, host.IPAddr, host.RemoteDir, config.WorkingDir, host.Name, dateStr) |
||||
|
|
||||
|
// Excluding excluded directory
|
||||
|
for _, exclude := range host.Exclude { |
||||
|
command += fmt.Sprintf(" --exclude \"%s\"", exclude) |
||||
|
} |
||||
|
|
||||
|
runCommand(command) |
||||
|
} |
||||
|
|
||||
|
func findBackupDate(dates []time.Time, findDate time.Time) int { |
||||
|
for i, date := range dates { |
||||
|
if date == findDate { |
||||
|
log.Printf("%v found", date) |
||||
|
return i |
||||
|
} |
||||
|
} |
||||
|
log.Printf("%v not found", findDate) |
||||
|
return 0 |
||||
|
} |
||||
|
|
||||
|
func enumerateDates(dates []time.Time, currDate time.Time, searchDayNum int) bool { |
||||
|
for i := 0; i < searchDayNum; i++ { |
||||
|
for _, date := range dates { |
||||
|
if currDate.Add(-time.Hour*time.Duration(24*i)) == date { |
||||
|
return true |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
return false |
||||
|
} |
||||
|
|
||||
|
func main() { |
||||
|
ex, err := os.Executable() |
||||
|
if err != nil { |
||||
|
panic(err) |
||||
|
} |
||||
|
exPath := filepath.Dir(ex) |
||||
|
|
||||
|
config := loadConfig(exPath + "/" + configFile) |
||||
|
logFile, err := os.OpenFile(config.WorkingDir+"backup.log", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) |
||||
|
if err != nil { |
||||
|
log.Println(err) |
||||
|
} |
||||
|
defer logFile.Close() |
||||
|
log.SetOutput(logFile) |
||||
|
for { |
||||
|
timeNow := time.Now() |
||||
|
timeToday := time.Date(timeNow.Year(), timeNow.Month(), timeNow.Day(), 0, 0, 0, 0, timeNow.Location()) |
||||
|
log.Printf("New Date: %v", timeToday) |
||||
|
for _, host := range config.Hosts { |
||||
|
log.Printf("New Host: %s", host.Name) |
||||
|
// creating backup for today
|
||||
|
backupToday(timeToday, config, host) |
||||
|
|
||||
|
// go through each interval for deletion
|
||||
|
workingTime := timeToday |
||||
|
filePaths, fileDates := getDir(config.WorkingDir, host.Name) |
||||
|
for i, backupPeriod := range config.BackupPeriods { |
||||
|
// get to the time
|
||||
|
if err != nil { |
||||
|
fmt.Println(err) |
||||
|
} |
||||
|
workingTime = workingTime.Add(-time.Duration(backupPeriod.Interval) * 24 * time.Hour) |
||||
|
|
||||
|
// check if file exists
|
||||
|
workingFileCount := findBackupDate(fileDates, workingTime) |
||||
|
|
||||
|
// file exist...
|
||||
|
if workingFileCount != 0 { |
||||
|
// if there's a copy out there within interval...
|
||||
|
if i == len(config.BackupPeriods) || enumerateDates(fileDates, workingTime, config.BackupPeriods[i+1].Interval) { |
||||
|
// delete it
|
||||
|
removeCommand := fmt.Sprintf("rm -rf \"%s\"", filePaths[workingFileCount]) |
||||
|
runCommand(removeCommand) |
||||
|
} |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
// delete the rest
|
||||
|
for i, date := range fileDates { |
||||
|
if date.After(workingTime) { |
||||
|
removeCommand := fmt.Sprintf("rm -rf \"%s\"", filePaths[i]) |
||||
|
runCommand(removeCommand) |
||||
|
} |
||||
|
} |
||||
|
|
||||
|
// deleteDate := getNDay(0, 0, -1 * config.MaxRecords)
|
||||
|
// deleteDateStr := fmt.Sprintf("%04d-%02d-%02d", deleteDate.Year(), deleteDate.Month(), deleteDate.Day())
|
||||
|
// removeCommand := fmt.Sprintf("rm -rf \"%s/%s-%s\"", config.WorkingDir, host.Name, deleteDateStr)
|
||||
|
// runCommand(removeCommand)
|
||||
|
} |
||||
|
nextDay := getNDay(config.TimeHour, config.TimeMinute, 1) |
||||
|
log.Printf("Sleeping until: %v", nextDay) |
||||
|
time.Sleep(time.Until(nextDay)) |
||||
|
} |
||||
|
} |
Loading…
Reference in new issue