-
Notifications
You must be signed in to change notification settings - Fork 3
/
nightly.py
executable file
·115 lines (99 loc) · 4.38 KB
/
nightly.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
#!/usr/bin/python3
# vim:tabstop=2:shiftwidth=2:expandtab
# Nightly Backup and updates
from authlibs.templateCommon import *   # wildcard kept first so later explicit imports take precedence
from authlibs.init import authbackend_init
from authlibs import aclbackup

import argparse
import configparser
import glob
import os
import random
import subprocess
import sys  # explicit: sys.argv is used below; don't rely on the wildcard import
import urllib
from datetime import datetime,timedelta
from stat import *

import boto3
import paramiko
import requests
if __name__ == '__main__':
  # Nightly cron driver: snapshot the databases, refresh payment/waiver data
  # via the local API, back up ACLs, prune month-old files, and ship today's
  # backups to the NAS (SFTP) and to Amazon S3.
  parser = argparse.ArgumentParser()
  parser.add_argument("--verbose", "-v", help="verbosity", action="count")
  parser.add_argument("--debug", "-d", help="verbosity", action="count")
  parser.add_argument("--nopayment", help="Do not update payment and waiver data", action="store_true")
  parser.add_argument("--noupload", help="Do not send backups to AWS", action="store_true")
  parser.add_argument("--nosftp", help="Do not send backups to sftp", action="store_true")
  (args, extras) = parser.parse_known_args(sys.argv[1:])

  now = datetime.now()
  today = now.strftime("%Y-%m-%d")   # date stamp used in every backup filename

  # Read all settings from makeit.ini (relative to the CWD the cron job uses).
  Config = configparser.ConfigParser({})
  Config.read('makeit.ini')
  backup_dir = Config.get("backups", "db_backup_directory")
  aws_token = Config.get("backups", "aws_token")
  aws_secret_key = Config.get("backups", "aws_secret_key")
  aws_bucket = Config.get("backups", "aws_bucket")
  dbfile = Config.get("General", "Database")
  logdbfile = Config.get("General", "LogDatabase")
  acldir = Config.get("backups", "acl_backup_directory")
  localurl = Config.get("backups", "localurl")
  api_username = Config.get("backups", "api_username")
  api_password = Config.get("backups", "api_password")
  localmethod = Config.get("backups", "localmethod")
  sftp_user = Config.get("backups", "sftp_user")
  sftp_password = Config.get("backups", "sftp_password")
  sftp_server = Config.get("backups", "sftp_server")
  sftp_directory = Config.get("backups", "sftp_directory")

  # Take snapshot of databases.  sqlite3's online ".backup" gives a consistent
  # copy even while the app writes.  An argument list (no shell) keeps
  # config-supplied paths from being interpreted by a shell.
  if args.verbose: print ("* Snapshotting databases")
  subprocess.run(["sqlite3", dbfile, ".backup %s/%s-db.sq3" % (backup_dir, today)])
  subprocess.run(["sqlite3", logdbfile, ".backup %s/%s-logdb.sq3" % (backup_dir, today)])

  # Run nightly payment/waiver update through the local web API.
  if not args.nopayment:
    if args.verbose: print ("* Updating payment and waiver data")
    try:
      # Generous timeout: the nightly cron endpoint can legitimately be slow,
      # but a hung API must not stall the rest of the backup run forever.
      r = requests.get(localurl + "/api/cron/nightly",
                       auth=(api_username, api_password),
                       timeout=600)
      if r.status_code != 200:
        print ("WARNING - error in nightly cron API")
    except requests.RequestException as e:
      # Best-effort: warn and continue so backups still happen.
      print ("WARNING - error in nightly cron API:", e)

  # Make a backup of ACL lists for all resources, and generate reports of changes
  if args.verbose: print ("* Backing up ACLs")
  aclbackup.do_update()

  # Prune backup files older than 31 days from both backup directories.
  now = datetime.now()
  if args.verbose: print ("* Pruning old files")
  for d in (acldir, backup_dir):
    for f in glob.glob(os.path.join(d, "*")):
      st = os.stat(f)
      # NOTE(review): st_ctime is inode-change time on Unix, not creation time.
      # Adequate for daily snapshot files, but st_mtime may be what was meant.
      age = now - datetime.fromtimestamp(st.st_ctime)
      if age > timedelta(days=31):
        # File is too old - delete
        if args.verbose: print ("DELETING ", f, age)
        os.unlink(f)

  # Send today's backups to the NAS over SFTP (only files stamped with today's
  # date; older ones were shipped on previous nights).
  if not args.nosftp and localmethod == "sftp":
    with paramiko.Transport((sftp_server, 22)) as transport:
      transport.connect(None, sftp_user, sftp_password)
      with paramiko.SFTPClient.from_transport(transport) as sftp:
        for d in (acldir, backup_dir):
          for f in glob.glob(os.path.join(d, today + "*")):
            if args.verbose: print ("SFTP", f)
            fn = f.split("/")[-1]
            rfn = sftp_directory + "/" + fn
            print("SFTP PUT", f, rfn)
            sftp.put(f, rfn)

  # Send today's backups to Amazon S3 (and Glacier) storage.
  if not args.noupload:
    s3 = boto3.client('s3',
        aws_access_key_id=aws_token,
        aws_secret_access_key=aws_secret_key)
    if args.verbose: print ("* Sending backups to Amazon")
    for d in (acldir, backup_dir):
      for f in glob.glob(os.path.join(d, today + "*")):
        fn = f.split("/")[-1]
        # BUG FIX: original line was a bare tuple ("BACKUP",f,fn) with the
        # print() missing, so verbose logging of uploads silently did nothing.
        if args.verbose: print ("BACKUP", f, fn)
        s3.upload_file(f, aws_bucket, fn)