forked from cosmin/elasticpony
-
Notifications
You must be signed in to change notification settings - Fork 0
/
migrate_to_s3.sh
executable file
·101 lines (84 loc) · 2.2 KB
/
migrate_to_s3.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
#!/usr/bin/env bash
# Script name as invoked; used in error and usage messages. Quoted in case
# the invocation path contains spaces.
SCRIPT_NAME="$0"
usage() {
  # Print the option summary to stdout (same text, same order as before).
  printf '%s\n' \
    "Usage: $SCRIPT_NAME OPTIONS" \
    "Required:" \
    "-b BUCKET_NAME the bucket name" \
    "-l LOCATION the local path to upload (recursively)" \
    "Credentials:" \
    "-k ACCESS_KEY the AWS access key. From your Security Credentials" \
    "-s AWS_SECRET_KEY the AWS secret key. From your Security Credentials"
}
die() {
  # Print an error message to stderr, show usage, and exit with the given code.
  #   $1 - message text
  #   $2 - numeric exit code
  local msg="$1"
  local code="$2"
  printf '%s: %s\n' "$SCRIPT_NAME" "$msg" >&2
  usage
  exit "$code"
}
# Parse command-line options. Leading ':' in the optstring silences getopts'
# own diagnostics so we can report both unknown options and missing arguments
# ourselves via die().
while getopts ":hb:l:k:s:" opt; do
  case "$opt" in
    h)
      usage
      exit 0
      ;;
    b)
      export BUCKET_NAME="$OPTARG"
      ;;
    l)
      export LOCATION="$OPTARG"
      ;;
    k)
      export ACCESS_KEY="$OPTARG"
      ;;
    s)
      # Stash the secret in a 0600 file (chmod BEFORE writing the value) so
      # s3-put can read it without the key showing up on a command line.
      # printf '%s' replaces non-portable 'echo -n'.
      SECRET_KEY="$OPTARG"
      touch ~/.aws.secret.key
      chmod 600 ~/.aws.secret.key
      printf '%s' "$SECRET_KEY" > ~/.aws.secret.key
      SECRET_KEY_FILE="$HOME/.aws.secret.key"
      ;;
    :)
      # FIX: original had no missing-argument arm; getopts sets OPTARG to the
      # option letter in this case.
      die "option -$OPTARG requires an argument" 10
      ;;
    \?)
      # FIX: original printed "$opt", which is always '?' here; OPTARG holds
      # the actual offending option character.
      die "unknown option -$OPTARG" 10
      ;;
  esac
done
# Validate that all required inputs were supplied before doing any work.
if [ -z "$BUCKET_NAME" ]; then
  die "BUCKET_NAME is required" 1
fi
if [ -z "$LOCATION" ]; then
  die "LOCATION is required" 2
else
  if [ ! -d "$LOCATION" ]; then
    die "LOCATION must point to a directory that exists" 3
  else
    # Canonicalize to an absolute path; quote $LOCATION so paths with
    # spaces survive the cd.
    LOCATION_PATH=$(cd "$LOCATION" && pwd) || die "cannot resolve LOCATION" 3
    export LOCATION_PATH
  fi
fi
if [ -z "$ACCESS_KEY" ]; then
  die "ACCESS_KEY is required" 4
fi
# BUG FIX: the original tested the literal string "SECRET_KEY" (missing '$'),
# which is never empty, so a missing -s option was silently accepted. Test the
# variable that later code actually consumes.
if [ -z "$SECRET_KEY_FILE" ]; then
  die "SECRET_KEY is required" 5
fi
extract_s3_bash() {
  # Download and unpack the s3-bash helper tools into ./s3-bash.
  # FIX: the original ignored failures here and went on to attempt uploads
  # with no ./s3-bash/s3-put present; abort early instead.
  wget https://s3.amazonaws.com/cloudinitfiles/s3-bash.tgz || die "failed to download s3-bash" 6
  tar zxvf s3-bash.tgz || die "failed to extract s3-bash" 7
}
get_mime() {
  # Guess the MIME type of file path $1; fall back to "text/plain".
  # FIXES vs original:
  #  - used a Python 2 'print' statement, which is a SyntaxError under python3;
  #  - interpolated the filename into the -c program text, so quotes/backslashes
  #    in a path broke (or injected) the Python code — pass it via argv instead.
  local interp
  interp=$(command -v python3 || command -v python) || { echo "text/plain"; return; }
  "$interp" -c 'import mimetypes, sys; print(mimetypes.guess_type(sys.argv[1])[0] or "text/plain")' "$1"
}
upload_files() {
  # Recursively upload every file under LOCATION_PATH into BUCKET_NAME with a
  # public-read ACL, preserving each file's relative path as its S3 key.
  # Reads globals: LOCATION_PATH, ACCESS_KEY, SECRET_KEY_FILE, BUCKET_NAME.
  local header_file
  header_file=$(mktemp -t xzawsXXXXXX) || die "mktemp failed" 8
  echo "X-Amz-Acl: public-read" > "$header_file"
  # FIX: the original iterated 'for f in $(find … | sed …)', which word-splits
  # on whitespace; use NUL-delimited find + read so any path survives intact.
  local full_path rel_path mime
  while IFS= read -r -d '' full_path; do
    rel_path="${full_path#"$LOCATION_PATH"/}"
    mime=$(get_mime "$full_path")
    ./s3-bash/s3-put -a "$header_file" -k "$ACCESS_KEY" -s "$SECRET_KEY_FILE" -T "$full_path" -c "$mime" "/$BUCKET_NAME/$rel_path"
  done < <(find "$LOCATION_PATH" -type f -print0)
  rm "$header_file"
}
# Entry point: fetch the s3-bash helper tools, then upload the local tree.
extract_s3_bash
upload_files