@@ -19,6 +19,28 @@ EBS_SIZE_MULTIPLIER=5
POSTGRES_VERSION_DEFAULT=10
AWS_BLOCK_DURATION=0

+#######################################
+# Attach an EBS volume containing the database backup (made with pg_basebackup)
+# Globals:
+#   DOCKER_MACHINE, AWS_REGION, DB_EBS_VOLUME_ID
+# Arguments:
+#   None
+# Returns:
+#   None
+#######################################
+function attach_db_ebs_drive() {
+  docker-machine ssh $DOCKER_MACHINE "sudo sh -c \"mkdir /home/backup\""
+  docker-machine ssh $DOCKER_MACHINE "wget http://s3.amazonaws.com/ec2metadata/ec2-metadata"
+  docker-machine ssh $DOCKER_MACHINE "chmod u+x ec2-metadata"
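+  # 'ec2-metadata -i' prints a line like "instance-id: i-0123456789abcdef0";
+  # stripping the first 13 characters ("instance-id: ") leaves the bare ID.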
+  local instance_id=$(docker-machine ssh $DOCKER_MACHINE ./ec2-metadata -i)
+  instance_id=${instance_id:13}
+  local attach_result=$(aws --region=$AWS_REGION ec2 attach-volume \
+    --device /dev/xvdc --volume-id $DB_EBS_VOLUME_ID --instance-id $instance_id)
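+  # attach-volume is asynchronous; give the volume a moment to attach before mounting.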
+  sleep 10
+  docker-machine ssh $DOCKER_MACHINE sudo mount /dev/xvdc /home/backup
+  dbg $(docker-machine ssh $DOCKER_MACHINE "sudo df -h /dev/xvdc")
+}
+
#######################################
# Print a help
# Globals:
@@ -150,6 +172,24 @@ function help() {
    - dump in \"custom\" format, made with 'pg_dump -Fc ..' ('*.pgdmp'),
    * sequence of SQL commands specified in the form of plain text.
+  \033[1m--db-name\033[22m (string)
+
+  Name of the database to be tested. The name 'test' is used internally,
+  so it is not a valid value.
+
+  \033[1m--db-ebs-volume-id\033[22m (string)
+
+  ID of an AWS EBS volume containing the database backup (made with pg_basebackup).
+
+  In the volume's root directory, the following two files are expected:
+    - base.tar.gz
+    - pg_xlog.tar.gz for Postgres versions up to 9.6, or pg_wal.tar.gz for Postgres 10+
+
+  The following command can be used to produce such files:
+    'pg_basebackup -U postgres -zPFt -Z 5 -D /path/to/ebs/volume/root'
+  Here '-Z 5' sets compression level 5; any value from 0 to 9 can be used.
+
+
  \033[1m--db-pgbench\033[22m (string)

  Initialize database for pgbench. Contains pgbench init arguments:
@@ -518,7 +558,9 @@ function check_cli_parameters() {
  [[ ! -z ${WORKLOAD_REAL+x} ]] && let workloads_count=$workloads_count+1
  [[ ! -z ${WORKLOAD_CUSTOM_SQL+x} ]] && let workloads_count=$workloads_count+1
  [[ ! -z ${WORKLOAD_PGBENCH+x} ]] && let workloads_count=$workloads_count+1
-  if [[ -z ${DB_PREPARED_SNAPSHOT+x} ]] && [[ -z ${DB_DUMP+x} ]] && [[ -z ${DB_PGBENCH+x} ]]; then
+
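+  # At least one source of the test database must be specified.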
+  if [[ -z ${DB_PREPARED_SNAPSHOT+x} ]] && [[ -z ${DB_DUMP+x} ]] \
+    && [[ -z ${DB_PGBENCH+x} ]] && [[ -z ${DB_EBS_VOLUME_ID+x} ]]; then
    err "ERROR: The object (database) is not defined."
    exit 1
  fi
@@ -1079,6 +1121,8 @@ while [ $# -gt 0 ]; do
      AWS_ZONE="$2"; shift 2 ;;
    --aws-block-duration )
      AWS_BLOCK_DURATION=$2; shift 2 ;;
+    --db-ebs-volume-id )
+      DB_EBS_VOLUME_ID=$2; shift 2 ;;

    --s3cfg-path )
      S3_CFG_PATH="$2"; shift 2 ;;
@@ -1157,24 +1201,29 @@ elif [[ "$RUN_ON" == "aws" ]]; then
  msg "To connect docker machine use:"
  msg "  docker-machine ssh $DOCKER_MACHINE"

+  if [[ "$RUN_ON" == "aws" ]] && [[ ! -z ${DB_EBS_VOLUME_ID+x} ]]; then
+    attach_db_ebs_drive
+  fi
+
  docker-machine ssh $DOCKER_MACHINE "sudo sh -c \"mkdir /home/storage\""
  if [[ "${AWS_EC2_TYPE:0:2}" == "i3" ]]; then
    msg "Using high-speed NVMe SSD disks"
    use_ec2_nvme_drive
  else
    msg "Use EBS volume"
    # Create a new volume and attach it for non-i3 instances if needed
-    if [ ! -z ${AWS_EBS_VOLUME_SIZE+x} ]; then
+    if [[ "$RUN_ON" == "aws" ]] && [[ ! -z ${AWS_EBS_VOLUME_SIZE+x} ]]; then
      use_ec2_ebs_drive $AWS_EBS_VOLUME_SIZE
    fi
  fi

  CONTAINER_HASH=$( \
    docker `docker-machine config $DOCKER_MACHINE` run \
      --name="pg_nancy_${CURRENT_TS}" \
+      --privileged \
      -v /home/ubuntu:/machine_home \
      -v /home/storage:/storage \
-      -v /home/basedump:/basedump \
+      -v /home/backup:/backup \
      -dit "postgresmen/postgres-with-stuff:pg${PG_VERSION}"
  )
  DOCKER_CONFIG=$(docker-machine config $DOCKER_MACHINE)
@@ -1190,6 +1239,61 @@ MACHINE_HOME="/machine_home/nancy_${CONTAINER_HASH}"
alias docker_exec='docker $DOCKER_CONFIG exec -i ${CONTAINER_HASH} '
get_system_characteristics

+#######################################
+# Extract the database backup from the attached EBS volume.
+# Globals:
+#   PG_VERSION
+# Arguments:
+#   None
+# Returns:
+#   None
+#######################################
+function cp_db_ebs_backup() {
+  # Here we assume that postgres is already stopped.
+  msg "Extract database backup from EBS volume"
+
+  local op_start_time=$(date +%s)
+  docker_exec bash -c "rm -rf /var/lib/postgresql/$PG_VERSION/main/*"
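+  # Unpack whichever backup archives are present; each command is a no-op
+  # ('|| true') when the corresponding file is absent.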
+  local result=$(docker_exec bash -c "([[ -f /backup/base.tar.gz ]] \
+    && tar -C /storage/postgresql/$PG_VERSION/main/ -xzvf /backup/base.tar.gz) || true")
+  result=$(docker_exec bash -c "([[ -f /backup/base.tar ]] \
+    && tar -C /storage/postgresql/$PG_VERSION/main/ -xvf /backup/base.tar) || true")
+
+  result=$(docker_exec bash -c "([[ -f /backup/pg_xlog.tar.gz ]] \
+    && tar -C /storage/postgresql/$PG_VERSION/main/pg_xlog -xzvf /backup/pg_xlog.tar.gz) || true")
+  result=$(docker_exec bash -c "([[ -f /backup/pg_xlog.tar ]] \
+    && tar -C /storage/postgresql/$PG_VERSION/main/pg_xlog -xvf /backup/pg_xlog.tar) || true")
+
+  result=$(docker_exec bash -c "([[ -f /backup/pg_wal.tar.gz ]] \
+    && tar -C /storage/postgresql/$PG_VERSION/main/pg_wal -xzvf /backup/pg_wal.tar.gz) || true")
+  result=$(docker_exec bash -c "([[ -f /backup/pg_wal.tar ]] \
+    && tar -C /storage/postgresql/$PG_VERSION/main/pg_wal -xvf /backup/pg_wal.tar) || true")
+
+  local end_time=$(date +%s)
+  local duration=$(echo $((end_time-op_start_time)) | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}')
+  msg "Time taken to extract database backup from EBS volume: $duration."
+
+  docker_exec bash -c "chown -R postgres:postgres /storage/postgresql/$PG_VERSION/main"
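+  # Generate the UTF-8 locales the restored cluster may have been initialized with.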
+ docker_exec bash -c " localedef -f UTF-8 -i en_US en_US.UTF-8"
1279
+ docker_exec bash -c " localedef -f UTF-8 -i ru_RU ru_RU.UTF-8"
1280
+ }
1281
+
+#######################################
+# Detach EBS volume
+# Globals:
+#   DOCKER_MACHINE, DB_EBS_VOLUME_ID, AWS_REGION
+# Arguments:
+#   None
+# Returns:
+#   None
+#######################################
+function detach_db_ebs_drive() {
+  docker_exec bash -c "umount /backup"
+  docker-machine ssh $DOCKER_MACHINE sudo umount /home/backup
+  local detach_result=$(aws --region=$AWS_REGION ec2 detach-volume --volume-id $DB_EBS_VOLUME_ID)
+}
+
docker_exec bash -c "mkdir $MACHINE_HOME && chmod a+w $MACHINE_HOME"
if [[ "$RUN_ON" == "aws" ]]; then
  docker-machine ssh $DOCKER_MACHINE "sudo chmod a+w /home/storage"
@@ -1201,6 +1305,12 @@ if [[ "$RUN_ON" == "aws" ]]; then
  sleep 2 # wait for postgres stopped
  docker_exec bash -c "sudo mv /var/lib/postgresql /storage/"
  docker_exec bash -c "ln -s /storage/postgresql /var/lib/postgresql"
+
+  if [[ ! -z ${DB_EBS_VOLUME_ID+x} ]]; then
+    cp_db_ebs_backup
+    detach_db_ebs_drive
+  fi
+
  docker_exec bash -c "sudo /etc/init.d/postgresql start"
  sleep 2 # wait for postgres started
fi
@@ -1608,6 +1718,12 @@ function collect_results() {
  msg "Time taken to generate and collect artifacts: $DURATION."
}

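+# The tool internally expects the database under test to be named 'test',
+# so a database restored from an EBS volume backup is renamed accordingly.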
+if [[ ! -z ${DB_EBS_VOLUME_ID+x} ]] && [[ ! "$DB_NAME" == "test" ]]; then
+  docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c 'drop database if exists test;'"
+  docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c 'alter database $DB_NAME rename to test;'"
+  DB_NAME=test
+fi
+
[ ! -z ${S3_CFG_PATH+x} ] && copy_file $S3_CFG_PATH \
  && docker_exec cp $MACHINE_HOME/.s3cfg /root/.s3cfg
[ ! -z ${DB_DUMP+x} ] && copy_file $DB_DUMP
@@ -1624,7 +1740,9 @@ sleep 2 # wait for postgres up&running

apply_commands_after_container_init
apply_sql_before_db_restore
-restore_dump
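+# When the database comes from an EBS volume backup, its data files are already
+# in place, so there is no dump to restore.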
+if [[ -z ${DB_EBS_VOLUME_ID+x} ]]; then
+  restore_dump
+fi
apply_sql_after_db_restore
docker_exec bash -c "psql -U postgres $DB_NAME -b -c 'create extension if not exists pg_stat_statements;' $VERBOSE_OUTPUT_REDIRECT"
pg_config_init