@@ -19,6 +19,28 @@ EBS_SIZE_MULTIPLIER=5
1919POSTGRES_VERSION_DEFAULT=10
2020AWS_BLOCK_DURATION=0
2121
22+ # ######################################
23+ # Attach an EBS volume containing the database backup (made with pg_basebackup)
24+ # Globals:
25+ # DOCKER_MACHINE, AWS_REGION, DB_EBS_VOLUME_ID
26+ # Arguments:
27+ # None
28+ # Returns:
29+ # None
30+ # ######################################
function attach_db_ebs_drive() {
  # Mount point for the backup volume on the Docker machine host.
  docker-machine ssh "$DOCKER_MACHINE" "sudo sh -c \"mkdir -p /home/backup\""
  # Query the EC2 instance metadata service (link-local, HTTP by design)
  # directly instead of downloading the ec2-metadata helper script over
  # plain HTTP and slicing its output at a hard-coded offset.
  local instance_id
  instance_id=$(docker-machine ssh "$DOCKER_MACHINE" \
    "wget -q -O - http://169.254.169.254/latest/meta-data/instance-id")
  aws --region="$AWS_REGION" ec2 attach-volume \
    --device /dev/xvdc --volume-id "$DB_EBS_VOLUME_ID" --instance-id "$instance_id"
  # Wait for the attachment to complete instead of a fixed 10-second sleep.
  aws --region="$AWS_REGION" ec2 wait volume-in-use --volume-ids "$DB_EBS_VOLUME_ID"
  sleep 3 # give the kernel a moment to create the /dev/xvdc device node
  docker-machine ssh "$DOCKER_MACHINE" sudo mount /dev/xvdc /home/backup
  dbg "$(docker-machine ssh "$DOCKER_MACHINE" "sudo df -h /dev/xvdc")"
}
43+
2244# ######################################
2345# Print a help
2446# Globals:
@@ -150,6 +172,24 @@ function help() {
150172 - dump in \" custom\" format, made with 'pg_dump -Fc ..' ('*.pgdmp'),
151173 * sequence of SQL commands specified as in a form of plain text.
152174
175+ \033[1m--db-name\033[22m (string)
176+
177+      Name of the database to be tested. The name 'test' is used internally,
178+      so it is not a valid value.
179+
180+ \033[1m--db-ebs-volume-id\033[22m (string)
181+
182+ ID of an AWS EBS volume, containing the database backup (made with pg_basebackup).
183+
184+ In the volume's root directory, the following two files are expected:
185+ - base.tar.gz
186+ - pg_xlog.tar.gz for Postgres version up to 9.6 or pg_wal.tar.gz for Postgres 10+
187+
188+ The following command can be used to get such files:
189+ 'pg_basebackup -U postgres -zPFt -Z 5 -D /path/to/ebs/volume/root'
189+      Here '-Z 5' means that compression level 5 is used; you can choose any value from 0 to 9.
191+
192+
153193 \033[1m--db-pgbench\033[22m (string)
154194
155195 Initialize database for pgbench. Contains pgbench init arguments:
@@ -518,7 +558,9 @@ function check_cli_parameters() {
518558 [[ ! -z ${WORKLOAD_REAL+x} ]] && let workloads_count=$workloads_count +1
519559 [[ ! -z ${WORKLOAD_CUSTOM_SQL+x} ]] && let workloads_count=$workloads_count +1
520560 [[ ! -z ${WORKLOAD_PGBENCH+x} ]] && let workloads_count=$workloads_count +1
521- if [[ -z ${DB_PREPARED_SNAPSHOT+x} ]] && [[ -z ${DB_DUMP+x} ]] && [[ -z ${DB_PGBENCH+x} ]]; then
561+
562+ if [[ -z ${DB_PREPARED_SNAPSHOT+x} ]] && [[ -z ${DB_DUMP+x} ]] \
563+ && [[ -z ${DB_PGBENCH+x} ]] && [[ -z ${DB_EBS_VOLUME_ID+x} ]]; then
522564 err " ERROR: The object (database) is not defined."
523565 exit 1
524566 fi
@@ -1079,6 +1121,8 @@ while [ $# -gt 0 ]; do
10791121 AWS_ZONE=" $2 " ; shift 2 ;;
10801122 --aws-block-duration )
10811123 AWS_BLOCK_DURATION=$2 ; shift 2 ;;
1124+ --db-ebs-volume-id )
1125+ DB_EBS_VOLUME_ID=$2 ; shift 2;;
10821126
10831127 --s3cfg-path )
10841128 S3_CFG_PATH=" $2 " ; shift 2 ;;
@@ -1157,24 +1201,29 @@ elif [[ "$RUN_ON" == "aws" ]]; then
11571201 msg " To connect docker machine use:"
11581202 msg " docker-machine ssh $DOCKER_MACHINE "
11591203
1204+ if [[ " $RUN_ON " == " aws" ]] && [[ ! -z ${DB_EBS_VOLUME_ID+x} ]]; then
1205+ attach_db_ebs_drive
1206+ fi
1207+
11601208 docker-machine ssh $DOCKER_MACHINE " sudo sh -c \" mkdir /home/storage\" "
11611209 if [[ " ${AWS_EC2_TYPE: 0: 2} " == " i3" ]]; then
11621210 msg " Using high-speed NVMe SSD disks"
11631211 use_ec2_nvme_drive
11641212 else
11651213 msg " Use EBS volume"
11661214 # Create new volume and attach them for non i3 instances if needed
1167- if [ ! -z ${AWS_EBS_VOLUME_SIZE+x} ]; then
1215+    if [[ "$RUN_ON" == "aws" ]] && [[ ! -z ${AWS_EBS_VOLUME_SIZE+x} ]]; then
11681216 use_ec2_ebs_drive $AWS_EBS_VOLUME_SIZE
11691217 fi
11701218 fi
11711219
11721220 CONTAINER_HASH=$( \
11731221 docker ` docker-machine config $DOCKER_MACHINE ` run \
11741222 --name=" pg_nancy_${CURRENT_TS} " \
1223+ --privileged \
11751224 -v /home/ubuntu:/machine_home \
11761225 -v /home/storage:/storage \
1177- -v /home/basedump:/basedump \
1226+ -v /home/backup:/backup \
11781227 -dit " postgresmen/postgres-with-stuff:pg${PG_VERSION} "
11791228 )
11801229 DOCKER_CONFIG=$( docker-machine config $DOCKER_MACHINE )
@@ -1190,6 +1239,61 @@ MACHINE_HOME="/machine_home/nancy_${CONTAINER_HASH}"
11901239alias docker_exec=' docker $DOCKER_CONFIG exec -i ${CONTAINER_HASH} '
11911240get_system_characteristics
11921241
1242+ # ######################################
1243+ # Extract the database backup from the attached EBS volume.
1244+ # Globals:
1245+ # PG_VERSION
1246+ # Arguments:
1247+ # None
1248+ # Returns:
1249+ # None
1250+ # ######################################
function cp_db_ebs_backup() {
  # Postgres is assumed to be stopped at this point; /var/lib/postgresql is
  # already a symlink to /storage/postgresql (set up by the caller).
  msg "Extract database backup from EBS volume"

  local op_start_time
  op_start_time=$(date +%s)

  # Clear the data directory for the *configured* version only (the old
  # hard-coded '9.6' wipe was redundant and version-specific).
  docker_exec bash -c "rm -rf /var/lib/postgresql/$PG_VERSION/main/*"

  # Each archive is optional; extract whichever variants are present.
  docker_exec bash -c "([[ -f /backup/base.tar.gz ]] \
    && tar -C /storage/postgresql/$PG_VERSION/main/ -xzvf /backup/base.tar.gz) || true"
  docker_exec bash -c "([[ -f /backup/base.tar ]] \
    && tar -C /storage/postgresql/$PG_VERSION/main/ -xvf /backup/base.tar) || true"

  # WAL archive for Postgres <= 9.6 (directory is named pg_xlog).
  docker_exec bash -c "([[ -f /backup/pg_xlog.tar.gz ]] \
    && tar -C /storage/postgresql/$PG_VERSION/main/pg_xlog -xzvf /backup/pg_xlog.tar.gz) || true"
  docker_exec bash -c "([[ -f /backup/pg_xlog.tar ]] \
    && tar -C /storage/postgresql/$PG_VERSION/main/pg_xlog -xvf /backup/pg_xlog.tar) || true"

  # WAL archive for Postgres 10+ (directory is named pg_wal).
  # Bug fix: pg_wal.tar.gz was previously extracted into pg_xlog.
  docker_exec bash -c "([[ -f /backup/pg_wal.tar.gz ]] \
    && tar -C /storage/postgresql/$PG_VERSION/main/pg_wal -xzvf /backup/pg_wal.tar.gz) || true"
  docker_exec bash -c "([[ -f /backup/pg_wal.tar ]] \
    && tar -C /storage/postgresql/$PG_VERSION/main/pg_wal -xvf /backup/pg_wal.tar) || true"

  local end_time duration
  end_time=$(date +%s)
  duration=$(echo $((end_time - op_start_time)) \
    | awk '{printf "%d:%02d:%02d", $1/3600, ($1/60)%60, $1%60}')
  msg "Time taken to extract database backup from EBS volume: $duration."

  docker_exec bash -c "chown -R postgres:postgres /storage/postgresql/$PG_VERSION/main"
  # Generate locales so the restored cluster can start with its original
  # lc_* settings.
  docker_exec bash -c "localedef -f UTF-8 -i en_US en_US.UTF-8"
  docker_exec bash -c "localedef -f UTF-8 -i ru_RU ru_RU.UTF-8"
}
1281+
1282+ # ######################################
1283+ # Detach EBS volume
1284+ # Globals:
1285+ # DOCKER_MACHINE, DB_EBS_VOLUME_ID, AWS_REGION
1286+ # Arguments:
1287+ # None
1288+ # Returns:
1289+ # None
1290+ # ######################################
# NOTE(review): function name keeps the historical misspelling ("dettach")
# because callers elsewhere in the script use it.
function dettach_db_ebs_drive() {
  # Unmount inside the container first, then on the host, then detach the
  # volume at the AWS level.
  docker_exec bash -c "umount /backup"
  docker-machine ssh "$DOCKER_MACHINE" sudo umount /home/backup
  aws --region="$AWS_REGION" ec2 detach-volume --volume-id "$DB_EBS_VOLUME_ID"
}
1296+
11931297docker_exec bash -c " mkdir $MACHINE_HOME && chmod a+w $MACHINE_HOME "
11941298if [[ " $RUN_ON " == " aws" ]]; then
11951299 docker-machine ssh $DOCKER_MACHINE " sudo chmod a+w /home/storage"
@@ -1201,6 +1305,12 @@ if [[ "$RUN_ON" == "aws" ]]; then
12011305 sleep 2 # wait for postgres stopped
12021306 docker_exec bash -c " sudo mv /var/lib/postgresql /storage/"
12031307 docker_exec bash -c " ln -s /storage/postgresql /var/lib/postgresql"
1308+
1309+ if [[ ! -z ${DB_EBS_VOLUME_ID+x} ]]; then
1310+ cp_db_ebs_backup
1311+ dettach_db_ebs_drive
1312+ fi
1313+
12041314 docker_exec bash -c " sudo /etc/init.d/postgresql start"
12051315 sleep 2 # wait for postgres started
12061316fi
@@ -1608,6 +1718,12 @@ function collect_results() {
16081718 msg " Time taken to generate and collect artifacts: $DURATION ."
16091719}
16101720
# If the database was restored from the EBS backup under a custom name,
# rename it to the internal working name 'test' so the rest of the
# pipeline can operate on it.
if [[ ! -z ${DB_EBS_VOLUME_ID+x} ]] && [[ ! "$DB_NAME" == "test" ]]; then
  docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c 'drop database if exists test;'"
  # NOTE(review): $DB_NAME is interpolated into SQL unescaped — assumes a
  # sane identifier; confirm it is validated during CLI parsing.
  docker_exec bash -c "psql --set ON_ERROR_STOP=on -U postgres -c 'alter database $DB_NAME rename to test;'"
  DB_NAME=test
fi
1726+
16111727[ ! -z ${S3_CFG_PATH+x} ] && copy_file $S3_CFG_PATH \
16121728 && docker_exec cp $MACHINE_HOME /.s3cfg /root/.s3cfg
16131729[ ! -z ${DB_DUMP+x} ] && copy_file $DB_DUMP
@@ -1624,7 +1740,9 @@ sleep 2 # wait for postgres up&running
16241740
16251741apply_commands_after_container_init
16261742apply_sql_before_db_restore
1627- restore_dump
1743+ if [[ -z ${DB_EBS_VOLUME_ID+x} ]]; then
1744+ restore_dump
1745+ fi
16281746apply_sql_after_db_restore
16291747docker_exec bash -c " psql -U postgres $DB_NAME -b -c 'create extension if not exists pg_stat_statements;' $VERBOSE_OUTPUT_REDIRECT "
16301748pg_config_init
0 commit comments