#!/usr/bin/env bash
set -e

workingDirectory="/mnt/changesets"
mkdir -p "$workingDirectory"
CHANGESETS_REPLICATION_FOLDER="replication/changesets"
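# Environment variables used below (assumed to be provided by the deployment;
# only the variables for the selected CLOUDPROVIDER are needed):
#   CLOUDPROVIDER         - "aws", "gcp", or "azure"
#   AWS_S3_BUCKET         - S3 target, e.g. "s3://<bucket>" (aws)
#   GCP_STORAGE_BUCKET    - GCS target, e.g. "gs://<bucket>" (gcp)
#   AZURE_CONTAINER_NAME  - blob container name (azure)
#   POSTGRES_HOST, POSTGRES_DB, POSTGRES_USER, POSTGRES_PASSWORD - changesets database connection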
# Create the config file read by replicate_changesets.rb
echo "state_file: $workingDirectory/state.yaml
db: host=$POSTGRES_HOST dbname=$POSTGRES_DB user=$POSTGRES_USER password=$POSTGRES_PASSWORD
data_dir: $workingDirectory/" >/config.yaml
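# For illustration, with POSTGRES_HOST=db, POSTGRES_DB=changesets, POSTGRES_USER=osm
# and POSTGRES_PASSWORD=secret (example values only), /config.yaml would contain:
#   state_file: /mnt/changesets/state.yaml
#   db: host=db dbname=changesets user=osm password=secret
#   data_dir: /mnt/changesets/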
# If state.yaml is missing locally, try to download it from the configured cloud provider.
if [ ! -f "$workingDirectory/state.yaml" ]; then
  echo "File $workingDirectory/state.yaml does not exist in local storage"

  if [ "$CLOUDPROVIDER" == "aws" ]; then
    if aws s3 ls "$AWS_S3_BUCKET/$CHANGESETS_REPLICATION_FOLDER/state.yaml" >/dev/null 2>&1; then
      echo "File exists, downloading from AWS - $AWS_S3_BUCKET"
      aws s3 cp "$AWS_S3_BUCKET/$CHANGESETS_REPLICATION_FOLDER/state.yaml" "$workingDirectory/state.yaml"
    fi
  elif [ "$CLOUDPROVIDER" == "gcp" ]; then
    if gsutil -q stat "$GCP_STORAGE_BUCKET/$CHANGESETS_REPLICATION_FOLDER/state.yaml"; then
      echo "File exists, downloading from GCP - $GCP_STORAGE_BUCKET"
      gsutil cp "$GCP_STORAGE_BUCKET/$CHANGESETS_REPLICATION_FOLDER/state.yaml" "$workingDirectory/state.yaml"
    fi
  elif [ "$CLOUDPROVIDER" == "azure" ]; then
    state_file_exists=$(az storage blob exists --container-name "$AZURE_CONTAINER_NAME" --name "$CHANGESETS_REPLICATION_FOLDER/state.yaml" --query "exists" --output tsv)
    if [ "$state_file_exists" == "true" ]; then
      echo "File exists, downloading from Azure - $AZURE_CONTAINER_NAME"
      az storage blob download --container-name "$AZURE_CONTAINER_NAME" --name "$CHANGESETS_REPLICATION_FOLDER/state.yaml" --file "$workingDirectory/state.yaml"
    fi
  fi
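  # If no copy could be fetched (or CLOUDPROVIDER is unset or unrecognised), bootstrap a
  # fresh state file; "sequence: 0" is assumed to make replicate_changesets.rb begin at
  # the first changeset rather than resume from a previous run.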
  if [ ! -f "$workingDirectory/state.yaml" ]; then
    echo "sequence: 0" >"$workingDirectory/state.yaml"
  fi
fi

# Generate the replication files and upload them to the configured cloud provider
generateReplication() {
  while true; do
    # Run the changeset replication script against the config generated above
    ruby replicate_changesets.rb /config.yaml
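    # replicate_changesets.rb is expected to write new replication files under data_dir
    # and advance state.yaml; the loop below then uploads whatever was created in the
    # last minute, without assuming a particular file layout.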
    # Loop through files created in the last minute
    for local_file in $(find "$workingDirectory/" -cmin -1); do
      if [ -f "$local_file" ]; then
        # Construct the cloud path by stripping the local working directory prefix
        cloud_file="$CHANGESETS_REPLICATION_FOLDER/${local_file#*$workingDirectory/}"
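        # For example (file names are illustrative), /mnt/changesets/000/000/123.osm.gz
        # would be uploaded as replication/changesets/000/000/123.osm.gz.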
        # Log file transfer
        echo "$(date +%F_%H:%M:%S): Copying file $local_file to $cloud_file"

        # Handle the configured cloud provider
        case "$CLOUDPROVIDER" in
          "aws")
            aws s3 cp "$local_file" "$AWS_S3_BUCKET/$cloud_file" --acl public-read
            ;;
          "gcp")
            gsutil cp -a public-read "$local_file" "$GCP_STORAGE_BUCKET/$cloud_file"
            ;;
          "azure")
            az storage blob upload \
              --container-name "$AZURE_CONTAINER_NAME" \
              --file "$local_file" \
              --name "$cloud_file" \
              --output none
            ;;
          *)
            echo "Unknown cloud provider: $CLOUDPROVIDER"
            ;;
        esac
      fi
    done

    # Sleep for 60 seconds before the next iteration
    sleep 60s
  done
}

# Call the function to start the replication process
generateReplication
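# Example invocation (illustrative only; the script name and all values are placeholders):
#   CLOUDPROVIDER=aws AWS_S3_BUCKET=s3://my-changeset-bucket \
#     POSTGRES_HOST=db POSTGRES_DB=changesets POSTGRES_USER=osm POSTGRES_PASSWORD=secret \
#     ./start.sh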