Merging 'dev/refactory-aws' into stable/0.5
carmat88 committed Apr 4, 2018
2 parents 287480d + 698ca09 commit 2a73b0f
Showing 4 changed files with 23 additions and 23 deletions.
22 changes: 9 additions & 13 deletions .travis.yml
@@ -44,9 +44,9 @@ before_install:
https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip
> /tmp/terraform.zip
- unzip /tmp/terraform.zip -d /tmp/
- sudo pip install --upgrade pip
- sudo pip install yamllint=="${YAMLLINT_VERSION}"
- sudo apt-get -qq install shellcheck
- sudo pip install --upgrade -qq pip
- sudo pip install -qq yamllint=="${YAMLLINT_VERSION}"
- sudo apt-get -qq install shellcheck -y

# Check Terraform
- /tmp/terraform fmt bin/*.tf
@@ -99,6 +99,8 @@ install:

before_script:
# Check whether this build was triggered by our cron job for security-updates
# Note: in a yml file it does not seem possible to have a space
# before a global variable within an echo string
- echo "TRAVIS_EVENT_TYPE is:$TRAVIS_EVENT_TYPE"
- >
if [ "$TRAVIS_EVENT_TYPE" = 'cron' ]; then
@@ -119,14 +121,8 @@ before_script:

# GCE login
- echo $GCE_KEY > ./bin/account_file.json

- >
gcloud auth activate-service-account
[email protected]
--key-file=$GCE_ACCOUNT_FILE_PATH
- >
gcloud auth login [email protected]
--project phenomenal-1145 -q
- gcloud auth activate-service-account [email protected] --key-file=$GCE_ACCOUNT_FILE_PATH
- gcloud auth login [email protected] --project phenomenal-1145 -q

script:
# Finally building the image with Packer
@@ -146,10 +142,10 @@ after_success:
fi
# The script below used to be run as a post-processor for the OpenStack builder.
# In order to avoid running Terraform within Packer, hence getting a cumbersome and tedious log output. travis_retry
# It now runs here to avoid running Terraform within Packer, which produced a cumbersome and tedious log output.
- >
if [ $HOST_CLOUD = 'openstack' ]; then
bin/os_pp.sh
travis_retry bin/os_pp.sh
fi
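
For context, the travis_retry wrapper introduced here is Travis CI's built-in shell helper that re-runs a failing command a few times before letting the build fail, which makes the occasionally flaky OpenStack post-processing step more robust. Outside Travis the helper does not exist; a minimal local stand-in might look like the following sketch (the three-attempt count and the back-off are assumptions about the built-in, not part of this commit):

    # Hypothetical local substitute for Travis CI's travis_retry helper.
    travis_retry() {
      local attempt
      for attempt in 1 2 3; do        # assumed: up to three attempts, like the Travis helper
        "$@" && return 0
        echo "Attempt $attempt of '$*' failed; retrying..." >&2
        sleep 2                       # assumed back-off between attempts
      done
      return 1
    }

    travis_retry bin/os_pp.sh
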
notifications:
8 changes: 5 additions & 3 deletions bin/os_pp.sh
@@ -3,8 +3,8 @@

# Installing needed tools
# NB: this bash script runs in Travis, where sudo is required; thus we must put sudo before the usual pip commands
sudo pip install --upgrade pip
sudo pip install python-glanceclient python-neutronclient
sudo pip install --upgrade -qq pip
sudo pip install -qq python-glanceclient python-neutronclient

# Building the OS Instance
cd ./bin || exit
@@ -57,10 +57,12 @@ export OS_TENANT_NAME=$OS_TENANT_NAME
export OS_AUTH_VERSION=3
export OS_POOL_NAME=$OS_POOL_NAME
export OS_EXTERNAL_NET_UUUID=$OS_EXTERNAL_NET_UUUID
# AWS credentials
# AWS credentials and bucket URLs
export AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID
export AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY
export AWS_DEFAULT_REGION=$AWS_DEFAULT_REGION
export AWS_BUCKET1_URL=$AWS_BUCKET1_URL
export AWS_BUCKET2_URL=$AWS_BUCKET2_URL
" >>/tmp/aws_and_os.sh

# Launching OS instance with terraform
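
The export block above appends the OpenStack and AWS settings, now including the two S3 bucket URLs, to /tmp/aws_and_os.sh, presumably so that a later step can load them as environment variables. A minimal sketch of that consuming side, assuming the file is simply sourced before the Terraform/upload step runs (the invocation below is an assumption, not shown in this commit):

    # Hypothetical consumer of the generated settings file
    # (the file path matches os_pp.sh above; the commands are assumed).
    source /tmp/aws_and_os.sh
    echo "Will upload the image to: $AWS_BUCKET1_URL and $AWS_BUCKET2_URL"
    ./bin/os_tf.sh
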
12 changes: 7 additions & 5 deletions bin/os_tf.sh
@@ -32,10 +32,12 @@ md5sum "$kubenow_image_name".qcow2 >"$kubenow_image_name".qcow2.md5

# Uploading the new image format to the AWS S3 bucket. Previous copy will be overwritten.
echo "Uploading new image format into AWS S3 bucket: kubenow-us-east-1 ..."
aws s3 cp "$kubenow_image_name".qcow2 s3://kubenow-us-east-1 --region us-east-1 --acl public-read --quiet
aws s3 cp "$kubenow_image_name".qcow2.md5 s3://kubenow-us-east-1 --region us-east-1 --acl public-read --quiet
bucket1_region=$(echo "$AWS_BUCKET1_URL" | awk -F "/" '{{ print $2 $3 $4 }}' | sed -e 's/kubenow-//')
aws s3 cp "$kubenow_image_name".qcow2 "$AWS_BUCKET1_URL" --region "$bucket1_region" --acl public-read --quiet
aws s3 cp "$kubenow_image_name".qcow2.md5 "$AWS_BUCKET1_URL" --region "$bucket1_region" --acl public-read --quiet

# Copy the files to the bucket in the other AWS region
echo "Copying new image format into AWS S3 bucket: kubenow-us-central-1 ..."
aws s3 cp "$kubenow_image_name".qcow2 s3://kubenow-eu-central-1/ --region us-east-1 --region eu-central-1 --acl public-read --quiet
aws s3 cp "$kubenow_image_name".qcow2.md5 s3://kubenow-eu-central-1/ --region us-east-1 --region eu-central-1 --acl public-read --quiet
echo "Copying new image format into AWS S3 bucket: kubenow-eu-central-1 ..."
bucket2_region=$(echo "$AWS_BUCKET2_URL" | awk -F "/" '{{ print $2 $3 $4 }}' | sed -e 's/kubenow-//')
aws s3 cp "$kubenow_image_name".qcow2 "$AWS_BUCKET2_URL" --region "$bucket1_region" --region "$bucket2_region" --acl public-read --quiet
aws s3 cp "$kubenow_image_name".qcow2.md5 "$AWS_BUCKET2_URL" --region "$bucket1_region" --region "$bucket2_region" --acl public-read --quiet
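
The two bucket*_region assignments above derive the AWS region from the bucket URL by splitting on "/" and stripping the kubenow- prefix, so that each aws s3 cp call targets the bucket's own region (the doubled braces in the awk program are harmless: awk treats the inner pair as a nested block). A quick standalone check of that pipeline, using a sample URL in place of the real AWS_BUCKET1_URL variable:

    # Sample value only; in Travis the real URL comes from the AWS_BUCKET1_URL setting.
    AWS_BUCKET1_URL="s3://kubenow-eu-central-1"
    # Splitting on "/" gives: $1="s3:", $2="", $3="kubenow-eu-central-1"; sed then drops the prefix.
    bucket1_region=$(echo "$AWS_BUCKET1_URL" | awk -F "/" '{ print $2 $3 $4 }' | sed -e 's/kubenow-//')
    echo "$bucket1_region"   # prints: eu-central-1
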
4 changes: 2 additions & 2 deletions requirements.sh
@@ -33,7 +33,7 @@ sudo DEBIAN_FRONTEND=noninteractive \
upgrade

echo "Installing Kubernetes requirements..."
sudo apt-get install -y \
sudo apt-get -qq install -y \
docker-engine=1.13.1-0~ubuntu-xenial \
kubernetes-cni=0.6.0-00 \
kubeadm=1.9.2-00 \
@@ -42,7 +42,7 @@ sudo apt-get install -y \

echo "Installing other requirements..."
# APT requirements
sudo apt-get install -y \
sudo apt-get -qq install -y \
python \
daemon \
attr \
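
The apt-get list above is truncated by the collapsed diff; within the visible lines only the -qq flag changes. After a pinned installation like this, a quick way to confirm which versions actually landed is a dpkg-query check; a small sketch using only the package names visible in the diff (the full list in requirements.sh may be longer):

    # Confirm the pinned versions that were installed (package list is illustrative).
    dpkg-query -W -f='${Package}\t${Version}\n' docker-engine kubernetes-cni kubeadm
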
