e2e: improve reusability of provisioning scripts (#6942)

This changeset is part of the work to improve our E2E provisioning
process so that it can support our upgrade tests:

* Move more of the setup into the AMI image creation so it's a little
  more obvious to provisioning config authors which bits are essential
  to deploying a specific version of Nomad.

* Make the service file update do a systemd daemon-reload so that we
  can update an already-running cluster with the same script we use to
  deploy it initially (see the sketch below).
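
For reference, a minimal sketch of the idempotent service-install pattern
used here; `$SRC` and `example.service` are placeholder names for
illustration, not files shipped by this changeset:

    # Copy the unit file, then reload and restart instead of start:
    # daemon-reload picks up changes to an already-installed unit file,
    # and restart also starts the service if it was not running yet.
    sudo cp "$SRC/example.service" /etc/systemd/system/example.service
    sudo systemctl enable example.service
    sudo systemctl daemon-reload
    sudo systemctl restart example.service

With this pattern a fresh deploy and an in-place update of a running
cluster go through the same code path.
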
Tim Gross 2020-01-16 09:29:36 -05:00 committed by GitHub
parent a4aeea0058
commit 1e600d573d
4 changed files with 47 additions and 107 deletions


@@ -26,8 +26,6 @@ NOMADDOWNLOAD=https://releases.hashicorp.com/nomad/${NOMADVERSION}/nomad_${NOMAD
NOMADCONFIGDIR=/etc/nomad.d
NOMADDIR=/opt/nomad
HADOOP_VERSION=2.7.7
# Dependencies
sudo apt-get install -y software-properties-common
sudo apt-get update
@@ -83,7 +81,7 @@ sudo chmod 755 $NOMADCONFIGDIR
sudo mkdir -p $NOMADDIR
sudo chmod 755 $NOMADDIR
# Docker
echo "Install Docker"
distro=$(lsb_release -si | tr '[:upper:]' '[:lower:]')
sudo apt-get install -y apt-transport-https ca-certificates gnupg2
curl -fsSL https://download.docker.com/linux/debian/gpg | sudo apt-key add -
@@ -91,51 +89,46 @@ sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/${di
sudo apt-get update
sudo apt-get install -y docker-ce
# rkt
VERSION=1.29.0
DOWNLOAD=https://github.com/rkt/rkt/releases/download/v${VERSION}/rkt-v${VERSION}.tar.gz
function install_rkt() {
    wget -q -O /tmp/rkt.tar.gz "${DOWNLOAD}"
    tar -C /tmp -xvf /tmp/rkt.tar.gz
    sudo mv /tmp/rkt-v${VERSION}/rkt /usr/local/bin
    sudo mv /tmp/rkt-v${VERSION}/*.aci /usr/local/bin
}
function configure_rkt_networking() {
    sudo mkdir -p /etc/rkt/net.d
    sudo bash -c 'cat << EOT > /etc/rkt/net.d/99-network.conf
{
  "name": "default",
  "type": "ptp",
  "ipMasq": false,
  "ipam": {
    "type": "host-local",
    "subnet": "172.16.28.0/24",
    "routes": [
      {
        "dst": "0.0.0.0/0"
      }
    ]
  }
}
EOT'
}
install_rkt
configure_rkt_networking
# Java
echo "Install Java"
sudo add-apt-repository -y ppa:openjdk-r/ppa
sudo apt-get update
sudo apt-get install -y openjdk-8-jdk
JAVA_HOME=$(readlink -f /usr/bin/java | sed "s:bin/java::")
# Spark
echo "Install Spark"
sudo wget -P /ops/examples/spark https://nomad-spark.s3.amazonaws.com/spark-2.2.0-bin-nomad-0.7.0.tgz
sudo tar -xvf /ops/examples/spark/spark-2.2.0-bin-nomad-0.7.0.tgz --directory /ops/examples/spark
sudo mv /ops/examples/spark/spark-2.2.0-bin-nomad-0.7.0 /usr/local/bin/spark
sudo chown -R root:root /usr/local/bin/spark
# Hadoop (to enable the HDFS CLI)
echo "Install HDFS CLI"
HADOOP_VERSION=2.7.7
HADOOPCONFIGDIR=/usr/local/$HADOOP_VERSION/etc/hadoop
sudo mkdir -p "$HADOOPCONFIGDIR"
wget -O - http://apache.mirror.iphh.net/hadoop/common/hadoop-${HADOOP_VERSION}/hadoop-${HADOOP_VERSION}.tar.gz | sudo tar xz -C /usr/local/
# note this 'EOF' syntax avoids expansion in the heredoc
sudo tee "$HADOOPCONFIGDIR/core-site.xml" << 'EOF'
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://hdfs.service.consul/</value>
  </property>
</configuration>
EOF
echo "Configure user shell"
sudo tee -a /home/ubuntu/.bashrc << 'EOF'
IP_ADDRESS=$(/usr/local/bin/sockaddr eval 'GetPrivateIP')
export CONSUL_RPC_ADDR=$IP_ADDRESS:8400
export CONSUL_HTTP_ADDR=$IP_ADDRESS:8500
export VAULT_ADDR=http://$IP_ADDRESS:8200
export NOMAD_ADDR=http://$IP_ADDRESS:4646
export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/jre
# Update PATH
export PATH=$PATH:/usr/local/bin/spark/bin:/usr/local/$HADOOP_VERSION/bin
EOF


@@ -1,8 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
<configuration>
  <property>
    <name>fs.defaultFS</name>
    <value>hdfs://hdfs.service.consul/</value>
  </property>
</configuration>


@@ -3,15 +3,6 @@
set -o errexit
set -o nounset
CONFIGDIR=/ops/shared/config
HADOOP_VERSION=hadoop-2.7.7
HADOOPCONFIGDIR=/usr/local/$HADOOP_VERSION/etc/hadoop
HOME_DIR=ubuntu
IP_ADDRESS=$(/usr/local/bin/sockaddr eval 'GetPrivateIP')
DOCKER_BRIDGE_IP_ADDRESS=$(/usr/local/bin/sockaddr eval 'GetInterfaceIP "docker0"')
CLOUD="$1"
NOMAD_SHA="$2"
NOMAD_CONFIG="$3"
@@ -25,32 +16,19 @@ sudo cp "$CONSUL_SRC/retry_$CLOUD.json" "$CONSUL_DEST/"
sudo cp "$CONSUL_SRC/consul_$CLOUD.service" /etc/systemd/system/consul.service
sudo systemctl enable consul.service
sudo systemctl start consul.service
sudo systemctl daemon-reload
sudo systemctl restart consul.service
sleep 10
# Add hostname to /etc/hosts
echo "127.0.0.1 $(hostname)" | sudo tee --append /etc/hosts
# Add Docker bridge network IP to /etc/resolv.conf (at the top)
DOCKER_BRIDGE_IP_ADDRESS=$(/usr/local/bin/sockaddr eval 'GetInterfaceIP "docker0"')
echo "nameserver $DOCKER_BRIDGE_IP_ADDRESS" | sudo tee /etc/resolv.conf.new
cat /etc/resolv.conf | sudo tee --append /etc/resolv.conf.new
sudo mv /etc/resolv.conf.new /etc/resolv.conf
# Hadoop config file to enable HDFS CLI
sudo cp $CONFIGDIR/core-site.xml $HADOOPCONFIGDIR
# Move examples directory to $HOME
sudo mv /ops/examples /home/$HOME_DIR
sudo chown -R $HOME_DIR:$HOME_DIR /home/$HOME_DIR/examples
sudo chmod -R 775 /home/$HOME_DIR/examples
# Set env vars for tool CLIs
echo "export NOMAD_ADDR=http://$IP_ADDRESS:4646" | sudo tee --append /home/$HOME_DIR/.bashrc
echo "export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/jre" | sudo tee --append /home/$HOME_DIR/.bashrc
# Update PATH
echo "export PATH=$PATH:/usr/local/bin/spark/bin:/usr/local/$HADOOP_VERSION/bin" | sudo tee --append /home/$HOME_DIR/.bashrc
# Nomad
NOMAD_SRC=/ops/shared/nomad
@@ -79,5 +57,7 @@ wget -q -O - \
# enable as a systemd service
sudo cp "$NOMAD_SRC/nomad.service" /etc/systemd/system/nomad.service
sudo systemctl enable nomad.service
sudo systemctl start nomad.service
sudo systemctl daemon-reload
sudo systemctl restart nomad.service


@@ -3,15 +3,6 @@
set -o errexit
set -o nounset
CONFIGDIR=/ops/shared/config
HADOOP_VERSION=hadoop-2.7.7
HADOOPCONFIGDIR=/usr/local/$HADOOP_VERSION/etc/hadoop
HOME_DIR=ubuntu
IP_ADDRESS=$(/usr/local/bin/sockaddr eval 'GetPrivateIP')
DOCKER_BRIDGE_IP_ADDRESS=$(/usr/local/bin/sockaddr eval 'GetInterfaceIP "docker0"')
CLOUD="$1"
SERVER_COUNT="$2"
NOMAD_SHA="$3"
@@ -28,10 +19,9 @@ sudo cp "$CONSUL_SRC/retry_$CLOUD.json" "$CONSUL_DEST/"
sudo cp "$CONSUL_SRC/consul_$CLOUD.service" /etc/systemd/system/consul.service
sudo systemctl enable consul.service
sudo systemctl start consul.service
sudo systemctl daemon-reload
sudo systemctl restart consul.service
sleep 10
export CONSUL_HTTP_ADDR=$IP_ADDRESS:8500
export CONSUL_RPC_ADDR=$IP_ADDRESS:8400
# Vault
VAULT_SRC=/ops/shared/vault
@@ -41,35 +31,18 @@ sudo cp "$VAULT_SRC/vault.hcl" "$VAULT_DEST"
sudo cp "$VAULT_SRC/vault.service" /etc/systemd/system/vault.service
sudo systemctl enable vault.service
sudo systemctl start vault.service
sudo systemctl daemon-reload
sudo systemctl restart vault.service
# Add hostname to /etc/hosts
echo "127.0.0.1 $(hostname)" | sudo tee --append /etc/hosts
# Add Docker bridge network IP to /etc/resolv.conf (at the top)
DOCKER_BRIDGE_IP_ADDRESS=$(/usr/local/bin/sockaddr eval 'GetInterfaceIP "docker0"')
echo "nameserver $DOCKER_BRIDGE_IP_ADDRESS" | sudo tee /etc/resolv.conf.new
cat /etc/resolv.conf | sudo tee --append /etc/resolv.conf.new
sudo mv /etc/resolv.conf.new /etc/resolv.conf
# Hadoop
sudo cp $CONFIGDIR/core-site.xml $HADOOPCONFIGDIR
# Move examples directory to $HOME
sudo mv /ops/examples /home/$HOME_DIR
sudo chown -R $HOME_DIR:$HOME_DIR /home/$HOME_DIR/examples
sudo chmod -R 775 /home/$HOME_DIR/examples
# Set env vars for tool CLIs
echo "export CONSUL_RPC_ADDR=$IP_ADDRESS:8400" | sudo tee --append /home/$HOME_DIR/.bashrc
echo "export CONSUL_HTTP_ADDR=$IP_ADDRESS:8500" | sudo tee --append /home/$HOME_DIR/.bashrc
echo "export VAULT_ADDR=http://$IP_ADDRESS:8200" | sudo tee --append /home/$HOME_DIR/.bashrc
echo "export NOMAD_ADDR=http://$IP_ADDRESS:4646" | sudo tee --append /home/$HOME_DIR/.bashrc
echo "export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/jre" | sudo tee --append /home/$HOME_DIR/.bashrc
# Update PATH
echo "export PATH=$PATH:/usr/local/bin/spark/bin:/usr/local/$HADOOP_VERSION/bin" | sudo tee --append /home/$HOME_DIR/.bashrc
# Nomad
NOMAD_SRC=/ops/shared/nomad
@@ -92,5 +65,7 @@ sudo mv "/tmp/$NOMAD_CONFIG_FILENAME" "$NOMAD_DEST/$NOMAD_CONFIG_FILENAME"
# enable as a systemd service
sudo cp "$NOMAD_SRC/nomad.service" /etc/systemd/system/nomad.service
sudo systemctl enable nomad.service
sudo systemctl start nomad.service
sudo systemctl daemon-reload
sudo systemctl restart nomad.service