cleanup build/provision shell scripts

Rob Genova 2017-06-25 11:54:56 -07:00
parent 7f1c84c59c
commit fddf190f5f
3 changed files with 36 additions and 37 deletions


@@ -1,12 +1,16 @@
 #!/bin/bash
+set -e
 CONFIGDIR=/ops/shared/config
 CONSULCONFIGDIR=/etc/consul.d
 NOMADCONFIGDIR=/etc/nomad.d
 HADOOP_VERSION=hadoop-2.7.3
 HADOOPCONFIGDIR=/usr/local/$HADOOP_VERSION/etc/hadoop
 HOME_DIR=ubuntu
+# Wait for network
 sleep 15
 IP_ADDRESS=$(curl http://instance-data/latest/meta-data/local-ipv4)
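(A note on the new wait: the client script only learns its private address from the EC2 instance metadata service, so it now pauses before the curl. The same idea could be written as a poll-until-ready loop; the sketch below is illustration only and is not part of this commit.)

# Illustration only: retry the metadata endpoint instead of a fixed sleep.
for i in $(seq 1 30); do
  IP_ADDRESS=$(curl -sf http://instance-data/latest/meta-data/local-ipv4) && break
  sleep 1
done
echo "local-ipv4: $IP_ADDRESS"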
@@ -25,7 +29,6 @@ sudo service consul start
 sleep 10
 # Nomad
-# sed -i "s/SERVER_IP_ADDRESS/$SERVER_IP_ADDRESS/g" $CONFIGDIR/nomad_client.hcl
 sed -i "s/IP_ADDRESS/$IP_ADDRESS/g" $CONFIGDIR/nomad_client.hcl
 sudo cp $CONFIGDIR/nomad_client.hcl $NOMADCONFIGDIR/nomad.hcl
 sudo cp $CONFIGDIR/nomad_upstart.conf /etc/init/nomad.conf
@@ -35,32 +38,27 @@ sleep 10
 export NOMAD_ADDR=http://$IP_ADDRESS:4646
 # Add hostname to /etc/hosts
 echo "127.0.0.1 $(hostname)" | sudo tee --append /etc/hosts
 # Add Docker bridge network IP to /etc/resolv.conf (at the top)
 echo "nameserver $DOCKER_BRIDGE_IP_ADDRESS" | sudo tee /etc/resolv.conf.new
 cat /etc/resolv.conf | sudo tee --append /etc/resolv.conf.new
 sudo mv /etc/resolv.conf.new /etc/resolv.conf
-# Hadoop
+# Hadoop config file to enable HDFS CLI
 sudo cp $CONFIGDIR/core-site.xml $HADOOPCONFIGDIR
-# Set env vars in bashrc
-echo "export VAULT_ADDR=http://$IP_ADDRESS:8200" | sudo tee --append /home/$HOME_DIR/.bashrc
-echo "export NOMAD_ADDR=http://$IP_ADDRESS:4646" | sudo tee --append /home/$HOME_DIR/.bashrc
-echo "export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/jre" | sudo tee --append /home/$HOME_DIR/.bashrc
 # Move examples directory to $HOME
 sudo mv /ops/examples /home/$HOME_DIR
 sudo chown -R $HOME_DIR:$HOME_DIR /home/$HOME_DIR/examples
 sudo chmod -R 775 /home/$HOME_DIR/examples
-# Update PATH in .bashrc
+# Set env vars for tool CLIs
+echo "export VAULT_ADDR=http://$IP_ADDRESS:8200" | sudo tee --append /home/$HOME_DIR/.bashrc
+echo "export NOMAD_ADDR=http://$IP_ADDRESS:4646" | sudo tee --append /home/$HOME_DIR/.bashrc
+echo "export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/jre" | sudo tee --append /home/$HOME_DIR/.bashrc
+# Update PATH
 echo "export PATH=$PATH:/usr/local/bin/spark/bin:/usr/local/$HADOOP_VERSION/bin" | sudo tee --append /home/$HOME_DIR/.bashrc


@@ -1,6 +1,9 @@
 #!/bin/bash
+set -e
 CONFIGDIR=/ops/shared/config
 CONSULCONFIGDIR=/etc/consul.d
 VAULTCONFIGDIR=/etc/vault.d
 NOMADCONFIGDIR=/etc/nomad.d
@@ -59,6 +62,11 @@ sudo mv /etc/resolv.conf.new /etc/resolv.conf
 # Hadoop
 sudo cp $CONFIGDIR/core-site.xml $HADOOPCONFIGDIR
+# Move examples directory to $HOME
+sudo mv /ops/examples /home/$HOME_DIR
+sudo chown -R $HOME_DIR:$HOME_DIR /home/$HOME_DIR/examples
+sudo chmod -R 775 /home/$HOME_DIR/examples
 # Set env vars for tool CLIs
 echo "export CONSUL_RPC_ADDR=$IP_ADDRESS:8400" | sudo tee --append /home/$HOME_DIR/.bashrc
 echo "export CONSUL_HTTP_ADDR=$IP_ADDRESS:8500" | sudo tee --append /home/$HOME_DIR/.bashrc
@@ -66,6 +74,5 @@ echo "export VAULT_ADDR=http://$IP_ADDRESS:8200" | sudo tee --append /home/$HOME
 echo "export NOMAD_ADDR=http://$IP_ADDRESS:4646" | sudo tee --append /home/$HOME_DIR/.bashrc
 echo "export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64/jre" | sudo tee --append /home/$HOME_DIR/.bashrc
-# Update PATH in .bashrc
+# Update PATH
 echo "export PATH=$PATH:/usr/local/bin/spark/bin:/usr/local/$HADOOP_VERSION/bin" | sudo tee --append /home/$HOME_DIR/.bashrc


@@ -1,4 +1,5 @@
 #!/bin/bash
 set -e
 cd /ops
@@ -20,7 +21,9 @@ NOMADDOWNLOAD=https://releases.hashicorp.com/nomad/${NOMADVERSION}/nomad_${NOMAD
 NOMADCONFIGDIR=/etc/nomad.d
 NOMADDIR=/opt/nomad
-echo Dependencies...
+HADOOP_VERSION=2.7.3
+# Dependencies
 sudo apt-get install -y software-properties-common
 sudo apt-get update
 sudo apt-get install -y unzip tree redis-tools jq
@@ -28,7 +31,6 @@ sudo apt-get install -y upstart-sysv
 sudo update-initramfs -u
 # Numpy (for Spark)
-sudo apt-get install -y python-setuptools
 sudo easy_install pip
 sudo pip install numpy
@@ -37,76 +39,68 @@ sudo pip install numpy
 sudo ufw disable
-## Consul
-echo Fetching Consul...
+# Consul
 curl -L $CONSULDOWNLOAD > consul.zip
-echo Installing Consul...
+## Install
 sudo unzip consul.zip -d /usr/local/bin
 sudo chmod 0755 /usr/local/bin/consul
 sudo chown root:root /usr/local/bin/consul
-echo Configuring Consul...
+## Configure
 sudo mkdir -p $CONSULCONFIGDIR
 sudo chmod 755 $CONSULCONFIGDIR
 sudo mkdir -p $CONSULDIR
 sudo chmod 755 $CONSULDIR
-## Vault
-echo Fetching Vault...
+# Vault
 curl -L $VAULTDOWNLOAD > vault.zip
-echo Installing Vault...
+## Install
 sudo unzip vault.zip -d /usr/local/bin
 sudo chmod 0755 /usr/local/bin/vault
 sudo chown root:root /usr/local/bin/vault
-echo Configuring Vault...
+## Configure
 sudo mkdir -p $VAULTCONFIGDIR
 sudo chmod 755 $VAULTCONFIGDIR
 sudo mkdir -p $VAULTDIR
 sudo chmod 755 $VAULTDIR
-## Nomad
-echo Fetching Nomad...
+# Nomad
 curl -L $NOMADDOWNLOAD > nomad.zip
-echo Installing Nomad...
+## Install
 sudo unzip nomad.zip -d /usr/local/bin
 sudo chmod 0755 /usr/local/bin/nomad
 sudo chown root:root /usr/local/bin/nomad
-echo Configuring Nomad...
+## Configure
 sudo mkdir -p $NOMADCONFIGDIR
 sudo chmod 755 $NOMADCONFIGDIR
 sudo mkdir -p $NOMADDIR
 sudo chmod 755 $NOMADDIR
-## Docker
+# Docker
 echo deb https://apt.dockerproject.org/repo ubuntu-`lsb_release -c | awk '{print $2}'` main | sudo tee /etc/apt/sources.list.d/docker.list
 sudo apt-key adv --keyserver hkp://p80.pool.sks-keyservers.net:80 --recv-keys 58118E89F3A912897C070ADBF76221572C52609D
 sudo apt-get update
 sudo apt-get install -y docker-engine
-## Java
+# Java
 sudo add-apt-repository -y ppa:openjdk-r/ppa
 sudo apt-get update
 sudo apt-get install -y openjdk-8-jdk
 JAVA_HOME=$(readlink -f /usr/bin/java | sed "s:bin/java::")
-## Download and unpack spark
+# Spark
 sudo wget -P /ops/examples/spark https://s3.amazonaws.com/rcgenova-nomad-spark/spark-2.1.0-bin-nomad-preview-6.tgz
 sudo tar -xvf /ops/examples/spark/spark-2.1.0-bin-nomad-preview-6.tgz --directory /ops/examples/spark
 sudo mv /ops/examples/spark/spark-2.1.0-bin-nomad-preview-6 /usr/local/bin/spark
 sudo chown -R root:root /usr/local/bin/spark
-## Install Hadoop to enable the HDFS CLI
-export HADOOP_VERSION=2.7.3
+# Hadoop (to enable the HDFS CLI)
 wget -O - http://apache.mirror.iphh.net/hadoop/common/hadoop-$HADOOP_VERSION/hadoop-$HADOOP_VERSION.tar.gz | sudo tar xz -C /usr/local/
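(For orientation: the Consul, Vault, and Nomad sections above all follow the same fetch / install / configure shape. The sketch below factors that shape into a function purely for illustration; the committed script keeps the three explicit blocks.)

# Illustration only: the shared install pattern as a function.
install_hashicorp_tool() {
  local name="$1" url="$2" configdir="$3" datadir="$4"
  curl -L "$url" > "$name.zip"
  sudo unzip "$name.zip" -d /usr/local/bin
  sudo chmod 0755 "/usr/local/bin/$name"
  sudo chown root:root "/usr/local/bin/$name"
  sudo mkdir -p "$configdir" "$datadir"
  sudo chmod 755 "$configdir" "$datadir"
}
# e.g. install_hashicorp_tool consul "$CONSULDOWNLOAD" "$CONSULCONFIGDIR" "$CONSULDIR"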