Commit 4e6fa3a8 by Carson Gee

Create vagrant for building and testing cluster roles

parent e0040f84
# - name: Configure group cluster
#   hosts: all
#   sudo: True
#   gather_facts: True
#   vars:
#     mongo_cluster_members:
#       - "cluster1"
#       - "cluster2"
#       - "cluster3"
#     MONGO_CLUSTERED: yes
#     MONGO_CLUSTER_KEY: 'password'
#     mongo_create_users: no
#     ELASTICSEARCH_CLUSTERED: yes
#     MARIADB_CLUSTERED: yes
#     MARIADB_CREATE_DBS: no
#   vars_files:
#     - "group_vars/all"
#   roles:
#     - user
#     - mongo
#     - oraclejdk
#     - elasticsearch
#     - mariadb
#     - edx_ansible
#
# # Rabbit needs to be built serially
# - name: Configure group cluster serial roles
#   hosts: all
#   sudo: True
#   gather_facts: True
#   forks: 1
#   vars:
#     rabbitmq_clustered_hosts:
#       - "rabbit@cluster1"
#       - "rabbit@cluster2"
#       - "rabbit@cluster3"
#     rabbitmq_ip: ""
#   vars_files:
#     - "group_vars/all"
#   roles:
#     - rabbitmq
# The mongo user module doesn't handle slaves gracefully when
# creating users, and there are occasional race conditions
# in MariaDB, so this play will do its work
# but will also show as failed.
- name: Configure group with tasks that will always fail
  hosts: all
  sudo: True
  gather_facts: True
  vars:
    mongo_cluster_members:
      - "cluster1"
      - "cluster2"
      - "cluster3"
    MONGO_CLUSTERED: yes
    MONGO_CLUSTER_KEY: 'password'
    mongo_create_users: yes
    RABBITMQ_CLUSTERED: yes
    MARIADB_CLUSTERED: yes
    MARIADB_CREATE_DBS: yes
  vars_files:
    - "group_vars/all"
    - "roles/analytics-api/defaults/main.yml"
  roles:
    - mongo
    # - mariadb
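
# A rough usage sketch (assuming the vagrant-cluster directory's inventory.ini
# and this playbook's relative path from that directory, as wired up in the
# Vagrantfile): once the three VMs are up, the play can also be re-run by hand
# with something like
#   ansible-playbook -i inventory.ini ../../../playbooks/vagrant-cluster.yml --limit all
# Expect the mongo user creation and MariaDB tasks to report failures for the
# reasons noted above.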
# -*- mode: ruby -*-
# vi: set ft=ruby :

VAGRANTFILE_API_VERSION = "2"

Vagrant.require_version ">= 1.5.0"

$script = <<SCRIPT
# Silly Ubuntu 12.04 doesn't have the
# --stdin option in the passwd utility
echo root:vagrant | chpasswd
cat << EOF >> /etc/hosts
192.168.33.100 cluster1
192.168.33.110 cluster2
192.168.33.120 cluster3
EOF
SCRIPT

Vagrant.configure(VAGRANTFILE_API_VERSION) do |config|
  config.vm.box     = "precise64"
  config.vm.box_url = "http://files.vagrantup.com/precise64.box"

  # Turn off shared folders
  # config.vm.synced_folder ".", "/vagrant", id: "vagrant-root", disabled: true

  # Begin cluster1
  config.vm.define "cluster1" do |cluster1_config|
    cluster1_config.vm.hostname = "cluster1"
    cluster1_config.vm.provision "shell", inline: $script
    cluster1_config.vm.network :private_network, ip: "192.168.33.100"
    cluster1_config.vm.provider "virtualbox" do |v|
      v.customize ["modifyvm", :id, "--memory", "2048"]
      v.customize ["modifyvm", :id, "--cpus", "2"]
    end
  end
  # End cluster1

  # Begin cluster2
  config.vm.define "cluster2" do |cluster2_config|
    cluster2_config.vm.hostname = "cluster2"
    cluster2_config.vm.provision "shell", inline: $script
    cluster2_config.vm.network :private_network, ip: "192.168.33.110"
    cluster2_config.vm.provider "virtualbox" do |v|
      v.customize ["modifyvm", :id, "--memory", "2048"]
      v.customize ["modifyvm", :id, "--cpus", "2"]
    end
  end
  # End cluster2

  # Begin cluster3
  config.vm.define "cluster3" do |cluster3_config|
    cluster3_config.vm.hostname = "cluster3"
    cluster3_config.vm.provision "shell", inline: $script
    cluster3_config.vm.network :private_network, ip: "192.168.33.120"
    cluster3_config.vm.provider "virtualbox" do |v|
      v.customize ["modifyvm", :id, "--memory", "2048"]
      v.customize ["modifyvm", :id, "--cpus", "2"]
    end

    # Now that all machines are up, provision the group.
    # See https://github.com/mitchellh/vagrant/issues/1784 for why
    # we do it here.
    cluster3_config.vm.provision :ansible do |ansible|
      # Point Vagrant at the playbook you want to run
      ansible.playbook = "../../../playbooks/vagrant-cluster.yml"
      #ansible.verbose = "vvv"
      ansible.inventory_path = "inventory.ini"
      ansible.limit = 'all'
    end
  end
  # End cluster3
end
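
# A hedged usage sketch (assumed workflow, not enforced by this file):
# `vagrant up` builds cluster1, cluster2, and cluster3 in definition order,
# and the Ansible provisioner attached to cluster3 then runs the playbook
# against all hosts in the inventory. To repeat only that group provisioning
# step later:
#   vagrant provision cluster3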
# config file for ansible -- http://ansible.github.com
# nearly all parameters can be overridden in ansible-playbook or with command line flags
# ansible will read ~/.ansible.cfg or /etc/ansible/ansible.cfg, whichever it finds first
[defaults]
jinja2_extensions=jinja2.ext.do
host_key_checking = False
roles_path=../../ansible-roles/roles:../../ansible-private/roles:../../ansible-roles/
[cluster]
cluster1 ansible_ssh_host=192.168.33.100 ansible_ssh_user=vagrant ansible_ssh_private_key_file=~/.vagrant.d/insecure_private_key
cluster2 ansible_ssh_host=192.168.33.110 ansible_ssh_user=vagrant ansible_ssh_private_key_file=~/.vagrant.d/insecure_private_key
cluster3 ansible_ssh_host=192.168.33.120 ansible_ssh_user=vagrant ansible_ssh_private_key_file=~/.vagrant.d/insecure_private_key
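
# As an assumed sanity check (ad-hoc Ansible ping, not part of the playbook),
# connectivity to the three VMs can be verified against this inventory with:
#   ansible all -i inventory.ini -m ping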