Commit 848fd291 by Filippo Panessa

Merge conflict

parents 11ad0661 911ce6a3
@@ -4,6 +4,6 @@ Configuration Pull Request
Make sure that the following steps are done before merging
- [ ] @devops team member has commented with :+1:
-- [ ] are you adding any new default values that need to be overriden when this goes live?
+- [ ] are you adding any new default values that need to be overridden when this goes live?
- [ ] Open a ticket (DEVOPS) to make sure that they have been added to secure vars.
- [ ] Add an entry to the CHANGELOG.
# Travis CI configuration file for running tests
language: python
+python:
+  - "2.7"
branches:
  only:
    - master
-python:
-  - "2.7"
services:
  - docker
@@ -201,3 +201,10 @@
- Changed MONGO_STORAGE_ENGINE to default to wiredTiger which is the default in 3.2 and 3.4 and what edX suggests be used even on 3.0.
  If you have a mmapv1 3.0 install, override MONGO_STORAGE_ENGINE to be mmapv1 which was the old default.
- Ready for deploying Mongo 3.2
+- Role: xqueue
+  - Added `EDXAPP_CELERY_BROKER_USE_SSL` to allow configuring celery to use TLS.
+- Role: edxapp
+  - Added `XQUEUE_RABBITMQ_VHOST` to allow configuring the xqueue RabbitMQ host.
+  - Added `XQUEUE_RABBITMQ_PORT` and `XQUEUE_RABBITMQ_TLS` to allow configuring the RabbitMQ port, and enabling TLS respectively.
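The new settings above follow the repo's usual override pattern: set them in a private/secure vars file and they flow through to the role defaults. A minimal sketch with placeholder values (only the variable names come from the entries above; everything else is illustrative):

```yaml
# secure-vars.yml (hypothetical) - overrides for the settings introduced above
EDXAPP_CELERY_BROKER_USE_SSL: true   # make celery talk to the broker over TLS
XQUEUE_RABBITMQ_VHOST: 'xqueue'      # vhost used by the xqueue workers
XQUEUE_RABBITMQ_PORT: 5671           # 5671 is the conventional AMQPS port
XQUEUE_RABBITMQ_TLS: true            # enable TLS on the xqueue RabbitMQ connection
```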
@@ -26,7 +26,9 @@ test: docker.test
pkg: docker.pkg
-clean:
+clean: docker.clean
+docker.clean:
	rm -rf .build
docker.test.shard: $(foreach image,$(shell echo $(images) | python util/balancecontainers.py $(SHARDS) | awk 'NR%$(SHARDS)==$(SHARD)'),$(docker_test)$(image))
-FROM edxops/precise-common:latest
+FROM edxops/xenial-common:latest
MAINTAINER edxops
RUN apt-get update
@@ -19,3 +19,6 @@ ANALYTICS_API_DATABASES:
    PASSWORD: 'password'
    HOST: "db.{{ DOCKER_TLD }}"
    PORT: '3306'
+# Change this if you want to build a specific version of the ANALYTICS_API
+ANALYTICS_API_VERSION: 'master'
@@ -7,7 +7,7 @@
# This allows the dockerfile to update /edx/app/edx_ansible/edx_ansible
# with the currently checked-out configuration repo.
-FROM edxops/trusty-common:latest
+FROM edxops/xenial-common:latest
MAINTAINER edxops
ENV DISCOVERY_VERSION=master
# To build this Dockerfile:
#
# From the root of configuration:
#
# docker build -f docker/build/docker-tools/Dockerfile .
#
# This allows the dockerfile to update /edx/app/edx_ansible/edx_ansible
# with the currently checked-out configuration repo.
FROM edxops/xenial-common:latest
MAINTAINER edxops
ENV PROGRAMS_VERSION=master
ENV REPO_OWNER=edx
ADD . /edx/app/edx_ansible/edx_ansible
WORKDIR /edx/app/edx_ansible/edx_ansible/docker/plays
COPY docker/build/docker-tools/ansible_overrides.yml /
RUN /edx/app/edx_ansible/venvs/edx_ansible/bin/ansible-playbook docker-tools.yml \
-c local -i '127.0.0.1,' \
-t 'install'
RUN which docker
RUN which docker-compose
@@ -42,6 +42,9 @@ RUN apt-get update && apt-get install -y \
    php5-common \
    php5-cli
+# Install dependencies needed for Ansible 2.x
+RUN apt-get update && apt-get install -y libffi-dev libssl-dev
# Install drush (drupal shell) for access to Drupal commands/Acquia
RUN php -r "readfile('http://files.drush.org/drush.phar');" > drush && \
    chmod +x drush && \
@@ -56,7 +59,19 @@ RUN /bin/bash /tmp/docker/docker_install.sh
RUN usermod -aG docker go
# Assign the go user root privileges
-RUN printf "\ngo ALL=(ALL:ALL) NOPASSWD: /usr/bin/pip\n" >> /etc/sudoers
+RUN printf "\ngo ALL=(ALL:ALL) NOPASSWD: /usr/bin/pip, /usr/local/bin/pip\n" >> /etc/sudoers
+# Upgrade pip and setuptools. Needed for Ansible 2.x
+# Must upgrade to latest before pinning to work around bug
+# https://github.com/pypa/pip/issues/3862
+RUN \
+  pip install --upgrade pip && \
+  # pip may have moved from /usr/bin/ to /usr/local/bin/. This clears bash's path cache.
+  hash -r && \
+  pip install --upgrade pip==8.1.2 && \
+  # upgrade setuptools early to avoid no distribution errors
+  pip install --upgrade setuptools==24.0.3
# Install AWS command-line interface - for AWS operations in a go-agent task.
RUN pip install awscli
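The pinned upgrade sequence above can also be expressed with Ansible's pip module if the agent is provisioned through this repo's plays rather than raw RUN steps. A hedged sketch of the same upgrade-then-pin idea (task names and structure are illustrative, not taken from the commit):

```yaml
# Hypothetical tasks mirroring the workaround for https://github.com/pypa/pip/issues/3862
- name: upgrade pip to latest first so the pin below does not trip the old resolver bug
  pip:
    name: pip
    state: latest

- name: pin pip and setuptools to the versions baked into the go-agent image
  pip:
    name: "{{ item.name }}"
    version: "{{ item.version }}"
  with_items:
    - { name: 'pip', version: '8.1.2' }
    - { name: 'setuptools', version: '24.0.3' }
```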
@@ -29,6 +29,11 @@ necessary.
##Building and Uploading the container to ECS
+* Copy the go-agent GitHub private key to this path:
+  - ```docker/build/go-agent/files/go_github_key.pem```
+  - A dummy key is in the repo file.
+  - The actual private key is kept in LastPass - see DevOps for access.
+  - WARNING: Do *NOT* commit/push the real private key to the public configuration repo!
* Create image
  - This must be run from the root of the configuration repository
  - ```docker build -f docker/build/go-agent/Dockerfile .```
@@ -36,9 +41,10 @@ necessary.
  - ```make docker.test.go-agent```
* Log docker in to AWS
  - ```sh -c `aws ecr get-login --region us-east-1` ```
+  - You might need to remove the `-e` option returned by that command in order to successfully login.
* Tag image
-  - ```docker tag -f <image_id> ############.dkr.ecr.us-east-1.amazonaws.com/release-pipeline:latest```
-  - ```docker tag -f <image_id> ############.dkr.ecr.us-east-1.amazonaws.com/release-pipeline:<version_number>```
+  - ```docker tag <image_id> ############.dkr.ecr.us-east-1.amazonaws.com/prod-tools-goagent:latest```
+  - ```docker tag <image_id> ############.dkr.ecr.us-east-1.amazonaws.com/prod-tools-goagent:<version_number>```
* upload:
-  - ```docker push ############.dkr.ecr.us-east-1.amazonaws.com/edx/release-pipeline/go-agent/python:latest```
-  - ```docker push ############.dkr.ecr.us-east-1.amazonaws.com/edx/release-pipeline/go-agent/python:<version_number>```
+  - ```docker push ############.dkr.ecr.us-east-1.amazonaws.com/edx/release-pipeline/prod-tools-goagent:latest```
+  - ```docker push ############.dkr.ecr.us-east-1.amazonaws.com/edx/release-pipeline/prod-tools-goagent:<version_number>```
\ No newline at end of file
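If you would rather drive the tag-and-push step from Ansible than from the raw docker CLI, the docker_image module in recent Ansible 2.x can do the equivalent. A sketch under that assumption (the registry account ID, image name, and version variable are placeholders, not values from this README):

```yaml
# Hypothetical play: tag a locally built image and push it to ECR.
# Assumes the host has already run the `aws ecr get-login` login step.
- hosts: localhost
  connection: local
  gather_facts: False
  tasks:
    - name: tag and push the go-agent image
      docker_image:
        name: prod-tools-goagent                                                      # local image name (placeholder)
        repository: 123456789012.dkr.ecr.us-east-1.amazonaws.com/prod-tools-goagent   # placeholder account ID
        tag: "{{ goagent_version | default('latest') }}"
        push: yes
```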
-FROM edxops/precise-common:latest
+FROM edxops/xenial-common:latest
MAINTAINER edxops
ADD . /edx/app/edx_ansible/edx_ansible
@@ -4,7 +4,7 @@ DOCKER_TLD: "edx"
# In addition, on systemd systems, and newer rsyslogd
# there may be issues with /dev/log existing
# http://www.projectatomic.io/blog/2014/09/running-syslog-within-a-docker-container/
-PROGRAMS_DJANGO_SETTINGS_MODULE: programs.settings.local
+PROGRAMS_DJANGO_SETTINGS_MODULE: programs.settings.devstack
PROGRAMS_DATABASES:
  # rw user
  default:
@@ -2,8 +2,6 @@
DOCKER_TLD: "xqueue"
-CONFIGURATION_REPO: "https://github.com/edx/configuration.git"
-CONFIGURATION_VERSION: "hack2015/docker"
XQUEUE_SYSLOG_SERVER: "localhost"
XQUEUE_RABBITMQ_HOSTNAME: "rabbit.{{ DOCKER_TLD }}"
XQUEUE_MYSQL_HOST: "db.{{ DOCKER_TLD }}"
- name: build a VM with docker-tools
hosts: all
sudo: True
gather_facts: True
roles:
- docker
- docker-tools
@@ -9,9 +9,10 @@ try:
    import hipchat
except ImportError:
    hipchat = None
+from ansible.plugins.callback import CallbackBase
-class CallbackModule(object):
+class CallbackModule(CallbackBase):
    """Send status updates to a HipChat channel during playbook execution.
    This plugin makes use of the following environment variables:
@@ -28,9 +28,10 @@ except ImportError:
else:
    import boto.sqs
    from boto.exception import NoAuthHandlerFound
+from ansible.plugins.callback import CallbackBase
-class CallbackModule(object):
+class CallbackModule(CallbackBase):
    """
    This Ansible callback plugin sends task events
    to SQS.
@@ -238,7 +238,7 @@ class CallbackModule(CallbackBase):
        Record the start of a play.
        """
        self.playbook_name, _ = splitext(
-            basename(self.play.playbook.filename)
+            basename(self.play.get_name())
        )
        self.playbook_timestamp = Timestamp()
@@ -12,3 +12,4 @@ ansible_managed=This file is created and updated by ansible, edit at your peril
[ssh_connection]
ssh_args=-o ControlMaster=auto -o ControlPersist=60s -o ControlPath="~/.ansible/tmp/ansible-ssh-%h-%p-%r" -o ServerAliveInterval=30
+retries=5
\ No newline at end of file
@@ -13,11 +13,13 @@
# - APPLICATION_NAME - The name of the application that we are migrating.
# - APPLICATION_USER - user which is meant to run the application
# - ARTIFACT_PATH - the path where the migration artifacts should be copied after completion
+# - DB_MIGRATION_USER - the database username
+# - DB_MIGRATION_PASS - the database password
+#
+# Other variables:
# - HIPCHAT_TOKEN - API token to send messages to hipchat
# - HIPCHAT_ROOM - ID or name of the room to send the notification
# - HIPCHAT_URL - URL of the hipchat API (defaults to v1 of the api)
-#
-# Other variables:
# - migration_plan - the filename where the unapplied migration YAML output is stored
# - migration_result - the filename where the migration output is saved
# - SUB_APPLICATION_NAME - used for migrations in edxapp {lms|cms}, must be specified
@@ -59,7 +61,7 @@
  shell: '{{ COMMAND_PREFIX }} {{ SUB_APPLICATION_NAME }} show_unapplied_migrations --database "{{ item }}" --output_file "{{ temp_output_dir.stdout }}/{{ item }}_{{ migration_plan }}" --settings "{{ EDX_PLATFORM_SETTINGS }}"'
  become_user: "{{ APPLICATION_USER }}"
  when: APPLICATION_NAME == "edxapp" and item != "read_replica"
-  with_items: edxapp_databases.keys()
+  with_items: "{{ edxapp_databases.keys() }}"
- name: migrate to apply any unapplied migrations
  shell: '{{ COMMAND_PREFIX }} run_migrations --output_file "{{ temp_output_dir.stdout }}/{{ migration_result }}"'
@@ -70,7 +72,7 @@
  shell: '{{ COMMAND_PREFIX }} {{ SUB_APPLICATION_NAME }} run_migrations --database "{{ item }}" --settings "{{ EDX_PLATFORM_SETTINGS }}" --output_file "{{ temp_output_dir.stdout }}/{{ migration_result }}"'
  become_user: "{{ APPLICATION_USER }}"
  when: APPLICATION_NAME == "edxapp" and item != "read_replica"
-  with_items: edxapp_databases.keys()
+  with_items: "{{ edxapp_databases.keys() }}"
- name: List all migration files
  action: "command ls -1 {{ temp_output_dir.stdout }}"
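A minimal sketch of an extra-vars file for this migration play, with every value a placeholder (only the variable names come from the comment block above):

```yaml
# hypothetical extra-vars for the migration play; all values are placeholders
APPLICATION_NAME: edxapp
APPLICATION_USER: edxapp
ARTIFACT_PATH: /tmp/migration-artifacts
DB_MIGRATION_USER: migrate
DB_MIGRATION_PASS: "{{ MIGRATION_DB_PASS }}"   # pull from secure vars, never plain text
SUB_APPLICATION_NAME: lms                      # only needed for edxapp (lms|cms)
```

The `with_items: "{{ edxapp_databases.keys() }}"` wrapping above is the Ansible 2.x form; bare variable references in `with_items` are deprecated there, which is why this commit quotes them throughout.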
@@ -13,25 +13,27 @@
    keyfile: "/home/{{ owner }}/.ssh/authorized_keys"
  serial: "{{ serial_count }}"
  tasks:
-    - fail: msg="You must pass in a public_key"
+    - fail:
+        msg: "You must pass in a public_key"
      when: public_key is not defined
-    - fail: msg="public does not exist in secrets"
+    - fail:
+        msg: "public does not exist in secrets"
      when: ubuntu_public_keys[public_key] is not defined
    - command: mktemp
      register: mktemp
    - name: Validate the public key before we add it to authorized_keys
-      copy: >
-        content="{{ ubuntu_public_keys[public_key] }}"
-        dest={{ mktemp.stdout }}
+      copy:
+        content: "{{ ubuntu_public_keys[public_key] }}"
+        dest: "{{ mktemp.stdout }}"
    # This tests the public key and will not continue if it does not look valid
    - command: ssh-keygen -l -f {{ mktemp.stdout }}
-    - file: >
-        path={{ mktemp.stdout }}
-        state=absent
+    - file:
+        path: "{{ mktemp.stdout }}"
+        state: absent
-    - lineinfile: >
-        dest={{ keyfile }}
-        line="{{ ubuntu_public_keys[public_key] }}"
+    - lineinfile:
+        dest: "{{ keyfile }}"
+        line: "{{ ubuntu_public_keys[public_key] }}"
-    - file: >
-        path={{ keyfile }}
-        owner={{ owner }}
-        mode=0600
+    - file:
+        path: "{{ keyfile }}"
+        owner: "{{ owner }}"
+        mode: 0600
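Most of the playbook edits in this commit are this same mechanical conversion: the old `module: >` key=value shorthand becomes a structured argument dictionary, the form preferred under Ansible 2.x and friendlier to linting. A before/after sketch on a made-up task (not from this repo):

```yaml
# before: key=value shorthand folded with '>' (hypothetical task)
- name: drop a marker file
  copy: >
    content="managed by ansible"
    dest=/tmp/marker.txt
    mode=0644

# after: structured module arguments, one key per line
- name: drop a marker file
  copy:
    content: "managed by ansible"
    dest: /tmp/marker.txt
    mode: 0644
```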
@@ -14,7 +14,8 @@
  serial: "{{ serial_count }}"
  pre_tasks:
    - action: ec2_facts
-    - debug: var="{{ ansible_ec2_instance_id }}"
+    - debug:
+        var: "{{ ansible_ec2_instance_id }}"
      when: elb_pre_post
    - name: Instance De-register
      local_action: ec2_elb
@@ -26,8 +27,9 @@
      become: False
      when: elb_pre_post
  tasks:
-    - debug: msg="{{ ansible_ec2_local_ipv4 }}"
-      with_items: list.results
+    - debug:
+        var: "{{ ansible_ec2_local_ipv4 }}"
+      with_items: "{{ list.results }}"
    - command: rabbitmqctl stop_app
    - command: rabbitmqctl join_cluster rabbit@ip-{{ hostvars.keys()[0]|replace('.', '-') }}
      when: hostvars.keys()[0] != ansible_ec2_local_ipv4
@@ -39,10 +41,9 @@
      local_action: ec2_elb
      args:
        instance_id: "{{ ansible_ec2_instance_id }}"
-        ec2_elbs: "{{ item }}"
+        ec2_elbs: "{{ ec2_elbs }}"
        region: us-east-1
        state: present
        wait_timeout: 60
-      with_items: ec2_elbs
      become: False
      when: elb_pre_post
@@ -47,11 +47,10 @@
      local_action: ec2_elb
      args:
        instance_id: "{{ ansible_ec2_instance_id }}"
-        ec2_elbs: "{{ item }}"
+        ec2_elbs: "{{ ec2_elbs }}"
        region: us-east-1
        state: present
        wait_timeout: 60
-      with_items: ec2_elbs
      become: False
      when: elb_pre_post
#
@@ -13,9 +13,9 @@
# is called it will use the new MYSQL connection
# info.
- name: Update RDS to point to the sandbox clone
-  lineinfile: >
-    dest=/edx/app/edx_ansible/server-vars.yml
-    line="{{ item }}"
+  lineinfile:
+    dest: /edx/app/edx_ansible/server-vars.yml
+    line: "{{ item }}"
  with_items:
    - "EDXAPP_MYSQL_HOST: {{ EDXAPP_MYSQL_HOST }}"
    - "EDXAPP_MYSQL_DB_NAME: {{ EDXAPP_MYSQL_DB_NAME }}"
@@ -24,9 +24,9 @@
  tags: update_edxapp_mysql_host
- name: Update mongo to point to the sandbox mongo clone
-  lineinfile: >
-    dest=/edx/app/edx_ansible/server-vars.yml
-    line="{{ item }}"
+  lineinfile:
+    dest: /edx/app/edx_ansible/server-vars.yml
+    line: "{{ item }}"
  with_items:
    - "EDXAPP_MONGO_HOSTS: {{ EDXAPP_MONGO_HOSTS }}"
    - "EDXAPP_MONGO_DB_NAME: {{ EDXAPP_MONGO_DB_NAME }}"
@@ -35,6 +35,5 @@
  tags: update_edxapp_mysql_host
- name: call update on edx-platform
-  shell: >
-    /edx/bin/update edx-platform {{ edxapp_version }}
+  shell: "/edx/bin/update edx-platform {{ edxapp_version }}"
  tags: update_edxapp_mysql_host
@@ -53,27 +53,27 @@
    - MySQL-python
- name: create mysql databases
-  mysql_db: >
-    db={{ item.name}}
-    state={{ item.state }}
-    encoding={{ item.encoding }}
-    login_host={{ item.login_host }}
-    login_user={{ item.login_user }}
-    login_password={{ item.login_password }}
-  with_items: databases
+  mysql_db:
+    db: "{{ item.name}}"
+    state: "{{ item.state }}"
+    encoding: "{{ item.encoding }}"
+    login_host: "{{ item.login_host }}"
+    login_user: "{{ item.login_user }}"
+    login_password: "{{ item.login_password }}"
+  with_items: "{{ databases }}"
  tags:
    - dbs
- name: create mysql users and assign privileges
-  mysql_user: >
-    name="{{ item.name }}"
-    priv="{{ '/'.join(item.privileges) }}"
-    password="{{ item.password }}"
-    host={{ item.host }}
-    login_host={{ item.login_host }}
-    login_user={{ item.login_user }}
-    login_password={{ item.login_password }}
-    append_privs=yes
-  with_items: database_users
+  mysql_user:
+    name: "{{ item.name }}"
+    priv: "{{ '/'.join(item.privileges) }}"
+    password: "{{ item.password }}"
+    host: "{{ item.host }}"
+    login_host: "{{ item.login_host }}"
+    login_user: "{{ item.login_user }}"
+    login_password: "{{ item.login_password }}"
+    append_privs: yes
+  with_items: "{{ database_users }}"
  tags:
    - users
@@ -41,4 +41,4 @@
      roles: "{{ item.roles }}"
      state: present
      replica_set: "{{ repl_set }}"
-  with_items: MONGO_USERS
+  with_items: "{{ MONGO_USERS }}"
@@ -21,7 +21,14 @@
    dns_zone: sandbox.edx.org
    name_tag: sandbox-temp
    elb: false
-    vpc_subnet_id: subnet-cd867aba
+    ec2_vpc_subnet_id: subnet-cd867aba
+    instance_userdata: |
+      #!/bin/bash
+      set -x
+      set -e
+      export RUN_ANSIBLE=false;
+      wget https://raw.githubusercontent.com/edx/configuration/{{ configuration_version }}/util/install/ansible-bootstrap.sh -O - | bash;
+    launch_wait_time: 5
  roles:
    - role: launch_ec2
      keypair: "{{ keypair }}"
@@ -34,23 +41,27 @@
      dns_name: "{{ dns_name }}"
      dns_zone: "{{ dns_zone }}"
      zone: "{{ zone }}"
-      vpc_subnet_id: "{{ vpc_subnet_id }}"
+      vpc_subnet_id: "{{ ec2_vpc_subnet_id }}"
      assign_public_ip: yes
      terminate_instance: true
      instance_profile_name: sandbox
+      user_data: "{{ instance_userdata }}"
+      launch_ec2_wait_time: "{{ launch_wait_time }}"
- name: Configure instance(s)
  hosts: launched
  become: True
-  gather_facts: True
+  gather_facts: False
  vars:
-    elb: false
+    elb: False
  pre_tasks:
    - name: Wait for cloud-init to finish
-      wait_for: >
-        path=/var/log/cloud-init.log
-        timeout=15
-        search_regex="final-message"
+      wait_for:
+        path: /var/log/cloud-init.log
+        timeout: 15
+        search_regex: "final-message"
+    - name: gather_facts
+      setup: ""
  vars_files:
    - roles/edxapp/defaults/main.yml
    - roles/xqueue/defaults/main.yml
---
- name: Build service RDS instances
hosts: all
connection: local
# Needed for timestamps
gather_facts: True
roles:
- edx_service_rds
---
# Sample command: ansible-playbook -c local -i localhost, edx_vpc.yml -e@/Users/feanil/src/edx-secure/cloud_migrations/vpcs/test.yml -vvv
- name: Create a simple empty vpc
hosts: all
connection: local
gather_facts: False
vars:
vpc_state: present
roles:
- edx_vpc
@@ -8,9 +8,9 @@
    - edxapp
  tasks:
    - name: migrate lms
-      shell: >
-        chdir={{ edxapp_code_dir }}
-        python manage.py lms migrate --database {{ item }} --noinput {{ db_dry_run }} --settings=aws
+      shell: "python manage.py lms migrate --database {{ item }} --noinput {{ db_dry_run }} --settings=aws"
+      args:
+        chdir: "{{ edxapp_code_dir }}"
      environment:
        DB_MIGRATION_USER: "{{ COMMON_MYSQL_MIGRATE_USER }}"
        DB_MIGRATION_PASS: "{{ COMMON_MYSQL_MIGRATE_PASS }}"
@@ -21,9 +21,9 @@
      tags:
        - always
    - name: migrate cms
-      shell: >
-        chdir={{ edxapp_code_dir }}
-        python manage.py cms migrate --database {{ item }} --noinput {{ db_dry_run }} --settings=aws
+      shell: "python manage.py cms migrate --database {{ item }} --noinput {{ db_dry_run }} --settings=aws"
+      args:
+        chdir: "{{ edxapp_code_dir }}"
      environment:
        DB_MIGRATION_USER: "{{ COMMON_MYSQL_MIGRATE_USER }}"
        DB_MIGRATION_PASS: "{{ COMMON_MYSQL_MIGRATE_PASS }}"
@@ -12,7 +12,8 @@
  pre_tasks:
    - action: ec2_facts
      when: elb_pre_post
-    - debug: var="{{ ansible_ec2_instance_id }}"
+    - debug:
+        var: ansible_ec2_instance_id
      when: elb_pre_post
    - name: Instance De-register
      local_action: ec2_elb
@@ -29,16 +30,16 @@
    - oraclejdk
    - elasticsearch
  post_tasks:
-    - debug: var="{{ ansible_ec2_instance_id }}"
+    - debug:
+        var: ansible_ec2_instance_id
      when: elb_pre_post
    - name: Register instance in the elb
      local_action: ec2_elb
      args:
        instance_id: "{{ ansible_ec2_instance_id }}"
-        ec2_elbs: "{{ item }}"
+        ec2_elbs: "{{ ec2_elbs }}"
        region: us-east-1
        state: present
        wait_timeout: 60
-      with_items: ec2_elbs
      become: False
      when: elb_pre_post
@@ -14,11 +14,11 @@
    - name: stop certs service
      service: name="certificates" state="stopped"
    - name: checkout code
-      git_2_0_1: >
-        repo="{{ repo_url }}"
-        dest="{{ repo_path }}"
-        version="{{ certificates_version }}"
-        accept_hostkey=yes
+      git:
+        repo: "{{ repo_url }}"
+        dest: "{{ repo_path }}"
+        version: "{{ certificates_version }}"
+        accept_hostkey: yes
      environment:
        GIT_SSH: "{{ git_ssh_script }}"
    - name: install requirements
@@ -29,11 +29,11 @@
    # Need to do this because the www-data user is not properly setup
    # and can't run ssh.
    - name: change owner to www-data
-      file: >
-        path="{{ repo_path }}"
-        owner="www-data"
-        group="www-data"
-        recurse=yes
-        state="directory"
+      file:
+        path: "{{ repo_path }}"
+        owner: "www-data"
+        group: "www-data"
+        recurse: yes
+        state: "directory"
    - name: start certs service
      service: name="certificates" state="started"
@@ -79,6 +79,8 @@
    manage_path: /edx/bin/manage.edxapp
    ignore_user_creation_errors: no
    deployment_settings: "{{ EDXAPP_SETTINGS | default('aws') }}"
+  vars_files:
+    - roles/common_vars/defaults/main.yml
  tasks:
    - name: Manage groups
      shell: >
@@ -86,7 +88,9 @@
        manage_group {{ item.name | quote }}
        {% if item.get('permissions', []) | length %}--permissions {{ item.permissions | default([]) | map('quote') | join(' ') }}{% endif %}
        {% if item.get('remove') %}--remove{% endif %}
-      with_items: django_groups
+      with_items: "{{ django_groups }}"
+      become: true
+      become_user: "{{ common_web_user }}"
    - name: Manage users
      shell: >
@@ -98,6 +102,8 @@
        {% if item.get('staff') %}--staff{% endif %}
        {% if item.get('unusable_password') %}--unusable-password{% endif %}
        {% if item.get('initial_password_hash') %}--initial-password-hash {{ item.initial_password_hash | quote }}{% endif %}
-      with_items: django_users
+      with_items: "{{ django_users }}"
      register: manage_users_result
      failed_when: (manage_users_result | failed) and not (ignore_user_creation_errors | bool)
+      become: true
+      become_user: "{{ common_web_user }}"
@@ -72,7 +72,7 @@
    install_recommends: yes
    force: yes
    update_cache: yes
-  with_items: mongodb_debian_pkgs
+  with_items: "{{ mongodb_debian_pkgs }}"
- name: wait for mongo server to start
  wait_for:
    port: 27017
@@ -48,7 +48,7 @@
    install_recommends: yes
    force: yes
    update_cache: yes
-  with_items: mongodb_debian_pkgs
+  with_items: "{{ mongodb_debian_pkgs }}"
- name: wait for mongo server to start
  wait_for:
    port: 27017
@@ -9,5 +9,6 @@
    - "roles/ecommerce/defaults/main.yml"
    - "roles/programs/defaults/main.yml"
    - "roles/credentials/defaults/main.yml"
+    - "roles/discovery/defaults/main.yml"
  roles:
    - oauth_client_setup
@@ -46,9 +46,7 @@
        dest: "{{ xblock_config_temp_directory.stdout }}/{{ file | basename }}"
      register: xblock_config_file
    - name: Manage xblock configurations
-      shell: >
-        {{ python_path }} {{ manage_path }} lms --settings=aws
-        populate_model -f {{ xblock_config_file.dest | quote }} -u {{ user }}
+      shell: "{{ python_path }} {{ manage_path }} lms --settings=aws populate_model -f {{ xblock_config_file.dest | quote }} -u {{ user }}"
      register: command_result
      changed_when: "'Import complete, 0 new entries created' not in command_result.stdout"
    - debug: msg="{{ command_result.stdout }}"
@@ -17,7 +17,8 @@
  pre_tasks:
    - action: ec2_facts
      when: elb_pre_post
-    - debug: var="{{ ansible_ec2_instance_id }}"
+    - debug:
+        var: ansible_ec2_instance_id
      when: elb_pre_post
    - name: Instance De-register
      local_action: ec2_elb
@@ -32,16 +33,16 @@
    - aws
    - rabbitmq
  post_tasks:
-    - debug: var="{{ ansible_ec2_instance_id }}"
+    - debug:
+        var: ansible_ec2_instance_id
      when: elb_pre_post
    - name: Register instance in the elb
      local_action: ec2_elb
      args:
        instance_id: "{{ ansible_ec2_instance_id }}"
-        ec2_elbs: "{{ item }}"
+        ec2_elbs: "{{ ec2_elbs }}"
        region: us-east-1
        state: present
        wait_timeout: 60
-      with_items: ec2_elbs
      become: False
      when: elb_pre_post
@@ -17,22 +17,21 @@
      register: mktemp
    # This command will fail if this returns zero lines which will prevent
    # the last key from being removed
-    - shell: >
-        grep -Fv '{{ ubuntu_public_keys[public_key] }}' {{ keyfile }} > {{ mktemp.stdout }}
-    - shell: >
-        while read line; do ssh-keygen -lf /dev/stdin <<<$line; done <{{ mktemp.stdout }}
-        executable=/bin/bash
+    - shell: "grep -Fv '{{ ubuntu_public_keys[public_key] }}' {{ keyfile }} > {{ mktemp.stdout }}"
+    - shell: "while read line; do ssh-keygen -lf /dev/stdin <<<$line; done <{{ mktemp.stdout }}"
+      args:
+        executable: /bin/bash
      register: keycheck
    - fail: msg="public key check failed!"
      when: keycheck.stderr != ""
    - command: cp {{ mktemp.stdout }} {{ keyfile }}
-    - file: >
-        path={{ keyfile }}
-        owner={{ owner }}
-        mode=0600
+    - file:
+        path: "{{ keyfile }}"
+        owner: "{{ owner }}"
+        mode: 0600
-    - file: >
-        path={{ mktemp.stdout }}
-        state=absent
+    - file:
+        path: "{{ mktemp.stdout }}"
+        state: absent
    - shell: wc -l < {{ keyfile }}
      register: line_count
    - fail: msg="There should only be one line in ubuntu's authorized_keys"
@@ -7,6 +7,6 @@
    - roles/supervisor/defaults/main.yml
  tasks:
    - name: supervisor | restart supervisor
-      service: >
-        name={{ supervisor_service }}
-        state=restarted
+      service:
+        name: "{{ supervisor_service }}"
+        state: restarted
@@ -12,8 +12,8 @@
  - name: Set hostname
    hostname: name={{ hostname_fqdn.split('.')[0] }}
  - name: Update /etc/hosts
-    lineinfile: >
-      dest=/etc/hosts
-      regexp="^127\.0\.1\.1"
-      line="127.0.1.1{{'\t'}}{{ hostname_fqdn.split('.')[0] }}{{'\t'}}{{ hostname_fqdn }}{{'\t'}}localhost"
-      state=present
+    lineinfile:
+      dest: /etc/hosts
+      regexp: "^127\\.0\\.1\\.1"
+      line: "127.0.1.1{{ '\t' }}{{ hostname_fqdn.split('.')[0] }}{{ '\t' }}{{ hostname_fqdn }}{{ '\t' }}localhost"
+      state: present
@@ -11,7 +11,8 @@
  pre_tasks:
    - action: ec2_facts
      when: elb_pre_post
-    - debug: var="{{ ansible_ec2_instance_id }}"
+    - debug:
+        var: "{{ ansible_ec2_instance_id }}"
      when: elb_pre_post
    - name: Instance De-register
      local_action: ec2_elb
@@ -25,16 +26,16 @@
  tasks:
    - shell: echo "test"
  post_tasks:
-    - debug: var="{{ ansible_ec2_instance_id }}"
+    - debug:
+        var: "{{ ansible_ec2_instance_id }}"
      when: elb_pre_post
    - name: Register instance in the elb
      local_action: ec2_elb
      args:
        instance_id: "{{ ansible_ec2_instance_id }}"
-        ec2_elbs: "{{ item }}"
+        ec2_elbs: "{{ ec2_elbs }}"
        region: us-east-1
        state: present
        wait_timeout: 60
-      with_items: ec2_elbs
      become: False
      when: elb_pre_post
@@ -14,7 +14,8 @@
  pre_tasks:
    - action: ec2_facts
      when: elb_pre_post
-    - debug: var="{{ ansible_ec2_instance_id }}"
+    - debug:
+        var: "{{ ansible_ec2_instance_id }}"
      when: elb_pre_post
    - name: Instance De-register
      local_action: ec2_elb
@@ -38,16 +39,16 @@
    - role: newrelic
      when: COMMON_ENABLE_NEWRELIC
  post_tasks:
-    - debug: var="{{ ansible_ec2_instance_id }}"
+    - debug:
+        var: "{{ ansible_ec2_instance_id }}"
      when: elb_pre_post
    - name: Register instance in the elb
      local_action: ec2_elb
      args:
        instance_id: "{{ ansible_ec2_instance_id }}"
-        ec2_elbs: "{{ item }}"
+        ec2_elbs: "{{ ec2_elbs }}"
        region: us-east-1
        state: present
        wait_timeout: 60
-      with_items: ec2_elbs
      become: False
      when: elb_pre_post
@@ -40,8 +40,6 @@
    - role: mongo
      when: "'localhost' in EDXAPP_MONGO_HOSTS"
    - { role: 'rabbitmq', rabbitmq_ip: '127.0.0.1' }
-    - role: aws
-      when: EDXAPP_SETTINGS == 'aws'
    - { role: 'edxapp', celery_worker: True }
    - edxapp
    - role: ecommerce
@@ -96,22 +96,10 @@ def main():
            aws_secret_key=dict(aliases=['ec2_secret_key', 'secret_key'],
                                no_log=True),
            aws_access_key=dict(aliases=['ec2_access_key', 'access_key']),
-            tags=dict(default=None),
+            tags=dict(default=None, type='dict'),
        )
    )
-    tags_param = module.params.get('tags')
-    tags = {}
-    if isinstance(tags_param, list):
-        for item in module.params.get('tags'):
-            for k,v in item.iteritems():
-                tags[k] = v
-    elif isinstance(tags_param, dict):
-        tags = tags_param
-    else:
-        module.fail_json(msg="Invalid format for tags")
    aws_secret_key = module.params.get('aws_secret_key')
    aws_access_key = module.params.get('aws_access_key')
    region = module.params.get('region')
@@ -137,7 +125,7 @@ def main():
    instances = []
    instance_ids = []
    for res in ec2.get_all_instances(filters={'tag:' + tag: value
-                                              for tag, value in tags.iteritems()}):
+                                              for tag, value in module.params.get('tags').iteritems()}):
        for inst in res.instances:
            if inst.state == "running":
                instances.append({k: v for k, v in inst.__dict__.iteritems()
@@ -66,7 +66,7 @@ tasks:
- name: tag my launched instances
  local_action: ec2_tag resource={{ item.id }} region=eu-west-1 state=present
-  with_items: ec2.instances
+  with_items: "{{ ec2.instances }}"
  args:
    tags:
      Name: webserver
@@ -76,7 +76,7 @@ tasks:
tasks:
- name: tag my instance
  local_action: ec2_ntag resource={{ item.id }} region=us-east-1 state=present
-  with_items: ec2.instances
+  with_items: "{{ ec2.instances }}"
  args:
    tags:
      - Name: "{{ some_variable }}"
@@ -101,7 +101,7 @@ def main():
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
        resource = dict(required=True),
-        tags = dict(),
+        tags = dict(required=False, type='list'),
        state = dict(default='present', choices=['present', 'absent', 'list']),
        )
    )
@@ -24,7 +24,7 @@
  apt:
    name: "{{ item }}"
    state: present
-  with_items: ad_hoc_reporting_debian_pkgs
+  with_items: "{{ ad_hoc_reporting_debian_pkgs }}"
  tags:
    - install:system-requirements
@@ -58,7 +58,7 @@
    name: "{{ item }}"
    state: present
    extra_args: "-i {{ COMMON_PYPI_MIRROR_URL }}"
-  with_items: ad_hoc_reporting_pip_pkgs
+  with_items: "{{ ad_hoc_reporting_pip_pkgs }}"
  tags:
    - install:app-requirements
@@ -92,7 +92,7 @@
    - scripts
    - scripts:mysql
    - install:code
-  with_items: AD_HOC_REPORTING_REPLICA_DB_HOSTS
+  with_items: "{{ AD_HOC_REPORTING_REPLICA_DB_HOSTS }}"
# These templates rely on there being a global
# read_only mongo user, you must override the default
@@ -27,3 +27,6 @@
##
# Defaults for role add_user
#
+#
+#
+dirs: []
@@ -65,8 +65,7 @@
    owner: "{{ item.owner }}"
    group: "{{ item.group }}"
    mode: "{{ item.mode | default('0755') }}"
-  with_items: dirs
-  when: dirs is defined
+  with_items: "{{ dirs }}"
  tags:
    - install
    - install:base
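The `dirs: []` default above replaces the old `when: dirs is defined` guard: with an empty list as the role default, the loop just iterates zero times when a caller supplies nothing. A self-contained sketch of the pattern (role and variable names are illustrative, not from this repo):

```yaml
# defaults/main.yml (hypothetical role)
extra_dirs: []

# tasks/main.yml - no 'is defined' guard needed; an empty list skips the task cleanly
- name: create any extra directories the caller asked for
  file:
    path: "{{ item.path }}"
    state: directory
    mode: "{{ item.mode | default('0755') }}"
  with_items: "{{ extra_dirs }}"
```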
@@ -12,7 +12,7 @@
  notify: restart alton
- name: Checkout the code
-  git_2_0_1:
+  git:
    dest: "{{ alton_code_dir }}"
    repo: "{{ alton_source_repo }}"
    version: "{{ alton_version }}"
...@@ -33,42 +33,40 @@ ...@@ -33,42 +33,40 @@
# #
- name: setup the analytics_api env file - name: setup the analytics_api env file
template: > template:
src="edx/app/analytics_api/analytics_api_env.j2" src: "edx/app/analytics_api/analytics_api_env.j2"
dest="{{ analytics_api_home }}/analytics_api_env" dest: "{{ analytics_api_home }}/analytics_api_env"
owner={{ analytics_api_user }} owner: "{{ analytics_api_user }}"
group={{ analytics_api_user }} group: "{{ analytics_api_user }}"
mode=0644 mode: 0644
tags: tags:
- install - install
- install:configuration - install:configuration
- name: "add gunicorn configuration file" - name: "add gunicorn configuration file"
template: > template:
src=edx/app/analytics_api/analytics_api_gunicorn.py.j2 src: edx/app/analytics_api/analytics_api_gunicorn.py.j2
dest={{ analytics_api_home }}/analytics_api_gunicorn.py dest: "{{ analytics_api_home }}/analytics_api_gunicorn.py"
become_user: "{{ analytics_api_user }}" become_user: "{{ analytics_api_user }}"
tags: tags:
- install - install
- install:configuration - install:configuration
- name: install application requirements - name: install application requirements
pip: > pip:
requirements="{{ analytics_api_requirements_base }}/{{ item }}" requirements: "{{ analytics_api_requirements_base }}/{{ item }}"
virtualenv="{{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}" virtualenv: "{{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}"
state=present state: present
become_user: "{{ analytics_api_user }}" become_user: "{{ analytics_api_user }}"
with_items: analytics_api_requirements with_items: "{{ analytics_api_requirements }}"
tags: tags:
- install - install
- install:app-requirements - install:app-requirements
- name: migrate - name: migrate
shell: > shell: "DB_MIGRATION_USER='{{ COMMON_MYSQL_MIGRATE_USER }}' DB_MIGRATION_PASS='{{ COMMON_MYSQL_MIGRATE_PASS }}' {{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}/bin/python ./manage.py migrate --noinput"
chdir={{ analytics_api_code_dir }} args:
DB_MIGRATION_USER='{{ COMMON_MYSQL_MIGRATE_USER }}' chdir: "{{ analytics_api_code_dir }}"
DB_MIGRATION_PASS='{{ COMMON_MYSQL_MIGRATE_PASS }}'
{{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}/bin/python ./manage.py migrate --noinput
become_user: "{{ analytics_api_user }}" become_user: "{{ analytics_api_user }}"
environment: "{{ analytics_api_environment }}" environment: "{{ analytics_api_environment }}"
when: migrate_db is defined and migrate_db|lower == "yes" when: migrate_db is defined and migrate_db|lower == "yes"
...@@ -77,9 +75,9 @@ ...@@ -77,9 +75,9 @@
- migrate:db - migrate:db
- name: run collectstatic - name: run collectstatic
shell: > shell: "{{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}/bin/python manage.py collectstatic --noinput"
chdir={{ analytics_api_code_dir }} args:
{{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}/bin/python manage.py collectstatic --noinput chdir: "{{ analytics_api_code_dir }}"
become_user: "{{ analytics_api_user }}" become_user: "{{ analytics_api_user }}"
environment: "{{ analytics_api_environment }}" environment: "{{ analytics_api_environment }}"
tags: tags:
...@@ -87,40 +85,44 @@ ...@@ -87,40 +85,44 @@
- assets:gather - assets:gather
- name: create api users - name: create api users
shell: > shell: "{{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}/bin/python manage.py set_api_key {{ item.key }} {{ item.value }}"
chdir={{ analytics_api_code_dir }} args:
{{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}/bin/python manage.py set_api_key {{ item.key }} {{ item.value }} chdir: "{{ analytics_api_code_dir }}"
become_user: "{{ analytics_api_user }}" become_user: "{{ analytics_api_user }}"
environment: "{{ analytics_api_environment }}" environment: "{{ analytics_api_environment }}"
with_dict: ANALYTICS_API_USERS with_dict: "{{ ANALYTICS_API_USERS }}"
tags: tags:
- manage - manage
- manage:app-users - manage:app-users
- name: write out the supervisor wrapper - name: write out the supervisor wrapper
template: > template:
src=edx/app/analytics_api/analytics_api.sh.j2 src: edx/app/analytics_api/analytics_api.sh.j2
dest={{ analytics_api_home }}/{{ analytics_api_service_name }}.sh dest: "{{ analytics_api_home }}/{{ analytics_api_service_name }}.sh"
mode=0650 owner={{ supervisor_user }} group={{ common_web_user }} mode: 0650
owner: "{{ supervisor_user }}"
group: "{{ common_web_user }}"
tags: tags:
- install - install
- install:configuration - install:configuration
- name: write supervisord config - name: write supervisord config
template: > template:
src=edx/app/supervisor/conf.d.available/analytics_api.conf.j2 src: edx/app/supervisor/conf.d.available/analytics_api.conf.j2
dest="{{ supervisor_available_dir }}/{{ analytics_api_service_name }}.conf" dest: "{{ supervisor_available_dir }}/{{ analytics_api_service_name }}.conf"
owner={{ supervisor_user }} group={{ common_web_user }} mode=0644 owner: "{{ supervisor_user }}"
group: "{{ common_web_user }}"
mode: 0644
tags: tags:
- install - install
- install:configuration - install:configuration
- name: enable supervisor script - name: enable supervisor script
file: > file:
src={{ supervisor_available_dir }}/{{ analytics_api_service_name }}.conf src: "{{ supervisor_available_dir }}/{{ analytics_api_service_name }}.conf"
dest={{ supervisor_cfg_dir }}/{{ analytics_api_service_name }}.conf dest: "{{ supervisor_cfg_dir }}/{{ analytics_api_service_name }}.conf"
state=link state: link
force=yes force: yes
when: not disable_edx_services when: not disable_edx_services
tags: tags:
- install - install
...@@ -134,10 +136,10 @@ ...@@ -134,10 +136,10 @@
- manage:start - manage:start
- name: create symlinks from the venv bin dir - name: create symlinks from the venv bin dir
file: > file:
src="{{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}/bin/{{ item }}" src: "{{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}/bin/{{ item }}"
dest="{{ COMMON_BIN_DIR }}/{{ item.split('.')[0] }}.analytics_api" dest: "{{ COMMON_BIN_DIR }}/{{ item.split('.')[0] }}.analytics_api"
state=link state: link
with_items: with_items:
- python - python
- pip - pip
...@@ -147,10 +149,10 @@ ...@@ -147,10 +149,10 @@
- install:base - install:base
- name: create symlinks from the repo dir - name: create symlinks from the repo dir
file: > file:
src="{{ analytics_api_code_dir }}/{{ item }}" src: "{{ analytics_api_code_dir }}/{{ item }}"
dest="{{ COMMON_BIN_DIR }}/{{ item.split('.')[0] }}.analytics_api" dest: "{{ COMMON_BIN_DIR }}/{{ item.split('.')[0] }}.analytics_api"
state=link state: link
with_items: with_items:
- manage.py - manage.py
tags: tags:
...@@ -158,11 +160,11 @@ ...@@ -158,11 +160,11 @@
- install:base - install:base
- name: restart analytics_api - name: restart analytics_api
supervisorctl: > supervisorctl:
state=restarted state: restarted
supervisorctl_path={{ supervisor_ctl }} supervisorctl_path: "{{ supervisor_ctl }}"
config={{ supervisor_cfg }} config: "{{ supervisor_cfg }}"
name={{ analytics_api_service_name }} name: "{{ analytics_api_service_name }}"
when: not disable_edx_services when: not disable_edx_services
become_user: "{{ supervisor_service_user }}" become_user: "{{ supervisor_service_user }}"
tags: tags:
......
@@ -11,12 +11,17 @@
# Defaults for role analytics_pipeline
#
+ANALYTICS_PIPELINE_OUTPUT_DATABASE_USER: pipeline001
+ANALYTICS_PIPELINE_OUTPUT_DATABASE_PASSWORD: password
+ANALYTICS_PIPELINE_OUTPUT_DATABASE_HOST: localhost
+ANALYTICS_PIPELINE_OUTPUT_DATABASE_PORT: 3306
ANALYTICS_PIPELINE_OUTPUT_DATABASE_NAME: "{{ ANALYTICS_API_REPORTS_DB_NAME }}"
ANALYTICS_PIPELINE_OUTPUT_DATABASE:
-  username: pipeline001
-  password: password
-  host: localhost
-  port: 3306
+  username: "{{ ANALYTICS_PIPELINE_OUTPUT_DATABASE_USER }}"
+  password: "{{ ANALYTICS_PIPELINE_OUTPUT_DATABASE_PASSWORD }}"
+  host: "{{ ANALYTICS_PIPELINE_OUTPUT_DATABASE_HOST }}"
+  port: "{{ ANALYTICS_PIPELINE_OUTPUT_DATABASE_PORT }}"
ANALYTICS_PIPELINE_INPUT_DATABASE:
  username: "{{ COMMON_MYSQL_READ_ONLY_USER }}"
@@ -89,7 +89,7 @@
    - install:configuration
- name: Util library source checked out
-  git_2_0_1:
+  git:
    repo: "{{ analytics_pipeline_util_library.repo }}"
    dest: "{{ analytics_pipeline_util_library.path }}"
    version: "{{ analytics_pipeline_util_library.version }}"
...@@ -3,13 +3,13 @@ ...@@ -3,13 +3,13 @@
# #
# Tasks for role {{ role_name }} # Tasks for role {{ role_name }}
# #
# Overview: # Overview:
# #
# #
# Dependencies: # Dependencies:
# #
# #
# Example play: # Example play:
# #
# #
...@@ -149,7 +149,7 @@ ...@@ -149,7 +149,7 @@
tags: tags:
- install - install
- install:app-requirements - install:app-requirements
- name: run collectstatic - name: run collectstatic
command: make static command: make static
args: args:
...@@ -161,7 +161,7 @@ ...@@ -161,7 +161,7 @@
- assets:gather - assets:gather
- name: restart the application - name: restart the application
supervisorctl: supervisorctl:
state: restarted state: restarted
supervisorctl_path: "{{ '{{' }} supervisor_ctl }}" supervisorctl_path: "{{ '{{' }} supervisor_ctl }}"
config: "{{ '{{' }} supervisor_cfg }}" config: "{{ '{{' }} supervisor_cfg }}"
...@@ -173,20 +173,24 @@ ...@@ -173,20 +173,24 @@
- manage:start - manage:start
- name: Copying nginx configs for {{ role_name }} - name: Copying nginx configs for {{ role_name }}
template: > template:
src=edx/app/nginx/sites-available/{{ role_name }}.j2 src: "edx/app/nginx/sites-available/{{ role_name }}.j2"
dest={{ '{{' }} nginx_sites_available_dir }}/{{ role_name }} dest: "{{ '{{' }} nginx_sites_available_dir }}/{{ role_name }}"
owner=root group={{ '{{' }} common_web_user }} mode=0640 owner: root
group: "{{ '{{' }} common_web_user }}"
mode: 0640
notify: reload nginx notify: reload nginx
tags: tags:
- install - install
- install:vhosts - install:vhosts
- name: Creating nginx config links for {{ role_name }} - name: Creating nginx config links for {{ role_name }}
file: > file:
src={{ '{{' }} nginx_sites_available_dir }}/{{ role_name }} src: "{{ '{{' }} nginx_sites_available_dir }}/{{ role_name }}"
dest={{ '{{' }} nginx_sites_enabled_dir }}/{{ role_name }} dest: "{{ '{{' }} nginx_sites_enabled_dir }}/{{ role_name }}"
state=link owner=root group=root state: link
owner: root
group: root
notify: reload nginx notify: reload nginx
tags: tags:
- install - install
......
...@@ -23,41 +23,41 @@ ...@@ -23,41 +23,41 @@
- name: install antivirus system packages - name: install antivirus system packages
apt: pkg={{ item }} install_recommends=yes state=present apt: pkg={{ item }} install_recommends=yes state=present
with_items: antivirus_debian_pkgs with_items: "{{ antivirus_debian_pkgs }}"
- name: create antivirus scanner user - name: create antivirus scanner user
user: > user:
name="{{ antivirus_user }}" name: "{{ antivirus_user }}"
home="{{ antivirus_app_dir }}" home: "{{ antivirus_app_dir }}"
createhome=no createhome: no
shell=/bin/false shell: /bin/false
- name: create antivirus app and data dirs - name: create antivirus app and data dirs
file: > file:
path="{{ item }}" path: "{{ item }}"
state=directory state: directory
owner="{{ antivirus_user }}" owner: "{{ antivirus_user }}"
group="{{ antivirus_user }}" group: "{{ antivirus_user }}"
with_items: with_items:
- "{{ antivirus_app_dir }}" - "{{ antivirus_app_dir }}"
- "{{ antivirus_app_dir }}/data" - "{{ antivirus_app_dir }}/data"
- name: install antivirus s3 scanner script - name: install antivirus s3 scanner script
template: > template:
src=s3_bucket_virus_scan.sh.j2 src: s3_bucket_virus_scan.sh.j2
dest={{ antivirus_app_dir }}/s3_bucket_virus_scan.sh dest: "{{ antivirus_app_dir }}/s3_bucket_virus_scan.sh"
mode=0555 mode: "0555"
owner={{ antivirus_user }} owner: "{{ antivirus_user }}"
group={{ antivirus_user }} group: "{{ antivirus_user }}"
- name: install antivirus s3 scanner cronjob - name: install antivirus s3 scanner cronjob
cron: > cron:
name="antivirus-{{ item }}" name: "antivirus-{{ item }}"
job="{{ antivirus_app_dir }}/s3_bucket_virus_scan.sh -b '{{ item }}' -m '{{ ANTIVIRUS_MAILTO }}' -f '{{ ANTIVIRUS_MAILFROM }}'" job: "{{ antivirus_app_dir }}/s3_bucket_virus_scan.sh -b '{{ item }}' -m '{{ ANTIVIRUS_MAILTO }}' -f '{{ ANTIVIRUS_MAILFROM }}'"
backup=yes backup: yes
cron_file=antivirus-{{ item }} cron_file: "antivirus-{{ item }}"
user={{ antivirus_user }} user: "{{ antivirus_user }}"
hour="*" hour: "*"
minute="0" minute: "0"
day="*" day: "*"
with_items: ANTIVIRUS_BUCKETS with_items: "{{ ANTIVIRUS_BUCKETS }}"
---
#
# edX Configuration
#
# github: https://github.com/edx/configuration
# wiki: https://openedx.atlassian.net/wiki/display/OpenOPS
# code style: https://openedx.atlassian.net/wiki/display/OpenOPS/Ansible+Code+Conventions
# license: https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
##
# Defaults for role asqatasun
#
ASQATASUN_LOCALE: 'en_US.UTF-8'
ASQATASUN_DATABASE_NAME: 'asqatasun'
ASQATASUN_DATABASE_USER: 'asqatasun'
ASQATASUN_DATABASE_PASSWORD: 'changeme'
ASQATASUN_DATABASE_HOST: 'localhost'
ASQATASUN_DATABASE_ENCODING: 'utf8'
ASQATASUN_DATABASE_COLLATION: 'utf8_general_ci'
ASQATASUN_URL: 'http://localhost:8080/asqatasun/'
ASQATASUN_ADMIN_EMAIL: 'admin@example.com'
ASQATASUN_ADMIN_PASSWORD: 'changeme'
asqatasun_debian_pkgs:
- wget
- bzip2
- openjdk-7-jre
- unzip
- mysql-server
- libmysql-java
- python-mysqldb
- libtomcat7-java
- tomcat7
- libspring-instrument-java
- xvfb
- libdbus-glib-1-2
- mailutils
- postfix
locale: "{{ ASQATASUN_LOCALE }}"
asqatasun_download_link: "http://download.asqatasun.org/asqatasun-latest.tar.gz"
# Asqatasun version that you want to install, get the full list of releases
#by clicking in the release tab of the github main interface.
asqatasun_version: "asqatasun-4.0.0-rc.1"
# Go this link to find your desired ESR Firefox
# For 32-bit architecture
# http://download-origin.cdn.mozilla.net/pub/firefox/releases/31.4.0esr/linux-i686/
# For 64-bit architecture
# http://download-origin.cdn.mozilla.net/pub/firefox/releases/31.4.0esr/linux-x86_64/
# Default is en-US in our example
fixfox_esr_link: "http://download-origin.cdn.mozilla.net/pub/firefox/releases/31.4.0esr/linux-x86_64/en-US/firefox-31.4.0esr.tar.bz2"
# MySQL variables for Asqatasun
default_character_set: "utf8"
collation_server: "utf8_general_ci"
init_connect: "SET NAMES utf8"
character_set_server: "utf8"
mysql_max_allowed_packet: "64M"
asqatasun_parameters:
db_name: "{{ ASQATASUN_DATABASE_NAME }}"
db_user: "{{ ASQATASUN_DATABASE_USER }}"
db_password: "{{ ASQATASUN_DATABASE_PASSWORD }}"
db_host: "{{ ASQATASUN_DATABASE_HOST }}"
db_encoding: "{{ ASQATASUN_DATABASE_ENCODING }}"
db_collation: "{{ ASQATASUN_DATABASE_COLLATION }}"
url: "{{ ASQATASUN_URL }}"
admin_email: "{{ ASQATASUN_ADMIN_EMAIL }}"
admin_passwd: "{{ ASQATASUN_ADMIN_PASSWORD }}"
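Several of these defaults ('changeme' passwords, an example admin address) are placeholders that should not reach a real deployment; override them from a private vars file. A minimal sketch with hypothetical values:

```yaml
# private-vars.yml (hypothetical) - replace the placeholder Asqatasun secrets
ASQATASUN_DATABASE_PASSWORD: "{{ vault_asqatasun_db_password }}"
ASQATASUN_ADMIN_EMAIL: 'a11y-admin@example.org'
ASQATASUN_ADMIN_PASSWORD: "{{ vault_asqatasun_admin_password }}"
ASQATASUN_URL: 'http://asqatasun.example.org:8080/asqatasun/'
```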
---
#
# edX Configuration
#
# github: https://github.com/edx/configuration
# wiki: https://openedx.atlassian.net/wiki/display/OpenOPS
# code style: https://openedx.atlassian.net/wiki/display/OpenOPS/Ansible+Code+Conventions
# license: https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
# Tasks for role asqatasun
#
# Overview:
#
# Install Asqatasun, an open-source website analyzer
# used for web accessibility (a11y) and Search Engine Optimization (SEO) audits.
#
# ansible-playbook -i 'asqatasun.example.com,' ./asqatasun.yml -e@/ansible/vars/deployment.yml -e@/ansible/vars/env-deployment.yml
#
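#
# As a minimal sketch, the -e@ override files in the example play above could
# contain something like the following (the filenames and values here are
# illustrative assumptions, not part of this repo):
#
# ASQATASUN_DATABASE_PASSWORD: 'a-strong-db-password'
# ASQATASUN_ADMIN_EMAIL: 'a11y-admin@example.com'
# ASQATASUN_ADMIN_PASSWORD: 'a-strong-admin-password'
# ASQATASUN_URL: 'http://asqatasun.example.com:8080/asqatasun/'
#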
- name: Set Postfix options
debconf:
    name: postfix
question: "{{ item.question }}"
value: "{{ item.value }} "
vtype: "string"
with_items:
- { question: "postfix/mailname", value: " " }
- { question: "postfix/main_mailer_type", value: "Satellite system" }
tags:
- install
- install:configuration
- name: Update locale Setting
locale_gen:
name: "{{ locale }}"
state: present
register: set_locale
tags:
- install
- install:base
- name: Reconfigure locale
command: dpkg-reconfigure locales
when: set_locale.changed
- name: Install the Asqatasun Prerequisites
apt:
name: "{{ item }}"
update_cache: yes
state: installed
  with_items: "{{ asqatasun_debian_pkgs }}"
tags:
- install
- install:base
- name: Copy the asqatasun.cnf template to /etc/mysql/conf.d
template:
dest: /etc/mysql/conf.d/asqatasun.cnf
src: etc/mysql/conf.d/asqatasun.cnf.j2
owner: root
group: root
when: "'{{ asqatasun_parameters.db_host }}' == 'localhost'"
register: my_cnf
tags:
- install
- install:configuration
- name: Restart MySQL
service:
name: mysql
state: restarted
when: my_cnf.changed
- name: Create a soft link for tomcat jar and mysql connector
file:
dest: "{{ item.dest }}"
src: "{{ item.src }}"
state: link
with_items:
- { src: '/usr/share/java/spring3-instrument-tomcat.jar', dest: '/usr/share/tomcat7/lib/spring3-instrument-tomcat.jar' }
- { src: '/usr/share/java/mysql-connector-java.jar', dest: '/usr/share/tomcat7/lib/mysql-connector-java.jar'}
tags:
- install
- install:configuration
- name: Copy the xvfb template to /etc/init.d
template:
dest: /etc/init.d/xvfb
src: etc/init.d/xvfb.j2
owner: root
group: root
    mode: "0755"
register: xvfb
tags:
- install
- install:config
- name: Restart xvfb
service:
name: xvfb
pattern: /etc/init.d/xvfb
state: restarted
enabled: yes
when: xvfb.changed
tags:
- install
- install:config
- name: Download the latest ESR Firefox
get_url:
url: "{{ fixfox_esr_link }}"
dest: "/tmp/{{ fixfox_esr_link | basename }}"
tags:
- install
- install:base
- name: Unpack the downloaded Firefox tarball
unarchive:
src: "/tmp/{{ fixfox_esr_link | basename }}"
dest: /opt
copy: no
tags:
- install
- install:base
- name: Download the latest Asqatasun tarball
get_url:
url: "{{ asqatasun_download_link }}"
dest: "/tmp/{{ asqatasun_download_link | basename }}"
tags:
- install
- install:base
- name: Unzip the downloaded Asqatasun tarball
unarchive:
src: "/tmp/{{ asqatasun_download_link | basename }}"
dest: "/tmp/"
copy: no
tags:
- install
- install:base
- name: Create MySQL database for Asqatasun
mysql_db:
name: "{{ asqatasun_parameters.db_name }}"
state: present
encoding: "{{ asqatasun_parameters.db_encoding }}"
collation: "{{ asqatasun_parameters.db_collation }}"
tags:
- migrate
- migrate:db
- name: Create MySQL user for Asqatasun
mysql_user:
name: "{{ asqatasun_parameters.db_user }}"
password: "{{ asqatasun_parameters.db_password }}"
host: "{{ asqatasun_parameters.db_host }}"
priv: "{{ asqatasun_parameters.db_name }}.*:ALL"
state: present
tags:
- migrate
- migrate:db
- name: Check that asqatasun app is running
shell: >
/bin/ps aux | grep -i asqatasun
register: asqatasun_app
changed_when: no
tags:
- install
- install:base
- name: Install Asqatasun
shell: >
/bin/echo "yes" | ./install.sh --database-user "{{ asqatasun_parameters.db_user }}" \
--database-passwd "{{ asqatasun_parameters.db_password }}" \
--database-db "{{ asqatasun_parameters.db_name }}" \
--database-host "{{ asqatasun_parameters.db_host }}" \
--asqatasun-url http://localhost:8080/asqatasun/ \
--tomcat-webapps /var/lib/tomcat7/webapps/ \
--tomcat-user tomcat7 \
--asqa-admin-email "{{ asqatasun_parameters.admin_email }}" \
--asqa-admin-passwd "{{ asqatasun_parameters.admin_passwd }}" \
--firefox-esr-binary-path /opt/firefox-esr/firefox
--display-port ":99"
args:
chdir: "/tmp/{{ asqatasun_version }}.i386"
when: "asqatasun_app.stdout.find('/etc/asqatasun') == -1"
register: asqatasun_install
tags:
- install
- install:base
- name: Restart tomcat7
service:
name: tomcat7
state: restarted
when: asqatasun_install.changed
#!/bin/sh
### BEGIN INIT INFO
# Provides: xvfb
# Required-Start: $remote_fs $syslog
# Required-Stop: $remote_fs $syslog
# Default-Start: 2 3 4 5
# Default-Stop: 0 1 6
# Short-Description: XVFB - Virtual X server display
# Description: XVFB - Virtual X server display
### END INIT INFO
# Author: Matthieu Faure <mfaure@asqatasun.org>
# Do NOT "set -e"
# TODO: improve with help from /etc/init.d/skeleton
RUN_AS_USER=tomcat7
OPTS=":99 -screen 1 1024x768x24 -nolisten tcp"
XVFB_DIR=/usr/bin
PIDFILE=/var/run/xvfb
case $1 in
start)
start-stop-daemon --chuid $RUN_AS_USER -b --start --exec $XVFB_DIR/Xvfb --make-pidfile --pidfile $PIDFILE -- $OPTS &
;;
stop)
start-stop-daemon --stop --user $RUN_AS_USER --pidfile $PIDFILE
rm -f $PIDFILE
;;
restart)
if start-stop-daemon --test --stop --user $RUN_AS_USER --pidfile $PIDFILE >/dev/null; then
$0 stop
fi;
$0 start
;;
*)
echo "Usage: $0 (start|restart|stop)"
exit 1
;;
esac
exit 0
[client]
default-character-set={{ default_character_set }}
[mysql]
default-character-set={{ default_character_set }}
[mysqld]
collation-server = {{ collation_server }}
init-connect='{{ init_connect }}'
character-set-server = {{ character_set_server }}
max_allowed_packet = {{ mysql_max_allowed_packet }}
...@@ -102,8 +102,5 @@ ...@@ -102,8 +102,5 @@
file: file:
path: "{{ item.item }}" path: "{{ item.item }}"
mode: "0644" mode: "0644"
when: > when: vagrant_home_dir.stat.exists == False and ansible_distribution in common_debian_variants and item.stat.exists
vagrant_home_dir.stat.exists == false and with_items: "{{ motd_files_exist.results }}"
ansible_distribution in common_debian_variants and
item.stat.exists
with_items: motd_files_exist.results
# Install browsermob-proxy, which is used for page performance testing with bok-choy # Install browsermob-proxy, which is used for page performance testing with bok-choy
--- ---
- name: get zip file - name: get zip file
get_url: > get_url:
url={{ browsermob_proxy_url }} url: "{{ browsermob_proxy_url }}"
dest=/var/tmp/browsermob-proxy-{{ browsermob_proxy_version }}.zip dest: "/var/tmp/browsermob-proxy-{{ browsermob_proxy_version }}.zip"
register: download_browsermob_proxy register: download_browsermob_proxy
- name: unzip into /var/tmp/ - name: unzip into /var/tmp/
shell: > shell: "unzip /var/tmp/browsermob-proxy-{{ browsermob_proxy_version }}.zip"
unzip /var/tmp/browsermob-proxy-{{ browsermob_proxy_version }}.zip args:
chdir=/var/tmp chdir: "/var/tmp"
when: download_browsermob_proxy.changed when: download_browsermob_proxy.changed
- name: move to /etc/browsermob-proxy/ - name: move to /etc/browsermob-proxy/
shell: > shell: "mv /var/tmp/browsermob-proxy-{{ browsermob_proxy_version }} /etc/browsermob-proxy"
mv /var/tmp/browsermob-proxy-{{ browsermob_proxy_version }} /etc/browsermob-proxy
when: download_browsermob_proxy.changed when: download_browsermob_proxy.changed
- name: change permissions of main script - name: change permissions of main script
file: > file:
path=/etc/browsermob-proxy/bin/browsermob-proxy path: "/etc/browsermob-proxy/bin/browsermob-proxy"
mode=0755 mode: 0755
when: download_browsermob_proxy.changed when: download_browsermob_proxy.changed
- name: add wrapper script /usr/local/bin/browsermob-proxy - name: add wrapper script /usr/local/bin/browsermob-proxy
copy: > copy:
src=browsermob-proxy src: browsermob-proxy
dest=/usr/local/bin/browsermob-proxy dest: /usr/local/bin/browsermob-proxy
when: download_browsermob_proxy.changed when: download_browsermob_proxy.changed
- name: change permissions of wrapper script - name: change permissions of wrapper script
file: > file:
path=/usr/local/bin/browsermob-proxy path: /usr/local/bin/browsermob-proxy
mode=0755 mode: 0755
when: download_browsermob_proxy.changed when: download_browsermob_proxy.changed
...@@ -8,12 +8,12 @@ ...@@ -8,12 +8,12 @@
- name: download browser debian packages from S3 - name: download browser debian packages from S3
get_url: dest="/tmp/{{ item.name }}" url="{{ item.url }}" get_url: dest="/tmp/{{ item.name }}" url="{{ item.url }}"
register: download_deb register: download_deb
with_items: browser_s3_deb_pkgs with_items: "{{ browser_s3_deb_pkgs }}"
- name: install browser debian packages - name: install browser debian packages
shell: gdebi -nq /tmp/{{ item.name }} shell: gdebi -nq /tmp/{{ item.name }}
when: download_deb.changed when: download_deb.changed
with_items: browser_s3_deb_pkgs with_items: "{{ browser_s3_deb_pkgs }}"
# Because the source location has been deprecated, we need to # Because the source location has been deprecated, we need to
# ensure it does not interfere with subsequent apt commands # ensure it does not interfere with subsequent apt commands
...@@ -50,15 +50,15 @@ ...@@ -50,15 +50,15 @@
- "chromedriver.stat.mode == '0755'" - "chromedriver.stat.mode == '0755'"
- name: download PhantomJS - name: download PhantomJS
get_url: > get_url:
url={{ phantomjs_url }} url: "{{ phantomjs_url }}"
dest=/var/tmp/{{ phantomjs_tarfile }} dest: "/var/tmp/{{ phantomjs_tarfile }}"
register: download_phantom_js register: download_phantom_js
- name: unpack the PhantomJS tarfile - name: unpack the PhantomJS tarfile
shell: > shell: "tar -xjf /var/tmp/{{ phantomjs_tarfile }}"
tar -xjf /var/tmp/{{ phantomjs_tarfile }} args:
chdir=/var/tmp chdir: "/var/tmp"
when: download_phantom_js.changed when: download_phantom_js.changed
- name: move PhantomJS binary to /usr/local - name: move PhantomJS binary to /usr/local
......
...@@ -30,7 +30,7 @@ ...@@ -30,7 +30,7 @@
file: file:
path: "{{ cassandra_data_dir_prefix }}/{{ item }}" path: "{{ cassandra_data_dir_prefix }}/{{ item }}"
state: directory state: directory
with_items: cassandra_data_dirs with_items: "{{ cassandra_data_dirs }}"
- name: Mount ephemeral disks - name: Mount ephemeral disks
mount: mount:
...@@ -49,7 +49,7 @@ ...@@ -49,7 +49,7 @@
path: "{{ cassandra_data_dir_prefix }}/{{ item }}" path: "{{ cassandra_data_dir_prefix }}/{{ item }}"
owner: "{{ cassandra_user }}" owner: "{{ cassandra_user }}"
group: "{{ cassandra_group }}" group: "{{ cassandra_group }}"
with_items: cassandra_data_dirs with_items: "{{ cassandra_data_dirs }}"
- name: Add the datastax repository apt-key - name: Add the datastax repository apt-key
apt_key: apt_key:
......
...@@ -3,10 +3,12 @@ ...@@ -3,10 +3,12 @@
template: template:
src: "{{ item.src }}" src: "{{ item.src }}"
dest: "{{ certs_app_dir }}/{{ item.dest }}" dest: "{{ certs_app_dir }}/{{ item.dest }}"
owner: "{{ certs_user }}"
group: "{{ common_web_user }}"
mode: "0640"
with_items: with_items:
- { src: 'certs.env.json.j2', dest: 'env.json' } - { src: 'certs.env.json.j2', dest: 'env.json' }
- { src: 'certs.auth.json.j2', dest: 'auth.json' } - { src: 'certs.auth.json.j2', dest: 'auth.json' }
become_user: "{{ certs_user }}"
- name: Writing supervisor script for certificates - name: Writing supervisor script for certificates
template: template:
...@@ -44,7 +46,7 @@ ...@@ -44,7 +46,7 @@
when: CERTS_GIT_IDENTITY != "none" when: CERTS_GIT_IDENTITY != "none"
- name: "Checkout certificates repo into {{ certs_code_dir }}" - name: "Checkout certificates repo into {{ certs_code_dir }}"
git_2_0_1: git:
dest: "{{ certs_code_dir }}" dest: "{{ certs_code_dir }}"
repo: "{{ CERTS_REPO }}" repo: "{{ CERTS_REPO }}"
version: "{{ certs_version }}" version: "{{ certs_version }}"
...@@ -56,7 +58,7 @@ ...@@ -56,7 +58,7 @@
when: CERTS_GIT_IDENTITY != "none" when: CERTS_GIT_IDENTITY != "none"
- name: Checkout certificates repo into {{ certs_code_dir }} - name: Checkout certificates repo into {{ certs_code_dir }}
git_2_0_1: git:
dest: "{{ certs_code_dir }}" dest: "{{ certs_code_dir }}"
repo: "{{ CERTS_REPO }}" repo: "{{ CERTS_REPO }}"
version: "{{ certs_version }}" version: "{{ certs_version }}"
......
...@@ -4,3 +4,4 @@ ...@@ -4,3 +4,4 @@
# role depends. This is to allow sharing vars without creating # role depends. This is to allow sharing vars without creating
# side-effects. Any vars requred by this role should be added to # side-effects. Any vars requred by this role should be added to
# common_vars/defaults/main.yml # common_vars/defaults/main.yml
#
...@@ -3,7 +3,7 @@ ...@@ -3,7 +3,7 @@
fail: fail:
msg: "Configuration Sources Checking (COMMON_EXTRA_CONFIGURATION_SOURCES_CHECKING) is enabled, you must define {{ item }}" msg: "Configuration Sources Checking (COMMON_EXTRA_CONFIGURATION_SOURCES_CHECKING) is enabled, you must define {{ item }}"
when: COMMON_EXTRA_CONFIGURATION_SOURCES_CHECKING and ({{ item }} is not defined or {{ item }} != True) when: COMMON_EXTRA_CONFIGURATION_SOURCES_CHECKING and ({{ item }} is not defined or {{ item }} != True)
with_items: COMMON_EXTRA_CONFIGURATION_SOURCES with_items: "{{ COMMON_EXTRA_CONFIGURATION_SOURCES }}"
tags: tags:
- "install" - "install"
- "install:configuration" - "install:configuration"
......
...@@ -230,7 +230,6 @@ credentials_log_dir: "{{ COMMON_LOG_DIR }}/{{ credentials_service_name }}" ...@@ -230,7 +230,6 @@ credentials_log_dir: "{{ COMMON_LOG_DIR }}/{{ credentials_service_name }}"
credentials_requirements_base: "{{ credentials_code_dir }}/requirements" credentials_requirements_base: "{{ credentials_code_dir }}/requirements"
credentials_requirements: credentials_requirements:
- production.txt - production.txt
- optional.txt
# #
# OS packages # OS packages
......
...@@ -10,13 +10,13 @@ ...@@ -10,13 +10,13 @@
# #
# #
# Tasks for role credentials # Tasks for role credentials
# #
# Overview: # Overview:
# #
# #
# Dependencies: # Dependencies:
# #
# #
# Example play: # Example play:
# #
# #
...@@ -43,9 +43,9 @@ ...@@ -43,9 +43,9 @@
- install:app-requirements - install:app-requirements
- name: create nodeenv - name: create nodeenv
shell: > shell: "{{ credentials_venv_dir }}/bin/nodeenv {{ credentials_nodeenv_dir }} --prebuilt"
creates={{ credentials_nodeenv_dir }} args:
{{ credentials_venv_dir }}/bin/nodeenv {{ credentials_nodeenv_dir }} --prebuilt creates: "{{ credentials_nodeenv_dir }}"
become_user: "{{ credentials_user }}" become_user: "{{ credentials_user }}"
tags: tags:
- install - install
...@@ -74,9 +74,12 @@ ...@@ -74,9 +74,12 @@
# var should have more permissive permissions than the rest # var should have more permissive permissions than the rest
- name: create credentials var dirs - name: create credentials var dirs
file: > file:
path="{{ item }}" state=directory mode=0775 path: "{{ item }}"
owner="{{ credentials_user }}" group="{{ common_web_group }}" state: directory
mode: 0775
owner: "{{ credentials_user }}"
group: "{{ common_web_group }}"
with_items: with_items:
- "{{ CREDENTIALS_MEDIA_ROOT }}" - "{{ CREDENTIALS_MEDIA_ROOT }}"
tags: tags:
...@@ -180,7 +183,7 @@ ...@@ -180,7 +183,7 @@
- assets:gather - assets:gather
- name: restart the application - name: restart the application
supervisorctl: supervisorctl:
state: restarted state: restarted
supervisorctl_path: "{{ supervisor_ctl }}" supervisorctl_path: "{{ supervisor_ctl }}"
config: "{{ supervisor_cfg }}" config: "{{ supervisor_cfg }}"
...@@ -192,20 +195,24 @@ ...@@ -192,20 +195,24 @@
- manage:start - manage:start
- name: Copying nginx configs for credentials - name: Copying nginx configs for credentials
template: > template:
src=edx/app/nginx/sites-available/credentials.j2 src: edx/app/nginx/sites-available/credentials.j2
dest={{ nginx_sites_available_dir }}/credentials dest: "{{ nginx_sites_available_dir }}/credentials"
owner=root group={{ common_web_user }} mode=0640 owner: root
group: "{{ common_web_user }}"
mode: 0640
notify: reload nginx notify: reload nginx
tags: tags:
- install - install
- install:vhosts - install:vhosts
- name: Creating nginx config links for credentials - name: Creating nginx config links for credentials
file: > file:
src={{ nginx_sites_available_dir }}/credentials src: "{{ nginx_sites_available_dir }}/credentials"
dest={{ nginx_sites_enabled_dir }}/credentials dest: "{{ nginx_sites_enabled_dir }}/credentials"
state=link owner=root group=root state: link
owner: root
group: root
notify: reload nginx notify: reload nginx
tags: tags:
- install - install
......
--- ---
DATADOG_API_KEY: "SPECIFY_KEY_HERE" DATADOG_API_KEY: "SPECIFY_KEY_HERE"
datadog_agent_version: '1:5.1.1-546' datadog_agent_version: '1:5.10.1-1'
datadog_apt_key: "0x226AE980C7A7DA52" datadog_apt_key: "0x226AE980C7A7DA52"
datadog_debian_pkgs: datadog_debian_pkgs:
......
--- ---
- name: check out the demo course - name: check out the demo course
git_2_0_1: > git:
dest={{ demo_code_dir }} repo={{ demo_repo }} version={{ demo_version }} dest: "{{ demo_code_dir }}"
accept_hostkey=yes repo: "{{ demo_repo }}"
version: "{{ demo_version }}"
accept_hostkey: yes
become_user: "{{ demo_edxapp_user }}" become_user: "{{ demo_edxapp_user }}"
register: demo_checkout register: demo_checkout
- name: import demo course - name: import demo course
shell: > shell: "{{ demo_edxapp_venv_bin }}/python ./manage.py cms --settings=aws import {{ demo_edxapp_course_data_dir }} {{ demo_code_dir }}"
{{ demo_edxapp_venv_bin }}/python ./manage.py cms --settings=aws import {{ demo_edxapp_course_data_dir }} {{ demo_code_dir }} args:
chdir={{ demo_edxapp_code_dir }} chdir: "{{ demo_edxapp_code_dir }}"
become_user: "{{ common_web_user }}" become_user: "{{ common_web_user }}"
when: demo_checkout.changed when: demo_checkout.changed
- name: create some test users - name: create some test users
shell: > shell: "{{ demo_edxapp_venv_bin }}/python ./manage.py lms --settings=aws --service-variant lms manage_user {{ item.username}} {{ item.email }} --initial-password-hash {{ item.hashed_password | quote }}"
{{ demo_edxapp_venv_bin }}/python ./manage.py lms --settings=aws --service-variant lms manage_user {{ item.username}} {{ item.email }} --initial-password-hash {{ item.hashed_password | quote }} args:
chdir={{ demo_edxapp_code_dir }} chdir: "{{ demo_edxapp_code_dir }}"
become_user: "{{ common_web_user }}" become_user: "{{ common_web_user }}"
with_items: demo_test_users with_items: "{{ demo_test_users }}"
when: demo_checkout.changed when: demo_checkout.changed
- name: create staff user - name: create staff user
shell: > shell: "{{ demo_edxapp_venv_bin }}/python ./manage.py lms --settings=aws --service-variant lms manage_user staff staff@example.com --initial-password-hash {{ demo_hashed_password | quote }} --staff"
{{ demo_edxapp_venv_bin }}/python ./manage.py lms --settings=aws --service-variant lms manage_user staff staff@example.com --initial-password-hash {{ demo_hashed_password | quote }} --staff args:
chdir={{ demo_edxapp_code_dir }} chdir: "{{ demo_edxapp_code_dir }}"
become_user: "{{ common_web_user }}" become_user: "{{ common_web_user }}"
when: when:
- demo_checkout.changed - demo_checkout.changed
- DEMO_CREATE_STAFF_USER - DEMO_CREATE_STAFF_USER
- name: enroll test users in the demo course - name: enroll test users in the demo course
shell: > shell: "{{ demo_edxapp_venv_bin }}/python ./manage.py lms --settings=aws --service-variant lms enroll_user_in_course -e {{ item.email }} -c {{ demo_course_id }}"
{{ demo_edxapp_venv_bin }}/python ./manage.py lms --settings=aws --service-variant lms enroll_user_in_course -e {{ item.email }} -c {{ demo_course_id }} args:
chdir={{ demo_edxapp_code_dir }} chdir: "{{ demo_edxapp_code_dir }}"
become_user: "{{ common_web_user }}" become_user: "{{ common_web_user }}"
with_items: with_items:
- "{{ demo_test_users }}" - "{{ demo_test_users }}"
...@@ -43,15 +45,15 @@ ...@@ -43,15 +45,15 @@
- name: add test users to the certificate whitelist - name: add test users to the certificate whitelist
shell: > shell: "{{ demo_edxapp_venv_bin }}/python ./manage.py lms --settings=aws --service-variant lms cert_whitelist -a {{ item.email }} -c {{ demo_course_id }}"
{{ demo_edxapp_venv_bin }}/python ./manage.py lms --settings=aws --service-variant lms cert_whitelist -a {{ item.email }} -c {{ demo_course_id }} args:
chdir={{ demo_edxapp_code_dir }} chdir: "{{ demo_edxapp_code_dir }}"
with_items: demo_test_users with_items: "{{ demo_test_users }}"
when: demo_checkout.changed when: demo_checkout.changed
- name: seed the forums for the demo course - name: seed the forums for the demo course
shell: > shell: "{{ demo_edxapp_venv_bin }}/python ./manage.py lms --settings=aws seed_permissions_roles {{ demo_course_id }}"
{{ demo_edxapp_venv_bin }}/python ./manage.py lms --settings=aws seed_permissions_roles {{ demo_course_id }} args:
chdir={{ demo_edxapp_code_dir }} chdir: "{{ demo_edxapp_code_dir }}"
with_items: demo_test_users with_items: "{{ demo_test_users }}"
when: demo_checkout.changed when: demo_checkout.changed
...@@ -31,8 +31,10 @@ ...@@ -31,8 +31,10 @@
# - demo # - demo
- name: create demo app and data dirs - name: create demo app and data dirs
file: > file:
path="{{ demo_app_dir }}" state=directory path: "{{ demo_app_dir }}"
owner="{{ demo_edxapp_user }}" group="{{ common_web_group }}" state: directory
owner: "{{ demo_edxapp_user }}"
group: "{{ common_web_group }}"
- include: deploy.yml tags=deploy - include: deploy.yml tags=deploy
...@@ -77,9 +77,9 @@ ...@@ -77,9 +77,9 @@
- devstack:install - devstack:install
- name: create nodeenv - name: create nodeenv
shell: > shell: "{{ discovery_venv_dir }}/bin/nodeenv {{ discovery_nodeenv_dir }} --node={{ discovery_node_version }} --prebuilt"
creates={{ discovery_nodeenv_dir }} args:
{{ discovery_venv_dir }}/bin/nodeenv {{ discovery_nodeenv_dir }} --node={{ discovery_node_version }} --prebuilt creates: "{{ discovery_nodeenv_dir }}"
become_user: "{{ discovery_user }}" become_user: "{{ discovery_user }}"
tags: tags:
- install - install
...@@ -94,9 +94,9 @@ ...@@ -94,9 +94,9 @@
- install:app-requirements - install:app-requirements
- name: install bower dependencies - name: install bower dependencies
shell: > shell: ". {{ discovery_nodeenv_bin }}/activate && {{ discovery_node_bin }}/bower install --production --config.interactive=false"
chdir={{ discovery_code_dir }} args:
. {{ discovery_nodeenv_bin }}/activate && {{ discovery_node_bin }}/bower install --production --config.interactive=false chdir: "{{ discovery_code_dir }}"
become_user: "{{ discovery_user }}" become_user: "{{ discovery_user }}"
tags: tags:
- install - install
......
...@@ -7,15 +7,28 @@ COMMAND=$1 ...@@ -7,15 +7,28 @@ COMMAND=$1
case $COMMAND in case $COMMAND in
start) start)
{% set discovery_venv_bin = discovery_home + "/venvs/" + discovery_service_name + "/bin" %} {% set discovery_venv_bin = discovery_venv_dir + "/bin" %}
{{ supervisor_venv_bin }}/supervisord --configuration {{ supervisor_cfg }} {{ supervisor_venv_bin }}/supervisord --configuration {{ supervisor_cfg }}
# Needed to run bower as root. See explaination around 'discovery_user=root'
echo '{ "allow_root": true }' > /root/.bowerrc
cd /edx/app/edx_ansible/edx_ansible/docker/plays cd /edx/app/edx_ansible/edx_ansible/docker/plays
ansible-playbook discovery.yml -c local -i '127.0.0.1,' \ /edx/app/edx_ansible/venvs/edx_ansible/bin/ansible-playbook discovery.yml -c local -i '127.0.0.1,' \
-t 'install:app-requirements,assets:gather,devstack,migrate,manage:start' \ -t 'install:app-requirements,assets:gather,devstack,migrate' \
--extra-vars="migrate_db=yes" \ --extra-vars="migrate_db=yes" \
--extra-vars="@/ansible_overrides.yml" --extra-vars="@/ansible_overrides.yml" \
--extra-vars="discovery_user=root" # Needed when sharing the volume with the host machine because node/bower drops
# everything in the code directory by default. So we get issues with permissions
# on folders owned by the developer.
# Need to start supervisord and nginx manually because systemd is hard to run on docker
# http://developers.redhat.com/blog/2014/05/05/running-systemd-within-docker-container/
# Both daemon by default
nginx
/edx/app/supervisor/venvs/supervisor/bin/supervisord --configuration /edx/app/supervisor/supervisord.conf
# Docker requires an active foreground task. Tail the logs to appease Docker and # Docker requires an active foreground task. Tail the logs to appease Docker and
# provide useful output for development. # provide useful output for development.
......
cache_valid_time: 3600
docker_tools_deps_deb_pkgs:
- apt-transport-https
- ca-certificates
- python-pip
docker_apt_keyserver: "hkp://ha.pool.sks-keyservers.net:80"
docker_apt_key_id: "58118E89F3A912897C070ADBF76221572C52609D"
docker_repo: "deb https://apt.dockerproject.org/repo ubuntu-xenial main"
docker_group: "docker"
docker_users: []
# Install docker-engine and docker-compose
# Add users to docker group
---
- name: add docker group
group:
name: "{{ docker_group }}"
tags:
- install
- install:base
- name: add users to docker group
user:
name: "{{ item }}"
groups: "{{ docker_group }}"
append: yes
with_items: "{{ docker_users }}"
tags:
- install
- install:base
- name: install package dependencies
apt:
name: "{{ docker_tools_deps_deb_pkgs }}"
update_cache: yes
cache_valid_time: "{{ cache_valid_time }}"
tags:
- install
- install:system-requirements
- name: add docker apt key
apt_key:
keyserver: "{{ docker_apt_keyserver }}"
id: "{{ docker_apt_key_id }}"
tags:
- install
- install:configuration
- name: add docker repo
apt_repository:
repo: "{{ docker_repo }}"
tags:
- install
- install:configuration
- name: install docker-engine
apt:
name: "docker-engine"
update_cache: yes
cache_valid_time: "{{ cache_valid_time }}"
tags:
- install
- install:system-requirements
- name: start docker service
service:
name: docker
enabled: yes
state: started
tags:
- install
- install:configuration
- name: install docker-compose
pip:
name: "docker-compose"
tags:
- install
- install:system-requirements
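# A minimal sketch of a post-install sanity check (not part of this role; it
# assumes the docker service started successfully and the host has outbound
# network access to pull the hello-world image):
#
# - name: verify docker-engine and docker-compose
#   shell: "docker run --rm hello-world && docker-compose --version"
#   changed_when: no
#   tags:
#     - install
#     - install:system-requirements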
...@@ -21,16 +21,20 @@ ECOMMERCE_NGINX_PORT: "18130" ...@@ -21,16 +21,20 @@ ECOMMERCE_NGINX_PORT: "18130"
ECOMMERCE_SSL_NGINX_PORT: 48130 ECOMMERCE_SSL_NGINX_PORT: 48130
ECOMMERCE_DEFAULT_DB_NAME: 'ecommerce' ECOMMERCE_DEFAULT_DB_NAME: 'ecommerce'
ECOMMERCE_DATABASE_USER: "ecomm001"
ECOMMERCE_DATABASE_PASSWORD: "password"
ECOMMERCE_DATABASE_HOST: "localhost"
ECOMMERCE_DATABASE_PORT: 3306
ECOMMERCE_DATABASES: ECOMMERCE_DATABASES:
# rw user # rw user
default: default:
ENGINE: 'django.db.backends.mysql' ENGINE: 'django.db.backends.mysql'
NAME: '{{ ECOMMERCE_DEFAULT_DB_NAME }}' NAME: '{{ ECOMMERCE_DEFAULT_DB_NAME }}'
USER: 'ecomm001' USER: '{{ ECOMMERCE_DATABASE_USER }}'
PASSWORD: 'password' PASSWORD: '{{ ECOMMERCE_DATABASE_PASSWORD }}'
HOST: 'localhost' HOST: '{{ ECOMMERCE_DATABASE_HOST }}'
PORT: '3306' PORT: '{{ ECOMMERCE_DATABASE_PORT }}'
ATOMIC_REQUESTS: true ATOMIC_REQUESTS: true
CONN_MAX_AGE: 60 CONN_MAX_AGE: 60
...@@ -51,7 +55,7 @@ ECOMMERCE_JWT_DECODE_HANDLER: 'ecommerce.extensions.api.handlers.jwt_decode_hand ...@@ -51,7 +55,7 @@ ECOMMERCE_JWT_DECODE_HANDLER: 'ecommerce.extensions.api.handlers.jwt_decode_hand
ECOMMERCE_JWT_ISSUERS: ECOMMERCE_JWT_ISSUERS:
- '{{ ECOMMERCE_LMS_URL_ROOT }}/oauth2' - '{{ ECOMMERCE_LMS_URL_ROOT }}/oauth2'
- 'ecommerce_worker' # Must match the value of JWT_ISSUER configured for the ecommerce worker. - 'ecommerce_worker' # Must match the value of JWT_ISSUER configured for the ecommerce worker.
ECOMMERCE_JWT_LEEWAY: 1
# NOTE: We have an array of keys to allow for support of multiple when, for example, # NOTE: We have an array of keys to allow for support of multiple when, for example,
# we change keys. This will ensure we continue to operate with JWTs issued signed with the old key # we change keys. This will ensure we continue to operate with JWTs issued signed with the old key
# while migrating to the new key. # while migrating to the new key.
...@@ -149,7 +153,7 @@ ECOMMERCE_SERVICE_CONFIG: ...@@ -149,7 +153,7 @@ ECOMMERCE_SERVICE_CONFIG:
JWT_SECRET_KEY: '{{ ECOMMERCE_JWT_SECRET_KEY }}' JWT_SECRET_KEY: '{{ ECOMMERCE_JWT_SECRET_KEY }}'
JWT_ALGORITHM: '{{ ECOMMERCE_JWT_ALGORITHM }}' JWT_ALGORITHM: '{{ ECOMMERCE_JWT_ALGORITHM }}'
JWT_VERIFY_EXPIRATION: '{{ ECOMMERCE_JWT_VERIFY_EXPIRATION }}' JWT_VERIFY_EXPIRATION: '{{ ECOMMERCE_JWT_VERIFY_EXPIRATION }}'
JWT_LEEWAY: 1 JWT_LEEWAY: '{{ ECOMMERCE_JWT_LEEWAY }}'
JWT_DECODE_HANDLER: '{{ ECOMMERCE_JWT_DECODE_HANDLER }}' JWT_DECODE_HANDLER: '{{ ECOMMERCE_JWT_DECODE_HANDLER }}'
JWT_ISSUERS: '{{ ECOMMERCE_JWT_ISSUERS }}' JWT_ISSUERS: '{{ ECOMMERCE_JWT_ISSUERS }}'
JWT_SECRET_KEYS: '{{ ECOMMERCE_JWT_SECRET_KEYS }}' JWT_SECRET_KEYS: '{{ ECOMMERCE_JWT_SECRET_KEYS }}'
......
...@@ -84,11 +84,9 @@ ...@@ -84,11 +84,9 @@
- migrate:db - migrate:db
- name: Populate countries - name: Populate countries
shell: > shell: "DB_MIGRATION_USER={{ COMMON_MYSQL_MIGRATE_USER }} DB_MIGRATION_PASS={{ COMMON_MYSQL_MIGRATE_PASS }} {{ ecommerce_venv_dir }}/bin/python ./manage.py oscar_populate_countries"
chdir={{ ecommerce_code_dir }} args:
DB_MIGRATION_USER={{ COMMON_MYSQL_MIGRATE_USER }} chdir: "{{ ecommerce_code_dir }}"
DB_MIGRATION_PASS={{ COMMON_MYSQL_MIGRATE_PASS }}
{{ ecommerce_venv_dir }}/bin/python ./manage.py oscar_populate_countries
become_user: "{{ ecommerce_user }}" become_user: "{{ ecommerce_user }}"
environment: "{{ ecommerce_environment }}" environment: "{{ ecommerce_environment }}"
when: migrate_db is defined and migrate_db|lower == "yes" when: migrate_db is defined and migrate_db|lower == "yes"
......
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
virtualenv: '{{ ecommerce_worker_home }}/venvs/{{ ecommerce_worker_service_name }}' virtualenv: '{{ ecommerce_worker_home }}/venvs/{{ ecommerce_worker_service_name }}'
state: present state: present
become_user: '{{ ecommerce_worker_user }}' become_user: '{{ ecommerce_worker_user }}'
with_items: ecommerce_worker_requirements with_items: "{{ ecommerce_worker_requirements }}"
- name: write out the supervisor wrapper - name: write out the supervisor wrapper
template: template:
......
--- ---
- name: Git checkout edx_ansible repo into edx_ansible_code_dir - name: Git checkout edx_ansible repo into edx_ansible_code_dir
git_2_0_1: git:
dest: "{{ edx_ansible_code_dir }}" dest: "{{ edx_ansible_code_dir }}"
repo: "{{ edx_ansible_source_repo }}" repo: "{{ edx_ansible_source_repo }}"
version: "{{ configuration_version }}" version: "{{ configuration_version }}"
......
...@@ -51,7 +51,7 @@ ...@@ -51,7 +51,7 @@
state: present state: present
extra_args: "--exists-action w" extra_args: "--exists-action w"
become_user: "{{ edx_notes_api_user }}" become_user: "{{ edx_notes_api_user }}"
with_items: edx_notes_api_requirements with_items: "{{ edx_notes_api_requirements }}"
- name: Migrate - name: Migrate
shell: > shell: >
......
...@@ -16,6 +16,7 @@ ...@@ -16,6 +16,7 @@
# #
edx_service_name: edx_service edx_service_name: edx_service
edx_service_repos: []
# #
# OS packages # OS packages
# #
......
...@@ -99,6 +99,7 @@ ...@@ -99,6 +99,7 @@
tags: tags:
- install - install
- install:configuration - install:configuration
- install:app-configuration
- name: Install a bunch of system packages on which edx_service relies - name: Install a bunch of system packages on which edx_service relies
apt: apt:
...@@ -126,18 +127,19 @@ ...@@ -126,18 +127,19 @@
action: ec2_facts action: ec2_facts
tags: tags:
- to-remove - to-remove
#old syntax - should be fixed
- name: Tag instance - name: Tag instance
ec2_tag_local: resource={{ ansible_ec2_instance_id }} region={{ ansible_ec2_placement_region }} ec2_tag_local:
args: args:
resource: "{{ ansible_ec2_instance_id }}"
region: "{{ ansible_ec2_placement_region }}"
tags: tags:
- Name: version:{{ edx_service_name }} - Name: "version:{{ edx_service_name }}"
Value: "{{ item.0.DOMAIN }}/{{ item.0.PATH }}/{{ item.0.REPO }} {{ item.1.after |truncate(7,True,'') }}" Value: "{{ item.0.DOMAIN }}/{{ item.0.PATH }}/{{ item.0.REPO }} {{ item.1.after |truncate(7,True,'') }}"
when: item.1.after is defined and COMMON_TAG_EC2_INSTANCE and edx_service_repos is defined when: item.1.after is defined and COMMON_TAG_EC2_INSTANCE and edx_service_repos is defined
with_together: with_together:
- edx_service_repos - "{{ edx_service_repos }}"
- code_checkout.results - "{{ code_checkout.results }}"
tags: tags:
- to-remove - to-remove
......
---
#
# edX Configuration
#
# github: https://github.com/edx/configuration
# wiki: https://openedx.atlassian.net/wiki/display/OpenOPS
# code style: https://openedx.atlassian.net/wiki/display/OpenOPS/Ansible+Code+Conventions
# license: https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
##
# Defaults for role edx_service_rds
#
#
# vars are namespaced with the module name.
#
edx_service_rds_role_name: edx_service_rds
E_D_C: "prod-sample-app"
EDX_SERVICE_RDS_INSTANCE_SIZE: 10
EDX_SERVICE_RDS_INSTANCE_TYPE: "db.m1.small"
EDX_SERVICE_RDS_ROOT_USER: "root"
# no unicode (e.g., c cedilla) in passwords
EDX_SERVICE_RDS_ROOT_PASSWORD: "plus_ca_change"
EDX_SERVICE_RDS_ENGINE: "MySQL"
EDX_SERVICE_RDS_ENGINE_VERSION: "5.6.22"
EDX_SERVICE_RDS_PARAM_GROUP_ENGINE: "mysql5.6"
# will vary depending upon engine; examples assume
# MySQL 5.6
EDX_SERVICE_RDS_PARAM_GROUP_PARAMS:
character_set_client: "utf8"
character_set_connection: "utf8"
character_set_database: "utf8"
character_set_filesystem: "utf8"
character_set_results: "utf8"
character_set_server: "utf8"
collation_connection: "utf8_unicode_ci"
collation_server: "utf8_unicode_ci"
EDX_SERVICE_RDS_MULTI_AZ: No
EDX_SERVICE_RDS_MAINT_WINDOW: "Mon:00:00-Mon:01:15"
EDX_SERVICE_RDS_BACKUP_DAYS: 30
EDX_SERVICE_RDS_BACKUP_WINDOW: "02:00-03:00"
EDX_SERVICE_RDS_SUBNET_1_AZ: "us-east-1c"
EDX_SERVICE_RDS_SUBNET_1_CIDR: "{{ vpc_class_b }}.50.0/24"
EDX_SERVICE_RDS_SUBNET_2_AZ: "us-east-1d"
EDX_SERVICE_RDS_SUBNET_2_CIDR: "{{ vpc_class_b }}.51.0/24"
# The defaults are permissive; override them (see the sketch below).
EDX_SERVICE_RDS_SECURITY_GROUP:
name: "{{ e_d_c }}-rds-sg"
description: "RDS ingress and egress."
rules:
- proto: "tcp"
from_port: "3306"
to_port: "3306"
cidr_ip: "0.0.0.0/0"
rules_egress:
- proto: "tcp"
from_port: "3306"
to_port: "3306"
cidr_ip: "0.0.0.0/0"
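# For example, a less permissive override might limit MySQL traffic to the
# VPC itself (a sketch only; adjust the CIDR to your own network layout):
#
# EDX_SERVICE_RDS_SECURITY_GROUP:
#   name: "{{ e_d_c }}-rds-sg"
#   description: "RDS ingress and egress, VPC only."
#   rules:
#     - proto: "tcp"
#       from_port: "3306"
#       to_port: "3306"
#       cidr_ip: "{{ vpc_class_b }}.0.0/16"
#   rules_egress:
#     - proto: "tcp"
#       from_port: "3306"
#       to_port: "3306"
#       cidr_ip: "{{ vpc_class_b }}.0.0/16"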
# The defaults are permissive; override them.
EDX_SERVICE_RDS_VPC_DB_ACL:
name: "{{ e_d_c }}-db"
rules:
- number: "100"
type: "ingress"
protocol: "tcp"
from_port: 3306
to_port: 3306
cidr_block: "0.0.0.0/0"
rule_action: "allow"
- number: "100"
type: "egress"
protocol: "all"
from_port: 0
to_port: 65535
cidr_block: "0.0.0.0/0"
rule_action: "allow"
EDX_SERVICE_RDS_VPC_DB_ROUTE_TABLE:
- cidr: "{{ vpc_class_b }}.0.0/16"
gateway: 'local'
# Typically you override the all-caps vars, but it may
# be convenient to override the entire structure
# if you are spanning more than two subnets (see the sketch below).
edx_service_rds_vpc_db_subnets:
- name: "{{ E_D_C }}-db-{{ EDX_SERVICE_RDS_SUBNET_1_AZ }}"
cidr: "{{ EDX_SERVICE_RDS_SUBNET_1_CIDR }}"
az: "{{ EDX_SERVICE_RDS_SUBNET_1_AZ }}"
- name: "{{ E_D_C }}-db-{{ EDX_SERVICE_RDS_SUBNET_2_AZ }}"
cidr: "{{ EDX_SERVICE_RDS_SUBNET_2_CIDR }}"
az: "{{ EDX_SERVICE_RDS_SUBNET_2_AZ }}"
edx_service_rds_state: "present"
edx_service_rds_db:
state: "{{ edx_service_rds_state }}"
name: "{{ E_D_C }}-primary"
size: "{{ EDX_SERVICE_RDS_INSTANCE_SIZE }}"
instance_type: "{{ EDX_SERVICE_RDS_INSTANCE_TYPE }}"
root_user: "{{ EDX_SERVICE_RDS_ROOT_USER }}"
root_password: "{{ EDX_SERVICE_RDS_ROOT_PASSWORD }}"
engine: "{{ EDX_SERVICE_RDS_ENGINE }}"
engine_version: "{{ EDX_SERVICE_RDS_ENGINE_VERSION }}"
multi_az: "{{ EDX_SERVICE_RDS_MULTI_AZ }}"
maint_window: "{{ EDX_SERVICE_RDS_MAINT_WINDOW }}"
backup_days: "{{ EDX_SERVICE_RDS_BACKUP_DAYS }}"
backup_window: "{{ EDX_SERVICE_RDS_BACKUP_WINDOW }}"
param_group:
name: "{{ E_D_C}}"
engine: "{{ EDX_SERVICE_RDS_PARAM_GROUP_ENGINE }}"
params: "{{ EDX_SERVICE_RDS_PARAM_GROUP_PARAMS }}"
#
# OS packages
#
edx_service_rds_debian_pkgs: []
edx_service_rds_redhat_pkgs: []
---
#
# edX Configuration
#
# github: https://github.com/edx/configuration
# wiki: https://openedx.atlassian.net/wiki/display/OpenOPS
# code style: https://openedx.atlassian.net/wiki/display/OpenOPS/Ansible+Code+Conventions
# license: https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
#
#
# Tasks for role edx_service_rds
#
# Overview:
#
# Creates a VPC RDS instance and related network infrastructure, e.g.,
# subnets, subnet groups, acls, as well as an instance specific
# parameter group.
#
# NB: When using a boto profile other than the default, you will need
# to export AWS_PROFILE because some tasks do not properly process
# the profile argument.
#
# NB: You should currently not use this play for deleting databases as
# the final snapshot functionality doesn't work properly in the ansible
# module. First, it defaults to not taking a final snapshot, and
# when you specify one, it throws a key error (a possible out-of-band
# workaround is sketched just after this header).
#
# Dependencies:
#
# Assumes a working VPC, ideally created via the edx_vpc role, since that
# role produces configuration output that this role requires,
# such as the VPC, route table, and subnet IDs.
#
# Example play:
#
# export AWS_PROFILE=sandbox
# ansible-playbook -i 'localhost,' edx_service_rds.yml -e@/path/to/secure-repo/cloud_migrations/vpcs/vpc-file.yml -e@/path/to/secure-repo/cloud_migrations/dbs/e-d-c-rds.yml
#
# TODO:
# - handle db deletes and updates
# - handle DNS updates, consider that a different profile may be required for this.
#
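#
# Until the module's final-snapshot handling is fixed, a manual snapshot can
# be taken before any delete. A minimal sketch using the AWS CLI follows; it
# assumes the CLI and credentials are available where the play runs, and the
# snapshot identifier shown is illustrative:
#
# - name: take a manual snapshot before deleting the database
#   command: >
#     aws rds create-db-snapshot
#     --region {{ aws_region }}
#     --db-instance-identifier {{ edx_service_rds_db.name }}
#     --db-snapshot-identifier {{ edx_service_rds_db.name }}-final-{{ ansible_date_time.epoch }}
#   when: edx_service_rds_db.state == 'absent'
#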
- name: create database route table
ec2_rt:
profile: "{{ profile }}"
vpc_id: "{{ vpc_id }}"
region: "{{ aws_region }}"
state: "{{ edx_service_rds_state }}"
name: "{{ e_d_c }}-db"
routes: "{{ EDX_SERVICE_RDS_VPC_DB_ROUTE_TABLE }}"
register: created_db_rt
- name: create db network acl
ec2_acl:
profile: "{{ profile }}"
name: "{{ EDX_SERVICE_RDS_VPC_DB_ACL.name }}"
vpc_id: "{{ vpc_id }}"
state: "{{ edx_service_rds_state }}"
region: "{{ aws_region }}"
rules: "{{ EDX_SERVICE_RDS_VPC_DB_ACL.rules }}"
register: created_db_acl
- name: create db subnets
ec2_subnet:
profile: "{{ profile }}"
vpc_id: "{{ vpc_id }}"
region: "{{ aws_region }}"
state: "{{ edx_service_rds_state }}"
name: "{{ item.name }}"
cidr: "{{ item.cidr }}"
az: "{{ item.az }}"
route_table_id: "{{ created_db_rt.id }}"
network_acl_id: "{{ created_db_acl.id }}"
  with_items: "{{ edx_service_rds_vpc_db_subnets }}"
register: created_db_subnets
- name: Apply function to subnet data
util_map:
function: 'zip_to_list'
input: "{{ created_db_subnets.results }}"
args:
- "subnet_id"
register: subnet_data
- name: create RDS subnet group
rds_subnet_group:
state: "{{ edx_service_rds_state }}"
profile: "{{ profile }}"
region: "{{ aws_region }}"
name: "{{ e_d_c }}"
description: "{{ e_d_c }}"
subnets: "{{ subnet_data.function_output }}"
- name: create RDS security group
ec2_group:
profile: "{{ profile }}"
vpc_id: "{{ vpc_id }}"
state: "{{ edx_service_rds_state }}"
region: "{{ aws_region }}"
name: "{{ EDX_SERVICE_RDS_SECURITY_GROUP.name }}"
rules: "{{ EDX_SERVICE_RDS_SECURITY_GROUP.rules }}"
description: "{{ EDX_SERVICE_RDS_SECURITY_GROUP.description }}"
rules_egress: "{{ EDX_SERVICE_RDS_SECURITY_GROUP.rules_egress }}"
register: created_rds_security_group
- name: create instance parameter group
rds_param_group:
state: "{{ edx_service_rds_state }}"
region: "{{ aws_region }}"
name: "{{ edx_service_rds_db.param_group.name }}"
description: "{{ edx_service_rds_db.param_group.name }}"
engine: "{{ edx_service_rds_db.param_group.engine }}"
params: "{{ edx_service_rds_db.param_group.params }}"
register: created_param_group
#
# Create the database
#
- name: Create service database
rds:
command: "create"
region: "{{ aws_region }}"
instance_name: "{{ edx_service_rds_db.name }}"
db_engine: "{{ edx_service_rds_db.engine }}"
engine_version: "{{ edx_service_rds_db.engine_version }}"
size: "{{ edx_service_rds_db.size }}"
instance_type: "{{ edx_service_rds_db.instance_type }}"
username: "{{ edx_service_rds_db.root_user }}"
password: "{{ edx_service_rds_db.root_password }}"
subnet: "{{ e_d_c }}"
vpc_security_groups: "{{ created_rds_security_group.group_id }}"
multi_zone: "{{ edx_service_rds_db.multi_az }}"
maint_window: "{{ edx_service_rds_db.maint_window }}"
backup_window: "{{ edx_service_rds_db.backup_window }}"
backup_retention: "{{ edx_service_rds_db.backup_days }}"
parameter_group: "{{ edx_service_rds_db.param_group.name }}"
tags:
Environment: "{{ env }}"
Application: "{{ deployment }}"
when: edx_service_rds_db.state == 'present'
register: created_db
#
# Delete the database; the module needs debugging before this
# fully works.
#
- name: Delete service database
rds:
command: "delete"
region: "{{ aws_region }}"
instance_name: "{{ edx_service_rds_db.name }}"
# bug in the module related to final snapshots
#snapshot: "{{ edx_service_rds_db.name }}-final-{{ ansible_date_time.epoch }}"
snapshot: "red-blue"
when: edx_service_rds_db.state == 'absent'
...@@ -50,7 +50,7 @@ ...@@ -50,7 +50,7 @@
shell: /bin/bash shell: /bin/bash
groups: "{{ themes_group }}" groups: "{{ themes_group }}"
append: yes append: yes
with_items: theme_users with_items: "{{ theme_users }}"
when: theme_users is defined when: theme_users is defined
- name: update .bashrc to set umask value - name: update .bashrc to set umask value
......
---
#
# edX Configuration
#
# github: https://github.com/edx/configuration
# wiki: https://openedx.atlassian.net/wiki/display/OpenOPS
# code style: https://openedx.atlassian.net/wiki/display/OpenOPS/Ansible+Code+Conventions
# license: https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
##
# Defaults for role edx_vpc
#
#
# vars are namespaced with the module name.
#
vpc_role_name: vpc
#
# OS packages
#
vpc_debian_pkgs: []
vpc_redhat_pkgs: []
---
#
# edX Configuration
#
# github: https://github.com/edx/configuration
# wiki: https://openedx.atlassian.net/wiki/display/OpenOPS
# code style: https://openedx.atlassian.net/wiki/display/OpenOPS/Ansible+Code+Conventions
# license: https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
#
#
# Tasks for role edx_vpc
#
# Overview:
# This role creates an opinionated VPC for containing a cluster of edX services.
#
# It currently assumes that we will be multi-az, with a single NAT, and all
# traffic going over that NAT. A public subnet and both public and private
# route tables are created by default and can be used by new services in this
# VPC. The public subnet should house ELBs, and any newly created private subnets
# can use the existing private route table so that private machines can reach
# the internet.
#
#
# Example play:
#
# ansible-playbook -c local -i localhost, edx_vpc.yml -e@/Users/feanil/src/edx-secure/cloud_migrations/vpcs/test.yml
# DO NOT use the subnet or route table sections of this command.
# They will delete any subnets or route tables not defined here, which is
# probably not what you want, since other services may have been added
# to the VPC whose subnets and route tables are not enumerated here.
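#
# A minimal sketch of what such a vars file might contain (the values are
# illustrative assumptions; this role does not define them itself, and a
# real file also needs the ACL, route table, NAT and keypair settings
# referenced by the tasks below):
#
# vpc_aws_profile: "sandbox"
# vpc_aws_region: "us-east-1"
# vpc_state: "present"
# vpc_name: "test"
# vpc_cidr: "10.0.0.0/16"
# vpc_class_b: "10.0"
# vpc_tags:
#   Name: "test"
# vpc_public_subnets:
#   - name: "test-public-us-east-1b"
#     cidr: "10.0.100.0/24"
#     az: "us-east-1b"
#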
- name: create a vpc
local_action:
profile: "{{ vpc_aws_profile }}"
module: "ec2_vpc_local"
resource_tags: "{{ vpc_tags }}"
cidr_block: "{{ vpc_cidr }}"
region: "{{ vpc_aws_region }}"
state: "{{ vpc_state }}"
internet_gateway: yes
wait: yes
register: created_vpc
# A default network ACL is created when a VPC is created, so each VPC
# should already have one, but we create one here that allows access to the
# outside world through the internet gateway.
- name: create public network acl
ec2_acl:
profile: "{{ vpc_aws_profile }}"
name: "{{ vpc_public_acl.name }}"
vpc_id: "{{ created_vpc.vpc_id }}"
state: "present"
region: "{{ vpc_aws_region }}"
rules: "{{ vpc_public_acl.rules }}"
register: created_public_acl
- name: create public route table
ec2_rt:
profile: "{{ vpc_aws_profile }}"
vpc_id: "{{ created_vpc.vpc_id }}"
region: "{{ vpc_aws_region }}"
state: "present"
name: "{{ vpc_name }}-public"
routes: "{{ vpc_public_route_table }}"
register: created_public_rt
- name: create public subnets
ec2_subnet:
profile: "{{ vpc_aws_profile }}"
vpc_id: "{{ created_vpc.vpc_id }}"
region: "{{ vpc_aws_region }}"
state: "present"
name: "{{ item.name }}"
cidr: "{{ item.cidr }}"
az: "{{ item.az }}"
route_table_id: "{{ created_public_rt.id }}"
network_acl_id: "{{ created_public_acl.id }}"
  with_items: "{{ vpc_public_subnets }}"
register: created_public_subnets
- name: create NAT security group
ec2_group:
profile: "{{ vpc_aws_profile }}"
vpc_id: "{{ created_vpc.vpc_id }}"
state: "present"
region: "{{ vpc_aws_region }}"
name: "{{ nat_security_group.name }}"
rules: "{{ nat_security_group.rules }}"
description: "{{ nat_security_group.description }}"
rules_egress: "{{ nat_security_group.rules_egress }}"
register: created_nat_security_group
- name: check to see if we already have a nat instance
local_action:
module: "ec2_lookup"
region: "{{ vpc_aws_region }}"
tags:
- Name: "{{ vpc_name }}-nat-instance"
register: nat_instance
- name: create nat instance
local_action:
module: "ec2"
state: "present"
wait: yes
source_dest_check: false
region: "{{ vpc_aws_region }}"
profile: "{{ vpc_aws_profile }}"
group_id: "{{ created_nat_security_group.group_id }}"
key_name: "{{ vpc_keypair }}"
vpc_subnet_id: "{{ created_public_subnets.results[0].subnet_id }}"
instance_type: "{{ vpc_nat_instance_type }}"
instance_tags:
Name: "{{ vpc_name }}-nat-instance"
image: "{{ vpc_nat_ami_id }}"
register: new_nat_instance
when: nat_instance.instances|length == 0
# We need to do this instead of registering the output of the above
# command because if the above command gets skipped, the output does
# not contain information about the instance.
- name: lookup the created nat_instance
local_action:
module: "ec2_lookup"
region: "{{ vpc_aws_region }}"
tags:
- Name: "{{ vpc_name }}-nat-instance"
register: nat_instance
- name: assign eip to nat
ec2_eip:
profile: "{{ vpc_aws_profile }}"
region: "{{ vpc_aws_region }}"
instance_id: "{{ nat_instance.instances[0].id }}"
in_vpc: true
reuse_existing_ip_allowed: true
when: new_nat_instance.changed
- name: create private route table
ec2_rt:
profile: "{{ vpc_aws_profile }}"
vpc_id: "{{ created_vpc.vpc_id }}"
region: "{{ vpc_aws_region }}"
state: "present"
name: "{{ vpc_name }}-private"
routes: "{{ vpc_private_route_table }}"
register: created_private_rt
- name: output a vpc_config for using to build services
local_action:
module: template
src: "vpc_config.yml.j2"
dest: "~/{{ e_d }}.yml"
#
# Configuration for the environment-deployment
#
profile: "{{ vpc_aws_profile }}"
vpc_id: "{{ created_vpc.vpc_id }}"
vpc_cidr: "{{ vpc_cidr }}"
vpc_class_b: "{{ vpc_class_b }}"
env: "{{ vpc_environment }}"
deployment: "{{ vpc_deployment }}"
e_d_c: "{{ vpc_environment }}-{{ vpc_deployment }}-{{ '{{' }} cluster {{ '}}' }}"
aws_region: "{{ vpc_aws_region }}"
aws_availability_zones:
{% for subnet in vpc_public_subnets %}
- {{ subnet.az }}
{% endfor %}
# Should this be service-specific?
ssl_cert: "{{ vpc_ssl_cert }}"
# used for ELB
public_route_table: "{{ created_public_rt.id }}"
# used for service subnet
private_route_table: "{{ created_private_rt.id }}"
instance_key_name: "{{ vpc_keypair }}"
# subject to change. TODO: provide the correct var for the ENI
nat_device: "{{ nat_instance.instances[0].id }}"
public_subnet_1: "{{ vpc_public_subnets[0].cidr }}"
public_subnet_2: "{{ vpc_public_subnets[1].cidr }}"
# /28 per AZ NEEDED?
# private_subnet_1: "{{ vpc_class_b }}.110.16/28"
# private_subnet_2: "{{ vpc_class_b }}.120.16/28"
elb_subnets:
{% for subnet in created_public_subnets.results %}
- "{{ subnet.subnet_id }}"
{% endfor %}
#
# Do not use vars in policies :(
# Should these be specific to the service?
role_policies: []
# - name: "{{ '{{ ' + 'e_d_c' + '}}' }}-s3-policy"
# document: |
# {
# "Statement":[
# {
# "Effect":"Allow",
# "Action":["s3:*"],
# "Resource":["arn:aws:s3:::edx-stage-edx"]
# }
# ]
# }
# - name: "{{ '{{ ' + 'e_d_c' + '}}' }}-create-instance-tags"
# document: |
# {
# "Statement": [
# {
# "Effect": "Allow",
# "Action": ["ec2:CreateTags"],
# "Resource": ["arn:aws:ec2:us-east-1:xxxxxxxxxxxx:instance/*"]
# }
# ]
# }
# - name: "{{ '{{ ' + 'e_d_c' + '}}' }}-describe-ec2"
# document: |
# {"Statement":[
# {"Resource":"*",
# "Action":["ec2:DescribeInstances","ec2:DescribeTags","ec2:DescribeVolumes"],
# "Effect":"Allow"}]}
...@@ -44,6 +44,7 @@ EDXAPP_AWS_ACCESS_KEY_ID: "None" ...@@ -44,6 +44,7 @@ EDXAPP_AWS_ACCESS_KEY_ID: "None"
EDXAPP_AWS_SECRET_ACCESS_KEY: "None" EDXAPP_AWS_SECRET_ACCESS_KEY: "None"
EDXAPP_AWS_QUERYSTRING_AUTH: false EDXAPP_AWS_QUERYSTRING_AUTH: false
EDXAPP_AWS_STORAGE_BUCKET_NAME: "SET-ME-PLEASE (ex. bucket-name)" EDXAPP_AWS_STORAGE_BUCKET_NAME: "SET-ME-PLEASE (ex. bucket-name)"
EDXAPP_IMPORT_EXPORT_BUCKET: "SET-ME-PLEASE (ex. bucket-name)"
EDXAPP_AWS_S3_CUSTOM_DOMAIN: "SET-ME-PLEASE (ex. bucket-name.s3.amazonaws.com)" EDXAPP_AWS_S3_CUSTOM_DOMAIN: "SET-ME-PLEASE (ex. bucket-name.s3.amazonaws.com)"
EDXAPP_SWIFT_USERNAME: "None" EDXAPP_SWIFT_USERNAME: "None"
EDXAPP_SWIFT_KEY: "None" EDXAPP_SWIFT_KEY: "None"
...@@ -55,7 +56,6 @@ EDXAPP_SWIFT_REGION_NAME: "None" ...@@ -55,7 +56,6 @@ EDXAPP_SWIFT_REGION_NAME: "None"
EDXAPP_SWIFT_USE_TEMP_URLS: false EDXAPP_SWIFT_USE_TEMP_URLS: false
EDXAPP_SWIFT_TEMP_URL_KEY: "None" EDXAPP_SWIFT_TEMP_URL_KEY: "None"
EDXAPP_SWIFT_TEMP_URL_DURATION: 1800 # seconds EDXAPP_SWIFT_TEMP_URL_DURATION: 1800 # seconds
EDXAPP_USE_SWIFT_STORAGE: false
EDXAPP_DEFAULT_FILE_STORAGE: "django.core.files.storage.FileSystemStorage" EDXAPP_DEFAULT_FILE_STORAGE: "django.core.files.storage.FileSystemStorage"
EDXAPP_XQUEUE_BASIC_AUTH: [ "{{ COMMON_HTPASSWD_USER }}", "{{ COMMON_HTPASSWD_PASS }}" ] EDXAPP_XQUEUE_BASIC_AUTH: [ "{{ COMMON_HTPASSWD_USER }}", "{{ COMMON_HTPASSWD_PASS }}" ]
EDXAPP_XQUEUE_DJANGO_AUTH: EDXAPP_XQUEUE_DJANGO_AUTH:
...@@ -134,6 +134,7 @@ EDXAPP_ZENDESK_API_KEY: "" ...@@ -134,6 +134,7 @@ EDXAPP_ZENDESK_API_KEY: ""
EDXAPP_CELERY_USER: 'celery' EDXAPP_CELERY_USER: 'celery'
EDXAPP_CELERY_PASSWORD: 'celery' EDXAPP_CELERY_PASSWORD: 'celery'
EDXAPP_CELERY_BROKER_VHOST: "" EDXAPP_CELERY_BROKER_VHOST: ""
EDXAPP_CELERY_BROKER_USE_SSL: false
EDXAPP_VIDEO_CDN_URLS: EDXAPP_VIDEO_CDN_URLS:
EXAMPLE_COUNTRY_CODE: "http://example.com/edx/video?s3_url=" EXAMPLE_COUNTRY_CODE: "http://example.com/edx/video?s3_url="
...@@ -498,8 +499,8 @@ EDXAPP_CELERY_WORKERS: ...@@ -498,8 +499,8 @@ EDXAPP_CELERY_WORKERS:
monitor: False monitor: False
max_tasks_per_child: 1 max_tasks_per_child: 1
EDXAPP_RECALCULATE_GRADES_ROUTING_KEY: 'edx.lms.core.default' EDXAPP_RECALCULATE_GRADES_ROUTING_KEY: 'edx.lms.core.default'
EDXAPP_LMS_CELERY_QUEUES: "{{ edxapp_workers|selectattr('service_variant', 'equalto', 'lms')|map(attribute='queue')|map('regex_replace', '(.*)', 'edx.lms.core.\\\\1')|list }}" EDXAPP_LMS_CELERY_QUEUES: "{{ edxapp_workers|selectattr('service_variant', 'equalto', 'lms')|map(attribute='queue')|map('regex_replace', '(.*)', 'edx.lms.core.\\1')|list }}"
EDXAPP_CMS_CELERY_QUEUES: "{{ edxapp_workers|selectattr('service_variant', 'equalto', 'cms')|map(attribute='queue')|map('regex_replace', '(.*)', 'edx.cms.core.\\\\1')|list }}" EDXAPP_CMS_CELERY_QUEUES: "{{ edxapp_workers|selectattr('service_variant', 'equalto', 'cms')|map(attribute='queue')|map('regex_replace', '(.*)', 'edx.cms.core.\\1')|list }}"
EDXAPP_DEFAULT_CACHE_VERSION: "1" EDXAPP_DEFAULT_CACHE_VERSION: "1"
EDXAPP_OAUTH_ENFORCE_SECURE: True EDXAPP_OAUTH_ENFORCE_SECURE: True
...@@ -639,10 +640,12 @@ edxapp_venvs_dir: "{{ edxapp_app_dir }}/venvs" ...@@ -639,10 +640,12 @@ edxapp_venvs_dir: "{{ edxapp_app_dir }}/venvs"
edxapp_venv_dir: "{{ edxapp_venvs_dir }}/edxapp" edxapp_venv_dir: "{{ edxapp_venvs_dir }}/edxapp"
edxapp_venv_bin: "{{ edxapp_venv_dir }}/bin" edxapp_venv_bin: "{{ edxapp_venv_dir }}/bin"
edxapp_nodeenv_dir: "{{ edxapp_app_dir }}/nodeenvs/edxapp" edxapp_nodeenv_dir: "{{ edxapp_app_dir }}/nodeenvs/edxapp"
edxapp_node_bin: "{{ edxapp_nodeenv_dir }}/bin" edxapp_nodeenv_bin: "{{ edxapp_nodeenv_dir }}/bin"
edxapp_node_version: "0.10.37" edxapp_node_version: "6.9.2"
# This is where node installs modules, not node itself
edxapp_node_bin: "{{ edxapp_code_dir }}/node_modules/.bin"
edxapp_user: edxapp edxapp_user: edxapp
edxapp_deploy_path: "{{ edxapp_venv_bin }}:{{ edxapp_code_dir }}/bin:{{ edxapp_node_bin }}:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin" edxapp_deploy_path: "{{ edxapp_venv_bin }}:{{ edxapp_code_dir }}/bin:{{ edxapp_node_bin }}:{{ edxapp_nodeenv_bin }}:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
edxapp_staticfile_dir: "{{ edxapp_data_dir }}/staticfiles" edxapp_staticfile_dir: "{{ edxapp_data_dir }}/staticfiles"
edxapp_media_dir: "{{ edxapp_data_dir }}/media" edxapp_media_dir: "{{ edxapp_data_dir }}/media"
edxapp_course_static_dir: "{{ edxapp_data_dir }}/course_static" edxapp_course_static_dir: "{{ edxapp_data_dir }}/course_static"
...@@ -799,8 +802,6 @@ edxapp_generic_auth_config: &edxapp_generic_auth ...@@ -799,8 +802,6 @@ edxapp_generic_auth_config: &edxapp_generic_auth
generic_cache_config: &default_generic_cache generic_cache_config: &default_generic_cache
BACKEND: 'django.core.cache.backends.memcached.MemcachedCache' BACKEND: 'django.core.cache.backends.memcached.MemcachedCache'
KEY_FUNCTION: 'util.memcache.safe_key' KEY_FUNCTION: 'util.memcache.safe_key'
KEY_PREFIX: 'default'
LOCATION: "{{ EDXAPP_MEMCACHE }}"
generic_env_config: &edxapp_generic_env generic_env_config: &edxapp_generic_env
ECOMMERCE_PUBLIC_URL_ROOT: "{{ EDXAPP_ECOMMERCE_PUBLIC_URL_ROOT }}" ECOMMERCE_PUBLIC_URL_ROOT: "{{ EDXAPP_ECOMMERCE_PUBLIC_URL_ROOT }}"
...@@ -821,6 +822,7 @@ generic_env_config: &edxapp_generic_env ...@@ -821,6 +822,7 @@ generic_env_config: &edxapp_generic_env
ANALYTICS_DATA_URL: "{{ EDXAPP_ANALYTICS_DATA_URL }}" ANALYTICS_DATA_URL: "{{ EDXAPP_ANALYTICS_DATA_URL }}"
ANALYTICS_DASHBOARD_URL: '{{ EDXAPP_ANALYTICS_DASHBOARD_URL }}' ANALYTICS_DASHBOARD_URL: '{{ EDXAPP_ANALYTICS_DASHBOARD_URL }}'
CELERY_BROKER_VHOST: "{{ EDXAPP_CELERY_BROKER_VHOST }}" CELERY_BROKER_VHOST: "{{ EDXAPP_CELERY_BROKER_VHOST }}"
CELERY_BROKER_USE_SSL: "{{ EDXAPP_CELERY_BROKER_USE_SSL }}"
PAYMENT_SUPPORT_EMAIL: "{{ EDXAPP_PAYMENT_SUPPORT_EMAIL }}" PAYMENT_SUPPORT_EMAIL: "{{ EDXAPP_PAYMENT_SUPPORT_EMAIL }}"
ZENDESK_URL: "{{ EDXAPP_ZENDESK_URL }}" ZENDESK_URL: "{{ EDXAPP_ZENDESK_URL }}"
COURSES_WITH_UNSAFE_CODE: "{{ EDXAPP_COURSES_WITH_UNSAFE_CODE }}" COURSES_WITH_UNSAFE_CODE: "{{ EDXAPP_COURSES_WITH_UNSAFE_CODE }}"
...@@ -884,23 +886,29 @@ generic_env_config: &edxapp_generic_env ...@@ -884,23 +886,29 @@ generic_env_config: &edxapp_generic_env
default: default:
<<: *default_generic_cache <<: *default_generic_cache
KEY_PREFIX: 'default' KEY_PREFIX: 'default'
LOCATION: "{{ EDXAPP_MEMCACHE }}"
VERSION: "{{ EDXAPP_DEFAULT_CACHE_VERSION }}" VERSION: "{{ EDXAPP_DEFAULT_CACHE_VERSION }}"
general: general:
<<: *default_generic_cache <<: *default_generic_cache
KEY_PREFIX: 'general' KEY_PREFIX: 'general'
LOCATION: "{{ EDXAPP_MEMCACHE }}"
mongo_metadata_inheritance: mongo_metadata_inheritance:
<<: *default_generic_cache <<: *default_generic_cache
KEY_PREFIX: 'mongo_metadata_inheritance' KEY_PREFIX: 'mongo_metadata_inheritance'
TIMEOUT: 300 TIMEOUT: 300
LOCATION: "{{ EDXAPP_MEMCACHE }}"
staticfiles: staticfiles:
<<: *default_generic_cache <<: *default_generic_cache
KEY_PREFIX: "{{ ansible_hostname|default('staticfiles') }}_general" KEY_PREFIX: "{{ ansible_hostname|default('staticfiles') }}_general"
LOCATION: "{{ EDXAPP_MEMCACHE }}"
configuration: configuration:
<<: *default_generic_cache <<: *default_generic_cache
KEY_PREFIX: "{{ ansible_hostname|default('configuration') }}" KEY_PREFIX: "{{ ansible_hostname|default('configuration') }}"
LOCATION: "{{ EDXAPP_MEMCACHE }}"
celery: celery:
<<: *default_generic_cache <<: *default_generic_cache
KEY_PREFIX: 'celery' KEY_PREFIX: 'celery'
LOCATION: "{{ EDXAPP_MEMCACHE }}"
TIMEOUT: "7200" TIMEOUT: "7200"
course_structure_cache: course_structure_cache:
<<: *default_generic_cache <<: *default_generic_cache
...@@ -1029,6 +1037,7 @@ lms_env_config: ...@@ -1029,6 +1037,7 @@ lms_env_config:
DOC_LINK_BASE_URL: "{{ EDXAPP_LMS_DOC_LINK_BASE_URL }}" DOC_LINK_BASE_URL: "{{ EDXAPP_LMS_DOC_LINK_BASE_URL }}"
RECALCULATE_GRADES_ROUTING_KEY: "{{ EDXAPP_RECALCULATE_GRADES_ROUTING_KEY }}" RECALCULATE_GRADES_ROUTING_KEY: "{{ EDXAPP_RECALCULATE_GRADES_ROUTING_KEY }}"
CELERY_QUEUES: "{{ EDXAPP_LMS_CELERY_QUEUES }}" CELERY_QUEUES: "{{ EDXAPP_LMS_CELERY_QUEUES }}"
ALTERNATE_WORKER_QUEUES: "cms"
cms_auth_config: cms_auth_config:
<<: *edxapp_generic_auth <<: *edxapp_generic_auth
...@@ -1060,6 +1069,8 @@ cms_env_config: ...@@ -1060,6 +1069,8 @@ cms_env_config:
GIT_REPO_EXPORT_DIR: "{{ EDXAPP_GIT_REPO_EXPORT_DIR }}" GIT_REPO_EXPORT_DIR: "{{ EDXAPP_GIT_REPO_EXPORT_DIR }}"
DOC_LINK_BASE_URL: "{{ EDXAPP_CMS_DOC_LINK_BASE_URL }}" DOC_LINK_BASE_URL: "{{ EDXAPP_CMS_DOC_LINK_BASE_URL }}"
CELERY_QUEUES: "{{ EDXAPP_CMS_CELERY_QUEUES }}" CELERY_QUEUES: "{{ EDXAPP_CMS_CELERY_QUEUES }}"
ALTERNATE_WORKER_QUEUES: "lms"
COURSE_IMPORT_EXPORT_BUCKET: "{{ EDXAPP_IMPORT_EXPORT_BUCKET }}"
# install dir for the edx-platform repo # install dir for the edx-platform repo
edxapp_code_dir: "{{ edxapp_app_dir }}/edx-platform" edxapp_code_dir: "{{ edxapp_app_dir }}/edx-platform"
......
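The hunks above wire several new settings into the edxapp env config: CELERY_BROKER_USE_SSL, an explicit memcached LOCATION on each cache block, ALTERNATE_WORKER_QUEUES for the LMS/CMS variants, and COURSE_IMPORT_EXPORT_BUCKET for CMS course import/export. A minimal sketch of the overrides a deployment might set for the new knobs; the variable names come from the hunks above, the values are placeholders, not defaults from this repo:

# illustrative extra-vars for an edxapp deploy -- placeholder values only
EDXAPP_CELERY_BROKER_USE_SSL: true
EDXAPP_MEMCACHE: [ "memcache.example.internal:11211" ]   # feeds each cache LOCATION above
EDXAPP_IMPORT_EXPORT_BUCKET: "example-course-import-export-bucket"

ALTERNATE_WORKER_QUEUES is set directly to "cms" (for the LMS) and "lms" (for the CMS) in the env configs above, so it needs no separate override.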
...@@ -8,5 +8,3 @@ dependencies: ...@@ -8,5 +8,3 @@ dependencies:
theme_users: theme_users:
- "{{ edxapp_user }}" - "{{ edxapp_user }}"
when: "{{ EDXAPP_ENABLE_COMPREHENSIVE_THEMING }}" when: "{{ EDXAPP_ENABLE_COMPREHENSIVE_THEMING }}"
- role: openstack
when: "{{ EDXAPP_USE_SWIFT_STORAGE }}"
...@@ -45,7 +45,7 @@ ...@@ -45,7 +45,7 @@
# Do A Checkout # Do A Checkout
- name: checkout edx-platform repo into {{ edxapp_code_dir }} - name: checkout edx-platform repo into {{ edxapp_code_dir }}
git_2_0_1: git:
dest: "{{ edxapp_code_dir }}" dest: "{{ edxapp_code_dir }}"
repo: "{{ edx_platform_repo }}" repo: "{{ edx_platform_repo }}"
version: "{{ edx_platform_version }}" version: "{{ edx_platform_version }}"
...@@ -72,7 +72,7 @@ ...@@ -72,7 +72,7 @@
# (yes, lowercase) to a Stanford-style theme and set # (yes, lowercase) to a Stanford-style theme and set
# edxapp_theme_name (again, lowercase) to its name. # edxapp_theme_name (again, lowercase) to its name.
- name: checkout Stanford-style theme - name: checkout Stanford-style theme
git_2_0_1: git:
dest: "{{ edxapp_app_dir }}/themes/{{ edxapp_theme_name }}" dest: "{{ edxapp_app_dir }}/themes/{{ edxapp_theme_name }}"
repo: "{{ edxapp_theme_source_repo }}" repo: "{{ edxapp_theme_source_repo }}"
version: "{{ edxapp_theme_version }}" version: "{{ edxapp_theme_version }}"
...@@ -110,10 +110,10 @@ ...@@ -110,10 +110,10 @@
- install:app-requirements - install:app-requirements
- name: Create the virtualenv to install the Python requirements - name: Create the virtualenv to install the Python requirements
command: > command: "virtualenv {{ edxapp_venv_dir }}"
virtualenv {{ edxapp_venv_dir }} args:
chdir={{ edxapp_code_dir }} chdir: "{{ edxapp_code_dir }}"
creates={{ edxapp_venv_dir }}/bin/pip creates: "{{ edxapp_venv_dir }}/bin/pip"
become_user: "{{ edxapp_user }}" become_user: "{{ edxapp_user }}"
environment: "{{ edxapp_environment }}" environment: "{{ edxapp_environment }}"
tags: tags:
...@@ -134,9 +134,9 @@ ...@@ -134,9 +134,9 @@
# Need to use command rather than pip so that we can maintain the context of our current working directory; some # Need to use command rather than pip so that we can maintain the context of our current working directory; some
# requirements are pathed relative to the edx-platform repo. Using the pip from inside the virtual environment implicitly # requirements are pathed relative to the edx-platform repo. Using the pip from inside the virtual environment implicitly
# installs everything into that virtual environment. # installs everything into that virtual environment.
command: > command: "{{ edxapp_venv_dir }}/bin/pip install {{ COMMON_PIP_VERBOSITY }} -i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w -r {{ item.item }}"
{{ edxapp_venv_dir }}/bin/pip install {{ COMMON_PIP_VERBOSITY }} -i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w -r {{ item.item }} args:
chdir={{ edxapp_code_dir }} chdir: "{{ edxapp_code_dir }}"
become_user: "{{ edxapp_user }}" become_user: "{{ edxapp_user }}"
environment: "{{ edxapp_environment }}" environment: "{{ edxapp_environment }}"
when: item.stat.exists when: item.stat.exists
...@@ -151,9 +151,9 @@ ...@@ -151,9 +151,9 @@
# Need to use shell rather than pip so that we can maintain the context of our current working directory; some # Need to use shell rather than pip so that we can maintain the context of our current working directory; some
# requirements are pathed relative to the edx-platform repo. Using the pip from inside the virtual environment implicitly # requirements are pathed relative to the edx-platform repo. Using the pip from inside the virtual environment implicitly
# installs everything into that virtual environment. # installs everything into that virtual environment.
shell: > shell: "{{ edxapp_venv_dir }}/bin/pip install {{ COMMON_PIP_VERBOSITY }} -i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w -r {{ item }}"
{{ edxapp_venv_dir }}/bin/pip install {{ COMMON_PIP_VERBOSITY }} -i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w -r {{ item }} args:
chdir={{ edxapp_code_dir }} chdir: "{{ edxapp_code_dir }}"
with_items: with_items:
- "{{ private_requirements_file }}" - "{{ private_requirements_file }}"
become_user: "{{ edxapp_user }}" become_user: "{{ edxapp_user }}"
...@@ -172,7 +172,7 @@ ...@@ -172,7 +172,7 @@
extra_args: "--exists-action w {{ item.extra_args|default('') }}" extra_args: "--exists-action w {{ item.extra_args|default('') }}"
virtualenv: "{{ edxapp_venv_dir }}" virtualenv: "{{ edxapp_venv_dir }}"
state: present state: present
with_items: EDXAPP_EXTRA_REQUIREMENTS with_items: "{{ EDXAPP_EXTRA_REQUIREMENTS }}"
become_user: "{{ edxapp_user }}" become_user: "{{ edxapp_user }}"
tags: tags:
- install - install
...@@ -197,9 +197,9 @@ ...@@ -197,9 +197,9 @@
# Need to use shell rather than pip so that we can maintain the context of our current working directory; some # Need to use shell rather than pip so that we can maintain the context of our current working directory; some
# requirements are pathed relative to the edx-platform repo. Using the pip from inside the virtual environment implicitly # requirements are pathed relative to the edx-platform repo. Using the pip from inside the virtual environment implicitly
# installs everything into that virtual environment. # installs everything into that virtual environment.
shell: > shell: "{{ edxapp_venv_dir }}/bin/pip install {{ COMMON_PIP_VERBOSITY }} -i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w -r {{ item }}"
{{ edxapp_venv_dir }}/bin/pip install {{ COMMON_PIP_VERBOSITY }} -i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w -r {{ item }} args:
chdir={{ edxapp_code_dir }} chdir: "{{ edxapp_code_dir }}"
with_items: with_items:
- "{{ sandbox_base_requirements }}" - "{{ sandbox_base_requirements }}"
- "{{ sandbox_local_requirements }}" - "{{ sandbox_local_requirements }}"
...@@ -211,8 +211,7 @@ ...@@ -211,8 +211,7 @@
- install:app-requirements - install:app-requirements
- name: create nodeenv - name: create nodeenv
shell: > shell: "{{ edxapp_venv_dir }}/bin/nodeenv {{ edxapp_nodeenv_dir }} --node={{ edxapp_node_version }} --prebuilt"
{{ edxapp_venv_dir }}/bin/nodeenv {{ edxapp_nodeenv_dir }} --node={{ edxapp_node_version }} --prebuilt
args: args:
creates: "{{ edxapp_nodeenv_dir }}" creates: "{{ edxapp_nodeenv_dir }}"
tags: tags:
...@@ -223,8 +222,7 @@ ...@@ -223,8 +222,7 @@
# This needs to be done as root since npm is weird about # This needs to be done as root since npm is weird about
# chown - https://github.com/npm/npm/issues/3565 # chown - https://github.com/npm/npm/issues/3565
- name: Set the npm registry - name: Set the npm registry
shell: > shell: "npm config set registry '{{ COMMON_NPM_MIRROR_URL }}'"
npm config set registry '{{ COMMON_NPM_MIRROR_URL }}'
args: args:
creates: "{{ edxapp_app_dir }}/.npmrc" creates: "{{ edxapp_app_dir }}/.npmrc"
environment: "{{ edxapp_environment }}" environment: "{{ edxapp_environment }}"
...@@ -244,7 +242,7 @@ ...@@ -244,7 +242,7 @@
- name: install node dependencies - name: install node dependencies
npm: npm:
executable: "{{ edxapp_node_bin }}/npm" executable: "{{ edxapp_nodeenv_bin }}/npm"
path: "{{ edxapp_code_dir }}" path: "{{ edxapp_code_dir }}"
production: yes production: yes
environment: "{{ edxapp_environment }}" environment: "{{ edxapp_environment }}"
...@@ -279,9 +277,9 @@ ...@@ -279,9 +277,9 @@
- install:app-requirements - install:app-requirements
- name: code sandbox | Install sandbox requirements into sandbox venv - name: code sandbox | Install sandbox requirements into sandbox venv
shell: > shell: "{{ edxapp_sandbox_venv_dir }}/bin/pip install -i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w -r {{ item }}"
{{ edxapp_sandbox_venv_dir }}/bin/pip install -i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w -r {{ item }} args:
chdir={{ edxapp_code_dir }} chdir: "{{ edxapp_code_dir }}"
with_items: with_items:
- "{{ sandbox_local_requirements }}" - "{{ sandbox_local_requirements }}"
- "{{ sandbox_post_requirements }}" - "{{ sandbox_post_requirements }}"
......
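The task changes above follow one pattern: the folded-scalar command/shell form, with chdir= and creates= embedded in the command line, is replaced by a plain command string plus a dict-style args: block, and git_2_0_1 gives way to the stock git module. A generic before/after sketch of that conversion; the task name and paths are invented for illustration:

# before: module options folded into the free-form command
- name: install requirements (illustrative)
  command: >
    /edx/app/example/venv/bin/pip install -r requirements.txt
    chdir=/edx/app/example/code
    creates=/edx/app/example/.requirements-installed

# after: options move under args:, leaving the command itself clean
- name: install requirements (illustrative)
  command: "/edx/app/example/venv/bin/pip install -r requirements.txt"
  args:
    chdir: /edx/app/example/code
    creates: /edx/app/example/.requirements-installed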
...@@ -3,27 +3,35 @@ ...@@ -3,27 +3,35 @@
template: template:
src: "{{ item[0] }}.{{ item[1] }}.json.j2" src: "{{ item[0] }}.{{ item[1] }}.json.j2"
dest: "{{ edxapp_app_dir }}/{{ item[0] }}.{{ item[1] }}.json" dest: "{{ edxapp_app_dir }}/{{ item[0] }}.{{ item[1] }}.json"
become_user: "{{ edxapp_user }}" owner: "{{ edxapp_user }}"
with_nested: group: "{{ common_web_group }}"
mode: 0640
become: true
with_nested:
- "{{ service_variants_enabled }}" - "{{ service_variants_enabled }}"
- [ 'env', 'auth' ] - [ 'env', 'auth' ]
tags: tags:
- install - install
- install:configuration - install:configuration
- edxapp_cfg - install:app-configuration
- edxapp_cfg # Old deprecated tag, will remove when possible
- name: create auth and application yaml config - name: create auth and application yaml config
template: template:
src: "{{ item[0] }}.{{ item[1] }}.yaml.j2" src: "{{ item[0] }}.{{ item[1] }}.yaml.j2"
dest: "{{ EDXAPP_CFG_DIR }}/{{ item[0] }}.{{ item[1] }}.yaml" dest: "{{ EDXAPP_CFG_DIR }}/{{ item[0] }}.{{ item[1] }}.yaml"
become_user: "{{ edxapp_user }}" owner: "{{ edxapp_user }}"
group: "{{ common_web_group }}"
mode: 0640
become: true
with_nested: with_nested:
- "{{ service_variants_enabled }}" - "{{ service_variants_enabled }}"
- [ 'env', 'auth' ] - [ 'env', 'auth' ]
tags: tags:
- install - install
- install:configuration - install:configuration
- edxapp_cfg - install:app-configuration
- edxapp_cfg # Old deprecated tag, will remove when possible
# write the supervisor scripts for the service variants # write the supervisor scripts for the service variants
- name: "writing {{ item }} supervisor script" - name: "writing {{ item }} supervisor script"
...@@ -32,6 +40,7 @@ ...@@ -32,6 +40,7 @@
dest: "{{ supervisor_available_dir }}/{{ item }}.conf" dest: "{{ supervisor_available_dir }}/{{ item }}.conf"
owner: "{{ supervisor_user }}" owner: "{{ supervisor_user }}"
group: "{{ supervisor_user }}" group: "{{ supervisor_user }}"
mode: 0644
become_user: "{{ supervisor_user }}" become_user: "{{ supervisor_user }}"
with_items: "{{ service_variants_enabled }}" with_items: "{{ service_variants_enabled }}"
tags: tags:
...@@ -45,6 +54,7 @@ ...@@ -45,6 +54,7 @@
dest: "{{ supervisor_available_dir }}/{{ item }}" dest: "{{ supervisor_available_dir }}/{{ item }}"
owner: "{{ supervisor_user }}" owner: "{{ supervisor_user }}"
group: "{{ supervisor_user }}" group: "{{ supervisor_user }}"
mode: 0644
become_user: "{{ supervisor_user }}" become_user: "{{ supervisor_user }}"
with_items: with_items:
- edxapp.conf - edxapp.conf
...@@ -57,6 +67,7 @@ ...@@ -57,6 +67,7 @@
template: template:
src: "{{ item }}_gunicorn.py.j2" src: "{{ item }}_gunicorn.py.j2"
dest: "{{ edxapp_app_dir }}/{{ item }}_gunicorn.py" dest: "{{ edxapp_app_dir }}/{{ item }}_gunicorn.py"
mode: 0644
become_user: "{{ edxapp_user }}" become_user: "{{ edxapp_user }}"
with_items: "{{ service_variants_enabled }}" with_items: "{{ service_variants_enabled }}"
tags: tags:
......
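With the two template tasks above, the rendered configs are written as root (become: true) but owned by the edxapp user, grouped to common_web_group, and restricted to mode 0640. A sketch of what the with_nested loops produce, assuming the usual service_variants_enabled of lms and cms (an assumption, not stated in this hunk):

# rendered by the JSON task (owner {{ edxapp_user }}, group {{ common_web_group }}, mode 0640)
{{ edxapp_app_dir }}/lms.env.json
{{ edxapp_app_dir }}/lms.auth.json
{{ edxapp_app_dir }}/cms.env.json
{{ edxapp_app_dir }}/cms.auth.json
# rendered by the YAML task, same ownership and mode
{{ EDXAPP_CFG_DIR }}/lms.env.yaml
{{ EDXAPP_CFG_DIR }}/lms.auth.yaml
{{ EDXAPP_CFG_DIR }}/cms.env.yaml
{{ EDXAPP_CFG_DIR }}/cms.auth.yaml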
...@@ -10,7 +10,7 @@ command={{ executable }} -c {{ edxapp_app_dir }}/cms_gunicorn.py {{ EDXAPP_CMS_G ...@@ -10,7 +10,7 @@ command={{ executable }} -c {{ edxapp_app_dir }}/cms_gunicorn.py {{ EDXAPP_CMS_G
user={{ common_web_user }} user={{ common_web_user }}
directory={{ edxapp_code_dir }} directory={{ edxapp_code_dir }}
environment={% if COMMON_ENABLE_NEWRELIC_APP %}NEW_RELIC_APP_NAME={{ EDXAPP_NEWRELIC_CMS_APPNAME }},NEW_RELIC_LICENSE_KEY={{ NEWRELIC_LICENSE_KEY }},{% endif -%}PORT={{ edxapp_cms_gunicorn_port }},ADDRESS={{ edxapp_cms_gunicorn_host }},LANG={{ EDXAPP_LANG }},DJANGO_SETTINGS_MODULE={{ EDXAPP_CMS_ENV }},SERVICE_VARIANT="cms",ALTERNATE_WORKER_QUEUES="lms" environment={% if COMMON_ENABLE_NEWRELIC_APP %}NEW_RELIC_APP_NAME={{ EDXAPP_NEWRELIC_CMS_APPNAME }},NEW_RELIC_LICENSE_KEY={{ NEWRELIC_LICENSE_KEY }},{% endif -%}PORT={{ edxapp_cms_gunicorn_port }},ADDRESS={{ edxapp_cms_gunicorn_host }},LANG={{ EDXAPP_LANG }},DJANGO_SETTINGS_MODULE={{ EDXAPP_CMS_ENV }},SERVICE_VARIANT="cms"
stdout_logfile={{ supervisor_log_dir }}/%(program_name)s-stdout.log stdout_logfile={{ supervisor_log_dir }}/%(program_name)s-stdout.log
stderr_logfile={{ supervisor_log_dir }}/%(program_name)s-stderr.log stderr_logfile={{ supervisor_log_dir }}/%(program_name)s-stderr.log
killasgroup=true killasgroup=true
......
...@@ -10,7 +10,7 @@ command={{ executable }} -c {{ edxapp_app_dir }}/lms_gunicorn.py lms.wsgi ...@@ -10,7 +10,7 @@ command={{ executable }} -c {{ edxapp_app_dir }}/lms_gunicorn.py lms.wsgi
user={{ common_web_user }} user={{ common_web_user }}
directory={{ edxapp_code_dir }} directory={{ edxapp_code_dir }}
environment={% if COMMON_ENABLE_NEWRELIC_APP %}NEW_RELIC_APP_NAME={{ EDXAPP_NEWRELIC_LMS_APPNAME }},NEW_RELIC_LICENSE_KEY={{ NEWRELIC_LICENSE_KEY }},NEW_RELIC_CONFIG_FILE={{ edxapp_app_dir }}/newrelic.ini,{% endif -%} PORT={{ edxapp_lms_gunicorn_port }},ADDRESS={{ edxapp_lms_gunicorn_host }},LANG={{ EDXAPP_LANG }},DJANGO_SETTINGS_MODULE={{ EDXAPP_LMS_ENV }},SERVICE_VARIANT="lms",ALTERNATE_WORKER_QUEUES="cms",PATH="{{ edxapp_deploy_path }}" environment={% if COMMON_ENABLE_NEWRELIC_APP %}NEW_RELIC_APP_NAME={{ EDXAPP_NEWRELIC_LMS_APPNAME }},NEW_RELIC_LICENSE_KEY={{ NEWRELIC_LICENSE_KEY }},NEW_RELIC_CONFIG_FILE={{ edxapp_app_dir }}/newrelic.ini,{% endif -%} PORT={{ edxapp_lms_gunicorn_port }},ADDRESS={{ edxapp_lms_gunicorn_host }},LANG={{ EDXAPP_LANG }},DJANGO_SETTINGS_MODULE={{ EDXAPP_LMS_ENV }},SERVICE_VARIANT="lms",PATH="{{ edxapp_deploy_path }}"
stdout_logfile={{ supervisor_log_dir }}/%(program_name)s-stdout.log stdout_logfile={{ supervisor_log_dir }}/%(program_name)s-stdout.log
stderr_logfile={{ supervisor_log_dir }}/%(program_name)s-stderr.log stderr_logfile={{ supervisor_log_dir }}/%(program_name)s-stderr.log
killasgroup=true killasgroup=true
......
...@@ -19,13 +19,13 @@ edxlocal_databases: ...@@ -19,13 +19,13 @@ edxlocal_databases:
edxlocal_database_users: edxlocal_database_users:
- { - {
db: "{{ ECOMMERCE_DEFAULT_DB_NAME | default(None) }}", db: "{{ ECOMMERCE_DEFAULT_DB_NAME | default(None) }}",
user: "{{ ECOMMERCE_DATABASES.default.USER | default(None) }}", user: "{{ ECOMMERCE_DATABASE_USER | default(None) }}",
pass: "{{ ECOMMERCE_DATABASES.default.PASSWORD | default(None) }}" pass: "{{ ECOMMERCE_DATABASE_PASSWORD | default(None) }}"
} }
- { - {
db: "{{ INSIGHTS_DATABASE_NAME | default(None) }}", db: "{{ INSIGHTS_DATABASE_NAME | default(None) }}",
user: "{{ INSIGHTS_DATABASES.default.USER | default(None) }}", user: "{{ INSIGHTS_MYSQL_USER | default(None) }}",
pass: "{{ INSIGHTS_DATABASES.default.PASSWORD | default(None) }}" pass: "{{ INSIGHTS_MYSQL_PASSWORD | default(None) }}"
} }
- { - {
db: "{{ XQUEUE_MYSQL_DB_NAME | default(None) }}", db: "{{ XQUEUE_MYSQL_DB_NAME | default(None) }}",
...@@ -44,18 +44,18 @@ edxlocal_database_users: ...@@ -44,18 +44,18 @@ edxlocal_database_users:
} }
- { - {
db: "{{ PROGRAMS_DEFAULT_DB_NAME | default(None) }}", db: "{{ PROGRAMS_DEFAULT_DB_NAME | default(None) }}",
user: "{{ PROGRAMS_DATABASES.default.USER | default(None) }}", user: "{{ PROGRAMS_DATABASE_USER | default(None) }}",
pass: "{{ PROGRAMS_DATABASES.default.PASSWORD | default(None) }}" pass: "{{ PROGRAMS_DATABASE_PASSWORD | default(None) }}"
} }
- { - {
db: "{{ ANALYTICS_PIPELINE_OUTPUT_DATABASE_NAME | default(None) }}", db: "{{ ANALYTICS_PIPELINE_OUTPUT_DATABASE_NAME | default(None) }}",
user: "{{ ANALYTICS_PIPELINE_OUTPUT_DATABASE.username }}", user: "{{ ANALYTICS_PIPELINE_OUTPUT_DATABASE_USER | default(None) }}",
pass: "{{ ANALYTICS_PIPELINE_OUTPUT_DATABASE.password }}" pass: "{{ ANALYTICS_PIPELINE_OUTPUT_DATABASE_PASSWORD | default(None) }}"
} }
- { - {
db: "{{ HIVE_METASTORE_DATABASE_NAME | default(None) }}", db: "{{ HIVE_METASTORE_DATABASE_NAME | default(None) }}",
user: "{{ HIVE_METASTORE_DATABASE.user | default(None) }}", user: "{{ HIVE_METASTORE_DATABASE_USER | default(None) }}",
pass: "{{ HIVE_METASTORE_DATABASE.password | default(None) }}" pass: "{{ HIVE_METASTORE_DATABASE_PASSWORD | default(None) }}"
} }
- { - {
db: "{{ CREDENTIALS_DEFAULT_DB_NAME | default(None) }}", db: "{{ CREDENTIALS_DEFAULT_DB_NAME | default(None) }}",
......
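The user/password lookups above now read flat variables (for example ECOMMERCE_DATABASE_USER, INSIGHTS_MYSQL_USER, ANALYTICS_PIPELINE_OUTPUT_DATABASE_USER, HIVE_METASTORE_DATABASE_USER) instead of dereferencing nested *_DATABASES dicts. A minimal sketch of how a local-dev vars file might define a pair of them; the names come from the hunk above, the values are placeholders:

# placeholder credentials for a local install -- do not use in production
ECOMMERCE_DATABASE_USER: 'ecommerce'
ECOMMERCE_DATABASE_PASSWORD: 'example-password'
INSIGHTS_MYSQL_USER: 'insights'
INSIGHTS_MYSQL_PASSWORD: 'example-password'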
...@@ -21,30 +21,27 @@ ...@@ -21,30 +21,27 @@
# #
# #
- name: download elasticsearch plugin - name: download elasticsearch plugin
shell: > shell: "./npi fetch {{ ELASTICSEARCH_MONITOR_PLUGIN }} -y"
./npi fetch {{ ELASTICSEARCH_MONITOR_PLUGIN }} -y
args: args:
chdir: "{{ NEWRELIC_NPI_PREFIX }}" chdir: "{{ NEWRELIC_NPI_PREFIX }}"
creates: "{{ NEWRELIC_NPI_PREFIX }}/plugins/{{ ELASTICSEARCH_MONITOR_PLUGIN }}.compressed" creates: "{{ NEWRELIC_NPI_PREFIX }}/plugins/{{ ELASTICSEARCH_MONITOR_PLUGIN }}.compressed"
become_user: "{{ NEWRELIC_USER }}" become_user: "{{ NEWRELIC_USER }}"
- name: prepare elasticsearch plugin - name: prepare elasticsearch plugin
shell: > shell: "./npi prepare {{ ELASTICSEARCH_MONITOR_PLUGIN }} -n"
./npi prepare {{ ELASTICSEARCH_MONITOR_PLUGIN }} -n
args: args:
chdir: "{{ NEWRELIC_NPI_PREFIX }}" chdir: "{{ NEWRELIC_NPI_PREFIX }}"
become_user: "{{ NEWRELIC_USER }}" become_user: "{{ NEWRELIC_USER }}"
- name: configure elasticsearch plugin - name: configure elasticsearch plugin
template: > template:
src=plugins/me.snov.newrelic-elasticsearch/newrelic-elasticsearch-plugin-1.4.1/config/plugin.json.j2 src: "plugins/me.snov.newrelic-elasticsearch/newrelic-elasticsearch-plugin-1.4.1/config/plugin.json.j2"
dest={{ NEWRELIC_NPI_PREFIX }}/plugins/{{ ELASTICSEARCH_MONITOR_PLUGIN }}/newrelic-elasticsearch-plugin-1.4.1/config/plugin.json dest: "{{ NEWRELIC_NPI_PREFIX }}/plugins/{{ ELASTICSEARCH_MONITOR_PLUGIN }}/newrelic-elasticsearch-plugin-1.4.1/config/plugin.json"
owner={{ NEWRELIC_USER }} owner: "{{ NEWRELIC_USER }}"
mode=0644 mode: 0644
- name: register/start elasticsearch plugin - name: register/start elasticsearch plugin
shell: > shell: "./npi add-service {{ ELASTICSEARCH_MONITOR_PLUGIN }} --start --user={{ NEWRELIC_USER }}"
./npi add-service {{ ELASTICSEARCH_MONITOR_PLUGIN }} --start --user={{ NEWRELIC_USER }}
args: args:
chdir: "{{ NEWRELIC_NPI_PREFIX }}" chdir: "{{ NEWRELIC_NPI_PREFIX }}"
become_user: "root" become_user: "root"
......
...@@ -27,7 +27,7 @@ ...@@ -27,7 +27,7 @@
- name: Test for enhanced networking - name: Test for enhanced networking
local_action: local_action:
module: shell aws --profile {{ profile }} ec2 describe-instance-attribute --instance-id {{ ansible_ec2_instance_id }} --attribute sriovNetSupport module: shell aws ec2 describe-instance-attribute --instance-id {{ ansible_ec2_instance_id }} --attribute sriovNetSupport
changed_when: False changed_when: False
become: False become: False
register: enhanced_networking_raw register: enhanced_networking_raw
...@@ -56,7 +56,7 @@ ...@@ -56,7 +56,7 @@
- name: Set enhanced networking instance attribute - name: Set enhanced networking instance attribute
local_action: local_action:
module: shell aws --profile {{ profile }} ec2 modify-instance-attribute --instance-id {{ ansible_ec2_instance_id }} --sriov-net-support simple module: shell aws ec2 modify-instance-attribute --instance-id {{ ansible_ec2_instance_id }} --sriov-net-support simple
when: supports_enhanced_networking and has_ixgbevf_kernel_module and not enhanced_networking_already_on when: supports_enhanced_networking and has_ixgbevf_kernel_module and not enhanced_networking_already_on
- name: Start instances - name: Start instances
......