Commit f713b79b by Feanil Patel

Merge pull request #1678 from edx/feanil/rc-lavash-0

Feanil/rc lavash 0
parents 3f695726 82d97ecb
- Role: edxapp
- A new var was added to make it easy to invalidate the default
memcache store to make it easier to invalidate sessions. Updating
the edxapp env.json files will result in all users getting logged
out. This is a one time penalty as long as the value of `EDXAPP_DEFAULT_CACHE_VERSION`
is not explicitly changed.
- Role: nginx
- New html templates for server errors added.
Defaults for a ratelimiting static page and server error static page.
CMS/LMS are set to use them by default, wording can be changed in the
Nginx default vars.
- Role: edxapp
- We now have an all caps variable override for celery workers
- Role: common - Role: common
- We now remove the default syslog.d conf file (50-default.conf) this will - We now remove the default syslog.d conf file (50-default.conf) this will
break people who have hand edited that file. break people who have hand edited that file.
...@@ -5,6 +20,10 @@ ...@@ -5,6 +20,10 @@
- Role: edxapp - Role: edxapp
- Updated the module store settings to match the new settings format. - Updated the module store settings to match the new settings format.
- Update, possible breaking change: the edxapp role vars edxapp_lms_env and edxapp_cms_env have
been changed to EDXAPP_LMS_ENV and EDXAPP_CMS_ENV to indicate, via our convention,
that overriding them is expected. The default values remain the same.
- Role: analytics-api - Role: analytics-api
- Added a new role for the analytics-api Django app. Currently a private repo - Added a new role for the analytics-api Django app. Currently a private repo
...@@ -29,3 +48,7 @@ ...@@ -29,3 +48,7 @@
- Role: Mongo - Role: Mongo
- Fixed case of variable used in if block that breaks cluster configuration - Fixed case of variable used in if block that breaks cluster configuration
by changing mongo_clustered to MONGO_CLUSTERED. by changing mongo_clustered to MONGO_CLUSTERED.
- Role: Edxapp
- Added EDXAPP_LMS_AUTH_EXTRA and EDXAPP_CMS_AUTH_EXTRA for passing unique AUTH_EXTRA configurations to the LMS and CMS.
Both variables default to EDXAPP_AUTH_EXTRA for backward compatibility
import os import os
import prettytable
import hipchat
import time import time
import random
from ansible import utils from ansible import utils
try:
import prettytable
except ImportError:
prettytable = None
try:
import hipchat
except ImportError:
hipchat = None
class CallbackModule(object): class CallbackModule(object):
...@@ -24,30 +29,40 @@ class CallbackModule(object): ...@@ -24,30 +29,40 @@ class CallbackModule(object):
""" """
def __init__(self): def __init__(self):
self.enabled = "HIPCHAT_TOKEN" in os.environ
if 'HIPCHAT_TOKEN' in os.environ: if not self.enabled:
self.start_time = time.time() return
self.task_report = []
self.last_task = None # make sure we got our imports
self.last_task_changed = False if not hipchat:
self.last_task_count = 0 raise ImportError(
self.last_task_delta = 0 "The hipchat plugin requires the hipchat Python module, "
self.last_task_start = time.time() "which is not installed or was not found."
self.condensed_task_report = (os.getenv('HIPCHAT_CONDENSED', True) == True) )
self.room = os.getenv('HIPCHAT_ROOM', 'ansible') if not prettytable:
self.from_name = os.getenv('HIPCHAT_FROM', 'ansible') raise ImportError(
self.allow_notify = (os.getenv('HIPCHAT_NOTIFY') != 'false') "The hipchat plugin requires the prettytable Python module, "
try: "which is not installed or was not found."
self.hipchat_conn = hipchat.HipChat(token=os.getenv('HIPCHAT_TOKEN')) )
except Exception as e: self.start_time = time.time()
utils.warning("Unable to connect to hipchat: {}".format(e)) self.task_report = []
self.hipchat_msg_prefix = os.getenv('HIPCHAT_MSG_PREFIX', '') self.last_task = None
self.hipchat_msg_color = os.getenv('HIPCHAT_MSG_COLOR', '') self.last_task_changed = False
self.printed_playbook = False self.last_task_count = 0
self.playbook_name = None self.last_task_delta = 0
self.enabled = True self.last_task_start = time.time()
else: self.condensed_task_report = (os.getenv('HIPCHAT_CONDENSED', True) == True)
self.enabled = False self.room = os.getenv('HIPCHAT_ROOM', 'ansible')
self.from_name = os.getenv('HIPCHAT_FROM', 'ansible')
self.allow_notify = (os.getenv('HIPCHAT_NOTIFY') != 'false')
try:
self.hipchat_conn = hipchat.HipChat(token=os.getenv('HIPCHAT_TOKEN'))
except Exception as e:
utils.warning("Unable to connect to hipchat: {}".format(e))
self.hipchat_msg_prefix = os.getenv('HIPCHAT_MSG_PREFIX', '')
self.hipchat_msg_color = os.getenv('HIPCHAT_MSG_COLOR', '')
self.printed_playbook = False
self.playbook_name = None
def _send_hipchat(self, message, room=None, from_name=None, color=None, message_format='text'): def _send_hipchat(self, message, room=None, from_name=None, color=None, message_format='text'):
...@@ -221,7 +236,7 @@ class CallbackModule(object): ...@@ -221,7 +236,7 @@ class CallbackModule(object):
summary_output = "<b>{}</b>: <i>{}</i> - ".format(self.hipchat_msg_prefix, host) summary_output = "<b>{}</b>: <i>{}</i> - ".format(self.hipchat_msg_prefix, host)
for summary_item in ['ok', 'changed', 'unreachable', 'failures']: for summary_item in ['ok', 'changed', 'unreachable', 'failures']:
if stats[summary_item] != 0: if stats[summary_item] != 0:
summary_output += "<b>{}</b> - {} ".format(summary_item, stats[summary_item]) summary_output += "<b>{}</b> - {} ".format(summary_item, stats[summary_item])
summary_all_host_output.append(summary_output) summary_all_host_output.append(summary_output)
self._send_hipchat("<br />".join(summary_all_host_output), message_format='html') self._send_hipchat("<br />".join(summary_all_host_output), message_format='html')
msg = "<b>{description}</b>: Finished Ansible run for <b><i>{play}</i> in {min:02} minutes, {sec:02} seconds</b><br /><br />".format( msg = "<b>{description}</b>: Finished Ansible run for <b><i>{play}</i> in {min:02} minutes, {sec:02} seconds</b><br /><br />".format(
......
...@@ -22,11 +22,12 @@ import time ...@@ -22,11 +22,12 @@ import time
import json import json
import socket import socket
try: try:
import boto
except ImportError:
boto = None
else:
import boto.sqs import boto.sqs
from boto.exception import NoAuthHandlerFound from boto.exception import NoAuthHandlerFound
except ImportError:
print "Boto is required for the sqs_notify callback plugin"
raise
class CallbackModule(object): class CallbackModule(object):
...@@ -47,36 +48,42 @@ class CallbackModule(object): ...@@ -47,36 +48,42 @@ class CallbackModule(object):
- START events - START events
""" """
def __init__(self): def __init__(self):
self.enable_sqs = 'ANSIBLE_ENABLE_SQS' in os.environ
if not self.enable_sqs:
return
# make sure we got our imports
if not boto:
raise ImportError(
"The sqs callback module requires the boto Python module, "
"which is not installed or was not found."
)
self.start_time = time.time() self.start_time = time.time()
if 'ANSIBLE_ENABLE_SQS' in os.environ: if not 'SQS_REGION' in os.environ:
self.enable_sqs = True print 'ANSIBLE_ENABLE_SQS enabled but SQS_REGION ' \
if not 'SQS_REGION' in os.environ: 'not defined in environment'
print 'ANSIBLE_ENABLE_SQS enabled but SQS_REGION ' \ sys.exit(1)
'not defined in environment' self.region = os.environ['SQS_REGION']
sys.exit(1) try:
self.region = os.environ['SQS_REGION'] self.sqs = boto.sqs.connect_to_region(self.region)
try: except NoAuthHandlerFound:
self.sqs = boto.sqs.connect_to_region(self.region) print 'ANSIBLE_ENABLE_SQS enabled but cannot connect ' \
except NoAuthHandlerFound: 'to AWS due invalid credentials'
print 'ANSIBLE_ENABLE_SQS enabled but cannot connect ' \ sys.exit(1)
'to AWS due invalid credentials' if not 'SQS_NAME' in os.environ:
sys.exit(1) print 'ANSIBLE_ENABLE_SQS enabled but SQS_NAME not ' \
if not 'SQS_NAME' in os.environ: 'defined in environment'
print 'ANSIBLE_ENABLE_SQS enabled but SQS_NAME not ' \ sys.exit(1)
'defined in environment' self.name = os.environ['SQS_NAME']
sys.exit(1) self.queue = self.sqs.create_queue(self.name)
self.name = os.environ['SQS_NAME'] if 'SQS_MSG_PREFIX' in os.environ:
self.queue = self.sqs.create_queue(self.name) self.prefix = os.environ['SQS_MSG_PREFIX']
if 'SQS_MSG_PREFIX' in os.environ:
self.prefix = os.environ['SQS_MSG_PREFIX']
else:
self.prefix = ''
self.last_seen_ts = {}
else: else:
self.enable_sqs = False self.prefix = ''
self.last_seen_ts = {}
def runner_on_failed(self, host, res, ignore_errors=False): def runner_on_failed(self, host, res, ignore_errors=False):
if self.enable_sqs: if self.enable_sqs:
......
...@@ -2,6 +2,9 @@ ...@@ -2,6 +2,9 @@
hosts: all hosts: all
sudo: True sudo: True
gather_facts: True gather_facts: True
vars:
serial_count: 1
serial: "{{ serial_count }}"
roles: roles:
- aide - aide
- role: datadog - role: datadog
......
...@@ -3,5 +3,8 @@ ...@@ -3,5 +3,8 @@
hosts: all hosts: all
sudo: True sudo: True
gather_facts: True gather_facts: True
vars:
serial_count: 1
serial: "{{ serial_count }}"
roles: roles:
- alton - alton
- name: Deploy Antivirus Scanner
hosts: all
sudo: True
gather_facts: True
roles:
- antivirus
- role: datadog
when: COMMON_ENABLE_DATADOG
- role: splunkforwarder
when: COMMON_ENABLE_SPLUNKFORWARDER
- role: newrelic
when: COMMON_ENABLE_NEWRELIC
...@@ -2,5 +2,8 @@ ...@@ -2,5 +2,8 @@
hosts: all hosts: all
sudo: True sudo: True
gather_facts: True gather_facts: True
vars:
serial_count: 1
serial: "{{ serial_count }}"
roles: roles:
- aws - aws
...@@ -2,5 +2,8 @@ ...@@ -2,5 +2,8 @@
hosts: all hosts: all
sudo: True sudo: True
gather_facts: True gather_facts: True
vars:
serial_count: 1
serial: "{{ serial_count }}"
roles: roles:
- bastion - bastion
...@@ -2,6 +2,9 @@ ...@@ -2,6 +2,9 @@
hosts: all hosts: all
sudo: True sudo: True
gather_facts: True gather_facts: True
vars:
serial_count: 1
serial: "{{ serial_count }}"
roles: roles:
- aws - aws
- certs - certs
......
# ansible-playbook -i ec2.py commoncluster.yml --limit tag_Name_stage-edx-commoncluster -e@/path/to/vars/env-deployment.yml -T 30 --list-hosts # ansible-playbook -i ec2.py cluster_rabbitmq.yml --limit tag_Name_stage-edx-commoncluster -e@/path/to/vars/env-deployment.yml -T 30 --list-hosts
- hosts: all - hosts: all
sudo: True sudo: True
...@@ -28,14 +28,9 @@ ...@@ -28,14 +28,9 @@
tasks: tasks:
- debug: msg="{{ ansible_ec2_local_ipv4 }}" - debug: msg="{{ ansible_ec2_local_ipv4 }}"
with_items: list.results with_items: list.results
- shell: echo "rabbit@ip-{{ item|replace('.', '-') }}"
when: item != ansible_ec2_local_ipv4
with_items: hostvars.keys()
register: list
- command: rabbitmqctl stop_app - command: rabbitmqctl stop_app
- command: rabbitmqctl join_cluster {{ item.stdout }} - command: rabbitmqctl join_cluster rabbit@ip-{{ hostvars.keys()[0]|replace('.', '-') }}
when: item.stdout is defined when: hostvars.keys()[0] != ansible_ec2_local_ipv4
with_items: list.results
- command: rabbitmqctl start_app - command: rabbitmqctl start_app
post_tasks: post_tasks:
- debug: var="{{ ansible_ec2_instance_id }}" - debug: var="{{ ansible_ec2_instance_id }}"
......
...@@ -2,6 +2,9 @@ ...@@ -2,6 +2,9 @@
hosts: all hosts: all
sudo: True sudo: True
gather_facts: True gather_facts: True
vars:
serial_count: 1
serial: "{{ serial_count }}"
roles: roles:
- common - common
- role: datadog - role: datadog
......
...@@ -31,7 +31,7 @@ ...@@ -31,7 +31,7 @@
- "EDXAPP_MONGO_HOSTS: {{ EDXAPP_MONGO_HOSTS }}" - "EDXAPP_MONGO_HOSTS: {{ EDXAPP_MONGO_HOSTS }}"
- "EDXAPP_MONGO_DB_NAME: {{ EDXAPP_MONGO_DB_NAME }}" - "EDXAPP_MONGO_DB_NAME: {{ EDXAPP_MONGO_DB_NAME }}"
- "EDXAPP_MONGO_USER: {{ EDXAPP_MONGO_USER }}" - "EDXAPP_MONGO_USER: {{ EDXAPP_MONGO_USER }}"
- "EDXAPP_MONGO_PASS: {{ EDXAPP_MONGO_PASS }}" - "EDXAPP_MONGO_PASSWORD: {{ EDXAPP_MONGO_PASSWORD }}"
tags: update_edxapp_mysql_host tags: update_edxapp_mysql_host
- name: call update on edx-platform - name: call update on edx-platform
......
...@@ -40,6 +40,10 @@ ...@@ -40,6 +40,10 @@
sudo: yes sudo: yes
with_items: with_items:
- python-mysqldb - python-mysqldb
# When this is run on jenkins the package will already
# exist and can't run as the jenkins user because it
# does not have sudo privs.
when: ansible_ssh_user != 'jenkins'
- name: create mysql databases for the edX stack - name: create mysql databases for the edX stack
mysql_db: > mysql_db: >
db={{ item[0] }}{{ item[1].db_name }} db={{ item[0] }}{{ item[1].db_name }}
......
...@@ -2,6 +2,9 @@ ...@@ -2,6 +2,9 @@
hosts: all hosts: all
sudo: True sudo: True
gather_facts: True gather_facts: True
vars:
serial_count: 1
serial: "{{ serial_count }}"
roles: roles:
- demo - demo
- role: datadog - role: datadog
......
...@@ -2,5 +2,8 @@ ...@@ -2,5 +2,8 @@
hosts: all hosts: all
sudo: True sudo: True
gather_facts: True gather_facts: True
vars:
serial_count: 1
serial: "{{ serial_count }}"
roles: roles:
- devpi - devpi
...@@ -2,6 +2,9 @@ ...@@ -2,6 +2,9 @@
hosts: all hosts: all
sudo: True sudo: True
gather_facts: True gather_facts: True
vars:
serial_count: 1
serial: "{{ serial_count }}"
roles: roles:
- aws - aws
- role: nginx - role: nginx
......
- name: Deploy the edx_ansible role - name: Deploy the edx_ansible role
hosts: all hosts: all
sudo: True sudo: True
gather_facts: False gather_facts: True
vars:
serial_count: 1
serial: "{{ serial_count }}"
roles: roles:
- edx_ansible - edx_ansible
...@@ -12,7 +12,6 @@ ...@@ -12,7 +12,6 @@
nginx_sites: nginx_sites:
- cms - cms
- lms - lms
- ora
- xqueue - xqueue
- xserver - xserver
- certs - certs
...@@ -31,7 +30,6 @@ ...@@ -31,7 +30,6 @@
- forum - forum
- { role: "xqueue", update_users: True } - { role: "xqueue", update_users: True }
- xserver - xserver
- ora
- certs - certs
- edx_ansible - edx_ansible
- analytics-api - analytics-api
......
- name: Create ec2 instance - name: Create ec2 instance
hosts: localhost hosts: localhost
connection: local connection: local
gather_facts: False gather_facts: True
vars: vars:
keypair: continuous-integration keypair: continuous-integration
instance_type: t2.medium instance_type: t2.medium
......
...@@ -3,6 +3,8 @@ ...@@ -3,6 +3,8 @@
sudo: True sudo: True
gather_facts: True gather_facts: True
vars: vars:
serial_count: 1
serial: "{{ serial_count }}"
roles: roles:
- aws - aws
- role: nginx - role: nginx
...@@ -20,6 +22,10 @@ ...@@ -20,6 +22,10 @@
- role: splunkforwarder - role: splunkforwarder
when: COMMON_ENABLE_SPLUNKFORWARDER when: COMMON_ENABLE_SPLUNKFORWARDER
- role: newrelic - role: newrelic
NEWRELIC_LOGWATCH:
- logwatch-503.j2
- logwatch-cms-errors.j2
- logwatch-lms-errors.j2
when: COMMON_ENABLE_NEWRELIC when: COMMON_ENABLE_NEWRELIC
- role: minos - role: minos
when: COMMON_ENABLE_MINOS when: COMMON_ENABLE_MINOS
...@@ -4,15 +4,27 @@ ...@@ -4,15 +4,27 @@
gather_facts: False gather_facts: False
vars: vars:
db_dry_run: "--db-dry-run" db_dry_run: "--db-dry-run"
syncdb: false
tasks: tasks:
# Syncdb with migrate when the migrate user is overridden in extra vars # Syncdb with migrate when the migrate user is overridden in extra vars
- name: syncdb and migrate - name: migrate
shell: > shell: >
chdir={{ edxapp_code_dir }} chdir={{ edxapp_code_dir }}
python manage.py {{ item }} syncdb --migrate --noinput --settings=aws_migrate {{ db_dry_run }} python manage.py {{ item }} migrate --noinput {{ db_dry_run }} --settings=aws_migrate
environment: environment:
DB_MIGRATION_USER: "{{ COMMON_MYSQL_MIGRATE_USER }}" DB_MIGRATION_USER: "{{ COMMON_MYSQL_MIGRATE_USER }}"
DB_MIGRATION_PASS: "{{ COMMON_MYSQL_MIGRATE_PASS }}" DB_MIGRATION_PASS: "{{ COMMON_MYSQL_MIGRATE_PASS }}"
with_items: with_items:
- lms - lms
- cms - cms
- name: syncdb
shell: >
chdir={{ edxapp_code_dir }}
python manage.py {{ item }} syncdb --noinput --settings=aws_migrate
environment:
DB_MIGRATION_USER: "{{ COMMON_MYSQL_MIGRATE_USER }}"
DB_MIGRATION_PASS: "{{ COMMON_MYSQL_MIGRATE_PASS }}"
when: syncdb
with_items:
- lms
- cms
...@@ -2,5 +2,8 @@ ...@@ -2,5 +2,8 @@
hosts: all hosts: all
sudo: True sudo: True
gather_facts: True gather_facts: True
vars:
serial_count: 1
serial: "{{ serial_count }}"
roles: roles:
- flower - flower
...@@ -2,6 +2,9 @@ ...@@ -2,6 +2,9 @@
hosts: all hosts: all
sudo: True sudo: True
gather_facts: True gather_facts: True
vars:
serial_count: 1
serial: "{{ serial_count }}"
roles: roles:
- aws - aws
- role: nginx - role: nginx
......
- name: Deploy Insights
hosts: all
sudo: True
gather_facts: True
vars:
ENABLE_DATADOG: False
ENABLE_SPLUNKFORWARDER: False
ENABLE_NEWRELIC: True
roles:
- role: nginx
nginx_sites:
- insights
- aws
- insights
- role: datadog
when: COMMON_ENABLE_DATADOG
- role: splunkforwarder
when: COMMON_ENABLE_SPLUNKFORWARDER
- role: newrelic
when: COMMON_ENABLE_NEWRELIC
...@@ -3,5 +3,8 @@ ...@@ -3,5 +3,8 @@
hosts: all hosts: all
sudo: True sudo: True
gather_facts: True gather_facts: True
vars:
serial_count: 1
serial: "{{ serial_count }}"
roles: roles:
- jenkins_admin - jenkins_admin
...@@ -8,6 +8,8 @@ ...@@ -8,6 +8,8 @@
gather_facts: True gather_facts: True
vars: vars:
mongo_enable_journal: False mongo_enable_journal: False
serial_count: 1
serial: "{{ serial_count }}"
vars_files: vars_files:
- roles/edxapp/defaults/main.yml - roles/edxapp/defaults/main.yml
- roles/ora/defaults/main.yml - roles/ora/defaults/main.yml
...@@ -18,4 +20,5 @@ ...@@ -18,4 +20,5 @@
- edxlocal - edxlocal
- mongo - mongo
- browsers - browsers
- browsermob-proxy
- jenkins_worker - jenkins_worker
...@@ -3,7 +3,9 @@ ...@@ -3,7 +3,9 @@
hosts: all hosts: all
sudo: True sudo: True
gather_facts: True gather_facts: True
serial: 1 vars:
serial_count: 1
serial: "{{ serial_count }}"
vars_files: vars_files:
- "{{secure_dir}}/vars/{{COMMON_ENVIRONMENT}}/legacy-ora.yml" - "{{secure_dir}}/vars/{{COMMON_ENVIRONMENT}}/legacy-ora.yml"
roles: roles:
......
...@@ -41,6 +41,18 @@ class LifecycleInventory(): ...@@ -41,6 +41,18 @@ class LifecycleInventory():
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
self.profile = profile self.profile = profile
def get_e_d_from_tags(self, group):
environment = "default_environment"
deployment = "default_deployment"
for r in group.tags:
if r.key == "environment":
environment = r.value
elif r.key == "deployment":
deployment = r.value
return environment,deployment
def get_instance_dict(self): def get_instance_dict(self):
ec2 = boto.connect_ec2(profile_name=self.profile) ec2 = boto.connect_ec2(profile_name=self.profile)
reservations = ec2.get_all_instances() reservations = ec2.get_all_instances()
...@@ -64,10 +76,12 @@ class LifecycleInventory(): ...@@ -64,10 +76,12 @@ class LifecycleInventory():
for instance in group.instances: for instance in group.instances:
private_ip_address = instances[instance.instance_id].private_ip_address private_ip_address = instances[instance.instance_id].private_ip_address
if private_ip_address:
inventory[group.name].append(private_ip_address) environment,deployment = self.get_e_d_from_tags(group)
inventory[group.name + "_" + instance.lifecycle_state].append(private_ip_address) inventory[environment + "_" + deployment + "_" + instance.lifecycle_state.replace(":","_")].append(private_ip_address)
inventory[instance.lifecycle_state.replace(":","_")].append(private_ip_address) inventory[group.name].append(private_ip_address)
inventory[group.name + "_" + instance.lifecycle_state.replace(":","_")].append(private_ip_address)
inventory[instance.lifecycle_state.replace(":","_")].append(private_ip_address)
print json.dumps(inventory, sort_keys=True, indent=2) print json.dumps(inventory, sort_keys=True, indent=2)
...@@ -77,8 +91,8 @@ if __name__=="__main__": ...@@ -77,8 +91,8 @@ if __name__=="__main__":
parser.add_argument('-p', '--profile', help='The aws profile to use when connecting.') parser.add_argument('-p', '--profile', help='The aws profile to use when connecting.')
parser.add_argument('-l', '--list', help='Ansible passes this, we ignore it.', action='store_true', default=True) parser.add_argument('-l', '--list', help='Ansible passes this, we ignore it.', action='store_true', default=True)
args = parser.parse_args() args = parser.parse_args()
LifecycleInventory(args.profile).run() LifecycleInventory(args.profile).run()
...@@ -3,6 +3,9 @@ ...@@ -3,6 +3,9 @@
sudo: True sudo: True
gather_facts: True gather_facts: True
vars: vars:
serial_count: 1
serial: "{{ serial_count }}"
roles: roles:
- common - common
- minos - aws
- minos
...@@ -2,6 +2,9 @@ ...@@ -2,6 +2,9 @@
hosts: all hosts: all
sudo: True sudo: True
gather_facts: True gather_facts: True
vars:
serial_count: 1
serial: "{{ serial_count }}"
roles: roles:
- mongo - mongo
- mongo_mms - mongo_mms
......
- name: Configure notifier instance
hosts: all
sudo: True
gather_facts: True
vars:
serial_count: 1
serial: "{{ serial_count }}"
roles:
- aws
- notifier
...@@ -2,6 +2,9 @@ ...@@ -2,6 +2,9 @@
hosts: all hosts: all
sudo: True sudo: True
gather_facts: True gather_facts: True
vars:
serial_count: 1
serial: "{{ serial_count }}"
roles: roles:
- role: nginx - role: nginx
nginx_sites: nginx_sites:
......
# ansible-playbook -i ./lifecycle_inventory.py ./retire_host.yml # ansible-playbook -i ./lifecycle_inventory.py ./retire_host.yml
# -e@/vars/env.yml --limit Terminating_Wait # -e@/vars/env.yml --limit Terminating_Wait -e TARGET="Terminating_Wait"
#
# Note that the target now must be specified as an argument
#
# #
# This is separate because it's use of handlers # This is separate because it's use of handlers
# leads to various race conditions. # leads to various race conditions.
# #
- name: Stop all services - name: Stop all services
hosts: Terminating_Wait hosts: "{{TARGET}}"
sudo: True sudo: True
gather_facts: False gather_facts: False
vars: vars:
...@@ -15,41 +18,30 @@ ...@@ -15,41 +18,30 @@
- stop_all_edx_services - stop_all_edx_services
- name: Server retirement workflow - name: Server retirement workflow
hosts: Terminating_Wait hosts: "{{TARGET}}"
sudo: True sudo: True
gather_facts: False gather_facts: False
tasks: tasks:
- name: Force a log rotation - name: Terminate existing s3 log sync
command: /usr/sbin/logrotate -f /etc/logrotate.d/{{ item }} command: /usr/bin/pkill send-logs-to-s3 || true
with_items: - name: "Ensure send-logs-to-s3 script is in the logrotate file"
- "apport" shell: grep send-logs-to-s3 /etc/logrotate.d/hourly/tracking.log
- "apt" # We only force a rotation of edx logs.
- "aptitude" # Forced rotation of system logfiles will only
- "dpkg" # work if there hasn't already been a previous rotation
- "hourly" # The logrotate will also call send-logs-to-s3 but hasn't
- "landscape-client" # been updated for all servers yet.
- "newrelic-sysmond" - name: Force a log rotation which will call the log sync
- "nginx"
- "nginx-access"
- "nginx-error"
- "ppp"
- "rsyslog"
- "ufw"
- "unattended-upgrades"
- "upstart"
- name: Force a log rotation
command: /usr/sbin/logrotate -f /etc/logrotate.d/hourly/{{ item }} command: /usr/sbin/logrotate -f /etc/logrotate.d/hourly/{{ item }}
with_items: with_items:
- "tracking.log" - "tracking.log"
- "edx-services" - "edx-services"
- name: Terminate existing s3 log sync # This catches the case where tracking.log is 0b
command: /usr/bin/pkill send-logs-to-s3 || true - name: Sync again
- name: Send logs to s3 command: /edx/bin/send-logs-to-s3 -d "{{ COMMON_LOG_DIR }}/tracking/*" -b "{{ COMMON_AWS_SYNC_BUCKET }}/logs/tracking"
command: /edx/bin/send-logs-to-s3
- name: Run minos verification - name: Run minos verification
hosts: Terminating_Wait hosts: "{{TARGET}}"
sudo: True sudo: True
gather_facts: False gather_facts: False
tasks: tasks:
......
...@@ -2,6 +2,9 @@ ...@@ -2,6 +2,9 @@
hosts: all hosts: all
sudo: True sudo: True
gather_facts: True gather_facts: True
vars:
serial_count: 1
serial: "{{ serial_count }}"
roles: roles:
- snort - snort
- role: datadog - role: datadog
......
...@@ -2,5 +2,8 @@ ...@@ -2,5 +2,8 @@
hosts: all hosts: all
sudo: True sudo: True
gather_facts: False gather_facts: False
vars:
serial_count: 1
serial: "{{ serial_count }}"
roles: roles:
- stop_all_edx_services - stop_all_edx_services
...@@ -2,6 +2,9 @@ ...@@ -2,6 +2,9 @@
hosts: all hosts: all
sudo: True sudo: True
gather_facts: True gather_facts: True
vars:
serial_count: 1
serial: "{{ serial_count }}"
roles: roles:
- testcourses - testcourses
- role: datadog - role: datadog
......
...@@ -35,6 +35,8 @@ ...@@ -35,6 +35,8 @@
- role: splunkforwarder - role: splunkforwarder
when: COMMON_ENABLE_SPLUNKFORWARDER when: COMMON_ENABLE_SPLUNKFORWARDER
- role: newrelic - role: newrelic
NEWRELIC_LOGWATCH:
- logwatch-xqueue-errors.j2
when: COMMON_ENABLE_NEWRELIC when: COMMON_ENABLE_NEWRELIC
post_tasks: post_tasks:
- debug: var="{{ ansible_ec2_instance_id }}" - debug: var="{{ ansible_ec2_instance_id }}"
......
...@@ -8,6 +8,8 @@ ...@@ -8,6 +8,8 @@
ENABLE_DATADOG: False ENABLE_DATADOG: False
ENABLE_SPLUNKFORWARDER: False ENABLE_SPLUNKFORWARDER: False
ENABLE_NEWRELIC: False ENABLE_NEWRELIC: False
serial_count: 1
serial: "{{ serial_count }}"
roles: roles:
- aws - aws
- xqwatcher - xqwatcher
...@@ -16,4 +18,4 @@ ...@@ -16,4 +18,4 @@
- role: splunkforwarder - role: splunkforwarder
when: COMMON_ENABLE_SPLUNKFORWARDER when: COMMON_ENABLE_SPLUNKFORWARDER
- role: newrelic - role: newrelic
when: COMMON_ENABLE_NEWRELIC when: COMMON_ENABLE_NEWRELIC
\ No newline at end of file
...@@ -2,6 +2,9 @@ ...@@ -2,6 +2,9 @@
hosts: all hosts: all
sudo: True sudo: True
gather_facts: True gather_facts: True
vars:
serial_count: 1
serial: "{{ serial_count }}"
roles: roles:
- aws - aws
- role: nginx - role: nginx
......
...@@ -31,14 +31,14 @@ ...@@ -31,14 +31,14 @@
tasks: tasks:
- name: edX configuration - name: edX configuration
cloudformation: > cloudformation: >
stack_name="$name" state=present stack_name="{{ name }}" state=present
region=$region disable_rollback=false region="{{ region }}" disable_rollback=false
template=../cloudformation_templates/edx-server-multi-instance.json template=../cloudformation_templates/edx-server-multi-instance.json
args: args:
template_parameters: template_parameters:
KeyName: $key KeyName: "{{key}}"
InstanceType: m1.small InstanceType: m1.small
GroupTag: $group GroupTag: "{{group}}"
register: stack register: stack
- name: show stack outputs - name: show stack outputs
debug: msg="My stack outputs are ${stack.stack_outputs}" debug: msg="My stack outputs are {{stack.stack_outputs}}"
...@@ -18,6 +18,7 @@ ...@@ -18,6 +18,7 @@
# These should stay false for the public AMI # These should stay false for the public AMI
COMMON_ENABLE_DATADOG: False COMMON_ENABLE_DATADOG: False
COMMON_ENABLE_SPLUNKFORWARDER: False COMMON_ENABLE_SPLUNKFORWARDER: False
ENABLE_LEGACY_ORA: !!null
roles: roles:
- role: nginx - role: nginx
nginx_sites: nginx_sites:
...@@ -38,7 +39,8 @@ ...@@ -38,7 +39,8 @@
- elasticsearch - elasticsearch
- forum - forum
- { role: "xqueue", update_users: True } - { role: "xqueue", update_users: True }
- ora - role: ora
when: ENABLE_LEGACY_ORA
- certs - certs
- edx_ansible - edx_ansible
- role: datadog - role: datadog
......
- name: setup the alton env
template: >
src="alton_env.j2" dest="{{ alton_app_dir }}/alton_env"
owner="{{ alton_user }}" group="{{ common_web_user }}"
mode=0644
notify: restart alton
- name: configure the boto profiles for alton - name: configure the boto profiles for alton
template: > template: >
src="boto.j2" src="boto.j2"
......
...@@ -34,11 +34,4 @@ ...@@ -34,11 +34,4 @@
- "{{ alton_app_dir }}" - "{{ alton_app_dir }}"
- "{{ alton_venvs_dir }}" - "{{ alton_venvs_dir }}"
- name: setup the alton env
template: >
src="alton_env.j2" dest="{{ alton_app_dir }}/alton_env"
owner="{{ alton_user }}" group="{{ common_web_user }}"
mode=0644
notify: restart alton
- include: deploy.yml tags=deploy - include: deploy.yml tags=deploy
...@@ -6,3 +6,5 @@ export {{ name }}="{{ value }}" ...@@ -6,3 +6,5 @@ export {{ name }}="{{ value }}"
{% endif %} {% endif %}
{%- endfor %} {%- endfor %}
export WILL_BOTO_PROFILES="{{ ALTON_AWS_CREDENTIALS|join(';') }}"
{% for deployment, creds in ALTON_AWS_CREDENTIALS.iteritems() %} {% for deployment, creds in ALTON_AWS_CREDENTIALS.iteritems() %}
[profile {{deployment}}] [profile {{ deployment }}]
aws_access_key_id = {{ creds.access_id }} aws_access_key_id = {{ creds.access_id }}
aws_secret_access_key = {{ creds.secret_key }} aws_secret_access_key = {{ creds.secret_key }}
......
...@@ -19,46 +19,58 @@ ANALYTICS_API_NEWRELIC_APPNAME: "{{ COMMON_ENVIRONMENT }}-{{ COMMON_DEPLOYMENT } ...@@ -19,46 +19,58 @@ ANALYTICS_API_NEWRELIC_APPNAME: "{{ COMMON_ENVIRONMENT }}-{{ COMMON_DEPLOYMENT }
ANALYTICS_API_PIP_EXTRA_ARGS: "-i {{ COMMON_PYPI_MIRROR_URL }}" ANALYTICS_API_PIP_EXTRA_ARGS: "-i {{ COMMON_PYPI_MIRROR_URL }}"
ANALYTICS_API_NGINX_PORT: "18100" ANALYTICS_API_NGINX_PORT: "18100"
ANALYTICS_API_DATABASES:
# rw user
default:
ENGINE: 'django.db.backends.mysql'
NAME: 'analytics-api'
USER: 'api001'
PASSWORD: 'password'
HOST: 'localhost'
PORT: '3306'
# read-only user
reports:
ENGINE: 'django.db.backends.mysql'
NAME: 'reports'
USER: 'reports001'
PASSWORD: 'password'
HOST: 'localhost'
PORT: '3306'
ANALYTICS_API_VERSION: "master" ANALYTICS_API_VERSION: "master"
# Default dummy user, override this!! # Default dummy user, override this!!
ANALYTICS_API_USERS: ANALYTICS_API_USERS:
"dummy-api-user": "changeme" "dummy-api-user": "changeme"
ANALYTICS_API_SECRET_KEY: 'Your secret key here'
ANALYTICS_API_TIME_ZONE: 'UTC'
ANALYTICS_API_LANGUAGE_CODE: 'en-us'
ANALYTICS_API_EMAIL_HOST: 'localhost'
ANALYTICS_API_EMAIL_HOST_USER: 'mail_user'
ANALYTICS_API_EMAIL_HOST_PASSWORD: 'mail_password'
ANALYTICS_API_EMAIL_PORT: 587
ANALYTICS_API_AUTH_TOKEN: 'put-your-api-token-here'
ANALYTICS_API_CONFIG: ANALYTICS_API_CONFIG:
ANALYTICS_DATABASE: 'reports' ANALYTICS_DATABASE: 'reports'
SECRET_KEY: 'Your secret key here' SECRET_KEY: '{{ ANALYTICS_API_SECRET_KEY }}'
TIME_ZONE: 'America/New_York' TIME_ZONE: '{{ ANALYTICS_API_TIME_ZONE }}'
LANGUAGE_CODE: 'en-us' LANGUAGE_CODE: '{{ANALYTICS_API_LANGUAGE_CODE }}'
# email config # email config
EMAIL_HOST: 'smtp.example.com' EMAIL_HOST: '{{ ANALYTICS_API_EMAIL_HOST }}'
EMAIL_HOST_PASSWORD: "" EMAIL_HOST_PASSWORD: '{{ ANALYTICS_API_EMAIL_HOST_PASSWORD }}'
EMAIL_HOST_USER: "" EMAIL_HOST_USER: '{{ ANALYTICS_API_EMAIL_HOST_USER }}'
EMAIL_PORT: 587 EMAIL_PORT: $ANALYTICS_API_EMAIL_PORT
API_AUTH_TOKEN: 'put-your-api-token-here' API_AUTH_TOKEN: '{{ ANALYTICS_API_AUTH_TOKEN }}'
STATICFILES_DIRS: [] STATICFILES_DIRS: ['static']
STATIC_ROOT: "{{ COMMON_DATA_DIR }}/{{ analytics_api_service_name }}/staticfiles" STATIC_ROOT: "{{ COMMON_DATA_DIR }}/{{ analytics_api_service_name }}/staticfiles"
# db config # db config
DATABASE_OPTIONS: DATABASE_OPTIONS:
connect_timeout: 10 connect_timeout: 10
DATABASES: DATABASES: '{{ ANALYTICS_API_DATABASES }}'
# rw user
default:
ENGINE: 'django.db.backends.mysql'
NAME: 'analytics-api'
USER: 'api001'
PASSWORD: 'password'
HOST: 'localhost'
PORT: '3306'
# read-only user
reports:
ENGINE: 'django.db.backends.mysql'
NAME: 'reports'
USER: 'reports001'
PASSWORD: 'password'
HOST: 'localhost'
PORT: '3306'
ANALYTICS_API_GUNICORN_WORKERS: "2" ANALYTICS_API_GUNICORN_WORKERS: "2"
ANALYTICS_API_GUNICORN_EXTRA: ""
# #
# vars are namespace with the module name. # vars are namespace with the module name.
# #
......
...@@ -32,7 +32,7 @@ ...@@ -32,7 +32,7 @@
# ansible-playbook -i 'api.example.com,' ./analyticsapi.yml -e@/ansible/vars/deployment.yml -e@/ansible/vars/env-deployment.yml # ansible-playbook -i 'api.example.com,' ./analyticsapi.yml -e@/ansible/vars/deployment.yml -e@/ansible/vars/env-deployment.yml
# #
- fail: msg="You must provide an private key for the analytics repo" - fail: msg="You must provide a private key for the analytics repo"
when: not ANALYTICS_API_GIT_IDENTITY when: not ANALYTICS_API_GIT_IDENTITY
- include: deploy.yml tags=deploy - include: deploy.yml tags=deploy
...@@ -15,4 +15,4 @@ export NEW_RELIC_LICENSE_KEY="{{ NEWRELIC_LICENSE_KEY }}" ...@@ -15,4 +15,4 @@ export NEW_RELIC_LICENSE_KEY="{{ NEWRELIC_LICENSE_KEY }}"
source {{ analytics_api_app_dir }}/analytics_api_env source {{ analytics_api_app_dir }}/analytics_api_env
{{ executable }} --pythonpath={{ analytics_api_code_dir }} -b {{ analytics_api_gunicorn_host }}:{{ analytics_api_gunicorn_port }} -w {{ ANALYTICS_API_GUNICORN_WORKERS }} --timeout={{ analytics_api_gunicorn_timeout }} analyticsdataserver.wsgi:application {{ executable }} --pythonpath={{ analytics_api_code_dir }} -b {{ analytics_api_gunicorn_host }}:{{ analytics_api_gunicorn_port }} -w {{ ANALYTICS_API_GUNICORN_WORKERS }} --timeout={{ analytics_api_gunicorn_timeout }} {{ ANALYTICS_API_GUNICORN_EXTRA }} analyticsdataserver.wsgi:application
...@@ -19,6 +19,7 @@ AS_SERVER_PORT: '9000' ...@@ -19,6 +19,7 @@ AS_SERVER_PORT: '9000'
AS_ENV_LANG: 'en_US.UTF-8' AS_ENV_LANG: 'en_US.UTF-8'
AS_LOG_LEVEL: 'INFO' AS_LOG_LEVEL: 'INFO'
AS_WORKERS: '2' AS_WORKERS: '2'
AS_GUNICORN_EXTRA: ""
# add public keys to enable the automator user # add public keys to enable the automator user
# for running manage.py commands # for running manage.py commands
...@@ -40,14 +41,14 @@ analytics_auth_config: ...@@ -40,14 +41,14 @@ analytics_auth_config:
DATABASES: DATABASES:
analytics: analytics:
<<: *databases_default <<: *databases_default
USER: $AS_DB_ANALYTICS_USER USER: "{{ AS_DB_ANALYTICS_USER }}"
PASSWORD: $AS_DB_ANALYTICS_PASSWORD PASSWORD: "{{ AS_DB_ANALYTICS_PASSWORD }}"
HOST: $AS_DB_ANALYTICS_HOST HOST: "{{ AS_DB_ANALYTICS_HOST }}"
ANALYTICS_API_KEY: $AS_API_KEY ANALYTICS_API_KEY: "{{ AS_API_KEY }}"
ANALYTICS_RESULTS_DB: ANALYTICS_RESULTS_DB:
MONGO_URI: $AS_DB_RESULTS_URL MONGO_URI: "{{ AS_DB_RESULTS_URL }}"
MONGO_DB: $AS_DB_RESULTS_DB MONGO_DB: "{{ AS_DB_RESULTS_DB }}"
MONGO_STORED_QUERIES_COLLECTION: $AS_DB_RESULTS_COLLECTION MONGO_STORED_QUERIES_COLLECTION: "{{ AS_DB_RESULTS_COLLECTION }}"
as_role_name: "analytics-server" as_role_name: "analytics-server"
as_user: "analytics-server" as_user: "analytics-server"
......
...@@ -28,7 +28,7 @@ ...@@ -28,7 +28,7 @@
accept_hostkey=yes accept_hostkey=yes
version={{ as_version }} force=true version={{ as_version }} force=true
environment: environment:
GIT_SSH: $as_git_ssh GIT_SSH: "{{ as_git_ssh }}"
notify: restart the analytics service notify: restart the analytics service
notify: start the analytics service notify: start the analytics service
tags: tags:
......
...@@ -18,4 +18,4 @@ env DJANGO_SETTINGS_MODULE={{ as_django_settings }} ...@@ -18,4 +18,4 @@ env DJANGO_SETTINGS_MODULE={{ as_django_settings }}
chdir {{ as_code_dir }} chdir {{ as_code_dir }}
setuid {{ as_web_user }} setuid {{ as_web_user }}
exec {{ as_venv_dir }}/bin/gunicorn -b 0.0.0.0:$PORT -w $WORKERS --pythonpath={{ as_code_dir }}/anserv anserv.wsgi exec {{ as_venv_dir }}/bin/gunicorn -b 0.0.0.0:$PORT -w $WORKERS --pythonpath={{ as_code_dir }}/anserv {{ AS_GUNICORN_EXTRA }} anserv.wsgi
...@@ -19,6 +19,7 @@ ANALYTICS_SERVER_PORT: '9000' ...@@ -19,6 +19,7 @@ ANALYTICS_SERVER_PORT: '9000'
ANALYTICS_ENV_LANG: 'en_US.UTF-8' ANALYTICS_ENV_LANG: 'en_US.UTF-8'
ANALYTICS_LOG_LEVEL: 'INFO' ANALYTICS_LOG_LEVEL: 'INFO'
ANALYTICS_WORKERS: '2' ANALYTICS_WORKERS: '2'
ANALYTICS_GUNICORN_EXTRA: ""
DATABASES: DATABASES:
default: &databases_default default: &databases_default
...@@ -33,14 +34,14 @@ analytics_auth_config: ...@@ -33,14 +34,14 @@ analytics_auth_config:
DATABASES: DATABASES:
analytics: analytics:
<<: *databases_default <<: *databases_default
USER: $ANALYTICS_DB_ANALYTICS_USER USER: "{{ ANALYTICS_DB_ANALYTICS_USER }}"
PASSWORD: $ANALYTICS_DB_ANALYTICS_PASSWORD PASSWORD: "{{ ANALYTICS_DB_ANALYTICS_PASSWORD }}"
HOST: $ANALYTICS_DB_ANALYTICS_HOST HOST: "{{ ANALYTICS_DB_ANALYTICS_HOST }}"
ANALYTICS_API_KEY: $ANALYTICS_API_KEY ANALYTICS_API_KEY: "{{ ANALYTICS_API_KEY }}"
ANALYTICS_RESULTS_DB: ANALYTICS_RESULTS_DB:
MONGO_URI: $ANALYTICS_DB_RESULTS_URL MONGO_URI: "{{ ANALYTICS_DB_RESULTS_URL }}"
MONGO_DB: $ANALYTICS_DB_RESULTS_DB MONGO_DB: "{{ ANALYTICS_DB_RESULTS_DB }}"
MONGO_STORED_QUERIES_COLLECTION: $ANALYTICS_DB_RESULTS_COLLECTION MONGO_STORED_QUERIES_COLLECTION: "{{ ANALYTICS_DB_RESULTS_COLLECTION }}"
analytics_role_name: "analytics" analytics_role_name: "analytics"
analytics_user: "analytics" analytics_user: "analytics"
......
...@@ -28,7 +28,7 @@ ...@@ -28,7 +28,7 @@
accept_hostkey=yes accept_hostkey=yes
version={{ analytics_version }} force=true version={{ analytics_version }} force=true
environment: environment:
GIT_SSH: $analytics_git_ssh GIT_SSH: "{{ analytics_git_ssh }}"
notify: restart the analytics service notify: restart the analytics service
notify: start the analytics service notify: start the analytics service
tags: tags:
......
...@@ -18,4 +18,4 @@ env DJANGO_SETTINGS_MODULE={{ analytics_django_settings }} ...@@ -18,4 +18,4 @@ env DJANGO_SETTINGS_MODULE={{ analytics_django_settings }}
chdir {{ analytics_code_dir }} chdir {{ analytics_code_dir }}
setuid {{ analytics_web_user }} setuid {{ analytics_web_user }}
exec {{ analytics_venv_dir }}/bin/gunicorn -b 0.0.0.0:$PORT -w $WORKERS --pythonpath={{ analytics_code_dir }}/anserv anserv.wsgi exec {{ analytics_venv_dir }}/bin/gunicorn -b 0.0.0.0:$PORT -w $WORKERS --pythonpath={{ analytics_code_dir }}/anserv {{ ANALYTICS_GUNICORN_EXTRA }} anserv.wsgi
...@@ -10,7 +10,7 @@ ...@@ -10,7 +10,7 @@
when: role_exists | success when: role_exists | success
- name: create role directories - name: create role directories
file: path=roles/{{role_name}}/{{ item }} state=directory file: path=roles/{{ role_name }}/{{ item }} state=directory
with_items: with_items:
- tasks - tasks
- meta - meta
......
...@@ -6,7 +6,7 @@ ...@@ -6,7 +6,7 @@
- debug - debug
- name: Dump lms auth|env file - name: Dump lms auth|env file
template: src=../../edxapp/templates/lms.{{item}}.json.j2 dest=/tmp/lms.{{item}}.json mode=0600 template: src=../../edxapp/templates/lms.{{ item }}.json.j2 dest=/tmp/lms.{{ item }}.json mode=0600
with_items: with_items:
- env - env
- auth - auth
...@@ -16,7 +16,7 @@ ...@@ -16,7 +16,7 @@
- debug - debug
- name: Dump lms-preview auth|env file - name: Dump lms-preview auth|env file
template: src=../../edxapp/templates/lms-preview.{{item}}.json.j2 dest=/tmp/lms-preview.{{item}}.json mode=0600 template: src=../../edxapp/templates/lms-preview.{{ item }}.json.j2 dest=/tmp/lms-preview.{{ item }}.json mode=0600
with_items: with_items:
- env - env
- auth - auth
...@@ -26,7 +26,7 @@ ...@@ -26,7 +26,7 @@
- debug - debug
- name: Dump cms auth|env file - name: Dump cms auth|env file
template: src=../../edxapp/templates/cms.{{item}}.json.j2 dest=/tmp/cms.{{item}}.json mode=0600 template: src=../../edxapp/templates/cms.{{ item }}.json.j2 dest=/tmp/cms.{{ item }}.json mode=0600
with_items: with_items:
- env - env
- auth - auth
...@@ -44,7 +44,7 @@ ...@@ -44,7 +44,7 @@
- name: fetch remote files - name: fetch remote files
# fetch is fail-safe for remote files that don't exist # fetch is fail-safe for remote files that don't exist
# setting mode is not an option # setting mode is not an option
fetch: src=/tmp/{{item}} dest=/tmp/{{ansible_hostname}}-{{item}} flat=True fetch: src=/tmp/{{ item }} dest=/tmp/{{ ansible_hostname }}-{{item}} flat=True
with_items: with_items:
- ansible.all.json - ansible.all.json
- ansible.all.yml - ansible.all.yml
......
---
#
# edX Configuration
#
# github:     https://github.com/edx/configuration
# wiki:       https://github.com/edx/configuration/wiki
# code style: https://github.com/edx/configuration/wiki/Ansible-Coding-Conventions
# license:    https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
##
# Defaults for role antivirus
#
#
# vars are namespaced with the module name.
#
antivirus_role_name: antivirus

#
# OS packages
#
antivirus_debian_pkgs: [clamav]
antivirus_redhat_pkgs: []
antivirus_pip_pkgs: []

antivirus_app_dir: /edx/app/antivirus
antivirus_user: "antivirus"

# Buckets to scan.  Deliberately null so a deployment that forgets to
# override it fails fast instead of silently scanning nothing.
ANTIVIRUS_BUCKETS: !!null

# Where scan notifications are sent from/to.
ANTIVIRUS_MAILTO: "{{ EDXAPP_TECH_SUPPORT_EMAIL }}"
ANTIVIRUS_MAILFROM: "{{ EDXAPP_DEFAULT_FROM_EMAIL }}"

# AWS credentials.  The S3 (bucket sync) and SES (mail) credentials
# default to the shared pair but can be overridden independently.
ANTIVIRUS_AWS_KEY: ""
ANTIVIRUS_AWS_SECRET: ""
ANTIVIRUS_S3_AWS_KEY: "{{ ANTIVIRUS_AWS_KEY }}"
ANTIVIRUS_SES_AWS_KEY: "{{ ANTIVIRUS_AWS_KEY }}"
ANTIVIRUS_S3_AWS_SECRET: "{{ ANTIVIRUS_AWS_SECRET }}"
ANTIVIRUS_SES_AWS_SECRET: "{{ ANTIVIRUS_AWS_SECRET }}"
---
#
# edX Configuration
#
# github: https://github.com/edx/configuration
# wiki: https://github.com/edx/configuration/wiki
# code style: https://github.com/edx/configuration/wiki/Ansible-Coding-Conventions
# license: https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
#
#
# Handlers for role antivirus
#
# Overview:
#
#
---
#
# edX Configuration
#
# github:     https://github.com/edx/configuration
# wiki:       https://github.com/edx/configuration/wiki
# code style: https://github.com/edx/configuration/wiki/Ansible-Coding-Conventions
# license:    https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
##
# Role includes for role antivirus
#
# Roles that are applied before this role's tasks run.
dependencies:
  # Provision login users on the scanner host.
  # NOTE(review): user_info is wired to BASTION_USER_INFO, which looks
  # copied from the bastion role — confirm antivirus hosts are meant to
  # receive the bastion user set.
  - role: user
    user_info: "{{ BASTION_USER_INFO }}"
  # Installs the aws CLI used by the scan script's s3/ses calls.
  - aws
---
#
# edX Configuration
#
# github:     https://github.com/edx/configuration
# wiki:       https://github.com/edx/configuration/wiki
# code style: https://github.com/edx/configuration/wiki/Ansible-Coding-Conventions
# license:    https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
#
#
# Tasks for role antivirus
#
# Overview: installs ClamAV, creates a dedicated scanner user and
# directories, and schedules an hourly cron job per S3 bucket that
# syncs the bucket locally and scans it.
#
# Dependencies: the aws role (for the aws CLI used by the scan script).
#
# Example play:
#   roles:
#     - antivirus
#

# ClamAV itself (see antivirus_debian_pkgs in defaults).
- name: install antivirus system packages
  apt: pkg={{ item }} install_recommends=yes state=present
  with_items: antivirus_debian_pkgs

# Service account that owns the scan script and data; no shell,
# home is the app dir (created below, hence createhome=no).
- name: create antivirus scanner user
  user: >
    name="{{ antivirus_user }}"
    home="{{ antivirus_app_dir }}"
    createhome=no
    shell=/bin/false

# App dir holds the script; data/ receives the synced bucket contents.
- name: create antivirus app and data dirs
  file: >
    path="{{ item }}"
    state=directory
    owner="{{ antivirus_user }}"
    group="{{ antivirus_user }}"
  with_items:
    - "{{ antivirus_app_dir }}"
    - "{{ antivirus_app_dir }}/data"

# Read/execute only (0555): the script embeds AWS credentials at
# render time, so it must not be world-writable.
- name: install antivirus s3 scanner script
  template: >
    src=s3_bucket_virus_scan.sh.j2
    dest={{ antivirus_app_dir }}/s3_bucket_virus_scan.sh
    mode=0555
    owner={{ antivirus_user }}
    group={{ antivirus_user }}

# One cron entry (and cron_file) per bucket, run at the top of
# every hour as the scanner user.
# NOTE(review): the bucket name is used verbatim in cron_file —
# confirm bucket names never contain characters invalid in filenames.
- name: install antivirus s3 scanner cronjob
  cron: >
    name="antivirus-{{ item }}"
    job="{{ antivirus_app_dir }}/s3_bucket_virus_scan.sh -b '{{ item }}' -m '{{ ANTIVIRUS_MAILTO }}' -f '{{ ANTIVIRUS_MAILFROM }}'"
    backup=yes
    cron_file=antivirus-{{ item }}
    user={{ antivirus_user }}
    hour="*"
    minute="0"
    day="*"
  with_items: ANTIVIRUS_BUCKETS
#! /bin/bash

# Sync the contents of an S3 bucket locally and scan them with ClamAV;
# if the scan does not come back clean, mail the scan output via SES.
# This is a jinja template rendered by ansible at deploy time.

DEBUG="false"
BUCKETNAME="none"
MAILTO=""
MAILFROM=""
ANTIVIRUS_S3_AWS_KEY="{{ ANTIVIRUS_S3_AWS_KEY }}"
ANTIVIRUS_SES_AWS_KEY="{{ ANTIVIRUS_SES_AWS_KEY }}"
ANTIVIRUS_S3_AWS_SECRET="{{ ANTIVIRUS_S3_AWS_SECRET }}"
ANTIVIRUS_SES_AWS_SECRET="{{ ANTIVIRUS_SES_AWS_SECRET }}"
AWS_DEFAULT_REGION="{{ aws_region }}"

# Print usage and the supported options.
# (The original echoed an unset $VERSION after $0; dropped.)
function usage {
    echo "$0";
    echo "Run ClamAV against the contents of an S3 Bucket.";
    echo "Usage: $0 [options]";
    echo "options:";
    echo "  -d        Debug mode";
    echo "  -h        Usage (this screen)";
    echo "  -b <bucket name>";
    echo "  -m <notify mail address>";
    echo "  -f <notify from address>";
    echo "  -k <AWS Key ID>";
    echo "  -s <AWS Secret Key>"
}

while getopts "dhb:m:f:k:s:" optionName; do
  case "$optionName" in
    d)
      DEBUG="true"
      ;;
    h)
      usage;
      exit;
      ;;
    [?])
      usage;
      exit;
      ;;
    b)
      BUCKETNAME=$OPTARG;
      ;;
    m)
      MAILTO=$OPTARG;
      ;;
    f)
      MAILFROM=$OPTARG;
      ;;
    k)
      # A key given on the command line overrides the deploy-time
      # credentials for both the S3 sync and the SES notification.
      AWS_ACCESS_KEY_ID=$OPTARG;
      ANTIVIRUS_S3_AWS_KEY=$OPTARG;
      ANTIVIRUS_SES_AWS_KEY=$OPTARG;
      ;;
    s)
      AWS_SECRET_ACCESS_KEY=$OPTARG;
      ANTIVIRUS_S3_AWS_SECRET=$OPTARG;
      ANTIVIRUS_SES_AWS_SECRET=$OPTARG;
      ;;
  esac
done

# Bail out rather than mkdir/sync into the wrong directory if the
# app dir is missing.
cd "{{ antivirus_app_dir }}" || exit 1

export AWS_ACCESS_KEY_ID=$ANTIVIRUS_S3_AWS_KEY
export AWS_SECRET_ACCESS_KEY=$ANTIVIRUS_S3_AWS_SECRET
export AWS_DEFAULT_REGION

# Quote the bucket name everywhere so names with shell metacharacters
# cannot break the command line.
mkdir -p "data/$BUCKETNAME"
aws s3 sync "s3://$BUCKETNAME/" "data/$BUCKETNAME"

# clamscan exits 0 when clean, 1 when a virus is found, 2 on error —
# so scanner errors also trigger the mail; presumably intentional
# (better a false alarm than a silent failure) — TODO confirm.
CLAMOUT=$(clamscan -ri "data/$BUCKETNAME");
if [[ $? -ne 0 ]]; then
  export AWS_ACCESS_KEY_ID=$ANTIVIRUS_SES_AWS_KEY
  export AWS_SECRET_ACCESS_KEY=$ANTIVIRUS_SES_AWS_SECRET
  aws ses send-email --to "$MAILTO" --from "$MAILFROM" --subject "Virus Scanner malicious file on $BUCKETNAME" --text "$CLAMOUT"
fi
WSGIPythonHome {{ edxapp_venv_dir }} WSGIPythonHome {{ edxapp_venv_dir }}
WSGIRestrictEmbedded On WSGIRestrictEmbedded On
<VirtualHost *:{{ apache_port }}> <VirtualHost *:*>
ServerName https://{{ lms_env_config.SITE_NAME }} ServerName https://{{ lms_env_config.SITE_NAME }}
ServerAlias *.{{ lms_env_config.SITE_NAME }} ServerAlias *.{{ lms_env_config.SITE_NAME }}
UseCanonicalName On UseCanonicalName On
......
...@@ -23,29 +23,34 @@ AWS_S3_LOGS: false ...@@ -23,29 +23,34 @@ AWS_S3_LOGS: false
# This relies on your server being able to send mail # This relies on your server being able to send mail
AWS_S3_LOGS_NOTIFY_EMAIL: dummy@example.com AWS_S3_LOGS_NOTIFY_EMAIL: dummy@example.com
AWS_S3_LOGS_FROM_EMAIL: dummy@example.com AWS_S3_LOGS_FROM_EMAIL: dummy@example.com
# Separate buckets for tracking logs and everything else
# You should be overriding the environment and deployment vars
# Order of precedence is left to right for exclude and include options
AWS_S3_LOG_PATHS:
- bucket: "edx-{{ COMMON_ENVIRONMENT }}-{{ COMMON_DEPLOYMENT }}/logs/tracking"
path: "{{ COMMON_LOG_DIR }}/tracking/*"
- bucket: "edx-{{ COMMON_ENVIRONMENT }}-{{ COMMON_DEPLOYMENT }}/logs/application"
path: "{{ COMMON_LOG_DIR }}/!(*tracking*)"
- bucket: "edx-{{ COMMON_ENVIRONMENT }}-{{ COMMON_DEPLOYMENT }}/logs/system"
path: "/var/log/*"
# #
# vars are namespaced with the module name. # vars are namespaced with the module name.
# #
aws_role_name: aws aws_role_name: aws
aws_data_dir: "{{ COMMON_DATA_DIR }}/aws"
aws_app_dir: "{{ COMMON_APP_DIR }}/aws" aws_dirs:
aws_s3_sync_script: "{{ aws_app_dir }}/send-logs-to-s3" home:
aws_s3_logfile: "{{ aws_log_dir }}/s3-log-sync.log" path: "{{ COMMON_APP_DIR }}/{{ aws_role_name }}"
aws_log_dir: "{{ COMMON_LOG_DIR }}/aws" owner: "root"
group: "root"
mode: "0755"
logs:
path: "{{ COMMON_LOG_DIR }}/{{ aws_role_name }}"
owner: "syslog"
group: "syslog"
mode: "0700"
data:
path: "{{ COMMON_DATA_DIR }}/{{ aws_role_name }}"
owner: "root"
group: "root"
mode: "0700"
aws_s3_sync_script: "{{ aws_dirs.home.path }}/send-logs-to-s3"
aws_s3_logfile: "{{ aws_dirs.logs.path }}/s3-log-sync.log"
aws_region: "us-east-1" aws_region: "us-east-1"
# default path to the aws binary # default path to the aws binary
s3cmd_cmd: "{{ COMMON_BIN_DIR }}/s3cmd" aws_s3cmd: "{{ COMMON_BIN_DIR }}/s3cmd"
aws_cmd: "/usr/local/bin/aws" aws_cmd: "/usr/local/bin/aws"
# #
# OS packages # OS packages
......
...@@ -21,26 +21,14 @@ ...@@ -21,26 +21,14 @@
# #
# #
- name: create data directories - name: create all service directories
file: > file: >
path={{ item }} path="{{ item.value.path }}"
state=directory state="directory"
owner=root owner="{{ item.value.owner }}"
group=root group="{{ item.value.group }}"
mode=0700 mode="{{ item.value.mode }}"
with_items: with_dict: aws_dirs
- "{{ aws_data_dir }}"
- "{{ aws_log_dir }}"
- name: create app directory
file: >
path={{ item }}
state=directory
owner=root
group=root
mode=0755
with_items:
- "{{ aws_app_dir }}"
- name: install system packages - name: install system packages
apt: > apt: >
...@@ -57,18 +45,18 @@ ...@@ -57,18 +45,18 @@
- name: get s3cmd - name: get s3cmd
get_url: > get_url: >
url={{ aws_s3cmd_url }} url={{ aws_s3cmd_url }}
dest={{ aws_data_dir }}/ dest={{ aws_dirs.data.path }}/
- name: untar s3cmd - name: untar s3cmd
shell: > shell: >
tar xf {{ aws_data_dir }}/{{ aws_s3cmd_version }}.tar.gz tar xf {{ aws_dirs.data.path }}/{{ aws_s3cmd_version }}.tar.gz
creates={{ aws_app_dir }}/{{ aws_s3cmd_version }}/s3cmd creates={{ aws_dirs.data.path }}/{{ aws_s3cmd_version }}/s3cmd
chdir={{ aws_app_dir }} chdir={{ aws_dirs.home.path }}
- name: create symlink for s3cmd - name: create symlink for s3cmd
file: > file: >
src={{ aws_app_dir }}/{{ aws_s3cmd_version }}/s3cmd src={{ aws_dirs.home.path }}/{{ aws_s3cmd_version }}/s3cmd
dest={{ COMMON_BIN_DIR }}/s3cmd dest={{ aws_s3cmd }}
state=link state=link
- name: create s3 log sync script - name: create s3 log sync script
...@@ -84,7 +72,7 @@ ...@@ -84,7 +72,7 @@
dest={{ COMMON_BIN_DIR }}/{{ aws_s3_sync_script|basename }} dest={{ COMMON_BIN_DIR }}/{{ aws_s3_sync_script|basename }}
when: AWS_S3_LOGS when: AWS_S3_LOGS
- name: run s3 log sync script on supervisor shutdown - name: force logrotate on supervisor stop
template: > template: >
src=etc/init/sync-on-stop.conf.j2 src=etc/init/sync-on-stop.conf.j2
dest=/etc/init/sync-on-stop.conf dest=/etc/init/sync-on-stop.conf
...@@ -99,4 +87,5 @@ ...@@ -99,4 +87,5 @@
user: root user: root
minute: 0 minute: 0
job: "{{ aws_s3_sync_script }} > /dev/null 2>&1" job: "{{ aws_s3_sync_script }} > /dev/null 2>&1"
state: absent
when: AWS_S3_LOGS when: AWS_S3_LOGS
start on stopped supervisor start on stopped supervisor
description "sync s3 logs on supervisor shutdown" description "sync tracking logs on supervisor shutdown"
script script
/bin/bash {{ aws_s3_sync_script }} /usr/sbin/logrotate -f /etc/logrotate.d/hourly/tracking.log
/usr/sbin/logrotate -f /etc/logrotate.d/hourly/edx-services
end script end script
...@@ -4,13 +4,23 @@ ...@@ -4,13 +4,23 @@
# #
# This script can be called from logrotate # This script can be called from logrotate
# to sync logs to s3 # to sync logs to s3
#
if (( $EUID != 0 )); then if (( $EUID != 0 )); then
echo "Please run as the root user" echo "Please run as the root user"
exit 1 exit 1
fi fi
exec > >(tee "{{ aws_s3_logfile }}") #
# Ensure the log processors can read without
# running as root
if [ ! -f "{{ aws_s3_logfile }}" ]; then
sudo -u syslog touch "{{ aws_s3_logfile }}"
else
chown syslog.syslog "{{ aws_s3_logfile }}"
fi
exec > >(tee -a "{{ aws_s3_logfile }}")
exec 2>&1 exec 2>&1
# s3cmd sync requires a valid home # s3cmd sync requires a valid home
...@@ -31,10 +41,12 @@ usage() { ...@@ -31,10 +41,12 @@ usage() {
-v add verbosity (set -x) -v add verbosity (set -x)
-n echo what will be done -n echo what will be done
-h this -h this
-d directory to sync
-b bucket path to sync to
EO EO
} }
while getopts "vhn" opt; do while getopts "vhnb:d:" opt; do
case $opt in case $opt in
v) v)
set -x set -x
...@@ -48,9 +60,21 @@ while getopts "vhn" opt; do ...@@ -48,9 +60,21 @@ while getopts "vhn" opt; do
noop="echo Would have run: " noop="echo Would have run: "
shift shift
;; ;;
d)
directory=$OPTARG
;;
b)
bucket_path=$OPTARG
;;
esac esac
done done
if [[ -z $bucket_path || -z $directory ]]; then
echo "ERROR: You must provide a directory and a bucket to sync!"
usage
exit 1
fi
# grab the first security group for the instance # grab the first security group for the instance
# which will be used as a directory name in the s3 # which will be used as a directory name in the s3
# bucket # bucket
...@@ -90,9 +114,7 @@ instance_id=$(ec2metadata --instance-id) ...@@ -90,9 +114,7 @@ instance_id=$(ec2metadata --instance-id)
ip=$(ec2metadata --local-ipv4) ip=$(ec2metadata --local-ipv4)
availability_zone=$(ec2metadata --availability-zone) availability_zone=$(ec2metadata --availability-zone)
# region isn't available via the metadata service # region isn't available via the metadata service
region=${availability_zone:0:${{lb}}#availability_zone{{rb}} - 1} region=${availability_zone:0:${{ lb }}#availability_zone{{ rb }} - 1}
s3_path="${2}/$sec_grp/" s3_path="${2}/$sec_grp/"
{% for item in AWS_S3_LOG_PATHS -%} $noop {{ aws_s3cmd }} --multipart-chunk-size-mb 5120 --disable-multipart sync $directory "s3://${bucket_path}/${sec_grp}/${instance_id}-${ip}/"
$noop {{ s3cmd_cmd }} sync {{ item['path'] }} "s3://{{ item['bucket'] }}/$sec_grp/${instance_id}-${ip}/"
{% endfor %}
#!/bin/sh #!/bin/sh
/etc/browsermob-proxy/bin/browsermob-proxy /etc/browsermob-proxy/bin/browsermob-proxy $*
...@@ -13,7 +13,7 @@ browser_deb_pkgs: ...@@ -13,7 +13,7 @@ browser_deb_pkgs:
# which often causes spurious acceptance test failures. # which often causes spurious acceptance test failures.
browser_s3_deb_pkgs: browser_s3_deb_pkgs:
- { name: "google-chrome-stable_30.0.1599.114-1_amd64.deb", url: "https://s3.amazonaws.com/vagrant.testeng.edx.org/google-chrome-stable_30.0.1599.114-1_amd64.deb" } - { name: "google-chrome-stable_30.0.1599.114-1_amd64.deb", url: "https://s3.amazonaws.com/vagrant.testeng.edx.org/google-chrome-stable_30.0.1599.114-1_amd64.deb" }
- { name: "firefox_25.0+build3-0ubuntu0.12.04.1_amd64.deb", url: "https://s3.amazonaws.com/vagrant.testeng.edx.org/firefox_25.0%2Bbuild3-0ubuntu0.12.04.1_amd64.deb" } - { name: "firefox_28.0+build2-0ubuntu0.12.04.1_amd64.deb", url: "https://s3.amazonaws.com/vagrant.testeng.edx.org/firefox_28.0%2Bbuild2-0ubuntu0.12.04.1_amd64.deb" }
# Chrome and ChromeDriver # Chrome and ChromeDriver
chromedriver_version: 2.6 chromedriver_version: 2.6
......
...@@ -71,25 +71,25 @@ certs_env_config: ...@@ -71,25 +71,25 @@ certs_env_config:
# CERTS_DATA is legacy, not used # CERTS_DATA is legacy, not used
CERT_DATA: {} CERT_DATA: {}
QUEUE_NAME: "certificates" QUEUE_NAME: "certificates"
QUEUE_URL: $CERTS_QUEUE_URL QUEUE_URL: "{{ CERTS_QUEUE_URL }}"
CERT_BUCKET: $CERTS_BUCKET CERT_BUCKET: "{{ CERTS_BUCKET }}"
# gnupg signing key # gnupg signing key
CERT_KEY_ID: $CERTS_KEY_ID CERT_KEY_ID: "{{ CERTS_KEY_ID }}"
LOGGING_ENV: "" LOGGING_ENV: ""
CERT_GPG_DIR: $certs_gpg_dir CERT_GPG_DIR: "{{ certs_gpg_dir }}"
CERT_URL: $CERTS_URL CERT_URL: "{{ CERTS_URL }}"
CERT_DOWNLOAD_URL: $CERTS_DOWNLOAD_URL CERT_DOWNLOAD_URL: "{{ CERTS_DOWNLOAD_URL }}"
CERT_WEB_ROOT: $CERTS_WEB_ROOT CERT_WEB_ROOT: "{{ CERTS_WEB_ROOT }}"
COPY_TO_WEB_ROOT: $CERTS_COPY_TO_WEB_ROOT COPY_TO_WEB_ROOT: "{{ CERTS_COPY_TO_WEB_ROOT }}"
S3_UPLOAD: $CERTS_S3_UPLOAD S3_UPLOAD: "{{ CERTS_S3_UPLOAD }}"
CERT_VERIFY_URL: $CERTS_VERIFY_URL CERT_VERIFY_URL: "{{ CERTS_VERIFY_URL }}"
TEMPLATE_DATA_DIR: $CERTS_TEMPLATE_DATA_DIR TEMPLATE_DATA_DIR: "{{ CERTS_TEMPLATE_DATA_DIR }}"
certs_auth_config: certs_auth_config:
QUEUE_USER: $CERTS_QUEUE_USER QUEUE_USER: "{{ CERTS_QUEUE_USER }}"
QUEUE_PASS: $CERTS_QUEUE_PASS QUEUE_PASS: "{{ CERTS_QUEUE_PASS }}"
QUEUE_AUTH_USER: $CERTS_XQUEUE_AUTH_USER QUEUE_AUTH_USER: "{{ CERTS_XQUEUE_AUTH_USER }}"
QUEUE_AUTH_PASS: $CERTS_XQUEUE_AUTH_PASS QUEUE_AUTH_PASS: "{{ CERTS_XQUEUE_AUTH_PASS }}"
CERT_KEY_ID: $CERTS_KEY_ID CERT_KEY_ID: "{{ CERTS_KEY_ID }}"
CERT_AWS_ID: $CERTS_AWS_ID CERT_AWS_ID: "{{ CERTS_AWS_ID }}"
CERT_AWS_KEY: $CERTS_AWS_KEY CERT_AWS_KEY: "{{ CERTS_AWS_KEY }}"
...@@ -7,7 +7,12 @@ ...@@ -7,7 +7,12 @@
COMMON_ENABLE_BASIC_AUTH: False COMMON_ENABLE_BASIC_AUTH: False
COMMON_HTPASSWD_USER: edx COMMON_HTPASSWD_USER: edx
COMMON_HTPASSWD_PASS: edx COMMON_HTPASSWD_PASS: edx
# Turn on syncing logs on rotation for edx
# application and tracking logs, must also
# have the AWS role installed
COMMON_AWS_SYNC: False
COMMON_AWS_SYNC_BUCKET: "edx-{{ COMMON_ENVIRONMENT }}-{{ COMMON_DEPLOYMENT }}"
COMMON_AWS_S3_SYNC_SCRIPT: "{{ COMMON_BIN_DIR }}/send-logs-to-s3"
COMMON_BASE_DIR: /edx COMMON_BASE_DIR: /edx
COMMON_DATA_DIR: "{{ COMMON_BASE_DIR}}/var" COMMON_DATA_DIR: "{{ COMMON_BASE_DIR}}/var"
COMMON_APP_DIR: "{{ COMMON_BASE_DIR}}/app" COMMON_APP_DIR: "{{ COMMON_BASE_DIR}}/app"
...@@ -24,6 +29,7 @@ COMMON_ENVIRONMENT: 'default_env' ...@@ -24,6 +29,7 @@ COMMON_ENVIRONMENT: 'default_env'
COMMON_DEPLOYMENT: 'default_deployment' COMMON_DEPLOYMENT: 'default_deployment'
COMMON_PYPI_MIRROR_URL: 'https://pypi.python.org/simple' COMMON_PYPI_MIRROR_URL: 'https://pypi.python.org/simple'
COMMON_NPM_MIRROR_URL: 'http://registry.npmjs.org' COMMON_NPM_MIRROR_URL: 'http://registry.npmjs.org'
COMMON_UBUNTU_APT_KEYSERVER: "http://keyserver.ubuntu.com/pks/lookup?op=get&fingerprint=on&search="
# do not include http/https # do not include http/https
COMMON_GIT_MIRROR: 'github.com' COMMON_GIT_MIRROR: 'github.com'
# override this var to set a different hostname # override this var to set a different hostname
...@@ -38,6 +44,7 @@ COMMON_CUSTOM_DHCLIENT_CONFIG: false ...@@ -38,6 +44,7 @@ COMMON_CUSTOM_DHCLIENT_CONFIG: false
COMMON_MOTD_TEMPLATE: "motd.tail.j2" COMMON_MOTD_TEMPLATE: "motd.tail.j2"
COMMON_SSH_PASSWORD_AUTH: "no" COMMON_SSH_PASSWORD_AUTH: "no"
COMMON_SECURITY_UPDATES: no
# These are three maintenance accounts across all databases # These are three maintenance accounts across all databases
# the read only user is is granted select privs on all dbs # the read only user is is granted select privs on all dbs
# the admin user is granted create user privs on all dbs # the admin user is granted create user privs on all dbs
...@@ -69,6 +76,7 @@ common_debian_pkgs: ...@@ -69,6 +76,7 @@ common_debian_pkgs:
- mosh - mosh
- rsyslog - rsyslog
- screen - screen
- tmux
- tree - tree
- git - git
- unzip - unzip
...@@ -102,6 +110,10 @@ disable_edx_services: False ...@@ -102,6 +110,10 @@ disable_edx_services: False
# so different start scripts are generated in dev mode. # so different start scripts are generated in dev mode.
devstack: False devstack: False
# Some cluster apps need special settings when in vagrant
# due to eth0 always being the same IP address
vagrant_cluster: False
common_debian_variants: common_debian_variants:
- Ubuntu - Ubuntu
- Debian - Debian
......
...@@ -2,5 +2,5 @@ ...@@ -2,5 +2,5 @@
dependencies: dependencies:
- role: user - role: user
user_info: "{{ COMMON_USER_INFO }}" user_info: "{{ COMMON_USER_INFO }}"
- role: security
when: COMMON_SECURITY_UPDATES
--- ---
- name: Update CA Certificates
shell: >
/usr/sbin/update-ca-certificates
- name: Add user www-data - name: Add user www-data
# This is the default user for nginx # This is the default user for nginx
user: > user: >
......
...@@ -57,6 +57,6 @@ request subnet-mask, broadcast-address, time-offset, routers, ...@@ -57,6 +57,6 @@ request subnet-mask, broadcast-address, time-offset, routers,
#} #}
interface "eth0" { interface "eth0" {
prepend domain-search {% for search in COMMON_DHCLIENT_DNS_SEARCH -%}"{{search}}"{%- if not loop.last -%},{%- endif -%} prepend domain-search {% for search in COMMON_DHCLIENT_DNS_SEARCH -%}"{{ search }}"{%- if not loop.last -%},{%- endif -%}
{%- endfor -%}; {%- endfor -%};
} }
...@@ -11,4 +11,9 @@ ...@@ -11,4 +11,9 @@
postrotate postrotate
/usr/bin/killall -HUP rsyslogd /usr/bin/killall -HUP rsyslogd
endscript endscript
lastaction
{% if COMMON_AWS_SYNC -%}
{{ COMMON_AWS_S3_SYNC_SCRIPT }} -d "{{ COMMON_LOG_DIR }}/tracking/*" -b "{{ COMMON_AWS_SYNC_BUCKET }}/logs/tracking"
{% endif -%}
endscript
} }
--- ---
DATADOG_API_KEY: "SPECIFY_KEY_HERE" DATADOG_API_KEY: "SPECIFY_KEY_HERE"
datadog_apt_key: "http://keyserver.ubuntu.com/pks/lookup?op=get&search=0x226AE980C7A7DA52" datadog_agent_version: '1:5.0.4-516'
datadog_apt_key: "0x226AE980C7A7DA52"
datadog_debian_pkgs: datadog_debian_pkgs:
- apparmor-utils - apparmor-utils
- build-essential - build-essential
......
...@@ -22,17 +22,22 @@ ...@@ -22,17 +22,22 @@
- datadog - datadog
- name: add apt key - name: add apt key
apt_key: id=C7A7DA52 url={{datadog_apt_key}} state=present apt_key: id=C7A7DA52 url={{ COMMON_UBUNTU_APT_KEYSERVER }}{{ datadog_apt_key }} state=present
tags:
- datadog
- name: remove unstable apt repository
apt_repository_1.8: repo='deb http://apt.datadoghq.com/ unstable main' validate_certs=no state=absent
tags: tags:
- datadog - datadog
- name: install apt repository - name: install apt repository
apt_repository_1.8: repo='deb http://apt.datadoghq.com/ unstable main' update_cache=yes validate_certs=no apt_repository_1.8: repo='deb http://apt.datadoghq.com/ stable main' update_cache=yes validate_certs=no
tags: tags:
- datadog - datadog
- name: install datadog agent - name: install datadog agent
apt: pkg="datadog-agent" apt: pkg="datadog-agent={{ datadog_agent_version }}"
tags: tags:
- datadog - datadog
......
...@@ -30,6 +30,6 @@ demo_test_users: ...@@ -30,6 +30,6 @@ demo_test_users:
password: edx password: edx
demo_edxapp_user: 'edxapp' demo_edxapp_user: 'edxapp'
demo_edxapp_venv_bin: '{{COMMON_APP_DIR}}/{{demo_edxapp_user}}/venvs/{{demo_edxapp_user}}/bin' demo_edxapp_venv_bin: '{{ COMMON_APP_DIR }}/{{ demo_edxapp_user }}/venvs/{{demo_edxapp_user}}/bin'
demo_edxapp_course_data_dir: '{{COMMON_DATA_DIR}}/{{demo_edxapp_user}}/data' demo_edxapp_course_data_dir: '{{ COMMON_DATA_DIR }}/{{ demo_edxapp_user }}/data'
demo_edxapp_code_dir: '{{COMMON_APP_DIR}}/{{demo_edxapp_user}}/edx-platform' demo_edxapp_code_dir: '{{ COMMON_APP_DIR }}/{{ demo_edxapp_user }}/edx-platform'
...@@ -11,7 +11,7 @@ DISCERN_MYSQL_PASSWORD: 'password' ...@@ -11,7 +11,7 @@ DISCERN_MYSQL_PASSWORD: 'password'
DISCERN_MYSQL_HOST: 'localhost' DISCERN_MYSQL_HOST: 'localhost'
DISCERN_MYSQL_PORT: '3306' DISCERN_MYSQL_PORT: '3306'
DISCERN_LANG: "en_US.UTF-8" DISCERN_LANG: "en_US.UTF-8"
DISCERN_GUNICORN_EXTRA: ""
discern_app_dir: "{{ COMMON_APP_DIR }}/discern" discern_app_dir: "{{ COMMON_APP_DIR }}/discern"
discern_code_dir: "{{ discern_app_dir }}/discern" discern_code_dir: "{{ discern_app_dir }}/discern"
...@@ -53,23 +53,23 @@ discern_env_config: ...@@ -53,23 +53,23 @@ discern_env_config:
discern_auth_config: discern_auth_config:
AWS_ACCESS_KEY_ID: $DISCERN_AWS_ACCESS_KEY_ID AWS_ACCESS_KEY_ID: "{{ DISCERN_AWS_ACCESS_KEY_ID }}"
AWS_SECRET_ACCESS_KEY: $DISCERN_SECRET_ACCESS_KEY AWS_SECRET_ACCESS_KEY: "{{ DISCERN_SECRET_ACCESS_KEY }}"
BROKER_URL: $DISCERN_BROKER_URL BROKER_URL: "{{ DISCERN_BROKER_URL }}"
CACHES: CACHES:
default: default:
BACKEND: 'django.core.cache.backends.memcached.MemcachedCache' BACKEND: 'django.core.cache.backends.memcached.MemcachedCache'
LOCATION: $DISCERN_MEMCACHE LOCATION: "{{ DISCERN_MEMCACHE }}"
CELERY_RESULT_BACKEND: $DISCERN_RESULT_BACKEND CELERY_RESULT_BACKEND: "{{ DISCERN_RESULT_BACKEND }}"
DATABASES: DATABASES:
default: default:
ENGINE: django.db.backends.mysql ENGINE: django.db.backends.mysql
HOST: $DISCERN_MYSQL_HOST HOST: "{{ DISCERN_MYSQL_HOST }}"
NAME: $DISCERN_MYSQL_DB_NAME NAME: "{{ DISCERN_MYSQL_DB_NAME }}"
PASSWORD: $DISCERN_MYSQL_PASSWORD PASSWORD: "{{ DISCERN_MYSQL_PASSWORD }}"
PORT: $DISCERN_MYSQL_PORT PORT: "{{ DISCERN_MYSQL_PORT }}"
USER: $DISCERN_MYSQL_USER USER: "{{ DISCERN_MYSQL_USER }}"
GOOGLE_ANALYTICS_PROPERTY_ID: $DISCERN_GOOGLE_ANALYTICS_PROPERTY_ID GOOGLE_ANALYTICS_PROPERTY_ID: "{{ DISCERN_GOOGLE_ANALYTICS_PROPERTY_ID }}"
discern_debian_pkgs: discern_debian_pkgs:
......
...@@ -51,7 +51,7 @@ ...@@ -51,7 +51,7 @@
#Numpy has to be a pre-requirement in order for scipy to build #Numpy has to be a pre-requirement in order for scipy to build
- name : install python pre-requirements for discern and ease - name : install python pre-requirements for discern and ease
pip: > pip: >
requirements={{item}} virtualenv={{ discern_venv_dir }} state=present requirements={{ item }} virtualenv={{ discern_venv_dir }} state=present
extra_args="-i {{ COMMON_PYPI_MIRROR_URL }}" extra_args="-i {{ COMMON_PYPI_MIRROR_URL }}"
sudo_user: "{{ discern_user }}" sudo_user: "{{ discern_user }}"
notify: notify:
...@@ -62,7 +62,7 @@ ...@@ -62,7 +62,7 @@
- name : install python requirements for discern and ease - name : install python requirements for discern and ease
pip: > pip: >
requirements={{item}} virtualenv={{ discern_venv_dir }} state=present requirements={{ item }} virtualenv={{ discern_venv_dir }} state=present
extra_args="-i {{ COMMON_PYPI_MIRROR_URL }}" extra_args="-i {{ COMMON_PYPI_MIRROR_URL }}"
sudo_user: "{{ discern_user }}" sudo_user: "{{ discern_user }}"
notify: notify:
...@@ -84,8 +84,8 @@ ...@@ -84,8 +84,8 @@
tar zxf {{ discern_nltk_tmp_file }} tar zxf {{ discern_nltk_tmp_file }}
rm -f {{ discern_nltk_tmp_file }} rm -f {{ discern_nltk_tmp_file }}
touch {{ discern_nltk_download_url|basename }}-installed touch {{ discern_nltk_download_url|basename }}-installed
creates={{ discern_data_dir }}/{{ discern_nltk_download_url|basename }}-installed creates={{ discern_data_dir }}/{{ discern_nltk_download_url|basename }}-installed
chdir={{ discern_data_dir }} chdir={{ discern_data_dir }}
sudo_user: "{{ discern_user }}" sudo_user: "{{ discern_user }}"
notify: notify:
- restart discern - restart discern
...@@ -95,8 +95,8 @@ ...@@ -95,8 +95,8 @@
#support virtualenvs as of this comment #support virtualenvs as of this comment
- name: django syncdb migrate and collectstatic for discern - name: django syncdb migrate and collectstatic for discern
shell: > shell: >
{{ discern_venv_dir }}/bin/python {{discern_code_dir}}/manage.py {{item}} --noinput --settings={{discern_settings}} --pythonpath={{discern_code_dir}} {{ discern_venv_dir }}/bin/python {{ discern_code_dir }}/manage.py {{ item }} --noinput --settings={{discern_settings}} --pythonpath={{discern_code_dir}}
chdir={{ discern_code_dir }} chdir={{ discern_code_dir }}
sudo_user: "{{ discern_user }}" sudo_user: "{{ discern_user }}"
notify: notify:
- restart discern - restart discern
...@@ -107,8 +107,8 @@ ...@@ -107,8 +107,8 @@
#Have this separate from the other three because it doesn't take the noinput flag #Have this separate from the other three because it doesn't take the noinput flag
- name: django update_index for discern - name: django update_index for discern
shell: > shell: >
{{ discern_venv_dir}}/bin/python {{discern_code_dir}}/manage.py update_index --settings={{discern_settings}} --pythonpath={{discern_code_dir}} {{ discern_venv_dir}}/bin/python {{ discern_code_dir }}/manage.py update_index --settings={{ discern_settings }} --pythonpath={{discern_code_dir}}
chdir={{ discern_code_dir }} chdir={{ discern_code_dir }}
sudo_user: "{{ discern_user }}" sudo_user: "{{ discern_user }}"
notify: notify:
- restart discern - restart discern
......
...@@ -9,9 +9,9 @@ stop on runlevel [!2345] ...@@ -9,9 +9,9 @@ stop on runlevel [!2345]
respawn respawn
respawn limit 3 30 respawn limit 3 30
env DJANGO_SETTINGS_MODULE={{discern_settings}} env DJANGO_SETTINGS_MODULE={{ discern_settings }}
chdir {{ discern_code_dir }} chdir {{ discern_code_dir }}
setuid {{discern_user}} setuid {{ discern_user }}
exec {{ discern_venv_dir }}/bin/python {{ discern_code_dir }}/manage.py celeryd --loglevel=info --settings={{ discern_settings }} --pythonpath={{ discern_code_dir }} -B --autoscale={{ ansible_processor_cores * 2 }},1 exec {{ discern_venv_dir }}/bin/python {{ discern_code_dir }}/manage.py celeryd --loglevel=info --settings={{ discern_settings }} --pythonpath={{ discern_code_dir }} -B --autoscale={{ ansible_processor_cores * 2 }},1
[program:discern] [program:discern]
{% if ansible_processor|length > 0 %} {% if ansible_processor|length > 0 %}
command={{ discern_venv_bin }}/gunicorn --preload -b {{ discern_gunicorn_host }}:{{ discern_gunicorn_port }} -w {{ ansible_processor|length * discern_worker_mult }} --timeout=30 --pythonpath={{ discern_code_dir }} discern.wsgi command={{ discern_venv_bin }}/gunicorn --preload -b {{ discern_gunicorn_host }}:{{ discern_gunicorn_port }} -w {{ ansible_processor|length * discern_worker_mult }} --timeout=30 --pythonpath={{ discern_code_dir }} {{ DISCERN_GUNICORN_EXTRA }} discern.wsgi
{% else %} {% else %}
command={{ discern_venv_bin }}/gunicorn --preload -b {{ discern_gunicorn_host }}:{{ discern_gunicorn_port }} -w {{ discern_worker_mult }} --timeout=30 --pythonpath={{ discern_code_dir }} discern.wsgi command={{ discern_venv_bin }}/gunicorn --preload -b {{ discern_gunicorn_host }}:{{ discern_gunicorn_port }} -w {{ discern_worker_mult }} --timeout=30 --pythonpath={{ discern_code_dir }} {{ DISCERN_GUNICORN_EXTRA }} discern.wsgi
{% endif %} {% endif %}
user={{ common_web_user }} user={{ common_web_user }}
directory={{ discern_code_dir }} directory={{ discern_code_dir }}
......
...@@ -12,7 +12,7 @@ IFS="," ...@@ -12,7 +12,7 @@ IFS=","
-v add verbosity to edx_ansible run -v add verbosity to edx_ansible run
-h this -h this
<repo> - must be one of edx-platform, xqueue, cs_comments_service, xserver, ease, edx-ora, configuration, read-only-certificate-code edx-analytics-data-api <repo> - must be one of edx-platform, xqueue, cs_comments_service, xserver, ease, edx-ora, configuration, read-only-certificate-code, edx-analytics-data-api
<version> - can be a commit or tag <version> - can be a commit or tag
EO EO
...@@ -51,6 +51,7 @@ repos_to_cmd["configuration"]="$edx_ansible_cmd edx_ansible.yml -e 'configuratio ...@@ -51,6 +51,7 @@ repos_to_cmd["configuration"]="$edx_ansible_cmd edx_ansible.yml -e 'configuratio
repos_to_cmd["read-only-certificate-code"]="$edx_ansible_cmd certs.yml -e 'certs_version=$2'" repos_to_cmd["read-only-certificate-code"]="$edx_ansible_cmd certs.yml -e 'certs_version=$2'"
repos_to_cmd["edx-analytics-data-api"]="$edx_ansible_cmd analyticsapi.yml -e 'ANALYTICS_API_VERSION=$2'" repos_to_cmd["edx-analytics-data-api"]="$edx_ansible_cmd analyticsapi.yml -e 'ANALYTICS_API_VERSION=$2'"
repos_to_cmd["edx-ora2"]="$edx_ansible_cmd ora2.yml -e 'ora2_version=$2'" repos_to_cmd["edx-ora2"]="$edx_ansible_cmd ora2.yml -e 'ora2_version=$2'"
repos_to_cmd["insights"]="$edx_ansible_cmd insights.yml -e 'INSIGHTS_VERSION=$2'"
if [[ -z $1 || -z $2 ]]; then if [[ -z $1 || -z $2 ]]; then
......
...@@ -27,11 +27,11 @@ ...@@ -27,11 +27,11 @@
when: EDXAPP_USE_GIT_IDENTITY when: EDXAPP_USE_GIT_IDENTITY
# Do A Checkout # Do A Checkout
- name: checkout edx-platform repo into {{edxapp_code_dir}} - name: checkout edx-platform repo into {{ edxapp_code_dir }}
git: > git: >
dest={{edxapp_code_dir}} dest={{ edxapp_code_dir }}
repo={{edx_platform_repo}} repo={{ edx_platform_repo }}
version={{edx_platform_version}} version={{ edx_platform_version }}
accept_hostkey=yes accept_hostkey=yes
sudo_user: "{{ edxapp_user }}" sudo_user: "{{ edxapp_user }}"
environment: environment:
...@@ -42,7 +42,7 @@ ...@@ -42,7 +42,7 @@
- "restart edxapp_workers" - "restart edxapp_workers"
- name: git clean after checking out edx-platform - name: git clean after checking out edx-platform
shell: cd {{edxapp_code_dir}} && git clean -xdf shell: cd {{ edxapp_code_dir }} && git clean -xdf
sudo_user: "{{ edxapp_user }}" sudo_user: "{{ edxapp_user }}"
notify: notify:
- "restart edxapp" - "restart edxapp"
...@@ -50,9 +50,9 @@ ...@@ -50,9 +50,9 @@
- name: checkout theme - name: checkout theme
git: > git: >
dest={{ edxapp_app_dir }}/themes/{{edxapp_theme_name}} dest={{ edxapp_app_dir }}/themes/{{ edxapp_theme_name }}
repo={{edxapp_theme_source_repo}} repo={{ edxapp_theme_source_repo }}
version={{edxapp_theme_version}} version={{ edxapp_theme_version }}
accept_hostkey=yes accept_hostkey=yes
when: edxapp_theme_name != '' when: edxapp_theme_name != ''
sudo_user: "{{ edxapp_user }}" sudo_user: "{{ edxapp_user }}"
...@@ -91,8 +91,8 @@ ...@@ -91,8 +91,8 @@
- name: gem install bundler - name: gem install bundler
shell: > shell: >
gem install bundle gem install bundle
chdir={{ edxapp_code_dir }} chdir={{ edxapp_code_dir }}
executable=/bin/bash executable=/bin/bash
environment: "{{ edxapp_environment }}" environment: "{{ edxapp_environment }}"
sudo_user: "{{ edxapp_user }}" sudo_user: "{{ edxapp_user }}"
notify: notify:
...@@ -102,8 +102,8 @@ ...@@ -102,8 +102,8 @@
- name: bundle install - name: bundle install
shell: > shell: >
bundle install --binstubs bundle install --binstubs
chdir={{ edxapp_code_dir }} chdir={{ edxapp_code_dir }}
executable=/bin/bash executable=/bin/bash
sudo_user: "{{ edxapp_user }}" sudo_user: "{{ edxapp_user }}"
environment: "{{ edxapp_environment }}" environment: "{{ edxapp_environment }}"
notify: notify:
...@@ -144,8 +144,8 @@ ...@@ -144,8 +144,8 @@
# Install the python pre requirements into {{ edxapp_venv_dir }} # Install the python pre requirements into {{ edxapp_venv_dir }}
- name : install python pre-requirements - name : install python pre-requirements
pip: > pip: >
requirements="{{pre_requirements_file}}" requirements="{{ pre_requirements_file }}"
virtualenv="{{edxapp_venv_dir}}" virtualenv="{{ edxapp_venv_dir }}"
state=present state=present
extra_args="-i {{ COMMON_PYPI_MIRROR_URL }}" extra_args="-i {{ COMMON_PYPI_MIRROR_URL }}"
sudo_user: "{{ edxapp_user }}" sudo_user: "{{ edxapp_user }}"
...@@ -173,8 +173,8 @@ ...@@ -173,8 +173,8 @@
# Install the python post requirements into {{ edxapp_venv_dir }} # Install the python post requirements into {{ edxapp_venv_dir }}
- name : install python post-requirements - name : install python post-requirements
pip: > pip: >
requirements="{{post_requirements_file}}" requirements="{{ post_requirements_file }}"
virtualenv="{{edxapp_venv_dir}}" virtualenv="{{ edxapp_venv_dir }}"
state=present state=present
extra_args="-i {{ COMMON_PYPI_MIRROR_URL }}" extra_args="-i {{ COMMON_PYPI_MIRROR_URL }}"
sudo_user: "{{ edxapp_user }}" sudo_user: "{{ edxapp_user }}"
...@@ -187,8 +187,8 @@ ...@@ -187,8 +187,8 @@
# Install the python paver requirements into {{ edxapp_venv_dir }} # Install the python paver requirements into {{ edxapp_venv_dir }}
- name : install python paver-requirements - name : install python paver-requirements
pip: > pip: >
requirements="{{paver_requirements_file}}" requirements="{{ paver_requirements_file }}"
virtualenv="{{edxapp_venv_dir}}" virtualenv="{{ edxapp_venv_dir }}"
state=present state=present
extra_args="-i {{ COMMON_PYPI_MIRROR_URL }}" extra_args="-i {{ COMMON_PYPI_MIRROR_URL }}"
sudo_user: "{{ edxapp_user }}" sudo_user: "{{ edxapp_user }}"
...@@ -257,7 +257,7 @@ ...@@ -257,7 +257,7 @@
- name: install CAS attribute module - name: install CAS attribute module
pip: > pip: >
name="{{ EDXAPP_CAS_ATTRIBUTE_PACKAGE }}" name="{{ EDXAPP_CAS_ATTRIBUTE_PACKAGE }}"
virtualenv="{{edxapp_venv_dir}}" virtualenv="{{ edxapp_venv_dir }}"
state=present state=present
extra_args="-i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w --use-mirrors" extra_args="-i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w --use-mirrors"
sudo_user: "{{ edxapp_user }}" sudo_user: "{{ edxapp_user }}"
...@@ -294,8 +294,8 @@ ...@@ -294,8 +294,8 @@
- name: code sandbox | Install base sandbox requirements and create sandbox virtualenv - name: code sandbox | Install base sandbox requirements and create sandbox virtualenv
pip: > pip: >
requirements="{{sandbox_base_requirements}}" requirements="{{ sandbox_base_requirements }}"
virtualenv="{{edxapp_sandbox_venv_dir}}" virtualenv="{{ edxapp_sandbox_venv_dir }}"
state=present state=present
extra_args="-i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w --use-mirrors" extra_args="-i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w --use-mirrors"
sudo_user: "{{ edxapp_sandbox_user }}" sudo_user: "{{ edxapp_sandbox_user }}"
......
...@@ -26,6 +26,7 @@ ...@@ -26,6 +26,7 @@
- "{{ edxapp_theme_dir }}" - "{{ edxapp_theme_dir }}"
- "{{ edxapp_staticfile_dir }}" - "{{ edxapp_staticfile_dir }}"
- "{{ edxapp_course_static_dir }}" - "{{ edxapp_course_static_dir }}"
- "{{ edxapp_course_data_dir }}"
# This is a symlink that has to exist because # This is a symlink that has to exist because
# we currently can't override the DATA_DIR var # we currently can't override the DATA_DIR var
...@@ -38,7 +39,6 @@ ...@@ -38,7 +39,6 @@
state=link state=link
owner="{{ edxapp_user }}" owner="{{ edxapp_user }}"
group="{{ common_web_group }}" group="{{ common_web_group }}"
- name: create edxapp log dir - name: create edxapp log dir
file: > file: >
...@@ -70,6 +70,12 @@ ...@@ -70,6 +70,12 @@
- "restart edxapp" - "restart edxapp"
- "restart edxapp_workers" - "restart edxapp_workers"
- name: set up edxapp .npmrc
template:
src=.npmrc.j2 dest={{ edxapp_app_dir }}/.npmrc
owner={{ edxapp_user }} group={{ common_web_group }}
mode=0600
- name: create log directories for service variants - name: create log directories for service variants
notify: notify:
- "restart edxapp" - "restart edxapp"
...@@ -81,7 +87,7 @@ ...@@ -81,7 +87,7 @@
with_items: service_variants_enabled with_items: service_variants_enabled
# Set up the python sandbox execution environment # Set up the python sandbox execution environment
- include: python_sandbox_env.yml - include: python_sandbox_env.yml tags=deploy
when: EDXAPP_PYTHON_SANDBOX when: EDXAPP_PYTHON_SANDBOX
- include: deploy.yml tags=deploy - include: deploy.yml tags=deploy
# Set the alternatives this way for blas and lapack to work correctly for the
# MITx 6.341x course.
# TODO: Switch to using alternatives module in 1.6
- name: code sandbox | Use libblas for 3gf
command: update-alternatives --set libblas.so.3gf /usr/lib/libblas/libblas.so.3gf
# TODO: Switch to using alternatives module in 1.6
- name: code sandbox | Use liblapac for 3gf
command: update-alternatives --set liblapack.so.3gf /usr/lib/lapack/liblapack.so.3gf
- name: code sandbox | Create edxapp sandbox user - name: code sandbox | Create edxapp sandbox user
user: name={{ edxapp_sandbox_user }} shell=/bin/false home={{ edxapp_sandbox_venv_dir }} user: name={{ edxapp_sandbox_user }} shell=/bin/false home={{ edxapp_sandbox_venv_dir }}
notify: notify:
......
- name: make the course data updatable by the edxapp user
file:
path="{{ edxapp_course_data_dir }}"
state=directory
recurse=yes
owner="{{ edxapp_user }}"
group="{{ edxapp_user }}"
- name: clone the xml course repo - name: clone the xml course repo
git: > git: >
repo="{{ item.repo_url }}" repo="{{ item.repo_url }}"
......
registry={{ COMMON_NPM_MIRROR_URL }}
{% if devstack %} {% if devstack %}
{{ edxapp_user }} ALL=({{ edxapp_sandbox_user }}) SETENV:NOPASSWD:{{ edxapp_sandbox_venv_dir }}/bin/python {{ edxapp_user }} ALL=({{ edxapp_sandbox_user }}) SETENV:NOPASSWD:{{ edxapp_sandbox_venv_dir }}/bin/python
{{ edxapp_user }} ALL=({{ edxapp_sandbox_user }}) SETENV:NOPASSWD:/bin/rm /tmp/codejail-*/tmp {{ edxapp_user }} ALL=({{ edxapp_sandbox_user }}) SETENV:NOPASSWD:/usr/bin/find /tmp/codejail-*/tmp -mindepth 1 -maxdepth 1 -exec rm -rf {} ;
{{ edxapp_user }} ALL=(ALL) NOPASSWD:/bin/kill {{ edxapp_user }} ALL=(ALL) NOPASSWD:/bin/kill
{{ edxapp_user }} ALL=(ALL) NOPASSWD:/usr/bin/pkill {{ edxapp_user }} ALL=(ALL) NOPASSWD:/usr/bin/pkill
{% else %} {% else %}
{{ common_web_user }} ALL=({{ edxapp_sandbox_user }}) SETENV:NOPASSWD:{{ edxapp_sandbox_venv_dir }}/bin/python {{ common_web_user }} ALL=({{ edxapp_sandbox_user }}) SETENV:NOPASSWD:{{ edxapp_sandbox_venv_dir }}/bin/python
{{ common_web_user }} ALL=({{ edxapp_sandbox_user }}) SETENV:NOPASSWD:/bin/rm /tmp/codejail-*/tmp {{ common_web_user }} ALL=({{ edxapp_sandbox_user }}) SETENV:NOPASSWD:/usr/bin/find /tmp/codejail-*/tmp -mindepth 1 -maxdepth 1 -exec rm -rf {} ;
{{ common_web_user }} ALL=(ALL) NOPASSWD:/bin/kill {{ common_web_user }} ALL=(ALL) NOPASSWD:/bin/kill
{{ common_web_user }} ALL=(ALL) NOPASSWD:/usr/bin/pkill {{ common_web_user }} ALL=(ALL) NOPASSWD:/usr/bin/pkill
{% endif %} {% endif %}
{% do cms_auth_config.update(EDXAPP_AUTH_EXTRA) %} {% do cms_auth_config.update(EDXAPP_CMS_AUTH_EXTRA) %}
{% for key, value in cms_auth_config.iteritems() %} {% for key, value in cms_auth_config.iteritems() %}
{% if value == 'None' %} {% if value == 'None' %}
{% do cms_auth_config.update({key: None }) %} {% do cms_auth_config.update({key: None }) %}
......
...@@ -16,15 +16,15 @@ command={{ executable }} {{ max_req }} --preload -b {{ edxapp_cms_gunicorn_host ...@@ -16,15 +16,15 @@ command={{ executable }} {{ max_req }} --preload -b {{ edxapp_cms_gunicorn_host
{% else -%} {% else -%}
{# This is for backwards compatibility, set workers explicitely using EDXAPP_WORKERS #} {# This is for backwards compatibility, set workers explicitely using EDXAPP_WORKERS #}
{% if ansible_processor|length > 0 -%} {% if ansible_processor|length > 0 -%}
command={{ executable }} {{ max_req }} --preload -b {{ edxapp_cms_gunicorn_host }}:{{ edxapp_cms_gunicorn_port }} -w {{ ansible_processor|length * worker_core_mult.cms }} --timeout=300 --pythonpath={{ edxapp_code_dir }} cms.wsgi command={{ executable }} {{ max_req }} --preload -b {{ edxapp_cms_gunicorn_host }}:{{ edxapp_cms_gunicorn_port }} -w {{ ansible_processor|length * worker_core_mult.cms }} --timeout=300 --pythonpath={{ edxapp_code_dir }} {{ EDXAPP_CMS_GUNICORN_EXTRA }} cms.wsgi
{% else -%} {% else -%}
command={{ executable }} {{ max_req }} --preload -b {{ edxapp_cms_gunicorn_host }}:{{ edxapp_cms_gunicorn_port }} -w {{ worker_core_mult.cms }} --timeout=300 --pythonpath={{ edxapp_code_dir }} cms.wsgi command={{ executable }} {{ max_req }} --preload -b {{ edxapp_cms_gunicorn_host }}:{{ edxapp_cms_gunicorn_port }} -w {{ worker_core_mult.cms }} --timeout=300 --pythonpath={{ edxapp_code_dir }} {{ EDXAPP_CMS_GUNICORN_EXTRA }} cms.wsgi
{% endif -%} {% endif -%}
{% endif -%} {% endif -%}
user={{ common_web_user }} user={{ common_web_user }}
directory={{ edxapp_code_dir }} directory={{ edxapp_code_dir }}
environment={% if COMMON_ENABLE_NEWRELIC_APP %}NEW_RELIC_APP_NAME={{ EDXAPP_NEWRELIC_CMS_APPNAME }},NEW_RELIC_LICENSE_KEY={{ NEWRELIC_LICENSE_KEY }},{% endif -%}PORT={{edxapp_cms_gunicorn_port}},ADDRESS={{edxapp_cms_gunicorn_host}},LANG={{ EDXAPP_LANG }},DJANGO_SETTINGS_MODULE={{ edxapp_cms_env }},SERVICE_VARIANT="cms" environment={% if COMMON_ENABLE_NEWRELIC_APP %}NEW_RELIC_APP_NAME={{ EDXAPP_NEWRELIC_CMS_APPNAME }},NEW_RELIC_LICENSE_KEY={{ NEWRELIC_LICENSE_KEY }},{% endif -%}PORT={{ edxapp_cms_gunicorn_port }},ADDRESS={{ edxapp_cms_gunicorn_host }},LANG={{ EDXAPP_LANG }},DJANGO_SETTINGS_MODULE={{ EDXAPP_CMS_ENV }},SERVICE_VARIANT="cms"
stdout_logfile={{ supervisor_log_dir }}/%(program_name)-stdout.log stdout_logfile={{ supervisor_log_dir }}/%(program_name)-stdout.log
stderr_logfile={{ supervisor_log_dir }}/%(program_name)-stderr.log stderr_logfile={{ supervisor_log_dir }}/%(program_name)-stderr.log
killasgroup=true killasgroup=true
......
...@@ -9,7 +9,7 @@ ...@@ -9,7 +9,7 @@
/tmp/codejail-*/** wrix, /tmp/codejail-*/** wrix,
# #
# Whitelist particiclar shared objects from the system # Whitelist particular shared objects from the system
# python installation # python installation
# #
/usr/lib/python2.7/lib-dynload/_json.so mr, /usr/lib/python2.7/lib-dynload/_json.so mr,
...@@ -21,6 +21,22 @@ ...@@ -21,6 +21,22 @@
/usr/lib/python2.7/lib-dynload/_elementtree.so mr, /usr/lib/python2.7/lib-dynload/_elementtree.so mr,
/usr/lib/python2.7/lib-dynload/pyexpat.so mr, /usr/lib/python2.7/lib-dynload/pyexpat.so mr,
# Matplot lib needs a place for temp caches
{{ edxapp_sandbox_venv_dir }}/.config/ wrix,
{{ edxapp_sandbox_venv_dir }}/.cache/ wrix,
{{ edxapp_sandbox_venv_dir }}/.config/** wrix,
{{ edxapp_sandbox_venv_dir }}/.cache/** wrix,
# Matplotlib related libraries
/usr/lib/python2.7/lib-dynload/termios.so mr,
/usr/lib/python2.7/lib-dynload/parser.so mr,
# Matplot lib needs fonts to make graphs
/usr/share/fonts/ r,
/usr/share/fonts/** r,
/usr/local/share/fonts/ r,
/usr/local/share/fonts/** r,
# #
# Allow access to selections from /proc # Allow access to selections from /proc
# #
......
{% do lms_auth_config.update(EDXAPP_AUTH_EXTRA) %} {% do lms_auth_config.update(EDXAPP_LMS_AUTH_EXTRA) %}
{% for key, value in lms_auth_config.iteritems() %} {% for key, value in lms_auth_config.iteritems() %}
{% if value == 'None' %} {% if value == 'None' %}
{% do lms_auth_config.update({key: None }) %} {% do lms_auth_config.update({key: None }) %}
......
...@@ -17,15 +17,15 @@ command={{ executable }} {{ max_req }} --preload -b {{ edxapp_lms_gunicorn_host ...@@ -17,15 +17,15 @@ command={{ executable }} {{ max_req }} --preload -b {{ edxapp_lms_gunicorn_host
{% else -%} {% else -%}
{# This is for backwards compatibility, set workers explicitely using EDXAPP_WORKERS #} {# This is for backwards compatibility, set workers explicitely using EDXAPP_WORKERS #}
{% if ansible_processor|length > 0 -%} {% if ansible_processor|length > 0 -%}
command={{ executable }} {{ max_req }} --preload -b {{ edxapp_lms_gunicorn_host }}:{{ edxapp_lms_gunicorn_port }} -w {{ ansible_processor|length * worker_core_mult.lms }} --timeout=300 --pythonpath={{ edxapp_code_dir }} lms.wsgi command={{ executable }} {{ max_req }} --preload -b {{ edxapp_lms_gunicorn_host }}:{{ edxapp_lms_gunicorn_port }} -w {{ ansible_processor|length * worker_core_mult.lms }} --timeout=300 --pythonpath={{ edxapp_code_dir }} {{ EDXAPP_LMS_GUNICORN_EXTRA }} lms.wsgi
{% else -%} {% else -%}
command={{ executable }} {{ max_req }} --preload -b {{ edxapp_lms_gunicorn_host }}:{{ edxapp_lms_gunicorn_port }} -w {{ worker_core_mult.lms }} --timeout=300 --pythonpath={{ edxapp_code_dir }} lms.wsgi command={{ executable }} {{ max_req }} --preload -b {{ edxapp_lms_gunicorn_host }}:{{ edxapp_lms_gunicorn_port }} -w {{ worker_core_mult.lms }} --timeout=300 --pythonpath={{ edxapp_code_dir }} {{ EDXAPP_LMS_GUNICORN_EXTRA }} lms.wsgi
{% endif %} {% endif %}
{% endif %} {% endif %}
user={{ common_web_user }} user={{ common_web_user }}
directory={{ edxapp_code_dir }} directory={{ edxapp_code_dir }}
environment={% if COMMON_ENABLE_NEWRELIC_APP %}NEW_RELIC_APP_NAME={{ EDXAPP_NEWRELIC_LMS_APPNAME }},NEW_RELIC_LICENSE_KEY={{ NEWRELIC_LICENSE_KEY }},{% endif -%} PORT={{edxapp_lms_gunicorn_port}},ADDRESS={{edxapp_lms_gunicorn_host}},LANG={{ EDXAPP_LANG }},DJANGO_SETTINGS_MODULE={{ edxapp_lms_env }},SERVICE_VARIANT="lms",PATH="{{ edxapp_deploy_path }}" environment={% if COMMON_ENABLE_NEWRELIC_APP %}NEW_RELIC_APP_NAME={{ EDXAPP_NEWRELIC_LMS_APPNAME }},NEW_RELIC_LICENSE_KEY={{ NEWRELIC_LICENSE_KEY }},{% endif -%} PORT={{ edxapp_lms_gunicorn_port }},ADDRESS={{ edxapp_lms_gunicorn_host }},LANG={{ EDXAPP_LANG }},DJANGO_SETTINGS_MODULE={{ EDXAPP_LMS_ENV }},SERVICE_VARIANT="lms",PATH="{{ edxapp_deploy_path }}"
stdout_logfile={{ supervisor_log_dir }}/%(program_name)-stdout.log stdout_logfile={{ supervisor_log_dir }}/%(program_name)-stdout.log
stderr_logfile={{ supervisor_log_dir }}/%(program_name)-stderr.log stderr_logfile={{ supervisor_log_dir }}/%(program_name)-stderr.log
killasgroup=true killasgroup=true
......
...@@ -17,7 +17,7 @@ ...@@ -17,7 +17,7 @@
mysql_user: > mysql_user: >
name={{ EDXAPP_MYSQL_USER }} name={{ EDXAPP_MYSQL_USER }}
password={{ EDXAPP_MYSQL_PASSWORD }} password={{ EDXAPP_MYSQL_PASSWORD }}
priv='{{EDXAPP_MYSQL_DB_NAME}}.*:ALL' priv='{{ EDXAPP_MYSQL_DB_NAME }}.*:ALL'
when: EDXAPP_MYSQL_USER is defined when: EDXAPP_MYSQL_USER is defined
- name: create a database for edxapp - name: create a database for edxapp
...@@ -31,7 +31,7 @@ ...@@ -31,7 +31,7 @@
mysql_user: > mysql_user: >
name={{ XQUEUE_MYSQL_USER }} name={{ XQUEUE_MYSQL_USER }}
password={{ XQUEUE_MYSQL_PASSWORD }} password={{ XQUEUE_MYSQL_PASSWORD }}
priv='{{XQUEUE_MYSQL_DB_NAME}}.*:ALL' priv='{{ XQUEUE_MYSQL_DB_NAME }}.*:ALL'
when: XQUEUE_MYSQL_USER is defined when: XQUEUE_MYSQL_USER is defined
- name: create a database for xqueue - name: create a database for xqueue
...@@ -45,7 +45,7 @@ ...@@ -45,7 +45,7 @@
mysql_user: > mysql_user: >
name={{ ORA_MYSQL_USER }} name={{ ORA_MYSQL_USER }}
password={{ ORA_MYSQL_PASSWORD }} password={{ ORA_MYSQL_PASSWORD }}
priv='{{ORA_MYSQL_DB_NAME}}.*:ALL' priv='{{ ORA_MYSQL_DB_NAME }}.*:ALL'
when: ORA_MYSQL_USER is defined when: ORA_MYSQL_USER is defined
- name: create a database for ora - name: create a database for ora
......
...@@ -3,11 +3,11 @@ ...@@ -3,11 +3,11 @@
# Path to directory where to store index data allocated for this node. # Path to directory where to store index data allocated for this node.
# #
path.data: {{elasticsearch_data_dir}} path.data: {{ elasticsearch_data_dir }}
# Path to log files: # Path to log files:
# #
path.logs: {{elasticsearch_log_dir}} path.logs: {{ elasticsearch_log_dir }}
# ElasticSearch performs poorly when JVM starts swapping: you should ensure that # ElasticSearch performs poorly when JVM starts swapping: you should ensure that
# it _never_ swaps. # it _never_ swaps.
...@@ -43,3 +43,8 @@ script.disable_dynamic: true ...@@ -43,3 +43,8 @@ script.disable_dynamic: true
discovery.zen.ping.unicast.hosts: ['{{hosts|join("\',\'") }}'] discovery.zen.ping.unicast.hosts: ['{{hosts|join("\',\'") }}']
{% endif -%} {% endif -%}
{% if vagrant_cluster|bool %}
network:
host: {{ ansible_ssh_host }}
{% endif %}
...@@ -24,4 +24,4 @@ flower_deploy_path: "{{ flower_venv_bin }}:/usr/local/sbin:/usr/local/bin:/usr/b ...@@ -24,4 +24,4 @@ flower_deploy_path: "{{ flower_venv_bin }}:/usr/local/sbin:/usr/local/bin:/usr/b
flower_broker: "amqp://{{ FLOWER_BROKER_USERNAME }}:{{ FLOWER_BROKER_PASSWORD }}@{{ FLOWER_BROKER_HOST }}:{{ FLOWER_BROKER_PORT }}" flower_broker: "amqp://{{ FLOWER_BROKER_USERNAME }}:{{ FLOWER_BROKER_PASSWORD }}@{{ FLOWER_BROKER_HOST }}:{{ FLOWER_BROKER_PORT }}"
flower_environment: flower_environment:
PATH: $flower_deploy_path PATH: "{{ flower_deploy_path }}"
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment