Commit a3d6efc3 by Feanil Patel

Add spacing to match coding conventions.

parent 84926ee7
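The change is mechanical: every Jinja2 expression gains a single space inside its delimiters, so {{var}} becomes {{ var }}. Jinja2 ignores whitespace inside the braces, so both forms render identically; the commit is purely cosmetic and brings the templates in line with the spacing the coding conventions call for. A minimal sketch of the pattern (the variable name here is illustrative, not taken from the repo):

    # before
    HOST: "{{SOME_DB_HOST}}"
    # after
    HOST: "{{ SOME_DB_HOST }}"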
{% for deployment, creds in ALTON_AWS_CREDENTIALS.iteritems() %}
[profile {{deployment}}]
[profile {{ deployment }}]
aws_access_key_id = {{ creds.access_id }}
aws_secret_access_key = {{ creds.secret_key }}
......
......@@ -41,14 +41,14 @@ analytics_auth_config:
DATABASES:
analytics:
<<: *databases_default
USER: "{{AS_DB_ANALYTICS_USER}}"
PASSWORD: "{{AS_DB_ANALYTICS_PASSWORD}}"
HOST: "{{AS_DB_ANALYTICS_HOST}}"
ANALYTICS_API_KEY: "{{AS_API_KEY}}"
USER: "{{ AS_DB_ANALYTICS_USER }}"
PASSWORD: "{{ AS_DB_ANALYTICS_PASSWORD }}"
HOST: "{{ AS_DB_ANALYTICS_HOST }}"
ANALYTICS_API_KEY: "{{ AS_API_KEY }}"
ANALYTICS_RESULTS_DB:
MONGO_URI: "{{AS_DB_RESULTS_URL}}"
MONGO_DB: "{{AS_DB_RESULTS_DB}}"
MONGO_STORED_QUERIES_COLLECTION: "{{AS_DB_RESULTS_COLLECTION}}"
MONGO_URI: "{{ AS_DB_RESULTS_URL }}"
MONGO_DB: "{{ AS_DB_RESULTS_DB }}"
MONGO_STORED_QUERIES_COLLECTION: "{{ AS_DB_RESULTS_COLLECTION }}"
as_role_name: "analytics-server"
as_user: "analytics-server"
......
......@@ -28,7 +28,7 @@
accept_hostkey=yes
version={{ as_version }} force=true
environment:
GIT_SSH: "{{as_git_ssh}}"
GIT_SSH: "{{ as_git_ssh }}"
notify: restart the analytics service
notify: start the analytics service
tags:
......
......@@ -10,7 +10,7 @@
when: role_exists | success
- name: create role directories
file: path=roles/{{role_name}}/{{ item }} state=directory
file: path=roles/{{ role_name }}/{{ item }} state=directory
with_items:
- tasks
- meta
......
......@@ -6,7 +6,7 @@
- debug
- name: Dump lms auth|env file
template: src=../../edxapp/templates/lms.{{item}}.json.j2 dest=/tmp/lms.{{item}}.json mode=0600
template: src=../../edxapp/templates/lms.{{ item }}.json.j2 dest=/tmp/lms.{{ item }}.json mode=0600
with_items:
- env
- auth
......@@ -16,7 +16,7 @@
- debug
- name: Dump lms-preview auth|env file
template: src=../../edxapp/templates/lms-preview.{{item}}.json.j2 dest=/tmp/lms-preview.{{item}}.json mode=0600
template: src=../../edxapp/templates/lms-preview.{{ item }}.json.j2 dest=/tmp/lms-preview.{{ item }}.json mode=0600
with_items:
- env
- auth
......@@ -26,7 +26,7 @@
- debug
- name: Dump cms auth|env file
template: src=../../edxapp/templates/cms.{{item}}.json.j2 dest=/tmp/cms.{{item}}.json mode=0600
template: src=../../edxapp/templates/cms.{{ item }}.json.j2 dest=/tmp/cms.{{ item }}.json mode=0600
with_items:
- env
- auth
......@@ -44,7 +44,7 @@
- name: fetch remote files
# fetch is fail-safe for remote files that don't exist
# setting mode is not an option
fetch: src=/tmp/{{item}} dest=/tmp/{{ansible_hostname}}-{{item}} flat=True
fetch: src=/tmp/{{ item }} dest=/tmp/{{ ansible_hostname }}-{{item}} flat=True
with_items:
- ansible.all.json
- ansible.all.yml
......
......@@ -105,7 +105,7 @@ instance_id=$(ec2metadata --instance-id)
ip=$(ec2metadata --local-ipv4)
availability_zone=$(ec2metadata --availability-zone)
# region isn't available via the metadata service
region=${availability_zone:0:${{lb}}#availability_zone{{rb}} - 1}
region=${availability_zone:0:${{ lb }}#availability_zone{{ rb }} - 1}
s3_path="${2}/$sec_grp/"
$noop {{ aws_s3cmd }} --multipart-chunk-size-mb 5120 --disable-multipart sync $directory "s3://${bucket_path}/${sec_grp}/${instance_id}-${ip}/"
......@@ -71,25 +71,25 @@ certs_env_config:
# CERTS_DATA is legacy, not used
CERT_DATA: {}
QUEUE_NAME: "certificates"
QUEUE_URL: "{{CERTS_QUEUE_URL}}"
CERT_BUCKET: "{{CERTS_BUCKET}}"
QUEUE_URL: "{{ CERTS_QUEUE_URL }}"
CERT_BUCKET: "{{ CERTS_BUCKET }}"
# gnupg signing key
CERT_KEY_ID: "{{CERTS_KEY_ID}}"
CERT_KEY_ID: "{{ CERTS_KEY_ID }}"
LOGGING_ENV: ""
CERT_GPG_DIR: "{{certs_gpg_dir}}"
CERT_URL: "{{CERTS_URL}}"
CERT_DOWNLOAD_URL: "{{CERTS_DOWNLOAD_URL}}"
CERT_WEB_ROOT: "{{CERTS_WEB_ROOT}}"
COPY_TO_WEB_ROOT: "{{CERTS_COPY_TO_WEB_ROOT}}"
S3_UPLOAD: "{{CERTS_S3_UPLOAD}}"
CERT_VERIFY_URL: "{{CERTS_VERIFY_URL}}"
TEMPLATE_DATA_DIR: "{{CERTS_TEMPLATE_DATA_DIR}}"
CERT_GPG_DIR: "{{ certs_gpg_dir }}"
CERT_URL: "{{ CERTS_URL }}"
CERT_DOWNLOAD_URL: "{{ CERTS_DOWNLOAD_URL }}"
CERT_WEB_ROOT: "{{ CERTS_WEB_ROOT }}"
COPY_TO_WEB_ROOT: "{{ CERTS_COPY_TO_WEB_ROOT }}"
S3_UPLOAD: "{{ CERTS_S3_UPLOAD }}"
CERT_VERIFY_URL: "{{ CERTS_VERIFY_URL }}"
TEMPLATE_DATA_DIR: "{{ CERTS_TEMPLATE_DATA_DIR }}"
certs_auth_config:
QUEUE_USER: "{{CERTS_QUEUE_USER}}"
QUEUE_PASS: "{{CERTS_QUEUE_PASS}}"
QUEUE_AUTH_USER: "{{CERTS_XQUEUE_AUTH_USER}}"
QUEUE_AUTH_PASS: "{{CERTS_XQUEUE_AUTH_PASS}}"
CERT_KEY_ID: "{{CERTS_KEY_ID}}"
CERT_AWS_ID: "{{CERTS_AWS_ID}}"
CERT_AWS_KEY: "{{CERTS_AWS_KEY}}"
QUEUE_USER: "{{ CERTS_QUEUE_USER }}"
QUEUE_PASS: "{{ CERTS_QUEUE_PASS }}"
QUEUE_AUTH_USER: "{{ CERTS_XQUEUE_AUTH_USER }}"
QUEUE_AUTH_PASS: "{{ CERTS_XQUEUE_AUTH_PASS }}"
CERT_KEY_ID: "{{ CERTS_KEY_ID }}"
CERT_AWS_ID: "{{ CERTS_AWS_ID }}"
CERT_AWS_KEY: "{{ CERTS_AWS_KEY }}"
......@@ -57,6 +57,6 @@ request subnet-mask, broadcast-address, time-offset, routers,
#}
interface "eth0" {
prepend domain-search {% for search in COMMON_DHCLIENT_DNS_SEARCH -%}"{{search}}"{%- if not loop.last -%},{%- endif -%}
prepend domain-search {% for search in COMMON_DHCLIENT_DNS_SEARCH -%}"{{ search }}"{%- if not loop.last -%},{%- endif -%}
{%- endfor -%};
}
......@@ -22,7 +22,7 @@
- datadog
- name: add apt key
apt_key: id=C7A7DA52 url={{datadog_apt_key}} state=present
apt_key: id=C7A7DA52 url={{ datadog_apt_key }} state=present
tags:
- datadog
......
......@@ -30,6 +30,6 @@ demo_test_users:
password: edx
demo_edxapp_user: 'edxapp'
demo_edxapp_venv_bin: '{{COMMON_APP_DIR}}/{{demo_edxapp_user}}/venvs/{{demo_edxapp_user}}/bin'
demo_edxapp_course_data_dir: '{{COMMON_DATA_DIR}}/{{demo_edxapp_user}}/data'
demo_edxapp_code_dir: '{{COMMON_APP_DIR}}/{{demo_edxapp_user}}/edx-platform'
demo_edxapp_venv_bin: '{{ COMMON_APP_DIR }}/{{ demo_edxapp_user }}/venvs/{{demo_edxapp_user}}/bin'
demo_edxapp_course_data_dir: '{{ COMMON_DATA_DIR }}/{{ demo_edxapp_user }}/data'
demo_edxapp_code_dir: '{{ COMMON_APP_DIR }}/{{ demo_edxapp_user }}/edx-platform'
......@@ -53,23 +53,23 @@ discern_env_config:
discern_auth_config:
AWS_ACCESS_KEY_ID: "{{DISCERN_AWS_ACCESS_KEY_ID}}"
AWS_SECRET_ACCESS_KEY: "{{DISCERN_SECRET_ACCESS_KEY}}"
BROKER_URL: "{{DISCERN_BROKER_URL}}"
AWS_ACCESS_KEY_ID: "{{ DISCERN_AWS_ACCESS_KEY_ID }}"
AWS_SECRET_ACCESS_KEY: "{{ DISCERN_SECRET_ACCESS_KEY }}"
BROKER_URL: "{{ DISCERN_BROKER_URL }}"
CACHES:
default:
BACKEND: 'django.core.cache.backends.memcached.MemcachedCache'
LOCATION: "{{DISCERN_MEMCACHE}}"
CELERY_RESULT_BACKEND: "{{DISCERN_RESULT_BACKEND}}"
LOCATION: "{{ DISCERN_MEMCACHE }}"
CELERY_RESULT_BACKEND: "{{ DISCERN_RESULT_BACKEND }}"
DATABASES:
default:
ENGINE: django.db.backends.mysql
HOST: "{{DISCERN_MYSQL_HOST}}"
NAME: "{{DISCERN_MYSQL_DB_NAME}}"
PASSWORD: "{{DISCERN_MYSQL_PASSWORD}}"
PORT: "{{DISCERN_MYSQL_PORT}}"
USER: "{{DISCERN_MYSQL_USER}}"
GOOGLE_ANALYTICS_PROPERTY_ID: "{{DISCERN_GOOGLE_ANALYTICS_PROPERTY_ID}}"
HOST: "{{ DISCERN_MYSQL_HOST }}"
NAME: "{{ DISCERN_MYSQL_DB_NAME }}"
PASSWORD: "{{ DISCERN_MYSQL_PASSWORD }}"
PORT: "{{ DISCERN_MYSQL_PORT }}"
USER: "{{ DISCERN_MYSQL_USER }}"
GOOGLE_ANALYTICS_PROPERTY_ID: "{{ DISCERN_GOOGLE_ANALYTICS_PROPERTY_ID }}"
discern_debian_pkgs:
......
......@@ -51,7 +51,7 @@
#Numpy has to be a pre-requirement in order for scipy to build
- name : install python pre-requirements for discern and ease
pip: >
requirements={{item}} virtualenv={{ discern_venv_dir }} state=present
requirements={{ item }} virtualenv={{ discern_venv_dir }} state=present
extra_args="-i {{ COMMON_PYPI_MIRROR_URL }}"
sudo_user: "{{ discern_user }}"
notify:
......@@ -62,7 +62,7 @@
- name : install python requirements for discern and ease
pip: >
requirements={{item}} virtualenv={{ discern_venv_dir }} state=present
requirements={{ item }} virtualenv={{ discern_venv_dir }} state=present
extra_args="-i {{ COMMON_PYPI_MIRROR_URL }}"
sudo_user: "{{ discern_user }}"
notify:
......@@ -95,7 +95,7 @@
#support virtualenvs as of this comment
- name: django syncdb migrate and collectstatic for discern
shell: >
{{ discern_venv_dir }}/bin/python {{discern_code_dir}}/manage.py {{item}} --noinput --settings={{discern_settings}} --pythonpath={{discern_code_dir}}
{{ discern_venv_dir }}/bin/python {{ discern_code_dir }}/manage.py {{ item }} --noinput --settings={{discern_settings}} --pythonpath={{discern_code_dir}}
chdir={{ discern_code_dir }}
sudo_user: "{{ discern_user }}"
notify:
......@@ -107,7 +107,7 @@
#Have this separate from the other three because it doesn't take the noinput flag
- name: django update_index for discern
shell: >
{{ discern_venv_dir}}/bin/python {{discern_code_dir}}/manage.py update_index --settings={{discern_settings}} --pythonpath={{discern_code_dir}}
{{ discern_venv_dir}}/bin/python {{ discern_code_dir }}/manage.py update_index --settings={{ discern_settings }} --pythonpath={{discern_code_dir}}
chdir={{ discern_code_dir }}
sudo_user: "{{ discern_user }}"
notify:
......
......@@ -9,9 +9,9 @@ stop on runlevel [!2345]
respawn
respawn limit 3 30
env DJANGO_SETTINGS_MODULE={{discern_settings}}
env DJANGO_SETTINGS_MODULE={{ discern_settings }}
chdir {{ discern_code_dir }}
setuid {{discern_user}}
setuid {{ discern_user }}
exec {{ discern_venv_dir }}/bin/python {{ discern_code_dir }}/manage.py celeryd --loglevel=info --settings={{ discern_settings }} --pythonpath={{ discern_code_dir }} -B --autoscale={{ ansible_processor_cores * 2 }},1
......@@ -27,11 +27,11 @@
when: EDXAPP_USE_GIT_IDENTITY
# Do A Checkout
- name: checkout edx-platform repo into {{edxapp_code_dir}}
- name: checkout edx-platform repo into {{ edxapp_code_dir }}
git: >
dest={{edxapp_code_dir}}
repo={{edx_platform_repo}}
version={{edx_platform_version}}
dest={{ edxapp_code_dir }}
repo={{ edx_platform_repo }}
version={{ edx_platform_version }}
accept_hostkey=yes
sudo_user: "{{ edxapp_user }}"
environment:
......@@ -42,7 +42,7 @@
- "restart edxapp_workers"
- name: git clean after checking out edx-platform
shell: cd {{edxapp_code_dir}} && git clean -xdf
shell: cd {{ edxapp_code_dir }} && git clean -xdf
sudo_user: "{{ edxapp_user }}"
notify:
- "restart edxapp"
......@@ -50,9 +50,9 @@
- name: checkout theme
git: >
dest={{ edxapp_app_dir }}/themes/{{edxapp_theme_name}}
repo={{edxapp_theme_source_repo}}
version={{edxapp_theme_version}}
dest={{ edxapp_app_dir }}/themes/{{ edxapp_theme_name }}
repo={{ edxapp_theme_source_repo }}
version={{ edxapp_theme_version }}
accept_hostkey=yes
when: edxapp_theme_name != ''
sudo_user: "{{ edxapp_user }}"
......@@ -144,8 +144,8 @@
# Install the python pre requirements into {{ edxapp_venv_dir }}
- name : install python pre-requirements
pip: >
requirements="{{pre_requirements_file}}"
virtualenv="{{edxapp_venv_dir}}"
requirements="{{ pre_requirements_file }}"
virtualenv="{{ edxapp_venv_dir }}"
state=present
extra_args="-i {{ COMMON_PYPI_MIRROR_URL }}"
sudo_user: "{{ edxapp_user }}"
......@@ -173,8 +173,8 @@
# Install the python post requirements into {{ edxapp_venv_dir }}
- name : install python post-requirements
pip: >
requirements="{{post_requirements_file}}"
virtualenv="{{edxapp_venv_dir}}"
requirements="{{ post_requirements_file }}"
virtualenv="{{ edxapp_venv_dir }}"
state=present
extra_args="-i {{ COMMON_PYPI_MIRROR_URL }}"
sudo_user: "{{ edxapp_user }}"
......@@ -187,8 +187,8 @@
# Install the python paver requirements into {{ edxapp_venv_dir }}
- name : install python paver-requirements
pip: >
requirements="{{paver_requirements_file}}"
virtualenv="{{edxapp_venv_dir}}"
requirements="{{ paver_requirements_file }}"
virtualenv="{{ edxapp_venv_dir }}"
state=present
extra_args="-i {{ COMMON_PYPI_MIRROR_URL }}"
sudo_user: "{{ edxapp_user }}"
......@@ -257,7 +257,7 @@
- name: install CAS attribute module
pip: >
name="{{ EDXAPP_CAS_ATTRIBUTE_PACKAGE }}"
virtualenv="{{edxapp_venv_dir}}"
virtualenv="{{ edxapp_venv_dir }}"
state=present
extra_args="-i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w --use-mirrors"
sudo_user: "{{ edxapp_user }}"
......@@ -294,8 +294,8 @@
- name: code sandbox | Install base sandbox requirements and create sandbox virtualenv
pip: >
requirements="{{sandbox_base_requirements}}"
virtualenv="{{edxapp_sandbox_venv_dir}}"
requirements="{{ sandbox_base_requirements }}"
virtualenv="{{ edxapp_sandbox_venv_dir }}"
state=present
extra_args="-i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w --use-mirrors"
sudo_user: "{{ edxapp_sandbox_user }}"
......
......@@ -24,7 +24,7 @@ command={{ executable }} {{ max_req }} --preload -b {{ edxapp_cms_gunicorn_host
user={{ common_web_user }}
directory={{ edxapp_code_dir }}
environment={% if COMMON_ENABLE_NEWRELIC_APP %}NEW_RELIC_APP_NAME={{ EDXAPP_NEWRELIC_CMS_APPNAME }},NEW_RELIC_LICENSE_KEY={{ NEWRELIC_LICENSE_KEY }},{% endif -%}PORT={{edxapp_cms_gunicorn_port}},ADDRESS={{edxapp_cms_gunicorn_host}},LANG={{ EDXAPP_LANG }},DJANGO_SETTINGS_MODULE={{ EDXAPP_CMS_ENV }},SERVICE_VARIANT="cms"
environment={% if COMMON_ENABLE_NEWRELIC_APP %}NEW_RELIC_APP_NAME={{ EDXAPP_NEWRELIC_CMS_APPNAME }},NEW_RELIC_LICENSE_KEY={{ NEWRELIC_LICENSE_KEY }},{% endif -%}PORT={{ edxapp_cms_gunicorn_port }},ADDRESS={{ edxapp_cms_gunicorn_host }},LANG={{ EDXAPP_LANG }},DJANGO_SETTINGS_MODULE={{ EDXAPP_CMS_ENV }},SERVICE_VARIANT="cms"
stdout_logfile={{ supervisor_log_dir }}/%(program_name)-stdout.log
stderr_logfile={{ supervisor_log_dir }}/%(program_name)-stderr.log
killasgroup=true
......
......@@ -25,7 +25,7 @@ command={{ executable }} {{ max_req }} --preload -b {{ edxapp_lms_gunicorn_host
user={{ common_web_user }}
directory={{ edxapp_code_dir }}
environment={% if COMMON_ENABLE_NEWRELIC_APP %}NEW_RELIC_APP_NAME={{ EDXAPP_NEWRELIC_LMS_APPNAME }},NEW_RELIC_LICENSE_KEY={{ NEWRELIC_LICENSE_KEY }},{% endif -%} PORT={{edxapp_lms_gunicorn_port}},ADDRESS={{edxapp_lms_gunicorn_host}},LANG={{ EDXAPP_LANG }},DJANGO_SETTINGS_MODULE={{ EDXAPP_LMS_ENV }},SERVICE_VARIANT="lms",PATH="{{ edxapp_deploy_path }}"
environment={% if COMMON_ENABLE_NEWRELIC_APP %}NEW_RELIC_APP_NAME={{ EDXAPP_NEWRELIC_LMS_APPNAME }},NEW_RELIC_LICENSE_KEY={{ NEWRELIC_LICENSE_KEY }},{% endif -%} PORT={{ edxapp_lms_gunicorn_port }},ADDRESS={{ edxapp_lms_gunicorn_host }},LANG={{ EDXAPP_LANG }},DJANGO_SETTINGS_MODULE={{ EDXAPP_LMS_ENV }},SERVICE_VARIANT="lms",PATH="{{ edxapp_deploy_path }}"
stdout_logfile={{ supervisor_log_dir }}/%(program_name)-stdout.log
stderr_logfile={{ supervisor_log_dir }}/%(program_name)-stderr.log
killasgroup=true
......
......@@ -17,7 +17,7 @@
mysql_user: >
name={{ EDXAPP_MYSQL_USER }}
password={{ EDXAPP_MYSQL_PASSWORD }}
priv='{{EDXAPP_MYSQL_DB_NAME}}.*:ALL'
priv='{{ EDXAPP_MYSQL_DB_NAME }}.*:ALL'
when: EDXAPP_MYSQL_USER is defined
- name: create a database for edxapp
......@@ -31,7 +31,7 @@
mysql_user: >
name={{ XQUEUE_MYSQL_USER }}
password={{ XQUEUE_MYSQL_PASSWORD }}
priv='{{XQUEUE_MYSQL_DB_NAME}}.*:ALL'
priv='{{ XQUEUE_MYSQL_DB_NAME }}.*:ALL'
when: XQUEUE_MYSQL_USER is defined
- name: create a database for xqueue
......@@ -45,7 +45,7 @@
mysql_user: >
name={{ ORA_MYSQL_USER }}
password={{ ORA_MYSQL_PASSWORD }}
priv='{{ORA_MYSQL_DB_NAME}}.*:ALL'
priv='{{ ORA_MYSQL_DB_NAME }}.*:ALL'
when: ORA_MYSQL_USER is defined
- name: create a database for ora
......
......@@ -3,11 +3,11 @@
# Path to directory where to store index data allocated for this node.
#
path.data: {{elasticsearch_data_dir}}
path.data: {{ elasticsearch_data_dir }}
# Path to log files:
#
path.logs: {{elasticsearch_log_dir}}
path.logs: {{ elasticsearch_log_dir }}
# ElasticSearch performs poorly when JVM starts swapping: you should ensure that
# it _never_ swaps.
......
......@@ -24,4 +24,4 @@ flower_deploy_path: "{{ flower_venv_bin }}:/usr/local/sbin:/usr/local/bin:/usr/b
flower_broker: "amqp://{{ FLOWER_BROKER_USERNAME }}:{{ FLOWER_BROKER_PASSWORD }}@{{ FLOWER_BROKER_HOST }}:{{ FLOWER_BROKER_PORT }}"
flower_environment:
PATH: "{{flower_deploy_path}}"
PATH: "{{ flower_deploy_path }}"
......@@ -18,7 +18,7 @@ FORUM_MONGO_HOSTS:
FORUM_MONGO_TAGS: !!null
FORUM_MONGO_PORT: "27017"
FORUM_MONGO_DATABASE: "cs_comments_service"
FORUM_MONGO_URL: "mongodb://{{ FORUM_MONGO_USER }}:{{ FORUM_MONGO_PASSWORD }}@{%- for host in FORUM_MONGO_HOSTS -%}{{host}}:{{ FORUM_MONGO_PORT }}{%- if not loop.last -%},{%- endif -%}{%- endfor -%}/{{ FORUM_MONGO_DATABASE }}{%- if FORUM_MONGO_TAGS -%}?tags={{ FORUM_MONGO_TAGS }}{%- endif -%}"
FORUM_MONGO_URL: "mongodb://{{ FORUM_MONGO_USER }}:{{ FORUM_MONGO_PASSWORD }}@{%- for host in FORUM_MONGO_HOSTS -%}{{ host }}:{{ FORUM_MONGO_PORT }}{%- if not loop.last -%},{%- endif -%}{%- endfor -%}/{{ FORUM_MONGO_DATABASE }}{%- if FORUM_MONGO_TAGS -%}?tags={{ FORUM_MONGO_TAGS }}{%- endif -%}"
FORUM_SINATRA_ENV: "development"
FORUM_RACK_ENV: "development"
FORUM_NGINX_PORT: "18080"
......
......@@ -10,7 +10,7 @@ env PID=/var/tmp/comments_service.pid
chdir {{ forum_code_dir }}
script
. {{forum_app_dir}}/forum_env
{{forum_app_dir}}/.rbenv/shims/ruby app.rb
. {{ forum_app_dir }}/forum_env
{{ forum_app_dir }}/.rbenv/shims/ruby app.rb
end script
......@@ -22,7 +22,7 @@ dependencies:
- common
- aws
- role: jenkins_master
jenkins_plugins: "{{jenkins_admin_plugins}}"
jenkins_plugins: "{{ jenkins_admin_plugins }}"
- role: supervisor
supervisor_app_dir: "{{ jenkins_supervisor_app_dir }}"
supervisor_data_dir: "{{ jenkins_supervisor_data_dir }}"
......
{% for deployment, creds in JENKINS_ADMIN_AWS_CREDENTIALS.iteritems() %}
[profile {{deployment}}]
[profile {{ deployment }}]
aws_access_key_id = {{ creds.access_id }}
aws_secret_access_key = {{ creds.secret_key }}
......
{% for deployment, creds in JENKINS_ADMIN_AWS_CREDENTIALS.iteritems() %}
[profile {{deployment}}]
[profile {{ deployment }}]
aws_access_key_id = {{ creds.access_id }}
aws_secret_access_key = {{ creds.secret_key }}
......
......@@ -46,7 +46,7 @@ rm -rf $BUILD_ID
<profileName>{{ JENKINS_ADMIN_S3_PROFILE.name }}</profileName>
<entries>
<hudson.plugins.s3.Entry>
<bucket>edx-jenkins-backups/{{JENKINS_ADMIN_NAME}}</bucket>
<bucket>edx-jenkins-backups/{{ JENKINS_ADMIN_NAME }}</bucket>
<sourceFile>${BUILD_ID}.tar.gz</sourceFile>
<storageClass>STANDARD</storageClass>
<selectedRegion>US_EAST_1</selectedRegion>
......
......@@ -58,7 +58,7 @@
# Using this instead of the user module because the user module
# fails if the directory exists.
- name: set home directory for jenkins user
shell: usermod -d {{jenkins_home}} {{jenkins_user}}
shell: usermod -d {{ jenkins_home }} {{ jenkins_user }}
- name: make plugins directory
file:
......
......@@ -45,7 +45,7 @@
assign_public_ip: yes
wait: true
region: "{{ region }}"
instance_tags: "{{instance_tags}}"
instance_tags: "{{ instance_tags }}"
volumes:
- device_name: /dev/sda1
volume_size: "{{ root_ebs_size }}"
......
......@@ -18,15 +18,15 @@
- name: create ora application config
copy:
src={{secure_dir}}/files/{{COMMON_ENVIRONMENT}}/legacy_ora/ora.env.json
dest={{ora_app_dir}}/env.json
src={{ secure_dir }}/files/{{ COMMON_ENVIRONMENT }}/legacy_ora/ora.env.json
dest={{ ora_app_dir }}/env.json
sudo_user: "{{ ora_user }}"
register: env_state
- name: create ora auth file
copy:
src={{secure_dir}}/files/{{COMMON_ENVIRONMENT}}/legacy_ora/ora.auth.json
dest={{ora_app_dir}}/auth.json
src={{ secure_dir }}/files/{{ COMMON_ENVIRONMENT }}/legacy_ora/ora.auth.json
dest={{ ora_app_dir }}/auth.json
sudo_user: "{{ ora_user }}"
register: auth_state
......
......@@ -32,7 +32,7 @@
get_url: url={{ logstash_url }} dest={{ logstash_app_dir }}/share/{{ logstash_file }}
- name: ensure symlink with no version exists at {{ logstash_app_dir }}/share/logstash.jar
file: src={{ logstash_app_dir }}/share/{{logstash_file}} dest={{ logstash_app_dir }}/share/logstash.jar state=link
file: src={{ logstash_app_dir }}/share/{{ logstash_file }} dest={{ logstash_app_dir }}/share/logstash.jar state=link
- name: start logstash
action: service name=logstash state=started enabled=yes
......
......@@ -17,7 +17,7 @@
- name: add the mongodb signing key
apt_key: >
id=7F0CEB10
url={{MONGODB_APT_KEY}}
url={{ MONGODB_APT_KEY }}
state=present
- name: add the mongodb repo to the sources list
......
......@@ -18,8 +18,8 @@ server {
{% if NGINX_ENABLE_SSL %}
listen {{EDXAPP_CMS_NGINX_PORT}} {{default_site}};
listen {{EDXAPP_CMS_SSL_NGINX_PORT}} ssl;
listen {{ EDXAPP_CMS_NGINX_PORT }} {{ default_site }};
listen {{ EDXAPP_CMS_SSL_NGINX_PORT }} ssl;
ssl_certificate /etc/ssl/certs/{{ NGINX_SSL_CERTIFICATE|basename }};
ssl_certificate_key /etc/ssl/private/{{ NGINX_SSL_KEY|basename }};
......@@ -27,7 +27,7 @@ server {
add_header Strict-Transport-Security "max-age=31536000; includeSubDomains";
{% else %}
listen {{EDXAPP_CMS_NGINX_PORT}} {{default_site}};
listen {{ EDXAPP_CMS_NGINX_PORT }} {{ default_site }};
{% endif %}
server_name {{ CMS_HOSTNAME }};
......
......@@ -32,7 +32,7 @@ upstream forum_app_server {
server {
server_name forum.*;
listen {{ FORUM_NGINX_PORT }} {{default_site}};
listen {{ FORUM_NGINX_PORT }} {{ default_site }};
client_max_body_size 1M;
keepalive_timeout 5;
......
......@@ -13,14 +13,14 @@ server {
{% if NGINX_ENABLE_SSL %}
listen {{KIBANA_NGINX_PORT}} {{default_site}};
listen {{KIBANA_SSL_NGINX_PORT}} {{default_site}} ssl;
listen {{ KIBANA_NGINX_PORT }} {{ default_site }};
listen {{ KIBANA_SSL_NGINX_PORT }} {{ default_site }} ssl;
ssl_certificate /etc/ssl/certs/{{ NGINX_SSL_CERTIFICATE|basename }};
ssl_certificate_key /etc/ssl/private/{{ NGINX_SSL_KEY|basename }};
{% else %}
listen {{KIBANA_NGINX_PORT}} {{default_site}};
listen {{ KIBANA_NGINX_PORT }} {{ default_site }};
{% endif %}
server_name {{ KIBANA_SERVER_NAME }};
......
......@@ -7,7 +7,7 @@ upstream lms-preview-backend {
server {
# LMS-preview configuration file for nginx, templated by ansible
listen {{EDXAPP_LMS_PREVIEW_NGINX_PORT}};
listen {{ EDXAPP_LMS_PREVIEW_NGINX_PORT }};
server_name preview.*;
......
......@@ -23,8 +23,8 @@ server {
{% if NGINX_ENABLE_SSL %}
listen {{EDXAPP_LMS_NGINX_PORT}} {{default_site}};
listen {{EDXAPP_LMS_SSL_NGINX_PORT}} {{default_site}} ssl;
listen {{ EDXAPP_LMS_NGINX_PORT }} {{ default_site }};
listen {{ EDXAPP_LMS_SSL_NGINX_PORT }} {{ default_site }} ssl;
ssl_certificate /etc/ssl/certs/{{ NGINX_SSL_CERTIFICATE|basename }};
ssl_certificate_key /etc/ssl/private/{{ NGINX_SSL_KEY|basename }};
......@@ -32,7 +32,7 @@ server {
add_header Strict-Transport-Security "max-age=31536000; includeSubDomains";
{% else %}
listen {{EDXAPP_LMS_NGINX_PORT}} {{default_site}};
listen {{ EDXAPP_LMS_NGINX_PORT }} {{ default_site }};
{% endif %}
access_log {{ nginx_log_dir }}/access.log {{ NGINX_LOG_FORMAT_NAME }};
......
......@@ -20,7 +20,7 @@
- name: unarchive nltk data
shell: >
unzip {{NLTK_DATA_DIR}}/{{ item.url|basename }} chdir="{{ NLTK_DATA_DIR }}/{{ item.path|dirname }}"
unzip {{ NLTK_DATA_DIR }}/{{ item.url|basename }} chdir="{{ NLTK_DATA_DIR }}/{{ item.path|dirname }}"
with_items: NLTK_DATA
when: nltk_download|changed
tags:
......
......@@ -32,9 +32,9 @@
- name: checkout theme
git: >
dest={{ NOTIFIER_CODE_DIR }}/{{NOTIFIER_THEME_NAME}}
repo={{NOTIFIER_THEME_REPO}}
version={{NOTIFIER_THEME_VERSION}}
dest={{ NOTIFIER_CODE_DIR }}/{{ NOTIFIER_THEME_NAME }}
repo={{ NOTIFIER_THEME_REPO }}
version={{ NOTIFIER_THEME_VERSION }}
accept_hostkey=yes
when: NOTIFIER_THEME_NAME != ''
sudo_user: "{{ NOTIFIER_USER }}"
......
......@@ -2,5 +2,5 @@ from .settings import *
FORUM_DIGEST_EMAIL_SUBJECT = '{{ NOTIFIER_DIGEST_EMAIL_SUBJECT }}'
CUSTOM_THEME_DIR = '{{ NOTIFIER_CODE_DIR }}/{{NOTIFIER_THEME_NAME}}/'
CUSTOM_THEME_DIR = '{{ NOTIFIER_CODE_DIR }}/{{ NOTIFIER_THEME_NAME }}/'
TEMPLATE_DIRS = (CUSTOM_THEME_DIR + 'templates',)
......@@ -87,61 +87,61 @@ ora_gunicorn_host: 127.0.0.1
# appropriate for running all edX
# services on a single server.
ora_env_config:
LOGGING_ENV: "{{ORA_LOGGING_ENV}}"
LOGGING_ENV: "{{ ORA_LOGGING_ENV }}"
LOG_DIR: "{{ COMMON_DATA_DIR }}/logs/xqueue"
COURSE_DATA_PATH: "{{ ora_data_course_dir }}"
REQUESTS_TIMEOUT: "{{ORA_REQUESTS_TIMEOUT}}"
QUEUES_TO_PULL_FROM: "{{ORA_QUEUES_TO_PULL_FROM}}"
TIME_BETWEEN_XQUEUE_PULLS: "{{ORA_TIME_BETWEEN_XQUEUE_PULLS}}"
TIME_BETWEEN_EXPIRED_CHECKS: "{{ORA_TIME_BETWEEN_EXPIRED_CHECKS}}"
GRADER_SETTINGS_DIRECTORY: "{{ORA_GRADER_SETTINGS_DIRECTORY}}"
MAX_NUMBER_OF_TIMES_TO_RETRY_GRADING: "{{ORA_MAX_NUMBER_OF_TIMES_TO_RETRY_GRADING}}"
MIN_TO_USE_ML: "{{ORA_MIN_TO_USE_ML}}"
ML_PATH: "{{ORA_ML_PATH}}"
ML_MODEL_PATH: "{{ORA_ML_MODEL_PATH}}"
TIME_BETWEEN_ML_CREATOR_CHECKS: "{{ORA_TIME_BETWEEN_ML_CREATOR_CHECKS}}"
TIME_BETWEEN_ML_GRADER_CHECKS: "{{ORA_TIME_BETWEEN_ML_GRADER_CHECKS}}"
MIN_TO_USE_PEER: "{{ORA_MIN_TO_USE_PEER}}"
PEER_GRADER_COUNT: "{{ORA_PEER_GRADER_COUNT}}"
PEER_GRADER_MINIMUM_TO_CALIBRATE: "{{ORA_PEER_GRADER_MINIMUM_TO_CALIBRATE}}"
PEER_GRADER_MAXIMUM_TO_CALIBRATE: "{{ORA_PEER_GRADER_MAXIMUM_TO_CALIBRATE}}"
PEER_GRADER_MIN_NORMALIZED_CALIBRATION_ERROR: "{{ORA_PEER_GRADER_MIN_NORMALIZED_CALIBRATION_ERROR}}"
EXPIRE_SUBMISSIONS_AFTER: "{{ORA_EXPIRE_SUBMISSIONS_AFTER}}"
RESET_SUBMISSIONS_AFTER: "{{ORA_RESET_SUBMISSIONS_AFTER}}"
LOCAL_LOGLEVEL: "{{ORA_LOCAL_LOGLEVEL}}"
DEBUG: "{{ORA_DEBUG}}"
REQUESTS_TIMEOUT: "{{ ORA_REQUESTS_TIMEOUT }}"
QUEUES_TO_PULL_FROM: "{{ ORA_QUEUES_TO_PULL_FROM }}"
TIME_BETWEEN_XQUEUE_PULLS: "{{ ORA_TIME_BETWEEN_XQUEUE_PULLS }}"
TIME_BETWEEN_EXPIRED_CHECKS: "{{ ORA_TIME_BETWEEN_EXPIRED_CHECKS }}"
GRADER_SETTINGS_DIRECTORY: "{{ ORA_GRADER_SETTINGS_DIRECTORY }}"
MAX_NUMBER_OF_TIMES_TO_RETRY_GRADING: "{{ ORA_MAX_NUMBER_OF_TIMES_TO_RETRY_GRADING }}"
MIN_TO_USE_ML: "{{ ORA_MIN_TO_USE_ML }}"
ML_PATH: "{{ ORA_ML_PATH }}"
ML_MODEL_PATH: "{{ ORA_ML_MODEL_PATH }}"
TIME_BETWEEN_ML_CREATOR_CHECKS: "{{ ORA_TIME_BETWEEN_ML_CREATOR_CHECKS }}"
TIME_BETWEEN_ML_GRADER_CHECKS: "{{ ORA_TIME_BETWEEN_ML_GRADER_CHECKS }}"
MIN_TO_USE_PEER: "{{ ORA_MIN_TO_USE_PEER }}"
PEER_GRADER_COUNT: "{{ ORA_PEER_GRADER_COUNT }}"
PEER_GRADER_MINIMUM_TO_CALIBRATE: "{{ ORA_PEER_GRADER_MINIMUM_TO_CALIBRATE }}"
PEER_GRADER_MAXIMUM_TO_CALIBRATE: "{{ ORA_PEER_GRADER_MAXIMUM_TO_CALIBRATE }}"
PEER_GRADER_MIN_NORMALIZED_CALIBRATION_ERROR: "{{ ORA_PEER_GRADER_MIN_NORMALIZED_CALIBRATION_ERROR }}"
EXPIRE_SUBMISSIONS_AFTER: "{{ ORA_EXPIRE_SUBMISSIONS_AFTER }}"
RESET_SUBMISSIONS_AFTER: "{{ ORA_RESET_SUBMISSIONS_AFTER }}"
LOCAL_LOGLEVEL: "{{ ORA_LOCAL_LOGLEVEL }}"
DEBUG: "{{ ORA_DEBUG }}"
SYSLOG_SERVER: ORA_SYSLOG_SERVER
USE_S3_TO_STORE_MODELS: ORA_USE_S3_TO_STORE_MODELS
S3_BUCKETNAME: "{{ORA_S3_BUCKETNAME}}"
S3_BUCKETNAME: "{{ ORA_S3_BUCKETNAME }}"
ora_auth_config:
USERS: "{{ORA_USERS}}"
USERS: "{{ ORA_USERS }}"
XQUEUE_INTERFACE:
django_auth:
username: "{{ORA_XQUEUE_DJANGO_USER}}"
password: "{{ORA_XQUEUE_DJANGO_PASSWORD}}"
basic_auth: [ "{{ORA_XQUEUE_BASIC_AUTH_USER}}", "{{ORA_XQUEUE_BASIC_AUTH_PASSWORD}}" ]
url: "{{ORA_XQUEUE_URL}}"
username: "{{ ORA_XQUEUE_DJANGO_USER }}"
password: "{{ ORA_XQUEUE_DJANGO_PASSWORD }}"
basic_auth: [ "{{ ORA_XQUEUE_BASIC_AUTH_USER }}", "{{ORA_XQUEUE_BASIC_AUTH_PASSWORD}}" ]
url: "{{ ORA_XQUEUE_URL }}"
GRADING_CONTROLLER_INTERFACE:
django_auth:
password: "{{ORA_DJANGO_PASSWORD}}"
username: "{{ORA_DJANGO_USER}}"
url: "{{ORA_URL}}"
password: "{{ ORA_DJANGO_PASSWORD }}"
username: "{{ ORA_DJANGO_USER }}"
url: "{{ ORA_URL }}"
DATABASES:
default:
ENGINE: 'django.db.backends.mysql'
NAME: "{{ORA_MYSQL_DB_NAME}}"
USER: "{{ORA_MYSQL_USER}}"
PASSWORD: "{{ORA_MYSQL_PASSWORD}}"
HOST: "{{ORA_MYSQL_HOST}}"
PORT: "{{ORA_MYSQL_PORT}}"
AWS_ACCESS_KEY_ID: "{{ORA_AWS_ACCESS_KEY_ID}}"
AWS_SECRET_ACCESS_KEY: "{{ORA_AWS_SECRET_ACCESS_KEY}}"
NAME: "{{ ORA_MYSQL_DB_NAME }}"
USER: "{{ ORA_MYSQL_USER }}"
PASSWORD: "{{ ORA_MYSQL_PASSWORD }}"
HOST: "{{ ORA_MYSQL_HOST }}"
PORT: "{{ ORA_MYSQL_PORT }}"
AWS_ACCESS_KEY_ID: "{{ ORA_AWS_ACCESS_KEY_ID }}"
AWS_SECRET_ACCESS_KEY: "{{ ORA_AWS_SECRET_ACCESS_KEY }}"
ora_environment:
SERVICE_VARIANT: ora
LANG: "{{ORA_LANG}}"
PATH: "{{ora_deploy_path}}"
LANG: "{{ ORA_LANG }}"
PATH: "{{ ora_deploy_path }}"
ora_debian_pkgs:
- python-software-properties
......
......@@ -22,11 +22,11 @@
- include: ease.yml
- name: create ora application config
template: src=ora.env.json.j2 dest={{ora_app_dir}}/ora.env.json
template: src=ora.env.json.j2 dest={{ ora_app_dir }}/ora.env.json
sudo_user: "{{ ora_user }}"
- name: create ora auth file
template: src=ora.auth.json.j2 dest={{ora_app_dir}}/ora.auth.json
template: src=ora.auth.json.j2 dest={{ ora_app_dir }}/ora.auth.json
sudo_user: "{{ ora_user }}"
- name: setup the ora env
......@@ -80,7 +80,7 @@
- restart ora_celery
- name: syncdb and migrate
shell: SERVICE_VARIANT=ora {{ora_venv_dir}}/bin/django-admin.py syncdb --migrate --noinput --settings=edx_ora.aws --pythonpath={{ora_code_dir}}
shell: SERVICE_VARIANT=ora {{ ora_venv_dir }}/bin/django-admin.py syncdb --migrate --noinput --settings=edx_ora.aws --pythonpath={{ ora_code_dir }}
when: migrate_db is defined and migrate_db|lower == "yes"
sudo_user: "{{ ora_user }}"
notify:
......@@ -88,7 +88,7 @@
- restart ora_celery
- name: create users
shell: SERVICE_VARIANT=ora {{ora_venv_dir}}/bin/django-admin.py update_users --settings=edx_ora.aws --pythonpath={{ora_code_dir}}
shell: SERVICE_VARIANT=ora {{ ora_venv_dir }}/bin/django-admin.py update_users --settings=edx_ora.aws --pythonpath={{ ora_code_dir }}
sudo_user: "{{ ora_user }}"
notify:
- restart ora
......
# Do A Checkout
- name: git checkout ease repo into its base dir
git: >
dest={{ora_ease_code_dir}} repo={{ora_ease_source_repo}} version={{ora_ease_version}}
dest={{ ora_ease_code_dir }} repo={{ ora_ease_source_repo }} version={{ora_ease_version}}
accept_hostkey=yes
sudo_user: "{{ ora_user }}"
notify:
......@@ -9,7 +9,7 @@
- restart ora_celery
- name: install ease system packages
apt: pkg={{item}} state=present
apt: pkg={{ item }} state=present
with_items: ora_ease_debian_pkgs
notify:
- restart ora
......@@ -19,7 +19,7 @@
# Install the python pre requirements into {{ ora_ease_venv_dir }}
- name: install ease python pre-requirements
pip: >
requirements="{{ora_ease_pre_requirements_file}}" virtualenv="{{ora_ease_venv_dir}}" state=present
requirements="{{ ora_ease_pre_requirements_file }}" virtualenv="{{ ora_ease_venv_dir }}" state=present
extra_args="-i {{ COMMON_PYPI_MIRROR_URL }}"
sudo_user: "{{ ora_user }}"
notify:
......@@ -29,7 +29,7 @@
# Install the python post requirements into {{ ora_ease_venv_dir }}
- name: install ease python post-requirements
pip: >
requirements="{{ora_ease_post_requirements_file}}" virtualenv="{{ora_ease_venv_dir}}" state=present
requirements="{{ ora_ease_post_requirements_file }}" virtualenv="{{ ora_ease_venv_dir }}" state=present
extra_args="-i {{ COMMON_PYPI_MIRROR_URL }}"
sudo_user: "{{ ora_user }}"
notify:
......
......@@ -35,14 +35,14 @@
- "{{ ora_app_dir }}/ml_models"
- name: install debian packages that ora needs
apt: pkg={{item}} state=present
apt: pkg={{ item }} state=present
notify:
- restart ora
- restart ora_celery
with_items: ora_debian_pkgs
- name: install debian packages for ease that ora needs
apt: pkg={{item}} state=present
apt: pkg={{ item }} state=present
notify:
- restart ora
- restart ora_celery
......
export JAVA_HOME="{{oraclejdk_link}}"
export JAVA_HOME="{{ oraclejdk_link }}"
export PATH=$JAVA_HOME/bin:$PATH
......@@ -43,9 +43,9 @@ rabbitmq_debian_pkgs:
rabbitmq_config_dir: "/etc/rabbitmq"
rabbitmq_cookie_dir: "/var/lib/rabbitmq"
rabbitmq_cookie_location: "{{rabbitmq_cookie_dir}}/.erlang.cookie"
rabbitmq_cookie_location: "{{ rabbitmq_cookie_dir }}/.erlang.cookie"
rabbitmq_mnesia_folder: "{{rabbitmq_cookie_dir}}/mnesia"
rabbitmq_mnesia_folder: "{{ rabbitmq_cookie_dir }}/mnesia"
rabbitmq_port: 5672
rabbitmq_management_port: 15672
......@@ -53,8 +53,8 @@ rabbitmq_ip: "{{ ansible_default_ipv4.address }}"
# Structure for auth config file.
rabbitmq_auth_config:
erlang_cookie: "{{RABBIT_ERLANG_COOKIE}}"
admins: "{{RABBIT_USERS}}"
erlang_cookie: "{{ RABBIT_ERLANG_COOKIE }}"
admins: "{{ RABBIT_USERS }}"
rabbitmq_clustered_hosts: []
......
......@@ -4,13 +4,13 @@
# http://rabbitmq.1065348.n5.nabble.com/Rabbitmq-boot-failure-with-quot-tables-not-present-quot-td24494.html
- name: trust rabbit repository
apt_key: url={{rabbitmq_apt_key}} state=present
apt_key: url={{ rabbitmq_apt_key }} state=present
- name: install python-software-properties if debian
apt: pkg={{",".join(rabbitmq_debian_pkgs)}} state=present
apt: pkg={{ ",".join(rabbitmq_debian_pkgs) }} state=present
- name: add rabbit repository
apt_repository_1.8: repo="{{rabbitmq_repository}}" state=present update_cache=yes validate_certs=no
apt_repository_1.8: repo="{{ rabbitmq_repository }}" state=present update_cache=yes validate_certs=no
- name: fetch the rabbitmq server deb
get_url: >
......@@ -63,30 +63,30 @@
# Defaulting to /var/lib/rabbitmq
- name: create cookie directory
file: >
path={{rabbitmq_cookie_dir}}
path={{ rabbitmq_cookie_dir }}
owner=rabbitmq group=rabbitmq mode=0755 state=directory
- name: add rabbitmq erlang cookie
template: >
src=erlang.cookie.j2 dest={{rabbitmq_cookie_location}}
src=erlang.cookie.j2 dest={{ rabbitmq_cookie_location }}
owner=rabbitmq group=rabbitmq mode=0400
register: erlang_cookie
# Defaulting to /etc/rabbitmq
- name: create rabbitmq config directory
file: >
path={{rabbitmq_config_dir}}
path={{ rabbitmq_config_dir }}
owner=root group=root mode=0755 state=directory
- name: add rabbitmq environment configuration
template: >
src=rabbitmq-env.conf.j2 dest={{rabbitmq_config_dir}}/rabbitmq-env.conf
src=rabbitmq-env.conf.j2 dest={{ rabbitmq_config_dir }}/rabbitmq-env.conf
owner=root group=root mode=0644
- name: add rabbitmq cluster configuration
template: >
src=etc/rabbitmq/rabbitmq.config.j2
dest={{rabbitmq_config_dir}}/rabbitmq.config
dest={{ rabbitmq_config_dir }}/rabbitmq.config
owner=root group=root mode=0644
register: cluster_configuration
......@@ -98,7 +98,7 @@
# This folder should be deleted before clustering is setup because it retains data
# that can conflict with the clustering information.
- name: remove mnesia configuration
file: path={{rabbitmq_mnesia_folder}} state=absent
file: path={{ rabbitmq_mnesia_folder }} state=absent
when: erlang_cookie.changed or cluster_configuration.changed or rabbitmq_refresh
- name: start rabbit nodes
......
#### Enable periodic security updates
- name: install security packages
apt: name={{item}} state=latest
apt: name={{ item }} state=latest
with_items: security_debian_pkgs
......
......@@ -2,7 +2,7 @@
---
- name: Installs shib and dependencies from apt
apt: pkg={{item}} install_recommends=no state=present update_cache=yes
apt: pkg={{ item }} install_recommends=no state=present update_cache=yes
with_items:
- shibboleth-sp2-schemas
- libshibsp-dev
......@@ -24,14 +24,14 @@
when: shib_download_metadata
- name: writes out key and pem file
template: src=sp.{{item}}.j2 dest=/etc/shibboleth/sp.{{item}} group=_shibd owner=_shibd mode=0600
template: src=sp.{{ item }}.j2 dest=/etc/shibboleth/sp.{{ item }} group=_shibd owner=_shibd mode=0600
with_items:
- key
- pem
notify: restart shibd
- name: writes out configuration files
template: src={{ shib_template_dir }}/{{item}}.j2 dest=/etc/shibboleth/{{item}} group=_shibd owner=_shibd mode=0644
template: src={{ shib_template_dir }}/{{ item }}.j2 dest=/etc/shibboleth/{{ item }} group=_shibd owner=_shibd mode=0644
with_items:
- attribute-map.xml
- shibboleth2.xml
......
......@@ -28,23 +28,23 @@ SPLUNKFORWARDER_SERVERS:
SPLUNKFORWARDER_LOG_ITEMS:
- source: '{{ COMMON_LOG_DIR }}/lms'
recursive: true
index: '{{COMMON_ENVIRONMENT}}-{{COMMON_DEPLOYMENT}}'
index: '{{ COMMON_ENVIRONMENT }}-{{ COMMON_DEPLOYMENT }}'
sourcetype: 'edx'
- source: '{{ COMMON_LOG_DIR }}/cms'
recursive: true
index: '{{COMMON_ENVIRONMENT}}-{{COMMON_DEPLOYMENT}}'
index: '{{ COMMON_ENVIRONMENT }}-{{ COMMON_DEPLOYMENT }}'
sourcetype: 'edx'
- source: '{{ COMMON_LOG_DIR }}'
recursive: true
index: '{{COMMON_ENVIRONMENT}}-{{COMMON_DEPLOYMENT}}'
index: '{{ COMMON_ENVIRONMENT }}-{{ COMMON_DEPLOYMENT }}'
sourcetype: 'syslog'
- source: '/var/log'
recursive: true
index: '{{COMMON_ENVIRONMENT}}-{{COMMON_DEPLOYMENT}}'
index: '{{ COMMON_ENVIRONMENT }}-{{ COMMON_DEPLOYMENT }}'
sourcetype: 'syslog'
- source: '{{ COMMON_LOG_DIR }}/nginx'
recursive: true
index: '{{COMMON_ENVIRONMENT}}-{{COMMON_DEPLOYMENT}}'
index: '{{ COMMON_ENVIRONMENT }}-{{ COMMON_DEPLOYMENT }}'
sourcetype: 'nginx'
#
......
......@@ -31,12 +31,12 @@
- name: download the splunk deb
get_url: >
dest="/tmp/{{SPLUNKFORWARDER_DEB}}"
url="{{SPLUNKFORWARDER_PACKAGE_URL}}"
dest="/tmp/{{ SPLUNKFORWARDER_DEB }}"
url="{{ SPLUNKFORWARDER_PACKAGE_URL }}"
register: download_deb
- name: install splunk forwarder
shell: gdebi -nq /tmp/{{SPLUNKFORWARDER_DEB}}
shell: gdebi -nq /tmp/{{ SPLUNKFORWARDER_DEB }}
when: download_deb.changed
# Create splunk user
......@@ -49,19 +49,19 @@
# to run some of the below commands.
- name: start splunk manually
shell: >
{{splunkforwarder_output_dir}}/bin/splunk start --accept-license --answer-yes --no-prompt
creates={{splunkforwarder_output_dir}}/var/lib/splunk
{{ splunkforwarder_output_dir }}/bin/splunk start --accept-license --answer-yes --no-prompt
creates={{ splunkforwarder_output_dir }}/var/lib/splunk
when: download_deb.changed
register: started_manually
- name: stop splunk manually
shell: >
{{splunkforwarder_output_dir}}/bin/splunk stop --accept-license --answer-yes --no-prompt
{{ splunkforwarder_output_dir }}/bin/splunk stop --accept-license --answer-yes --no-prompt
when: download_deb.changed and started_manually.changed
- name: create boot script
shell: >
{{splunkforwarder_output_dir}}/bin/splunk enable boot-start -user splunk --accept-license --answer-yes --no-prompt
{{ splunkforwarder_output_dir }}/bin/splunk enable boot-start -user splunk --accept-license --answer-yes --no-prompt
creates=/etc/init.d/splunk
register: create_boot_script
when: download_deb.changed
......@@ -69,7 +69,7 @@
# Update credentials
- name: update admin pasword
shell: "{{splunkforwarder_output_dir}}/bin/splunk edit user admin -password {{SPLUNKFORWARDER_PASSWORD}} -auth admin:changeme --accept-license --answer-yes --no-prompt"
shell: "{{ splunkforwarder_output_dir }}/bin/splunk edit user admin -password {{ SPLUNKFORWARDER_PASSWORD }} -auth admin:changeme --accept-license --answer-yes --no-prompt"
when: download_deb.changed
notify: restart splunkforwarder
......@@ -80,7 +80,7 @@
# Ensure permissions on splunk content
- name: ensure splunk forder permissions
file: path={{splunkforwarder_output_dir}} state=directory recurse=yes owner=splunk group=splunk
file: path={{ splunkforwarder_output_dir }} state=directory recurse=yes owner=splunk group=splunk
when: download_deb.changed
notify: restart splunkforwarder
......
......@@ -97,13 +97,13 @@
- name: install supervisor in its venv
pip: >
name=supervisor virtualenv="{{supervisor_venv_dir}}" state=present
name=supervisor virtualenv="{{ supervisor_venv_dir }}" state=present
extra_args="-i {{ COMMON_PYPI_MIRROR_URL }}"
sudo_user: "{{ supervisor_user }}"
- name: install supervisor in its venv
pip: >
name={{ item }} virtualenv="{{supervisor_venv_dir}}" state=present
name={{ item }} virtualenv="{{ supervisor_venv_dir }}" state=present
extra_args="-i {{ COMMON_PYPI_MIRROR_URL }}"
sudo_user: "{{ supervisor_user }}"
with_items: supervisor_pip_pkgs
......@@ -156,7 +156,7 @@
- name: start supervisor
service: >
name={{supervisor_service}}
name={{ supervisor_service }}
state=started
register: start_supervisor
......
......@@ -5,4 +5,4 @@ task
setuid {{ supervisor_user }}
exec {{ supervisor_venv_dir }}/bin/python {{ supervisor_app_dir }}/pre_supervisor_checks.py --available={{supervisor_available_dir}} --enabled={{supervisor_cfg_dir}} {% if SUPERVISOR_HIPCHAT_API_KEY is defined %}--hipchat-api-key {{ SUPERVISOR_HIPCHAT_API_KEY }} --hipchat-room {{ SUPERVISOR_HIPCHAT_ROOM }} {% endif %} {% if edxapp_code_dir is defined %}--edxapp-python {{ COMMON_BIN_DIR }}/python.edxapp --edxapp-code-dir {{ edxapp_code_dir }}{% endif %} {% if xqueue_code_dir is defined %}--xqueue-code-dir {{ xqueue_code_dir }} --xqueue-python {{ COMMON_BIN_DIR }}/python.xqueue {% endif %}
exec {{ supervisor_venv_dir }}/bin/python {{ supervisor_app_dir }}/pre_supervisor_checks.py --available={{ supervisor_available_dir }} --enabled={{ supervisor_cfg_dir }} {% if SUPERVISOR_HIPCHAT_API_KEY is defined %}--hipchat-api-key {{ SUPERVISOR_HIPCHAT_API_KEY }} --hipchat-room {{ SUPERVISOR_HIPCHAT_ROOM }} {% endif %} {% if edxapp_code_dir is defined %}--edxapp-python {{ COMMON_BIN_DIR }}/python.edxapp --edxapp-code-dir {{ edxapp_code_dir }}{% endif %} {% if xqueue_code_dir is defined %}--xqueue-code-dir {{ xqueue_code_dir }} --xqueue-python {{ COMMON_BIN_DIR }}/python.xqueue {% endif %}
......@@ -56,31 +56,31 @@ xqueue_gunicorn_port: 8040
xqueue_gunicorn_host: 127.0.0.1
xqueue_env_config:
XQUEUES: "{{XQUEUE_QUEUES}}"
XQUEUE_WORKERS_PER_QUEUE: "{{XQUEUE_WORKERS_PER_QUEUE}}"
LOGGING_ENV: "{{XQUEUE_LOGGING_ENV}}"
SYSLOG_SERVER: "{{XQUEUE_SYSLOG_SERVER}}"
XQUEUES: "{{ XQUEUE_QUEUES }}"
XQUEUE_WORKERS_PER_QUEUE: "{{ XQUEUE_WORKERS_PER_QUEUE }}"
LOGGING_ENV: "{{ XQUEUE_LOGGING_ENV }}"
SYSLOG_SERVER: "{{ XQUEUE_SYSLOG_SERVER }}"
LOG_DIR: "{{ COMMON_DATA_DIR }}/logs/xqueue"
RABBIT_HOST: "{{XQUEUE_RABBITMQ_HOSTNAME}}"
S3_BUCKET: "{{XQUEUE_S3_BUCKET}}"
S3_PATH_PREFIX: "{{XQUEUE_S3_PATH_PREFIX}}"
LOCAL_LOGLEVEL: "{{XQUEUE_LOCAL_LOGLEVEL}}"
RABBIT_HOST: "{{ XQUEUE_RABBITMQ_HOSTNAME }}"
S3_BUCKET: "{{ XQUEUE_S3_BUCKET }}"
S3_PATH_PREFIX: "{{ XQUEUE_S3_PATH_PREFIX }}"
LOCAL_LOGLEVEL: "{{ XQUEUE_LOCAL_LOGLEVEL }}"
xqueue_auth_config:
AWS_ACCESS_KEY_ID: "{{XQUEUE_AWS_ACCESS_KEY_ID}}"
AWS_SECRET_ACCESS_KEY: "{{XQUEUE_AWS_SECRET_ACCESS_KEY}}"
REQUESTS_BASIC_AUTH: ["{{XQUEUE_BASIC_AUTH_USER}}", "{{XQUEUE_BASIC_AUTH_PASSWORD}}"]
USERS: "{{XQUEUE_DJANGO_USERS}}"
AWS_ACCESS_KEY_ID: "{{ XQUEUE_AWS_ACCESS_KEY_ID }}"
AWS_SECRET_ACCESS_KEY: "{{ XQUEUE_AWS_SECRET_ACCESS_KEY }}"
REQUESTS_BASIC_AUTH: ["{{ XQUEUE_BASIC_AUTH_USER }}", "{{XQUEUE_BASIC_AUTH_PASSWORD}}"]
USERS: "{{ XQUEUE_DJANGO_USERS }}"
DATABASES:
default:
ENGINE: "django.db.backends.mysql"
NAME: "{{XQUEUE_MYSQL_DB_NAME}}"
USER: "{{XQUEUE_MYSQL_USER}}"
PASSWORD: "{{XQUEUE_MYSQL_PASSWORD}}"
HOST: "{{XQUEUE_MYSQL_HOST}}"
PORT: "{{XQUEUE_MYSQL_PORT}}"
RABBITMQ_USER: "{{XQUEUE_RABBITMQ_USER}}"
RABBITMQ_PASS: "{{XQUEUE_RABBITMQ_PASS}}"
NAME: "{{ XQUEUE_MYSQL_DB_NAME }}"
USER: "{{ XQUEUE_MYSQL_USER }}"
PASSWORD: "{{ XQUEUE_MYSQL_PASSWORD }}"
HOST: "{{ XQUEUE_MYSQL_HOST }}"
PORT: "{{ XQUEUE_MYSQL_PORT }}"
RABBITMQ_USER: "{{ XQUEUE_RABBITMQ_USER }}"
RABBITMQ_PASS: "{{ XQUEUE_RABBITMQ_PASS }}"
xqueue_source_repo: "https://github.com/edx/xqueue.git"
xqueue_version: 'HEAD'
......
......@@ -2,7 +2,7 @@
# - group_vars/all
# - common/tasks/main.yml
---
# Check out xqueue repo to {{xqueue_code_dir}}
# Check out xqueue repo to {{ xqueue_code_dir }}
#
#
......
[program:xqueue_consumer]
command={{xqueue_venv_bin}}/django-admin.py run_consumer --pythonpath={{xqueue_code_dir}} --settings=xqueue.aws_settings $WORKERS_PER_QUEUE
command={{ xqueue_venv_bin }}/django-admin.py run_consumer --pythonpath={{ xqueue_code_dir }} --settings=xqueue.aws_settings $WORKERS_PER_QUEUE
user={{ common_web_user }}
directory={{ xqueue_code_dir }}
......
......@@ -32,11 +32,11 @@ xserver_grader_root: "{{ XSERVER_GRADER_DIR }}/graders"
xserver_git_identity: "{{ xserver_app_dir }}/xserver-identity"
xserver_env_config:
RUN_URL: "{{XSERVER_RUN_URL}}"
GRADER_ROOT: "{{xserver_grader_root}}"
LOGGING_ENV: "{{XSERVER_LOGGING_ENV}}"
RUN_URL: "{{ XSERVER_RUN_URL }}"
GRADER_ROOT: "{{ xserver_grader_root }}"
LOGGING_ENV: "{{ XSERVER_LOGGING_ENV }}"
LOG_DIR: "{{ xserver_log_dir }}"
SYSLOG_SERVER: "{{XSERVER_SYSLOG_SERVER}}"
SYSLOG_SERVER: "{{ XSERVER_SYSLOG_SERVER }}"
SANDBOX_PYTHON: '{{ xserver_venv_sandbox_dir }}/bin/python'
xserver_source_repo: "git://github.com/edx/xserver.git"
......
......@@ -13,7 +13,7 @@
- name: checkout code
git: >
dest={{xserver_code_dir}} repo={{xserver_source_repo}} version={{xserver_version}}
dest={{ xserver_code_dir }} repo={{ xserver_source_repo }} version={{xserver_version}}
accept_hostkey=yes
sudo_user: "{{ xserver_user }}"
register: xserver_checkout
......@@ -21,14 +21,14 @@
- name: install requirements
pip: >
requirements="{{xserver_requirements_file}}" virtualenv="{{ xserver_venv_dir }}" state=present
requirements="{{ xserver_requirements_file }}" virtualenv="{{ xserver_venv_dir }}" state=present
extra_args="-i {{ COMMON_PYPI_MIRROR_URL }}"
sudo_user: "{{ xserver_user }}"
notify: restart xserver
- name: install sandbox requirements
pip: >
requirements="{{xserver_requirements_file}}" virtualenv="{{xserver_venv_sandbox_dir}}" state=present
requirements="{{ xserver_requirements_file }}" virtualenv="{{ xserver_venv_sandbox_dir }}" state=present
extra_args="-i {{ COMMON_PYPI_MIRROR_URL }}"
sudo_user: "{{ xserver_user }}"
notify: restart xserver
......
www-data ALL=({{ xserver_sandbox_user }}) NOPASSWD:{{xserver_venv_sandbox_dir}}/bin/python
www-data ALL=({{ xserver_sandbox_user }}) NOPASSWD:{{ xserver_venv_sandbox_dir }}/bin/python