Commit 1a5f7364 by Brian Beggs, committed by Feanil Patel

Update playbooks to new format

parent 0326e11f
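This commit moves task arguments from the legacy inline key=value form (usually wrapped in a '>' folded scalar) to native YAML dictionaries. A minimal sketch of the pattern, using placeholder names (example_dir, example_user, some_command and example_arg are illustrative only, not taken from these playbooks):

# Old format: arguments folded into a single key=value string
- name: example task (old format)
  file: >
    path={{ example_dir }}/some_file
    owner={{ example_user }}
    mode=0644
    state=touch

# New format: arguments as a YAML dictionary, with Jinja2 values quoted
- name: example task (new format)
  file:
    path: "{{ example_dir }}/some_file"
    owner: "{{ example_user }}"
    mode: 0644
    state: touch

# For free-form modules (shell/command) the command string stays as the
# module value and options such as chdir/creates/executable move under args:
- name: example shell task (new format)
  shell: "some_command --flag {{ example_arg }}"
  args:
    chdir: "{{ example_dir }}"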
...@@ -13,25 +13,27 @@ ...@@ -13,25 +13,27 @@
keyfile: "/home/{{ owner }}/.ssh/authorized_keys" keyfile: "/home/{{ owner }}/.ssh/authorized_keys"
serial: "{{ serial_count }}" serial: "{{ serial_count }}"
tasks: tasks:
- fail: msg="You must pass in a public_key" - fail:
msg: "You must pass in a public_key"
when: public_key is not defined when: public_key is not defined
- fail: msg="public does not exist in secrets" - fail:
msg: "public does not exist in secrets"
when: ubuntu_public_keys[public_key] is not defined when: ubuntu_public_keys[public_key] is not defined
- command: mktemp - command: mktemp
register: mktemp register: mktemp
- name: Validate the public key before we add it to authorized_keys - name: Validate the public key before we add it to authorized_keys
copy: > copy:
content="{{ ubuntu_public_keys[public_key] }}" content: "{{ ubuntu_public_keys[public_key] }}"
dest={{ mktemp.stdout }} dest: "{{ mktemp.stdout }}"
# This tests the public key and will not continue if it does not look valid # This tests the public key and will not continue if it does not look valid
- command: ssh-keygen -l -f {{ mktemp.stdout }} - command: ssh-keygen -l -f {{ mktemp.stdout }}
- file: > - file:
path={{ mktemp.stdout }} path: "{{ mktemp.stdout }}"
state=absent state: absent
- lineinfile: > - lineinfile:
dest={{ keyfile }} dest: "{{ keyfile }}"
line="{{ ubuntu_public_keys[public_key] }}" line: "{{ ubuntu_public_keys[public_key] }}"
- file: > - file:
path={{ keyfile }} path: "{{ keyfile }}"
owner={{ owner }} owner: "{{ owner }}"
mode=0600 mode: 0600
...@@ -13,9 +13,9 @@ ...@@ -13,9 +13,9 @@
# is called it will use the new MYSQL connection # is called it will use the new MYSQL connection
# info. # info.
- name: Update RDS to point to the sandbox clone - name: Update RDS to point to the sandbox clone
lineinfile: > lineinfile:
dest=/edx/app/edx_ansible/server-vars.yml dest: /edx/app/edx_ansible/server-vars.yml
line="{{ item }}" line: "{{ item }}"
with_items: with_items:
- "EDXAPP_MYSQL_HOST: {{ EDXAPP_MYSQL_HOST }}" - "EDXAPP_MYSQL_HOST: {{ EDXAPP_MYSQL_HOST }}"
- "EDXAPP_MYSQL_DB_NAME: {{ EDXAPP_MYSQL_DB_NAME }}" - "EDXAPP_MYSQL_DB_NAME: {{ EDXAPP_MYSQL_DB_NAME }}"
...@@ -24,9 +24,9 @@ ...@@ -24,9 +24,9 @@
tags: update_edxapp_mysql_host tags: update_edxapp_mysql_host
- name: Update mongo to point to the sandbox mongo clone - name: Update mongo to point to the sandbox mongo clone
lineinfile: > lineinfile:
dest=/edx/app/edx_ansible/server-vars.yml dest: /edx/app/edx_ansible/server-vars.yml
line="{{ item }}" line: "{{ item }}"
with_items: with_items:
- "EDXAPP_MONGO_HOSTS: {{ EDXAPP_MONGO_HOSTS }}" - "EDXAPP_MONGO_HOSTS: {{ EDXAPP_MONGO_HOSTS }}"
- "EDXAPP_MONGO_DB_NAME: {{ EDXAPP_MONGO_DB_NAME }}" - "EDXAPP_MONGO_DB_NAME: {{ EDXAPP_MONGO_DB_NAME }}"
...@@ -35,6 +35,5 @@ ...@@ -35,6 +35,5 @@
tags: update_edxapp_mysql_host tags: update_edxapp_mysql_host
- name: call update on edx-platform - name: call update on edx-platform
shell: > shell: "/edx/bin/update edx-platform {{ edxapp_version }}"
/edx/bin/update edx-platform {{ edxapp_version }}
tags: update_edxapp_mysql_host tags: update_edxapp_mysql_host
...@@ -53,26 +53,26 @@ ...@@ -53,26 +53,26 @@
- MySQL-python - MySQL-python
- name: create mysql databases - name: create mysql databases
mysql_db: > mysql_db:
db={{ item.name}} db: "{{ item.name}}"
state={{ item.state }} state: "{{ item.state }}"
encoding={{ item.encoding }} encoding: "{{ item.encoding }}"
login_host={{ item.login_host }} login_host: "{{ item.login_host }}"
login_user={{ item.login_user }} login_user: "{{ item.login_user }}"
login_password={{ item.login_password }} login_password: "{{ item.login_password }}"
with_items: databases with_items: databases
tags: tags:
- dbs - dbs
- name: create mysql users and assign privileges - name: create mysql users and assign privileges
mysql_user: > mysql_user:
name="{{ item.name }}" name: "{{ item.name }}"
priv="{{ '/'.join(item.privileges) }}" priv: "{{ '/'.join(item.privileges) }}"
password="{{ item.password }}" password: "{{ item.password }}"
host={{ item.host }} host: "{{ item.host }}"
login_host={{ item.login_host }} login_host: "{{ item.login_host }}"
login_user={{ item.login_user }} login_user: "{{ item.login_user }}"
login_password={{ item.login_password }} login_password: "{{ item.login_password }}"
append_privs=yes append_privs: yes
with_items: database_users with_items: database_users
tags: tags:
......
...@@ -47,10 +47,10 @@ ...@@ -47,10 +47,10 @@
elb: false elb: false
pre_tasks: pre_tasks:
- name: Wait for cloud-init to finish - name: Wait for cloud-init to finish
wait_for: > wait_for:
path=/var/log/cloud-init.log path: /var/log/cloud-init.log
timeout=15 timeout: 15
search_regex="final-message" search_regex: "final-message"
vars_files: vars_files:
- roles/edxapp/defaults/main.yml - roles/edxapp/defaults/main.yml
- roles/xqueue/defaults/main.yml - roles/xqueue/defaults/main.yml
......
...@@ -8,9 +8,9 @@ ...@@ -8,9 +8,9 @@
- edxapp - edxapp
tasks: tasks:
- name: migrate lms - name: migrate lms
shell: > shell: "python manage.py lms migrate --database {{ item }} --noinput {{ db_dry_run }} --settings=aws"
chdir={{ edxapp_code_dir }} args:
python manage.py lms migrate --database {{ item }} --noinput {{ db_dry_run }} --settings=aws chdir: "{{ edxapp_code_dir }}"
environment: environment:
DB_MIGRATION_USER: "{{ COMMON_MYSQL_MIGRATE_USER }}" DB_MIGRATION_USER: "{{ COMMON_MYSQL_MIGRATE_USER }}"
DB_MIGRATION_PASS: "{{ COMMON_MYSQL_MIGRATE_PASS }}" DB_MIGRATION_PASS: "{{ COMMON_MYSQL_MIGRATE_PASS }}"
...@@ -21,9 +21,9 @@ ...@@ -21,9 +21,9 @@
tags: tags:
- always - always
- name: migrate cms - name: migrate cms
shell: > shell: "python manage.py cms migrate --database {{ item }} --noinput {{ db_dry_run }} --settings=aws"
chdir={{ edxapp_code_dir }} args:
python manage.py cms migrate --database {{ item }} --noinput {{ db_dry_run }} --settings=aws chdir: "{{ edxapp_code_dir }}"
environment: environment:
DB_MIGRATION_USER: "{{ COMMON_MYSQL_MIGRATE_USER }}" DB_MIGRATION_USER: "{{ COMMON_MYSQL_MIGRATE_USER }}"
DB_MIGRATION_PASS: "{{ COMMON_MYSQL_MIGRATE_PASS }}" DB_MIGRATION_PASS: "{{ COMMON_MYSQL_MIGRATE_PASS }}"
......
...@@ -14,11 +14,11 @@ ...@@ -14,11 +14,11 @@
- name: stop certs service - name: stop certs service
service: name="certificates" state="stopped" service: name="certificates" state="stopped"
- name: checkout code - name: checkout code
git_2_0_1: > git_2_0_1:
repo="{{ repo_url }}" repo: "{{ repo_url }}"
dest="{{ repo_path }}" dest: "{{ repo_path }}"
version="{{ certificates_version }}" version: "{{ certificates_version }}"
accept_hostkey=yes accept_hostkey: yes
environment: environment:
GIT_SSH: "{{ git_ssh_script }}" GIT_SSH: "{{ git_ssh_script }}"
- name: install requirements - name: install requirements
...@@ -29,11 +29,11 @@ ...@@ -29,11 +29,11 @@
# Need to do this because the www-data user is not properly setup # Need to do this because the www-data user is not properly setup
# and can't run ssh. # and can't run ssh.
- name: change owner to www-data - name: change owner to www-data
file: > file:
path="{{ repo_path }}" path: "{{ repo_path }}"
owner="www-data" owner: "www-data"
group="www-data" group: "www-data"
recurse=yes recurse: yes
state="directory" state: "directory"
- name: start certs service - name: start certs service
service: name="certificates" state="started" service: name="certificates" state="started"
...@@ -46,9 +46,7 @@ ...@@ -46,9 +46,7 @@
dest: "{{ xblock_config_temp_directory.stdout }}/{{ file | basename }}" dest: "{{ xblock_config_temp_directory.stdout }}/{{ file | basename }}"
register: xblock_config_file register: xblock_config_file
- name: Manage xblock configurations - name: Manage xblock configurations
shell: > shell: "{{ python_path }} {{ manage_path }} lms --settings=aws populate_model -f {{ xblock_config_file.dest | quote }} -u {{ user }}"
{{ python_path }} {{ manage_path }} lms --settings=aws
populate_model -f {{ xblock_config_file.dest | quote }} -u {{ user }}
register: command_result register: command_result
changed_when: "'Import complete, 0 new entries created' not in command_result.stdout" changed_when: "'Import complete, 0 new entries created' not in command_result.stdout"
- debug: msg="{{ command_result.stdout }}" - debug: msg="{{ command_result.stdout }}"
......
...@@ -17,22 +17,21 @@ ...@@ -17,22 +17,21 @@
register: mktemp register: mktemp
# This command will fail if this returns zero lines which will prevent # This command will fail if this returns zero lines which will prevent
# the last key from being removed # the last key from being removed
- shell: > - shell: "grep -Fv '{{ ubuntu_public_keys[public_key] }}' {{ keyfile }} > {{ mktemp.stdout }}"
grep -Fv '{{ ubuntu_public_keys[public_key] }}' {{ keyfile }} > {{ mktemp.stdout }} - shell: "while read line; do ssh-keygen -lf /dev/stdin <<<$line; done <{{ mktemp.stdout }}"
- shell: > args:
while read line; do ssh-keygen -lf /dev/stdin <<<$line; done <{{ mktemp.stdout }} executable: /bin/bash
executable=/bin/bash
register: keycheck register: keycheck
- fail: msg="public key check failed!" - fail: msg="public key check failed!"
when: keycheck.stderr != "" when: keycheck.stderr != ""
- command: cp {{ mktemp.stdout }} {{ keyfile }} - command: cp {{ mktemp.stdout }} {{ keyfile }}
- file: > - file:
path={{ keyfile }} path: "{{ keyfile }}"
owner={{ owner }} owner: "{{ owner }}"
mode=0600 mode: 0600
- file: > - file:
path={{ mktemp.stdout }} path: "{{ mktemp.stdout }}"
state=absent state: absent
- shell: wc -l < {{ keyfile }} - shell: wc -l < {{ keyfile }}
register: line_count register: line_count
- fail: msg="There should only be one line in ubuntu's authorized_keys" - fail: msg="There should only be one line in ubuntu's authorized_keys"
......
...@@ -7,6 +7,6 @@ ...@@ -7,6 +7,6 @@
- roles/supervisor/defaults/main.yml - roles/supervisor/defaults/main.yml
tasks: tasks:
- name: supervisor | restart supervisor - name: supervisor | restart supervisor
service: > service:
name={{ supervisor_service }} name: "{{ supervisor_service }}"
state=restarted state: restarted
...@@ -12,8 +12,8 @@ ...@@ -12,8 +12,8 @@
- name: Set hostname - name: Set hostname
hostname: name={{ hostname_fqdn.split('.')[0] }} hostname: name={{ hostname_fqdn.split('.')[0] }}
- name: Update /etc/hosts - name: Update /etc/hosts
lineinfile: > lineinfile:
dest=/etc/hosts dest: /etc/hosts
regexp="^127\.0\.1\.1" regexp: '^127\.0\.1\.1'
line="127.0.1.1{{'\t'}}{{ hostname_fqdn.split('.')[0] }}{{'\t'}}{{ hostname_fqdn }}{{'\t'}}localhost" line: "127.0.1.1{{'\t'}}{{ hostname_fqdn.split('.')[0] }}{{'\t'}}{{ hostname_fqdn }}{{'\t'}}localhost"
state=present state: present
...@@ -33,30 +33,30 @@ ...@@ -33,30 +33,30 @@
# #
- name: setup the analytics_api env file - name: setup the analytics_api env file
template: > template:
src="edx/app/analytics_api/analytics_api_env.j2" src: "edx/app/analytics_api/analytics_api_env.j2"
dest="{{ analytics_api_home }}/analytics_api_env" dest: "{{ analytics_api_home }}/analytics_api_env"
owner={{ analytics_api_user }} owner: "{{ analytics_api_user }}"
group={{ analytics_api_user }} group: "{{ analytics_api_user }}"
mode=0644 mode: 0644
tags: tags:
- install - install
- install:configuration - install:configuration
- name: "add gunicorn configuration file" - name: "add gunicorn configuration file"
template: > template:
src=edx/app/analytics_api/analytics_api_gunicorn.py.j2 src: edx/app/analytics_api/analytics_api_gunicorn.py.j2
dest={{ analytics_api_home }}/analytics_api_gunicorn.py dest: "{{ analytics_api_home }}/analytics_api_gunicorn.py"
become_user: "{{ analytics_api_user }}" become_user: "{{ analytics_api_user }}"
tags: tags:
- install - install
- install:configuration - install:configuration
- name: install application requirements - name: install application requirements
pip: > pip:
requirements="{{ analytics_api_requirements_base }}/{{ item }}" requirements: "{{ analytics_api_requirements_base }}/{{ item }}"
virtualenv="{{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}" virtualenv: "{{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}"
state=present state: present
become_user: "{{ analytics_api_user }}" become_user: "{{ analytics_api_user }}"
with_items: analytics_api_requirements with_items: analytics_api_requirements
tags: tags:
...@@ -64,11 +64,9 @@ ...@@ -64,11 +64,9 @@
- install:app-requirements - install:app-requirements
- name: migrate - name: migrate
shell: > shell: "DB_MIGRATION_USER='{{ COMMON_MYSQL_MIGRATE_USER }}' DB_MIGRATION_PASS='{{ COMMON_MYSQL_MIGRATE_PASS }}' {{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}/bin/python ./manage.py migrate --noinput"
chdir={{ analytics_api_code_dir }} args:
DB_MIGRATION_USER='{{ COMMON_MYSQL_MIGRATE_USER }}' chdir: "{{ analytics_api_code_dir }}"
DB_MIGRATION_PASS='{{ COMMON_MYSQL_MIGRATE_PASS }}'
{{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}/bin/python ./manage.py migrate --noinput
become_user: "{{ analytics_api_user }}" become_user: "{{ analytics_api_user }}"
environment: "{{ analytics_api_environment }}" environment: "{{ analytics_api_environment }}"
when: migrate_db is defined and migrate_db|lower == "yes" when: migrate_db is defined and migrate_db|lower == "yes"
...@@ -77,9 +75,9 @@ ...@@ -77,9 +75,9 @@
- migrate:db - migrate:db
- name: run collectstatic - name: run collectstatic
shell: > shell: "{{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}/bin/python manage.py collectstatic --noinput"
chdir={{ analytics_api_code_dir }} args:
{{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}/bin/python manage.py collectstatic --noinput chdir: "{{ analytics_api_code_dir }}"
become_user: "{{ analytics_api_user }}" become_user: "{{ analytics_api_user }}"
environment: "{{ analytics_api_environment }}" environment: "{{ analytics_api_environment }}"
tags: tags:
...@@ -87,9 +85,9 @@ ...@@ -87,9 +85,9 @@
- assets:gather - assets:gather
- name: create api users - name: create api users
shell: > shell: "{{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}/bin/python manage.py set_api_key {{ item.key }} {{ item.value }}"
chdir={{ analytics_api_code_dir }} args:
{{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}/bin/python manage.py set_api_key {{ item.key }} {{ item.value }} chdir: "{{ analytics_api_code_dir }}"
become_user: "{{ analytics_api_user }}" become_user: "{{ analytics_api_user }}"
environment: "{{ analytics_api_environment }}" environment: "{{ analytics_api_environment }}"
with_dict: ANALYTICS_API_USERS with_dict: ANALYTICS_API_USERS
...@@ -98,29 +96,32 @@ ...@@ -98,29 +96,32 @@
- manage:app-users - manage:app-users
- name: write out the supervisor wrapper - name: write out the supervisor wrapper
template: > template:
src=edx/app/analytics_api/analytics_api.sh.j2 src: edx/app/analytics_api/analytics_api.sh.j2
dest={{ analytics_api_home }}/{{ analytics_api_service_name }}.sh dest: "{{ analytics_api_home }}/{{ analytics_api_service_name }}.sh"
mode=0650 owner={{ supervisor_user }} group={{ common_web_user }} mode: 0650
owner: "{{ supervisor_user }}"
group: "{{ common_web_user }}"
tags: tags:
- install - install
- install:configuration - install:configuration
- name: write supervisord config - name: write supervisord config
template: > template:
src=edx/app/supervisor/conf.d.available/analytics_api.conf.j2 src: edx/app/supervisor/conf.d.available/analytics_api.conf.j2
dest="{{ supervisor_available_dir }}/{{ analytics_api_service_name }}.conf" dest: "{{ supervisor_available_dir }}/{{ analytics_api_service_name }}.conf"
owner={{ supervisor_user }} group={{ common_web_user }} mode=0644 owner: "{{ supervisor_user }}"
group: "{{ common_web_user }}"
mode: 0644
tags: tags:
- install - install
- install:configuration - install:configuration
- name: enable supervisor script - name: enable supervisor script
file: > file:
src={{ supervisor_available_dir }}/{{ analytics_api_service_name }}.conf src: "{{ supervisor_available_dir }}/{{ analytics_api_service_name }}.conf"
dest={{ supervisor_cfg_dir }}/{{ analytics_api_service_name }}.conf dest: "{{ supervisor_cfg_dir }}/{{ analytics_api_service_name }}.conf"
state=link state: link
force=yes force: yes
when: not disable_edx_services when: not disable_edx_services
tags: tags:
- install - install
...@@ -134,10 +135,10 @@ ...@@ -134,10 +135,10 @@
- manage:start - manage:start
- name: create symlinks from the venv bin dir - name: create symlinks from the venv bin dir
file: > file:
src="{{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}/bin/{{ item }}" src: "{{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}/bin/{{ item }}"
dest="{{ COMMON_BIN_DIR }}/{{ item.split('.')[0] }}.analytics_api" dest: "{{ COMMON_BIN_DIR }}/{{ item.split('.')[0] }}.analytics_api"
state=link state: link
with_items: with_items:
- python - python
- pip - pip
...@@ -147,10 +148,10 @@ ...@@ -147,10 +148,10 @@
- install:base - install:base
- name: create symlinks from the repo dir - name: create symlinks from the repo dir
file: > file:
src="{{ analytics_api_code_dir }}/{{ item }}" src: "{{ analytics_api_code_dir }}/{{ item }}"
dest="{{ COMMON_BIN_DIR }}/{{ item.split('.')[0] }}.analytics_api" dest: "{{ COMMON_BIN_DIR }}/{{ item.split('.')[0] }}.analytics_api"
state=link state: link
with_items: with_items:
- manage.py - manage.py
tags: tags:
...@@ -158,11 +159,11 @@ ...@@ -158,11 +159,11 @@
- install:base - install:base
- name: restart analytics_api - name: restart analytics_api
supervisorctl: > supervisorctl:
state=restarted state: restarted
supervisorctl_path={{ supervisor_ctl }} supervisorctl_path: "{{ supervisor_ctl }}"
config={{ supervisor_cfg }} config: "{{ supervisor_cfg }}"
name={{ analytics_api_service_name }} name: "{{ analytics_api_service_name }}"
when: not disable_edx_services when: not disable_edx_services
become_user: "{{ supervisor_service_user }}" become_user: "{{ supervisor_service_user }}"
tags: tags:
......
...@@ -173,20 +173,24 @@ ...@@ -173,20 +173,24 @@
- manage:start - manage:start
- name: Copying nginx configs for {{ role_name }} - name: Copying nginx configs for {{ role_name }}
template: > template:
src=edx/app/nginx/sites-available/{{ role_name }}.j2 src: edx/app/nginx/sites-available/{{ role_name }}.j2
dest={{ '{{' }} nginx_sites_available_dir }}/{{ role_name }} dest: "{{ '{{' }} nginx_sites_available_dir }}/{{ role_name }}"
owner=root group={{ '{{' }} common_web_user }} mode=0640 owner: root
group: "{{ '{{' }} common_web_user }}"
mode: 0640
notify: reload nginx notify: reload nginx
tags: tags:
- install - install
- install:vhosts - install:vhosts
- name: Creating nginx config links for {{ role_name }} - name: Creating nginx config links for {{ role_name }}
file: > file:
src={{ '{{' }} nginx_sites_available_dir }}/{{ role_name }} src: "{{ '{{' }} nginx_sites_available_dir }}/{{ role_name }}"
dest={{ '{{' }} nginx_sites_enabled_dir }}/{{ role_name }} dest: "{{ '{{' }} nginx_sites_enabled_dir }}/{{ role_name }}"
state=link owner=root group=root state: link
owner: root
group: root
notify: reload nginx notify: reload nginx
tags: tags:
- install - install
......
...@@ -26,38 +26,38 @@ ...@@ -26,38 +26,38 @@
with_items: antivirus_debian_pkgs with_items: antivirus_debian_pkgs
- name: create antivirus scanner user - name: create antivirus scanner user
user: > user:
name="{{ antivirus_user }}" name: "{{ antivirus_user }}"
home="{{ antivirus_app_dir }}" home: "{{ antivirus_app_dir }}"
createhome=no createhome: no
shell=/bin/false shell: /bin/false
- name: create antivirus app and data dirs - name: create antivirus app and data dirs
file: > file:
path="{{ item }}" path: "{{ item }}"
state=directory state: directory
owner="{{ antivirus_user }}" owner: "{{ antivirus_user }}"
group="{{ antivirus_user }}" group: "{{ antivirus_user }}"
with_items: with_items:
- "{{ antivirus_app_dir }}" - "{{ antivirus_app_dir }}"
- "{{ antivirus_app_dir }}/data" - "{{ antivirus_app_dir }}/data"
- name: install antivirus s3 scanner script - name: install antivirus s3 scanner script
template: > template:
src=s3_bucket_virus_scan.sh.j2 src: s3_bucket_virus_scan.sh.j2
dest={{ antivirus_app_dir }}/s3_bucket_virus_scan.sh dest: "{{ antivirus_app_dir }}/s3_bucket_virus_scan.sh"
mode=0555 mode: 0555
owner={{ antivirus_user }} owner: "{{ antivirus_user }}"
group={{ antivirus_user }} group: "{{ antivirus_user }}"
- name: install antivirus s3 scanner cronjob - name: install antivirus s3 scanner cronjob
cron: > cron:
name="antivirus-{{ item }}" name: "antivirus-{{ item }}"
job="{{ antivirus_app_dir }}/s3_bucket_virus_scan.sh -b '{{ item }}' -m '{{ ANTIVIRUS_MAILTO }}' -f '{{ ANTIVIRUS_MAILFROM }}'" job: "{{ antivirus_app_dir }}/s3_bucket_virus_scan.sh -b '{{ item }}' -m '{{ ANTIVIRUS_MAILTO }}' -f '{{ ANTIVIRUS_MAILFROM }}'"
backup=yes backup: yes
cron_file=antivirus-{{ item }} cron_file: "antivirus-{{ item }}"
user={{ antivirus_user }} user: "{{ antivirus_user }}"
hour="*" hour: "*"
minute="0" minute: "0"
day="*" day: "*"
with_items: ANTIVIRUS_BUCKETS with_items: ANTIVIRUS_BUCKETS
...@@ -102,7 +102,7 @@ ...@@ -102,7 +102,7 @@
file: file:
path: "{{ item.item }}" path: "{{ item.item }}"
mode: "0644" mode: "0644"
when: > when:
vagrant_home_dir.stat.exists == false and vagrant_home_dir.stat.exists == false and
ansible_distribution in common_debian_variants and ansible_distribution in common_debian_variants and
item.stat.exists item.stat.exists
......
# Install browsermob-proxy, which is used for page performance testing with bok-choy # Install browsermob-proxy, which is used for page performance testing with bok-choy
--- ---
- name: get zip file - name: get zip file
get_url: > get_url:
url={{ browsermob_proxy_url }} url: "{{ browsermob_proxy_url }}"
dest=/var/tmp/browsermob-proxy-{{ browsermob_proxy_version }}.zip dest: "/var/tmp/browsermob-proxy-{{ browsermob_proxy_version }}.zip"
register: download_browsermob_proxy register: download_browsermob_proxy
- name: unzip into /var/tmp/ - name: unzip into /var/tmp/
shell: > shell: "unzip /var/tmp/browsermob-proxy-{{ browsermob_proxy_version }}.zip"
unzip /var/tmp/browsermob-proxy-{{ browsermob_proxy_version }}.zip args:
chdir=/var/tmp chdir: /var/tmp
when: download_browsermob_proxy.changed when: download_browsermob_proxy.changed
- name: move to /etc/browsermob-proxy/ - name: move to /etc/browsermob-proxy/
shell: > shell: "mv /var/tmp/browsermob-proxy-{{ browsermob_proxy_version }} /etc/browsermob-proxy"
mv /var/tmp/browsermob-proxy-{{ browsermob_proxy_version }} /etc/browsermob-proxy
when: download_browsermob_proxy.changed when: download_browsermob_proxy.changed
- name: change permissions of main script - name: change permissions of main script
file: > file:
path=/etc/browsermob-proxy/bin/browsermob-proxy path: "/etc/browsermob-proxy/bin/browsermob-proxy"
mode=0755 mode: 0755
when: download_browsermob_proxy.changed when: download_browsermob_proxy.changed
- name: add wrapper script /usr/local/bin/browsermob-proxy - name: add wrapper script /usr/local/bin/browsermob-proxy
copy: > copy:
src=browsermob-proxy src: browsermob-proxy
dest=/usr/local/bin/browsermob-proxy dest: /usr/local/bin/browsermob-proxy
when: download_browsermob_proxy.changed when: download_browsermob_proxy.changed
- name: change permissions of wrapper script - name: change permissions of wrapper script
file: > file:
path=/usr/local/bin/browsermob-proxy path: /usr/local/bin/browsermob-proxy
mode=0755 mode: 0755
when: download_browsermob_proxy.changed when: download_browsermob_proxy.changed
...@@ -50,15 +50,15 @@ ...@@ -50,15 +50,15 @@
- "chromedriver.stat.mode == '0755'" - "chromedriver.stat.mode == '0755'"
- name: download PhantomJS - name: download PhantomJS
get_url: > get_url:
url={{ phantomjs_url }} url: "{{ phantomjs_url }}"
dest=/var/tmp/{{ phantomjs_tarfile }} dest: "/var/tmp/{{ phantomjs_tarfile }}"
register: download_phantom_js register: download_phantom_js
- name: unpack the PhantomJS tarfile - name: unpack the PhantomJS tarfile
shell: > shell: "tar -xjf /var/tmp/{{ phantomjs_tarfile }}"
tar -xjf /var/tmp/{{ phantomjs_tarfile }} args:
chdir=/var/tmp chdir: "/var/tmp"
when: download_phantom_js.changed when: download_phantom_js.changed
- name: move PhantomJS binary to /usr/local - name: move PhantomJS binary to /usr/local
......
...@@ -43,9 +43,9 @@ ...@@ -43,9 +43,9 @@
- install:app-requirements - install:app-requirements
- name: create nodeenv - name: create nodeenv
shell: > shell: "{{ credentials_venv_dir }}/bin/nodeenv {{ credentials_nodeenv_dir }} --prebuilt"
creates={{ credentials_nodeenv_dir }} args:
{{ credentials_venv_dir }}/bin/nodeenv {{ credentials_nodeenv_dir }} --prebuilt creates: "{{ credentials_nodeenv_dir }}"
become_user: "{{ credentials_user }}" become_user: "{{ credentials_user }}"
tags: tags:
- install - install
...@@ -74,9 +74,12 @@ ...@@ -74,9 +74,12 @@
# var should have more permissive permissions than the rest # var should have more permissive permissions than the rest
- name: create credentials var dirs - name: create credentials var dirs
file: > file:
path="{{ item }}" state=directory mode=0775 path: "{{ item }}"
owner="{{ credentials_user }}" group="{{ common_web_group }}" state: directory
mode: 0775
owner: "{{ credentials_user }}"
group: "{{ common_web_group }}"
with_items: with_items:
- "{{ CREDENTIALS_MEDIA_ROOT }}" - "{{ CREDENTIALS_MEDIA_ROOT }}"
tags: tags:
...@@ -192,20 +195,23 @@ ...@@ -192,20 +195,23 @@
- manage:start - manage:start
- name: Copying nginx configs for credentials - name: Copying nginx configs for credentials
template: > template:
src=edx/app/nginx/sites-available/credentials.j2 src: edx/app/nginx/sites-available/credentials.j2
dest={{ nginx_sites_available_dir }}/credentials dest: "{{ nginx_sites_available_dir }}/credentials"
owner=root group={{ common_web_user }} mode=0640 owner: root
group: "{{ common_web_user }}"
mode: 0640
notify: reload nginx notify: reload nginx
tags: tags:
- install - install
- install:vhosts - install:vhosts
- name: Creating nginx config links for credentials - name: Creating nginx config links for credentials
file: > file:
src={{ nginx_sites_available_dir }}/credentials src: "{{ nginx_sites_available_dir }}/credentials"
dest={{ nginx_sites_enabled_dir }}/credentials dest: "{{ nginx_sites_enabled_dir }}/credentials"
state=link owner=root group=root state: link
owner: root
group: root
notify: reload nginx notify: reload nginx
tags: tags:
- install - install
......
--- ---
- name: check out the demo course - name: check out the demo course
git_2_0_1: > git_2_0_1:
dest={{ demo_code_dir }} repo={{ demo_repo }} version={{ demo_version }} dest: "{{ demo_code_dir }}"
accept_hostkey=yes repo: "{{ demo_repo }}"
version: "{{ demo_version }}"
accept_hostkey: yes
become_user: "{{ demo_edxapp_user }}" become_user: "{{ demo_edxapp_user }}"
register: demo_checkout register: demo_checkout
- name: import demo course - name: import demo course
shell: > shell: "{{ demo_edxapp_venv_bin }}/python ./manage.py cms --settings=aws import {{ demo_edxapp_course_data_dir }} {{ demo_code_dir }}"
{{ demo_edxapp_venv_bin }}/python ./manage.py cms --settings=aws import {{ demo_edxapp_course_data_dir }} {{ demo_code_dir }} args:
chdir={{ demo_edxapp_code_dir }} chdir: "{{ demo_edxapp_code_dir }}"
become_user: "{{ common_web_user }}" become_user: "{{ common_web_user }}"
when: demo_checkout.changed when: demo_checkout.changed
- name: create some test users - name: create some test users
shell: > shell: "{{ demo_edxapp_venv_bin }}/python ./manage.py lms --settings=aws --service-variant lms manage_user {{ item.username}} {{ item.email }} --initial-password-hash {{ item.hashed_password | quote }}"
{{ demo_edxapp_venv_bin }}/python ./manage.py lms --settings=aws --service-variant lms manage_user {{ item.username}} {{ item.email }} --initial-password-hash {{ item.hashed_password | quote }} args:
chdir={{ demo_edxapp_code_dir }} chdir: "{{ demo_edxapp_code_dir }}"
become_user: "{{ common_web_user }}" become_user: "{{ common_web_user }}"
with_items: demo_test_users with_items: demo_test_users
when: demo_checkout.changed when: demo_checkout.changed
- name: create staff user - name: create staff user
shell: > shell: "{{ demo_edxapp_venv_bin }}/python ./manage.py lms --settings=aws --service-variant lms manage_user staff staff@example.com --initial-password-hash {{ demo_hashed_password | quote }} --staff"
{{ demo_edxapp_venv_bin }}/python ./manage.py lms --settings=aws --service-variant lms manage_user staff staff@example.com --initial-password-hash {{ demo_hashed_password | quote }} --staff args:
chdir={{ demo_edxapp_code_dir }} chdir: "{{ demo_edxapp_code_dir }}"
become_user: "{{ common_web_user }}" become_user: "{{ common_web_user }}"
when: when:
- demo_checkout.changed - demo_checkout.changed
- DEMO_CREATE_STAFF_USER - DEMO_CREATE_STAFF_USER
- name: enroll test users in the demo course - name: enroll test users in the demo course
shell: > shell: "{{ demo_edxapp_venv_bin }}/python ./manage.py lms --settings=aws --service-variant lms enroll_user_in_course -e {{ item.email }} -c {{ demo_course_id }}"
{{ demo_edxapp_venv_bin }}/python ./manage.py lms --settings=aws --service-variant lms enroll_user_in_course -e {{ item.email }} -c {{ demo_course_id }} args:
chdir={{ demo_edxapp_code_dir }} chdir: "{{ demo_edxapp_code_dir }}"
become_user: "{{ common_web_user }}" become_user: "{{ common_web_user }}"
with_items: with_items:
- "{{ demo_test_users }}" - "{{ demo_test_users }}"
...@@ -43,15 +45,15 @@ ...@@ -43,15 +45,15 @@
- name: add test users to the certificate whitelist - name: add test users to the certificate whitelist
shell: > shell: "{{ demo_edxapp_venv_bin }}/python ./manage.py lms --settings=aws --service-variant lms cert_whitelist -a {{ item.email }} -c {{ demo_course_id }}"
{{ demo_edxapp_venv_bin }}/python ./manage.py lms --settings=aws --service-variant lms cert_whitelist -a {{ item.email }} -c {{ demo_course_id }} args:
chdir={{ demo_edxapp_code_dir }} chdir: "{{ demo_edxapp_code_dir }}"
with_items: demo_test_users with_items: demo_test_users
when: demo_checkout.changed when: demo_checkout.changed
- name: seed the forums for the demo course - name: seed the forums for the demo course
shell: > shell: "{{ demo_edxapp_venv_bin }}/python ./manage.py lms --settings=aws seed_permissions_roles {{ demo_course_id }}"
{{ demo_edxapp_venv_bin }}/python ./manage.py lms --settings=aws seed_permissions_roles {{ demo_course_id }} args:
chdir={{ demo_edxapp_code_dir }} chdir: "{{ demo_edxapp_code_dir }}"
with_items: demo_test_users with_items: demo_test_users
when: demo_checkout.changed when: demo_checkout.changed
...@@ -31,8 +31,10 @@ ...@@ -31,8 +31,10 @@
# - demo # - demo
- name: create demo app and data dirs - name: create demo app and data dirs
file: > file:
path="{{ demo_app_dir }}" state=directory path: "{{ demo_app_dir }}"
owner="{{ demo_edxapp_user }}" group="{{ common_web_group }}" state: directory
owner: "{{ demo_edxapp_user }}"
group: "{{ common_web_group }}"
- include: deploy.yml tags=deploy - include: deploy.yml tags=deploy
...@@ -77,9 +77,9 @@ ...@@ -77,9 +77,9 @@
- devstack:install - devstack:install
- name: create nodeenv - name: create nodeenv
shell: > shell: "{{ discovery_venv_dir }}/bin/nodeenv {{ discovery_nodeenv_dir }} --node={{ discovery_node_version }} --prebuilt"
creates={{ discovery_nodeenv_dir }} args:
{{ discovery_venv_dir }}/bin/nodeenv {{ discovery_nodeenv_dir }} --node={{ discovery_node_version }} --prebuilt creates: "{{ discovery_nodeenv_dir }}"
become_user: "{{ discovery_user }}" become_user: "{{ discovery_user }}"
tags: tags:
- install - install
...@@ -94,9 +94,9 @@ ...@@ -94,9 +94,9 @@
- install:app-requirements - install:app-requirements
- name: install bower dependencies - name: install bower dependencies
shell: > shell: ". {{ discovery_nodeenv_bin }}/activate && {{ discovery_node_bin }}/bower install --production --config.interactive=false"
chdir={{ discovery_code_dir }} args:
. {{ discovery_nodeenv_bin }}/activate && {{ discovery_node_bin }}/bower install --production --config.interactive=false chdir: "{{ discovery_code_dir }}"
become_user: "{{ discovery_user }}" become_user: "{{ discovery_user }}"
tags: tags:
- install - install
......
...@@ -84,11 +84,9 @@ ...@@ -84,11 +84,9 @@
- migrate:db - migrate:db
- name: Populate countries - name: Populate countries
shell: > shell: "DB_MIGRATION_USER={{ COMMON_MYSQL_MIGRATE_USER }} DB_MIGRATION_PASS={{ COMMON_MYSQL_MIGRATE_PASS }} {{ ecommerce_venv_dir }}/bin/python ./manage.py oscar_populate_countries"
chdir={{ ecommerce_code_dir }} args:
DB_MIGRATION_USER={{ COMMON_MYSQL_MIGRATE_USER }} chdir: {{ ecommerce_code_dir }}
DB_MIGRATION_PASS={{ COMMON_MYSQL_MIGRATE_PASS }}
{{ ecommerce_venv_dir }}/bin/python ./manage.py oscar_populate_countries
become_user: "{{ ecommerce_user }}" become_user: "{{ ecommerce_user }}"
environment: "{{ ecommerce_environment }}" environment: "{{ ecommerce_environment }}"
when: migrate_db is defined and migrate_db|lower == "yes" when: migrate_db is defined and migrate_db|lower == "yes"
......
...@@ -110,10 +110,10 @@ ...@@ -110,10 +110,10 @@
- install:app-requirements - install:app-requirements
- name: Create the virtualenv to install the Python requirements - name: Create the virtualenv to install the Python requirements
command: > command: "virtualenv {{ edxapp_venv_dir }}"
virtualenv {{ edxapp_venv_dir }} args:
chdir={{ edxapp_code_dir }} chdir: "{{ edxapp_code_dir }}"
creates={{ edxapp_venv_dir }}/bin/pip creates: "{{ edxapp_venv_dir }}/bin/pip"
become_user: "{{ edxapp_user }}" become_user: "{{ edxapp_user }}"
environment: "{{ edxapp_environment }}" environment: "{{ edxapp_environment }}"
tags: tags:
...@@ -134,9 +134,9 @@ ...@@ -134,9 +134,9 @@
# Need to use command rather than pip so that we can maintain the context of our current working directory; some # Need to use command rather than pip so that we can maintain the context of our current working directory; some
# requirements are pathed relative to the edx-platform repo. Using the pip from inside the virtual environment implicitly # requirements are pathed relative to the edx-platform repo. Using the pip from inside the virtual environment implicitly
# installs everything into that virtual environment. # installs everything into that virtual environment.
command: > command: "{{ edxapp_venv_dir }}/bin/pip install {{ COMMON_PIP_VERBOSITY }} -i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w -r {{ item.item }}"
{{ edxapp_venv_dir }}/bin/pip install {{ COMMON_PIP_VERBOSITY }} -i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w -r {{ item.item }} args:
chdir={{ edxapp_code_dir }} chdir: "{{ edxapp_code_dir }}"
become_user: "{{ edxapp_user }}" become_user: "{{ edxapp_user }}"
environment: "{{ edxapp_environment }}" environment: "{{ edxapp_environment }}"
when: item.stat.exists when: item.stat.exists
...@@ -151,9 +151,9 @@ ...@@ -151,9 +151,9 @@
# Need to use shell rather than pip so that we can maintain the context of our current working directory; some # Need to use shell rather than pip so that we can maintain the context of our current working directory; some
# requirements are pathed relative to the edx-platform repo. Using the pip from inside the virtual environment implicitly # requirements are pathed relative to the edx-platform repo. Using the pip from inside the virtual environment implicitly
# installs everything into that virtual environment. # installs everything into that virtual environment.
shell: > shell: "{{ edxapp_venv_dir }}/bin/pip install {{ COMMON_PIP_VERBOSITY }} -i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w -r {{ item }}"
{{ edxapp_venv_dir }}/bin/pip install {{ COMMON_PIP_VERBOSITY }} -i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w -r {{ item }} args:
chdir={{ edxapp_code_dir }} chdir: "{{ edxapp_code_dir }}"
with_items: with_items:
- "{{ private_requirements_file }}" - "{{ private_requirements_file }}"
become_user: "{{ edxapp_user }}" become_user: "{{ edxapp_user }}"
...@@ -197,9 +197,9 @@ ...@@ -197,9 +197,9 @@
# Need to use shell rather than pip so that we can maintain the context of our current working directory; some # Need to use shell rather than pip so that we can maintain the context of our current working directory; some
# requirements are pathed relative to the edx-platform repo. Using the pip from inside the virtual environment implicitly # requirements are pathed relative to the edx-platform repo. Using the pip from inside the virtual environment implicitly
# installs everything into that virtual environment. # installs everything into that virtual environment.
shell: > shell: "{{ edxapp_venv_dir }}/bin/pip install {{ COMMON_PIP_VERBOSITY }} -i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w -r {{ item }}"
{{ edxapp_venv_dir }}/bin/pip install {{ COMMON_PIP_VERBOSITY }} -i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w -r {{ item }} args:
chdir={{ edxapp_code_dir }} chdir: "{{ edxapp_code_dir }}"
with_items: with_items:
- "{{ sandbox_base_requirements }}" - "{{ sandbox_base_requirements }}"
- "{{ sandbox_local_requirements }}" - "{{ sandbox_local_requirements }}"
...@@ -211,8 +211,7 @@ ...@@ -211,8 +211,7 @@
- install:app-requirements - install:app-requirements
- name: create nodeenv - name: create nodeenv
shell: > shell: "{{ edxapp_venv_dir }}/bin/nodeenv {{ edxapp_nodeenv_dir }} --node={{ edxapp_node_version }} --prebuilt"
{{ edxapp_venv_dir }}/bin/nodeenv {{ edxapp_nodeenv_dir }} --node={{ edxapp_node_version }} --prebuilt
args: args:
creates: "{{ edxapp_nodeenv_dir }}" creates: "{{ edxapp_nodeenv_dir }}"
tags: tags:
...@@ -223,8 +222,7 @@ ...@@ -223,8 +222,7 @@
# This needs to be done as root since npm is weird about # This needs to be done as root since npm is weird about
# chown - https://github.com/npm/npm/issues/3565 # chown - https://github.com/npm/npm/issues/3565
- name: Set the npm registry - name: Set the npm registry
shell: > shell: "npm config set registry '{{ COMMON_NPM_MIRROR_URL }}'"
npm config set registry '{{ COMMON_NPM_MIRROR_URL }}'
args: args:
creates: "{{ edxapp_app_dir }}/.npmrc" creates: "{{ edxapp_app_dir }}/.npmrc"
environment: "{{ edxapp_environment }}" environment: "{{ edxapp_environment }}"
...@@ -279,9 +277,9 @@ ...@@ -279,9 +277,9 @@
- install:app-requirements - install:app-requirements
- name: code sandbox | Install sandbox requirements into sandbox venv - name: code sandbox | Install sandbox requirements into sandbox venv
shell: > shell: "{{ edxapp_sandbox_venv_dir }}/bin/pip install -i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w -r {{ item }}"
{{ edxapp_sandbox_venv_dir }}/bin/pip install -i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w -r {{ item }} args:
chdir={{ edxapp_code_dir }} chdir: "{{ edxapp_code_dir }}"
with_items: with_items:
- "{{ sandbox_local_requirements }}" - "{{ sandbox_local_requirements }}"
- "{{ sandbox_post_requirements }}" - "{{ sandbox_post_requirements }}"
......
...@@ -21,31 +21,28 @@ ...@@ -21,31 +21,28 @@
# #
# #
- name: download elasticsearch plugin - name: download elasticsearch plugin
shell: > shell: "./npi fetch {{ ELASTICSEARCH_MONITOR_PLUGIN }} -y"
./npi fetch {{ ELASTICSEARCH_MONITOR_PLUGIN }} -y
args: args:
chdir: "{{ NEWRELIC_NPI_PREFIX }}" chdir: "{{ NEWRELIC_NPI_PREFIX }}"
creates: "{{ NEWRELIC_NPI_PREFIX }}/plugins/{{ ELASTICSEARCH_MONITOR_PLUGIN }}.compressed" creates: "{{ NEWRELIC_NPI_PREFIX }}/plugins/{{ ELASTICSEARCH_MONITOR_PLUGIN }}.compressed"
become_user: "{{ NEWRELIC_USER }}" become_user: "{{ NEWRELIC_USER }}"
- name: prepare elasticsearch plugin - name: prepare elasticsearch plugin
shell: > shell: "./npi prepare {{ ELASTICSEARCH_MONITOR_PLUGIN }} -n"
./npi prepare {{ ELASTICSEARCH_MONITOR_PLUGIN }} -n
args: args:
chdir: "{{ NEWRELIC_NPI_PREFIX }}" chdir: "{{ NEWRELIC_NPI_PREFIX }}"
become_user: "{{ NEWRELIC_USER }}" become_user: "{{ NEWRELIC_USER }}"
- name: configure elasticsearch plugin - name: configure elasticsearch plugin
template: > template:
src=plugins/me.snov.newrelic-elasticsearch/newrelic-elasticsearch-plugin-1.4.1/config/plugin.json.j2 src: "plugins/me.snov.newrelic-elasticsearch/newrelic-elasticsearch-plugin-1.4.1/config/plugin.json.j2"
dest={{ NEWRELIC_NPI_PREFIX }}/plugins/{{ ELASTICSEARCH_MONITOR_PLUGIN }}/newrelic-elasticsearch-plugin-1.4.1/config/plugin.json dest: "{{ NEWRELIC_NPI_PREFIX }}/plugins/{{ ELASTICSEARCH_MONITOR_PLUGIN }}/newrelic-elasticsearch-plugin-1.4.1/config/plugin.json"
owner={{ NEWRELIC_USER }} owner: "{{ NEWRELIC_USER }}"
mode=0644 mode: 0644
- name: register/start elasticsearch plugin - name: register/start elasticsearch plugin
shell: > shell: "./npi add-service {{ ELASTICSEARCH_MONITOR_PLUGIN }} --start --user={{ NEWRELIC_USER }}"
./npi add-service {{ ELASTICSEARCH_MONITOR_PLUGIN }} --start --user={{ NEWRELIC_USER }} args:
args: chdir: "{{ NEWRELIC_NPI_PREFIX }}"
chdir: "{{ NEWRELIC_NPI_PREFIX }}"
become_user: "root" become_user: "root"
...@@ -33,33 +33,35 @@ ...@@ -33,33 +33,35 @@
with_items: gh_mirror_pip_pkgs with_items: gh_mirror_pip_pkgs
- name: install debian packages - name: install debian packages
apt: > apt:
pkg={{ ",".join(gh_mirror_debian_pkgs) }} pkg: '{{ ",".join(gh_mirror_debian_pkgs) }}'
state=present state: present
update_cache=yes update_cache: yes
- name: create gh_mirror user - name: create gh_mirror user
user: > user:
name={{ gh_mirror_user }} name: "{{ gh_mirror_user }}"
state=present state: present
- name: create the gh_mirror data directory - name: create the gh_mirror data directory
file: > file:
path={{ gh_mirror_data_dir }} path: "{{ gh_mirror_data_dir }}"
state=directory state: directory
owner={{ gh_mirror_user }} owner: "{{ gh_mirror_user }}"
group={{ gh_mirror_group }} group: "{{ gh_mirror_group }}"
- name: create the gh_mirror app directory - name: create the gh_mirror app directory
file: > file:
path={{ gh_mirror_app_dir }} path: "{{ gh_mirror_app_dir }}"
state=directory state: directory
- name: create org config - name: create org config
template: src=orgs.yml.j2 dest={{ gh_mirror_app_dir }}/orgs.yml template: src=orgs.yml.j2 dest={{ gh_mirror_app_dir }}/orgs.yml
- name: copying sync scripts - name: copying sync scripts
copy: src={{ item }} dest={{ gh_mirror_app_dir }}/{{ item }} copy:
src: "{{ item }}"
dest: "{{ gh_mirror_app_dir }}/{{ item }}"
with_items: gh_mirror_app_files with_items: gh_mirror_app_files
- name: creating cron job to update repos - name: creating cron job to update repos
......
...@@ -15,9 +15,9 @@ ...@@ -15,9 +15,9 @@
# #
# #
- name: restart gitreload - name: restart gitreload
supervisorctl: > supervisorctl:
name=gitreload name: gitreload
supervisorctl_path={{ supervisor_ctl }} supervisorctl_path: "{{ supervisor_ctl }}"
config={{ supervisor_cfg }} config: "{{ supervisor_cfg }}"
state=restarted state: restarted
when: not disable_edx_services when: not disable_edx_services
...@@ -6,29 +6,29 @@ ...@@ -6,29 +6,29 @@
with_items: GITRELOAD_REPOS with_items: GITRELOAD_REPOS
- name: do import of courses - name: do import of courses
shell: > shell: "SERVICE_VARIANT=lms {{ edxapp_venv_bin }}/python manage.py lms --settings=aws git_add_course {{ item.url }} {{ GITRELOAD_REPODIR }}/{{ item.name }}"
executable=/bin/bash args:
chdir="{{ edxapp_code_dir }}" executable: /bin/bash
SERVICE_VARIANT=lms {{ edxapp_venv_bin }}/python manage.py lms --settings=aws git_add_course {{ item.url }} {{ GITRELOAD_REPODIR }}/{{ item.name }} chdir: "{{ edxapp_code_dir }}"
become_user: "{{ common_web_user }}" become_user: "{{ common_web_user }}"
with_items: GITRELOAD_REPOS with_items: GITRELOAD_REPOS
- name: change ownership on repos for access by edxapp and www-data - name: change ownership on repos for access by edxapp and www-data
file: > file:
path={{ GITRELOAD_REPODIR }} path: "{{ GITRELOAD_REPODIR }}"
state=directory state: directory
owner={{ common_web_user }} owner: "{{ common_web_user }}"
owner={{ common_web_group }} group: "{{ common_web_group }}"
recurse=yes recurse: yes
- name: change group on repos if using devstack - name: change group on repos if using devstack
file: > file:
path={{ GITRELOAD_REPODIR }} path: "{{ GITRELOAD_REPODIR }}"
state=directory state: directory
group={{ edxapp_user }} group: "{{ edxapp_user }}"
recurse=yes recurse: yes
when: devstack when: devstack
- name: change mode on repos with using devstack - name: change mode on repos with using devstack
command: chmod -R o=rwX,g=srwX,o=rX {{ GITRELOAD_REPODIR }} command: "chmod -R o=rwX,g=srwX,o=rX {{ GITRELOAD_REPODIR }}"
when: devstack when: devstack
...@@ -11,10 +11,10 @@ ...@@ -11,10 +11,10 @@
tags: course_pull tags: course_pull
- name: install gitreload - name: install gitreload
pip: > pip:
name=git+{{ gitreload_repo }}@{{ gitreload_version }}#egg=gitreload name: "git+{{ gitreload_repo }}@{{ gitreload_version }}#egg=gitreload"
virtualenv={{ gitreload_venv }} virtualenv: "{{ gitreload_venv }}"
extra_args="--exists-action w" extra_args: "--exists-action w"
become_user: "{{ gitreload_user }}" become_user: "{{ gitreload_user }}"
notify: restart gitreload notify: restart gitreload
...@@ -24,22 +24,22 @@ ...@@ -24,22 +24,22 @@
notify: restart gitreload notify: restart gitreload
- name: "add gunicorn configuration file" - name: "add gunicorn configuration file"
template: > template:
src=edx/app/gitreload/gitreload_gunicorn.py.j2 dest={{ gitreload_dir }}/gitreload_gunicorn.py src: "edx/app/gitreload/gitreload_gunicorn.py.j2"
dest: "{{ gitreload_dir }}/gitreload_gunicorn.py"
become_user: "{{ gitreload_user }}" become_user: "{{ gitreload_user }}"
notify: restart gitreload notify: restart gitreload
- name: "writing supervisor script" - name: "writing supervisor script"
template: > template:
src=edx/app/supervisor/conf.available.d/gitreload.conf.j2 dest={{ supervisor_available_dir }}/gitreload.conf src: "edx/app/supervisor/conf.available.d/gitreload.conf.j2"
dest: "{{ supervisor_available_dir }}/gitreload.conf"
owner={{ supervisor_user }} group={{ common_web_user }} mode=0644 owner: "{{ supervisor_user }}"
group: "{{ common_web_user }}"
mode: 0644
- name: "enable supervisor script" - name: "enable supervisor script"
file: > file:
src={{ supervisor_available_dir }}/gitreload.conf src: "{{ supervisor_available_dir }}/gitreload.conf"
dest={{ supervisor_cfg_dir }}/gitreload.conf dest: "{{ supervisor_cfg_dir }}/gitreload.conf"
owner={{ supervisor_user }} group={{ common_web_user }} mode=0644 owner: "{{ supervisor_user }}"
group: "{{ common_web_user }}"
mode: 0644
state=link force=yes state: link
force: yes
when: not disable_edx_services when: not disable_edx_services
# call supervisorctl update. this reloads # call supervisorctl update. this reloads
...@@ -54,9 +54,9 @@ ...@@ -54,9 +54,9 @@
when: not disable_edx_services when: not disable_edx_services
- name: ensure gitreload is started - name: ensure gitreload is started
supervisorctl: > supervisorctl:
name=gitreload name: gitreload
supervisorctl_path={{ supervisor_ctl }} supervisorctl_path: "{{ supervisor_ctl }}"
config={{ supervisor_cfg }} config: "{{ supervisor_cfg }}"
state=started state: started
when: not disable_edx_services when: not disable_edx_services
...@@ -38,25 +38,25 @@ ...@@ -38,25 +38,25 @@
- deploy - deploy
- name: create gitreload user - name: create gitreload user
user: > user:
name="{{ gitreload_user }}" name: "{{ gitreload_user }}"
home="{{ gitreload_dir }}" home: "{{ gitreload_dir }}"
createhome=no createhome: no
shell=/bin/false shell: /bin/false
- name: ensure home folder exists - name: ensure home folder exists
file: > file:
path={{ gitreload_dir }} path: "{{ gitreload_dir }}"
state=directory state: directory
owner={{ gitreload_user }} owner: "{{ gitreload_user }}"
group={{ gitreload_user }} group: "{{ gitreload_user }}"
- name: ensure repo dir exists - name: ensure repo dir exists
file: > file:
path={{ GITRELOAD_REPODIR }} path: "{{ GITRELOAD_REPODIR }}"
state=directory state: directory
owner={{ common_web_user }} owner: "{{ common_web_user }}"
group={{ common_web_group }} group: "{{ common_web_group }}"
- name: grab ssh host keys - name: grab ssh host keys
shell: ssh-keyscan {{ item }} shell: ssh-keyscan {{ item }}
...@@ -65,17 +65,17 @@ ...@@ -65,17 +65,17 @@
register: gitreload_repo_host_keys register: gitreload_repo_host_keys
- name: add host keys if needed to known_hosts - name: add host keys if needed to known_hosts
lineinfile: > lineinfile:
create=yes create: yes
dest=~/.ssh/known_hosts dest: ~/.ssh/known_hosts
line="{{ item.stdout }}" line: "{{ item.stdout }}"
become_user: "{{ common_web_user }}" become_user: "{{ common_web_user }}"
with_items: gitreload_repo_host_keys.results with_items: gitreload_repo_host_keys.results
- name: create a symlink for venv python - name: create a symlink for venv python
file: > file:
src="{{ gitreload_venv_bin }}/{{ item }}" src: "{{ gitreload_venv_bin }}/{{ item }}"
dest={{ COMMON_BIN_DIR }}/{{ item }}.gitreload dest: "{{ COMMON_BIN_DIR }}/{{ item }}.gitreload"
state=link state: link
with_items: with_items:
- python - python
......
...@@ -63,21 +63,21 @@ ...@@ -63,21 +63,21 @@
tags: gluster tags: gluster
- name: all | mount volume - name: all | mount volume
mount: > mount:
name={{ item.mount_location }} name: "{{ item.mount_location }}"
src={{ gluster_primary_ip }}:{{ item.name }} src: "{{ gluster_primary_ip }}:{{ item.name }}"
fstype=glusterfs fstype: glusterfs
state=mounted state: mounted
opts=defaults,_netdev opts: defaults,_netdev
with_items: gluster_volumes with_items: gluster_volumes
tags: gluster tags: gluster
# This required due to an annoying bug in Ubuntu and gluster where it tries to mount the system # This required due to an annoying bug in Ubuntu and gluster where it tries to mount the system
# before the network stack is up and can't lookup 127.0.0.1 # before the network stack is up and can't lookup 127.0.0.1
- name: all | sleep mount - name: all | sleep mount
lineinfile: > lineinfile:
dest=/etc/rc.local dest: /etc/rc.local
line='sleep 5; /bin/mount -a' line: 'sleep 5; /bin/mount -a'
regexp='sleep 5; /bin/mount -a' regexp: 'sleep 5; /bin/mount -a'
insertbefore='exit 0' insertbefore: 'exit 0'
tags: gluster tags: gluster
...@@ -76,20 +76,17 @@ ...@@ -76,20 +76,17 @@
- { url: "{{ GO_SERVER_GITHUB_PR_PLUGIN_JAR_URL }}", md5: "{{ GO_SERVER_GITHUB_PR_PLUGIN_MD5 }}" } - { url: "{{ GO_SERVER_GITHUB_PR_PLUGIN_JAR_URL }}", md5: "{{ GO_SERVER_GITHUB_PR_PLUGIN_MD5 }}" }
- name: generate line for go-server password file for admin user - name: generate line for go-server password file for admin user
command: > command: "/usr/bin/htpasswd -nbs \"{{ GO_SERVER_ADMIN_USERNAME }}\" \"{{ GO_SERVER_ADMIN_PASSWORD }}\""
/usr/bin/htpasswd -nbs "{{ GO_SERVER_ADMIN_USERNAME }}" "{{ GO_SERVER_ADMIN_PASSWORD }}"
register: admin_user_password_line register: admin_user_password_line
when: GO_SERVER_ADMIN_USERNAME and GO_SERVER_ADMIN_PASSWORD when: GO_SERVER_ADMIN_USERNAME and GO_SERVER_ADMIN_PASSWORD
- name: generate line for go-server password file for backup user - name: generate line for go-server password file for backup user
command: > command: "/usr/bin/htpasswd -nbs \"{{ GO_SERVER_BACKUP_USERNAME }}\" \"{{ GO_SERVER_BACKUP_PASSWORD }}\""
/usr/bin/htpasswd -nbs "{{ GO_SERVER_BACKUP_USERNAME }}" "{{ GO_SERVER_BACKUP_PASSWORD }}"
register: backup_user_password_line register: backup_user_password_line
when: GO_SERVER_BACKUP_USERNAME and GO_SERVER_BACKUP_PASSWORD when: GO_SERVER_BACKUP_USERNAME and GO_SERVER_BACKUP_PASSWORD
- name: generate line for go-server password file for gomatic user - name: generate line for go-server password file for gomatic user
command: > command: "/usr/bin/htpasswd -nbs \"{{ GO_SERVER_GOMATIC_USERNAME }}\" \"{{ GO_SERVER_GOMATIC_PASSWORD }}\""
/usr/bin/htpasswd -nbs "{{ GO_SERVER_GOMATIC_USERNAME }}" "{{ GO_SERVER_GOMATIC_PASSWORD }}"
register: gomatic_user_password_line register: gomatic_user_password_line
when: GO_SERVER_GOMATIC_USERNAME and GO_SERVER_GOMATIC_PASSWORD when: GO_SERVER_GOMATIC_USERNAME and GO_SERVER_GOMATIC_PASSWORD
......
...@@ -23,68 +23,83 @@ ...@@ -23,68 +23,83 @@
# #
- name: install system packages - name: install system packages
apt: > apt:
pkg={{ item }} pkg: "{{ item }}"
state=present state: present
with_items: hadoop_common_debian_pkgs with_items: hadoop_common_debian_pkgs
- name: ensure group exists - name: ensure group exists
group: name={{ hadoop_common_group }} system=yes state=present group:
name: "{{ hadoop_common_group }}"
system: yes
state: present
- name: ensure user exists - name: ensure user exists
user: > user:
name={{ hadoop_common_user }} name: "{{ hadoop_common_user }}"
group={{ hadoop_common_group }} group: "{{ hadoop_common_group }}"
home={{ HADOOP_COMMON_USER_HOME }} createhome=yes home: "{{ HADOOP_COMMON_USER_HOME }}"
shell=/bin/bash system=yes generate_ssh_key=yes createhome: yes
state=present shell: /bin/bash
system: yes
generate_ssh_key: yes
state: present
- name: own key authorized - name: own key authorized
file: > file:
src={{ HADOOP_COMMON_USER_HOME }}/.ssh/id_rsa.pub src: "{{ HADOOP_COMMON_USER_HOME }}/.ssh/id_rsa.pub"
dest={{ HADOOP_COMMON_USER_HOME }}/.ssh/authorized_keys dest: "{{ HADOOP_COMMON_USER_HOME }}/.ssh/authorized_keys"
owner={{ hadoop_common_user }} group={{ hadoop_common_group }} state=link owner: "{{ hadoop_common_user }}"
group: "{{ hadoop_common_group }}"
state: link
- name: ssh configured - name: ssh configured
template: > template:
src=hadoop_user_ssh_config.j2 src: hadoop_user_ssh_config.j2
dest={{ HADOOP_COMMON_USER_HOME }}/.ssh/config dest: "{{ HADOOP_COMMON_USER_HOME }}/.ssh/config"
mode=0600 owner={{ hadoop_common_user }} group={{ hadoop_common_group }} mode: 0600
owner: "{{ hadoop_common_user }}"
group: "{{ hadoop_common_group }}"
- name: ensure user is in sudoers - name: ensure user is in sudoers
lineinfile: > lineinfile:
dest=/etc/sudoers state=present dest: /etc/sudoers
regexp='^%hadoop ALL\=' line='%hadoop ALL=(ALL) NOPASSWD:ALL' state: present
validate='visudo -cf %s' regexp: "'^%hadoop ALL\=' line='%hadoop ALL=(ALL) NOPASSWD:ALL'"
validate: 'visudo -cf %s'
- name: check if downloaded and extracted - name: check if downloaded and extracted
stat: path={{ HADOOP_COMMON_HOME }} stat: path={{ HADOOP_COMMON_HOME }}
register: extracted_hadoop_dir register: extracted_hadoop_dir
- name: distribution downloaded - name: distribution downloaded
get_url: > get_url:
url={{ hadoop_common_dist.url }} url: "{{ hadoop_common_dist.url }}"
sha256sum={{ hadoop_common_dist.sha256sum }} sha256sum: "{{ hadoop_common_dist.sha256sum }}"
dest={{ hadoop_common_temporary_dir }} dest: "{{ hadoop_common_temporary_dir }}"
when: not extracted_hadoop_dir.stat.exists when: not extracted_hadoop_dir.stat.exists
- name: distribution extracted - name: distribution extracted
shell: > shell: "tar -xzf {{ hadoop_common_temporary_dir }}/{{ hadoop_common_dist.filename }} && chown -R {{ hadoop_common_user }}:{{ hadoop_common_group }} hadoop-{{ HADOOP_COMMON_VERSION }}"
chdir={{ HADOOP_COMMON_USER_HOME }} args:
tar -xzf {{ hadoop_common_temporary_dir }}/{{ hadoop_common_dist.filename }} && chown -R {{ hadoop_common_user }}:{{ hadoop_common_group }} hadoop-{{ HADOOP_COMMON_VERSION }} chdir: "{{ HADOOP_COMMON_USER_HOME }}"
when: not extracted_hadoop_dir.stat.exists when: not extracted_hadoop_dir.stat.exists
- name: versioned directory symlink created - name: versioned directory symlink created
file: > file:
src={{ HADOOP_COMMON_USER_HOME }}/hadoop-{{ HADOOP_COMMON_VERSION }} src: "{{ HADOOP_COMMON_USER_HOME }}/hadoop-{{ HADOOP_COMMON_VERSION }}"
dest={{ HADOOP_COMMON_HOME }} dest: "{{ HADOOP_COMMON_HOME }}"
owner={{ hadoop_common_user }} group={{ hadoop_common_group }} state=link owner: "{{ hadoop_common_user }}"
group: "{{ hadoop_common_group }}"
state: link
- name: configuration installed - name: configuration installed
template: > template:
src={{ item }}.j2 src: "{{ item }}.j2"
dest={{ HADOOP_COMMON_CONF_DIR }}/{{ item }} dest: "{{ HADOOP_COMMON_CONF_DIR }}/{{ item }}"
mode=0640 owner={{ hadoop_common_user }} group={{ hadoop_common_group }} mode: 0640
owner: "{{ hadoop_common_user }}"
group: "{{ hadoop_common_group }}"
with_items: with_items:
- hadoop-env.sh - hadoop-env.sh
- mapred-site.xml - mapred-site.xml
...@@ -93,79 +108,84 @@ ...@@ -93,79 +108,84 @@
- yarn-site.xml - yarn-site.xml
- name: upstart scripts installed - name: upstart scripts installed
template: > template:
src={{ item }}.j2 src: "{{ item }}.j2"
dest=/etc/init/{{ item }} dest: "/etc/init/{{ item }}"
mode=0640 owner=root group=root mode: 0640
owner: root
group: root
with_items: with_items:
- hdfs.conf - hdfs.conf
- yarn.conf - yarn.conf
- name: hadoop env file exists - name: hadoop env file exists
file: > file:
path={{ hadoop_common_env }} state=touch path: "{{ hadoop_common_env }}"
owner={{ hadoop_common_user }} group={{ hadoop_common_group }} state: touch
owner: "{{ hadoop_common_user }}"
group: "{{ hadoop_common_group }}"
- name: env vars sourced in bashrc - name: env vars sourced in bashrc
lineinfile: > lineinfile:
dest={{ HADOOP_COMMON_USER_HOME }}/.bashrc dest: "{{ HADOOP_COMMON_USER_HOME }}/.bashrc"
state=present state: present
regexp="^. {{ hadoop_common_env }}" regexp: "^. {{ hadoop_common_env }}"
line=". {{ hadoop_common_env }}" line: ". {{ hadoop_common_env }}"
insertbefore=BOF insertbefore: BOF
- name: env vars sourced in hadoop env - name: env vars sourced in hadoop env
lineinfile: > lineinfile:
dest={{ hadoop_common_env }} state=present dest: "{{ hadoop_common_env }}"
regexp="^. {{ HADOOP_COMMON_CONF_DIR }}/hadoop-env.sh" line=". {{ HADOOP_COMMON_CONF_DIR }}/hadoop-env.sh" state: present
regexp: "^. {{ HADOOP_COMMON_CONF_DIR }}/hadoop-env.sh"
line: ". {{ HADOOP_COMMON_CONF_DIR }}/hadoop-env.sh"
- name: check if native libraries need to be built - name: check if native libraries need to be built
stat: path={{ HADOOP_COMMON_USER_HOME }}/.native_libs_built stat: path={{ HADOOP_COMMON_USER_HOME }}/.native_libs_built
register: native_libs_built register: native_libs_built
- name: protobuf downloaded - name: protobuf downloaded
get_url: > get_url:
url={{ hadoop_common_protobuf_dist.url }} url: "{{ hadoop_common_protobuf_dist.url }}"
sha256sum={{ hadoop_common_protobuf_dist.sha256sum }} sha256sum: "{{ hadoop_common_protobuf_dist.sha256sum }}"
dest={{ hadoop_common_temporary_dir }} dest: "{{ hadoop_common_temporary_dir }}"
when: not native_libs_built.stat.exists when: not native_libs_built.stat.exists
- name: protobuf extracted - name: protobuf extracted
shell: > shell: "tar -xzf {{ hadoop_common_protobuf_dist.filename }}"
chdir={{ hadoop_common_temporary_dir }} args:
tar -xzf {{ hadoop_common_protobuf_dist.filename }} chdir: "{{ hadoop_common_temporary_dir }}"
when: not native_libs_built.stat.exists when: not native_libs_built.stat.exists
- name: protobuf installed - name: protobuf installed
shell: > shell: "./configure --prefix=/usr/local && make && make install"
chdir={{ hadoop_common_temporary_dir }}/protobuf-{{ HADOOP_COMMON_PROTOBUF_VERSION }} args:
./configure --prefix=/usr/local && make && make install chdir: "{{ hadoop_common_temporary_dir }}/protobuf-{{ HADOOP_COMMON_PROTOBUF_VERSION }}"
when: not native_libs_built.stat.exists when: not native_libs_built.stat.exists
- name: native lib source downloaded - name: native lib source downloaded
get_url: > get_url:
url={{ hadoop_common_native_dist.url }} url: "{{ hadoop_common_native_dist.url }}"
sha256sum={{ hadoop_common_native_dist.sha256sum }} sha256sum: "{{ hadoop_common_native_dist.sha256sum }}"
dest={{ hadoop_common_temporary_dir }}/{{ hadoop_common_native_dist.filename }} dest: "{{ hadoop_common_temporary_dir }}/{{ hadoop_common_native_dist.filename }}"
when: not native_libs_built.stat.exists when: not native_libs_built.stat.exists
- name: native lib source extracted - name: native lib source extracted
shell: > shell: "tar -xzf {{ hadoop_common_native_dist.filename }}"
chdir={{ hadoop_common_temporary_dir }} args:
tar -xzf {{ hadoop_common_native_dist.filename }} chdir: "{{ hadoop_common_temporary_dir }}"
when: not native_libs_built.stat.exists when: not native_libs_built.stat.exists
- name: native lib built - name: native lib built
shell: > shell: "mvn package -X -Pnative -DskipTests"
chdir={{ hadoop_common_temporary_dir }}/hadoop-common-release-{{ HADOOP_COMMON_VERSION }}/hadoop-common-project args:
mvn package -X -Pnative -DskipTests chdir: "{{ hadoop_common_temporary_dir }}/hadoop-common-release-{{ HADOOP_COMMON_VERSION }}/hadoop-common-project"
environment: environment:
LD_LIBRARY_PATH: /usr/local/lib LD_LIBRARY_PATH: /usr/local/lib
when: not native_libs_built.stat.exists when: not native_libs_built.stat.exists
- name: old native libs renamed - name: old native libs renamed
shell: > shell: "mv {{ HADOOP_COMMON_HOME }}/lib/native/{{ item.name }} {{ HADOOP_COMMON_HOME }}/lib/native/{{ item.new_name }}"
mv {{ HADOOP_COMMON_HOME }}/lib/native/{{ item.name }} {{ HADOOP_COMMON_HOME }}/lib/native/{{ item.new_name }}
with_items: with_items:
- { name: libhadoop.a, new_name: libhadoop32.a } - { name: libhadoop.a, new_name: libhadoop32.a }
- { name: libhadoop.so, new_name: libhadoop32.so } - { name: libhadoop.so, new_name: libhadoop32.so }
...@@ -173,9 +193,9 @@ ...@@ -173,9 +193,9 @@
when: not native_libs_built.stat.exists when: not native_libs_built.stat.exists
- name: new native libs installed - name: new native libs installed
shell: > shell: "chown {{ hadoop_common_user }}:{{ hadoop_common_group }} {{ item }} && cp {{ item }} {{ HADOOP_COMMON_HOME }}/lib/native/{{ item }}"
args:
chdir={{ hadoop_common_temporary_dir }}/hadoop-common-release-{{ HADOOP_COMMON_VERSION }}/hadoop-common-project/hadoop-common/target/native/target/usr/local/lib chdir: "{{ hadoop_common_temporary_dir }}/hadoop-common-release-{{ HADOOP_COMMON_VERSION }}/hadoop-common-project/hadoop-common/target/native/target/usr/local/lib"
chown {{ hadoop_common_user }}:{{ hadoop_common_group }} {{ item }} && cp {{ item }} {{ HADOOP_COMMON_HOME }}/lib/native/{{ item }}
with_items: with_items:
- libhadoop.a - libhadoop.a
- libhadoop.so - libhadoop.so
...@@ -183,13 +203,17 @@ ...@@ -183,13 +203,17 @@
when: not native_libs_built.stat.exists when: not native_libs_built.stat.exists
- name: native lib marker touched - name: native lib marker touched
file: > file:
path={{ HADOOP_COMMON_USER_HOME }}/.native_libs_built path: "{{ HADOOP_COMMON_USER_HOME }}/.native_libs_built"
owner={{ hadoop_common_user }} group={{ hadoop_common_group }} state=touch owner: "{{ hadoop_common_user }}"
group: "{{ hadoop_common_group }}"
state: touch
when: not native_libs_built.stat.exists when: not native_libs_built.stat.exists
- name: service directory exists - name: service directory exists
file: > file:
path={{ HADOOP_COMMON_SERVICES_DIR }} path: "{{ HADOOP_COMMON_SERVICES_DIR }}"
mode=0750 owner={{ hadoop_common_user }} group={{ hadoop_common_group }} mode: 0750
state=directory owner: "{{ hadoop_common_user }}"
group: "{{ hadoop_common_group }}"
state: directory
...@@ -22,9 +22,11 @@ ...@@ -22,9 +22,11 @@
notify: restart haproxy notify: restart haproxy
- name: Server configuration file - name: Server configuration file
template: > template:
src={{ haproxy_template_dir }}/haproxy.cfg.j2 dest=/etc/haproxy/haproxy.cfg src: "{{ haproxy_template_dir }}/haproxy.cfg.j2"
dest: /etc/haproxy/haproxy.cfg
owner=root group=root mode=0644 owner: root
group: root
mode: 0644
notify: reload haproxy notify: reload haproxy
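A minimal sketch of the dict form of this task, with src and dest as separate keys (same paths, owner, and mode as above):

- name: Server configuration file
  template:
    src: "{{ haproxy_template_dir }}/haproxy.cfg.j2"
    dest: /etc/haproxy/haproxy.cfg
    owner: root
    group: root
    mode: 0644
  notify: reload haproxy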
- name: Enabled in default - name: Enabled in default
......
--- ---
# Installs the harprofiler # Installs the harprofiler
- name: create harprofiler user - name: create harprofiler user
user: > user:
name="{{ harprofiler_user }}" name: "{{ harprofiler_user }}"
createhome=no createhome: no
home={{ harprofiler_dir }} home: "{{ harprofiler_dir }}"
shell=/bin/bash shell: /bin/bash
- name: create harprofiler repo - name: create harprofiler repo
file: > file:
path={{ harprofiler_dir }} state=directory path: "{{ harprofiler_dir }}"
owner="{{ harprofiler_user }}" group="{{ common_web_group }}" state: directory
mode=0755 owner: "{{ harprofiler_user }}"
group: "{{ common_web_group }}"
mode: 0755
- name: check out the harprofiler - name: check out the harprofiler
git_2_0_1: > git_2_0_1:
dest={{ harprofiler_dir }} dest: "{{ harprofiler_dir }}"
repo={{ harprofiler_github_url }} version={{ harprofiler_version }} repo: "{{ harprofiler_github_url }}"
accept_hostkey=yes version: "{{ harprofiler_version }}"
accept_hostkey: yes
become_user: "{{ harprofiler_user }}" become_user: "{{ harprofiler_user }}"
- name: set bashrc for harprofiler user - name: set bashrc for harprofiler user
template: > template:
src=bashrc.j2 dest="{{ harprofiler_dir }}/.bashrc" owner="{{ harprofiler_user }}" src: bashrc.j2
mode=0755 dest: "{{ harprofiler_dir }}/.bashrc"
owner: "{{ harprofiler_user }}"
mode: 0755
- name: install requirements - name: install requirements
pip: > pip:
requirements="{{ harprofiler_dir }}/requirements.txt" virtualenv="{{ harprofiler_venv_dir }}" requirements: "{{ harprofiler_dir }}/requirements.txt"
virtualenv: "{{ harprofiler_venv_dir }}"
become_user: "{{ harprofiler_user }}" become_user: "{{ harprofiler_user }}"
- name: update config file - name: update config file
# harprofiler ships with a default config file. Doing a line-replace for the default # harprofiler ships with a default config file. Doing a line-replace for the default
# configuration that does not match what this machine will have # configuration that does not match what this machine will have
lineinfile: > lineinfile:
dest={{ harprofiler_dir }}/config.yaml dest: "{{ harprofiler_dir }}/config.yaml"
regexp="browsermob_dir" regexp: "browsermob_dir"
line="browsermob_dir: /usr/local" line: "browsermob_dir: /usr/local"
state=present state: present
- name: create validation shell script - name: create validation shell script
template: template:
...@@ -49,6 +55,5 @@ ...@@ -49,6 +55,5 @@
- name: test install - name: test install
shell: > shell: "./{{ harprofiler_validation_script }}"
./{{ harprofiler_validation_script }} chdir={{ harprofiler_dir }} args:
chdir: "{{ harprofiler_dir }}"
become_user: "{{ harprofiler_user }}" become_user: "{{ harprofiler_user }}"
...@@ -25,59 +25,66 @@ ...@@ -25,59 +25,66 @@
register: extracted_dir register: extracted_dir
- name: distribution downloaded - name: distribution downloaded
get_url: > get_url:
url={{ hive_dist.url }} url: "{{ hive_dist.url }}"
sha256sum={{ hive_dist.sha256sum }} sha256sum: "{{ hive_dist.sha256sum }}"
dest={{ hive_temporary_dir }} dest: "{{ hive_temporary_dir }}"
when: not extracted_dir.stat.exists when: not extracted_dir.stat.exists
- name: distribution extracted - name: distribution extracted
shell: > shell: "tar -xzf {{ hive_temporary_dir }}/{{ hive_dist.filename }} && chown -R {{ hadoop_common_user }}:{{ hadoop_common_group }} hive-{{ HIVE_VERSION }}-bin"
args:
chdir={{ HADOOP_COMMON_USER_HOME }} chdir: "{{ HADOOP_COMMON_USER_HOME }}"
tar -xzf {{ hive_temporary_dir }}/{{ hive_dist.filename }} && chown -R {{ hadoop_common_user }}:{{ hadoop_common_group }} hive-{{ HIVE_VERSION }}-bin
when: not extracted_dir.stat.exists when: not extracted_dir.stat.exists
- name: versioned directory symlink created - name: versioned directory symlink created
file: > file:
src={{ HADOOP_COMMON_USER_HOME }}/hive-{{ HIVE_VERSION }}-bin src: "{{ HADOOP_COMMON_USER_HOME }}/hive-{{ HIVE_VERSION }}-bin"
dest={{ HIVE_HOME }} dest: "{{ HIVE_HOME }}"
owner={{ hadoop_common_user }} group={{ hadoop_common_group }} state=link owner: "{{ hadoop_common_user }}"
group: "{{ hadoop_common_group }}"
state: link
- name: hive mysql connector distribution downloaded - name: hive mysql connector distribution downloaded
get_url: > get_url:
url={{ hive_mysql_connector_dist.url }} url: "{{ hive_mysql_connector_dist.url }}"
sha256sum={{ hive_mysql_connector_dist.sha256sum }} sha256sum: "{{ hive_mysql_connector_dist.sha256sum }}"
dest={{ hive_temporary_dir }} dest: "{{ hive_temporary_dir }}"
when: not extracted_dir.stat.exists when: not extracted_dir.stat.exists
- name: hive mysql connector distribution extracted - name: hive mysql connector distribution extracted
shell: > shell: "tar -xzf {{ hive_temporary_dir }}/{{ hive_mysql_connector_dist.filename }}"
args:
chdir={{ hive_temporary_dir }} chdir: "{{ hive_temporary_dir }}"
tar -xzf {{ hive_temporary_dir }}/{{ hive_mysql_connector_dist.filename }}
when: not extracted_dir.stat.exists when: not extracted_dir.stat.exists
- name: hive lib exists - name: hive lib exists
file: > file:
path={{ HIVE_LIB }} path: "{{ HIVE_LIB }}"
owner={{ hadoop_common_user }} group={{ hadoop_common_group }} state=directory owner: "{{ hadoop_common_user }}"
group: "{{ hadoop_common_group }}"
state: directory
- name: hive mysql connector installed - name: hive mysql connector installed
shell: > shell: "cp mysql-connector-java-{{ HIVE_MYSQL_CONNECTOR_VERSION }}-bin.jar {{ HIVE_LIB }} && chown {{ hadoop_common_user }}:{{ hadoop_common_group }} {{ HIVE_LIB }}/mysql-connector-java-{{ HIVE_MYSQL_CONNECTOR_VERSION }}-bin.jar"
chdir=/{{ hive_temporary_dir }}/mysql-connector-java-{{ HIVE_MYSQL_CONNECTOR_VERSION }} args:
cp mysql-connector-java-{{ HIVE_MYSQL_CONNECTOR_VERSION }}-bin.jar {{ HIVE_LIB }} && chdir: "/{{ hive_temporary_dir }}/mysql-connector-java-{{ HIVE_MYSQL_CONNECTOR_VERSION }}"
chown {{ hadoop_common_user }}:{{ hadoop_common_group }} {{ HIVE_LIB }}/mysql-connector-java-{{ HIVE_MYSQL_CONNECTOR_VERSION }}-bin.jar
when: not extracted_dir.stat.exists when: not extracted_dir.stat.exists
- name: configuration installed - name: configuration installed
template: > template:
src={{ item }}.j2 src: "{{ item }}.j2"
dest={{ HIVE_CONF }}/{{ item }} dest: "{{ HIVE_CONF }}/{{ item }}"
mode=0640 owner={{ hadoop_common_user }} group={{ hadoop_common_group }} mode: 0640
owner: "{{ hadoop_common_user }}"
group: "{{ hadoop_common_group }}"
with_items: with_items:
- hive-env.sh - hive-env.sh
- hive-site.xml - hive-site.xml
- name: env vars sourced in hadoop env - name: env vars sourced in hadoop env
lineinfile: > lineinfile:
dest={{ hadoop_common_env }} state=present dest: "{{ hadoop_common_env }}"
regexp="^. {{ HIVE_CONF }}/hive-env.sh" line=". {{ HIVE_CONF }}/hive-env.sh" state: present
regexp: "^. {{ HIVE_CONF }}/hive-env.sh"
line: ". {{ HIVE_CONF }}/hive-env.sh"
...@@ -22,21 +22,22 @@ ...@@ -22,21 +22,22 @@
# #
- name: setup the insights env file - name: setup the insights env file
template: > template:
src="edx/app/insights/insights_env.j2" src: "edx/app/insights/insights_env.j2"
dest="{{ insights_app_dir }}/insights_env" dest: "{{ insights_app_dir }}/insights_env"
owner={{ insights_user }} owner: "{{ insights_user }}"
group={{ insights_user }} group: "{{ insights_user }}"
mode=0644 mode: 0644
tags: tags:
- install - install
- install:configuration - install:configuration
- name: install application requirements - name: install application requirements
pip: > pip:
requirements="{{ insights_requirements_base }}/{{ item }}" requirements: "{{ insights_requirements_base }}/{{ item }}"
virtualenv="{{ insights_venv_dir }}" virtualenv: "{{ insights_venv_dir }}"
state=present extra_args="--exists-action w" state: present
extra_args: "--exists-action w"
become_user: "{{ insights_user }}" become_user: "{{ insights_user }}"
with_items: insights_requirements with_items: insights_requirements
tags: tags:
...@@ -44,9 +45,9 @@ ...@@ -44,9 +45,9 @@
- install:app-requirements - install:app-requirements
- name: create nodeenv - name: create nodeenv
shell: > shell: "{{ insights_venv_dir }}/bin/nodeenv {{ insights_nodeenv_dir }} --prebuilt"
creates={{ insights_nodeenv_dir }} args:
{{ insights_venv_dir }}/bin/nodeenv {{ insights_nodeenv_dir }} --prebuilt creates: "{{ insights_nodeenv_dir }}"
become_user: "{{ insights_user }}" become_user: "{{ insights_user }}"
tags: tags:
- install - install
...@@ -61,21 +62,19 @@ ...@@ -61,21 +62,19 @@
environment: "{{ insights_environment }}" environment: "{{ insights_environment }}"
- name: install bower dependencies - name: install bower dependencies
shell: > shell: ". {{ insights_venv_dir }}/bin/activate && . {{ insights_nodeenv_bin }}/activate && {{ insights_node_bin }}/bower install --production --config.interactive=false"
chdir={{ insights_code_dir }} args:
. {{ insights_venv_dir }}/bin/activate && chdir: "{{ insights_code_dir }}"
. {{ insights_nodeenv_bin }}/activate && {{ insights_node_bin }}/bower install --production --config.interactive=false
become_user: "{{ insights_user }}" become_user: "{{ insights_user }}"
tags: tags:
- install - install
- install:app-requirements - install:app-requirements
- name: migrate - name: migrate
shell: > shell: "DB_MIGRATION_USER='{{ COMMON_MYSQL_MIGRATE_USER }}'DB_MIGRATION_PASS='{{ COMMON_MYSQL_MIGRATE_PASS }}' {{ insights_venv_dir }}/bin/python {{ insights_manage }} migrate --noinput"
chdir={{ insights_code_dir }} args:
DB_MIGRATION_USER='{{ COMMON_MYSQL_MIGRATE_USER }}' chdir: "{{ insights_code_dir }}"
DB_MIGRATION_PASS='{{ COMMON_MYSQL_MIGRATE_PASS }}'
{{ insights_venv_dir }}/bin/python {{ insights_manage }} migrate --noinput
become_user: "{{ insights_user }}" become_user: "{{ insights_user }}"
environment: "{{ insights_environment }}" environment: "{{ insights_environment }}"
when: migrate_db is defined and migrate_db|lower == "yes" when: migrate_db is defined and migrate_db|lower == "yes"
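A sketch of the migrate invocation assembled with the same variables; the space between the two DB_MIGRATION_* assignments matters, since without it the shell treats the whole word as a single assignment and DB_MIGRATION_PASS is never set:

- name: migrate
  shell: "DB_MIGRATION_USER='{{ COMMON_MYSQL_MIGRATE_USER }}' DB_MIGRATION_PASS='{{ COMMON_MYSQL_MIGRATE_PASS }}' {{ insights_venv_dir }}/bin/python {{ insights_manage }} migrate --noinput"
  args:
    chdir: "{{ insights_code_dir }}"
  become_user: "{{ insights_user }}"
  environment: "{{ insights_environment }}"
  when: migrate_db is defined and migrate_db|lower == "yes"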
...@@ -84,18 +83,18 @@ ...@@ -84,18 +83,18 @@
- migrate:db - migrate:db
- name: run r.js optimizer - name: run r.js optimizer
shell: > shell: ". {{ insights_nodeenv_bin }}/activate && {{ insights_node_bin }}/r.js -o build.js"
chdir={{ insights_code_dir }} args:
. {{ insights_nodeenv_bin }}/activate && {{ insights_node_bin }}/r.js -o build.js chdir: "{{ insights_code_dir }}"
become_user: "{{ insights_user }}" become_user: "{{ insights_user }}"
tags: tags:
- assets - assets
- assets:gather - assets:gather
- name: run collectstatic - name: run collectstatic
shell: > shell: "{{ insights_venv_dir }}/bin/python {{ insights_manage }} {{ item }}"
chdir={{ insights_code_dir }} args:
{{ insights_venv_dir }}/bin/python {{ insights_manage }} {{ item }} chdir: "{{ insights_code_dir }}"
become_user: "{{ insights_user }}" become_user: "{{ insights_user }}"
environment: "{{ insights_environment }}" environment: "{{ insights_environment }}"
with_items: with_items:
...@@ -106,38 +105,40 @@ ...@@ -106,38 +105,40 @@
- assets:gather - assets:gather
- name: compile translations - name: compile translations
shell: > shell: ". {{ insights_venv_dir }}/bin/activate && i18n_tool generate -v"
chdir={{ insights_code_dir }}/analytics_dashboard args:
. {{ insights_venv_dir }}/bin/activate && i18n_tool generate -v chdir: "{{ insights_code_dir }}/analytics_dashboard"
become_user: "{{ insights_user }}" become_user: "{{ insights_user }}"
tags: tags:
- assets - assets
- assets:gather - assets:gather
- name: write out the supervisior wrapper - name: write out the supervisor wrapper
template: > template:
src=edx/app/insights/insights.sh.j2 src: "edx/app/insights/insights.sh.j2"
dest={{ insights_app_dir }}/{{ insights_service_name }}.sh dest: "{{ insights_app_dir }}/{{ insights_service_name }}.sh"
mode=0650 owner={{ supervisor_user }} group={{ common_web_user }} mode: 0650
owner: "{{ supervisor_user }}"
group: "{{ common_web_user }}"
tags: tags:
- install - install
- install:configuration - install:configuration
- name: write supervisord config - name: write supervisord config
template: > template:
src=edx/app/supervisor/conf.d.available/insights.conf.j2 src: edx/app/supervisor/conf.d.available/insights.conf.j2
dest="{{ supervisor_available_dir }}/{{ insights_service_name }}.conf" dest: "{{ supervisor_available_dir }}/{{ insights_service_name }}.conf"
owner={{ supervisor_user }} group={{ common_web_user }} mode=0644 owner: "{{ supervisor_user }}"
group: "{{ common_web_user }}"
mode: 0644
tags: tags:
- install - install
- install:configuration - install:configuration
- name: enable supervisor script - name: enable supervisor script
file: > file:
src={{ supervisor_available_dir }}/{{ insights_service_name }}.conf src: "{{ supervisor_available_dir }}/{{ insights_service_name }}.conf"
dest={{ supervisor_cfg_dir }}/{{ insights_service_name }}.conf dest: "{{ supervisor_cfg_dir }}/{{ insights_service_name }}.conf"
state=link state: link
force=yes force: yes
when: not disable_edx_services when: not disable_edx_services
tags: tags:
- install - install
...@@ -151,10 +152,10 @@ ...@@ -151,10 +152,10 @@
- manage:start - manage:start
- name: create symlinks from the venv bin dir - name: create symlinks from the venv bin dir
file: > file:
src="{{ insights_venv_dir }}/bin/{{ item }}" src: "{{ insights_venv_dir }}/bin/{{ item }}"
dest="{{ COMMON_BIN_DIR }}/{{ item.split('.')[0] }}.{{ insights_service_name }}" dest: "{{ COMMON_BIN_DIR }}/{{ item.split('.')[0] }}.{{ insights_service_name }}"
state=link state: link
with_items: with_items:
- python - python
- pip - pip
...@@ -164,20 +165,20 @@ ...@@ -164,20 +165,20 @@
- install:base - install:base
- name: create manage.py symlink - name: create manage.py symlink
file: > file:
src="{{ insights_manage }}" src: "{{ insights_manage }}"
dest="{{ COMMON_BIN_DIR }}/manage.{{ insights_service_name }}" dest: "{{ COMMON_BIN_DIR }}/manage.{{ insights_service_name }}"
state=link state: link
tags: tags:
- install - install
- install:base - install:base
- name: restart insights - name: restart insights
supervisorctl: > supervisorctl:
state=restarted state: restarted
supervisorctl_path={{ supervisor_ctl }} supervisorctl_path: "{{ supervisor_ctl }}"
config={{ supervisor_cfg }} config: "{{ supervisor_cfg }}"
name={{ insights_service_name }} name: "{{ insights_service_name }}"
when: not disable_edx_services when: not disable_edx_services
become_user: "{{ supervisor_service_user }}" become_user: "{{ supervisor_service_user }}"
tags: tags:
......
...@@ -42,20 +42,20 @@ ...@@ -42,20 +42,20 @@
owner={{ jenkins_user }} group={{ jenkins_group }} mode=755 owner={{ jenkins_user }} group={{ jenkins_group }} mode=755
- name: configure s3 plugin - name: configure s3 plugin
template: > template:
src="./{{ jenkins_home }}/hudson.plugins.s3.S3BucketPublisher.xml.j2" src: "./{{ jenkins_home }}/hudson.plugins.s3.S3BucketPublisher.xml.j2"
dest="{{ jenkins_home }}/hudson.plugins.s3.S3BucketPublisher.xml" dest: "{{ jenkins_home }}/hudson.plugins.s3.S3BucketPublisher.xml"
owner={{ jenkins_user }} owner: "{{ jenkins_user }}"
group={{ jenkins_group }} group: "{{ jenkins_group }}"
mode=0644 mode: 0644
- name: configure the boto profiles for jenkins - name: configure the boto profiles for jenkins
template: > template:
src="./{{ jenkins_home }}/boto.j2" src: "./{{ jenkins_home }}/boto.j2"
dest="{{ jenkins_home }}/.boto" dest: "{{ jenkins_home }}/.boto"
owner="{{ jenkins_user }}" owner: "{{ jenkins_user }}"
group="{{ jenkins_group }}" group: "{{ jenkins_group }}"
mode="0600" mode: "0600"
tags: tags:
- aws-config - aws-config
...@@ -66,53 +66,52 @@ ...@@ -66,53 +66,52 @@
- aws-config - aws-config
- name: configure the awscli profiles for jenkins - name: configure the awscli profiles for jenkins
template: > template:
src="./{{ jenkins_home }}/aws_config.j2" src: "./{{ jenkins_home }}/aws_config.j2"
dest="{{ jenkins_home }}/.aws/config" dest: "{{ jenkins_home }}/.aws/config"
owner="{{ jenkins_user }}" owner: "{{ jenkins_user }}"
group="{{ jenkins_group }}" group: "{{ jenkins_group }}"
mode="0600" mode: "0600"
tags: tags:
- aws-config - aws-config
- name: create the ssh directory - name: create the ssh directory
file: > file:
path={{ jenkins_home }}/.ssh path: "{{ jenkins_home }}/.ssh"
owner={{ jenkins_user }} owner: "{{ jenkins_user }}"
group={{ jenkins_group }} group: "{{ jenkins_group }}"
mode=0700 mode: 0700
state=directory state: directory
# Need to add Github to known_hosts to avoid # Need to add Github to known_hosts to avoid
# being prompted when using git through ssh # being prompted when using git through ssh
- name: Add github.com to known_hosts if it does not exist - name: Add github.com to known_hosts if it does not exist
shell: > shell: "ssh-keygen -f {{ jenkins_home }}/.ssh/known_hosts -H -F github.com | grep -q found || ssh-keyscan -H github.com > {{ jenkins_home }}/.ssh/known_hosts"
ssh-keygen -f {{ jenkins_home }}/.ssh/known_hosts -H -F github.com | grep -q found || ssh-keyscan -H github.com > {{ jenkins_home }}/.ssh/known_hosts
- name: create job directory - name: create job directory
file: > file:
path="{{ jenkins_home }}/jobs" path: "{{ jenkins_home }}/jobs"
owner="{{ jenkins_user }}" owner: "{{ jenkins_user }}"
group="{{ jenkins_group }}" group: "{{ jenkins_group }}"
mode=0755 mode: 0755
state=directory state: directory
- name: create admin job directories - name: create admin job directories
file: > file:
path="{{ jenkins_home }}/jobs/{{ item }}" path: "{{ jenkins_home }}/jobs/{{ item }}"
owner={{ jenkins_user }} owner: "{{ jenkins_user }}"
group={{ jenkins_group }} group: "{{ jenkins_group }}"
mode=0755 mode: 0755
state=directory state: directory
with_items: jenkins_admin_jobs with_items: jenkins_admin_jobs
- name: create admin job config files - name: create admin job config files
template: > template:
src="./{{ jenkins_home }}/jobs/{{ item }}/config.xml.j2" src: "./{{ jenkins_home }}/jobs/{{ item }}/config.xml.j2"
dest="{{ jenkins_home }}/jobs/{{ item }}/config.xml" dest: "{{ jenkins_home }}/jobs/{{ item }}/config.xml"
owner={{ jenkins_user }} owner: "{{ jenkins_user }}"
group={{ jenkins_group }} group: "{{ jenkins_group }}"
mode=0644 mode: 0644
with_items: jenkins_admin_jobs with_items: jenkins_admin_jobs
# adding chris-lea nodejs repo # adding chris-lea nodejs repo
...@@ -125,17 +124,18 @@ ...@@ -125,17 +124,18 @@
# This is necessary so that ansible can run with # This is necessary so that ansible can run with
# sudo set to True (as the jenkins user) on jenkins # sudo set to True (as the jenkins user) on jenkins
- name: grant sudo access to the jenkins user - name: grant sudo access to the jenkins user
copy: > copy:
content="{{ jenkins_user }} ALL=({{ jenkins_user }}) NOPASSWD:ALL" content: "{{ jenkins_user }} ALL=({{ jenkins_user }}) NOPASSWD:ALL"
dest=/etc/sudoers.d/99-jenkins owner=root group=root dest: /etc/sudoers.d/99-jenkins
owner: root
group: root
mode=0440 validate='visudo -cf %s' mode: 0440
validate: "visudo -cf %s"
- name: install global gem dependencies - name: install global gem dependencies
gem: > gem:
name={{ item.name }} name: "{{ item.name }}"
state=present state: present
version={{ item.version }} version: "{{ item.version }}"
user_install=no user_install: no
with_items: jenkins_admin_gem_pkgs with_items: jenkins_admin_gem_pkgs
- name: get s3 one time url - name: get s3 one time url
......
...@@ -170,9 +170,9 @@ ...@@ -170,9 +170,9 @@
- jenkins-seed-job - jenkins-seed-job
- name: generate seed job xml - name: generate seed job xml
shell: > shell: "GRADLE_OPTS=\"-Dorg.gradle.daemon=true\" ./gradlew run -Pargs={{ jenkins_seed_job_script }}"
cd {{ jenkins_seed_job_root }} && args:
GRADLE_OPTS="-Dorg.gradle.daemon=true" ./gradlew run -Pargs={{ jenkins_seed_job_script }} chdir: "{{ jenkins_seed_job_root }}"
become: yes become: yes
become_user: "{{ jenkins_user }}" become_user: "{{ jenkins_user }}"
tags: tags:
......
...@@ -6,22 +6,21 @@ ...@@ -6,22 +6,21 @@
# refers to the --depth-setting of git clone. A value of 1 # refers to the --depth-setting of git clone. A value of 1
# will truncate all history prior to the last revision. # will truncate all history prior to the last revision.
- name: Create shallow clone of edx-platform - name: Create shallow clone of edx-platform
git_2_0_1: > git_2_0_1:
repo=https://github.com/edx/edx-platform.git repo: https://github.com/edx/edx-platform.git
dest={{ jenkins_home }}/shallow-clone dest: "{{ jenkins_home }}/shallow-clone"
version={{ jenkins_edx_platform_version }} version: "{{ jenkins_edx_platform_version }}"
depth=1 depth: 1
become_user: "{{ jenkins_user }}" become_user: "{{ jenkins_user }}"
# Install the platform requirements using pip. # Install the platform requirements using pip.
- name: Install edx-platform requirements using pip - name: Install edx-platform requirements using pip
pip: > pip:
requirements={{ jenkins_home }}/shallow-clone/requirements/edx/{{ item }} requirements: "{{ jenkins_home }}/shallow-clone/requirements/edx/{{ item }}"
extra_args="--exists-action=w" extra_args: "--exists-action=w"
virtualenv={{ jenkins_home }}/edx-venv virtualenv: "{{ jenkins_home }}/edx-venv"
virtualenv_command=virtualenv virtualenv_command: virtualenv
executable=pip executable: pip
with_items: with_items:
- pre.txt - pre.txt
- github.txt - github.txt
...@@ -39,12 +38,12 @@ ...@@ -39,12 +38,12 @@
become_user: "{{ jenkins_user }}" become_user: "{{ jenkins_user }}"
- name: Install edx-platform post requirements using pip - name: Install edx-platform post requirements using pip
pip: > pip:
requirements={{ jenkins_home }}/shallow-clone/requirements/edx/{{ item }} requirements: "{{ jenkins_home }}/shallow-clone/requirements/edx/{{ item }}"
extra_args="--exists-action=w" extra_args: "--exists-action=w"
virtualenv={{ jenkins_home }}/edx-venv virtualenv: "{{ jenkins_home }}/edx-venv"
virtualenv_command=virtualenv virtualenv_command: virtualenv
executable=pip executable: pip
with_items: with_items:
- post.txt - post.txt
become_user: "{{ jenkins_user }}" become_user: "{{ jenkins_user }}"
...@@ -55,9 +54,9 @@ ...@@ -55,9 +54,9 @@
# The edx-venv directory is deleted and then recreated # The edx-venv directory is deleted and then recreated
# cleanly from the archive by the jenkins build scripts. # cleanly from the archive by the jenkins build scripts.
- name: Create a clean virtualenv archive - name: Create a clean virtualenv archive
command: > command: "tar -cpzf edx-venv_clean.tar.gz edx-venv"
tar -cpzf edx-venv_clean.tar.gz edx-venv args:
chdir={{ jenkins_home }} chdir: "{{ jenkins_home }}"
become_user: "{{ jenkins_user }}" become_user: "{{ jenkins_user }}"
# Remove the shallow-clone directory now that we are # Remove the shallow-clone directory now that we are
......
...@@ -39,8 +39,7 @@ ...@@ -39,8 +39,7 @@
# Need to add Github to known_hosts to avoid # Need to add Github to known_hosts to avoid
# being prompted when using git through ssh # being prompted when using git through ssh
- name: Add github.com to known_hosts if it does not exist - name: Add github.com to known_hosts if it does not exist
shell: > shell: "ssh-keygen -f {{ jenkins_home }}/.ssh/known_hosts -H -F github.com | grep -q found || ssh-keyscan -H github.com > {{ jenkins_home }}/.ssh/known_hosts"
ssh-keygen -f {{ jenkins_home }}/.ssh/known_hosts -H -F github.com | grep -q found || ssh-keyscan -H github.com > {{ jenkins_home }}/.ssh/known_hosts
# Edit the /etc/hosts file so that the Preview button will work in Studio # Edit the /etc/hosts file so that the Preview button will work in Studio
- name: add preview.localhost to /etc/hosts - name: add preview.localhost to /etc/hosts
......
...@@ -12,28 +12,42 @@ ...@@ -12,28 +12,42 @@
- nginx - nginx
- name: Ensure {{ kibana_app_dir }} exists - name: Ensure {{ kibana_app_dir }} exists
file: path={{ kibana_app_dir }} state=directory owner=root group=root mode=0755 file:
path: "{{ kibana_app_dir }}"
state: directory
owner: root
group: root
mode: 0755
- name: Ensure subdirectories exist - name: Ensure subdirectories exist
file: path={{ kibana_app_dir }}/{{ item }} owner=root group=root mode=0755 state=directory file:
path: "{{ kibana_app_dir }}/{{ item }}"
owner: root
group: root
mode: 0755
state: directory
with_items: with_items:
- htdocs - htdocs
- share - share
- name: ensure we have the specified kibana release - name: ensure we have the specified kibana release
get_url: url={{ kibana_url }} dest={{ kibana_app_dir }}/share/{{ kibana_file }} get_url:
url: "{{ kibana_url }}"
dest: "{{ kibana_app_dir }}/share/{{ kibana_file }}"
- name: extract - name: extract
shell: > shell: "tar -xzvf {{ kibana_app_dir }}/share/{{ kibana_file }}"
chdir={{ kibana_app_dir }}/share args:
tar -xzvf {{ kibana_app_dir }}/share/{{ kibana_file }} chdir: "{{ kibana_app_dir }}/share"
creates={{ kibana_app_dir }}/share/{{ kibana_file|replace('.tar.gz','') }} creates: "{{ kibana_app_dir }}/share/{{ kibana_file|replace('.tar.gz','') }}"
- name: install - name: install
shell: > shell: "cp -R * {{ kibana_app_dir }}/htdocs/"
chdir={{ kibana_app_dir }}/share/{{ kibana_file|replace('.tar.gz','') }} args:
cp -R * {{ kibana_app_dir }}/htdocs/ chdir: "{{ kibana_app_dir }}/share/{{ kibana_file|replace('.tar.gz','') }}"
- name: copy config - name: copy config
template: src=config.js.j2 dest={{ kibana_app_dir }}/htdocs/config.js template:
src: config.js.j2
dest: "{{ kibana_app_dir }}/htdocs/config.js"
...@@ -56,19 +56,19 @@ ...@@ -56,19 +56,19 @@
when: LOGSTASH_ROTATE|bool when: LOGSTASH_ROTATE|bool
- name: Setup cron to run rotation - name: Setup cron to run rotation
cron: > cron:
user=root user: root
name="Elasticsearch logstash index rotation" name: "Elasticsearch logstash index rotation"
hour={{ logstash_rotate_cron.hour }} hour: "{{ logstash_rotate_cron.hour }}"
minute={{ logstash_rotate_cron.minute }} minute: "{{ logstash_rotate_cron.minute }}"
job="/usr/bin/python {{ logstash_app_dir }}/share/logstash-elasticsearch-scripts/logstash_index_cleaner.py -d {{ LOGSTASH_DAYS_TO_KEEP }} > {{ logstash_log_dir }}/rotation_cron" job: "/usr/bin/python {{ logstash_app_dir }}/share/logstash-elasticsearch-scripts/logstash_index_cleaner.py -d {{ LOGSTASH_DAYS_TO_KEEP }} > {{ logstash_log_dir }}/rotation_cron"
when: LOGSTASH_ROTATE|bool when: LOGSTASH_ROTATE|bool
- name: Setup cron to run rotation - name: Setup cron to run rotation
cron: > cron:
user=root user: root
name="Elasticsearch logstash index optimization" name: "Elasticsearch logstash index optimization"
hour={{ logstash_optimize_cron.hour }} hour: "{{ logstash_optimize_cron.hour }}"
minute={{ logstash_optimize_cron.minute }} minute: "{{ logstash_optimize_cron.minute }}"
job="/usr/bin/python {{ logstash_app_dir }}/share/logstash-elasticsearch-scripts/logstash_index_optimize.py -d {{ LOGSTASH_DAYS_TO_KEEP }} > {{ logstash_log_dir }}/optimize_cron" job: "/usr/bin/python {{ logstash_app_dir }}/share/logstash-elasticsearch-scripts/logstash_index_optimize.py -d {{ LOGSTASH_DAYS_TO_KEEP }} > {{ logstash_log_dir }}/optimize_cron"
when: LOGSTASH_ROTATE|bool when: LOGSTASH_ROTATE|bool
- name: copy galera cluster config - name: copy galera cluster config
template: > template:
src="etc/mysql/conf.d/galera.cnf.j2" src: "etc/mysql/conf.d/galera.cnf.j2"
dest="/etc/mysql/conf.d/galera.cnf" dest: "/etc/mysql/conf.d/galera.cnf"
owner="root" owner: "root"
group="root" group: "root"
mode=0600 mode: 0600
- name: check if we have already bootstrapped the cluster - name: check if we have already bootstrapped the cluster
stat: path=/etc/mysql/ansible_cluster_started stat: path=/etc/mysql/ansible_cluster_started
...@@ -15,18 +15,18 @@ ...@@ -15,18 +15,18 @@
when: not mariadb_bootstrap.stat.exists when: not mariadb_bootstrap.stat.exists
- name: setup bootstrap on primary - name: setup bootstrap on primary
lineinfile: > lineinfile:
dest="/etc/mysql/conf.d/galera.cnf" dest: "/etc/mysql/conf.d/galera.cnf"
regexp="^wsrep_cluster_address=gcomm://{{ hostvars.keys()|sort|join(',') }}$" regexp: "^wsrep_cluster_address=gcomm://{{ hostvars.keys()|sort|join(',') }}$"
line="wsrep_cluster_address=gcomm://" line: "wsrep_cluster_address=gcomm://"
when: ansible_hostname == hostvars[hostvars.keys()[0]].ansible_hostname and not mariadb_bootstrap.stat.exists when: ansible_hostname == hostvars[hostvars.keys()[0]].ansible_hostname and not mariadb_bootstrap.stat.exists
- name: fetch debian.cnf file so start-stop will work properly - name: fetch debian.cnf file so start-stop will work properly
fetch: > fetch:
src=/etc/mysql/debian.cnf src: /etc/mysql/debian.cnf
dest=/tmp/debian.cnf dest: /tmp/debian.cnf
fail_on_missing=yes fail_on_missing: yes
flat=yes flat: yes
when: ansible_hostname == hostvars[hostvars.keys()[0]].ansible_hostname and not mariadb_bootstrap.stat.exists when: ansible_hostname == hostvars[hostvars.keys()[0]].ansible_hostname and not mariadb_bootstrap.stat.exists
register: mariadb_new_debian_cnf register: mariadb_new_debian_cnf
...@@ -39,12 +39,12 @@ ...@@ -39,12 +39,12 @@
when: not mariadb_bootstrap.stat.exists when: not mariadb_bootstrap.stat.exists
- name: reset galera cluster config since we are bootstrapped - name: reset galera cluster config since we are bootstrapped
template: > template:
src="etc/mysql/conf.d/galera.cnf.j2" src: "etc/mysql/conf.d/galera.cnf.j2"
dest="/etc/mysql/conf.d/galera.cnf" dest: "/etc/mysql/conf.d/galera.cnf"
owner="root" owner: "root"
group="root" group: "root"
mode=0600 mode: 0600
when: not mariadb_bootstrap.stat.exists when: not mariadb_bootstrap.stat.exists
- name: touch bootstrap file to confirm we are fully up - name: touch bootstrap file to confirm we are fully up
...@@ -53,6 +53,5 @@ ...@@ -53,6 +53,5 @@
# This is needed for mysql-check in haproxy or other mysql monitor # This is needed for mysql-check in haproxy or other mysql monitor
# scripts to prevent haproxy checks exceeding `max_connect_errors`. # scripts to prevent haproxy checks exceeding `max_connect_errors`.
- name: create haproxy monitor user - name: create haproxy monitor user
command: > command: "mysql -e \"INSERT INTO mysql.user (Host,User) values ('{{ item }}','{{ MARIADB_HAPROXY_USER }}'); FLUSH PRIVILEGES;\""
mysql -e "INSERT INTO mysql.user (Host,User) values ('{{ item }}','{{ MARIADB_HAPROXY_USER }}'); FLUSH PRIVILEGES;"
with_items: MARIADB_HAPROXY_HOSTS with_items: MARIADB_HAPROXY_HOSTS
...@@ -29,9 +29,9 @@ ...@@ -29,9 +29,9 @@
apt_key: url="{{ COMMON_UBUNTU_APT_KEYSERVER }}{{ MARIADB_APT_KEY_ID }}" apt_key: url="{{ COMMON_UBUNTU_APT_KEYSERVER }}{{ MARIADB_APT_KEY_ID }}"
- name: add the mariadb repo to the sources list - name: add the mariadb repo to the sources list
apt_repository: > apt_repository:
repo='{{ MARIADB_REPO }}' repo: "{{ MARIADB_REPO }}"
state=present state: present
- name: install mariadb solo packages - name: install mariadb solo packages
apt: name={{ item }} update_cache=yes apt: name={{ item }} update_cache=yes
...@@ -44,10 +44,10 @@ ...@@ -44,10 +44,10 @@
when: MARIADB_CLUSTERED|bool when: MARIADB_CLUSTERED|bool
- name: remove bind-address - name: remove bind-address
lineinfile: > lineinfile:
dest=/etc/mysql/my.cnf dest: /etc/mysql/my.cnf
regexp="^bind-address\s+=\s+127\.0\.0\.1$" regexp: "^bind-address\s+=\s+127\.0\.0\.1$"
state=absent state: absent
when: MARIADB_LISTEN_ALL|bool or MARIADB_CLUSTERED|bool when: MARIADB_LISTEN_ALL|bool or MARIADB_CLUSTERED|bool
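A note on quoting that applies to regexps throughout this conversion: backslash patterns survive only in single-quoted YAML scalars; in double quotes, sequences such as \s or \. are rejected as unknown escapes unless every backslash is doubled. A minimal sketch:

- name: remove bind-address
  lineinfile:
    dest: /etc/mysql/my.cnf
    regexp: '^bind-address\s+=\s+127\.0\.0\.1$'   # single quotes pass the backslashes through literally
    state: absent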
- include: cluster.yml - include: cluster.yml
...@@ -57,37 +57,37 @@ ...@@ -57,37 +57,37 @@
service: name=mysql state=started service: name=mysql state=started
- name: create all databases - name: create all databases
mysql_db: > mysql_db:
db={{ item }} db: "{{ item }}"
state=present state: present
encoding=utf8 encoding: utf8
with_items: MARIADB_DATABASES with_items: MARIADB_DATABASES
when: MARIADB_CREATE_DBS|bool when: MARIADB_CREATE_DBS|bool
- name: create all analytics dbs - name: create all analytics dbs
mysql_db: > mysql_db:
db={{ item }} db: "{{ item }}"
state=present state: present
encoding=utf8 encoding: utf8
with_items: MARIADB_ANALYTICS_DATABASES with_items: MARIADB_ANALYTICS_DATABASES
when: MARIADB_CREATE_DBS|bool and ANALYTICS_API_CONFIG is defined when: MARIADB_CREATE_DBS|bool and ANALYTICS_API_CONFIG is defined
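Values that begin with a Jinja2 expression have to be quoted, otherwise YAML tries to read {{ ... }} as an inline mapping and the play fails to parse. A minimal sketch with the same loop variable:

- name: create all analytics dbs
  mysql_db:
    db: "{{ item }}"    # quoted: parsed as a templated string
    state: present
    encoding: utf8
  with_items: MARIADB_ANALYTICS_DATABASES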
- name: create all users/privs - name: create all users/privs
mysql_user: > mysql_user:
name="{{ item.name }}" name: "{{ item.name }}"
password="{{ item.pass }}" password: "{{ item.pass }}"
priv="{{ item.priv }}" priv: "{{ item.priv }}"
host="{{ item.host }}" host: "{{ item.host }}"
append_privs=yes append_privs: yes
with_items: MARIADB_USERS with_items: MARIADB_USERS
when: MARIADB_CREATE_DBS|bool when: MARIADB_CREATE_DBS|bool
- name: create all analytics users/privs - name: create all analytics users/privs
mysql_user: > mysql_user:
name="{{ item.name }}" name: "{{ item.name }}"
password="{{ item.pass }}" password: "{{ item.pass }}"
priv="{{ item.priv }}" priv: "{{ item.priv }}"
host="{{ item.host }}" host: "{{ item.host }}"
append_privs=yes append_privs: yes
with_items: MARIADB_ANALYTICS_USERS with_items: MARIADB_ANALYTICS_USERS
when: MARIADB_CREATE_DBS|bool and ANALYTICS_API_CONFIG is defined when: MARIADB_CREATE_DBS|bool and ANALYTICS_API_CONFIG is defined
...@@ -11,31 +11,33 @@ ...@@ -11,31 +11,33 @@
when: MMSAPIKEY is not defined when: MMSAPIKEY is not defined
- name: download mongo mms agent - name: download mongo mms agent
get_url: > get_url:
url="{{ base_url }}/{{ item.dir }}/{{ item.agent }}_{{ item.version }}_{{ pkg_arch }}.{{ pkg_format }}" url: "{{ base_url }}/{{ item.dir }}/{{ item.agent }}_{{ item.version }}_{{ pkg_arch }}.{{ pkg_format }}"
dest="/tmp/{{ item.agent }}-{{ item.version }}.{{ pkg_format }}" dest: "/tmp/{{ item.agent }}-{{ item.version }}.{{ pkg_format }}"
register: download_mms_deb register: download_mms_deb
with_items: with_items:
agents agents
- name: install mongo mms agent - name: install mongo mms agent
apt: > apt:
deb="/tmp/{{ item.agent }}-{{ item.version }}.deb" deb: "/tmp/{{ item.agent }}-{{ item.version }}.deb"
when: download_mms_deb.changed when: download_mms_deb.changed
notify: restart mms notify: restart mms
with_items: with_items:
agents agents
- name: add key to monitoring-agent.config - name: add key to monitoring-agent.config
lineinfile: > lineinfile:
dest="{{ item.config }}" dest: "{{ item.config }}"
regexp="^mmsApiKey=" regexp: "^mmsApiKey="
line="mmsApiKey={{ MMSAPIKEY }}" line: "mmsApiKey={{ MMSAPIKEY }}"
notify: restart mms notify: restart mms
with_items: with_items:
agents agents
- name: start mms service - name: start mms service
service: name="{{ item.agent }}" state=started service:
name: "{{ item.agent }}"
state: started
with_items: with_items:
agents agents
...@@ -49,7 +49,7 @@ ...@@ -49,7 +49,7 @@
src: "{{ item.device }}" src: "{{ item.device }}"
fstype: "{{ item.fstype }}" fstype: "{{ item.fstype }}"
state: unmounted state: unmounted
when: > when:
UNMOUNT_DISKS and UNMOUNT_DISKS and
volumes | selectattr('device', 'equalto', item.device) | list | length != 0 and volumes | selectattr('device', 'equalto', item.device) | list | length != 0 and
(volumes | selectattr('device', 'equalto', item.device) | first).mount != item.mount (volumes | selectattr('device', 'equalto', item.device) | first).mount != item.mount
...@@ -59,7 +59,7 @@ ...@@ -59,7 +59,7 @@
# that is an errorable condition, since it can easily allow us to double mount a disk. # that is an errorable condition, since it can easily allow us to double mount a disk.
- name: Check that we don't want to unmount disks when UNMOUNT_DISKS is false - name: Check that we don't want to unmount disks when UNMOUNT_DISKS is false
fail: msg="Found disks mounted in the wrong place, but can't unmount them. This role will need to be re-run with -e 'UNMOUNT_DISKS=True' if you believe that is safe." fail: msg="Found disks mounted in the wrong place, but can't unmount them. This role will need to be re-run with -e 'UNMOUNT_DISKS=True' if you believe that is safe."
when: > when:
not UNMOUNT_DISKS and not UNMOUNT_DISKS and
volumes | selectattr('device', 'equalto', item.device) | list | length != 0 and volumes | selectattr('device', 'equalto', item.device) | list | length != 0 and
(volumes | selectattr('device', 'equalto', item.device) | first).mount != item.mount (volumes | selectattr('device', 'equalto', item.device) | first).mount != item.mount
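Dropping the `>` still yields a single expression here, because YAML folds the indented continuation lines of a plain scalar into one string. An equivalent, arguably clearer form that newer Ansible also accepts is a list of conditions, AND-ed implicitly; a sketch using the same expressions:

when:
  - not UNMOUNT_DISKS
  - volumes | selectattr('device', 'equalto', item.device) | list | length != 0
  - (volumes | selectattr('device', 'equalto', item.device) | first).mount != item.mount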
......
...@@ -22,41 +22,37 @@ ...@@ -22,41 +22,37 @@
# #
- name: Download newrelic NPI - name: Download newrelic NPI
get_url: > get_url:
dest="/tmp/{{ newrelic_npi_installer }}" dest: "/tmp/{{ newrelic_npi_installer }}"
url="{{ NEWRELIC_NPI_URL }}" url: "{{ NEWRELIC_NPI_URL }}"
register: download_npi_installer register: download_npi_installer
- name: create npi install directory {{ NEWRELIC_NPI_PREFIX }} - name: create npi install directory {{ NEWRELIC_NPI_PREFIX }}
file: > file:
path="{{ NEWRELIC_NPI_PREFIX }}" path: "{{ NEWRELIC_NPI_PREFIX }}"
state=directory state: directory
mode=0755 mode: 0755
owner="{{ NEWRELIC_USER }}" owner: "{{ NEWRELIC_USER }}"
- name: install newrelic npi - name: install newrelic npi
shell: > shell: "tar -xzf /tmp/{{ newrelic_npi_installer }} --strip-components=1 -C \"{{NEWRELIC_NPI_PREFIX}}\""
tar -xzf /tmp/{{ newrelic_npi_installer }} --strip-components=1 -C "{{NEWRELIC_NPI_PREFIX}}"
when: download_npi_installer.changed when: download_npi_installer.changed
become_user: "{{ NEWRELIC_USER }}" become_user: "{{ NEWRELIC_USER }}"
- name: configure npi with the default user - name: configure npi with the default user
shell: > shell: "{{ NEWRELIC_NPI_PREFIX }}/bin/node {{ NEWRELIC_NPI_PREFIX }}/npi.js \"set user {{ NEWRELIC_USER }}\""
{{ NEWRELIC_NPI_PREFIX }}/bin/node {{ NEWRELIC_NPI_PREFIX }}/npi.js "set user {{ NEWRELIC_USER }}"
args: args:
chdir: "{{ NEWRELIC_NPI_PREFIX }}" chdir: "{{ NEWRELIC_NPI_PREFIX }}"
become_user: "{{ NEWRELIC_USER }}" become_user: "{{ NEWRELIC_USER }}"
- name: configure npi with the license key - name: configure npi with the license key
shell: > shell: "./npi set license_key {{ NEWRELIC_LICENSE_KEY }}"
./npi set license_key {{ NEWRELIC_LICENSE_KEY }}
args: args:
chdir: "{{ NEWRELIC_NPI_PREFIX }}" chdir: "{{ NEWRELIC_NPI_PREFIX }}"
become_user: "{{ NEWRELIC_USER }}" become_user: "{{ NEWRELIC_USER }}"
- name: configure npi with the distro - name: configure npi with the distro
shell: > shell: "./npi set distro {{ NEWRELIC_NPI_DISTRO }}"
./npi set distro {{ NEWRELIC_NPI_DISTRO }}
args: args:
chdir: "{{ NEWRELIC_NPI_PREFIX }}" chdir: "{{ NEWRELIC_NPI_PREFIX }}"
become_user: "{{ NEWRELIC_USER }}" become_user: "{{ NEWRELIC_USER }}"
......
...@@ -10,17 +10,18 @@ ...@@ -10,17 +10,18 @@
- deploy - deploy
- name: download nltk data - name: download nltk data
get_url: > get_url:
dest={{ NLTK_DATA_DIR }}/{{ item.url|basename }} dest: "{{ NLTK_DATA_DIR }}/{{ item.url|basename }}"
url={{ item.url }} url: "{{ item.url }}"
with_items: NLTK_DATA with_items: NLTK_DATA
register: nltk_download register: nltk_download
tags: tags:
- deploy - deploy
- name: unarchive nltk data - name: unarchive nltk data
shell: > shell: "unzip {{ NLTK_DATA_DIR }}/{{ item.url|basename }}"
unzip {{ NLTK_DATA_DIR }}/{{ item.url|basename }} chdir="{{ NLTK_DATA_DIR }}/{{ item.path|dirname }}" args:
chdir: "{{ NLTK_DATA_DIR }}/{{ item.path|dirname }}"
with_items: NLTK_DATA with_items: NLTK_DATA
when: nltk_download|changed when: nltk_download|changed
tags: tags:
......
...@@ -55,8 +55,8 @@ ...@@ -55,8 +55,8 @@
# Need to use command rather than pip so that we can maintain the context of our current working directory; # Need to use command rather than pip so that we can maintain the context of our current working directory;
# some requirements are pathed relative to the edx-platform repo. # some requirements are pathed relative to the edx-platform repo.
# Using the pip from inside the virtual environment implicitly installs everything into that virtual environment. # Using the pip from inside the virtual environment implicitly installs everything into that virtual environment.
command: > command: "{{ edxapp_venv_dir }}/bin/pip install {{ COMMON_PIP_VERBOSITY }} -i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w -r {{ openstack_requirements_file }}"
{{ edxapp_venv_dir }}/bin/pip install {{ COMMON_PIP_VERBOSITY }} -i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w -r {{ openstack_requirements_file }} args:
chdir={{ edxapp_code_dir }} chdir: "{{ edxapp_code_dir }}"
sudo_user: "{{ edxapp_user }}" sudo_user: "{{ edxapp_user }}"
environment: "{{ edxapp_environment }}" environment: "{{ edxapp_environment }}"
......
...@@ -56,9 +56,9 @@ ...@@ -56,9 +56,9 @@
- migrate:db - migrate:db
- name: run collectstatic - name: run collectstatic
shell: > shell: "{{ programs_venv_dir }}/bin/python manage.py collectstatic --noinput"
chdir={{ programs_code_dir }} args:
{{ programs_venv_dir }}/bin/python manage.py collectstatic --noinput chdir: "{{ programs_code_dir }}"
become_user: "{{ programs_user }}" become_user: "{{ programs_user }}"
environment: "{{ programs_environment }}" environment: "{{ programs_environment }}"
when: not devstack when: not devstack
...@@ -68,9 +68,12 @@ ...@@ -68,9 +68,12 @@
# NOTE this isn't used or needed when s3 is used for PROGRAMS_MEDIA_STORAGE_BACKEND # NOTE this isn't used or needed when s3 is used for PROGRAMS_MEDIA_STORAGE_BACKEND
- name: create programs media dir - name: create programs media dir
file: > file:
path="{{ item }}" state=directory mode=0775 path: "{{ item }}"
owner="{{ programs_user }}" group="{{ common_web_group }}" state: directory
mode: 0775
owner: "{{ programs_user }}"
group: "{{ common_web_group }}"
with_items: with_items:
- "{{ PROGRAMS_MEDIA_ROOT }}" - "{{ PROGRAMS_MEDIA_ROOT }}"
tags: tags:
......
...@@ -171,8 +171,7 @@ ...@@ -171,8 +171,7 @@
- maintenance - maintenance
- name: Make queues mirrored - name: Make queues mirrored
shell: > shell: "/usr/sbin/rabbitmqctl -p {{ item }} set_policy HA \"\" '{\"ha-mode\":\"all\",\"ha-sync-mode\":\"automatic\"}'"
/usr/sbin/rabbitmqctl -p {{ item }} set_policy HA "" '{"ha-mode":"all","ha-sync-mode":"automatic"}'
when: RABBITMQ_CLUSTERED_HOSTS|length > 1 when: RABBITMQ_CLUSTERED_HOSTS|length > 1
with_items: "{{ RABBITMQ_VHOSTS }}" with_items: "{{ RABBITMQ_VHOSTS }}"
tags: tags:
......
...@@ -38,18 +38,21 @@ ...@@ -38,18 +38,21 @@
when: rbenv_ruby_version is not defined when: rbenv_ruby_version is not defined
- name: create rbenv user {{ rbenv_user }} - name: create rbenv user {{ rbenv_user }}
user: > user:
name={{ rbenv_user }} home={{ rbenv_dir }} name: "{{ rbenv_user }}"
shell=/bin/false createhome=no home: "{{ rbenv_dir }}"
shell: /bin/false
createhome: no
when: rbenv_user != common_web_user when: rbenv_user != common_web_user
tags: tags:
- install - install
- install:base - install:base
- name: create rbenv dir if it does not exist - name: create rbenv dir if it does not exist
file: > file:
path="{{ rbenv_dir }}" owner="{{ rbenv_user }}" path: "{{ rbenv_dir }}"
state=directory owner: "{{ rbenv_user }}"
state: directory
tags: tags:
- install - install
- install:base - install:base
...@@ -62,18 +65,20 @@ ...@@ -62,18 +65,20 @@
- install:base - install:base
- name: update rbenv repo - name: update rbenv repo
git_2_0_1: > git_2_0_1:
repo=https://github.com/sstephenson/rbenv.git repo: https://github.com/sstephenson/rbenv.git
dest={{ rbenv_dir }}/.rbenv version={{ rbenv_version }} dest: "{{ rbenv_dir }}/.rbenv"
accept_hostkey=yes version: "{{ rbenv_version }}"
accept_hostkey: yes
become_user: "{{ rbenv_user }}" become_user: "{{ rbenv_user }}"
tags: tags:
- install - install
- install:base - install:base
- name: ensure ruby_env exists - name: ensure ruby_env exists
template: > template:
src=ruby_env.j2 dest={{ rbenv_dir }}/ruby_env src: ruby_env.j2
dest: "{{ rbenv_dir }}/ruby_env"
become_user: "{{ rbenv_user }}" become_user: "{{ rbenv_user }}"
tags: tags:
- install - install
...@@ -107,9 +112,10 @@ ...@@ -107,9 +112,10 @@
- install:base - install:base
- name: clone ruby-build repo - name: clone ruby-build repo
git: > git:
repo=https://github.com/sstephenson/ruby-build.git dest={{ tempdir.stdout }}/ruby-build repo: https://github.com/sstephenson/ruby-build.git
accept_hostkey=yes dest: "{{ tempdir.stdout }}/ruby-build"
accept_hostkey: yes
when: tempdir.stdout is defined and (rbuild_present|failed or (installable_ruby_vers is defined and rbenv_ruby_version not in installable_ruby_vers)) when: tempdir.stdout is defined and (rbuild_present|failed or (installable_ruby_vers is defined and rbenv_ruby_version not in installable_ruby_vers))
become_user: "{{ rbenv_user }}" become_user: "{{ rbenv_user }}"
tags: tags:
......
...@@ -15,12 +15,12 @@ ...@@ -15,12 +15,12 @@
file: path=/etc/shibboleth/metadata state=directory mode=2774 group=_shibd owner=_shibd file: path=/etc/shibboleth/metadata state=directory mode=2774 group=_shibd owner=_shibd
- name: Downloads metadata into metadata directory as backup - name: Downloads metadata into metadata directory as backup
get_url: > get_url:
url={{ shib_metadata_backup_url }} url: "{{ shib_metadata_backup_url }}"
dest=/etc/shibboleth/metadata/idp-metadata.xml dest: "/etc/shibboleth/metadata/idp-metadata.xml"
mode=0640 mode: 0640
group=_shibd group: _shibd
owner=_shibd owner: _shibd
when: shib_download_metadata when: shib_download_metadata
- name: writes out key and pem file - name: writes out key and pem file
......
...@@ -9,39 +9,51 @@ ...@@ -9,39 +9,51 @@
- oinkmaster - oinkmaster
- name: configure snort - name: configure snort
template: > template:
src=etc/snort/snort.conf.j2 dest=/etc/snort/snort.conf src: etc/snort/snort.conf.j2
owner=root group=root mode=0644 dest: /etc/snort/snort.conf
owner: root
group: root
mode: 0644
- name: configure snort (debian) - name: configure snort (debian)
template: > template:
src=etc/snort/snort.debian.conf.j2 dest=/etc/snort/snort.debian.conf src: etc/snort/snort.debian.conf.j2
owner=root group=root mode=0644 dest: /etc/snort/snort.debian.conf
owner: root
group: root
mode: 0644
- name: configure oinkmaster - name: configure oinkmaster
template: > template:
src=etc/oinkmaster.conf.j2 dest=/etc/oinkmaster.conf src: etc/oinkmaster.conf.j2
owner=root group=root mode=0644 dest: /etc/oinkmaster.conf
owner: root
group: root
mode: 0644
- name: update snort - name: update snort
shell: oinkmaster -C /etc/oinkmaster.conf -o /etc/snort/rules/ shell: oinkmaster -C /etc/oinkmaster.conf -o /etc/snort/rules/
become: yes become: yes
- name: snort service - name: snort service
service: > service:
name="snort" name: "snort"
state="started" state: "started"
- name: open read permissions on snort logs - name: open read permissions on snort logs
file: > file:
name="/var/log/snort" name: "/var/log/snort"
state="directory" state: "directory"
mode="755" mode: "755"
- name: install oinkmaster cronjob - name: install oinkmaster cronjob
template: > template:
src=etc/cron.daily/oinkmaster.j2 dest=/etc/cron.daily/oinkmaster src: etc/cron.daily/oinkmaster.j2
owner=root group=root mode=0755 dest: /etc/cron.daily/oinkmaster
owner: root
group: root
mode: 0755
---
tanaguru_debian_pkgs:
- openjdk-7-jre
- unzip
- libmysql-java
- python-mysqldb
- tomcat7
- libspring-instrument-java
- xvfb
- mailutils
- postfix
tanaguru_download_link: "http://download.tanaguru.org/Tanaguru/tanaguru-3.1.0.i386.tar.gz"
# Go to this link to find your desired ESR Firefox
# http://download-origin.cdn.mozilla.net/pub/firefox/releases/24.0esr/linux-x86_64/
# Default is en-US in our example
firefox_esr_link: "http://download-origin.cdn.mozilla.net/pub/firefox/releases/24.0esr/linux-x86_64/en-US/firefox-24.0esr.tar.bz2"
TANAGURU_DATABASE_NAME: 'tgdatabase'
TANAGURU_DATABASE_USER: 'tguser'
TANAGURU_DATABASE_PASSWORD: 'tgPassword'
TANAGURU_URL: 'http://localhost:8080/tanaguru/'
TANAGURU_ADMIN_EMAIL: 'admin@example.com'
TANAGURU_ADMIN_PASSWORD: 'tanaguru15'
tanaguru_parameters:
db_name: "{{ TANAGURU_DATABASE_NAME }}"
db_user: "{{ TANAGURU_DATABASE_USER }}"
db_password: "{{ TANAGURU_DATABASE_PASSWORD }}"
url: "{{ TANAGURU_URL }}"
admin_email: "{{ TANAGURU_ADMIN_EMAIL }}"
admin_passwd: "{{ TANAGURU_ADMIN_PASSWORD }}"
\ No newline at end of file
---
- name: Add the Partner repository
apt_repository:
repo: "{{ item }}"
state: present
with_items:
- "deb http://archive.canonical.com/ubuntu {{ ansible_distribution_release }} partner"
- "deb-src http://archive.canonical.com/ubuntu {{ ansible_distribution_release }} partner"
tags:
- install
- install:base
- name: Set Postfix options
debconf:
name: postfix
question: "{{ item.question }}"
value: "{{ item.value }} "
vtype: "string"
with_items:
- { question: "postfix/mailname", value: " " }
- { question: "postfix/main_mailer_type", value: "Satellite system" }
tags:
- install
- install:configuration
- name: Install the TanaGuru Prerequisites
apt:
name: "{{ item }}"
update_cache: yes
state: installed
with_items: tanaguru_debian_pkgs
tags:
- install
- install:base
- name: Modify the my.cnf file for max_allowed_packet option
lineinfile:
dest: /etc/mysql/my.cnf
regexp: '^max_allowed_packet'
line: 'max_allowed_packet = 64M'
state: present
register: my_cnf
tags:
- install
- install:configuration
- name: Restart MySQL
service:
name: mysql
state: restarted
when: my_cnf.changed
- name: Create a soft link for tomcat jar and mysql connector
file:
dest: "{{ item.dest }}"
src: "{{ item.src }}"
state: link
with_items:
- { src: '/usr/share/java/spring3-instrument-tomcat.jar', dest: '/usr/share/tomcat7/lib/spring3-instrument-tomcat.jar' }
- { src: '/usr/share/java/mysql-connector-java.jar', dest: '/usr/share/tomcat7/lib/mysql-connector-java.jar'}
tags:
- install
- install:configuration
- name: Copy the xvfb template to /etc/init.d
template:
dest: /etc/init.d/xvfb
src: xvfb.j2
owner: root
group: root
mode: 0755
register: xvfb
tags:
- install
- install:configuration
- name: Restart xvfb
service:
name: xvfb
pattern: /etc/init.d/xvfb
state: restarted
when: xvfb.changed
- name: Configure xvfb to run at startup
command: update-rc.d xvfb defaults
ignore_errors: yes
when: xvfb.changed
- name: Download the latest ESR Firefox
get_url:
url: "{{ fixfox_esr_link }}"
dest: "/tmp/{{ fixfox_esr_link | basename }}"
tags:
- install
- install:base
- name: Unpack the downloaded Firefox archive
unarchive:
src: "/tmp/{{ fixfox_esr_link | basename }}"
dest: /opt
copy: no
tags:
- install
- install:base
- name: Download the latest TanaGuru tarball
get_url:
url: "{{ tanaguru_download_link }}"
dest: "/tmp/{{ tanaguru_download_link | basename }}"
tags:
- install
- install:base
- name: Unzip the downloaded TanaGuru tarball
unarchive:
src: "/tmp/{{ tanaguru_download_link | basename }}"
dest: "/tmp/"
copy: no
tags:
- install
- install:base
- name: Create MySQL database for TanaGuru
mysql_db:
name: "{{ tanaguru_parameters.db_name }}"
state: present
encoding: utf8
collation: utf8_general_ci
tags:
- install
- install:base
- name: Create MySQL user for TanaGuru
mysql_user:
name: "{{ tanaguru_parameters.db_user }}"
password: "{{ tanaguru_parameters.db_password }}"
host: localhost
priv: "{{ tanaguru_parameters.db_name }}.*:ALL"
state: present
tags:
- install
- install:base
- name: Check whether the TanaGuru app is already running
shell: >
/bin/ps aux | grep -i tanaguru
register: tanaguru_app
changed_when: no
tags:
- install
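# The captured ps output acts as a crude idempotency guard: the installer below
# only runs when no existing process already references /etc/tanaguru/.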
- name: Install TanaGuru
shell: >
/bin/echo "yes" | ./install.sh --mysql-tg-user "{{ tanaguru_parameters.db_user }}" \
--mysql-tg-passwd "{{ tanaguru_parameters.db_password }}" \
--mysql-tg-db "{{ tanaguru_parameters.db_name }}" \
--tanaguru-url "{{ tanaguru_parameters.url }}" \
--tomcat-webapps /var/lib/tomcat7/webapps \
--tomcat-user tomcat7 \
--tg-admin-email "{{ tanaguru_parameters.admin_email }}" \
--tg-admin-passwd "{{ tanaguru_parameters.admin_passwd }}" \
--firefox-esr-path /opt/firefox/firefox \
--display-port ":99.1"
args:
chdir: "/tmp/{{ tanaguru_download_link | basename | regex_replace('.tar.gz$', '') }}"
when: "tanaguru_app.stdout.find('/etc/tanaguru/') == -1"
register: tanaguru_install
tags:
- install
- install:base
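# Once tomcat7 has been restarted (next task), the web UI should answer at the
# configured URL, e.g. (illustrative only): curl -I http://localhost:8080/tanaguru/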
- name: Restart tomcat7
service:
name: tomcat7
state: restarted
when: tanaguru_install.changed
\ No newline at end of file
#!/bin/sh
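# xvfb.j2 -- template installed to /etc/init.d/xvfb by the playbook above.
# Starts a headless X server on display :99 (screen 1, 1024x768, 24-bit colour,
# TCP listener disabled) as the tomcat7 user so TanaGuru's Firefox instance has
# a display to render against.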
set -e
RUN_AS_USER=tomcat7
OPTS=":99 -screen 1 1024x768x24 -nolisten tcp"
XVFB_DIR=/usr/bin
PIDFILE=/var/run/xvfb
case $1 in
start)
start-stop-daemon --chuid $RUN_AS_USER -b --start --exec $XVFB_DIR/Xvfb --make-pidfile --pidfile $PIDFILE -- $OPTS &
;;
stop)
start-stop-daemon --stop --user $RUN_AS_USER --pidfile $PIDFILE
rm -f $PIDFILE
;;
restart)
if start-stop-daemon --test --stop --user $RUN_AS_USER --pidfile $PIDFILE >/dev/null; then
$0 stop
fi;
$0 start
;;
*)
echo "Usage: $0 (start|restart|stop)"
exit 1
;;
esac
exit 0
\ No newline at end of file
...@@ -21,20 +21,20 @@ ...@@ -21,20 +21,20 @@
# #
- name: Create clone of edx-platform - name: Create clone of edx-platform
git_2_0_1: > git_2_0_1:
repo=https://github.com/edx/edx-platform.git repo: "https://github.com/edx/edx-platform.git"
dest={{ test_build_server_repo_path }}/edx-platform-clone dest: "{{ test_build_server_repo_path }}/edx-platform-clone"
version={{ test_edx_platform_version }} version: "{{ test_edx_platform_version }}"
become_user: "{{ test_build_server_user }}" become_user: "{{ test_build_server_user }}"
- name: get xargs limit - name: get xargs limit
shell: "xargs --show-limits" shell: "xargs --show-limits"
- name: Copy test-development-environment.sh to somewhere the jenkins user can access it - name: Copy test-development-environment.sh to somewhere the jenkins user can access it
copy: > copy:
src=test-development-environment.sh src: test-development-environment.sh
dest="{{ test_build_server_repo_path }}" dest: "{{ test_build_server_repo_path }}"
mode=0755 mode: 0755
- name: Validate build environment - name: Validate build environment
shell: "bash test-development-environment.sh {{ item }}" shell: "bash test-development-environment.sh {{ item }}"
......
--- ---
- name: import the test courses from github - name: import the test courses from github
shell: > shell: "{{ demo_edxapp_venv_bin }}/python /edx/bin/manage.edxapp lms git_add_course --settings=aws \"{{ item.github_url }}\""
{{ demo_edxapp_venv_bin }}/python /edx/bin/manage.edxapp lms git_add_course --settings=aws "{{ item.github_url }}"
become_user: "{{ common_web_user }}" become_user: "{{ common_web_user }}"
when: item.install == True when: item.install == True
with_items: TESTCOURSES_EXPORTS with_items: TESTCOURSES_EXPORTS
- name: enroll test users in the testcourses - name: enroll test users in the testcourses
shell: > shell: "{{ demo_edxapp_venv_bin }}/python ./manage.py lms --settings=aws --service-variant lms enroll_user_in_course -e {{ item[0].email }} -c {{ item[1].course_id }}"
{{ demo_edxapp_venv_bin }}/python ./manage.py lms --settings=aws --service-variant lms enroll_user_in_course -e {{ item[0].email }} -c {{ item[1].course_id }} args:
chdir={{ demo_edxapp_code_dir }} chdir: "{{ demo_edxapp_code_dir }}"
become_user: "{{ common_web_user }}" become_user: "{{ common_web_user }}"
when: item[1].install == True when: item[1].install == True
with_nested: with_nested:
......