convert sudo_user to become_user in roles for Ansible 2.x

parent 411f0092
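For context: Ansible 2.x drops the old privilege-escalation keywords in favor of the `become` family (introduced in Ansible 1.9, with the `sudo`/`su` spellings deprecated and later removed). The rename is mechanical: `sudo: yes` becomes `become: yes` and `sudo_user: foo` becomes `become_user: foo`. A minimal before/after sketch of a task in the new syntax; the task name, `demo_requirements_file`, and `demo_user` are hypothetical, for illustration only:

    # Ansible 1.x spelling (deprecated):
    #   - name: install service requirements
    #     pip: requirements="{{ demo_requirements_file }}"
    #     sudo: yes
    #     sudo_user: "{{ demo_user }}"
    #
    # Ansible 2.x equivalent; become escalates via the configured
    # become_method (sudo by default):
    - name: install service requirements
      pip: requirements="{{ demo_requirements_file }}"
      become: yes                      # was: sudo: yes
      become_user: "{{ demo_user }}"   # was: sudo_user: "{{ demo_user }}"

The diff below applies this one-for-one rename across the aide, alton, analytics_api, analytics_pipeline, and ssh-handler roles.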
@@ -21,4 +21,4 @@
   command: >
     aideinit -y -f
     creates=/var/lib/aide/aide.db
-  sudo: yes
+  become: yes
@@ -18,7 +18,7 @@
   git_2_0_1: >
     dest="{{ alton_code_dir }}" repo="{{ alton_source_repo }}"
     version="{{ alton_version }}" accept_hostkey=yes
-  sudo_user: "{{ alton_user }}"
+  become_user: "{{ alton_user }}"
   register: alton_checkout
   notify: restart alton
@@ -28,7 +28,7 @@
     virtualenv="{{ alton_venv_dir }}"
     state=present
     extra_args="-i {{ COMMON_PYPI_MIRROR_URL }}"
-  sudo_user: "{{ alton_user }}"
+  become_user: "{{ alton_user }}"
   notify: restart alton

 - name: create the supervisor wrapper
@@ -44,7 +44,7 @@
     src=alton.conf.j2 dest="{{ supervisor_available_dir }}/alton.conf"
     owner="{{ supervisor_user }}"
     group="{{ supervisor_user }}"
-  sudo_user: "{{ supervisor_user }}"
+  become_user: "{{ supervisor_user }}"
   notify: restart alton

 - name: enable the supervisor config
@@ -54,7 +54,7 @@
     state=link
     force=yes
     mode=0644
-  sudo_user: "{{ supervisor_user }}"
+  become_user: "{{ supervisor_user }}"
   when: not disable_edx_services
   notify: restart alton
@@ -19,7 +19,7 @@
 # Example play:
 #   - name: Deploy Analytics API
 #     hosts: all
-#     sudo: True
+#     become: True
 #     gather_facts: True
 #     vars:
 #       ENABLE_DATADOG: False
@@ -47,7 +47,7 @@
   template: >
     src=edx/app/analytics_api/analytics_api_gunicorn.py.j2
     dest={{ analytics_api_home }}/analytics_api_gunicorn.py
-  sudo_user: "{{ analytics_api_user }}"
+  become_user: "{{ analytics_api_user }}"
   tags:
     - install
     - install:configuration
@@ -57,7 +57,7 @@
     requirements="{{ analytics_api_requirements_base }}/{{ item }}"
     virtualenv="{{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}"
     state=present
-  sudo_user: "{{ analytics_api_user }}"
+  become_user: "{{ analytics_api_user }}"
   with_items: analytics_api_requirements
   tags:
     - install
@@ -69,7 +69,7 @@
     DB_MIGRATION_USER='{{ COMMON_MYSQL_MIGRATE_USER }}'
     DB_MIGRATION_PASS='{{ COMMON_MYSQL_MIGRATE_PASS }}'
     {{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}/bin/python ./manage.py migrate --noinput
-  sudo_user: "{{ analytics_api_user }}"
+  become_user: "{{ analytics_api_user }}"
   environment: "{{ analytics_api_environment }}"
   when: migrate_db is defined and migrate_db|lower == "yes"
   tags:
@@ -80,7 +80,7 @@
   shell: >
     chdir={{ analytics_api_code_dir }}
     {{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}/bin/python manage.py collectstatic --noinput
-  sudo_user: "{{ analytics_api_user }}"
+  become_user: "{{ analytics_api_user }}"
   environment: "{{ analytics_api_environment }}"
   tags:
     - assets
@@ -90,7 +90,7 @@
   shell: >
     chdir={{ analytics_api_code_dir }}
     {{ analytics_api_home }}/venvs/{{ analytics_api_service_name }}/bin/python manage.py set_api_key {{ item.key }} {{ item.value }}
-  sudo_user: "{{ analytics_api_user }}"
+  become_user: "{{ analytics_api_user }}"
   environment: "{{ analytics_api_environment }}"
   with_dict: ANALYTICS_API_USERS
   tags:
@@ -164,6 +164,6 @@
     config={{ supervisor_cfg }}
     name={{ analytics_api_service_name }}
   when: not disable_edx_services
-  sudo_user: "{{ supervisor_service_user }}"
+  become_user: "{{ supervisor_service_user }}"
   tags:
     - manage:start
@@ -29,7 +29,7 @@
 #
 # - name: Deploy all dependencies of edx-analytics-pipeline to the node
 #   hosts: all
-#   sudo: True
+#   become: True
 #   gather_facts: True
 #   roles:
 #     - analytics_pipeline
@@ -136,7 +136,7 @@
 - name: ensure package dir exists in HDFS
   shell: >
     . {{ HADOOP_COMMON_CONF_DIR }}/hadoop-env.sh && hdfs dfs -mkdir -p /edx-analytics-pipeline/packages/
-  sudo_user: "{{ hadoop_common_user }}"
+  become_user: "{{ hadoop_common_user }}"
   tags:
     - install
     - install:app-requirements
@@ -144,7 +144,7 @@
 - name: ensure util library is in HDFS
   shell: >
     . {{ HADOOP_COMMON_CONF_DIR }}/hadoop-env.sh && hdfs dfs -put -f {{ HADOOP_COMMON_USER_HOME }}/lib/edx-analytics-hadoop-util.jar /edx-analytics-pipeline/packages/
-  sudo_user: "{{ hadoop_common_user }}"
+  become_user: "{{ hadoop_common_user }}"
   tags:
     - install
     - install:app-requirements
@@ -152,7 +152,7 @@
 - name: ensure the data directory exists
   shell: >
     . {{ HADOOP_COMMON_CONF_DIR }}/hadoop-env.sh && hdfs dfs -mkdir -p {{ ANALYTICS_PIPELINE_HDFS_DATA_DIR }}
-  sudo_user: "{{ hadoop_common_user }}"
+  become_user: "{{ hadoop_common_user }}"
   tags:
     - install
     - install:base
@@ -142,12 +142,12 @@
 - name: restart ssh
   service: name=ssh state=restarted
-  sudo: True
+  become: True
   when: sshd_config.changed and ansible_distribution in common_debian_variants

 - name: restart ssh
   service: name=sshd state=restarted
-  sudo: True
+  become: True
   when: sshd_config.changed and ansible_distribution in common_redhat_variants