Use YAML dictionary syntax for the analytics pipeline tasks

parent 54e89f13
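The tasks in this role are converted from Ansible's free-form `key=value` arguments (folded with `>`) to native YAML dictionaries, with Jinja2 expressions and file modes quoted. As a rough sketch of the pattern — the task name and path below are made up purely for illustration and do not appear in this file:

    # before: free-form key=value arguments folded with ">"
    - name: example directory created          # hypothetical task
      file: >
        path=/tmp/example mode=0755 state=directory

    # after: native YAML dictionary arguments
    - name: Example directory created          # hypothetical task
      file:
        path: /tmp/example
        state: directory
        mode: "0755"

Quoting the mode as a string (e.g. "0755") keeps YAML from reading the leading zero as an octal integer literal, and the dictionary form is easier to diff and lint than one long folded string.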
@@ -37,159 +37,166 @@
 # ansible-playbook -i 'localhost,' ./analytics_pipeline.yml -e@/ansible/vars/deployment.yml -e@/ansible/vars/env-deployment.yml
 #
-- name: create config directory
-  file: >
-    path="{{ ANALYTICS_PIPELINE_CONFIG_DIR }}"
-    mode=0755 owner={{ hadoop_common_user }} group={{ hadoop_common_group }}
-    state=directory
+- name: Create config directory
+  file:
+    path: "{{ ANALYTICS_PIPELINE_CONFIG_DIR }}"
+    state: directory
+    owner: "{{ hadoop_common_user }}"
+    group: "{{ hadoop_common_group }}"
+    mode: "0755"
   tags:
     - install
     - install:configuration

-- name: store output database credentials for analytics pipeline
-  copy: >
-    content="{{ ANALYTICS_PIPELINE_OUTPUT_DATABASE | to_json }}"
-    dest={{ COMMON_CFG_DIR }}/edx-analytics-pipeline/output.json
-    mode=0644 owner={{ hadoop_common_user }} group={{ hadoop_common_group }}
+- name: Store output database credentials for analytics pipeline
+  copy:
+    content: "{{ ANALYTICS_PIPELINE_OUTPUT_DATABASE | to_json }}"
+    dest: "{{ COMMON_CFG_DIR }}/edx-analytics-pipeline/output.json"
+    owner: "{{ hadoop_common_user }}"
+    group: "{{ hadoop_common_group }}"
+    mode: "0644"
   tags:
     - install
     - install:configuration

-- name: store input database credentials for analytics pipeline
-  copy: >
-    content="{{ ANALYTICS_PIPELINE_INPUT_DATABASE | to_json }}"
-    dest={{ COMMON_CFG_DIR }}/edx-analytics-pipeline/input.json
-    mode=0644 owner={{ hadoop_common_user }} group={{ hadoop_common_group }}
+- name: Store input database credentials for analytics pipeline
+  copy:
+    content: "{{ ANALYTICS_PIPELINE_INPUT_DATABASE | to_json }}"
+    dest: "{{ COMMON_CFG_DIR }}/edx-analytics-pipeline/input.json"
+    owner: "{{ hadoop_common_user }}"
+    group: "{{ hadoop_common_group }}"
+    mode: "0644"
   tags:
     - install
     - install:configuration

 - name: luigi configuration directory created
-  file: >
-    path=/etc/luigi
-    state=directory
-    mode=755
+  file:
+    path: /etc/luigi
+    state: directory
+    mode: "0755"
   tags:
     - install
     - install:configuration

 - name: luigi configuration file written
-  template: >
-    src=client.cfg.j2
-    dest=/etc/luigi/client.cfg
-    mode=644
+  template:
+    src: client.cfg.j2
+    dest: /etc/luigi/client.cfg
+    mode: "0644"
   tags:
     - install
     - install:configuration

-- name: util library source checked out
-  git_2_0_1: >
-    dest={{ analytics_pipeline_util_library.path }} repo={{ analytics_pipeline_util_library.repo }}
-    version={{ analytics_pipeline_util_library.version }}
+- name: Util library source checked out
+  git_2_0_1:
+    repo: "{{ analytics_pipeline_util_library.repo }}"
+    dest: "{{ analytics_pipeline_util_library.path }}"
+    version: "{{ analytics_pipeline_util_library.version }}"
   tags:
     - install
     - install:code

 - name: lib directory created
-  file: >
-    path={{ HADOOP_COMMON_USER_HOME }}/lib
-    owner={{ hadoop_common_user }} group={{ hadoop_common_group }} state=directory
+  file:
+    path: "{{ HADOOP_COMMON_USER_HOME }}/lib"
+    owner: "{{ hadoop_common_user }}"
+    group: "{{ hadoop_common_group }}"
+    state: directory
   tags:
     - install
     - install:app-requirements

-- name: check if the util library needs to be built
-  stat: >
-    path={{ HADOOP_COMMON_USER_HOME }}/lib/edx-analytics-hadoop-util.jar
+- name: Check if the util library needs to be built
+  stat:
+    path: "{{ HADOOP_COMMON_USER_HOME }}/lib/edx-analytics-hadoop-util.jar"
   register: util_lib_built
   tags:
     - install
     - install:app-requirements

-- name: util library built
+- name: Util library built
   shell: >
-    chdir={{ analytics_pipeline_util_library.path }}
     {{ hadoop_common_java_home }}/bin/javac -cp `{{ HADOOP_COMMON_HOME }}/bin/hadoop classpath` org/edx/hadoop/input/ManifestTextInputFormat.java &&
     {{ hadoop_common_java_home }}/bin/jar cf {{ HADOOP_COMMON_USER_HOME }}/lib/edx-analytics-hadoop-util.jar org/edx/hadoop/input/ManifestTextInputFormat.class &&
     chown {{ hadoop_common_user }}:{{ hadoop_common_group }} {{ HADOOP_COMMON_USER_HOME }}/lib/edx-analytics-hadoop-util.jar
+  args:
+    chdir: "{{ analytics_pipeline_util_library.path }}"
   when: not util_lib_built.stat.exists
   tags:
     - install
     - install:app-requirements

-- name: ensure hdfs services are started
-  service: >
-    name=hdfs
-    state=started
+- name: Ensure hdfs services are started
+  service:
+    name: hdfs
+    state: started
   tags:
     - manage
     - manage:start

-- name: ensure map reduce services are started
-  service: >
-    name=yarn
-    state=started
+- name: Ensure map reduce services are started
+  service:
+    name: yarn
+    state: started
   tags:
     - manage
     - manage:start

-- name: ensure package dir exists in HDFS
-  shell: >
-    . {{ HADOOP_COMMON_CONF_DIR }}/hadoop-env.sh && hdfs dfs -mkdir -p /edx-analytics-pipeline/packages/
+- name: Ensure package dir exists in HDFS
+  shell: ". {{ HADOOP_COMMON_CONF_DIR }}/hadoop-env.sh && hdfs dfs -mkdir -p /edx-analytics-pipeline/packages/"
   become_user: "{{ hadoop_common_user }}"
   tags:
     - install
     - install:app-requirements

-- name: ensure util library is in HDFS
-  shell: >
-    . {{ HADOOP_COMMON_CONF_DIR }}/hadoop-env.sh && hdfs dfs -put -f {{ HADOOP_COMMON_USER_HOME }}/lib/edx-analytics-hadoop-util.jar /edx-analytics-pipeline/packages/
+- name: Ensure util library is in HDFS
+  shell: ". {{ HADOOP_COMMON_CONF_DIR }}/hadoop-env.sh && hdfs dfs -put -f {{ HADOOP_COMMON_USER_HOME }}/lib/edx-analytics-hadoop-util.jar /edx-analytics-pipeline/packages/"
   become_user: "{{ hadoop_common_user }}"
   tags:
     - install
     - install:app-requirements

-- name: ensure the data directory exists
-  shell: >
-    . {{ HADOOP_COMMON_CONF_DIR }}/hadoop-env.sh && hdfs dfs -mkdir -p {{ ANALYTICS_PIPELINE_HDFS_DATA_DIR }}
+- name: Ensure the data directory exists
+  shell: ". {{ HADOOP_COMMON_CONF_DIR }}/hadoop-env.sh && hdfs dfs -mkdir -p {{ ANALYTICS_PIPELINE_HDFS_DATA_DIR }}"
   become_user: "{{ hadoop_common_user }}"
   tags:
     - install
     - install:base

-- name: ensure tracking log file can be read
-  file: >
-    path={{ COMMON_LOG_DIR }}/tracking/tracking.log
-    mode=0644
+- name: Ensure tracking log file can be read
+  file:
+    path: "{{ COMMON_LOG_DIR }}/tracking/tracking.log"
+    mode: "0644"
   ignore_errors: yes
   tags:
     - install
     - install:configuration

-- name: cron job syncs tracking log file to hdfs
-  cron: >
-    user={{ hadoop_common_user }}
-    name="Sync tracking log to HDFS"
-    job="{{ HADOOP_COMMON_HOME }}/bin/hdfs dfs -put -f {{ COMMON_LOG_DIR }}/tracking/tracking.log {{ ANALYTICS_PIPELINE_HDFS_DATA_DIR }}/tracking.log"
+- name: Cron job syncs tracking log file to hdfs
+  cron:
+    user: "{{ hadoop_common_user }}"
+    name: "Sync tracking log to HDFS"
+    job: "{{ HADOOP_COMMON_HOME }}/bin/hdfs dfs -put -f {{ COMMON_LOG_DIR }}/tracking/tracking.log {{ ANALYTICS_PIPELINE_HDFS_DATA_DIR }}/tracking.log"
   tags:
     - install
     - install:configuration

 - name: store configuration for acceptance tests
-  copy: >
-    src=acceptance.json
-    dest=/var/tmp/acceptance.json
-    mode=644
+  copy:
+    src: acceptance.json
+    dest: /var/tmp/acceptance.json
+    mode: "0644"
   tags:
     - install
     - install:configuration

-- name: grant access to table storing test data in output database
-  mysql_user: >
-    user={{ ANALYTICS_PIPELINE_OUTPUT_DATABASE.username }}
-    password={{ ANALYTICS_PIPELINE_OUTPUT_DATABASE.password }}
-    priv=acceptance%.*:ALL
-    append_privs=yes
+- name: Grant access to table storing test data in output database
+  mysql_user:
+    user: "{{ ANALYTICS_PIPELINE_OUTPUT_DATABASE.username }}"
+    password: "{{ ANALYTICS_PIPELINE_OUTPUT_DATABASE.password }}"
+    priv: 'acceptance%.*:ALL'
+    append_privs: yes
   tags:
     - install
     - install:configuration