Commit cccc7d58 by Braden MacDonald, committed by GitHub

Merge pull request #2723 from open-craft/omar/openstack

Add openstack role
parents b6ea11cc 924a7a01
- Role: common
  - Renamed `COMMON_AWS_SYNC` to `COMMON_OBJECT_STORE_LOG_SYNC`
  - Renamed `COMMON_AWS_SYNC_BUCKET` to `COMMON_OBJECT_STORE_LOG_SYNC_BUCKET`
  - Renamed `COMMON_AWS_S3_SYNC_SCRIPT` to `COMMON_OBJECT_STORE_LOG_SYNC_SCRIPT`
  - Added `COMMON_OBJECT_STORE_LOG_SYNC_PREFIX`. Default: `logs/tracking/`
- Role: aws
  - Removed `AWS_S3_LOGS`
  - Added the `vhost` role as a dependency
- Role: edxapp
  - Added `EDXAPP_SWIFT_USERNAME`
  - Added `EDXAPP_SWIFT_KEY`
  - Added `EDXAPP_SWIFT_TENANT_ID`
  - Added `EDXAPP_SWIFT_TENANT_NAME`
  - Added `EDXAPP_SWIFT_AUTH_URL`
  - Added `EDXAPP_SWIFT_AUTH_VERSION`
  - Added `EDXAPP_SWIFT_REGION_NAME`
  - Added `EDXAPP_SWIFT_USE_TEMP_URLS`
  - Added `EDXAPP_SWIFT_TEMP_URL_KEY`
  - Added `EDXAPP_SWIFT_TEMP_URL_DURATION`
  - Added `EDXAPP_SETTINGS` to allow using a settings file other than `aws.py`. Default: `aws`
  - Renamed `ENABLE_S3_GRADE_DOWNLOADS` to `ENABLE_GRADE_DOWNLOADS`
  - Replaced `EDXAPP_GRADE_STORAGE_TYPE`, `EDXAPP_GRADE_BUCKET`, and `EDXAPP_GRADE_ROOT_PATH` with `EDXAPP_GRADE_STORAGE_CLASS` and `EDXAPP_GRADE_STORAGE_KWARGS` (a combined example follows this list)
- Role: openstack
  - Added role
- Role: vhost
  - Added as a dependency for the aws and openstack roles. Handles common functionality for setting up VM hosts
- Role: xqueue
  - Added `XQUEUE_SETTINGS` to specify which settings file to use. Default: `aws_settings`
  - Renamed `XQUEUE_S3_BUCKET` to `XQUEUE_UPLOAD_BUCKET`
  - Renamed `XQUEUE_S3_PATH_PREFIX` to `XQUEUE_UPLOAD_PATH_PREFIX`
- Role: discovery
  - Course Discovery JWT configuration now takes a list of issuers instead of a single issuer. This change is not backward compatible with older versions of course discovery.
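The renamed and added variables above are ordinary Ansible variables, so a deployment can opt into object-store log sync and Swift storage from a single vars file. A minimal sketch follows; every value, the `openstack` settings-module name for edxapp, and the `swift.storage.SwiftStorage` class path are illustrative assumptions rather than values defined by this change (only `openstack_settings` for xqueue appears in the diff itself):

```yaml
# Hypothetical group_vars / extra-vars overrides; all values are placeholders.
COMMON_OBJECT_STORE_LOG_SYNC: true
COMMON_OBJECT_STORE_LOG_SYNC_BUCKET: "tracking-logs"
COMMON_OBJECT_STORE_LOG_SYNC_PREFIX: "logs/tracking/"

# Switch edxapp and xqueue away from the default aws settings modules.
# "openstack" for edxapp is an assumption; "openstack_settings" is the value
# the xqueue tasks check for.
EDXAPP_SETTINGS: "openstack"
XQUEUE_SETTINGS: "openstack_settings"

# Swift credentials consumed by the new EDXAPP_SWIFT_* variables.
EDXAPP_SWIFT_USERNAME: "edxapp-swift-user"
EDXAPP_SWIFT_KEY: "swift-secret"
EDXAPP_SWIFT_TENANT_NAME: "edx"
EDXAPP_SWIFT_AUTH_URL: "https://keystone.example.com:5000/v2.0"
EDXAPP_SWIFT_AUTH_VERSION: "2"
EDXAPP_SWIFT_REGION_NAME: "RegionOne"
EDXAPP_SWIFT_USE_TEMP_URLS: true
EDXAPP_SWIFT_TEMP_URL_KEY: "temp-url-secret"
EDXAPP_SWIFT_TEMP_URL_DURATION: 1800  # seconds

# Grade downloads now take a storage class plus kwargs instead of
# EDXAPP_GRADE_STORAGE_TYPE / EDXAPP_GRADE_BUCKET / EDXAPP_GRADE_ROOT_PATH.
# The class path shown is one example of a Swift-backed Django storage backend.
EDXAPP_GRADE_STORAGE_CLASS: "swift.storage.SwiftStorage"
EDXAPP_GRADE_STORAGE_KWARGS:
  container_name: "edx-grades"
```

With the sync flag enabled, the logrotate `lastaction` hook in this change invokes `COMMON_OBJECT_STORE_LOG_SYNC_SCRIPT`, passing the bucket and prefix above as its `-b` and `-p` arguments.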
......
......@@ -10,7 +10,7 @@
- name: migrate lms
shell: >
chdir={{ edxapp_code_dir }}
python manage.py lms migrate --database {{ item }} --noinput {{ db_dry_run }} --settings=aws_migrate
python manage.py lms migrate --database {{ item }} --noinput {{ db_dry_run }} --settings=aws
environment:
DB_MIGRATION_USER: "{{ COMMON_MYSQL_MIGRATE_USER }}"
DB_MIGRATION_PASS: "{{ COMMON_MYSQL_MIGRATE_PASS }}"
......@@ -23,7 +23,7 @@
- name: migrate cms
shell: >
chdir={{ edxapp_code_dir }}
python manage.py cms migrate --database {{ item }} --noinput {{ db_dry_run }} --settings=aws_migrate
python manage.py cms migrate --database {{ item }} --noinput {{ db_dry_run }} --settings=aws
environment:
DB_MIGRATION_USER: "{{ COMMON_MYSQL_MIGRATE_USER }}"
DB_MIGRATION_PASS: "{{ COMMON_MYSQL_MIGRATE_PASS }}"
......
......@@ -38,7 +38,7 @@
- "edx-services"
# This catches the case where tracking.log is 0b
- name: Sync again
command: /edx/bin/send-logs-to-s3 -d "{{ COMMON_LOG_DIR }}/tracking/*" -b "{{ COMMON_AWS_SYNC_BUCKET }}/logs/tracking"
command: /edx/bin/send-logs-to-s3 -d "{{ COMMON_LOG_DIR }}/tracking/*" -b "{{ COMMON_OBJECT_STORE_LOG_SYNC_BUCKET }}/logs/tracking"
- name: Run minos verification
hosts: "{{TARGET}}"
......
......@@ -10,14 +10,9 @@
##
# Defaults for role aws
#
#
# Rotate logs to S3
# Only for when edX is running in AWS since it organizes
# logs by security group.
# !! The buckets defined below MUST exist prior to enabling !!
# this feature and the instance IAM role must have write permissions
# to the buckets, or you must specify the access and secret keys below.
AWS_S3_LOGS: false
VHOST_NAME: aws
# If there are any issues with the s3 sync an error
# log will be sent to the following address.
# This relies on your server being able to send mail
......@@ -28,30 +23,8 @@ AWS_S3_LOGS_FROM_EMAIL: dummy@example.com
AWS_S3_LOGS_ACCESS_KEY_ID: ""
AWS_S3_LOGS_SECRET_KEY: ""
#
# vars are namespace with the module name.
#
aws_role_name: aws
aws_dirs:
home:
path: "{{ COMMON_APP_DIR }}/{{ aws_role_name }}"
owner: "root"
group: "root"
mode: "0755"
logs:
path: "{{ COMMON_LOG_DIR }}/{{ aws_role_name }}"
owner: "syslog"
group: "syslog"
mode: "0650"
data:
path: "{{ COMMON_DATA_DIR }}/{{ aws_role_name }}"
owner: "root"
group: "root"
mode: "0700"
aws_s3_sync_script: "{{ aws_dirs.home.path }}/send-logs-to-s3"
aws_s3_logfile: "{{ aws_dirs.logs.path }}/s3-log-sync.log"
aws_s3_sync_script: "{{ vhost_dirs.home.path }}/send-logs-to-s3"
aws_s3_logfile: "{{ vhost_dirs.logs.path }}/s3-log-sync.log"
aws_region: "us-east-1"
# default path to the aws binary
aws_s3cmd: "/usr/local/bin/s3cmd"
......
......@@ -10,13 +10,5 @@
##
# Role includes for role aws
#
# Example:
#
# dependencies:
# - {
# role: my_role
# my_role_var0: "foo"
# my_role_var1: "bar"
# }
dependencies:
- common
- vhost
......@@ -26,7 +26,7 @@
#
- name: Gather ec2 facts for use in other roles
action: ec2_facts
tags:
tags:
- deploy
- name: Set the MTU to 1500 temporarily
......@@ -51,15 +51,6 @@
# End dealing with Jumbo frames issue in mixed MTU deployements in AWS
#
- name: Create all service directories
file:
path: "{{ item.value.path }}"
state: directory
owner: "{{ item.value.owner }}"
group: "{{ item.value.group }}"
mode: "{{ item.value.mode }}"
with_dict: "{{ aws_dirs }}"
- name: Install system packages
apt:
name: "{{ item }}"
......@@ -82,23 +73,14 @@
mode: 0755
owner: root
group: root
when: AWS_S3_LOGS
when: COMMON_OBJECT_STORE_LOG_SYNC
- name: Create symlink for s3 log sync script
file:
state: link
src: "{{ aws_s3_sync_script }}"
dest: "{{ COMMON_BIN_DIR }}/{{ aws_s3_sync_script|basename }}"
when: AWS_S3_LOGS
- name: Force logrotate on supervisor stop
template:
src: etc/init/sync-on-stop.conf.j2
dest: /etc/init/sync-on-stop.conf
owner: root
group: root
mode: 0644
when: AWS_S3_LOGS
dest: "{{ COMMON_OBJECT_STORE_LOG_SYNC_SCRIPT }}"
when: COMMON_OBJECT_STORE_LOG_SYNC
# update the ssh motd on Ubuntu
# Remove some of the default motd display on ubuntu
......@@ -115,49 +97,3 @@
- "/usr/share/landscape/landscape-sysinfo.wrapper"
- "/etc/update-motd.d/51-cloudguest"
- "/etc/update-motd.d/91-release-upgrade"
- name: Update /etc/dhcp/dhclient.conf
template:
src: etc/dhcp/dhclient.conf.j2
dest: /etc/dhcp/dhclient.conf
when: COMMON_CUSTOM_DHCLIENT_CONFIG
- name: Copy the MOTD template in place
template:
dest: "{{ item.dest }}"
src: "{{ item.src }}"
owner: root
group: root
mode: "{{ item.mode | default(644) }}"
with_items:
- { src: 'etc/motd.tail.j2', dest: '/etc/motd.tail', mode: '755' }
- name: Update sshd logging to VERBOSE
lineinfile:
dest: /etc/ssh/sshd_config
state: present
regexp: "^LogLevel .*"
line: "LogLevel VERBOSE"
register: sshd_config
- name: Update sshd logging to VERBOSE
lineinfile:
dest: /etc/ssh/sshd_config
state: present
regexp: "^PasswordAuthentication .*"
line: "PasswordAuthentication {{ COMMON_SSH_PASSWORD_AUTH }}"
register: sshd_config
- name: Restart ssh
service:
name: ssh
state: restarted
become: True
when: sshd_config.changed and ansible_distribution in common_debian_variants
- name: Restart ssh
service:
name: sshd
state: restarted
become: True
when: sshd_config.changed and ansible_distribution in common_redhat_variants
---
aws_profile: !!null
s3_bucket: 'edx-prod-edx'
bucket_path: 'test'
voters:
- BellwetherVoter:
config:
- ProccessQuienscenceVoter:
config:
process_name: 'gunicorn'
- TrackingLogVoter:
config:
aws_profile: !!null
s3_bucket: 'edx-prod-edx'
bucket_path: 'test'
local_directory: '{{ COMMON_LOG_DIR }}'
*******************************************************************
* _ __ __ *
* _ _| |\ \/ / This system is for the use of authorized *
* / -_) _` | > < users only. Usage of this system may be *
* \___\__,_|/_/\_\ monitored and recorded by system personnel. *
* *
* Anyone using this system expressly consents to such monitoring *
* and is advised that if such monitoring reveals possible *
* evidence of criminal activity, system personnel may provide the *
* evidence from such monitoring to law enforcement officials. *
* *
*******************************************************************
# {{ ansible_managed }}
#
# Changes from the default Ubuntu ssh config:
# - LogLevel set to VERBOSE
#
# What ports, IPs and protocols we listen for
Port 22
# Use these options to restrict which interfaces/protocols sshd will bind to
#ListenAddress ::
#ListenAddress 0.0.0.0
Protocol 2
# HostKeys for protocol version 2
HostKey /etc/ssh/ssh_host_rsa_key
HostKey /etc/ssh/ssh_host_dsa_key
HostKey /etc/ssh/ssh_host_ecdsa_key
#Privilege Separation is turned on for security
UsePrivilegeSeparation yes
# Lifetime and size of ephemeral version 1 server key
KeyRegenerationInterval 3600
ServerKeyBits 768
# Logging
SyslogFacility AUTH
LogLevel VERBOSE
# Authentication:
LoginGraceTime 120
PermitRootLogin yes
StrictModes yes
RSAAuthentication yes
PubkeyAuthentication yes
#AuthorizedKeysFile %h/.ssh/authorized_keys
# Don't read the user's ~/.rhosts and ~/.shosts files
IgnoreRhosts yes
# For this to work you will also need host keys in /etc/ssh_known_hosts
RhostsRSAAuthentication no
# similar for protocol version 2
HostbasedAuthentication no
# Uncomment if you don't trust ~/.ssh/known_hosts for RhostsRSAAuthentication
#IgnoreUserKnownHosts yes
# To enable empty passwords, change to yes (NOT RECOMMENDED)
PermitEmptyPasswords no
# Change to yes to enable challenge-response passwords (beware issues with
# some PAM modules and threads)
ChallengeResponseAuthentication no
# Change to no to disable tunnelled clear text passwords
PasswordAuthentication {{ COMMON_SSH_PASSWORD_AUTH }}
# Kerberos options
#KerberosAuthentication no
#KerberosGetAFSToken no
#KerberosOrLocalPasswd yes
#KerberosTicketCleanup yes
# GSSAPI options
#GSSAPIAuthentication no
#GSSAPICleanupCredentials yes
X11Forwarding yes
X11DisplayOffset 10
PrintMotd no
PrintLastLog yes
TCPKeepAlive yes
#UseLogin no
#MaxStartups 10:30:60
#Banner /etc/issue
# Allow client to pass locale environment variables
AcceptEnv LANG LC_*
Subsystem sftp /usr/lib/openssh/sftp-server
# Set this to 'yes' to enable PAM authentication, account processing,
# and session processing. If this is enabled, PAM authentication will
# be allowed through the ChallengeResponseAuthentication and
# PasswordAuthentication. Depending on your PAM configuration,
# PAM authentication via ChallengeResponseAuthentication may bypass
# the setting of "PermitRootLogin without-password".
# If you just want the PAM account and session checks to run without
# PAM authentication, then enable this but set PasswordAuthentication
# and ChallengeResponseAuthentication to 'no'.
UsePAM yes
......@@ -43,10 +43,11 @@ usage() {
-h this
-d directory to sync
-b bucket path to sync to
-p name prefix
EO
}
while getopts "vhnb:d:" opt; do
while getopts "vhnb:d:p:" opt; do
case $opt in
v)
set -x
......@@ -64,12 +65,15 @@ while getopts "vhnb:d:" opt; do
directory=$OPTARG
;;
b)
bucket_path=$OPTARG
bucket=$OPTARG
;;
p)
prefix=$OPTARG
;;
esac
done
if [[ -z $bucket_path || -z $directory ]]; then
if [[ -z $bucket || -z $directory ]]; then
echo "ERROR: You must provide a directory and a bucket to sync!"
usage
exit 1
......@@ -123,4 +127,4 @@ auth_opts=""
{% endif %}
s3_path="${2}/$sec_grp/"
$noop {{ aws_s3cmd }} $auth_opts --multipart-chunk-size-mb 5120 --disable-multipart sync $directory "s3://${bucket_path}/${sec_grp}/${instance_id}-${ip}/"
$noop {{ aws_s3cmd }} $auth_opts --multipart-chunk-size-mb 5120 --disable-multipart sync $directory/* "s3://${bucket}/${prefix}${sec_grp}/${instance_id}-${ip}/"
......@@ -14,8 +14,8 @@
/usr/bin/killall -HUP rsyslogd
endscript
lastaction
{% if COMMON_AWS_SYNC -%}
{{ COMMON_AWS_S3_SYNC_SCRIPT }} -d "{{ COMMON_LOG_DIR }}/tracking/*" -b "{{ COMMON_AWS_SYNC_BUCKET }}/logs/tracking"
{% if COMMON_OBJECT_STORE_LOG_SYNC -%}
{{ COMMON_OBJECT_STORE_LOG_SYNC_SCRIPT }} -d "{{ COMMON_LOG_DIR }}/tracking" -b "{{ COMMON_OBJECT_STORE_LOG_SYNC_BUCKET }}" -p "{{ COMMON_OBJECT_STORE_LOG_SYNC_PREFIX }}"
{% endif -%}
endscript
}
......@@ -13,10 +13,12 @@ COMMON_BASIC_AUTH_EXCEPTIONS:
# Turn on syncing logs on rotation for edx
# application and tracking logs, must also
# have the AWS role installed
COMMON_AWS_SYNC: False
COMMON_AWS_SYNC_BUCKET: "edx-{{ COMMON_ENVIRONMENT }}-{{ COMMON_DEPLOYMENT }}"
COMMON_AWS_S3_SYNC_SCRIPT: "{{ COMMON_BIN_DIR }}/send-logs-to-s3"
# have the aws or openstack role installed
COMMON_OBJECT_STORE_LOG_SYNC: False
COMMON_OBJECT_STORE_LOG_SYNC_BUCKET: "edx-{{ COMMON_ENVIRONMENT }}-{{ COMMON_DEPLOYMENT }}"
COMMON_OBJECT_STORE_LOG_SYNC_PREFIX: "logs/tracking/"
COMMON_OBJECT_STORE_LOG_SYNC_SCRIPT: "{{ COMMON_BIN_DIR }}/send-logs-to-object-store"
COMMON_BASE_DIR: /edx
COMMON_DATA_DIR: "{{ COMMON_BASE_DIR}}/var"
COMMON_APP_DIR: "{{ COMMON_BASE_DIR}}/app"
......@@ -66,7 +68,6 @@ COMMON_CUSTOM_DHCLIENT_CONFIG: false
# uncomment and specifity your domains.
# COMMON_DHCLIENT_DNS_SEARCH: ["ec2.internal","example.com"]
COMMON_MOTD_TEMPLATE: "motd.tail.j2"
COMMON_SSH_PASSWORD_AUTH: "no"
......
......@@ -45,6 +45,16 @@ EDXAPP_AWS_SECRET_ACCESS_KEY: "None"
EDXAPP_AWS_QUERYSTRING_AUTH: false
EDXAPP_AWS_STORAGE_BUCKET_NAME: "SET-ME-PLEASE (ex. bucket-name)"
EDXAPP_AWS_S3_CUSTOM_DOMAIN: "SET-ME-PLEASE (ex. bucket-name.s3.amazonaws.com)"
EDXAPP_SWIFT_USERNAME: "None"
EDXAPP_SWIFT_KEY: "None"
EDXAPP_SWIFT_TENANT_ID: "None"
EDXAPP_SWIFT_TENANT_NAME: "None"
EDXAPP_SWIFT_AUTH_URL: "None"
EDXAPP_SWIFT_AUTH_VERSION: "None"
EDXAPP_SWIFT_REGION_NAME: "None"
EDXAPP_SWIFT_USE_TEMP_URLS: false
EDXAPP_SWIFT_TEMP_URL_KEY: "None"
EDXAPP_SWIFT_TEMP_URL_DURATION: 1800 # seconds
EDXAPP_DEFAULT_FILE_STORAGE: "django.core.files.storage.FileSystemStorage"
EDXAPP_XQUEUE_BASIC_AUTH: [ "{{ COMMON_HTPASSWD_USER }}", "{{ COMMON_HTPASSWD_PASS }}" ]
EDXAPP_XQUEUE_DJANGO_AUTH:
......@@ -86,8 +96,10 @@ EDXAPP_ELASTIC_SEARCH_CONFIG:
- host: "localhost"
port: 9200
EDXAPP_LMS_ENV: 'lms.envs.aws'
EDXAPP_CMS_ENV: 'cms.envs.aws'
EDXAPP_SETTINGS: 'aws'
EDXAPP_LMS_ENV: 'lms.envs.{{ EDXAPP_SETTINGS }}'
EDXAPP_CMS_ENV: 'cms.envs.{{ EDXAPP_SETTINGS }}'
EDXAPP_EMAIL_BACKEND: 'django.core.mail.backends.smtp.EmailBackend'
EDXAPP_EMAIL_HOST: 'localhost'
......@@ -208,7 +220,7 @@ EDXAPP_FEATURES:
ENABLE_DISCUSSION_SERVICE: true
ENABLE_INSTRUCTOR_ANALYTICS: false
PREVIEW_LMS_BASE: "{{ EDXAPP_PREVIEW_LMS_BASE }}"
ENABLE_S3_GRADE_DOWNLOADS: true
ENABLE_GRADE_DOWNLOADS: true
USE_CUSTOM_THEME: "{{ edxapp_use_custom_theme }}"
ENABLE_MKTG_SITE: "{{ EDXAPP_ENABLE_MKTG_SITE }}"
AUTOMATIC_AUTH_FOR_TESTING: "{{ EDXAPP_ENABLE_AUTO_AUTH }}"
......@@ -307,9 +319,10 @@ EDXAPP_EDXMKTG_USER_INFO_COOKIE_NAME: "edx-user-info"
EDXAPP_STATIC_URL_BASE: "/static/"
# Settings for Grade downloads
EDXAPP_GRADE_STORAGE_TYPE: 'localfs'
EDXAPP_GRADE_BUCKET: 'edx-grades'
EDXAPP_GRADE_ROOT_PATH: '/tmp/edx-s3/grades'
EDXAPP_GRADE_STORAGE_CLASS: 'django.core.files.storage.FileSystemStorage'
EDXAPP_GRADE_STORAGE_KWARGS:
location: /tmp/edx-s3/grades
# Credit card processor
# These are the same defaults set in common.py
EDXAPP_CC_PROCESSOR_NAME: "CyberSource2"
......@@ -654,7 +667,7 @@ edxapp_environment:
# the settings module for edxapp, DJANGO_SETTINGS_MODULE
# should be set to {{SERVICE_VARIANT}}.{{EDXAPP_SETTINGS}}
# where SERVICE_VARIANT is lms or cms
EDX_PLATFORM_SETTINGS: aws
EDX_PLATFORM_SETTINGS: "{{ EDXAPP_SETTINGS }}"
# Current set to the app dir for json config, this should
# be updated to /edx/etc/edxapp when the switch to
# yaml based configs is complete
......@@ -715,6 +728,16 @@ edxapp_generic_auth_config: &edxapp_generic_auth
AWS_QUERYSTRING_AUTH: "{{ EDXAPP_AWS_QUERYSTRING_AUTH }}"
AWS_STORAGE_BUCKET_NAME: "{{ EDXAPP_AWS_STORAGE_BUCKET_NAME }}"
AWS_S3_CUSTOM_DOMAIN: "{{ EDXAPP_AWS_S3_CUSTOM_DOMAIN }}"
SWIFT_USERNAME: "{{ EDXAPP_SWIFT_USERNAME }}"
SWIFT_KEY: "{{ EDXAPP_SWIFT_KEY }}"
SWIFT_TENANT_ID: "{{ EDXAPP_SWIFT_TENANT_ID }}"
SWIFT_TENANT_NAME: "{{ EDXAPP_SWIFT_TENANT_NAME }}"
SWIFT_AUTH_URL: "{{ EDXAPP_SWIFT_AUTH_URL }}"
SWIFT_AUTH_VERSION: "{{ EDXAPP_SWIFT_AUTH_VERSION }}"
SWIFT_REGION_NAME: "{{ EDXAPP_SWIFT_REGION_NAME }}"
SWIFT_USE_TEMP_URLS: "{{ EDXAPP_SWIFT_USE_TEMP_URLS }}"
SWIFT_TEMP_URL_KEY: "{{ EDXAPP_SWIFT_TEMP_URL_KEY }}"
SWIFT_TEMP_URL_DURATION: "{{ EDXAPP_SWIFT_TEMP_URL_DURATION }}"
SECRET_KEY: "{{ EDXAPP_EDXAPP_SECRET_KEY }}"
DOC_STORE_CONFIG: "{{ edxapp_generic_doc_store_config }}"
XQUEUE_INTERFACE:
......@@ -787,9 +810,11 @@ generic_env_config: &edxapp_generic_env
MICROSITE_CONFIGURATION: "{{ EDXAPP_MICROSITE_CONFIGURATION }}"
DEFAULT_FILE_STORAGE: "{{ EDXAPP_DEFAULT_FILE_STORAGE }}"
GRADES_DOWNLOAD:
STORAGE_TYPE: "{{ EDXAPP_GRADE_STORAGE_TYPE }}"
BUCKET: "{{ EDXAPP_GRADE_BUCKET }}"
ROOT_PATH: "{{ EDXAPP_GRADE_ROOT_PATH }}"
STORAGE_CLASS: "{{ EDXAPP_GRADE_STORAGE_CLASS | default(None) }}"
STORAGE_KWARGS: "{{ EDXAPP_GRADE_STORAGE_KWARGS | default(None) }}"
STORAGE_TYPE: "{{ EDXAPP_GRADE_STORAGE_TYPE | default(None) }}"
BUCKET: "{{ EDXAPP_GRADE_BUCKET | default(None) }}"
ROOT_PATH: "{{ EDXAPP_GRADE_ROOT_PATH | default(None) }}"
STATIC_URL_BASE: "{{ EDXAPP_STATIC_URL_BASE }}"
STATIC_ROOT_BASE: "{{ edxapp_staticfile_dir }}"
LMS_BASE: "{{ EDXAPP_LMS_BASE }}"
......@@ -1121,4 +1146,4 @@ edxapp_lms_variant: lms
edxapp_cms_variant: cms
# Worker Settings
worker_django_settings_module: 'aws'
worker_django_settings_module: '{{ EDXAPP_SETTINGS }}'
......@@ -121,7 +121,6 @@
environment:
DB_MIGRATION_USER: "{{ COMMON_MYSQL_MIGRATE_USER }}"
DB_MIGRATION_PASS: "{{ COMMON_MYSQL_MIGRATE_PASS }}"
EDX_PLATFORM_SETTINGS_OVERRIDE: "aws_migrate"
with_items: "{{ service_variants_enabled }}"
tags:
- migrate
......
......@@ -2,5 +2,5 @@ TrackingLogVoter:
config:
aws_profile: !!null
local_directory: '{{ COMMON_LOG_DIR }}/tracking'
s3_bucket: '{{ COMMON_AWS_SYNC_BUCKET }}'
s3_bucket: '{{ COMMON_OBJECT_STORE_LOG_SYNC_BUCKET }}'
bucket_path_prefix: 'logs/tracking'
---
#
# edX Configuration
#
# github: https://github.com/edx/configuration
# wiki: https://github.com/edx/configuration/wiki
# code style: https://github.com/edx/configuration/wiki/Ansible-Coding-Conventions
# license: https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
##
# Defaults for role openstack
#
VHOST_NAME: openstack
# Credentials for log sync script
SWIFT_LOG_SYNC_USERNAME: ''
SWIFT_LOG_SYNC_PASSWORD: ''
SWIFT_LOG_SYNC_TENANT_ID: ''
SWIFT_LOG_SYNC_TENANT_NAME: ''
SWIFT_LOG_SYNC_AUTH_URL: ''
SWIFT_LOG_SYNC_REGION_NAME: ''
openstack_requirements_file: "{{ edxapp_code_dir }}/requirements/edx/openstack.txt"
openstack_log_sync_script: "{{ vhost_dirs.home.path }}/send-logs-to-swift"
openstack_log_sync_script_environment: "{{ vhost_dirs.home.path }}/log-sync-env.sh"
openstack_swift_logfile: "{{ vhost_dirs.logs.path }}/log-sync.log"
openstack_debian_pkgs:
- python-setuptools
openstack_pip_pkgs:
- python-keystoneclient
- python-swiftclient
---
#
# edX Configuration
#
# github: https://github.com/edx/configuration
# wiki: https://github.com/edx/configuration/wiki
# code style: https://github.com/edx/configuration/wiki/Ansible-Coding-Conventions
# license: https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
##
# Role includes for role openstack
#
dependencies:
- vhost
---
#
# edX Configuration
#
# github: https://github.com/edx/configuration
# wiki: https://github.com/edx/configuration/wiki
# code style: https://github.com/edx/configuration/wiki/Ansible-Coding-Conventions
# license: https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
#
#
# Tasks for role openstack
#
- name: Install system packages
apt:
pkg: "{{','.join(openstack_debian_pkgs)}}"
state: present
update_cache: yes
- name: Install openstack python packages
pip:
name: "{{ item }}"
state: present
extra_args: "-i {{ COMMON_PYPI_MIRROR_URL }}"
with_items: "{{ openstack_pip_pkgs }}"
- name: Create log sync script
template:
src: send-logs-to-swift.j2
dest: "{{ openstack_log_sync_script }}"
mode: 0755
owner: root
group: root
when: COMMON_OBJECT_STORE_LOG_SYNC
- name: Upload openstack credentials for log script
template:
src: log-sync-env.sh.j2
dest: "{{ openstack_log_sync_script_environment }}"
mode: 0600
owner: root
group: root
when: COMMON_OBJECT_STORE_LOG_SYNC
- name: Create symlink for log sync script
file:
state: link
src: "{{ openstack_log_sync_script }}"
dest: "{{ COMMON_OBJECT_STORE_LOG_SYNC_SCRIPT }}"
when: COMMON_OBJECT_STORE_LOG_SYNC
# Install openstack python requirements into {{ edxapp_venv_dir }}
- name : Install python requirements
# Need to use command rather than pip so that we can maintain the context of our current working directory;
# some requirements are pathed relative to the edx-platform repo.
# Using the pip from inside the virtual environment implicitly installs everything into that virtual environment.
command: >
{{ edxapp_venv_dir }}/bin/pip install {{ COMMON_PIP_VERBOSITY }} -i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w -r {{ openstack_requirements_file }}
chdir={{ edxapp_code_dir }}
sudo_user: "{{ edxapp_user }}"
environment: "{{ edxapp_environment }}"
when: edxapp_code_dir is defined
tags:
- install
- install:app-requirements
export OS_USERNAME='{{ SWIFT_LOG_SYNC_USERNAME }}'
export OS_PASSWORD='{{ SWIFT_LOG_SYNC_PASSWORD }}'
export OS_TENANT_ID='{{ SWIFT_LOG_SYNC_TENANT_ID }}'
export OS_TENANT_NAME='{{ SWIFT_LOG_SYNC_TENANT_NAME }}'
export OS_AUTH_URL='{{ SWIFT_LOG_SYNC_AUTH_URL }}'
export OS_REGION_NAME='{{ SWIFT_LOG_SYNC_REGION_NAME }}'
#!/bin/bash
#
# This script can be called from logrotate to sync logs to swift. Based on
# the existing S3 implementation
#
if (( $EUID != 0 )); then
echo "Please run as the root user"
exit 1
fi
# Ensure the log processors can read without running as root
if [ ! -f "{{ openstack_swift_logfile }}" ]; then
touch "{{ openstack_swift_logfile }}"
fi
chown syslog:syslog "{{ openstack_swift_logfile }}"
exec > >(tee -a "{{ openstack_swift_logfile }}")
exec 2>&1
usage() {
cat<<EOF
A wrapper of python-swiftclient that will sync log files to
a swift container.
Usage: $PROG
-v add verbosity (set -x)
-n echo what will be done
-h this
-d directory to sync
-b bucket to sync to
-p name prefix
EOF
}
while getopts "vhnb:d:p:" opt; do
case $opt in
v)
set -x
shift
;;
h)
usage
exit 0
;;
n)
noop="echo Would have run: "
shift
;;
d)
directory=$OPTARG
;;
b)
container=$OPTARG
;;
p)
prefix=$OPTARG
;;
esac
done
if [[ -z $container || -z $directory ]]; then
echo "ERROR: You must provide a directory and a container to sync!"
usage
exit 1
fi
set -e
# The openstack metadata service has an ec2-compatible API, so we can
# use the ec2metadata command here.
instance_id=$(ec2metadata --instance-id)
ip=$(ec2metadata --local-ipv4)
# Source openstack credentials
source "{{ openstack_log_sync_script_environment }}"
# Sync the logs
$noop swift upload --changed --skip-identical --object-name "${prefix}${instance_id}-${ip}" $container $directory
......@@ -11,15 +11,23 @@
# Defaults for role vhost
#
#
# vars are namespaced with the module name.
#
vhost_role_name: vhost
#
# OS packages
#
vhost_debian_pkgs: []
# Specify a name for vhost deployments, e.g. aws or openstack. Service files
# specific to the vhost will be namespaced in directories with this name.
VHOST_NAME: 'vhost'
vhost_redhat_pkgs: []
vhost_dirs:
home:
path: "{{ COMMON_APP_DIR }}/{{ VHOST_NAME }}"
owner: "root"
group: "root"
mode: "0755"
logs:
path: "{{ COMMON_LOG_DIR }}/{{ VHOST_NAME }}"
owner: "syslog"
group: "syslog"
mode: "0650"
data:
path: "{{ COMMON_DATA_DIR }}/{{ VHOST_NAME }}"
owner: "root"
group: "root"
mode: "0700"
......@@ -10,11 +10,5 @@
##
# Role includes for role vhost
#
# Example:
#
# dependencies:
# - {
# role: my_role
# my_role_var0: "foo"
# my_role_var1: "bar"
# }
dependencies:
- common
......@@ -10,7 +10,7 @@
#
#
# Tasks for role vhost
#
#
# Overview:
#
# This task is to contain tasks that should be run in vhost
......@@ -20,21 +20,66 @@
#
# Dependencies:
# - common
#
#
- name: Create all service directories
file:
path: "{{ item.value.path }}"
state: directory
owner: "{{ item.value.owner }}"
group: "{{ item.value.group }}"
mode: "{{ item.value.mode }}"
with_dict: "{{ vhost_dirs }}"
- name: Force logrotate on supervisor stop
template:
src: etc/init/sync-on-stop.conf.j2
dest: /etc/init/sync-on-stop.conf
owner: root
group: root
mode: 0644
when: COMMON_OBJECT_STORE_LOG_SYNC
- name: Update /etc/dhcp/dhclient.conf
template:
src: etc/dhcp/dhclient.conf.j2
dest: /etc/dhcp/dhclient.conf
when: COMMON_CUSTOM_DHCLIENT_CONFIG
- name: Copy the templates to their respestive destination
- name: Copy the MOTD template in place
template:
dest: "{{ item.dest }}"
src: "{{ item.src }}"
owner: root
group: root
mode: "{{ item.mode | default(644) }}"
register: config_templates
with_items:
- { src: 'etc/{{ COMMON_MOTD_TEMPLATE }}', dest: '/etc/motd.tail', mode: '755' }
- { src: 'etc/ssh/sshd_config.j2', dest: '/etc/ssh/sshd_config' }
- name: restart ssh
service: name=ssh state=restarted
src: etc/motd.tail.j2
dest: /etc/motd.tail
owner: root
group: root
mode: '755'
- name: Update sshd logging to VERBOSE
lineinfile:
dest: /etc/ssh/sshd_config
state: present
regexp: "^LogLevel .*"
line: "LogLevel VERBOSE"
register: sshd_config
- name: Update sshd logging to VERBOSE
lineinfile:
dest: /etc/ssh/sshd_config
state: present
regexp: "^PasswordAuthentication .*"
line: "PasswordAuthentication {{ COMMON_SSH_PASSWORD_AUTH }}"
register: sshd_config
- name: Restart ssh
service:
name: ssh
state: restarted
become: True
when: sshd_config.changed and ansible_distribution in common_debian_variants
- name: Restart ssh
service:
name: sshd
state: restarted
become: True
when: config_templates.changed
when: sshd_config.changed and ansible_distribution in common_redhat_variants
****************************************************************************
* ___ _ __ __ *
* / _ \ _ __ ___ _ _ ___ __| |\ \/ / *
* | |_| | '_ \ -_) ' \ / -_) _` | > < *
* \___/| .__/___|_|_| \___\__,_|/_/\_\ *
* |_| *
* *
* Instructions and troubleshooting: *
* https://openedx.atlassian.net/wiki/display/OpenOPS/Running+Devstack *
****************************************************************************
*******************************************************************
* _ __ __ *
* _ _| |\ \/ / This system is for the use of authorized *
* / -_) _` | > < users only. Usage of this system may be *
* \___\__,_|/_/\_\ monitored and recorded by system personnel. *
* *
......
# variables common to the xqueue role, automatically loaded
# when the role is included
---
XQUEUE_SETTINGS: 'aws_settings'
XQUEUE_NGINX_PORT: 18040
XQUEUE_GUNICORN_WORKERS_EXTRA: ""
XQUEUE_GUNICORN_WORKERS_EXTRA_CONF: ""
XQUEUE_QUEUES:
# push queue
'edX-Open_DemoX': 'http://localhost:18050'
......@@ -12,13 +15,30 @@ XQUEUE_QUEUES:
'certificates': !!null
'open-ended': !!null
'open-ended-message': !!null
XQUEUE_LOGGING_ENV: sandbox
XQUEUE_SYSLOG_SERVER: 'localhost'
XQUEUE_S3_BUCKET : 'sandbox-bucket'
XQUEUE_S3_PATH_PREFIX: 'sandbox-xqueue'
XQUEUE_UPLOAD_BUCKET: 'sandbox-bucket'
XQUEUE_UPLOAD_PATH_PREFIX: 'sandbox-xqueue'
# Deprecated, use XQUEUE_UPLOAD_BUCKET and XQUEUE_UPLOAD_PATH_PREFIX
XQUEUE_S3_BUCKET : "{{ XQUEUE_UPLOAD_BUCKET }}"
XQUEUE_S3_PATH_PREFIX: "{{ XQUEUE_UPLOAD_PATH_PREFIX }}"
XQUEUE_LOCAL_LOGLEVEL: 'INFO'
XQUEUE_AWS_ACCESS_KEY_ID : ''
XQUEUE_AWS_SECRET_ACCESS_KEY : ''
XQUEUE_SWIFT_USERNAME: ''
XQUEUE_SWIFT_KEY: ''
XQUEUE_SWIFT_TENANT_ID: ''
XQUEUE_SWIFT_TENANT_NAME: ''
XQUEUE_SWIFT_AUTH_URL: ''
XQUEUE_SWIFT_AUTH_VERSION: 1
XQUEUE_SWIFT_REGION_NAME: ''
XQUEUE_BASIC_AUTH_USER: "{{ COMMON_HTPASSWD_USER }}"
XQUEUE_BASIC_AUTH_PASSWORD: "{{ COMMON_HTPASSWD_PASS }}"
XQUEUE_DJANGO_USERS:
......@@ -62,13 +82,24 @@ xqueue_env_config:
SYSLOG_SERVER: "{{ XQUEUE_SYSLOG_SERVER }}"
LOG_DIR: "{{ COMMON_DATA_DIR }}/logs/xqueue"
RABBIT_HOST: "{{ XQUEUE_RABBITMQ_HOSTNAME }}"
LOCAL_LOGLEVEL: "{{ XQUEUE_LOCAL_LOGLEVEL }}"
UPLOAD_BUCKET: "{{ XQUEUE_UPLOAD_BUCKET }}"
UPLOAD_PATH_PREFIX: "{{ XQUEUE_UPLOAD_PATH_PREFIX }}"
# Deprecated, use UPLOAD_BUCKET and UPLOAD_PATH_PREFIX instead
S3_BUCKET: "{{ XQUEUE_S3_BUCKET }}"
S3_PATH_PREFIX: "{{ XQUEUE_S3_PATH_PREFIX }}"
LOCAL_LOGLEVEL: "{{ XQUEUE_LOCAL_LOGLEVEL }}"
xqueue_auth_config:
AWS_ACCESS_KEY_ID: "{{ XQUEUE_AWS_ACCESS_KEY_ID }}"
AWS_SECRET_ACCESS_KEY: "{{ XQUEUE_AWS_SECRET_ACCESS_KEY }}"
SWIFT_USERNAME: "{{ XQUEUE_SWIFT_USERNAME }}"
SWIFT_KEY: "{{ XQUEUE_SWIFT_KEY }}"
SWIFT_TENANT_ID: "{{ XQUEUE_SWIFT_TENANT_ID }}"
SWIFT_TENANT_NAME: "{{ XQUEUE_SWIFT_TENANT_NAME }}"
SWIFT_AUTH_URL: "{{ XQUEUE_SWIFT_AUTH_URL }}"
SWIFT_AUTH_VERSION: "{{ XQUEUE_SWIFT_AUTH_VERSION }}"
SWIFT_REGION_NAME: "{{ XQUEUE_SWIFT_REGION_NAME }}"
REQUESTS_BASIC_AUTH: ["{{ XQUEUE_BASIC_AUTH_USER }}", "{{XQUEUE_BASIC_AUTH_PASSWORD}}"]
USERS: "{{ XQUEUE_DJANGO_USERS }}"
DATABASES:
......@@ -85,8 +116,9 @@ xqueue_auth_config:
xqueue_source_repo: "https://github.com/edx/xqueue.git"
xqueue_version: 'master'
xqueue_pre_requirements_file: "{{ xqueue_code_dir }}/pre-requirements.txt"
xqueue_post_requirements_file: "{{ xqueue_code_dir }}/requirements.txt"
xqueue_pre_requirements_file: "{{ xqueue_code_dir }}/pre-requirements.txt"
xqueue_post_requirements_file: "{{ xqueue_code_dir }}/requirements.txt"
xqueue_openstack_requirements_file: "{{ xqueue_code_dir }}/openstack-requirements.txt"
# These packages are required for the xqueue server,
# copied from the LMS role for now since there is a lot
......
......@@ -72,10 +72,23 @@
- install
- install:app-requirements
# If this is an openstack deployment, install openstack requirements
- name: Install python openstack requirements
pip:
requirements: "{{ xqueue_openstack_requirements_file }}"
virtualenv: "{{ xqueue_venv_dir }}"
state: present
extra_args: "-i {{ COMMON_PYPI_MIRROR_URL }} --exists-action w"
become_user: "{{ xqueue_user }}"
when: XQUEUE_SETTINGS == 'openstack_settings'
tags:
- install
- install:app-requirements
# If there is a common user for migrations run migrations using his username
# and credentials. If not we use the xqueue mysql user
- name: Migrate
shell: "SERVICE_VARIANT=xqueue {{ xqueue_venv_bin }}/django-admin.py migrate --noinput --settings=xqueue.aws_migrate --pythonpath={{ xqueue_code_dir }}"
shell: "SERVICE_VARIANT=xqueue {{ xqueue_venv_bin }}/django-admin.py migrate --noinput --settings=xqueue.{{ XQUEUE_SETTINGS }} --pythonpath={{ xqueue_code_dir }}"
become_user: "{{ xqueue_user }}"
environment:
DB_MIGRATION_USER: "{{ COMMON_MYSQL_MIGRATE_USER }}"
......@@ -86,7 +99,7 @@
- migrate:db
- name: Create users
shell: "SERVICE_VARIANT=xqueue {{ xqueue_venv_bin }}/django-admin.py update_users --settings=xqueue.aws_settings --pythonpath={{ xqueue_code_dir }}"
shell: "SERVICE_VARIANT=xqueue {{ xqueue_venv_bin }}/django-admin.py update_users --settings=xqueue.{{ XQUEUE_SETTINGS }} --pythonpath={{ xqueue_code_dir }}"
become_user: "{{ xqueue_user }}"
tags:
- manage
......
......@@ -11,7 +11,7 @@ command={{ executable }} -c {{ xqueue_app_dir }}/xqueue_gunicorn.py {{ XQUEUE_GU
user={{ common_web_user }}
directory={{ xqueue_code_dir }}
environment={% if COMMON_ENABLE_NEWRELIC_APP %}NEW_RELIC_APP_NAME={{ XQUEUE_NEWRELIC_APPNAME }},NEW_RELIC_LICENSE_KEY={{ NEWRELIC_LICENSE_KEY }},{% endif -%}PID=/var/tmp/xqueue.pid,PORT={{ xqueue_gunicorn_port }},ADDRESS={{ xqueue_gunicorn_host }},LANG={{ XQUEUE_LANG }},DJANGO_SETTINGS_MODULE=xqueue.aws_settings,SERVICE_VARIANT="xqueue"
environment={% if COMMON_ENABLE_NEWRELIC_APP %}NEW_RELIC_APP_NAME={{ XQUEUE_NEWRELIC_APPNAME }},NEW_RELIC_LICENSE_KEY={{ NEWRELIC_LICENSE_KEY }},{% endif -%}PID=/var/tmp/xqueue.pid,PORT={{ xqueue_gunicorn_port }},ADDRESS={{ xqueue_gunicorn_host }},LANG={{ XQUEUE_LANG }},DJANGO_SETTINGS_MODULE=xqueue.{{ XQUEUE_SETTINGS }},SERVICE_VARIANT="xqueue"
stdout_logfile={{ supervisor_log_dir }}/%(program_name)s-stdout.log
stderr_logfile={{ supervisor_log_dir }}/%(program_name)s-stderr.log
......
[program:xqueue_consumer]
command={{ xqueue_venv_bin }}/django-admin.py run_consumer --pythonpath={{ xqueue_code_dir }} --settings=xqueue.aws_settings $WORKERS_PER_QUEUE
command={{ xqueue_venv_bin }}/django-admin.py run_consumer --pythonpath={{ xqueue_code_dir }} --settings=xqueue.{{ XQUEUE_SETTINGS }} $WORKERS_PER_QUEUE
user={{ common_web_user }}
directory={{ xqueue_code_dir }}
......
......@@ -106,7 +106,7 @@
# SUBDOMAIN_BRANDING: false
# SUBDOMAIN_COURSE_LISTINGS: false
# PREVIEW_LMS_BASE: "{{ EDXAPP_PREVIEW_LMS_BASE }}"
# ENABLE_S3_GRADE_DOWNLOADS: true
# ENABLE_GRADE_DOWNLOADS: true
# USE_CUSTOM_THEME: "{{ edxapp_use_custom_theme }}"
# ENABLE_MKTG_SITE: "{{ EDXAPP_ENABLE_MKTG_SITE }}"
# AUTOMATIC_AUTH_FOR_TESTING: "{{ EDXAPP_ENABLE_AUTO_AUTH }}"
......@@ -194,7 +194,7 @@
# - github_url: "https://github.com/edx/edx-demo-course.git"
# install: "{{ ENABLE_EDX_DEMO_COURSE }}"
# course_id: "course-v1:edX+DemoX+Demo_Course"
#
#
#EDXAPP_FILE_UPLOAD_STORAGE_BUCKET_NAME: edxuploads-sandbox
#EDXAPP_AWS_STORAGE_BUCKET_NAME: edxuploads-sandbox
#
......@@ -211,4 +211,3 @@
## LOCUST Settings
##
#LOCUST_GIT_IDENTITY: "{{ _local_git_identity }}"