Commit 83873d4d by Filippo Panessa, committed by GitHub

Merge branch 'master' into mariadb

parents 3f34fb39 b797db62
- Role: analytics_api
- Added `ANALYTICS_API_AGGREGATE_PAGE_SIZE` (default: 10). Increase this parameter to return more aggregate
search results from the Analytics API, i.e. the aggregations in course_metadata: enrollment_modes, cohorts, and
segments.
- Role: programs
- This role has been removed as this service is no longer supported. The role is still available on the [Ficus branch](https://github.com/edx/configuration/releases/tag/open-release%2Fficus.1).
- Role: xqueue
- Changed `XQUEUE_RABBITMQ_TLS` default from `true` to `false`.
- Role: credentials
- Added `CREDENTIALS_EXTRA_APPS` to enable the inclusion of additional Django apps in the Credentials Service.
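  For illustration only (a hedged sketch; `my_credentials_theme` is a hypothetical app name, not one shipped with the service), an override might look like:
      CREDENTIALS_EXTRA_APPS:
        - 'my_credentials_theme'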
- Role: common
......@@ -225,3 +230,6 @@
- Role: ecommerce
- Removed `SEGMENT_KEY` which is no longer used. Segment key is now defined in DB configuration. (https://github.com/edx/ecommerce/pull/1121)
- Role: edxapp
- Added `EDXAPP_BLOCK_STRUCTURES_SETTINGS` to configure S3-backed Course Block Structures.
......@@ -10,7 +10,6 @@
FROM edxops/xenial-common:latest
MAINTAINER edxops
ENV PROGRAMS_VERSION=master
ENV REPO_OWNER=edx
ADD . /edx/app/edx_ansible/edx_ansible
......
......@@ -58,7 +58,7 @@ RUN \
# Install AWS command-line interface - for AWS operations in a go-agent task.
RUN pip install awscli
RUN pip install 'awscli>=1.11.58'
# !!!!NOTICE!!!! ---- Runner of this pipeline take heed!! You must replace go_github_key.pem with the REAL key material
# that can checkout private github repositories used as pipeline materials. The key material here is faked and is only
......
FROM edxops/xenial-common:latest
MAINTAINER edxops
USER root
ADD . /edx/app/edx_ansible/edx_ansible
WORKDIR /edx/app/edx_ansible/edx_ansible/docker/plays
RUN /edx/app/edx_ansible/venvs/edx_ansible/bin/ansible-playbook edx-monitoring.yml -c local \
-i '127.0.0.1,'
# To build this Dockerfile:
#
# From the root of configuration:
#
# docker build -f docker/build/programs/Dockerfile .
#
# This allows the dockerfile to update /edx/app/edx_ansible/edx_ansible
# with the currently checked-out configuration repo.
FROM edxops/xenial-common:latest
MAINTAINER edxops
ENV PROGRAMS_VERSION=master
ENV REPO_OWNER=edx
ADD . /edx/app/edx_ansible/edx_ansible
WORKDIR /edx/app/edx_ansible/edx_ansible/docker/plays
COPY docker/build/programs/ansible_overrides.yml /
RUN sudo /edx/app/edx_ansible/venvs/edx_ansible/bin/ansible-playbook programs.yml \
-c local -i '127.0.0.1,' \
-t 'install,assets' \
--extra-vars="@/ansible_overrides.yml" \
--extra-vars="PROGRAMS_VERSION=$PROGRAMS_VERSION" \
--extra-vars="COMMON_GIT_PATH=$REPO_OWNER"
USER root
CMD ["/edx/app/supervisor/venvs/supervisor/bin/supervisord", "-n", "--configuration", "/edx/app/supervisor/supervisord.conf"]
---
DOCKER_TLD: "edx"
# The prod settings assume an rsyslogd daemon is available.
# In addition, on systemd systems and with newer rsyslogd,
# there may be issues with /dev/log existing inside the container:
# http://www.projectatomic.io/blog/2014/09/running-syslog-within-a-docker-container/
PROGRAMS_DJANGO_SETTINGS_MODULE: programs.settings.devstack
PROGRAMS_DATABASES:
# rw user
default:
ENGINE: 'django.db.backends.mysql'
NAME: '{{ PROGRAMS_DEFAULT_DB_NAME }}'
USER: 'programs001'
PASSWORD: 'password'
HOST: 'db.{{ DOCKER_TLD }}'
PORT: '3306'
ATOMIC_REQUESTS: true
CONN_MAX_AGE: 60
......@@ -93,7 +93,7 @@ forum:
# Image built from the opencraft fork as it fixes
# an auth bug. Update when the change merges
# upstream
image: edxops/forum:opencraft-v2
image: edxops/forums:opencraft-v2
volumes:
- ${DOCKER_EDX_ROOT}/cs_comments_service:/edx/app/forum/cs_comments_service
ports:
......
- name: Deploy Programs
hosts: all
sudo: True
gather_facts: True
roles:
- nginx
- docker
- role: programs
nginx_default_sites:
- programs
......@@ -9,10 +9,6 @@
# - app_repo - the url of the github repo for this app
# - app_version - git hash of the app (play, service, IDA) being deployed
# - play - the play that was run
# - configuration_repo - The github url for the configuration repo
# - configuration_version - The version (git hash) of configuration
# - configuration_secure_repo - the github url for the configuration secure repository
# - configuration_secure_version - the version (git hash) of configuration secure
# - cache_id - the cache_id version
#
# Other variables
......@@ -21,12 +17,12 @@
# - ami_wait - (yes/no) should ansible pause while the AMI is created
# - no_reboot - (yes/no) should the instance not be rebooted during AMI creation
# - artifact_path - the path to where this ansible run stores the artifacts for the pipeline
# - edxapp_theme_source_repo - the edx_app_theme repository
# - edxapp_theme_version - the edx_app_theme repository version (git hash)
# - hipchat_token - API token to send messages to hipchat
# - hipchat_room - ID or name of the room to send the notification
# - hipchat_url - URL of the hipchat API (defaults to v1 of the api)
# - extra_name_identifier - Makes each AMI unique if desired - Default: 0
# - version_tags - A mapping of {app: [repo, version], ...}, used to generate
# a "version:app = repo version" tag on the AMI
#
# Example command line to run this playbook:
# ansible-playbook -vvvv -i "localhost," -c local \
......@@ -36,9 +32,7 @@
# -e deployment=edx \
# -e edx_environment=sandbox \
# -e app_version=12345 \
# -e configuration_version=12345
# -e configuration_secure_version=12345
# -e cache_id=12345
# -e cache_id=12345 \
# create_ami.yml
#
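# As an illustration of the version_tags shape described above (the repo URL
# and hash here are hypothetical, shown only to document the expected format
# consumed by the tagging task below):
# -e '{"version_tags": {"edxapp_theme": ["https://github.com/edx/edx-theme", "abc1234"]}}'
#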
......@@ -74,8 +68,6 @@
# used a JSON object here as there is a string interpolation in the keys.
tags: "{
'version:{{ play }}':'{{ app_repo }} {{ app_version }}',
'version:configuration':'{{ configuration_repo }} {{ configuration_version }}',
'version:configuration_secure':'{{ configuration_secure_repo }} {{ configuration_secure_version }}',
'play':'{{ play }}',
'cache_id':'{{ cache_id }}',
'environment':'{{ edx_environment }}',
......@@ -89,13 +81,16 @@
resource: "{{ ami_register.image_id }}"
tags: "{{ instance_tags.tags }}"
- name: add optional edx-theme tag
- name: Add any version tags that were passed on the commandline
ec2_tag:
region: "{{ ec2_region }}"
resource: "{{ ami_register.image_id }}"
tags:
version:edxapp_theme: "{{ edxapp_theme_source_repo }} {{ edxapp_theme_version }}"
when: edxapp_theme_version is defined and edxapp_theme_source_repo is defined
tags: "{
{% for name, (repo, version) in version_tags.items() %}
'version:{{ name }}': '{{ repo }} {{ version }}',
{% endfor %}
}"
when: version_tags is defined
- name: Fetch tags on the AMI
ec2_tag:
......
......@@ -38,7 +38,7 @@
# -e ARTIFACT_PATH=`/bin/pwd`/../{artifact_path}/migrations \
# -e DB_MIGRATION_USER=$DB_MIGRATION_USER \
# -e DB_MIGRATION_PASS=$DB_MIGRATION_PASS \
# -e ../{artifact_path}/migration_input_file \
# -e @../{artifact_path}/migration_input_file.yml \
# -e SUB_APPLICATION_NAME={sub_application_name} \
# playbooks/continuous_delivery/rollback_migrations.yml
......
......@@ -11,3 +11,4 @@ ansible_managed=This file is created and updated by ansible, edit at your peril
[ssh_connection]
ssh_args=-o ControlMaster=auto -o ControlPersist=60s -o ControlPath="~/.ansible/tmp/ansible-ssh-%h-%p-%r" -o ServerAliveInterval=30
retries=5
---
# Runs the python bootstrapping role against an ubuntu machine.
# This is not as complete as ansible_bootstrap.sh (intentionally so).
# This lets you get python2.7 installed on a machine so you can follow up
# with your actual playbook or role. The key is gather_facts: False.
#
# Usage:
# ansible-playbook ./bootstrap_python.yml -i "hostname,"
#
- hosts: all
become: True
gather_facts: False
roles:
- python
......@@ -15,7 +15,6 @@
- xserver
- analytics_api
- ecommerce
- programs
- credentials
nginx_default_sites:
- lms
......@@ -38,7 +37,6 @@
- edx_ansible
- analytics_api
- ecommerce
- programs
- credentials
- oauth_client_setup
- role: datadog
......
......@@ -27,7 +27,6 @@
roles:
- common
- aws
- oraclejdk
- elasticsearch
post_tasks:
- debug:
......
......@@ -7,7 +7,6 @@
- "roles/edxapp/defaults/main.yml"
- "roles/insights/defaults/main.yml"
- "roles/ecommerce/defaults/main.yml"
- "roles/programs/defaults/main.yml"
- "roles/credentials/defaults/main.yml"
- "roles/discovery/defaults/main.yml"
roles:
......
......@@ -6,7 +6,7 @@
# code style: https://openedx.atlassian.net/wiki/display/OpenOPS/Ansible+Code+Conventions
# license: https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
# Usage: ansible-playbook -i lms-host-1, -e "file=/path/to/json/file" -e "user=username"
# Usage: ansible-playbook -i lms-host-1, ./populate_configuration_model.yml -e "file=/path/to/json/file" -e "user=username"
#
# Overview:
# This executes the "populate_model" management command to populate a ConfigurationModel with
......@@ -36,6 +36,8 @@
vars:
python_path: /edx/bin/python.edxapp
manage_path: /edx/bin/manage.edxapp
become_user: www-data
become: true
tasks:
- name: Create a temp directory
shell: mktemp -d /tmp/ansible_xblock_config.XXXXX
......
- name: Deploy edX Programs Service
hosts: all
become: True
gather_facts: True
vars:
ENABLE_DATADOG: False
ENABLE_SPLUNKFORWARDER: False
ENABLE_NEWRELIC: False
CLUSTER_NAME: 'programs'
roles:
- aws
- role: nginx
nginx_sites:
- programs
nginx_default_sites:
- programs
- programs
- role: datadog
when: COMMON_ENABLE_DATADOG
- role: splunkforwarder
when: COMMON_ENABLE_SPLUNKFORWARDER
- role: newrelic
when: COMMON_ENABLE_NEWRELIC
# Configure an admin instance with jenkins and asgard.
- name: Configure instance(s)
hosts: all
become: True
gather_facts: True
roles:
- aws
- xsy
---
- name: Bootstrap instance(s)
hosts: all
gather_facts: no
become: True
roles:
- role: python
tags:
- install
- install:system-requirements
- name: Configure instance(s)
hosts: all
become: True
gather_facts: True
roles:
- graphite
- role: nginx
nginx_sites:
- graphite
......@@ -57,7 +57,6 @@
- role: ecomworker
ECOMMERCE_WORKER_BROKER_HOST: 127.0.0.1
when: SANDBOX_ENABLE_ECOMMERCE
- programs
- analytics_api
- insights
# not ready yet: - edx_notes_api
......
......@@ -66,6 +66,7 @@ ANALYTICS_API_DATE_FORMAT: '%Y-%m-%d'
ANALYTICS_API_DATETIME_FORMAT: '%Y-%m-%dT%H%M%S'
ANALYTICS_API_DEFAULT_PAGE_SIZE: 25
ANALYTICS_API_MAX_PAGE_SIZE: 100
ANALYTICS_API_AGGREGATE_PAGE_SIZE: 10
ANALYTICS_API_BASE_URL: 'http://localhost:8100'
ANALYTICS_API_DATA_DIR: '{{ COMMON_DATA_DIR }}/{{ analytics_api_service_name }}'
......@@ -123,6 +124,7 @@ ANALYTICS_API_SERVICE_CONFIG:
DATETIME_FORMAT: '{{ ANALYTICS_API_DATETIME_FORMAT }}'
DEFAULT_PAGE_SIZE: '{{ ANALYTICS_API_DEFAULT_PAGE_SIZE }}'
MAX_PAGE_SIZE: '{{ ANALYTICS_API_MAX_PAGE_SIZE }}'
AGGREGATE_PAGE_SIZE: '{{ ANALYTICS_API_AGGREGATE_PAGE_SIZE }}'
REPORT_DOWNLOAD_BACKEND: '{{ ANALYTICS_API_REPORT_DOWNLOAD_BACKEND }}'
ANALYTICS_API_REPOS:
......
......@@ -44,7 +44,7 @@ aws_debian_pkgs:
aws_pip_pkgs:
- https://s3.amazonaws.com/cloudformation-examples/aws-cfn-bootstrap-latest.tar.gz
- awscli==1.10.28
- awscli==1.11.58
- boto=="{{ common_boto_version }}"
- s3cmd==1.6.1
......
......@@ -5,6 +5,7 @@ browser_deb_pkgs:
- libgconf2-4
- libnss3-1d
- libxss1
- ubuntu-restricted-extras
- xdg-utils
- xvfb
......
......@@ -63,14 +63,8 @@ CREDENTIALS_SOCIAL_AUTH_REDIRECT_IS_HTTPS: false
CREDENTIALS_PLATFORM_NAME: 'Your Platform Name Here'
# credentials service user in programs service and lms
CREDENTIALS_SERVICE_USER: 'credentials_service_user'
# Absolute URL used to get programs from the programs service.
CREDENTIALS_PROGRAMS_API_URL: 'https://127.0.0.1:8004/api/v1/'
CREDENTIALS_PROGRAMS_API_JWT_AUDIENCE: 'SET-ME-TO-THE-SAME-AS-PROGRAMS_SOCIAL_AUTH_EDX_OIDC_KEY'
CREDENTIALS_PROGRAMS_API_JWT_SECRET_KEY: 'SET-ME-TO-THE-SAME-AS-PROGRAMS_SOCIAL_AUTH_EDX_OIDC_SECRET'
# Absolute URL used to get organization data from the organizations api in LMS
CREDENTIALS_ORGANIZATIONS_API_URL: 'https://127.0.0.1:8000/api/organizations/v0/'
CREDENTIALS_ORGANIZATIONS_API_AUDIENCE: '{{ EDXAPP_JWT_AUDIENCE | default("lms-key") }}'
......@@ -193,14 +187,8 @@ CREDENTIALS_SERVICE_CONFIG:
# Set credentials files storage backend
FILE_STORAGE_BACKEND: '{{ CREDENTIALS_FILE_STORAGE_BACKEND }}'
# credentials service user in programs service and lms
CREDENTIALS_SERVICE_USER: '{{ CREDENTIALS_SERVICE_USER }}'
# programs api configuration
PROGRAMS_API_URL: '{{ CREDENTIALS_PROGRAMS_API_URL }}'
PROGRAMS_JWT_AUDIENCE: '{{ CREDENTIALS_PROGRAMS_API_JWT_AUDIENCE }}'
PROGRAMS_JWT_SECRET_KEY: '{{ CREDENTIALS_PROGRAMS_API_JWT_SECRET_KEY }}'
# organizations api configuration
ORGANIZATIONS_API_URL: '{{ CREDENTIALS_ORGANIZATIONS_API_URL }}'
ORGANIZATIONS_AUDIENCE: '{{ CREDENTIALS_ORGANIZATIONS_API_AUDIENCE }}'
......
......@@ -89,7 +89,6 @@ DISCOVERY_COURSES_API_URL: '{{ DISCOVERY_LMS_ROOT_URL }}/api/courses/v1/'
DISCOVERY_ORGANIZATIONS_API_URL: '{{ DISCOVERY_LMS_ROOT_URL }}/api/organizations/v0/'
DISCOVERY_MARKETING_API_URL: 'https://example.org/api/catalog/v2/'
DISCOVERY_MARKETING_URL_ROOT: 'https://example.org/'
DISCOVERY_PROGRAMS_API_URL: 'https://replace-me/api/v1/'
DISCOVERY_DATA_DIR: '{{ COMMON_DATA_DIR }}/{{ discovery_service_name }}'
DISCOVERY_MEDIA_ROOT: '{{ DISCOVERY_DATA_DIR }}/media'
......@@ -150,7 +149,6 @@ DISCOVERY_SERVICE_CONFIG:
ORGANIZATIONS_API_URL: '{{ DISCOVERY_ORGANIZATIONS_API_URL }}'
MARKETING_API_URL: '{{ DISCOVERY_MARKETING_API_URL }}'
MARKETING_URL_ROOT: '{{ DISCOVERY_MARKETING_URL_ROOT }}'
PROGRAMS_API_URL: '{{ DISCOVERY_PROGRAMS_API_URL }}'
EDX_DRF_EXTENSIONS: '{{ DISCOVERY_EDX_DRF_EXTENSIONS }}'
......
......@@ -68,6 +68,24 @@
- install
- install:app-requirements
# This is a hack to work around the fact that the table `thumbnail_kvstore` already
# exists in some environments. Those environments do not need the newly introduced
# third-party migration that creates this table, so we fake that migration.
# This is required for the Ginkgo release.
# TODO: Delete this task for the Hawthorn release.
- name: fake thumbnails
shell: >
table_exists=`mysql -uroot -ss -e "SELECT EXISTS(SELECT * FROM information_schema.tables WHERE table_schema = '{{ ECOMMERCE_DEFAULT_DB_NAME }}' AND table_name = 'thumbnail_kvstore')"`;
if [ "$table_exists" -eq "1" ]; then {{ ecommerce_venv_dir }}/bin/python ./manage.py migrate thumbnail 0001 --fake; fi;
args:
chdir: "{{ ecommerce_code_dir }}"
become_user: "{{ ecommerce_user }}"
environment: "{{ ecommerce_environment }}"
when: migrate_db is defined and migrate_db|lower == "yes"
tags:
- migrate
- migrate:db
- name: Migrate
shell: >
DB_MIGRATION_USER='{{ COMMON_MYSQL_MIGRATE_USER }}'
......@@ -83,19 +101,12 @@
- migrate:db
- name: Populate countries
shell: "DB_MIGRATION_USER={{ COMMON_MYSQL_MIGRATE_USER }} DB_MIGRATION_PASS={{ COMMON_MYSQL_MIGRATE_PASS }} {{ ecommerce_venv_dir }}/bin/python ./manage.py oscar_populate_countries"
shell: "DB_MIGRATION_USER={{ COMMON_MYSQL_MIGRATE_USER }} DB_MIGRATION_PASS={{ COMMON_MYSQL_MIGRATE_PASS }} {{ ecommerce_venv_dir }}/bin/python ./manage.py oscar_populate_countries --initial-only"
args:
chdir: "{{ ecommerce_code_dir }}"
become_user: "{{ ecommerce_user }}"
environment: "{{ ecommerce_environment }}"
when: migrate_db is defined and migrate_db|lower == "yes"
# the `register` and `failed_when` directives below are here to emulate idempotency for this oscar command.
# if and when https://github.com/django-oscar/django-oscar/pull/1841 is merged, the directives can be removed
# in favor of the (proposed) --initial-only command option.
register: command_result
failed_when:
- "'You already have countries in your database' not in command_result.stderr"
- "command_result.rc != 0"
tags:
- migrate
- migrate:db
......
......@@ -13,7 +13,7 @@ IFS=","
-h this
<repo> - must be one of edx-platform, edx-workers, xqueue, cs_comments_service, credentials, xserver, configuration,
read-only-certificate-code, edx-analytics-data-api, edx-ora2, insights, ecommerce, programs, course_discovery,
read-only-certificate-code, edx-analytics-data-api, edx-ora2, insights, ecommerce, course_discovery,
notifier
<version> - can be a commit or tag
......@@ -59,7 +59,6 @@ repos_to_cmd["edx-analytics-data-api"]="$edx_ansible_cmd analyticsapi.yml -e 'AN
repos_to_cmd["edx-ora2"]="$edx_ansible_cmd ora2.yml -e 'ora2_version=$2'"
repos_to_cmd["insights"]="$edx_ansible_cmd insights.yml -e 'INSIGHTS_VERSION=$2'"
repos_to_cmd["ecommerce"]="$edx_ansible_cmd ecommerce.yml -e 'ECOMMERCE_VERSION=$2'"
repos_to_cmd["programs"]="$edx_ansible_cmd programs.yml -e 'PROGRAMS_VERSION=$2'"
repos_to_cmd["discovery"]="$edx_ansible_cmd discovery.yml -e 'DISCOVERY_VERSION=$2'"
repos_to_cmd["notifier"]="$edx_ansible_cmd notifier.yml -e 'NOTIFIER_VERSION=$2'"
......
......@@ -534,6 +534,12 @@ EDXAPP_ECOMMERCE_API_URL: "http://localhost:8002/api/v2"
EDXAPP_ECOMMERCE_API_SIGNING_KEY: "{{ EDXAPP_JWT_SECRET_KEY }}"
EDXAPP_COURSE_CATALOG_API_URL: "http://localhost:8008/api/v1"
# which access.py permission name to check in order to determine if a course about page is
# visible. We default this to the legacy permission 'see_exists'.
EDXAPP_COURSE_CATALOG_VISIBILITY_PERMISSION: 'see_exists'
EDXAPP_COURSE_ABOUT_VISIBILITY_PERMISSION: 'see_exists'
# Mailchimp Settings
EDXAPP_MAILCHIMP_NEW_USER_LIST_ID: null
......@@ -629,6 +635,21 @@ EDXAPP_SESSION_SAVE_EVERY_REQUEST: false
EDXAPP_SESSION_COOKIE_SECURE: false
# Course Block Structures
EDXAPP_BLOCK_STRUCTURES_SETTINGS:
# Delay, in seconds, after a new edit of a course is published
# before updating the block structures cache. This is needed
# for a better chance at getting the latest changes when there
# are secondary reads in sharded mongoDB clusters. See TNL-5041
# for more info.
COURSE_PUBLISH_TASK_DELAY: 30
# Delay, in seconds, between retry attempts if a task fails.
TASK_DEFAULT_RETRY_DELAY: 30
# Maximum number of retries per task.
TASK_MAX_RETRIES: 5
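  # When backing block structures with S3 (the use case called out in the
  # changelog), storage-related keys are added to this same dict. The key
  # names below are an assumption based on edx-platform's block structure
  # settings; verify them against your platform version before use:
  # STORAGE_CLASS: 'storages.backends.s3boto.S3BotoStorage'
  # STORAGE_KWARGS:
  #   bucket: 'my-block-structures-bucket'  # hypothetical bucket name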
#-------- Everything below this line is internal to the role ------------
#Use YAML references (& and *) and hash merge <<: to factor out shared settings
......@@ -809,6 +830,8 @@ generic_env_config: &edxapp_generic_env
ECOMMERCE_PUBLIC_URL_ROOT: "{{ EDXAPP_ECOMMERCE_PUBLIC_URL_ROOT }}"
ECOMMERCE_API_URL: "{{ EDXAPP_ECOMMERCE_API_URL }}"
COURSE_CATALOG_API_URL: "{{ EDXAPP_COURSE_CATALOG_API_URL }}"
COURSE_CATALOG_VISIBILITY_PERMISSION: "{{ EDXAPP_COURSE_CATALOG_VISIBILITY_PERMISSION }}"
COURSE_ABOUT_VISIBILITY_PERMISSION: "{{ EDXAPP_COURSE_ABOUT_VISIBILITY_PERMISSION }}"
FINANCIAL_REPORTS: "{{ EDXAPP_FINANCIAL_REPORTS }}"
ONLOAD_BEACON_SAMPLE_RATE: "{{ EDXAPP_ONLOAD_BEACON_SAMPLE_RATE }}"
CORS_ORIGIN_WHITELIST: "{{ EDXAPP_CORS_ORIGIN_WHITELIST }}"
......@@ -951,6 +974,7 @@ generic_env_config: &edxapp_generic_env
REGISTRATION_EXTRA_FIELDS: "{{ EDXAPP_REGISTRATION_EXTRA_FIELDS }}"
XBLOCK_SETTINGS: "{{ EDXAPP_XBLOCK_SETTINGS }}"
EDXMKTG_USER_INFO_COOKIE_NAME: "{{ EDXAPP_EDXMKTG_USER_INFO_COOKIE_NAME }}"
BLOCK_STRUCTURES_SETTINGS: "{{ EDXAPP_BLOCK_STRUCTURES_SETTINGS }}"
# Deprecated, maintained for backward compatibility
COMPREHENSIVE_THEME_DIR: "{{ EDXAPP_COMPREHENSIVE_THEME_DIR }}"
......
......@@ -10,6 +10,11 @@ stderr_logfile={{ supervisor_log_dir }}/%(program_name)s-stderr.log
command={{ edxapp_venv_dir + '/bin/newrelic-admin run-program ' if w.monitor and COMMON_ENABLE_NEWRELIC_APP else ''}}{{ edxapp_venv_bin }}/python {{ edxapp_code_dir }}/manage.py {{ w.service_variant }} --settings={{ worker_django_settings_module }} celery worker --loglevel=info --queues=edx.{{ w.service_variant }}.core.{{ w.queue }} --hostname=edx.{{ w.service_variant }}.core.{{ w.queue }}.%%h --concurrency={{ w.concurrency }} {{ '--maxtasksperchild ' + w.max_tasks_per_child|string if w.max_tasks_per_child is defined else '' }}
killasgroup=true
stopwaitsecs={{ w.stopwaitsecs | default(EDXAPP_WORKER_DEFAULT_STOPWAITSECS) }}
; Set autorestart to `true`. The default value for autorestart is `unexpected`, but celery < 4.x will exit
; with an exit code of zero for certain types of unrecoverable errors, so we must make sure that the workers
; are auto restarted even when exiting with code 0.
; The Celery bug was reported in https://github.com/celery/celery/issues/2024, and is fixed in Celery 4.0.0.
autorestart=true
{% endfor %}
......
......@@ -10,7 +10,6 @@ edxlocal_databases:
- "{{ EDXAPP_MYSQL_DB_NAME | default(None) }}"
- "{{ EDXAPP_MYSQL_CSMH_DB_NAME | default(None) }}"
- "{{ EDX_NOTES_API_MYSQL_DB_NAME | default(None) }}"
- "{{ PROGRAMS_DEFAULT_DB_NAME | default(None) }}"
- "{{ ANALYTICS_API_DEFAULT_DB_NAME | default(None) }}"
- "{{ ANALYTICS_API_REPORTS_DB_NAME | default(None) }}"
- "{{ CREDENTIALS_DEFAULT_DB_NAME | default(None) }}"
......@@ -24,8 +23,8 @@ edxlocal_database_users:
}
- {
db: "{{ INSIGHTS_DATABASE_NAME | default(None) }}",
user: "{{ INSIGHTS_MYSQL_USER | default(None) }}",
pass: "{{ INSIGHTS_MYSQL_USER | default(None) }}"
user: "{{ INSIGHTS_DATABASE_USER | default(None) }}",
pass: "{{ INSIGHTS_DATABASE_PASSWORD | default(None) }}"
}
- {
db: "{{ XQUEUE_MYSQL_DB_NAME | default(None) }}",
......@@ -43,11 +42,6 @@ edxlocal_database_users:
pass: "{{ EDXAPP_MYSQL_CSMH_PASSWORD | default(None) }}"
}
- {
db: "{{ PROGRAMS_DEFAULT_DB_NAME | default(None) }}",
user: "{{ PROGRAMS_DATABASE_USER | default(None) }}",
pass: "{{ PROGRAMS_DATABASE_PASSWORD | default(None) }}"
}
- {
db: "{{ ANALYTICS_PIPELINE_OUTPUT_DATABASE_NAME | default(None) }}",
user: "{{ ANALYTICS_PIPELINE_OUTPUT_DATABASE_USER | default(None) }}",
pass: "{{ ANALYTICS_PIPELINE_OUTPUT_DATABASE_PASSWORD | default(None) }}"
......
......@@ -53,6 +53,14 @@ GO_SERVER_BACKUP_PASSWORD: ""
GO_SERVER_GOMATIC_USERNAME: ""
GO_SERVER_GOMATIC_PASSWORD: ""
GO_SERVER_USERS:
- username: "{{ GO_SERVER_ADMIN_USERNAME }}"
password: "{{ GO_SERVER_ADMIN_PASSWORD }}"
- username: "{{ GO_SERVER_BACKUP_USERNAME }}"
password: "{{ GO_SERVER_BACKUP_PASSWORD }}"
- username: "{{ GO_SERVER_GOMATIC_USERNAME }}"
password: "{{ GO_SERVER_GOMATIC_PASSWORD }}"
# go-server network settings
GO_SERVER_PORT: 8153
GO_SERVER_SECURE_PORT: 8154
......
......@@ -74,31 +74,6 @@
- { url: "{{ GO_SERVER_OAUTH_LOGIN_JAR_URL }}", md5: "{{ GO_SERVER_OAUTH_LOGIN_MD5 }}" }
- { url: "{{ GO_SERVER_GITHUB_PR_PLUGIN_JAR_URL }}", md5: "{{ GO_SERVER_GITHUB_PR_PLUGIN_MD5 }}" }
- name: generate line for go-server password file for admin user
command: "/usr/bin/htpasswd -nbs \"{{ GO_SERVER_ADMIN_USERNAME }}\" \"{{ GO_SERVER_ADMIN_PASSWORD }}\""
register: admin_user_password_line
when: GO_SERVER_ADMIN_USERNAME and GO_SERVER_ADMIN_PASSWORD
- name: generate line for go-server password file for backup user
command: "/usr/bin/htpasswd -nbs \"{{ GO_SERVER_BACKUP_USERNAME }}\" \"{{ GO_SERVER_BACKUP_PASSWORD }}\""
register: backup_user_password_line
when: GO_SERVER_BACKUP_USERNAME and GO_SERVER_BACKUP_PASSWORD
- name: generate line for go-server password file for gomatic user
command: "/usr/bin/htpasswd -nbs \"{{ GO_SERVER_GOMATIC_USERNAME }}\" \"{{ GO_SERVER_GOMATIC_PASSWORD }}\""
register: gomatic_user_password_line
when: GO_SERVER_GOMATIC_USERNAME and GO_SERVER_GOMATIC_PASSWORD
- name: setup password file
template:
src: edx/app/go-server/password.txt.j2
dest: "{{ GO_SERVER_CONF_HOME }}/{{ GO_SERVER_PASSWORD_FILE_NAME }}"
mode: 0600
owner: "{{ GO_SERVER_USER }}"
group: "{{ GO_SERVER_GROUP }}"
force: no
when: GO_SERVER_ADMIN_PASSWORD and GO_SERVER_BACKUP_PASSWORD and GO_SERVER_GOMATIC_PASSWORD
- name: install go-server configuration
template:
src: edx/app/go-server/cruise-config.xml.j2
......@@ -108,19 +83,43 @@
group: "{{ GO_SERVER_GROUP }}"
force: no
- name: ensure everyone can read go-server log files
file:
path: "/var/log/go-server"
state: directory
mode: "0755"
owner: "{{ GO_SERVER_USER }}"
group: "{{ GO_SERVER_GROUP }}"
tags:
- install
- install:base
- include: download_backup.yml
when: GO_SERVER_BACKUP_S3_BUCKET and GO_SERVER_BACKUP_S3_OBJECT and GO_SERVER_RESTORE_BACKUP
- name: replace the password lines in the password file post-restore
- name: generate lines for go-server password file for users
command: "/usr/bin/htpasswd -nbs '{{ item.username }}' '{{ item.password }}'"
with_items: "{{ GO_SERVER_USERS }}"
when: item.username and item.password
register: go_server_users_htpasswds
tags:
- install
- install:app-configuration
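# Note on the data consumed below: registering a result across a with_items
# loop yields go_server_users_htpasswds.results, where each entry carries the
# original loop item as item.item (the username/password pair) and the
# generated htpasswd line as item.stdout.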
- name: replace the password lines in the password file
lineinfile:
dest: "{{ GO_SERVER_CONF_HOME }}/{{ GO_SERVER_PASSWORD_FILE_NAME }}"
regexp: "^{{ item.username }}"
line: "{{ item.password_hash.stdout }}"
with_items:
- { username: "{{ GO_SERVER_ADMIN_USERNAME }}", password: "{{ GO_SERVER_ADMIN_PASSWORD }}", password_hash: "{{ admin_user_password_line }}" }
- { username: "{{ GO_SERVER_BACKUP_USERNAME }}", password: "{{ GO_SERVER_BACKUP_PASSWORD }}", password_hash: "{{ backup_user_password_line }}" }
- { username: "{{ GO_SERVER_GOMATIC_USERNAME }}", password: "{{ GO_SERVER_GOMATIC_PASSWORD }}", password_hash: "{{ gomatic_user_password_line }}" }
when: item.username and item.password and item.password_hash
regexp: "^{{ item.item.username }}"
line: "{{ item.stdout }}"
create: yes
mode: "0600"
owner: "{{ GO_SERVER_USER }}"
group: "{{ GO_SERVER_GROUP }}"
with_items: "{{ go_server_users_htpasswds.results }}"
when: item.item.username and item.item.password and item.stdout
tags:
- install
- install:app-configuration
- name: install ssh key for the secure repos
copy:
......
{{ admin_user_password_line.stdout }}
{{ backup_user_password_line.stdout }}
{{ gomatic_user_password_line.stdout }}
---
GRAPHITE_CARBON_GIT_URL: "https://github.com/graphite-project/carbon.git"
GRAPHITE_WHISPER_GIT_URL: "https://github.com/graphite-project/whisper.git"
GRAPHITE_API_GIT_URL: "https://github.com/brutasse/graphite-api.git"
GRAPHITE_CARBON_VERSION: "816ac631efae33c7b47ecbe79ca9e1f907e5efe8"
GRAPHITE_WHISPER_VERSION: "0.9.15"
GRAPHITE_API_VERSION: "1.1.3"
graphite_root: "/opt/graphite"
graphite_storage_root: "/var/opt/graphite"
graphite_common_dirs:
root:
path: "{{ graphite_root }}"
owner: "{{ graphite_user }}"
group: "{{ graphite_group }}"
mode: "0755"
src:
path: "{{ graphite_root }}/src"
owner: "{{ graphite_user }}"
group: "{{ graphite_group }}"
mode: "0755"
conf:
path: "{{ graphite_root }}/conf"
owner: "{{ graphite_user }}"
group: "{{ graphite_group }}"
mode: "0755"
run:
path: "{{ graphite_root }}/run"
owner: "{{ graphite_user }}"
group: "{{ graphite_group }}"
mode: "0775"
storage:
path: "{{ graphite_storage_root }}"
owner: "{{ graphite_user }}"
group: "{{ graphite_group }}"
mode: "0775"
log:
path: "/var/log/carbon"
owner: "{{ carbon_user }}"
group: "{{ graphite_group }}"
mode: "0755"
graphite_common_deb_deps:
- libcairo2-dev
- libffi-dev
- pkg-config
- fontconfig
carbon_user: "carbon"
graphite_user: "graphite"
graphite_group: "graphite"
GRAPHITE_NGINX_PORT: 6000
GRAPHITE_GUNICORN_WORKERS: 3
CARBON_LISTEN_ADDR: 0.0.0.0
CARBON_CONF:
cache:
USER: "{{ carbon_user }}"
STORAGE_DIR: "{{ graphite_storage_root }}"
CACHE_WRITE_STRATEGY: max
MAX_CACHE_SIZE: inf
USE_FLOW_CONTROL: True
WHISPER_FALLOCATE_CREATE: True
MAX_CREATES_PER_MINUTE: 100
MAX_UPDATES_PER_SECOND: 1000
LINE_RECEIVER_INTERFACE: "{{ CARBON_LISTEN_ADDR }}"
PICKLE_RECEIVER_INTERFACE: "{{ CARBON_LISTEN_ADDR }}"
USE_INSECURE_UNPICKLER: False
CACHE_QUERY_INTERFACE: "{{ CARBON_LISTEN_ADDR }}"
LOG_CACHE_HITS: False
LOG_CACHE_QUEUE_SORTS: True
LOG_LISTENER_CONNECTIONS: True
LOG_UPDATES: False
ENABLE_LOGROTATION: True
WHISPER_AUTOFLUSH: False
"cache:1":
LINE_RECEIVER_PORT: 2003
PICKLE_RECEIVER_PORT: 2004
CACHE_QUERY_PORT: 7002
CARBON_STORAGE_SCHEMAS:
carbon:
description: "Catches all of Carbon's internal metrics"
pattern: "carbon.*"
retentions: "10s:1y"
default:
pattern: ".*"
retentions: "60s:1y"
CARBON_STORAGE_AGGREGATIONS:
min:
pattern: "\\.min$"
xFilesFactor: "0.1"
aggregationMethod: "min"
max:
pattern: "\\.max$"
xFilesFactor: "0.1"
aggregationMethod: "max"
sum:
pattern: "\\.count$"
xFilesFactor: "0"
aggregationMethod: "sum"
default_average:
pattern: ".*"
xFilesFactor: "0.5"
aggregationMethod: "average"
GRAPHITE_API_CONF:
search_index: "{{ graphite_root }}/api/index"
whisper:
directories:
- "{{ graphite_storage_root }}"
---
dependencies:
- role: common
tags:
- always
- config-encoders
---
- name: checkout carbon
git:
repo: "{{ GRAPHITE_CARBON_GIT_URL }}"
dest: "{{ graphite_root }}/src/carbon"
version: "{{ GRAPHITE_CARBON_VERSION }}"
tags:
- install
- install:code
- name: install carbon dependencies
pip:
virtualenv: "{{ graphite_root }}"
requirements: "{{ graphite_root }}/src/carbon/requirements.txt"
tags:
- install
- install:app-requirements
- name: install carbon
command: "{{ graphite_root }}/bin/python setup.py install"
args:
chdir: "{{ graphite_root }}/src/carbon"
creates: "{{ graphite_root }}/bin/carbon-cache.py"
tags:
- install
- install:app-requirements
- name: configure carbon
template:
src: "carbon/conf/{{ item }}.conf.j2"
dest: "{{ graphite_root }}/conf/{{ item }}.conf"
with_items:
- carbon
- storage-schemas
- storage-aggregation
tags:
- install
- install:app-configuration
- name: configure carbon service definition
template:
src: carbon/systemd/carbon-cache.service.j2
dest: /etc/systemd/system/carbon-cache.service
tags:
- install
- install:configuration
- name: reload systemd configuration
command: systemctl daemon-reload
tags:
- install
- install:configuration
- name: enable carbon
service:
name: carbon-cache
enabled: yes
state: started
tags:
- install
- install:configuration
- name: stop carbon
service:
name: carbon-cache
state: stopped
tags:
- manage:stop
- name: restart carbon
service:
name: carbon-cache
state: restarted
tags:
- manage:start
- manage:restart
---
- name: checkout graphite-api
git:
repo: "{{ GRAPHITE_API_GIT_URL }}"
dest: "{{ graphite_root }}/src/api"
version: "{{ GRAPHITE_API_VERSION }}"
tags:
- install
- install:code
- name: install graphite-api dependencies
pip:
virtualenv: "{{ graphite_root }}"
requirements: "{{ graphite_root }}/src/api/requirements.txt"
tags:
- install
- install:app-requirements
- name: install graphite-api
command: "{{ graphite_root }}/bin/python setup.py install"
args:
chdir: "{{ graphite_root }}/src/api"
tags:
- install
- install:app-requirements
- name: install gunicorn
pip:
virtualenv: "{{ graphite_root }}"
name: gunicorn
tags:
- install
- install:app-requirements
- name: configure graphite-api
template:
src: graphite-api/conf/graphite-api.yml.j2
dest: "{{ graphite_root }}/conf/graphite-api.yml"
owner: "{{ graphite_user }}"
group: "{{ graphite_group }}"
tags:
- install
- install:app-configuration
- name: configure graphite-api service definitions
template:
src: "graphite-api/systemd/{{ item }}.j2"
dest: "/etc/systemd/system/{{ item }}"
with_items:
- graphite-api.socket
- graphite-api.service
tags:
- install
- install:configuration
- name: reload systemd configuration
command: systemctl daemon-reload
tags:
- install
- install:configuration
- name: enable graphite-api
service:
name: "{{ item }}"
enabled: yes
with_items:
- graphite-api.socket
- graphite-api.service
- name: stop graphite-api
service:
name: graphite-api
state: stopped
tags:
- manage:stop
- name: restart graphite-api
service:
name: graphite-api
state: restarted
tags:
- manage:start
- manage:restart
---
#
# edX Configuration
#
# github: https://github.com/edx/configuration
# wiki: https://openedx.atlassian.net/wiki/display/OpenOPS
# code style: https://openedx.atlassian.net/wiki/display/OpenOPS/Ansible+Code+Conventions
# license: https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
#
# Tasks for role graphite
#
# Overview:
#
# This role installs "Graphite", or more specifically,
# its constituent parts: carbon and graphite-api.
#
# For our needs at edX, we do not need the full-fledged
# graphite-web, instead opting for the lighter graphite-api,
# which other tools such as Grafana can point to.
#
# Dependencies:
# - common
# - config-encoders
#
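# As a hedged example of "other tools can point to it": a Grafana Graphite
# data source would target the nginx vhost this role configures, e.g.
# http://graphite.example.com:6000 (the hostname is deployment-specific; the
# port is GRAPHITE_NGINX_PORT, 6000 by default).
#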
- name: create Graphite user group
group:
name: "{{ graphite_group }}"
state: present
tags:
- install
- install:base
- name: create service account for Graphite
user:
name: "{{ graphite_user }}"
system: yes
home: "{{ graphite_root }}"
shell: /bin/false
group: "{{ graphite_group }}"
state: present
tags:
- install
- install:base
- name: create service account for Carbon
user:
name: "{{ carbon_user }}"
system: yes
home: "{{ graphite_root }}"
shell: /bin/false
group: "{{ graphite_group }}"
state: present
tags:
- install
- install:base
- name: create common Graphite directories
file:
path: "{{ item.value.path }}"
state: directory
owner: "{{ item.value.owner }}"
group: "{{ item.value.group }}"
mode: "{{ item.value.mode }}"
with_dict: "{{ graphite_common_dirs }}"
tags:
- install
- install:base
- name: install Graphite dependencies
apt:
pkg: "{{ item }}"
state: present
with_items: "{{ graphite_common_deb_deps }}"
tags:
- install
- install:system-requirements
- name: initialize virtualenv for Graphite
command: "virtualenv {{ graphite_root }}"
args:
creates: "{{ graphite_root }}/bin/activate"
tags:
- install
- install:base
- include: whisper.yml
- include: carbon.yml
- include: graphite-api.yml
---
- name: checkout Whisper
git:
repo: "{{ GRAPHITE_WHISPER_GIT_URL }}"
dest: "{{ graphite_root }}/src/whisper"
version: "{{ GRAPHITE_WHISPER_VERSION }}"
tags:
- install
- install:code
- name: install Whisper
command: "{{ graphite_root }}/bin/python setup.py install"
args:
chdir: "{{ graphite_root }}/src/whisper"
creates: "{{ graphite_root }}/bin/whisper-create.py"
tags:
- install
- install:app-requirements
# Aggregation methods for whisper files. Entries are scanned in order,
# and first match wins. This file is scanned for changes every 60 seconds
#
# [name]
# pattern = <regex>
# xFilesFactor = <float between 0 and 1>
# aggregationMethod = <average|sum|last|max|min>
#
# name: Arbitrary unique name for the rule
# pattern: Regex pattern to match against the metric name
# xFilesFactor: Ratio of valid data points required for aggregation to the next retention to occur
# aggregationMethod: function to apply to data points for aggregation
{{ CARBON_STORAGE_AGGREGATIONS | encode_ini }}
# Schema definitions for Whisper files. Entries are scanned in order,
# and first match wins. This file is scanned for changes every 60 seconds.
{{ CARBON_STORAGE_SCHEMAS | encode_ini }}
[Unit]
Description=carbon-cache
After=network.target
[Service]
Type=forking
StandardOutput=syslog
StandardError=syslog
User={{ carbon_user }}
Group={{ graphite_group }}
ExecStart={{ graphite_root }}/bin/carbon-cache.py --config={{ graphite_root }}/conf/carbon.conf --pidfile={{ graphite_root }}/run/carbon-cache.pid --logdir=/var/log/carbon/ start
ExecReload=/bin/kill -USR1 $MAINPID
ExecStop={{ graphite_root }}/bin/carbon-cache.py --pidfile={{ graphite_root }}/run/carbon-cache.pid stop
PIDFile={{ graphite_root }}/run/carbon-cache.pid
PrivateTmp=true
LimitNOFILE=128000
[Install]
WantedBy=multi-user.target
[Unit]
Description=graphite-api service
After=network.target
Requires=graphite-api.socket
[Service]
StandardOutput=syslog
StandardError=syslog
User={{ graphite_user }}
Group={{ graphite_group }}
WorkingDirectory={{ graphite_root }}
Environment=GRAPHITE_API_CONFIG={{ graphite_root }}/conf/graphite-api.yml
ExecStart={{ graphite_root }}/bin/gunicorn -w {{ GRAPHITE_GUNICORN_WORKERS }} graphite_api.app:app
Restart=on-failure
ExecReload=/bin/kill -s HUP $MAINPID
ExecStop=/bin/kill -s TERM $MAINPID
PrivateTmp=true
LimitNOFILE=128000
[Install]
WantedBy=multi-user.target
[Unit]
Description=graphite-api socket
[Socket]
SocketUser={{ graphite_user }}
SocketGroup={{ graphite_group }}
ListenStream={{ graphite_root }}/run/graphite-api.sock
[Install]
WantedBy=sockets.target
......@@ -77,6 +77,8 @@ YARN_SITE_DEFAULT_CONFIG:
yarn.log-aggregation-enable: "true"
# 24 hour log retention
yarn.log-aggregation.retain-seconds: 86400
# Checking virtual memory usage causes too many spurious failures.
yarn.nodemanager.vmem-check-enabled: false
HADOOP_CORE_SITE_DEFAULT_CONFIG:
fs.default.name: "hdfs://localhost:9000"
......
......@@ -106,15 +106,6 @@
- assets
- assets:gather
- name: compile translations
shell: ". {{ insights_venv_dir }}/bin/activate && i18n_tool generate -v"
args:
chdir: "{{ insights_code_dir }}/analytics_dashboard"
become_user: "{{ insights_user }}"
tags:
- assets
- assets:gather
- name: write out the supervisior wrapper
template:
src: "edx/app/insights/insights.sh.j2"
......
......@@ -146,6 +146,7 @@
tags:
- install
- install:base
- install:plugins
# We first download the plugins to a temp directory and include
# the version in the file name. That way, if we increment
......@@ -159,6 +160,7 @@
tags:
- install
- install:base
- install:plugins
- name: Install Jenkins plugins
command: "cp {{ item.dest }} {{ jenkins_home }}/plugins/{{ item.item.name }}.hpi"
......@@ -167,6 +169,7 @@
tags:
- install
- install:base
- install:plugins
- name: Set Jenkins plugin permissions
file:
......@@ -179,6 +182,7 @@
tags:
- install
- install:base
- install:plugins
# We had to fork some plugins to workaround
# certain issues. If these changes get merged
......@@ -195,6 +199,7 @@
tags:
- install
- install:base
- install:plugins
- name: Compile custom plugins
command: "mvn -Dmaven.test.skip=true install"
......@@ -205,6 +210,7 @@
tags:
- install
- install:base
- install:plugins
- name: Install custom plugins
command: mv /tmp/{{ item.item.repo_name }}/target/{{ item.item.package }}
......@@ -214,6 +220,7 @@
tags:
- install
- install:base
- install:plugins
- name: Set custom plugin permissions
file:
......@@ -226,6 +233,7 @@
tags:
- install
- install:base
- install:plugins
# Plugins that are bundled with Jenkins are "pinned".
# Jenkins will overwrite updated plugins with its built-in version
......@@ -239,6 +247,7 @@
tags:
- install
- install:base
- install:plugins
- name: Setup nginx vhost
template:
......
......@@ -79,7 +79,7 @@
value: "{{ item[0].public_dns_name }}"
with_nested:
- "{{ ec2.instances }}"
- ['studio', 'ecommerce', 'preview', 'programs', 'discovery', 'credentials']
- ['studio', 'ecommerce', 'preview', 'discovery', 'credentials']
- name: Add new instance to host group
local_action:
......
......@@ -56,13 +56,6 @@ localdev_accounts:
}
- {
user: "{{ programs_user|default('None') }}",
home: "{{ programs_home|default('None') }}",
env: "programs_env",
repo: "programs"
}
- {
user: "{{ credentials_user|default('None') }}",
home: "{{ credentials_home|default('None') }}",
env: "credentials_env",
......
......@@ -17,21 +17,13 @@
locust_service_name: "locust"
locust_home: "{{ COMMON_APP_DIR }}/{{ locust_service_name }}"
locust_user: "locust"
locust_code_dir: "{{ locust_home }}/load-tests"
locust_requirements_base: "{{ locust_code_dir }}"
locust_requirements:
- "requirements.txt"
locust_run_dir: "{{ locust_code_dir }}/{{ LOCUST_LOADTEST_DIR }}"
locust_code_dir: "{{ locust_home }}/edx-load-tests"
locust_requirements_base: "{{ locust_code_dir }}/requirements"
LOCUST_GIT_IDENTITY: !!null
LOCUST_LOADTEST_REPO: 'load-tests'
LOCUST_LOADTEST_REPO: 'edx-load-tests'
LOCUST_LOADTEST_REPO_VERSION: 'master'
LOCUST_LOADTEST_DIR: 'locust/lms'
LOCUST_MASTER: !!null
LOCUST_TARGET_HOST: 'http://localhost/'
LOCUST_SERVICE_CONFIG: ''
......
......@@ -12,7 +12,6 @@
#
dependencies:
- common
- supervisor
- role: edx_service
edx_service_name: "{{ locust_service_name }}"
edx_service_config: "{{ LOCUST_SERVICE_CONFIG }}"
......
......@@ -12,9 +12,10 @@
# Tasks for role locust
#
# Overview:
# - Expects a load-tests repo that declares locust.io as a requirement
# - Installs load-tests repo using standard edx service roles
# - configures locust with defaults in supervisor
# - Expects an edx-load-tests repo that declares locust.io as a requirement
# - Installs edx-load-tests repo using standard edx service roles
# - Makes the locust user suitable for running an interactive shell
# - Adds a MOTD message for self-help resources related to loadtesting
#
# Dependencies:
# - edx-service role
......@@ -22,61 +23,62 @@
#
# Example play:
#
# # To run: ansible-playbook locust.yml -i "locustmaster.sandbox.edx.org," -e "LOCUST_LOADTEST_DIR='locust/lms'" -e "LOCUST_TARGET_HOST='https://courses-loadtest.edx.org'" -e "@/Users/derf/workspace/sandbox-secure/ansible/vars/developer-sandbox.yml"
# # To run: ansible-playbook locust.yml -i "locustdriver.sandbox.edx.org," -e "@/Users/derf/workspace/sandbox-secure/ansible/vars/developer-sandbox.yml"
# - name: Deploy Locust
# hosts: all
# sudo: True
# become: True
# gather_facts: True
# roles:
# - locust
- name: Install system packages
apt:
pkg: "{{ item }}"
state: "present"
update_cache: true
cache_valid_time: 3600
with_items: "{{ locust_debian_pkgs }}"
- name: Install application requirements
- name: Install base requirements used by all load tests
pip:
requirements: "{{ locust_requirements_base }}/{{ item }}"
requirements: "{{ locust_requirements_base }}/base.txt"
virtualenv: "{{ locust_home }}/venvs/{{ locust_service_name }}"
state: present
become_user: "{{ locust_user }}"
with_items: "{{ locust_requirements }}"
- name: Write supervisord config
template:
src: "edx/app/supervisor/conf.d.available/locust.conf.j2"
dest: "{{ supervisor_available_dir }}/{{ locust_service_name }}.conf"
owner: "{{ supervisor_user }}"
group: "{{ common_web_user }}"
mode: "0644"
- name: Enable supervisor script
file:
src: "{{ supervisor_available_dir }}/{{ locust_service_name }}.conf"
dest: "{{ supervisor_cfg_dir }}/{{ locust_service_name }}.conf"
state: link
force: yes
when: not disable_edx_services
- name: Update supervisor configuration
shell: "{{ supervisor_ctl }} -c {{ supervisor_cfg }} update"
when: not disable_edx_services
- name: Restart the applicaton
supervisorctl:
name: "{{ locust_service_name }}"
state: restarted
supervisorctl_path: "{{ supervisor_ctl }}"
config: "{{ supervisor_cfg }}"
when: not disable_edx_services
become_user: "{{ supervisor_service_user }}"
# Specifically, we are concerned about allowing as many open connections as
# possible, to simulate more locust clients.
- name: Increase file descriptor limit of the system (Session Logout and Login would be required)
lineinfile:
dest: "{{ ulimit_conf_file }}"
line: "{{ item.domain }} {{ item.type }} {{ item.item }} {{ item.value }}"
with_items: "{{ ulimit_config }}"
- name: Configure locust user with an interactive shell
user:
name: "{{ locust_user }}"
shell: /bin/bash
# This is a hack because we're not using .bash_aliases for its stated purpose.
# As of this writing, the script will activate a preinstalled virtualenv and
# change to the edx-load-tests directory.
- name: Hack .bash_aliases to make it more convenient to start loadtests
template:
src: locust_home/bash_aliases.j2
dest: "{{ locust_home }}/.bash_aliases"
owner: "{{ locust_user }}"
group: root
mode: '644'
- name: Setup a loadtest-specific MOTD
template:
src: etc/motd.tail.locust.j2
dest: /etc/motd.tail.locust
owner: root
group: root
mode: '644'
# the directory /etc/update-motd.d only exists on xenial
when: ansible_distribution_release == 'xenial'
- name: Add motd.tail.locust to update-motd.d
copy:
dest: "/etc/update-motd.d/76-motd-tail-locust"
content: "#!/bin/sh\necho\ncat /etc/motd.tail.locust\n"
force: true
owner: root
group: root
mode: "0755"
# the directory /etc/update-motd.d only exists on xenial
when: ansible_distribution_release == 'xenial'
#
# {{ ansible_managed }}
#
[program:{{ locust_service_name }}]
{% set locust_venv_bin = locust_home + "/venvs/" + locust_service_name + "/bin" %}
{% set executable = locust_venv_bin + '/locust' %}
{% if LOCUST_MASTER %}
{% set locust_extra_args = '--slave --master-host ' + LOCUST_MASTER %}
{% else %}
{% set locust_extra_args = '' %}
{% endif %}
command={{ executable }} --host {{ LOCUST_TARGET_HOST }} {{ locust_extra_args }}
user={{ common_web_user }}
directory={{ locust_run_dir }}
stdout_logfile={{ supervisor_log_dir }}/%(program_name)s-stdout.log
stderr_logfile={{ supervisor_log_dir }}/%(program_name)s-stderr.log
killasgroup=true
stopasgroup=true
*******************************************************************
* Message regarding loadtests *
* *
* Start your loadtesting journey by switching to the locust user: *
* *
* sudo su locust *
* *
* Please consult the loadtest environment queue before running *
* load tests against our shared loadtest environment: *
* https://openedx.atlassian.net/wiki/x/B4M3AQ *
* *
* For troubleshooting or other help, see our documentation: *
* https://openedx.atlassian.net/wiki/x/-QEsAQ *
*******************************************************************
cd {{ locust_code_dir }}
source {{ locust_home }}/venvs/{{ locust_service_name }}/bin/activate
......@@ -82,7 +82,6 @@ NGINX_EDXAPP_ERROR_PAGES:
CMS_HOSTNAME: '~^((stage|prod)-)?studio.*'
ECOMMERCE_HOSTNAME: '~^((stage|prod)-)?ecommerce.*'
PROGRAMS_HOSTNAME: '~^((stage|prod)-)?programs.*'
CREDENTIALS_HOSTNAME: '~^((stage|prod)-)?credentials.*'
nginx_template_dir: "edx/app/nginx/sites-available"
......@@ -107,8 +106,6 @@ nginx_edx_notes_api_gunicorn_hosts:
- 127.0.0.1
nginx_ecommerce_gunicorn_hosts:
- 127.0.0.1
nginx_programs_gunicorn_hosts:
- 127.0.0.1
nginx_credentails_gunicorn_hosts:
- 127.0.0.1
......@@ -135,7 +132,6 @@ EDXAPP_CMS_ENABLE_BASIC_AUTH: "{{ COMMON_ENABLE_BASIC_AUTH }}"
EDXAPP_LMS_ENABLE_BASIC_AUTH: "{{ COMMON_ENABLE_BASIC_AUTH }}"
EDXAPP_LMS_PREVIEW_ENABLE_BASIC_AUTH: "{{ COMMON_ENABLE_BASIC_AUTH }}"
KIBANA_ENABLE_BASIC_AUTH: "{{ COMMON_ENABLE_BASIC_AUTH }}"
PROGRAMS_ENABLE_BASIC_AUTH: "{{ COMMON_ENABLE_BASIC_AUTH }}"
XQUEUE_ENABLE_BASIC_AUTH: "{{ COMMON_ENABLE_BASIC_AUTH }}"
XSERVER_ENABLE_BASIC_AUTH: "{{ COMMON_ENABLE_BASIC_AUTH }}"
......@@ -147,7 +143,6 @@ NGINX_CREATE_HTPASSWD_FILE: >
EDXAPP_LMS_ENABLE_BASIC_AUTH|bool or
EDXAPP_LMS_PREVIEW_ENABLE_BASIC_AUTH|bool or
KIBANA_ENABLE_BASIC_AUTH|bool or
PROGRAMS_ENABLE_BASIC_AUTH|bool or
XQUEUE_ENABLE_BASIC_AUTH|bool or
XSERVER_ENABLE_BASIC_AUTH|bool
}}
#
# {{ ansible_managed }}
#
{# This prevents the injected comment from eating the server
directive. There's probably a better way of doing this,
but I don't know it currently.
#}
{% raw %}
{% endraw %}
{%- if "graphite" in nginx_default_sites -%}
{%- set default_site = "default_server" -%}
{%- else -%}
{%- set default_site = "" -%}
{%- endif -%}
upstream graphite_app_server {
server unix:{{ graphite_root }}/run/graphite-api.sock fail_timeout=0;
}
server {
server_name graphite.*;
listen {{ GRAPHITE_NGINX_PORT }} {{ default_site }};
client_max_body_size 1M;
keepalive_timeout 5;
location / {
try_files $uri @proxy_to_app;
}
location @proxy_to_app {
{% if NGINX_SET_X_FORWARDED_HEADERS %}
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Port $server_port;
proxy_set_header X-Forwarded-For $remote_addr;
{% else %}
proxy_set_header X-Forwarded-Proto $http_x_forwarded_proto;
proxy_set_header X-Forwarded-Port $http_x_forwarded_port;
proxy_set_header X-Forwarded-For $http_x_forwarded_for;
{% endif %}
# newrelic-specific header records the time when nginx handles a request.
proxy_set_header X-Queue-Start "t=${msec}";
proxy_set_header Host $http_host;
proxy_redirect off;
proxy_pass http://graphite_app_server;
}
}
#
# {{ ansible_managed }}
#
{% if "programs" in nginx_default_sites %}
{% set default_site = "default_server" %}
{% else %}
{% set default_site = "" %}
{% endif %}
upstream programs_app_server {
{% for host in nginx_programs_gunicorn_hosts %}
server {{ host }}:{{ programs_gunicorn_port }} fail_timeout=0;
{% endfor %}
}
server {
server_name {{ PROGRAMS_HOSTNAME }};
listen {{ PROGRAMS_NGINX_PORT }} {{ default_site }};
{% include "common-settings.j2" %}
{% if NGINX_ENABLE_SSL %}
listen {{ PROGRAMS_SSL_NGINX_PORT }} ssl;
ssl_certificate /etc/ssl/certs/{{ NGINX_SSL_CERTIFICATE|basename }};
ssl_certificate_key /etc/ssl/private/{{ NGINX_SSL_KEY|basename }};
# Request that the browser use SSL for all connections.
add_header Strict-Transport-Security "max-age=31536000; includeSubDomains";
{% endif %}
# Nginx does not support nested conditions or OR conditions, so
# there is an unfortunate mix of conditionals here.
{% if NGINX_REDIRECT_TO_HTTPS %}
{% if NGINX_HTTPS_REDIRECT_STRATEGY == "scheme" %}
# Redirect http to https over single instance
if ($scheme != "https")
{
set $do_redirect_to_https "true";
}
{% elif NGINX_HTTPS_REDIRECT_STRATEGY == "forward_for_proto" %}
# Forward to HTTPS if we're an HTTP request... and the server is behind ELB
if ($http_x_forwarded_proto = "http")
{
set $do_redirect_to_https "true";
}
{% endif %}
# Execute the actual redirect
if ($do_redirect_to_https = "true")
{
return 301 https://$host$request_uri;
}
{% endif %}
location ~ ^/static/(?P<file>.*) {
root {{ PROGRAMS_DATA_DIR }};
try_files /staticfiles/$file =404;
# Request that the browser use SSL for these connections. Repeated here
# because add_header directives are only inherited from the previous level
# if there are no add_header directives defined on the current level.
# See: http://nginx.org/en/docs/http/ngx_http_headers_module.html#add_header.
add_header Strict-Transport-Security "max-age=31536000; includeSubDomains";
# Instruct the browser to cache static assets for one hour.
add_header Cache-Control "public; max-age=3600";
}
location ~ ^/media/(?P<file>.*) {
root {{ PROGRAMS_DATA_DIR }};
try_files /media/$file =404;
# django / app always assigns new filenames so these can be cached forever.
add_header Cache-Control "public; max-age=31536000";
}
location / {
{% if PROGRAMS_ENABLE_BASIC_AUTH|bool %}
{% include "basic-auth.j2" %}
{% endif %}
try_files $uri @proxy_to_app;
}
# The API should be secured with OAuth 2.0 or JWT.
location /api {
try_files $uri @proxy_to_app;
}
{% include "robots.j2" %}
location @proxy_to_app {
{% if NGINX_SET_X_FORWARDED_HEADERS %}
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Port $server_port;
proxy_set_header X-Forwarded-For $remote_addr;
{% else %}
proxy_set_header X-Forwarded-Proto $http_x_forwarded_proto;
proxy_set_header X-Forwarded-Port $http_x_forwarded_port;
proxy_set_header X-Forwarded-For $http_x_forwarded_for;
{% endif %}
# newrelic-specific header records the time when nginx handles a request.
proxy_set_header X-Queue-Start "t=${msec}";
proxy_set_header Host $http_host;
proxy_redirect off;
proxy_pass http://programs_app_server;
}
}
......@@ -32,13 +32,6 @@ oauth_client_setup_oauth2_clients:
logout_uri: "{{ INSIGHTS_LOGOUT_URL | default('None') }}"
}
- {
name: "{{ programs_service_name | default('None') }}",
url_root: "{{ PROGRAMS_URL_ROOT | default('None') }}",
id: "{{ PROGRAMS_SOCIAL_AUTH_EDX_OIDC_KEY | default('None') }}",
secret: "{{ PROGRAMS_SOCIAL_AUTH_EDX_OIDC_SECRET | default('None') }}",
logout_uri: "{{ PROGRAMS_LOGOUT_URL | default('None') }}"
}
- {
name: "{{ credentials_service_name | default('None') }}",
url_root: "{{ CREDENTIALS_URL_ROOT | default('None') }}",
id: "{{ CREDENTIALS_SOCIAL_AUTH_EDX_OIDC_KEY | default('None') }}",
......
---
#
# edX Configuration
#
# github: https://github.com/edx/configuration
# wiki: https://openedx.atlassian.net/wiki/display/OpenOPS
# code style: https://openedx.atlassian.net/wiki/display/OpenOPS/Ansible+Code+Conventions
# license: https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
##
# Defaults for role programs
#
PROGRAMS_GIT_IDENTITY: !!null
# depends upon Newrelic being enabled via COMMON_ENABLE_NEWRELIC
# and a key being provided via NEWRELIC_LICENSE_KEY
PROGRAMS_NEWRELIC_APPNAME: "{{ COMMON_ENVIRONMENT }}-{{ COMMON_DEPLOYMENT }}-{{ programs_service_name }}"
PROGRAMS_PIP_EXTRA_ARGS: "-i {{ COMMON_PYPI_MIRROR_URL }}"
PROGRAMS_NGINX_PORT: 18140
PROGRAMS_SSL_NGINX_PORT: 48140
PROGRAMS_DEFAULT_DB_NAME: 'programs'
PROGRAMS_DATABASE_USER: 'programs001'
PROGRAMS_DATABASE_PASSWORD: 'password'
PROGRAMS_DATABASE_HOST: 'localhost'
PROGRAMS_DATABASE_PORT: 3306
PROGRAMS_DATABASES:
# rw user
default:
ENGINE: 'django.db.backends.mysql'
NAME: '{{ PROGRAMS_DEFAULT_DB_NAME }}'
USER: '{{ PROGRAMS_DATABASE_USER }}'
PASSWORD: '{{ PROGRAMS_DATABASE_PASSWORD }}'
HOST: '{{ PROGRAMS_DATABASE_HOST }}'
PORT: '{{ PROGRAMS_DATABASE_PORT }}'
ATOMIC_REQUESTS: true
CONN_MAX_AGE: 60
PROGRAMS_VERSION: "master"
PROGRAMS_DJANGO_SETTINGS_MODULE: "programs.settings.production"
PROGRAMS_URL_ROOT: 'http://localhost:8004'
PROGRAMS_LOGOUT_URL: '{{ PROGRAMS_URL_ROOT }}/accounts/logout/'
PROGRAMS_LMS_URL_ROOT: 'http://127.0.0.1:8000'
PROGRAMS_ORGANIZATIONS_API_URL_ROOT: '{{ PROGRAMS_LMS_URL_ROOT }}/api/organizations/v0/'
PROGRAMS_SECRET_KEY: 'Your secret key here'
PROGRAMS_TIME_ZONE: 'UTC'
PROGRAMS_LANGUAGE_CODE: 'en-us'
# Used to automatically configure OAuth2 Client
PROGRAMS_SOCIAL_AUTH_EDX_OIDC_KEY : 'programs-key'
PROGRAMS_SOCIAL_AUTH_EDX_OIDC_SECRET : 'programs-secret'
PROGRAMS_SOCIAL_AUTH_REDIRECT_IS_HTTPS: false
PROGRAMS_OAUTH_URL_ROOT: '{{ PROGRAMS_LMS_URL_ROOT }}/oauth2'
PROGRAMS_SOCIAL_AUTH_EDX_OIDC_LOGOUT_URL: '{{ PROGRAMS_LMS_URL_ROOT }}/logout'
PROGRAMS_JWT_AUDIENCE: '{{ EDXAPP_JWT_AUDIENCE | default("SET-ME-PLEASE") }}'
PROGRAMS_JWT_ISSUER: '{{ PROGRAMS_OAUTH_URL_ROOT }}'
PROGRAMS_JWT_SECRET_KEY: '{{ EDXAPP_JWT_SECRET_KEY | default("lms-secret") }}'
PROGRAMS_JWT_AUTH:
JWT_ISSUERS:
- AUDIENCE: '{{ PROGRAMS_JWT_AUDIENCE }}'
ISSUER: '{{ PROGRAMS_JWT_ISSUER }}'
SECRET_KEY: '{{ PROGRAMS_JWT_SECRET_KEY }}'
- AUDIENCE: '{{ PROGRAMS_SOCIAL_AUTH_EDX_OIDC_KEY }}'
ISSUER: '{{ PROGRAMS_OAUTH_URL_ROOT }}'
SECRET_KEY: '{{ PROGRAMS_SOCIAL_AUTH_EDX_OIDC_SECRET }}'
PROGRAMS_PLATFORM_NAME: 'Your Platform Name Here'
# CORS
# See: https://github.com/ottoyiu/django-cors-headers/.
# the whitelist should always contain the public hostname for Studio in the given environment.
PROGRAMS_CORS_ORIGIN_WHITELIST:
- '127.0.0.1:8001'
PROGRAMS_DATA_DIR: '{{ COMMON_DATA_DIR }}/{{ programs_service_name }}'
PROGRAMS_MEDIA_ROOT: '{{ PROGRAMS_DATA_DIR }}/media'
PROGRAMS_MEDIA_URL: '/media/'
# Example settings to use Amazon S3 as a storage backend for user-uploaded files
# https://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#amazon-s3
#
# This is only for user-uploaded files and does not cover static assets that ship
# with the code.
#
# Note, AWS_S3_CUSTOM_DOMAIN is required, otherwise boto will generate non-working
# querystring URLs for assets (see https://github.com/boto/boto/issues/1477)
#
# Note, set AWS_S3_CUSTOM_DOMAIN to the cloudfront domain instead, when that is in use.
#
# PROGRAMS_BUCKET: mybucket
# programs_s3_domain: s3.amazonaws.com
# PROGRAMS_MEDIA_ROOT: 'media' # NOTE use '$source_ip/media' for an edx sandbox
#
# PROGRAMS_MEDIA_STORAGE_BACKEND:
# DEFAULT_FILE_STORAGE: 'programs.apps.core.s3utils.MediaS3BotoStorage'
# MEDIA_ROOT: '{{ PROGRAMS_MEDIA_ROOT }}'
# MEDIA_URL: 'https://{{ PROGRAMS_BUCKET }}.{{ programs_s3_domain }}/{{ PROGRAMS_MEDIA_ROOT }}/'
# AWS_STORAGE_BUCKET_NAME: '{{ PROGRAMS_BUCKET }}'
# AWS_S3_CUSTOM_DOMAIN: '{{ PROGRAMS_BUCKET }}.{{ programs_s3_domain }}'
# AWS_QUERYSTRING_AUTH: false
# AWS_QUERYSTRING_EXPIRE: false
# AWS_DEFAULT_ACL: ''
# AWS_HEADERS:
# Cache-Control: max-age=31536000
#
#
PROGRAMS_MEDIA_STORAGE_BACKEND:
DEFAULT_FILE_STORAGE: 'django.core.files.storage.FileSystemStorage'
MEDIA_ROOT: '{{ PROGRAMS_MEDIA_ROOT }}'
MEDIA_URL: '{{ PROGRAMS_MEDIA_URL }}'
PROGRAMS_SERVICE_CONFIG:
SECRET_KEY: '{{ PROGRAMS_SECRET_KEY }}'
TIME_ZONE: '{{ PROGRAMS_TIME_ZONE }}'
LANGUAGE_CODE: '{{ PROGRAMS_LANGUAGE_CODE }}'
SOCIAL_AUTH_EDX_OIDC_KEY: '{{ PROGRAMS_SOCIAL_AUTH_EDX_OIDC_KEY }}'
SOCIAL_AUTH_EDX_OIDC_SECRET: '{{ PROGRAMS_SOCIAL_AUTH_EDX_OIDC_SECRET }}'
SOCIAL_AUTH_EDX_OIDC_ID_TOKEN_DECRYPTION_KEY: '{{ PROGRAMS_SOCIAL_AUTH_EDX_OIDC_SECRET }}'
SOCIAL_AUTH_EDX_OIDC_URL_ROOT: '{{ PROGRAMS_OAUTH_URL_ROOT }}'
SOCIAL_AUTH_EDX_OIDC_LOGOUT_URL: '{{ PROGRAMS_SOCIAL_AUTH_EDX_OIDC_LOGOUT_URL }}'
SOCIAL_AUTH_REDIRECT_IS_HTTPS: '{{ PROGRAMS_SOCIAL_AUTH_REDIRECT_IS_HTTPS }}'
STATIC_ROOT: '{{ PROGRAMS_DATA_DIR }}/staticfiles'
# db config
DATABASE_OPTIONS:
connect_timeout: 10
DATABASES: '{{ PROGRAMS_DATABASES }}'
PLATFORM_NAME: '{{ PROGRAMS_PLATFORM_NAME }}'
CORS_ORIGIN_WHITELIST: '{{ PROGRAMS_CORS_ORIGIN_WHITELIST }}'
PUBLIC_URL_ROOT: '{{ PROGRAMS_URL_ROOT }}'
ORGANIZATIONS_API_URL_ROOT: '{{ PROGRAMS_ORGANIZATIONS_API_URL_ROOT }}'
# This hash contains top-level Django settings and must be unpacked
# when loading/overriding settings.
MEDIA_STORAGE_BACKEND: '{{ PROGRAMS_MEDIA_STORAGE_BACKEND }}'
JWT_AUTH: '{{ PROGRAMS_JWT_AUTH }}'
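# A hedged sketch of what "unpacked" means on the application side (the actual
# code lives in the programs service's settings modules, not in this role):
# the service loads its YAML config and then does roughly
#   MEDIA_STORAGE_BACKEND = config['MEDIA_STORAGE_BACKEND']
#   vars().update(MEDIA_STORAGE_BACKEND)
# so DEFAULT_FILE_STORAGE, MEDIA_ROOT and MEDIA_URL land as top-level settings.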
PROGRAMS_REPOS:
- PROTOCOL: "{{ COMMON_GIT_PROTOCOL }}"
DOMAIN: "{{ COMMON_GIT_MIRROR }}"
PATH: "{{ COMMON_GIT_PATH }}"
REPO: programs.git
VERSION: "{{ PROGRAMS_VERSION }}"
DESTINATION: "{{ programs_code_dir }}"
SSH_KEY: "{{ PROGRAMS_GIT_IDENTITY }}"
PROGRAMS_GUNICORN_WORKERS: "2"
PROGRAMS_GUNICORN_EXTRA: ""
PROGRAMS_GUNICORN_EXTRA_CONF: ""
PROGRAMS_GUNICORN_WORKER_CLASS: "gevent"
#
# vars are namespaced with the module name.
#
programs_role_name: programs
programs_venv_dir: "{{ programs_home }}/venvs/{{ programs_service_name }}"
programs_service_name: "programs"
programs_user: "{{ programs_service_name }}"
programs_home: "{{ COMMON_APP_DIR }}/{{ programs_service_name }}"
programs_code_dir: "{{ programs_home }}/{{ programs_service_name }}"
programs_environment:
DJANGO_SETTINGS_MODULE: "{{ PROGRAMS_DJANGO_SETTINGS_MODULE }}"
PROGRAMS_CFG: "{{ COMMON_CFG_DIR }}/{{ programs_service_name }}.yml"
PATH: "{{ programs_venv_dir }}/bin:{{ ansible_env.PATH }}"
programs_gunicorn_host: "127.0.0.1"
programs_gunicorn_port: 8140
programs_gunicorn_timeout: 300
programs_log_dir: "{{ COMMON_LOG_DIR }}/{{ programs_service_name }}"
programs_requirements_base: "{{ programs_code_dir }}/requirements"
programs_requirements:
- production.txt
- optional.txt
#
# OS packages
#
programs_debian_pkgs:
- libjpeg-dev
- libmysqlclient-dev
- libssl-dev
- gettext
# Temporarily, while we get 16.04 rolled out everywhere
# Then this goes back in common_vars
- python2.7
programs_redhat_pkgs: []
---
#
# edX Configuration
#
# github: https://github.com/edx/configuration
# wiki: https://openedx.atlassian.net/wiki/display/OpenOPS
# code style: https://openedx.atlassian.net/wiki/display/OpenOPS/Ansible+Code+Conventions
# license: https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
##
# Role includes for role programs
#
# Example:
#
# dependencies:
# - {
# role: my_role
# my_role_var0: "foo"
# my_role_var1: "bar"
# }
dependencies:
- common
- supervisor
- role: edx_service
edx_service_name: "{{ programs_service_name }}"
edx_service_config: "{{ PROGRAMS_SERVICE_CONFIG }}"
edx_service_repos: "{{ PROGRAMS_REPOS }}"
edx_service_user: "{{ programs_user }}"
edx_service_home: "{{ programs_home }}"
edx_service_packages:
debian: "{{ programs_debian_pkgs }}"
redhat: "{{ programs_redhat_pkgs }}"
---
#
# edX Configuration
#
# github: https://github.com/edx/configuration
# wiki: https://openedx.atlassian.net/wiki/display/OpenOPS
# code style: https://openedx.atlassian.net/wiki/display/OpenOPS/Ansible+Code+Conventions
# license: https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
#
#
# Tasks for role programs
#
# Overview:
#
#
# Dependencies:
#
#
# Example play:
#
#
- name: add gunicorn configuration file
template:
src: edx/app/programs/programs_gunicorn.py.j2
dest: "{{ programs_home }}/programs_gunicorn.py"
become_user: "{{ programs_user }}"
tags:
- install
- install:configuration
- name: install application requirements
pip:
requirements: "{{ programs_requirements_base }}/{{ item }}"
# Ansible will initialize this virtualenv if it's missing.
virtualenv: "{{ programs_venv_dir }}"
state: present
become_user: "{{ programs_user }}"
with_items: "{{ programs_requirements }}"
tags:
- install
- install:app-requirements
- name: migrate
shell: >
chdir={{ programs_code_dir }}
DB_MIGRATION_USER={{ COMMON_MYSQL_MIGRATE_USER }}
DB_MIGRATION_PASS={{ COMMON_MYSQL_MIGRATE_PASS }}
{{ programs_venv_dir }}/bin/python ./manage.py migrate --noinput
become_user: "{{ programs_user }}"
environment: "{{ programs_environment }}"
when: migrate_db is defined and migrate_db|lower == "yes"
tags:
- migrate
- migrate:db
- name: run collectstatic
shell: "{{ programs_venv_dir }}/bin/python manage.py collectstatic --noinput"
args:
chdir: "{{ programs_code_dir }}"
become_user: "{{ programs_user }}"
environment: "{{ programs_environment }}"
when: not devstack
tags:
- assets
- assets:gather
# NOTE: this directory is not used or needed when S3 backs PROGRAMS_MEDIA_STORAGE_BACKEND
- name: create programs media dir
file:
path: "{{ item }}"
state: directory
mode: 0775
owner: "{{ programs_user }}"
group: "{{ common_web_group }}"
with_items:
- "{{ PROGRAMS_MEDIA_ROOT }}"
tags:
- install
- install:base
- name: write out the supervisor wrapper
template:
src: "edx/app/programs/programs.sh.j2"
dest: "{{ programs_home }}/{{ programs_service_name }}.sh"
mode: 0650
owner: "{{ supervisor_user }}"
group: "{{ common_web_user }}"
tags:
- install
- install:configuration
- name: write supervisord config
template:
src: "edx/app/supervisor/conf.d.available/programs.conf.j2"
dest: "{{ supervisor_available_dir }}/{{ programs_service_name }}.conf"
owner: "{{ supervisor_user }}"
group: "{{ common_web_user }}"
mode: 0644
tags:
- install
- install:configuration
- name: setup the programs env file
template:
src: "./{{ programs_home }}/{{ programs_service_name }}_env.j2"
dest: "{{ programs_home }}/programs_env"
owner: "{{ programs_user }}"
group: "{{ programs_user }}"
mode: 0644
tags:
- install
- install:configuration
- name: enable supervisor script
file:
src: "{{ supervisor_available_dir }}/{{ programs_service_name }}.conf"
dest: "{{ supervisor_cfg_dir }}/{{ programs_service_name }}.conf"
state: link
force: yes
when: not disable_edx_services
tags:
- install
- install:configuration
- name: update supervisor configuration
shell: "{{ supervisor_ctl }} -c {{ supervisor_cfg }} update"
when: not disable_edx_services
tags:
- manage
- manage:start
- name: create symlinks from the venv bin dir
file:
src: "{{ programs_venv_dir }}/bin/{{ item }}"
dest: "{{ COMMON_BIN_DIR }}/{{ item.split('.')[0] }}.programs"
state: link
with_items:
- python
- pip
- django-admin.py
tags:
- install
- install:app-requirements
- name: create symlinks from the repo dir
file:
src: "{{ programs_code_dir }}/{{ item }}"
dest: "{{ COMMON_BIN_DIR }}/{{ item.split('.')[0] }}.programs"
state: link
with_items:
- manage.py
tags:
- install
- install:app-requirements
- name: restart the application
supervisorctl:
state: restarted
supervisorctl_path: "{{ supervisor_ctl }}"
config: "{{ supervisor_cfg }}"
name: "{{ programs_service_name }}"
when: not disable_edx_services
become_user: "{{ supervisor_service_user }}"
tags:
- manage
- manage:start
#!/usr/bin/env bash
# {{ ansible_managed }}
{% set programs_venv_bin = programs_home + "/venvs/" + programs_service_name + "/bin" %}
{% if COMMON_ENABLE_NEWRELIC_APP %}
{% set executable = programs_venv_bin + '/newrelic-admin run-program ' + programs_venv_bin + '/gunicorn' %}
{% else %}
{% set executable = programs_venv_bin + '/gunicorn' %}
{% endif %}
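
# When New Relic is enabled, gunicorn is launched through the newrelic-admin
# wrapper and the agent is configured via the NEW_RELIC_* exports below.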
{% if COMMON_ENABLE_NEWRELIC_APP %}
export NEW_RELIC_APP_NAME="{{ PROGRAMS_NEWRELIC_APPNAME }}"
export NEW_RELIC_LICENSE_KEY="{{ NEWRELIC_LICENSE_KEY }}"
{% endif -%}
source {{ programs_home }}/programs_env
{{ executable }} -c {{ programs_home }}/programs_gunicorn.py {{ PROGRAMS_GUNICORN_EXTRA }} programs.wsgi:application
# {{ ansible_managed }}
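# Export each programs_environment variable; entries with an empty value are skipped.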
{% for name,value in programs_environment.items() -%}
{%- if value -%}
export {{ name }}="{{ value }}"
{% endif %}
{%- endfor %}
"""
gunicorn configuration file: http://docs.gunicorn.org/en/develop/configure.html
{{ ansible_managed }}
"""
timeout = {{ programs_gunicorn_timeout }}
bind = "{{ programs_gunicorn_host }}:{{ programs_gunicorn_port }}"
pythonpath = "{{ programs_code_dir }}"
workers = {{ PROGRAMS_GUNICORN_WORKERS }}
worker_class = "{{ PROGRAMS_GUNICORN_WORKER_CLASS }}"
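
# PROGRAMS_GUNICORN_EXTRA_CONF is rendered verbatim below, so arbitrary extra
# gunicorn settings can be appended through that variable.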
{{ PROGRAMS_GUNICORN_EXTRA_CONF }}
#
# {{ ansible_managed }}
#
[program:{{ programs_service_name }}]
command={{ programs_home }}/{{ programs_service_name }}.sh
user={{ common_web_user }}
directory={{ programs_code_dir }}
stdout_logfile={{ supervisor_log_dir }}/%(program_name)s-stdout.log
stderr_logfile={{ supervisor_log_dir }}/%(program_name)s-stderr.log
killasgroup=true
stopasgroup=true
......@@ -51,6 +51,8 @@
owner: "{{ rabbitmq_user }}"
group: "{{ rabbitmq_group }}"
mode: "0755"
tags:
- "monitoring"
- name: Add RabbitMQ memory usage script
template:
......@@ -59,16 +61,22 @@
owner: "{{ rabbitmq_user }}"
group: "{{ rabbitmq_group }}"
mode: "0775"
tags:
- "monitoring"
- name: Set up a cron job to run queue script
cron:
name: "log-queue-lenghts"
job: "{{ rabbitmq_app_dir }}/log-rabbitmq-queues.sh >/dev/null 2>&1"
tags:
- "monitoring"
- name: Set up a cron job to run the script
cron:
name: "log-rabbitmq-memory-usage"
job: "{{ rabbitmq_app_dir }}/log-rabbitmq-memory.sh >/dev/null 2>&1"
tags:
- "monitoring"
- name: install logrotate configuration
template:
......
......@@ -67,10 +67,6 @@ supervisor_spec:
python: python.analytics_api
code: "{{ analytics_api_code_dir | default(None) }}"
env: "{{ analytics_api_home | default(None) }}/analytics_api_env"
- service: programs
python: python.programs
code: "{{ programs_code_dir | default(None) }}"
env: "{{ programs_home | default(None) }}/programs_env"
- service: credentials
python: python.credentials
code: "{{ credentials_code_dir | default(None) }}"
......
......@@ -15,7 +15,6 @@ MIGRATION_COMMANDS = {
'cms': "/edx/bin/edxapp-migrate-cms --noinput --list",
'xqueue': "SERVICE_VARIANT=xqueue sudo -E -u xqueue {python} {code_dir}/manage.py migrate --noinput --list --settings=xqueue.aws_settings",
'ecommerce': ". {env_file}; sudo -E -u ecommerce {python} {code_dir}/manage.py showmigrations",
'programs': ". {env_file}; sudo -E -u programs {python} {code_dir}/manage.py showmigrations",
'insights': ". {env_file}; sudo -E -u insights {python} {code_dir}/manage.py showmigrations",
'analytics_api': ". {env_file}; sudo -E -u analytics_api {python} {code_dir}/manage.py showmigrations",
'credentials': ". {env_file}; sudo -E -u credentials {python} {code_dir}/manage.py showmigrations",
......@@ -96,15 +95,6 @@ if __name__ == '__main__':
ecom_migration_args.add_argument("--ecommerce-code-dir",
help="Location of the ecommerce code.")
programs_migration_args = parser.add_argument_group("programs_migrations",
"Args for running programs migration checks.")
programs_migration_args.add_argument("--programs-python",
help="Path to python to use for executing migration check.")
programs_migration_args.add_argument("--programs-env",
help="Location of the programs environment file.")
programs_migration_args.add_argument("--programs-code-dir",
help="Location of the programs code.")
credentials_migration_args = parser.add_argument_group("credentials_migrations",
"Args for running credentials migration checks.")
credentials_migration_args.add_argument("--credentials-python",
......@@ -242,7 +232,6 @@ if __name__ == '__main__':
"lms": {'python': args.edxapp_python, 'env_file': args.edxapp_env, 'code_dir': args.edxapp_code_dir},
"cms": {'python': args.edxapp_python, 'env_file': args.edxapp_env, 'code_dir': args.edxapp_code_dir},
"ecommerce": {'python': args.ecommerce_python, 'env_file': args.ecommerce_env, 'code_dir': args.ecommerce_code_dir},
"programs": {'python': args.programs_python, 'env_file': args.programs_env, 'code_dir': args.programs_code_dir},
"credentials": {'python': args.credentials_python, 'env_file': args.credentials_env, 'code_dir': args.credentials_code_dir},
"discovery": {'python': args.discovery_python, 'env_file': args.discovery_env, 'code_dir': args.discovery_code_dir},
"insights": {'python': args.insights_python, 'env_file': args.insights_env, 'code_dir': args.insights_code_dir},
......
......@@ -3,12 +3,6 @@ description "Tasks before supervisord"
start on runlevel [2345]
task
{% if programs_code_dir is defined %}
{% set programs_command = "--programs-env " + programs_home + "/programs_env --programs-code-dir " + programs_code_dir + " --programs-python " + COMMON_BIN_DIR + "/python.programs" %}
{% else %}
{% set programs_command = "" %}
{% endif %}
{% if credentials_code_dir is defined %}
{% set credentials_command = "--credentials-env " + credentials_home + "/credentials_env --credentials-code-dir " + credentials_code_dir + " --credentials-python " + COMMON_BIN_DIR + "/python.credentials" %}
{% else %}
......@@ -22,4 +16,4 @@ task
{% endif %}
exec {{ supervisor_venv_dir }}/bin/python {{ supervisor_app_dir }}/pre_supervisor_checks.py --available={{ supervisor_available_dir }} --enabled={{ supervisor_cfg_dir }} {% if SUPERVISOR_HIPCHAT_API_KEY is defined %}--hipchat-api-key {{ SUPERVISOR_HIPCHAT_API_KEY }} --hipchat-room {{ SUPERVISOR_HIPCHAT_ROOM }} {% endif %} {% if edxapp_code_dir is defined %}--edxapp-python {{ COMMON_BIN_DIR }}/python.edxapp --edxapp-code-dir {{ edxapp_code_dir }} --edxapp-env {{ edxapp_app_dir }}/edxapp_env{% endif %} {% if xqueue_code_dir is defined %}--xqueue-code-dir {{ xqueue_code_dir }} --xqueue-python {{ COMMON_BIN_DIR }}/python.xqueue {% endif %} {% if ecommerce_code_dir is defined %}--ecommerce-env {{ ecommerce_home }}/ecommerce_env --ecommerce-code-dir {{ ecommerce_code_dir }} --ecommerce-python {{ COMMON_BIN_DIR }}/python.ecommerce {% endif %} {% if insights_code_dir is defined %}--insights-env {{ insights_home }}/insights_env --insights-code-dir {{ insights_code_dir }} --insights-python {{ COMMON_BIN_DIR }}/python.insights {% endif %} {% if analytics_api_code_dir is defined %}--analytics-api-env {{ analytics_api_home }}/analytics_api_env --analytics-api-code-dir {{ analytics_api_code_dir }} --analytics-api-python {{ COMMON_BIN_DIR }}/python.analytics_api {% endif %} {{ programs_command }} {{ discovery_command }} {{ credentials_command }}
exec {{ supervisor_venv_dir }}/bin/python {{ supervisor_app_dir }}/pre_supervisor_checks.py --available={{ supervisor_available_dir }} --enabled={{ supervisor_cfg_dir }} {% if SUPERVISOR_HIPCHAT_API_KEY is defined %}--hipchat-api-key {{ SUPERVISOR_HIPCHAT_API_KEY }} --hipchat-room {{ SUPERVISOR_HIPCHAT_ROOM }} {% endif %} {% if edxapp_code_dir is defined %}--edxapp-python {{ COMMON_BIN_DIR }}/python.edxapp --edxapp-code-dir {{ edxapp_code_dir }} --edxapp-env {{ edxapp_app_dir }}/edxapp_env{% endif %} {% if xqueue_code_dir is defined %}--xqueue-code-dir {{ xqueue_code_dir }} --xqueue-python {{ COMMON_BIN_DIR }}/python.xqueue {% endif %} {% if ecommerce_code_dir is defined %}--ecommerce-env {{ ecommerce_home }}/ecommerce_env --ecommerce-code-dir {{ ecommerce_code_dir }} --ecommerce-python {{ COMMON_BIN_DIR }}/python.ecommerce {% endif %} {% if insights_code_dir is defined %}--insights-env {{ insights_home }}/insights_env --insights-code-dir {{ insights_code_dir }} --insights-python {{ COMMON_BIN_DIR }}/python.insights {% endif %} {% if analytics_api_code_dir is defined %}--analytics-api-env {{ analytics_api_home }}/analytics_api_env --analytics-api-code-dir {{ analytics_api_code_dir }} --analytics-api-python {{ COMMON_BIN_DIR }}/python.analytics_api {% endif %} {{ discovery_command }} {{ credentials_command }}
---
- name: import the test courses from github
shell: "{{ demo_edxapp_venv_bin }}/python /edx/bin/manage.edxapp lms git_add_course --settings={{ demo_edxapp_settings }} \"{{ item.github_url }}\""
become_user: "{{ common_web_user }}"
when: item.install == True
- include: import_course.yml
when: course.install == True
with_items: "{{ TESTCOURSES_EXPORTS }}"
loop_control:
loop_var: course
- name: enroll test users in the testcourses
shell: "{{ demo_edxapp_venv_bin }}/python ./manage.py lms --settings={{ demo_edxapp_settings }} --service-variant lms enroll_user_in_course -e {{ item[0].email }} -c {{ item[1].course_id }}"
......
- name: check out testcourses
git:
dest: "{{ demo_app_dir }}/{{ course.course_id }}"
repo: "{{ course.github_url }}"
accept_hostkey: yes
become_user: "{{ demo_edxapp_user }}"
register: testcourse_checkout
- name: import testcourse
shell: "{{ demo_edxapp_venv_bin }}/python ./manage.py cms --settings={{ demo_edxapp_settings }} import {{ demo_app_dir }} {{ course.course_id }}"
args:
chdir: "{{ demo_edxapp_code_dir }}"
when: testcourse_checkout.changed
become_user: "{{ common_web_user }}"
......@@ -46,7 +46,10 @@ jenkins_tools_plugins:
- { name: "github-oauth", version: "0.24" }
- { name: "gradle", version: "1.25" }
- { name: "credentials-binding", version: "1.9" }
# ec2 + dependencies, used by the android build workers + any additional workers we build
- { name: "ec2", version: "1.36" }
- { name: "node-iterator-api", version: "1.5" }
- { name: "aws-java-sdk", version: "1.11.37" }
- { name: "jackson2-api", version: "2.5.4" }
- { name: "aws-credentials", version: "1.11" }
---
#
# edX Configuration
#
# github: https://github.com/edx/configuration
# wiki: https://openedx.atlassian.net/wiki/display/OpenOPS
# code style: https://openedx.atlassian.net/wiki/display/OpenOPS/Ansible+Code+Conventions
# license: https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
##
# Defaults for role xsy
#
#
# vars are namespaced with the module name.
#
XSY_USERNAME: '1234_1234@chat.hipchat.com'
XSY_PASSWORD: 'password'
XSY_V1_TOKEN: 'HIPCHAT_V1_TOKEN'
XSY_V2_TOKEN: 'HIPCHAT_V2_TOKEN'
XSY_WILL_ROOMS: 'Hammer'
XSY_NAME: 'xsy'
XSY_HANDLE: 'xsy'
XSY_REDIS_URL: 'redis://fakeuser:redispassword@localhost:6379'
XSY_HTTPSERVER_PORT: '8081'
XSY_WORLD_WEATHER_KEY: !!null
xsy_service_name: xsy
xsy_user: xsy
xsy_app_dir: "{{ COMMON_APP_DIR }}/xsy"
xsy_code_dir: "{{ xsy_app_dir }}/xsy"
xsy_venvs_dir: "{{ xsy_app_dir }}/venvs"
xsy_venv_dir: "{{ xsy_venvs_dir }}/xsy"
xsy_venv_bin: "{{ xsy_venv_dir }}/bin"
xsy_source_repo: "https://github.com/edx/xsy.git"
xsy_version: "HEAD"
xsy_requirements_file: "{{ xsy_code_dir }}/requirements.txt"
xsy_supervisor_wrapper: "{{ xsy_app_dir }}/xsy-supervisor.sh"
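# Environment for the bot process. xsy appears to be built on the "will" bot
# framework (see run_will.py in the supervisor wrapper), hence the WILL_* names.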
xsy_environment:
WILL_USERNAME: "{{ XSY_USERNAME }}"
WILL_PASSWORD: "{{ XSY_PASSWORD }}"
WILL_V1_TOKEN: "{{ XSY_V1_TOKEN }}"
WILL_V2_TOKEN: "{{ XSY_V2_TOKEN }}"
WILL_NAME: "{{ XSY_NAME }}"
WILL_HANDLE: "{{ XSY_HANDLE }}"
WILL_REDIS_URL: "{{ XSY_REDIS_URL }}"
WILL_HTTPSERVER_PORT: "{{ XSY_HTTPSERVER_PORT }}"
WILL_WORLD_WEATHER_ONLINE_KEY: "{{ XSY_WORLD_WEATHER_KEY }}"
WILL_ROOMS: "{{ XSY_WILL_ROOMS }}"
#
# OS packages
#
xsy_debian_pkgs: []
xsy_redhat_pkgs: []
---
#
# edX Configuration
#
# github: https://github.com/edx/configuration
# wiki: https://openedx.atlassian.net/wiki/display/OpenOPS
# code style: https://openedx.atlassian.net/wiki/display/OpenOPS/Ansible+Code+Conventions
# license: https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
#
#
# Handlers for role xsy
#
# Overview:
#
#
- name: restart xsy
supervisorctl:
name: xsy
supervisorctl_path: "{{ supervisor_ctl }}"
config: "{{ supervisor_cfg }}"
state: restarted
when: not disable_edx_services
---
#
# edX Configuration
#
# github: https://github.com/edx/configuration
# wiki: https://openedx.atlassian.net/wiki/display/OpenOPS
# code style: https://openedx.atlassian.net/wiki/display/OpenOPS/Ansible+Code+Conventions
# license: https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
##
# Role includes for role xsy
#
# Example:
#
# dependencies:
# - {
# role: my_role
# my_role_var0: "foo"
# my_role_var1: "bar"
# }
dependencies:
- common
- supervisor
---
#
# edX Configuration
#
# github: https://github.com/edx/configuration
# wiki: https://openedx.atlassian.net/wiki/display/OpenOPS
# code style: https://openedx.atlassian.net/wiki/display/OpenOPS/Ansible+Code+Conventions
# license: https://github.com/edx/configuration/blob/master/LICENSE.TXT
#
#
#
# Tasks for role xsy
#
# Overview:
#
#
# Dependencies:
#
#
# Example play:
#
#
- name: Create application user
user:
name: "{{ xsy_user }}"
home: "{{ xsy_app_dir }}"
createhome: no
shell: /bin/false
- name: Create xsy user dirs
file:
path: "{{ item }}"
state: directory
owner: "{{ xsy_user }}"
group: "{{ common_web_group }}"
with_items:
- "{{ xsy_app_dir }}"
- "{{ xsy_venvs_dir }}"
- name: Setup the xsy env
template:
src: "xsy_env.j2"
dest: "{{ xsy_app_dir }}/xsy_env"
owner: "{{ xsy_user }}"
group: "{{ common_web_user }}"
mode: "0644"
notify: restart xsy
- name: Checkout the code
git:
dest: "{{ xsy_code_dir }}"
repo: "{{ xsy_source_repo }}"
version: "{{ xsy_version }}"
accept_hostkey: yes
become_user: "{{ xsy_user }}"
register: xsy_checkout
notify: restart xsy
- name: Install the requirements
pip:
requirements: "{{ xsy_requirements_file }}"
virtualenv: "{{ xsy_venv_dir }}"
state: present
extra_args: "-i {{ COMMON_PYPI_MIRROR_URL }}"
become_user: "{{ xsy_user }}"
notify: restart xsy
- name: Create the supervisor wrapper
template:
src: "{{ xsy_supervisor_wrapper | basename }}.j2"
dest: "{{ xsy_supervisor_wrapper }}"
mode: "0755"
become_user: "{{ xsy_user }}"
notify: restart xsy
- name: Create a supervisor config
template:
src: "xsy.conf.j2"
dest: "{{ supervisor_available_dir }}/xsy.conf"
owner: "{{ supervisor_user }}"
group: "{{ supervisor_user }}"
become_user: "{{ supervisor_user }}"
notify: restart xsy
- name: Enable the supervisor config
file:
src: "{{ supervisor_available_dir }}/xsy.conf"
dest: "{{ supervisor_cfg_dir }}/xsy.conf"
state: link
force: yes
mode: "0644"
become_user: "{{ supervisor_user }}"
when: not disable_edx_services
notify: restart xsy
- name: Update supervisor configuration
shell: "{{ supervisor_ctl }} -c {{ supervisor_cfg }} update"
register: supervisor_update
changed_when: supervisor_update.stdout is defined and supervisor_update.stdout != ""
when: not disable_edx_services
- name: Ensure xsy is started
supervisorctl:
name: xsy
supervisorctl_path: "{{ supervisor_ctl }}"
config: "{{ supervisor_cfg }}"
state: started
when: not disable_edx_services
- include: tag_ec2.yml
when: COMMON_TAG_EC2_INSTANCE
tags:
- deploy
---
- name: get instance information
action: ec2_facts
tags:
- deploy
- name: Tag instance
ec2_tag:
resource: "{{ ansible_ec2_instance_id }}"
region: "{{ ansible_ec2_placement_region }}"
tags:
"version:xsy" : "{{ xsy_source_repo }} {{ xsy_checkout.after }}"
when: xsy_checkout.after is defined
tags:
- deploy
#!/bin/bash
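# Source the xsy environment and run the Will-based bot from its virtualenv.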
source {{ xsy_app_dir }}/xsy_env
cd {{ xsy_code_dir }}
{{ xsy_venv_bin }}/python run_will.py
[program:xsy]
command={{ xsy_supervisor_wrapper }}
priority=999
user={{ common_web_user }}
stdout_logfile={{ supervisor_log_dir }}/%(program_name)s-stdout.log
stderr_logfile={{ supervisor_log_dir }}/%(program_name)s-stderr.log
killasgroup=true
stopasgroup=true
stopsignal=QUIT
# {{ ansible_managed }}
{% for name,value in xsy_environment.items() -%}
{%- if value -%}
export {{ name }}='{{ value }}'
{% endif %}
{%- endfor %}
......@@ -45,7 +45,6 @@ NOTIFIER_COMMENT_SERVICE_API_KEY: '{{ FORUM_API_KEY }}'
NOTIFIER_EMAIL_PASS: !!null
NOTIFIER_USER_SERVICE_HTTP_AUTH_PASS: !!null
POSTFIX_QUEUE_EXTERNAL_SMTP_PASSWORD: !!null
PROGRAMS_DATABASE_PASSWORD: !!null
REDIS_PASSWORD: !!null
SPLUNKFORWARDER_PASSWORD: !!null
SPLUNK_SMTP_PASSWORD: !!null
......@@ -55,7 +54,6 @@ XQUEUE_BASIC_AUTH_PASSWORD: !!null
XQUEUE_BASIC_AUTH_USER: !!null
XQUEUE_MYSQL_PASSWORD: !!null
XQUEUE_RABBITMQ_PASS: !!null
XSY_PASSWORD: !!null
EDXAPP_MONGO_PASSWORD: !!null
EDXAPP_MONGO_USER: 'edxapp'
......@@ -75,7 +73,6 @@ EDXAPP_EDXAPP_SECRET_KEY: !!null #SECRET_KEY
EDXAPP_PROFILE_IMAGE_SECRET_KEY: !!null #SECRET_KEY
INSIGHTS_SECRET_KEY: !!null #SECRET_KEY
NOTIFIER_LMS_SECRET_KEY: !!null #SECRET_KEY
PROGRAMS_SECRET_KEY: !!null #SECRET_KEY
MONGO_USERS:
- user: "{{ FORUM_MONGO_USER }}"
......
......@@ -31,10 +31,6 @@
#ECOMMERCE_LMS_URL_ROOT: "https://${deploy_host}"
#ECOMMERCE_SOCIAL_AUTH_REDIRECT_IS_HTTPS: true
#
#PROGRAMS_LMS_URL_ROOT: "https://${deploy_host}"
#PROGRAMS_URL_ROOT: "https://programs-${deploy_host}"
#PROGRAMS_SOCIAL_AUTH_REDIRECT_IS_HTTPS: true
#
#CREDENTIALS_LMS_URL_ROOT: "https://${deploy_host}"
#CREDENTIALS_URL_ROOT: "https://credentials-${deploy_host}"
#CREDENTIALS_SOCIAL_AUTH_REDIRECT_IS_HTTPS: true
......
......@@ -28,7 +28,6 @@
- forum
- ecommerce
- ecomworker
- programs
- role: notifier
NOTIFIER_DIGEST_TASK_INTERVAL: "5"
- analytics_api
......
......@@ -16,6 +16,7 @@
# When provisioning your devstack, we apply security updates
COMMON_SECURITY_UPDATES: true
SECURITY_UPGRADE_ON_ANSIBLE: true
MONGO_AUTH: false
vars_files:
- roles/edxapp/vars/devstack.yml
roles:
......@@ -35,7 +36,6 @@
- ecommerce
- role: ecomworker
ECOMMERCE_WORKER_BROKER_HOST: 127.0.0.1
- programs
- role: notifier
NOTIFIER_DIGEST_TASK_INTERVAL: "5"
- browsers
......
- name: Configure instance(s)
hosts: all
become: True
gather_facts: True
vars:
migrate_db: 'yes'
EDXAPP_LMS_NGINX_PORT: '80'
EDX_ANSIBLE_DUMP_VARS: true
CERTS_DOWNLOAD_URL: 'http://192.168.33.10:18090'
CERTS_VERIFY_URL: 'http://192.168.33.10:18090'
# used for releases
edx_platform_version: '{{ OPENEDX_RELEASE | default("master") }}'
certs_version: '{{ OPENEDX_RELEASE | default("master") }}'
forum_version: '{{ OPENEDX_RELEASE | default("master") }}'
xqueue_version: '{{ OPENEDX_RELEASE | default("master") }}'
demo_version: '{{ OPENEDX_RELEASE | default("master") }}'
roles:
- common
- vhost
- edx_ansible
- user
- role: nginx
nginx_sites:
- cms
- lms
- forum
- xqueue
- certs
nginx_default_sites:
- lms
- cms
- mysql
- edxlocal
- memcache
- mongo
- { role: 'rabbitmq', rabbitmq_ip: '127.0.0.1' }
- edxapp
- { role: 'edxapp', celery_worker: True }
- demo
- oraclejdk
- elasticsearch
- forum
- { role: "xqueue", update_users: True }
- certs
- role: analytics_api
when: ANALYTICS_API_GIT_IDENTITY
......@@ -12,7 +12,7 @@ python-simple-hipchat==0.2
prettytable==0.7.2
# Don't forget to update the version of this installed in
# the aws role as well.
awscli==1.10.28
awscli==1.11.58
requests==2.9.1
datadog==0.8.0
networkx==1.11
......
......@@ -52,7 +52,6 @@ VERSION_VARS=(
ANALYTICS_API_VERSION
ECOMMERCE_VERSION
ECOMMERCE_WORKER_VERSION
PROGRAMS_VERSION
)
EXTRA_VARS="-e SANDBOX_ENABLE_ECOMMERCE=True $EXTRA_VARS"
......
......@@ -193,10 +193,6 @@ ECOMMERCE_NGINX_PORT: 80
ECOMMERCE_SSL_NGINX_PORT: 443
ECOMMERCE_VERSION: $ecommerce_version
PROGRAMS_NGINX_PORT: 80
PROGRAMS_SSL_NGINX_PORT: 443
PROGRAMS_VERSION: $programs_version
CREDENTIALS_NGINX_PORT: 80
CREDENTIALS_SSL_NGINX_PORT: 443
CREDENTIALS_VERSION: $credentials_version
......@@ -287,12 +283,6 @@ ECOMMERCE_ECOMMERCE_URL_ROOT: "https://ecommerce-${deploy_host}"
ECOMMERCE_LMS_URL_ROOT: "https://${deploy_host}"
ECOMMERCE_SOCIAL_AUTH_REDIRECT_IS_HTTPS: true
PROGRAMS_LMS_URL_ROOT: "https://${deploy_host}"
PROGRAMS_URL_ROOT: "https://programs-${deploy_host}"
PROGRAMS_SOCIAL_AUTH_REDIRECT_IS_HTTPS: true
PROGRAMS_CORS_ORIGIN_WHITELIST:
- studio-${deploy_host}
CREDENTIALS_LMS_URL_ROOT: "https://${deploy_host}"
CREDENTIALS_DOMAIN: "credentials-${deploy_host}"
CREDENTIALS_URL_ROOT: "https://{{ CREDENTIALS_DOMAIN }}"
......@@ -301,7 +291,6 @@ COURSE_DISCOVERY_ECOMMERCE_API_URL: "https://ecommerce-${deploy_host}/api/v2"
DISCOVERY_URL_ROOT: "https://discovery-${deploy_host}"
DISCOVERY_SOCIAL_AUTH_REDIRECT_IS_HTTPS: true
DISCOVERY_PROGRAMS_API_URL: "{{ PROGRAMS_URL_ROOT }}/api/v1/"
EOF
fi
......@@ -348,7 +337,7 @@ EOF
fi
declare -A deploy
roles="edxapp forum ecommerce programs credentials discovery notifier xqueue xserver certs demo testcourses"
roles="edxapp forum ecommerce credentials discovery notifier xqueue xserver certs demo testcourses"
for role in $roles; do
deploy[$role]=${!role}
......
......@@ -22,7 +22,6 @@ weights:
- ecommerce: 6
- rabbitmq: 2
- automated: 1
- programs: 4
- mysql: 2
- elasticsearch: 7
- docker-tools: 3
......
import argparse
import subprocess
import requests
from requests.exceptions import HTTPError
import sys
parser=argparse.ArgumentParser(description='Shovels between RabbitMQ Clusters')
parser.add_argument('--src_host',action='store',dest='src_host')
parser.add_argument('--dest_host',action='store',dest='dest_host',default='127.0.0.1')
parser.add_argument('--src_user',action='store',dest='src_user')
parser.add_argument('--src_user_pass',action='store',dest='src_user_pass')
parser.add_argument('--dest_user',action='store',dest='dest_user')
parser.add_argument('--dest_user_pass',action='store',dest='dest_user_pass')
args=parser.parse_args()
src_uri='amqp://{}:{}@{}'.format(args.src_user,args.src_user_pass,args.src_host)
dest_uri='amqp://{}:{}@{}'.format(args.dest_user,args.dest_user_pass,args.dest_host)
port=15672
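
# 15672 is the default port of the RabbitMQ management plugin's HTTP API,
# which serves the /api/vhosts and /api/queues endpoints used below.

# Return the vhost names on the source broker (only names starting with '/').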
def list_vhosts():
url='http://{}:{}/api/vhosts'.format(args.src_host,port)
try:
response=requests.get(url,auth=(args.src_user,args.src_user_pass))
response.raise_for_status()
vhosts=[v['name'] for v in response.json() if v['name'].startswith('/')]
except Exception as ex:
print "Failed to get vhosts: {}".format(ex)
sys.exit(1)
return vhosts
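
# Return the names of the queues on the source broker.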
def list_queues():
    queues=[]
    for vhost in list_vhosts():
        url='http://{}:{}/api/queues/{}'.format(args.src_host,port,vhost)
        try:
            response=requests.get(url,auth=(args.src_user,args.src_user_pass))
            response.raise_for_status()
            # Accumulate queues across all vhosts rather than keeping only the last vhost's list.
            queues.extend(q['name'] for q in response.json())
        except Exception as ex:
            print "Failed to get queues: {}".format(ex)
            sys.exit(1)
    return queues
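
# Create a dynamic shovel on the local broker with `rabbitmqctl set_parameter`;
# returns rabbitmqctl's output only when the command fails.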
def create_shovel(shovel,arg):
cmd="/usr/sbin/rabbitmqctl set_parameter shovel {} '{}'".format(shovel,arg)
try:
subprocess.check_output(
cmd,stderr=subprocess.STDOUT,shell=True)
except subprocess.CalledProcessError as ex:
return ex.output
if __name__=='__main__':
"""
    Command line arguments are expected in the following format:
python shovel.py --src_host <src_host_IP> --src_user <src_rabbitmq_user> --src_user_pass <user_pass> \
--dest_host <dest_host_IP> --dest_user <dest_rabbitmq_user> --dest_user_pass <user_pass>
"""
output={}
for queue in list_queues():
"""
        Ignore the celeryev and *.pidbox queues; they should not be shovelled.
"""
q=queue.split('.')
if (q[0]!='celeryev' and q[-1]!='pidbox'):
args='{{"src-uri": "{}", "src-queue": "{}","dest-uri": "{}","dest-queue": "{}"}}'.format(src_uri,queue,dest_uri,queue)
print "Running shovel for queue:{}".format(queue)
shovel_output=create_shovel(queue,args)
if shovel_output is not None:
content=unicode(shovel_output,"utf-8")
output[queue]=content
for k,v in output.items():
print k,v