Commit 184f0145 by John Jarvis

Merge remote-tracking branch 'origin' into jarv/ansible

parents 4b40152c 4fe01eb3
# Copyright 2013 John Jarvis <john@jarv.org>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import time
import json

try:
    import boto.sqs
    from boto.exception import NoAuthHandlerFound
except ImportError:
    print "Boto is required for the sqs_notify callback plugin"
    raise


class CallbackModule(object):
    """
    This Ansible callback plugin sends task events
    to SQS.

    The following vars must be set in the environment:
        ANSIBLE_ENABLE_SQS - enables the callback module
        SQS_REGION - AWS region to connect to
        SQS_NAME - name of the SQS queue to create/use
        SQS_MSG_PREFIX - Additional data that will be put
                         on the queue (optional)

    The following events are put on the queue:
        - FAILURE events
        - OK events
        - TASK events
        - START events
    """

    def __init__(self):
        self.start_time = time.time()

        if 'ANSIBLE_ENABLE_SQS' in os.environ:
            self.enable_sqs = True
            if 'SQS_REGION' not in os.environ:
                print 'ANSIBLE_ENABLE_SQS enabled but SQS_REGION ' \
                      'not defined in environment'
                sys.exit(1)
            self.region = os.environ['SQS_REGION']
            try:
                self.sqs = boto.sqs.connect_to_region(self.region)
            except NoAuthHandlerFound:
                print 'ANSIBLE_ENABLE_SQS enabled but cannot connect ' \
                      'to AWS due to invalid credentials'
                sys.exit(1)
            if 'SQS_NAME' not in os.environ:
                print 'ANSIBLE_ENABLE_SQS enabled but SQS_NAME not ' \
                      'defined in environment'
                sys.exit(1)
            self.name = os.environ['SQS_NAME']
            self.queue = self.sqs.create_queue(self.name)
            if 'SQS_MSG_PREFIX' in os.environ:
                self.prefix = os.environ['SQS_MSG_PREFIX']
            else:
                self.prefix = ''
            self.last_seen_ts = {}
        else:
            self.enable_sqs = False

    def runner_on_failed(self, host, res, ignore_errors=False):
        if self.enable_sqs:
            if not ignore_errors:
                self._send_queue_message(res, 'FAILURE')

    def runner_on_ok(self, host, res):
        if self.enable_sqs:
            # don't send the setup results
            if res['invocation']['module_name'] != "setup":
                self._send_queue_message(res, 'OK')

    def playbook_on_task_start(self, name, is_conditional):
        if self.enable_sqs:
            self._send_queue_message(name, 'TASK')

    def playbook_on_play_start(self, pattern):
        if self.enable_sqs:
            self._send_queue_message(pattern, 'START')

    def playbook_on_stats(self, stats):
        if self.enable_sqs:
            d = {}
            delta = time.time() - self.start_time
            d['delta'] = delta
            for s in ['changed', 'failures', 'ok', 'processed', 'skipped']:
                d[s] = getattr(stats, s)
            self._send_queue_message(d, 'STATS')

    def _send_queue_message(self, msg, msg_type):
        if self.enable_sqs:
            from_start = time.time() - self.start_time
            payload = {msg_type: msg}
            payload['TS'] = from_start
            payload['PREFIX'] = self.prefix
            # update the last seen timestamp for
            # the message type
            self.last_seen_ts[msg_type] = time.time()
            if msg_type in ['OK', 'FAILURE']:
                # report the delta between the OK/FAILURE and
                # the last TASK
                if 'TASK' in self.last_seen_ts:
                    from_task = \
                        self.last_seen_ts[msg_type] - self.last_seen_ts['TASK']
                    payload['delta'] = from_task
                for output in ['stderr', 'stdout']:
                    if output in payload[msg_type]:
                        # only keep the last 1000 characters
                        # of stderr and stdout
                        if len(payload[msg_type][output]) > 1000:
                            payload[msg_type][output] = "(clipping) ... " \
                                + payload[msg_type][output][-1000:]
            self.sqs.send_message(self.queue, json.dumps(payload))
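
# A minimal consumer sketch (not part of the plugin): it reads back the JSON
# payloads sent above with boto. The region and queue name are assumptions and
# must match the SQS_REGION/SQS_NAME used by the plugin.
if __name__ == '__main__':
    conn = boto.sqs.connect_to_region('us-east-1')
    queue = conn.get_queue('ansible-events')
    for m in queue.get_messages(num_messages=10):
        event = json.loads(m.get_body())
        print event.get('PREFIX'), event.get('TS')
        queue.delete_message(m)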
../callback_plugins
\ No newline at end of file
@@ -2,5 +2,7 @@
   hosts: all
   sudo: True
   gather_facts: True
+  vars_files:
+    - ["{{ secure_vars }}", "dummy.yml"]
   roles:
     - certs
@@ -2,5 +2,7 @@
   hosts: all
   sudo: True
   gather_facts: True
+  vars_files:
+    - ["{{ secure_vars }}", "dummy.yml"]
   roles:
     - common
@@ -2,5 +2,7 @@
   hosts: all
   sudo: True
   gather_facts: True
+  vars_files:
+    - ["{{ secure_vars }}", "dummy.yml"]
   roles:
     - devpi
@@ -2,5 +2,7 @@
   hosts: all
   sudo: True
   gather_facts: True
+  vars_files:
+    - ["{{ secure_vars }}", "dummy.yml"]
   roles:
     - discern
---
# dummy var file
# This file is needed as a fall through
# for vars_files
dummy_var: True
@@ -2,5 +2,7 @@
   hosts: all
   sudo: True
   gather_facts: True
+  vars_files:
+    - ["{{ secure_vars }}", "dummy.yml"]
   roles:
     - edxapp
@@ -2,5 +2,7 @@
   hosts: all
   sudo: True
   gather_facts: True
+  vars_files:
+    - ["{{ secure_vars }}", "dummy.yml"]
   roles:
     - forum
@@ -2,5 +2,7 @@
   hosts: all
   sudo: True
   gather_facts: True
+  vars_files:
+    - ["{{ secure_vars }}", "dummy.yml"]
   roles:
     - ora
@@ -2,5 +2,7 @@
   hosts: all
   sudo: True
   gather_facts: False
+  vars_files:
+    - ["{{ secure_vars }}", "dummy.yml"]
   roles:
     - rabbitmq
@@ -2,5 +2,7 @@
   hosts: all
   sudo: True
   gather_facts: True
+  vars_files:
+    - ["{{ secure_vars }}", "dummy.yml"]
   roles:
     - role: xqueue
@@ -2,5 +2,7 @@
   hosts: all
   sudo: True
   gather_facts: True
+  vars_files:
+    - ["{{ secure_vars }}", "dummy.yml"]
   roles:
     - role: xserver
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: vpc_lookup
short_description: returns a list of subnet Ids using tags as criteria
description:
    - Returns a list of subnet Ids for a given set of tags that identify one or more VPCs
version_added: "1.5"
options:
  region:
    description:
      - The AWS region to use. Must be specified if ec2_url
        is not used. If not specified then the value of the
        EC2_REGION environment variable, if any, is used.
    required: false
    default: null
    aliases: [ 'aws_region', 'ec2_region' ]
  aws_secret_key:
    description:
      - AWS secret key. If not set then the value of
        the AWS_SECRET_KEY environment variable is used.
    required: false
    default: null
    aliases: [ 'ec2_secret_key', 'secret_key' ]
  aws_access_key:
    description:
      - AWS access key. If not set then the value of the
        AWS_ACCESS_KEY environment variable is used.
    required: false
    default: null
    aliases: [ 'ec2_access_key', 'access_key' ]
  tags:
    description:
      - tags to lookup
    required: false
    default: null
    type: dict
    aliases: []
requirements: [ "boto" ]
author: John Jarvis
'''

EXAMPLES = '''
# Note: None of these examples set aws_access_key, aws_secret_key, or region.
# It is assumed that their matching environment variables are set.

# Return all subnet and VPC Ids that match the tag "Name: foo"
- local_action:
    module: vpc_lookup
    tags:
      Name: foo
'''

import sys

AWS_REGIONS = ['ap-northeast-1',
               'ap-southeast-1',
               'ap-southeast-2',
               'eu-west-1',
               'sa-east-1',
               'us-east-1',
               'us-west-1',
               'us-west-2']

try:
    import boto.exception
    from boto.vpc import connect_to_region
except ImportError:
    print "failed=True msg='boto required for this module'"
    sys.exit(1)

def main():
    module = AnsibleModule(
        argument_spec=dict(
            region=dict(choices=AWS_REGIONS),
            aws_secret_key=dict(aliases=['ec2_secret_key', 'secret_key'],
                                no_log=True),
            aws_access_key=dict(aliases=['ec2_access_key', 'access_key']),
            tags=dict(default=None, type='dict'),
        )
    )

    tags = module.params.get('tags')
    aws_secret_key = module.params.get('aws_secret_key')
    aws_access_key = module.params.get('aws_access_key')
    region = module.params.get('region')

    # If we have a region specified, connect to its endpoint.
    if region:
        try:
            vpc_conn = connect_to_region(
                region,
                aws_access_key_id=aws_access_key,
                aws_secret_access_key=aws_secret_key)
        except boto.exception.NoAuthHandlerFound, e:
            module.fail_json(msg=str(e))
    else:
        module.fail_json(msg="region must be specified")

    # A single region-scoped connection serves both the subnet
    # and the VPC lookups.
    tag_filters = {'tag:' + tag: value
                   for tag, value in tags.iteritems()}

    subnet_ids = []
    for subnet in vpc_conn.get_all_subnets(filters=tag_filters):
        subnet_ids.append(subnet.id)

    vpc_ids = []
    for vpc in vpc_conn.get_all_vpcs(filters=tag_filters):
        vpc_ids.append(vpc.id)

    module.exit_json(changed=False, subnet_ids=subnet_ids, vpc_ids=vpc_ids)

# this is magic, see lib/ansible/module_common.py
#<<INCLUDE_ANSIBLE_MODULE_COMMON>>

main()
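
# For illustration only (hypothetical values), a successful run exits with JSON like:
#   {"changed": false, "subnet_ids": ["subnet-0123abcd"], "vpc_ids": ["vpc-0456efgh"]}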
@@ -34,7 +34,7 @@
 # Install the python pre requirements into {{ xqueue_venv_dir }}
-- name : install python pre-requirements
+- name : xqueue | install python pre-requirements
   pip: requirements="{{ xqueue_pre_requirements_file }}" virtualenv="{{ xqueue_venv_dir }}" state=present
   sudo_user: "{{ xqueue_user }}"
   notify:
@@ -43,7 +43,7 @@
     - deploy
 # Install the python post requirements into {{ xqueue_venv_dir }}
-- name : install python post-requirements
+- name : xqueue | install python post-requirements
   pip: requirements="{{ xqueue_post_requirements_file }}" virtualenv="{{ xqueue_venv_dir }}" state=present
   sudo_user: "{{ xqueue_user }}"
   notify:
Fabric==1.5.1
-Jinja2==2.6
+Jinja2==2.7.1
MarkupSafe==0.18
PyYAML==3.10
WebOb==1.2.3
ansible==1.3.2
argparse==1.2.1
beautifulsoup4==4.1.3
boto==2.10.0
cloudformation==0.0.0
decorator==3.4.0
distribute==0.6.30
docopt==0.6.1
dogapi==1.2.3
ipython==0.13.1
jenkinsapi==0.1.11
lxml==3.1beta1
newrelic==1.10.2.38
path.py==3.0.1
pingdom==0.2.0
pycrypto==2.6
pyparsing==1.5.6
pyrelic==0.2.0
python-dateutil==2.1
requests==1.1.0
schema==0.1.1
simplejson==3.3.0
simples3==1.0-alpha
six==1.2.0
-e git+https://github.com/bos/statprof.py.git@a17f7923b102c9039763583be9e377e8422e8f5f#egg=statprof-dev
ujson==1.30
distribute==0.6.24
ecdsa==0.10
paramiko==1.12.0
pycrypto==2.6.1
wsgiref==0.1.2
ansible==1.3.2
GitPython==0.3.2.RC1
pymongo==2.4.1
import argparse
import json
import logging as log
import pickle
import requests
import yaml
from datetime import datetime
from git import Repo
from os import path
from pprint import pformat
from pymongo import MongoClient, DESCENDING
from stage_release import uri_from

def releases(repo):
    """
    Yield all release candidate refs from the origin.
    """
    for ref in repo.refs:
        if ref.name.startswith('origin/rc/'):
            yield ref


def candidates_since(repo, time):
    """
    Given a repo, yield the release candidate refs that have a
    commit on them after the passed-in time.
    """
    for rc in releases(repo):
        last_update = datetime.utcfromtimestamp(rc.commit.committed_date)
        if last_update > time:
            # New or updated RC
            yield rc

def stage_release(url, token, repo, rc):
    """
    Submit a job to stage a new release for the new rc of the repo.
    """
    # Set up the Jenkins params.
    params = []
    params.append({'name': "{}_REF".format(repo), 'value': True})
    params.append({'name': repo, 'value': rc.commit.hexsha})
    build_params = {'parameter': params}

    log.info("New rc found {}, staging new release.".format(rc.name))

    r = requests.post(url,
                      data={"token": token},
                      params={"json": json.dumps(build_params)})
    if r.status_code != 201:
        msg = "Failed to submit request with params: {}"
        raise Exception(msg.format(pformat(build_params)))
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Monitor git repos for new rc branches.")
parser.add_argument('-c', '--config', required=True,
help="Config file.")
parser.add_argument('-p', '--pickle', default="data.pickle",
help="Pickle of presistent data.")
args = parser.parse_args()
config = yaml.safe_load(open(args.config))
if path.exists(args.pickle):
data = pickle.load(open(args.pickle))
else:
data = {}
# Presist the last time we made this check.
if 'last_check' not in data:
last_check = datetime.utcnow()
else:
last_check = data['last_check']
data['last_check'] = datetime.utcnow()
# Find plays that are affected by this repo.
repos_with_changes = {}
for repo in config['repos']:
# Check for new rc candidates.
for rc in candidates_since(Repo(repo), last_check):
# Notify stage-release to build for the new repo.
stage_release(config['abbey_url'], config['abbey_token'], repo, rc)
pickle.dump(data, open(args.pickle, 'w'))
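
# Example invocation (script and file names are hypothetical):
#   python monitor_repos.py -c config.yml -p data.pickle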
"""
Take in a YAML file with the basic data of all the things we could
deploy and command line hashes for the repos that we want to deploy
right now.
Example Config YAML file:
---
DOC_STORE_CONFIG:
hosts: [ list, of, mongo, hosts]
port: #
db: 'db'
user: 'jenkins'
password: 'password'
configuration_repo: "/path/to/configuration/repo"
configuration_secure_repo: "/path/to/configuration-secure"
repos:
edxapp:
plays:
- edxapp
- worker
xqueue:
plays:
- xqueue
6.00x:
plays:
- xserver
xserver:
plays:
- xserver
deployments:
edx:
- stage
- prod
edge:
- stage
- prod
loadtest:
- stage
# A jenkins URL to post requests for building AMIs
abbey_url: "http://...."
abbey_token: "API_TOKEN"
---
"""
import argparse
import json
import yaml
import logging as log
import requests
from datetime import datetime
from git import Repo
from pprint import pformat
from pymongo import MongoClient, DESCENDING

log.basicConfig(level=log.DEBUG)

def uri_from(doc_store_config):
    """
    Convert the below structure to a mongodb uri.

    DOC_STORE_CONFIG:
      hosts:
        - 'host1.com'
        - 'host2.com'
      port: 10012
      db: 'devops'
      user: 'username'
      password: 'password'
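
    For example, the structure above yields the uri (values illustrative):
    mongodb://username:password@host1.com:10012,host2.com:10012/devops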
"""
    uri_format = "mongodb://{user}:{password}@{hosts}/{db}"
    host_format = "{host}:{port}"

    port = doc_store_config['port']
    host_uris = [host_format.format(host=host, port=port)
                 for host in doc_store_config['hosts']]
    return uri_format.format(
        user=doc_store_config['user'],
        password=doc_store_config['password'],
        hosts=",".join(host_uris),
        db=doc_store_config['db'])

def prepare_release(args):
    config = yaml.safe_load(open(args.config))
    client = MongoClient(uri_from(config['DOC_STORE_CONFIG']))
    db = client[config['DOC_STORE_CONFIG']['db']]

    # Get configuration repo versions
    config_repo_ver = Repo(config['configuration_repo']).commit().hexsha
    config_secure_ver = Repo(config['configuration_secure_repo']).commit().hexsha

    # Parse the vars.
    var_array = map(lambda key_value: key_value.split('='), args.REPOS)
    update_repos = {item[0]: item[1] for item in var_array}
    log.info("Update repos: {}".format(pformat(update_repos)))

    release = {}
    now = datetime.utcnow()
    release['_id'] = args.release_id
    release['date_created'] = now
    release['date_modified'] = now
    release['build_status'] = 'Unknown'
    release['build_user'] = args.user

    release_coll = db[args.deployment]
    releases = release_coll.find({'build_status': 'Succeeded'}).sort('_id', DESCENDING)
    all_plays = {}

    try:
        last_successful = releases.next()
        all_plays = last_successful['plays']
    except StopIteration:
        # No successful builds.
        log.warn("No previously successful builds.")

    # For all repos that were updated
    for repo, ref in update_repos.items():
        var_name = "{}_version".format(repo.replace('-', '_'))
        if repo not in config['repos']:
            raise Exception("No info for repo with name '{}'".format(repo))

        # For any play that uses the updated repo (the config maps each
        # repo to the plays that deploy it; see the module docstring).
        for play in config['repos'][repo]['plays']:
            if play not in all_plays:
                all_plays[play] = {}
            if 'vars' not in all_plays[play]:
                all_plays[play]['vars'] = {}
            all_plays[play]['vars'][var_name] = ref

            # Configuration to use to build these AMIs
            all_plays[play]['configuration_ref'] = config_repo_ver
            all_plays[play]['configuration_secure_ref'] = config_secure_ver

            # Set amis to None for all envs of this deployment
            all_plays[play]['amis'] = {}
            for env in config['deployments'][args.deployment]:
                # Check the AMIs collection to see if an ami already exists
                # for this configuration.
                potential_ami = ami_for(db, env,
                                        args.deployment,
                                        play, config_repo_ver,
                                        config_secure_ver,
                                        ref)
                if potential_ami:
                    all_plays[play]['amis'][env] = potential_ami['_id']
                else:
                    all_plays[play]['amis'][env] = None

    release['plays'] = all_plays
    release_coll.insert(release)
    # All plays that need new AMIs have been updated.
    notify_abbey(config['abbey_url'], config['abbey_token'],
                 args.deployment, all_plays, args.release_id)

def ami_for(db, env, deployment, play, configuration,
            configuration_secure, ansible_vars):
    ami_signature = {
        'env': env,
        'deployment': deployment,
        'play': play,
        'configuration_ref': configuration,
        'configuration_secure_ref': configuration_secure,
        'vars': ansible_vars,
    }
    return db.amis.find_one(ami_signature)

def notify_abbey(abbey_url, abbey_token, deployment, all_plays, release_id):
    for play_name, play in all_plays.items():
        for env, ami in play['amis'].items():
            if ami is None:
                params = []
                params.append({'name': 'play', 'value': play_name})
                params.append({'name': 'deployment', 'value': deployment})
                params.append({'name': 'environment', 'value': env})
                params.append({'name': 'vars',
                               'value': yaml.dump(play['vars'], default_flow_style=False)})
                params.append({'name': 'release_id', 'value': release_id})
                build_params = {'parameter': params}

                log.info("Need ami for {}".format(pformat(build_params)))

                r = requests.post(abbey_url,
                                  data={"token": abbey_token},
                                  params={"json": json.dumps(build_params)})
                log.info("Sent request got {}".format(r))
                if r.status_code != 201:
                    # Something went wrong.
                    msg = "Failed to submit request with params: {}"
                    raise Exception(msg.format(pformat(build_params)))
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Prepare a new release.")
parser.add_argument('-c', '--config', required=True, help="Configuration for deploys")
parser.add_argument('-u', '--user', required=True, help="User staging the release.")
msg = "The deployment to build for eg. edx, edge, loadtest"
parser.add_argument('-d', '--deployment', required=True, help=msg)
parser.add_argument('-r', '--release-id', required=True, help="Id of Release.")
parser.add_argument('REPOS', nargs='+',
help="Any number of var=value(no spcae around '='" + \
" e.g. 'edxapp=3233bac xqueue=92832ab'")
args = parser.parse_args()
log.debug(args)
prepare_release(args)
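
# Example invocation (hashes and release id are illustrative; the script name
# follows the `from stage_release import uri_from` import above):
#   python stage_release.py -c config.yml -u jenkins -d edx -r 42 \
#       edxapp=3233bac xqueue=92832ab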