Commit aa419044 by Josh Drake, committed by Michael DeHaan

WIP on the re-implementation of fact caching and various backends.

parent fb5a1403
# (c) 2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from collections import MutableMapping

from ansible import utils
from ansible import constants as C
from ansible import errors
class FactCache(MutableMapping):

    def __init__(self, *args, **kwargs):
        self._plugin = utils.plugins.cache_loader.get(C.CACHE_PLUGIN)
        if self._plugin is None:
            return

    def __getitem__(self, key):
        if key not in self:
            raise KeyError
        return self._plugin.get(key)

    def __setitem__(self, key, value):
        self._plugin.set(key, value)

    def __delitem__(self, key):
        self._plugin.delete(key)

    def __contains__(self, key):
        return self._plugin.contains(key)

    def __iter__(self):
        return iter(self._plugin.keys())

    def __len__(self):
        return len(self._plugin.keys())

    def copy(self):
        """
        Return a primitive copy of the keys and values from the cache.
        """
        return dict(self.iteritems())

    def keys(self):
        return self._plugin.keys()
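Because FactCache implements the full MutableMapping interface, callers can treat the plugin-backed cache exactly like a dict. A minimal usage sketch (hypothetical host name and facts; assumes a configured, loadable backend plugin):

cache = FactCache()
cache['web01'] = {'ansible_distribution': 'Ubuntu'}  # delegates to plugin.set()
if 'web01' in cache:                                 # delegates to plugin.contains()
    facts = cache['web01']                           # delegates to plugin.get()
print list(cache)                                    # iteration goes through plugin.keys()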
class BaseCacheModule(object):

    def get(self, key):
        raise NotImplementedError("Subclasses of {} must implement the 'get' method".format(self.__class__.__name__))

    def set(self, key, value):
        raise NotImplementedError("Subclasses of {} must implement the 'set' method".format(self.__class__.__name__))

    def keys(self):
        raise NotImplementedError("Subclasses of {} must implement the 'keys' method".format(self.__class__.__name__))

    def contains(self, key):
        raise NotImplementedError("Subclasses of {} must implement the 'contains' method".format(self.__class__.__name__))

    def delete(self, key):
        raise NotImplementedError("Subclasses of {} must implement the 'delete' method".format(self.__class__.__name__))
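Any third-party backend only has to fill in these five methods; the plugin loader discovers it by the CacheModule class name. A minimal sketch of a conforming in-memory backend (hypothetical; the real memory module later in this commit implements the same five methods without subclassing the base):

class CacheModule(BaseCacheModule):

    def __init__(self, *args, **kwargs):
        self._store = {}

    def get(self, key):
        return self._store.get(key)

    def set(self, key, value):
        self._store[key] = value

    def keys(self):
        return self._store.keys()

    def contains(self, key):
        return key in self._store

    def delete(self, key):
        del self._store[key]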
# (c) 2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import

import collections
import json
import os
import shutil
import sys
import tempfile

from datetime import datetime

from ansible import constants as C
from ansible.cache.memory import CacheModule as MemoryCacheModule
class CacheModule(MemoryCacheModule):

    def __init__(self):
        super(CacheModule, self).__init__()
        self._timeout = int(C.CACHE_PLUGIN_TIMEOUT)
        self._filename = '/tmp/ansible_facts.json'

        if os.access(self._filename, os.R_OK):
            mtime = datetime.fromtimestamp(os.path.getmtime(self._filename))
            if self._timeout == 0 or (datetime.now() - mtime).total_seconds() < self._timeout:
                with open(self._filename, 'rb') as f:
                    # we could make assumptions about the MemoryCacheModule here if we wanted
                    # to be more efficient, but performance isn't the priority with this module
                    data = json.load(f)
                    if isinstance(data, collections.Mapping):
                        for k, v in data.items():
                            super(CacheModule, self).set(k, v)

    def set(self, *args, **kwargs):
        super(CacheModule, self).set(*args, **kwargs)
        self.flush()

    def delete(self, *args, **kwargs):
        super(CacheModule, self).delete(*args, **kwargs)
        self.flush()

    def flush(self):
        # serialize to a temporary file first, then copy over the cache file,
        # so a crash mid-write cannot leave a truncated cache behind
        temp = tempfile.TemporaryFile('r+b')
        try:
            json.dump(self._cache, temp, separators=(',', ':'))
            temp.seek(0)
            with open(self._filename, 'w+b') as f:
                shutil.copyfileobj(temp, f)
        finally:
            temp.close()
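Since every set() triggers flush(), facts survive process restarts as plain JSON on disk. A minimal round-trip sketch (hypothetical host and fact names; assumes /tmp is writable):

cache = CacheModule()
cache.set('web01', {'ansible_os_family': 'Debian'})  # rewrites /tmp/ansible_facts.json
restarted = CacheModule()                            # a fresh instance re-reads the file
assert restarted.get('web01') == {'ansible_os_family': 'Debian'}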
# (c) 2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import collections
import sys
import time

from ansible import constants as C
from ansible.cache.base import BaseCacheModule

try:
    import memcache
except ImportError:
    print 'python-memcached is required for the memcached fact cache'
    sys.exit(1)
class CacheModuleKeys(collections.MutableSet):
    """
    A set subclass that keeps track of insertion time and persists
    the set in memcached.
    """
    PREFIX = 'ansible_cache_keys'

    def __init__(self, cache, *args, **kwargs):
        self._cache = cache
        self._keyset = dict(*args, **kwargs)

    def __contains__(self, key):
        return key in self._keyset

    def __iter__(self):
        return iter(self._keyset)

    def __len__(self):
        return len(self._keyset)

    def add(self, key):
        self._keyset[key] = time.time()
        self._cache.set(self.PREFIX, self._keyset)

    def discard(self, key):
        del self._keyset[key]
        self._cache.set(self.PREFIX, self._keyset)

    def remove_by_timerange(self, s_min, s_max):
        # .keys() returns a list in Python 2, so deleting entries while
        # looping over the snapshot is safe
        for k in self._keyset.keys():
            t = self._keyset[k]
            if s_min < t < s_max:
                del self._keyset[k]
        self._cache.set(self.PREFIX, self._keyset)
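memcached offers no way to enumerate keys, so CacheModuleKeys shadows them client-side with insertion timestamps and persists that index back into memcached on every mutation. A sketch of the bookkeeping using a stand-in client (a dict-backed stub, no memcached server required):

import time

class StubClient(object):
    ''' stand-in for memcache.Client: just enough for CacheModuleKeys '''
    def __init__(self):
        self._data = {}
    def set(self, key, value, time=0):
        self._data[key] = value
    def get(self, key):
        return self._data.get(key)

keys = CacheModuleKeys(StubClient())
keys.add('web01')                             # records time.time() and persists the index
keys.remove_by_timerange(0, time.time() + 1)  # everything inserted before 'now' is dropped
assert 'web01' not in keys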
class CacheModule(BaseCacheModule):

    def __init__(self, *args, **kwargs):
        if C.CACHE_PLUGIN_CONNECTION:
            connection = C.CACHE_PLUGIN_CONNECTION.split(',')
        else:
            connection = ['127.0.0.1:11211']

        self._timeout = C.CACHE_PLUGIN_TIMEOUT
        self._prefix = C.CACHE_PLUGIN_PREFIX
        self._cache = memcache.Client(connection, debug=0)
        self._keys = CacheModuleKeys(self._cache, self._cache.get(CacheModuleKeys.PREFIX) or [])

    def _make_key(self, key):
        return "{}{}".format(self._prefix, key)

    def _expire_keys(self):
        if self._timeout > 0:
            expiry_age = time.time() - self._timeout
            self._keys.remove_by_timerange(0, expiry_age)

    def get(self, key):
        value = self._cache.get(self._make_key(key))
        # guard against the key not being removed from the keyset;
        # this could happen in cases where the timeout value is changed
        # between invocations
        if value is None:
            self.delete(key)
            raise KeyError
        return value

    def set(self, key, value):
        self._cache.set(self._make_key(key), value, time=self._timeout)
        self._keys.add(key)

    def keys(self):
        self._expire_keys()
        return list(iter(self._keys))

    def contains(self, key):
        self._expire_keys()
        return key in self._keys

    def delete(self, key):
        self._cache.delete(self._make_key(key))
        self._keys.discard(key)
# (c) 2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class CacheModule(object):

    def __init__(self, *args, **kwargs):
        self._cache = {}

    def get(self, key):
        return self._cache.get(key)

    def set(self, key, value):
        self._cache[key] = value

    def keys(self):
        return self._cache.keys()

    def contains(self, key):
        return key in self._cache

    def delete(self, key):
        del self._cache[key]
# (c) 2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import

import collections
import pickle
import sys
import time

from ansible import constants as C
from ansible.cache.base import BaseCacheModule

try:
    from redis import StrictRedis
except ImportError:
    print "The 'redis' Python module is required for the redis fact cache"
    sys.exit(1)
class PickledRedis(StrictRedis):
    """
    A subclass of StrictRedis that uses the pickle module to store and load
    representations of the provided values.
    """

    def get(self, name):
        pickled_value = super(PickledRedis, self).get(name)
        if pickled_value is None:
            return None
        return pickle.loads(pickled_value)

    def set(self, name, value, *args, **kwargs):
        return super(PickledRedis, self).set(name, pickle.dumps(value), *args, **kwargs)

    def setex(self, name, time, value):
        return super(PickledRedis, self).setex(name, time, pickle.dumps(value))
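The wrapper exists because redis stores flat strings while facts are nested structures; pickling preserves the types across the round trip. A server-free sketch of what get() and set() do to a value:

import pickle

facts = {'ansible_all_ipv4_addresses': ['10.0.0.5'], 'module_setup': True}
blob = pickle.dumps(facts)          # what set()/setex() hand to redis
assert pickle.loads(blob) == facts  # what get() rebuilds for the caller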
class CacheModule(BaseCacheModule):
    """
    A caching module backed by redis.

    Keys are maintained in a zset with their score being the timestamp
    when they are inserted. This allows for the usage of 'zremrangebyscore'
    to expire keys. This mechanism is used instead of a pattern-matched
    'scan' for performance.
    """
    def __init__(self, *args, **kwargs):
        if C.CACHE_PLUGIN_CONNECTION:
            connection = C.CACHE_PLUGIN_CONNECTION.split(':')
        else:
            connection = []

        self._timeout = C.CACHE_PLUGIN_TIMEOUT
        self._prefix = C.CACHE_PLUGIN_PREFIX
        self._cache = PickledRedis(*connection)
        self._keys_set = 'ansible_cache_keys'

    def _make_key(self, key):
        return "{}{}".format(self._prefix, key)

    def get(self, key):
        value = self._cache.get(self._make_key(key))
        # guard against the key not being removed from the zset;
        # this could happen in cases where the timeout value is changed
        # between invocations
        if value is None:
            self.delete(key)
            raise KeyError
        return value

    def set(self, key, value):
        if self._timeout > 0:  # a timeout of 0 is handled as meaning 'never expire'
            self._cache.setex(self._make_key(key), self._timeout, value)
        else:
            self._cache.set(self._make_key(key), value)
        self._cache.zadd(self._keys_set, time.time(), key)

    def _expire_keys(self):
        if self._timeout > 0:
            expiry_age = time.time() - self._timeout
            self._cache.zremrangebyscore(self._keys_set, 0, expiry_age)

    def keys(self):
        self._expire_keys()
        return self._cache.zrange(self._keys_set, 0, -1)

    def contains(self, key):
        self._expire_keys()
        # zrank returns None when the member is missing, so test explicitly
        return self._cache.zrank(self._keys_set, key) is not None

    def delete(self, key):
        self._cache.delete(self._make_key(key))
        self._cache.zrem(self._keys_set, key)
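The expiry logic is pure arithmetic on zset scores: each key's score is its insertion time, so everything scoring at or below now - timeout is stale. A server-free sketch of the same calculation (hypothetical timestamps):

import time

timeout = 60 * 60 * 24                   # CACHE_PLUGIN_TIMEOUT default: one day
scores = {'web01': time.time() - 90000,  # inserted 25h ago -> stale
          'web02': time.time() - 600}    # inserted 10m ago -> fresh
expiry_age = time.time() - timeout
survivors = [k for k, s in scores.items() if s > expiry_age]
assert survivors == ['web02']            # zremrangebyscore(set, 0, expiry_age) keeps the rest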
@@ -140,6 +140,7 @@ DEFAULT_ASK_SU_PASS = get_config(p, DEFAULTS, 'ask_su_pass', 'ANSIBLE_ASK_
DEFAULT_GATHERING = get_config(p, DEFAULTS, 'gathering', 'ANSIBLE_GATHERING', 'implicit').lower()
DEFAULT_ACTION_PLUGIN_PATH = get_config(p, DEFAULTS, 'action_plugins', 'ANSIBLE_ACTION_PLUGINS', '/usr/share/ansible_plugins/action_plugins')
+DEFAULT_CACHE_PLUGIN_PATH = get_config(p, DEFAULTS, 'cache_plugins', 'ANSIBLE_CACHE_PLUGINS', '/usr/share/ansible_plugins/cache_plugins')
DEFAULT_CALLBACK_PLUGIN_PATH = get_config(p, DEFAULTS, 'callback_plugins', 'ANSIBLE_CALLBACK_PLUGINS', '/usr/share/ansible_plugins/callback_plugins')
DEFAULT_CONNECTION_PLUGIN_PATH = get_config(p, DEFAULTS, 'connection_plugins', 'ANSIBLE_CONNECTION_PLUGINS', '/usr/share/ansible_plugins/connection_plugins')
DEFAULT_LOOKUP_PLUGIN_PATH = get_config(p, DEFAULTS, 'lookup_plugins', 'ANSIBLE_LOOKUP_PLUGINS', '/usr/share/ansible_plugins/lookup_plugins')
@@ -147,6 +148,11 @@ DEFAULT_VARS_PLUGIN_PATH = get_config(p, DEFAULTS, 'vars_plugins', '
DEFAULT_FILTER_PLUGIN_PATH = get_config(p, DEFAULTS, 'filter_plugins', 'ANSIBLE_FILTER_PLUGINS', '/usr/share/ansible_plugins/filter_plugins')
DEFAULT_LOG_PATH = shell_expand_path(get_config(p, DEFAULTS, 'log_path', 'ANSIBLE_LOG_PATH', ''))
+CACHE_PLUGIN = get_config(p, DEFAULTS, 'cache_plugin', 'ANSIBLE_CACHE_PLUGIN', 'memory')
+CACHE_PLUGIN_CONNECTION = get_config(p, DEFAULTS, 'cache_plugin_connection', 'ANSIBLE_CACHE_PLUGIN_CONNECTION', None)
+CACHE_PLUGIN_PREFIX = get_config(p, DEFAULTS, 'cache_plugin_prefix', 'ANSIBLE_CACHE_PLUGIN_PREFIX', 'ansible_facts')
+CACHE_PLUGIN_TIMEOUT = get_config(p, DEFAULTS, 'cache_plugin_timeout', 'ANSIBLE_CACHE_PLUGIN_TIMEOUT', (60 * 60 * 24), integer=True)
ANSIBLE_FORCE_COLOR = get_config(p, DEFAULTS, 'force_color', 'ANSIBLE_FORCE_COLOR', None, boolean=True)
ANSIBLE_NOCOLOR = get_config(p, DEFAULTS, 'nocolor', 'ANSIBLE_NOCOLOR', None, boolean=True)
ANSIBLE_NOCOWS = get_config(p, DEFAULTS, 'nocows', 'ANSIBLE_NOCOWS', None, boolean=True)
...
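These four settings mean a deployment can switch fact-cache backends without code changes: per get_config's precedence, the environment variable wins over the ansible.cfg key, which wins over the 'memory' default. A hypothetical ansible.cfg stanza exercising the redis backend, shown as Python comments (the key names come from the get_config calls above; the values are illustrative, not defaults):

# [defaults]                                 (in ansible.cfg)
# cache_plugin = redis
# cache_plugin_connection = localhost:6379:0
# cache_plugin_timeout = 3600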
@@ -22,6 +22,7 @@ from ansible.utils.template import template
from ansible import utils
from ansible import errors
import ansible.callbacks
+import ansible.cache
import os
import shlex
import collections
@@ -32,9 +33,10 @@ import pipes
# the setup cache stores all variables about a host
# gathered during the setup step, while the vars cache
# holds all other variables about a host
-SETUP_CACHE = collections.defaultdict(dict)
+SETUP_CACHE = ansible.cache.FactCache()
VARS_CACHE = collections.defaultdict(dict)

class PlayBook(object):
    '''
    runs an ansible playbook, given as a datastructure or YAML filename.
@@ -98,7 +100,7 @@ class PlayBook(object):
        inventory: can be specified instead of host_list to use a pre-existing inventory object
        check: don't change anything, just try to detect some potential changes
        any_errors_fatal: terminate the entire execution immediately when one of the hosts has failed
        force_handlers: continue to notify and run handlers even if a task fails
        """

        self.SETUP_CACHE = SETUP_CACHE
@@ -187,7 +189,7 @@ class PlayBook(object):
    def _get_playbook_vars(self, play_ds, existing_vars):
        '''
        Gets the vars specified with the play and blends them
        with any existing vars that have already been read in
        '''
        new_vars = existing_vars.copy()
@@ -470,6 +472,13 @@ class PlayBook(object):
        contacted = results.get('contacted', {})
        self.stats.compute(results, ignore_errors=task.ignore_errors)

+        def _register_play_vars(host, result):
+            # when 'register' is used, persist the result in the vars cache
+            # rather than the setup cache - vars should be transient between playbook executions
+            if 'stdout' in result and 'stdout_lines' not in result:
+                result['stdout_lines'] = result['stdout'].splitlines()
+            utils.update_hash(self.VARS_CACHE, host, {task.register: result})

        # add facts to the global setup cache
        for host, result in contacted.iteritems():
            if 'results' in result:
@@ -478,22 +487,19 @@ class PlayBook(object):
                for res in result['results']:
                    if type(res) == dict:
                        facts = res.get('ansible_facts', {})
-                        self.SETUP_CACHE[host].update(facts)
+                        utils.update_hash(self.SETUP_CACHE, host, facts)
            else:
+                # when facts are returned, persist them in the setup cache
                facts = result.get('ansible_facts', {})
-                self.SETUP_CACHE[host].update(facts)
+                utils.update_hash(self.SETUP_CACHE, host, facts)
            if task.register:
-                if 'stdout' in result and 'stdout_lines' not in result:
-                    result['stdout_lines'] = result['stdout'].splitlines()
-                self.SETUP_CACHE[host][task.register] = result
+                _register_play_vars(host, result)

        # also have to register some failed, but ignored, tasks
        if task.ignore_errors and task.register:
            failed = results.get('failed', {})
            for host, result in failed.iteritems():
-                if 'stdout' in result and 'stdout_lines' not in result:
-                    result['stdout_lines'] = result['stdout'].splitlines()
-                self.SETUP_CACHE[host][task.register] = result
+                _register_play_vars(host, result)

        # flag which notify handlers need to be run
        if len(task.notify) > 0:
@@ -585,8 +591,8 @@ class PlayBook(object):
        # let runner template out future commands
        setup_ok = setup_results.get('contacted', {})
        for (host, result) in setup_ok.iteritems():
-            self.SETUP_CACHE[host].update({'module_setup': True})
-            self.SETUP_CACHE[host].update(result.get('ansible_facts', {}))
+            utils.update_hash(self.SETUP_CACHE, host, {'module_setup': True})
+            utils.update_hash(self.SETUP_CACHE, host, result.get('ansible_facts', {}))
        return setup_results

# *****************************************************
@@ -620,7 +626,7 @@ class PlayBook(object):
    def _run_play(self, play):
        ''' run a list of tasks for a given pattern, in order '''

        self.callbacks.on_play_start(play.name)
        # Get the hosts for this play
        play._play_hosts = self.inventory.list_hosts(play.hosts)
...
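The switch from SETUP_CACHE[host].update(facts) to utils.update_hash(...) matters because FactCache.__getitem__ returns a value fetched from the backend plugin: mutating that returned dict in place would never be written back. The helper has to merge and then re-assign through __setitem__. A minimal sketch of that write-back pattern (hypothetical body; the real utils.update_hash may differ):

def update_hash(cache, key, new_value):
    # read-modify-write so plugin-backed mappings actually see the change
    value = cache.get(key, {})  # MutableMapping.get() -> {} for unknown hosts
    value.update(new_value)
    cache[key] = value          # __setitem__ persists through the plugin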
@@ -29,6 +29,7 @@ import os
import sys
import uuid

class Play(object):

    __slots__ = [
@@ -85,7 +86,7 @@ class Play(object):
        # now we load the roles into the datastructure
        self.included_roles = []
        ds = self._load_roles(self.roles, ds)

        # and finally re-process the vars files as they may have
        # been updated by the included roles
        self.vars_files = ds.get('vars_files', [])
@@ -153,6 +154,7 @@ class Play(object):
        self._tasks = self._load_tasks(self._ds.get('tasks', []), load_vars)
        self._handlers = self._load_tasks(self._ds.get('handlers', []), load_vars)

        # apply any missing tags to role tasks
        self._late_merge_role_tags()
@@ -167,7 +169,7 @@ class Play(object):
    def _get_role_path(self, role):
        """
        Returns the path on disk to the directory containing
        the role directories like tasks, templates, etc. Also
        returns any variables that were included with the role
        """
        orig_path = template(self.basedir,role,self.vars)
@@ -242,7 +244,7 @@ class Play(object):
        allow_dupes = utils.boolean(meta_data.get('allow_duplicates',''))

        # if any tags were specified as role/dep variables, merge
        # them into the current dep_vars so they're passed on to any
        # further dependencies too, and so we only have one place
        # (dep_vars) to look for tags going forward
        def __merge_tags(var_obj):
@@ -318,7 +320,7 @@ class Play(object):
            dep_stack.append([dep,dep_path,dep_vars,dep_defaults_data])

        # only add the current role when we're at the top level,
        # otherwise we'll end up in a recursive loop
        if level == 0:
            self.included_roles.append(role)
            dep_stack.append([role,role_path,role_vars,defaults_data])
@@ -505,7 +507,7 @@ class Play(object):
            if not isinstance(x, dict):
                raise errors.AnsibleError("expecting dict; got: %s, error in %s" % (x, original_file))

            # evaluate sudo vars for current and child tasks
            included_sudo_vars = {}
            for k in ["sudo", "sudo_user"]:
                if k in x:
@@ -554,7 +556,7 @@ class Play(object):
            else:
                default_vars = utils.combine_vars(self.default_vars, default_vars)

            # append the vars defined with the include (from above)
            # as well as the old-style 'vars' element. The old-style
            # vars are given higher precedence here (just in case)
            task_vars = utils.combine_vars(task_vars, include_vars)
@@ -610,8 +612,8 @@ class Play(object):
    def _is_valid_tag(self, tag_list):
        """
        Check to see if the list of tags passed in is in the list of tags
        we only want (playbook.only_tags), or if it is not in the list of
        tags we don't want (playbook.skip_tags).
        """
        matched_skip_tags = set(tag_list) & set(self.playbook.skip_tags)
@@ -774,7 +776,7 @@ class Play(object):
            inject.update(self.vars)
            filename4 = template(self.basedir, filename3, inject)
            filename4 = utils.path_dwim(self.basedir, filename4)
        else:
            filename4 = utils.path_dwim(self.basedir, filename3)
        return filename2, filename3, filename4
@@ -823,7 +825,7 @@ class Play(object):
            inject.update(self.playbook.SETUP_CACHE.get(host, {}))
            inject.update(self.playbook.VARS_CACHE.get(host, {}))
        else:
            inject = None

        for filename in self.vars_files:
            if type(filename) == list:
@@ -854,4 +856,4 @@ class Play(object):
        # finally, update the VARS_CACHE for the host, if it is set
        if host is not None:
-            self.playbook.VARS_CACHE[host].update(self.playbook.extra_vars)
+            self.playbook.VARS_CACHE.setdefault(host, {}).update(self.playbook.extra_vars)
...
@@ -108,14 +108,14 @@ class PluginLoader(object):
            if fullpath not in ret:
                ret.append(fullpath)

        # look in any configured plugin paths, allow one level deep for subcategories
        configured_paths = self.config.split(os.pathsep)
        for path in configured_paths:
            path = os.path.realpath(os.path.expanduser(path))
            contents = glob.glob("%s/*" % path)
            for c in contents:
                if os.path.isdir(c) and c not in ret:
                    ret.append(c)
            if path not in ret:
                ret.append(path)
@@ -181,7 +181,7 @@ class PluginLoader(object):
        return getattr(self._module_cache[path], self.class_name)(*args, **kwargs)

    def all(self, *args, **kwargs):
        ''' instantiates all plugins with the same arguments '''

        for i in self._get_paths():
            matches = glob.glob(os.path.join(i, "*.py"))
@@ -195,24 +195,31 @@ class PluginLoader(object):
            yield getattr(self._module_cache[path], self.class_name)(*args, **kwargs)

action_loader = PluginLoader(
    'ActionModule',
    'ansible.runner.action_plugins',
    C.DEFAULT_ACTION_PLUGIN_PATH,
    'action_plugins'
)

+cache_loader = PluginLoader(
+    'CacheModule',
+    'ansible.cache',
+    C.DEFAULT_CACHE_PLUGIN_PATH,
+    'cache_plugins'
+)

callback_loader = PluginLoader(
    'CallbackModule',
    'ansible.callback_plugins',
    C.DEFAULT_CALLBACK_PLUGIN_PATH,
    'callback_plugins'
)

connection_loader = PluginLoader(
    'Connection',
    'ansible.runner.connection_plugins',
    C.DEFAULT_CONNECTION_PLUGIN_PATH,
    'connection_plugins',
    aliases={'paramiko': 'paramiko_ssh'}
)
@@ -224,30 +231,30 @@ shell_loader = PluginLoader(
)

module_finder = PluginLoader(
    '',
    '',
    C.DEFAULT_MODULE_PATH,
    'library'
)

lookup_loader = PluginLoader(
    'LookupModule',
    'ansible.runner.lookup_plugins',
    C.DEFAULT_LOOKUP_PLUGIN_PATH,
    'lookup_plugins'
)

vars_loader = PluginLoader(
    'VarsModule',
    'ansible.inventory.vars_plugins',
    C.DEFAULT_VARS_PLUGIN_PATH,
    'vars_plugins'
)

filter_loader = PluginLoader(
    'FilterModule',
    'ansible.runner.filter_plugins',
    C.DEFAULT_FILTER_PLUGIN_PATH,
    'filter_plugins'
)
...
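With cache_loader registered alongside the other loaders, selecting the configured backend is a one-line lookup; FactCache's __init__ earlier in this commit does exactly this. A small sketch (assumes an install with this commit applied and a hypothetical host key):

from ansible import utils
from ansible import constants as C

plugin = utils.plugins.cache_loader.get(C.CACHE_PLUGIN)  # 'memory' unless configured otherwise
plugin.set('web01', {'module_setup': True})
print plugin.keys()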
@@ -38,6 +38,7 @@ setup(name='ansible',
      package_dir={ 'ansible': 'lib/ansible' },
      packages=[
          'ansible',
+         'ansible.cache',
          'ansible.utils',
          'ansible.utils.module_docs_fragments',
          'ansible.inventory',
...