Commit 4af2d0a9 by James Cammarata

Reworking v2 play iterator and fixing some other bugs

Still not working quite right:
* dynamic includes are not adding the included tasks yet
* running roles with tags not quite working right
Parent commit: fbc525cf
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from multiprocessing.managers import SyncManager, BaseProxy
from ansible.playbook.handler import Handler
from ansible.playbook.task import Task
from ansible.playbook.play import Play
from ansible.errors import AnsibleError
__all__ = ['AnsibleManager']
class VariableManagerWrapper:
    """
    Thin delegation wrapper around a VariableManager instance.

    Multiprocessing manager proxies expect the registered callable to
    construct a brand-new object, rather than hand back an existing one.
    This wrapper satisfies that: the proxy creates an empty wrapper, an
    existing VariableManager is attached afterwards via
    set_variable_manager(), and the manager's methods are then reachable
    through the exposed proxy methods below.
    """

    def __init__(self):
        # The wrapped VariableManager; None until set_variable_manager()
        # is called, so calling the delegating methods before that will
        # raise AttributeError.
        self._vm = None

    def get_vars(self, loader, play=None, host=None, task=None):
        """Delegate variable resolution to the wrapped VariableManager."""
        return self._vm.get_vars(loader=loader, play=play, host=host, task=task)

    def set_variable_manager(self, vm):
        """Attach the VariableManager this wrapper delegates to."""
        self._vm = vm

    def set_host_variable(self, host, varname, value):
        """Set a single variable on a host via the wrapped manager."""
        self._vm.set_host_variable(host, varname, value)

    def set_host_facts(self, host, facts):
        """Store gathered facts for a host via the wrapped manager."""
        self._vm.set_host_facts(host, facts)
class AnsibleManager(SyncManager):
    """
    Custom multiprocessing manager subclass. It exists solely so the
    VariableManagerWrapper proxy type can be registered on it below
    without touching the stock SyncManager.
    """
    pass


# Expose VariableManagerWrapper through the manager: proxies created with
# the 'VariableManagerWrapper' typeid construct a fresh wrapper object.
AnsibleManager.register(typeid='VariableManagerWrapper', callable=VariableManagerWrapper)
...@@ -137,12 +137,13 @@ class ResultProcess(multiprocessing.Process): ...@@ -137,12 +137,13 @@ class ResultProcess(multiprocessing.Process):
result_items = [ result._result ] result_items = [ result._result ]
for result_item in result_items: for result_item in result_items:
if 'include' in result_item: #if 'include' in result_item:
include_variables = result_item.get('include_variables', dict()) # include_variables = result_item.get('include_variables', dict())
if 'item' in result_item: # if 'item' in result_item:
include_variables['item'] = result_item['item'] # include_variables['item'] = result_item['item']
self._send_result(('include', result._host, result._task, result_item['include'], include_variables)) # self._send_result(('include', result._host, result._task, result_item['include'], include_variables))
elif 'add_host' in result_item: #elif 'add_host' in result_item:
if 'add_host' in result_item:
# this task added a new host (add_host module) # this task added a new host (add_host module)
self._send_result(('add_host', result_item)) self._send_result(('add_host', result_item))
elif 'add_group' in result_item: elif 'add_group' in result_item:
......
...@@ -132,13 +132,14 @@ class TaskExecutor: ...@@ -132,13 +132,14 @@ class TaskExecutor:
res = self._execute(variables=task_vars) res = self._execute(variables=task_vars)
(self._task, tmp_task) = (tmp_task, self._task) (self._task, tmp_task) = (tmp_task, self._task)
# FIXME: we should be sending back a callback result for each item in the loop here
# now update the result with the item info, and append the result # now update the result with the item info, and append the result
# to the list of results # to the list of results
res['item'] = item res['item'] = item
results.append(res) results.append(res)
# FIXME: we should be sending back a callback result for each item in the loop here
print(res)
return results return results
def _squash_items(self, items, variables): def _squash_items(self, items, variables):
......
...@@ -26,7 +26,6 @@ import sys ...@@ -26,7 +26,6 @@ import sys
from ansible.errors import AnsibleError from ansible.errors import AnsibleError
from ansible.executor.connection_info import ConnectionInformation from ansible.executor.connection_info import ConnectionInformation
#from ansible.executor.manager import AnsibleManager
from ansible.executor.play_iterator import PlayIterator from ansible.executor.play_iterator import PlayIterator
from ansible.executor.process.worker import WorkerProcess from ansible.executor.process.worker import WorkerProcess
from ansible.executor.process.result import ResultProcess from ansible.executor.process.result import ResultProcess
...@@ -36,7 +35,6 @@ from ansible.utils.debug import debug ...@@ -36,7 +35,6 @@ from ansible.utils.debug import debug
__all__ = ['TaskQueueManager'] __all__ = ['TaskQueueManager']
class TaskQueueManager: class TaskQueueManager:
''' '''
...@@ -59,10 +57,6 @@ class TaskQueueManager: ...@@ -59,10 +57,6 @@ class TaskQueueManager:
# a special flag to help us exit cleanly # a special flag to help us exit cleanly
self._terminated = False self._terminated = False
# create and start the multiprocessing manager
#self._manager = AnsibleManager()
#self._manager.start()
# this dictionary is used to keep track of notified handlers # this dictionary is used to keep track of notified handlers
self._notified_handlers = dict() self._notified_handlers = dict()
......
...@@ -273,7 +273,7 @@ class Base: ...@@ -273,7 +273,7 @@ class Base:
if needle in self._attributes: if needle in self._attributes:
return self._attributes[needle] return self._attributes[needle]
raise AttributeError("attribute not found: %s" % needle) raise AttributeError("attribute not found in %s: %s" % (self.__class__.__name__, needle))
def __getstate__(self): def __getstate__(self):
return self.serialize() return self.serialize()
......
...@@ -28,9 +28,9 @@ from ansible.playbook.taggable import Taggable ...@@ -28,9 +28,9 @@ from ansible.playbook.taggable import Taggable
class Block(Base, Conditional, Taggable): class Block(Base, Conditional, Taggable):
_block = FieldAttribute(isa='list') _block = FieldAttribute(isa='list', default=[])
_rescue = FieldAttribute(isa='list') _rescue = FieldAttribute(isa='list', default=[])
_always = FieldAttribute(isa='list') _always = FieldAttribute(isa='list', default=[])
# for future consideration? this would be functionally # for future consideration? this would be functionally
# similar to the 'else' clause for exceptions # similar to the 'else' clause for exceptions
...@@ -41,6 +41,7 @@ class Block(Base, Conditional, Taggable): ...@@ -41,6 +41,7 @@ class Block(Base, Conditional, Taggable):
self._role = role self._role = role
self._task_include = task_include self._task_include = task_include
self._use_handlers = use_handlers self._use_handlers = use_handlers
self._dep_chain = []
super(Block, self).__init__() super(Block, self).__init__()
...@@ -141,6 +142,7 @@ class Block(Base, Conditional, Taggable): ...@@ -141,6 +142,7 @@ class Block(Base, Conditional, Taggable):
def copy(self): def copy(self):
new_me = super(Block, self).copy() new_me = super(Block, self).copy()
new_me._use_handlers = self._use_handlers new_me._use_handlers = self._use_handlers
new_me._dep_chain = self._dep_chain[:]
new_me._parent_block = None new_me._parent_block = None
if self._parent_block: if self._parent_block:
...@@ -163,6 +165,7 @@ class Block(Base, Conditional, Taggable): ...@@ -163,6 +165,7 @@ class Block(Base, Conditional, Taggable):
''' '''
data = dict(when=self.when) data = dict(when=self.when)
data['dep_chain'] = self._dep_chain
if self._role is not None: if self._role is not None:
data['role'] = self._role.serialize() data['role'] = self._role.serialize()
...@@ -177,11 +180,11 @@ class Block(Base, Conditional, Taggable): ...@@ -177,11 +180,11 @@ class Block(Base, Conditional, Taggable):
serialize method serialize method
''' '''
#from ansible.playbook.task_include import TaskInclude
from ansible.playbook.task import Task from ansible.playbook.task import Task
# unpack the when attribute, which is the only one we want # unpack the when attribute, which is the only one we want
self.when = data.get('when') self.when = data.get('when')
self._dep_chain = data.get('dep_chain', [])
# if there was a serialized role, unpack it too # if there was a serialized role, unpack it too
role_data = data.get('role') role_data = data.get('role')
...@@ -198,6 +201,10 @@ class Block(Base, Conditional, Taggable): ...@@ -198,6 +201,10 @@ class Block(Base, Conditional, Taggable):
self._task_include = ti self._task_include = ti
def evaluate_conditional(self, all_vars): def evaluate_conditional(self, all_vars):
if len(self._dep_chain):
for dep in self._dep_chain:
if not dep.evaluate_conditional(all_vars):
return False
if self._task_include is not None: if self._task_include is not None:
if not self._task_include.evaluate_conditional(all_vars): if not self._task_include.evaluate_conditional(all_vars):
return False return False
...@@ -211,6 +218,9 @@ class Block(Base, Conditional, Taggable): ...@@ -211,6 +218,9 @@ class Block(Base, Conditional, Taggable):
def evaluate_tags(self, only_tags, skip_tags, all_vars): def evaluate_tags(self, only_tags, skip_tags, all_vars):
result = False result = False
if len(self._dep_chain):
for dep in self._dep_chain:
result |= dep.evaluate_tags(only_tags=only_tags, skip_tags=skip_tags, all_vars=all_vars)
if self._parent_block is not None: if self._parent_block is not None:
result |= self._parent_block.evaluate_tags(only_tags=only_tags, skip_tags=skip_tags, all_vars=all_vars) result |= self._parent_block.evaluate_tags(only_tags=only_tags, skip_tags=skip_tags, all_vars=all_vars)
elif self._role is not None: elif self._role is not None:
...@@ -227,3 +237,6 @@ class Block(Base, Conditional, Taggable): ...@@ -227,3 +237,6 @@ class Block(Base, Conditional, Taggable):
if self._task_include: if self._task_include:
self._task_include.set_loader(loader) self._task_include.set_loader(loader)
for dep in self._dep_chain:
dep.set_loader(loader)
...@@ -198,13 +198,13 @@ class Play(Base, Taggable): ...@@ -198,13 +198,13 @@ class Play(Base, Taggable):
the parent role R last. This is done for all roles in the Play. the parent role R last. This is done for all roles in the Play.
''' '''
task_list = [] block_list = []
if len(self.roles) > 0: if len(self.roles) > 0:
for r in self.roles: for r in self.roles:
task_list.extend(r.compile()) block_list.extend(r.compile())
return task_list return block_list
def compile(self): def compile(self):
''' '''
...@@ -213,14 +213,14 @@ class Play(Base, Taggable): ...@@ -213,14 +213,14 @@ class Play(Base, Taggable):
tasks specified in the play. tasks specified in the play.
''' '''
task_list = [] block_list = []
task_list.extend(compile_block_list(self.pre_tasks)) block_list.extend(self.pre_tasks)
task_list.extend(self._compile_roles()) block_list.extend(self._compile_roles())
task_list.extend(compile_block_list(self.tasks)) block_list.extend(self.tasks)
task_list.extend(compile_block_list(self.post_tasks)) block_list.extend(self.post_tasks)
return task_list return block_list
def get_vars(self): def get_vars(self):
return self.vars.copy() return self.vars.copy()
......
...@@ -304,25 +304,25 @@ class Role(Base, Conditional, Taggable): ...@@ -304,25 +304,25 @@ class Role(Base, Conditional, Taggable):
can correctly take their parent's tags/conditionals into account. can correctly take their parent's tags/conditionals into account.
''' '''
task_list = [] block_list = []
# update the dependency chain here # update the dependency chain here
new_dep_chain = dep_chain + [self] new_dep_chain = dep_chain + [self]
deps = self.get_direct_dependencies() deps = self.get_direct_dependencies()
for dep in deps: for dep in deps:
dep_tasks = dep.compile(dep_chain=new_dep_chain) dep_blocks = dep.compile(dep_chain=new_dep_chain)
for dep_task in dep_tasks: for dep_block in dep_blocks:
# since we're modifying the task, and need it to be unique, # since we're modifying the task, and need it to be unique,
# we make a copy of it here and assign the dependency chain # we make a copy of it here and assign the dependency chain
# to the copy, then append the copy to the task list. # to the copy, then append the copy to the task list.
new_dep_task = dep_task.copy() new_dep_block = dep_block.copy()
new_dep_task._dep_chain = new_dep_chain new_dep_block._dep_chain = new_dep_chain
task_list.append(new_dep_task) block_list.append(new_dep_block)
task_list.extend(compile_block_list(self._task_blocks)) block_list.extend(self._task_blocks)
return task_list return block_list
def serialize(self, include_deps=True): def serialize(self, include_deps=True):
res = super(Role, self).serialize() res = super(Role, self).serialize()
......
...@@ -102,7 +102,6 @@ class Task(Base, Conditional, Taggable): ...@@ -102,7 +102,6 @@ class Task(Base, Conditional, Taggable):
self._block = block self._block = block
self._role = role self._role = role
self._task_include = task_include self._task_include = task_include
self._dep_chain = []
super(Task, self).__init__() super(Task, self).__init__()
...@@ -226,7 +225,6 @@ class Task(Base, Conditional, Taggable): ...@@ -226,7 +225,6 @@ class Task(Base, Conditional, Taggable):
def copy(self): def copy(self):
new_me = super(Task, self).copy() new_me = super(Task, self).copy()
new_me._dep_chain = self._dep_chain[:]
new_me._block = None new_me._block = None
if self._block: if self._block:
...@@ -244,7 +242,6 @@ class Task(Base, Conditional, Taggable): ...@@ -244,7 +242,6 @@ class Task(Base, Conditional, Taggable):
def serialize(self): def serialize(self):
data = super(Task, self).serialize() data = super(Task, self).serialize()
data['dep_chain'] = self._dep_chain
if self._block: if self._block:
data['block'] = self._block.serialize() data['block'] = self._block.serialize()
...@@ -263,7 +260,6 @@ class Task(Base, Conditional, Taggable): ...@@ -263,7 +260,6 @@ class Task(Base, Conditional, Taggable):
#from ansible.playbook.task_include import TaskInclude #from ansible.playbook.task_include import TaskInclude
block_data = data.get('block') block_data = data.get('block')
self._dep_chain = data.get('dep_chain', [])
if block_data: if block_data:
b = Block() b = Block()
...@@ -289,10 +285,6 @@ class Task(Base, Conditional, Taggable): ...@@ -289,10 +285,6 @@ class Task(Base, Conditional, Taggable):
super(Task, self).deserialize(data) super(Task, self).deserialize(data)
def evaluate_conditional(self, all_vars): def evaluate_conditional(self, all_vars):
if len(self._dep_chain):
for dep in self._dep_chain:
if not dep.evaluate_conditional(all_vars):
return False
if self._block is not None: if self._block is not None:
if not self._block.evaluate_conditional(all_vars): if not self._block.evaluate_conditional(all_vars):
return False return False
...@@ -303,9 +295,6 @@ class Task(Base, Conditional, Taggable): ...@@ -303,9 +295,6 @@ class Task(Base, Conditional, Taggable):
def evaluate_tags(self, only_tags, skip_tags, all_vars): def evaluate_tags(self, only_tags, skip_tags, all_vars):
result = False result = False
if len(self._dep_chain):
for dep in self._dep_chain:
result |= dep.evaluate_tags(only_tags=only_tags, skip_tags=skip_tags, all_vars=all_vars)
if self._block is not None: if self._block is not None:
result |= self._block.evaluate_tags(only_tags=only_tags, skip_tags=skip_tags, all_vars=all_vars) result |= self._block.evaluate_tags(only_tags=only_tags, skip_tags=skip_tags, all_vars=all_vars)
return result | super(Task, self).evaluate_tags(only_tags=only_tags, skip_tags=skip_tags, all_vars=all_vars) return result | super(Task, self).evaluate_tags(only_tags=only_tags, skip_tags=skip_tags, all_vars=all_vars)
...@@ -324,5 +313,3 @@ class Task(Base, Conditional, Taggable): ...@@ -324,5 +313,3 @@ class Task(Base, Conditional, Taggable):
if self._task_include: if self._task_include:
self._task_include.set_loader(loader) self._task_include.set_loader(loader)
for dep in self._dep_chain:
dep.set_loader(loader)
...@@ -68,8 +68,8 @@ class StrategyBase: ...@@ -68,8 +68,8 @@ class StrategyBase:
num_failed = len(self._tqm._failed_hosts) num_failed = len(self._tqm._failed_hosts)
num_unreachable = len(self._tqm._unreachable_hosts) num_unreachable = len(self._tqm._unreachable_hosts)
debug("running the cleanup portion of the play") #debug("running the cleanup portion of the play")
result &= self.cleanup(iterator, connection_info) #result &= self.cleanup(iterator, connection_info)
debug("running handlers") debug("running handlers")
result &= self.run_handlers(iterator, connection_info) result &= self.run_handlers(iterator, connection_info)
...@@ -131,6 +131,7 @@ class StrategyBase: ...@@ -131,6 +131,7 @@ class StrategyBase:
if result[0] == 'host_task_failed': if result[0] == 'host_task_failed':
if not task.ignore_errors: if not task.ignore_errors:
debug("marking %s as failed" % host.get_name()) debug("marking %s as failed" % host.get_name())
iterator.mark_host_failed(host)
self._tqm._failed_hosts[host.get_name()] = True self._tqm._failed_hosts[host.get_name()] = True
self._callback.runner_on_failed(task, task_result) self._callback.runner_on_failed(task, task_result)
elif result[0] == 'host_unreachable': elif result[0] == 'host_unreachable':
...@@ -151,26 +152,25 @@ class StrategyBase: ...@@ -151,26 +152,25 @@ class StrategyBase:
# lookup the role in the ROLE_CACHE to make sure we're dealing # lookup the role in the ROLE_CACHE to make sure we're dealing
# with the correct object and mark it as executed # with the correct object and mark it as executed
for (entry, role_obj) in ROLE_CACHE[task_result._task._role._role_name].iteritems(): for (entry, role_obj) in ROLE_CACHE[task_result._task._role._role_name].iteritems():
#hashed_entry = frozenset(task_result._task._role._role_params.iteritems())
hashed_entry = hash_params(task_result._task._role._role_params) hashed_entry = hash_params(task_result._task._role._role_params)
if entry == hashed_entry : if entry == hashed_entry :
role_obj._had_task_run = True role_obj._had_task_run = True
elif result[0] == 'include': #elif result[0] == 'include':
host = result[1] # host = result[1]
task = result[2] # task = result[2]
include_file = result[3] # include_file = result[3]
include_vars = result[4] # include_vars = result[4]
#
if isinstance(task, Handler): # if isinstance(task, Handler):
# FIXME: figure out how to make includes work for handlers # # FIXME: figure out how to make includes work for handlers
pass # pass
else: # else:
original_task = iterator.get_original_task(task) # original_task = iterator.get_original_task(host, task)
if original_task._role: # if original_task and original_task._role:
include_file = self._loader.path_dwim_relative(original_task._role._role_path, 'tasks', include_file) # include_file = self._loader.path_dwim_relative(original_task._role._role_path, 'tasks', include_file)
new_tasks = self._load_included_file(original_task, include_file, include_vars) # new_tasks = self._load_included_file(original_task, include_file, include_vars)
iterator.add_tasks(host, new_tasks) # iterator.add_tasks(host, new_tasks)
elif result[0] == 'add_host': elif result[0] == 'add_host':
task_result = result[1] task_result = result[1]
...@@ -314,6 +314,8 @@ class StrategyBase: ...@@ -314,6 +314,8 @@ class StrategyBase:
task_list = compile_block_list(block_list) task_list = compile_block_list(block_list)
# set the vars for this task from those specified as params to the include
for t in task_list: for t in task_list:
t.vars = include_vars.copy() t.vars = include_vars.copy()
...@@ -355,10 +357,12 @@ class StrategyBase: ...@@ -355,10 +357,12 @@ class StrategyBase:
iterator.mark_host_failed(host) iterator.mark_host_failed(host)
del self._tqm._failed_hosts[host_name] del self._tqm._failed_hosts[host_name]
if host_name not in self._tqm._unreachable_hosts and iterator.get_next_task_for_host(host, peek=True): if host_name in self._blocked_hosts:
work_to_do = True
continue
elif iterator.get_next_task_for_host(host, peek=True) and host_name not in self._tqm._unreachable_hosts:
work_to_do = True work_to_do = True
# check to see if this host is blocked (still executing a previous task)
if not host_name in self._blocked_hosts:
# pop the task, mark the host blocked, and queue it # pop the task, mark the host blocked, and queue it
self._blocked_hosts[host_name] = True self._blocked_hosts[host_name] = True
task = iterator.get_next_task_for_host(host) task = iterator.get_next_task_for_host(host)
...@@ -367,6 +371,7 @@ class StrategyBase: ...@@ -367,6 +371,7 @@ class StrategyBase:
self._queue_task(host, task, task_vars, connection_info) self._queue_task(host, task, task_vars, connection_info)
self._process_pending_results(iterator) self._process_pending_results(iterator)
time.sleep(0.01)
# no more work, wait until the queue is drained # no more work, wait until the queue is drained
self._wait_on_pending_results(iterator) self._wait_on_pending_results(iterator)
......
...@@ -20,11 +20,96 @@ from __future__ import (absolute_import, division, print_function) ...@@ -20,11 +20,96 @@ from __future__ import (absolute_import, division, print_function)
__metaclass__ = type __metaclass__ = type
from ansible.errors import AnsibleError from ansible.errors import AnsibleError
from ansible.executor.play_iterator import PlayIterator
from ansible.playbook.task import Task
from ansible.plugins.strategies import StrategyBase from ansible.plugins.strategies import StrategyBase
from ansible.utils.debug import debug from ansible.utils.debug import debug
class StrategyModule(StrategyBase): class StrategyModule(StrategyBase):
def _get_next_task_lockstep(self, hosts, iterator):
'''
Returns a list of (host, task) tuples, where the task may
be a noop task to keep the iterator in lock step across
all hosts.
'''
noop_task = Task()
noop_task.action = 'meta'
noop_task.args['_raw_params'] = 'noop'
noop_task.set_loader(iterator._play._loader)
host_tasks = {}
for host in hosts:
host_tasks[host.name] = iterator.get_next_task_for_host(host, peek=True)
num_setups = 0
num_tasks = 0
num_rescue = 0
num_always = 0
lowest_cur_block = len(iterator._blocks)
for (k, v) in host_tasks.iteritems():
(s, t) = v
if s.cur_block < lowest_cur_block and s.run_state != PlayIterator.ITERATING_COMPLETE:
lowest_cur_block = s.cur_block
if s.run_state == PlayIterator.ITERATING_SETUP:
num_setups += 1
elif s.run_state == PlayIterator.ITERATING_TASKS:
num_tasks += 1
elif s.run_state == PlayIterator.ITERATING_RESCUE:
num_rescue += 1
elif s.run_state == PlayIterator.ITERATING_ALWAYS:
num_always += 1
def _advance_selected_hosts(hosts, cur_block, cur_state):
'''
This helper returns the task for all hosts in the requested
state, otherwise they get a noop dummy task. This also advances
the state of the host, since the given states are determined
while using peek=True.
'''
# we return the values in the order they were originally
# specified in the given hosts array
rvals = []
for host in hosts:
(s, t) = host_tasks[host.name]
if s.run_state == cur_state and s.cur_block == cur_block:
new_t = iterator.get_next_task_for_host(host)
#if new_t != t:
# raise AnsibleError("iterator error, wtf?")
rvals.append((host, t))
else:
rvals.append((host, noop_task))
return rvals
# if any hosts are in ITERATING_SETUP, return the setup task
# while all other hosts get a noop
if num_setups:
return _advance_selected_hosts(hosts, lowest_cur_block, PlayIterator.ITERATING_SETUP)
# if any hosts are in ITERATING_TASKS, return the next normal
# task for these hosts, while all other hosts get a noop
if num_tasks:
return _advance_selected_hosts(hosts, lowest_cur_block, PlayIterator.ITERATING_TASKS)
# if any hosts are in ITERATING_RESCUE, return the next rescue
# task for these hosts, while all other hosts get a noop
if num_rescue:
return _advance_selected_hosts(hosts, lowest_cur_block, PlayIterator.ITERATING_RESCUE)
# if any hosts are in ITERATING_ALWAYS, return the next always
# task for these hosts, while all other hosts get a noop
if num_always:
return _advance_selected_hosts(hosts, lowest_cur_block, PlayIterator.ITERATING_ALWAYS)
# at this point, everything must be ITERATING_COMPLETE, so we
# return None for all hosts in the list
return [(host, None) for host in hosts]
def run(self, iterator, connection_info): def run(self, iterator, connection_info):
''' '''
The linear strategy is simple - get the next task and queue The linear strategy is simple - get the next task and queue
...@@ -40,6 +125,7 @@ class StrategyModule(StrategyBase): ...@@ -40,6 +125,7 @@ class StrategyModule(StrategyBase):
try: try:
debug("getting the remaining hosts for this loop") debug("getting the remaining hosts for this loop")
self._tqm._failed_hosts = iterator.get_failed_hosts()
hosts_left = self.get_hosts_remaining(iterator._play) hosts_left = self.get_hosts_remaining(iterator._play)
debug("done getting the remaining hosts for this loop") debug("done getting the remaining hosts for this loop")
if len(hosts_left) == 0: if len(hosts_left) == 0:
...@@ -51,11 +137,13 @@ class StrategyModule(StrategyBase): ...@@ -51,11 +137,13 @@ class StrategyModule(StrategyBase):
# queue up this task for each host in the inventory # queue up this task for each host in the inventory
callback_sent = False callback_sent = False
work_to_do = False work_to_do = False
for host in hosts_left:
while True: host_tasks = self._get_next_task_lockstep(hosts_left, iterator)
task = iterator.get_next_task_for_host(host) for (host, task) in host_tasks:
if not task: if not task:
break continue
work_to_do = True
debug("getting variables") debug("getting variables")
task_vars = self._variable_manager.get_vars(loader=self._loader, play=iterator._play, host=host, task=task) task_vars = self._variable_manager.get_vars(loader=self._loader, play=iterator._play, host=host, task=task)
...@@ -74,17 +162,14 @@ class StrategyModule(StrategyBase): ...@@ -74,17 +162,14 @@ class StrategyModule(StrategyBase):
debug("'%s' failed tag evaluation" % task) debug("'%s' failed tag evaluation" % task)
continue continue
break
if not task:
continue
work_to_do = True
if task.action == 'meta': if task.action == 'meta':
# meta tasks store their args in the _raw_params field of args, # meta tasks store their args in the _raw_params field of args,
# since they do not use k=v pairs, so get that # since they do not use k=v pairs, so get that
meta_action = task.args.get('_raw_params') meta_action = task.args.get('_raw_params')
if meta_action == 'flush_handlers': if meta_action == 'noop':
# FIXME: issue a callback for the noop here?
continue
elif meta_action == 'flush_handlers':
self.run_handlers(iterator, connection_info) self.run_handlers(iterator, connection_info)
else: else:
raise AnsibleError("invalid meta action requested: %s" % meta_action, obj=task._ds) raise AnsibleError("invalid meta action requested: %s" % meta_action, obj=task._ds)
...@@ -100,6 +185,11 @@ class StrategyModule(StrategyBase): ...@@ -100,6 +185,11 @@ class StrategyModule(StrategyBase):
debug("done queuing things up, now waiting for results queue to drain") debug("done queuing things up, now waiting for results queue to drain")
self._wait_on_pending_results(iterator) self._wait_on_pending_results(iterator)
# FIXME: MAKE PENDING RESULTS RETURN RESULTS PROCESSED AND USE THEM
# TO TAKE ACTION, ie. FOR INCLUDE STATEMENTS TO PRESERVE THE
# LOCK STEP OPERATION
debug("results queue empty") debug("results queue empty")
except (IOError, EOFError), e: except (IOError, EOFError), e:
debug("got IOError/EOFError in task loop: %s" % e) debug("got IOError/EOFError in task loop: %s" % e)
......
- debug: msg="this is the include, a=={{a}}"
- debug: msg="this is the second debug in the include"
- debug: msg="this is the third debug in the include, and a is still {{a}}"
l1 ansible_ssh_host=127.0.0.1
l2 ansible_ssh_host=127.0.0.2
l3 ansible_ssh_host=127.0.0.3
- debug: msg="this is a task from the common role"
- debug: msg="this is a task from role A"
- debug: msg="this is a task from role B"
dependencies:
- test_role_dep
- debug: msg="here we are in the role, foo={{foo}}" - debug: msg="here we are in the role, foo={{foo}}"
- fail:
when: foo != "bar"
- debug: msg="here we are in the role dependency"
- hosts: localhost - hosts: all
connection: local connection: local
gather_facts: no gather_facts: yes
tasks: tasks:
- block: - block:
- command: /bin/false - debug: msg="this is the first task"
- debug: msg="you shouldn't see me" - fail:
when: inventory_hostname == "l2"
- debug: msg="only l1 and l3 should see me"
rescue: rescue:
- debug: msg="this is the rescue" - debug: msg="this is the rescue"
- command: /bin/false - command: /bin/false
- debug: msg="you shouldn't see this either" - debug: msg="no host should see this run"
always: always:
- debug: msg="this is the always block, it will always be seen" - debug: msg="this is the always block, it will always be seen"
when: foo|default('') != "some value" when: foo|default('') != "some value"
tags: tags:
- foo - foo
- bar - bar
- debug: msg="you should only see l1 and l3 run this"
- hosts: localhost
gather_facts: no
tasks:
- block:
- block:
- block:
- block:
- debug: msg="are we there yet?"
- hosts: localhost
gather_facts: no
tasks:
- block:
- include: include.yml
when: 1 == 2
- include: include.yml a=1
when: 1 == 1
notify: foo
- include: include.yml a={{item}}
with_items:
- foo
- bar
- bam
- fail:
#rescue:
#- include: include.yml a=rescue
always:
- include: include.yml a=always
handlers:
- name: foo
include: include.yml a="this is a handler"
- hosts: ubuntu1404 - hosts: localhost
connection: local
gather_facts: no gather_facts: no
vars: vars:
foo: "BAD!!" foo: "BAD!!"
......
- hosts: localhost
gather_facts: no
roles:
- { role: role_a, tags: A, when: skip != "A" }
- { role: role_b, tags: B, when: skip != "B" }
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment