From 065733ad93abcc0031bbd3578bfc34b4f31a1753 Mon Sep 17 00:00:00 2001
From: James Cammarata <jimi@sngx.net>
Date: Fri, 2 Jan 2015 07:51:15 -0600
Subject: [PATCH] Moving more action plugins over and fixing some bugs with role loading

---
 v2/ansible/executor/connection_info.py    |  24 ++++++++++++++----------
 v2/ansible/executor/process/result.py     |  14 ++++++++------
 v2/ansible/executor/process/worker.py     |   5 ++++-
 v2/ansible/executor/task_executor.py      |  59 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++---
 v2/ansible/parsing/mod_args.py            |   2 +-
 v2/ansible/playbook/helpers.py            |   4 ++--
 v2/ansible/playbook/role/definition.py    |  11 ++++++++---
 v2/ansible/playbook/role/include.py       |   8 ++++----
 v2/ansible/playbook/role/metadata.py      |  15 +++++++++++----
 v2/ansible/playbook/taggable.py           |   6 +++++-
 v2/ansible/playbook/task.py               |   2 +-
 v2/ansible/playbook/task_include.py       |  10 ++++++----
 v2/ansible/plugins/action/__init__.py     |  16 +++++++++++-----
 v2/ansible/plugins/action/add_host.py     |  62 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 v2/ansible/plugins/action/assert.py       |  19 +++++++++++++------
 v2/ansible/plugins/action/async.py        |  68 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 v2/ansible/plugins/action/copy.py         |   8 ++++----
 v2/ansible/plugins/action/fail.py         |  33 +++++++++++++++++++++++++++++++++
 v2/ansible/plugins/action/fetch.py        | 152 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 v2/ansible/plugins/action/group_by.py     |  37 +++++++++++++++++++++++++++++++++++++
 v2/ansible/plugins/action/pause.py        | 134 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 v2/ansible/plugins/action/raw.py          |  39 +++++++++++++++++++++++++++++++++++++++
 v2/ansible/plugins/action/script.py       |  99 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 v2/ansible/plugins/action/synchronize.py  | 178 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 v2/ansible/plugins/action/template.py     | 165 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 v2/ansible/plugins/action/unarchive.py    | 118 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 v2/ansible/plugins/strategies/__init__.py |  85 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 v2/ansible/template/__init__.py           |  17 ++++++++++++++---
 v2/hacking/env-setup                      |   2 +-
 v2/test/playbook/test_task.py             |   1 -
 30 files changed, 1333 insertions(+), 60 deletions(-)
 create mode 100644 v2/ansible/plugins/action/add_host.py
 create mode 100644 v2/ansible/plugins/action/async.py
 create mode 100644 v2/ansible/plugins/action/fail.py
 create mode 100644 v2/ansible/plugins/action/fetch.py
 create mode 100644 v2/ansible/plugins/action/group_by.py
 create mode 100644 v2/ansible/plugins/action/pause.py
 create mode 100644 v2/ansible/plugins/action/raw.py
 create mode 100644 v2/ansible/plugins/action/script.py
 create mode 100644 v2/ansible/plugins/action/synchronize.py
 create mode 100644 v2/ansible/plugins/action/template.py
 create mode 100644 v2/ansible/plugins/action/unarchive.py

diff --git a/v2/ansible/executor/connection_info.py b/v2/ansible/executor/connection_info.py
index dbc988d..8f53c3f 100644
--- a/v2/ansible/executor/connection_info.py
+++ b/v2/ansible/executor/connection_info.py
@@ -51,7 +51,7 @@ class ConnectionInformation:
         self.sudo_user   = ''
         self.sudo_pass   = ''
         self.verbosity   = 0
-        self.only_tags   = set()
+        self.only_tags   = set(['all'])
         self.skip_tags   = set()
 
         if play:
@@ -93,15 +93,19 @@ class ConnectionInformation:
             self.connection = options.connection
 
         # get the tag info from options, converting a comma-separated list
-        # of values into a proper list if need be
-        if isinstance(options.tags, list):
-            self.only_tags.update(options.tags)
-        elif isinstance(options.tags, basestring):
-            self.only_tags.update(options.tags.split(','))
-        if isinstance(options.skip_tags, list):
-            self.skip_tags.update(options.skip_tags)
-        elif isinstance(options.skip_tags, basestring):
-            self.skip_tags.update(options.skip_tags.split(','))
+        # of values into a proper list if need be. We check to see if the
+        # options have the attribute, as it is not always added via the CLI
+        if hasattr(options, 'tags'):
+            if isinstance(options.tags, list):
+                self.only_tags.update(options.tags)
+            elif isinstance(options.tags, basestring):
+                self.only_tags.update(options.tags.split(','))
+
+        if hasattr(options, 'skip_tags'):
+            if isinstance(options.skip_tags, list):
+                self.skip_tags.update(options.skip_tags)
+            elif isinstance(options.skip_tags, basestring):
+                self.skip_tags.update(options.skip_tags.split(','))
 
     def copy(self, ci):
         '''
diff --git a/v2/ansible/executor/process/result.py b/v2/ansible/executor/process/result.py
index 71bfdd7..cb85801 100644
--- a/v2/ansible/executor/process/result.py
+++ b/v2/ansible/executor/process/result.py
@@ -110,16 +110,12 @@ class ResultProcess(multiprocessing.Process):
 
                 # send callbacks, execute other options based on the result status
                 if result.is_failed():
-                    #self._callback.runner_on_failed(result._task, result)
                     self._send_result(('host_task_failed', result))
                 elif result.is_unreachable():
-                    #self._callback.runner_on_unreachable(result._task, result)
                     self._send_result(('host_unreachable', result))
                 elif result.is_skipped():
-                    #self._callback.runner_on_skipped(result._task, result)
                     self._send_result(('host_task_skipped', result))
                 else:
-                    #self._callback.runner_on_ok(result._task, result)
                     self._send_result(('host_task_ok', result))
 
                     # if this task is notifying a handler, do it now
@@ -131,8 +127,14 @@ class ResultProcess(multiprocessing.Process):
                         for notify in result._task.notify:
                             self._send_result(('notify_handler', notify, result._host))
 
-                    # if this task is registering facts, do that now
-                    if 'ansible_facts' in result._result:
+                    if 'add_host' in result._result:
+                        # this task added a new host (add_host module)
+                        self._send_result(('add_host', result))
+                    elif 'add_group' in result._result:
+                        # this task added a new group (group_by module)
+                        self._send_result(('add_group', result))
+                    elif 'ansible_facts' in result._result:
+                        # if this task is registering facts, do that now
                         if result._task.action in ('set_fact', 'include_vars'):
                             for (key, value) in result._result['ansible_facts'].iteritems():
                                 self._send_result(('set_host_var', result._host, key, value))
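
The result process now forwards two new message types, ('add_host', result) and ('add_group', result), for the strategy layer to act on. The consumer lives in plugins/strategies/__init__.py, which is only summarized in the diffstat above; a minimal sketch of that dispatch, with _add_host()/_add_group() used as assumed helper names rather than the real API:

    # illustrative consumer of the new result types; helper names are assumptions
    def drain_results(final_q, add_host, add_group):
        while not final_q.empty():
            result = final_q.get()
            if result[0] == 'add_host':
                task_result = result[1]
                # the host_name/groups/host_vars dict built by the add_host action below
                add_host(task_result._result['add_host'])
            elif result[0] == 'add_group':
                task_result = result[1]
                add_group(task_result._host, task_result._result['add_group'])
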
diff --git a/v2/ansible/executor/process/worker.py b/v2/ansible/executor/process/worker.py
index dcb8e4e..3419d4e 100644
--- a/v2/ansible/executor/process/worker.py
+++ b/v2/ansible/executor/process/worker.py
@@ -74,6 +74,9 @@ class WorkerProcess(multiprocessing.Process):
                 # using the one that was passed in
                 pass
 
+        if self._new_stdin:
+            sys.stdin = self._new_stdin
+
         super(WorkerProcess, self).__init__()
 
     def run(self):
@@ -130,7 +133,7 @@ class WorkerProcess(multiprocessing.Process):
                 debug("WORKER EXCEPTION: %s" % traceback.format_exc())
                 try:
                     if task:
-                        task_result = TaskResult(host, task, dict(failed=True, exception=True, stdout=traceback.format_exc()))
+                        task_result = TaskResult(host, task, dict(failed=True, exception=traceback.format_exc(), stdout=''))
                         self._rslt_q.put(task_result, block=False)
                 except:
                     # FIXME: most likely an abort, catch those kinds of errors specifically
diff --git a/v2/ansible/executor/task_executor.py b/v2/ansible/executor/task_executor.py
index cc3e04a..b86b282 100644
--- a/v2/ansible/executor/task_executor.py
+++ b/v2/ansible/executor/task_executor.py
@@ -22,6 +22,7 @@ __metaclass__ = type
 from ansible import constants as C
 from ansible.errors import AnsibleError
 from ansible.executor.connection_info import ConnectionInformation
+from ansible.playbook.task import Task
 from ansible.plugins import lookup_loader, connection_loader, action_loader
 
 from ansible.utils.debug import debug
@@ -110,8 +111,8 @@ class TaskExecutor:
         the retry/until and block rescue/always execution
         '''
 
-        connection = self._get_connection()
-        handler    = self._get_action_handler(connection=connection)
+        self._connection = self._get_connection()
+        self._handler    = self._get_action_handler(connection=self._connection)
 
         # check to see if this task should be skipped, due to it being a member of a
         # role which has already run (and whether that role allows duplicate execution)
@@ -147,8 +148,12 @@ class TaskExecutor:
                 result['attempts'] = attempt + 1
 
             debug("running the handler")
-            result = handler.run(task_vars=self._job_vars)
+            result = self._handler.run(task_vars=self._job_vars)
             debug("handler run complete")
+
+            if self._task.async > 0 and self._task.poll > 0:
+                result = self._poll_async_result(result=result)
+
             if self._task.until:
                 # TODO: implement until logic (pseudo logic follows...)
                 # if VariableManager.check_conditional(cond, extra_vars=(dict(result=result))):
@@ -164,6 +169,54 @@ class TaskExecutor:
         debug("attempt loop complete, returning result")
         return result
 
+    def _poll_async_result(self, result):
+        '''
+        Polls for the specified JID to be complete
+        '''
+
+        # the async_wrapper module returns dumped JSON via its stdout
+        # response, so we parse it here
+        try:
+            async_data = json.loads(result.get('stdout'))
+        except ValueError, e:
+            return dict(failed=True, msg="The async task did not return valid JSON: %s" % str(e))
+
+        async_jid = async_data.get('ansible_job_id')
+        if async_jid is None:
+            return dict(failed=True, msg="No job id was returned by the async task")
+
+        # Create a new pseudo-task to run the async_status module, and run
+        # that (with a sleep for "poll" seconds between each retry) until the
+        # async time limit is exceeded.
+
+        async_task = Task().load(dict(action='async_status jid=%s' % async_jid))
+
+        # Because this is an async task, the action handler is async. However,
+        # we need the 'normal' action handler for the status check, so get it
+        # now via the action_loader
+        normal_handler = action_loader.get(
+            'normal',
+            task=async_task,
+            connection=self._connection,
+            connection_info=self._connection_info,
+            loader=self._loader
+        )
+
+        time_left = self._task.async
+        while time_left > 0:
+            time.sleep(self._task.poll)
+
+            async_result = normal_handler.run()
+            if int(async_result.get('finished', 0)) == 1 or 'failed' in async_result or 'skipped' in async_result:
+                break
+
+            time_left -= self._task.poll
+
+        if int(async_result.get('finished', 0)) != 1:
+            return dict(failed=True, msg="async task did not complete within the requested time")
+        else:
+            return async_result
+
     def _get_connection(self):
         '''
         Reads the connection property for the host, and returns the
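
For reference, the stdout parsed by json.loads() in _poll_async_result() is the small JSON blob the async_wrapper module prints; roughly (values illustrative, only ansible_job_id is relied on above):

    import json

    # illustrative async_wrapper stdout
    stdout = '{"started": 1, "finished": 0, "ansible_job_id": "123456789012.1234"}'
    async_data = json.loads(stdout)
    async_jid = async_data.get('ansible_job_id')   # "123456789012.1234"
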
diff --git a/v2/ansible/parsing/mod_args.py b/v2/ansible/parsing/mod_args.py
index 55dc12b..2e11b47 100644
--- a/v2/ansible/parsing/mod_args.py
+++ b/v2/ansible/parsing/mod_args.py
@@ -228,7 +228,7 @@ class ModuleArgsParser:
 
         # walk the input dictionary to see we recognize a module name
         for (item, value) in iteritems(self._task_ds):
-            if item in module_loader:
+            if item in module_loader or item == 'meta':
                 # finding more than one module name is a problem
                 if action is not None:
                     raise AnsibleParserError("conflicting action statements", obj=self._task_ds)
diff --git a/v2/ansible/playbook/helpers.py b/v2/ansible/playbook/helpers.py
index 3a7c2c0..f4bbc8d 100644
--- a/v2/ansible/playbook/helpers.py
+++ b/v2/ansible/playbook/helpers.py
@@ -101,7 +101,7 @@ def load_list_of_tasks(ds, block=None, role=None, task_include=None, use_handler
     return task_list
 
 
-def load_list_of_roles(ds, variable_manager=None, loader=None):
+def load_list_of_roles(ds, current_role_path=None, variable_manager=None, loader=None):
     '''
     Loads and returns a list of RoleInclude objects from the datastructure
     list of role definitions
@@ -114,7 +114,7 @@ def load_list_of_roles(ds, variable_manager=None, loader=None):
 
     roles = []
     for role_def in ds:
-        i = RoleInclude.load(role_def, variable_manager=variable_manager, loader=loader)
+        i = RoleInclude.load(role_def, current_role_path=current_role_path, variable_manager=variable_manager, loader=loader)
         roles.append(i)
 
     return roles
diff --git a/v2/ansible/playbook/role/definition.py b/v2/ansible/playbook/role/definition.py
index 7f9a93d..9f2a7ed 100644
--- a/v2/ansible/playbook/role/definition.py
+++ b/v2/ansible/playbook/role/definition.py
@@ -38,8 +38,8 @@ class RoleDefinition(Base, Conditional, Taggable):
 
     _role = FieldAttribute(isa='string')
 
-    def __init__(self):
-        self._role_path   = None
+    def __init__(self, role_path=None):
+        self._role_path   = role_path
         self._role_params = dict()
         super(RoleDefinition, self).__init__()
 
@@ -112,7 +112,11 @@ class RoleDefinition(Base, Conditional, Taggable):
         '''
 
         # FIXME: this should use unfrackpath once the utils code has been sorted out
-        role_path = os.path.normpath(role_name)
+        if self._role_path:
+            role_path = self._role_path
+        else:
+            role_path = os.path.normpath(role_name)
+
         if self._loader.path_exists(role_path):
             role_name = os.path.basename(role_name)
             return (role_name, role_path)
@@ -127,6 +131,7 @@ class RoleDefinition(Base, Conditional, Taggable):
         #        in the yaml so the error line/file can be reported
         #        here
 
+        #import epdb; epdb.st()
         raise AnsibleError("the role '%s' was not found" % role_name)
 
     def _split_role_params(self, ds):
diff --git a/v2/ansible/playbook/role/include.py b/v2/ansible/playbook/role/include.py
index e036e78..4d39050 100644
--- a/v2/ansible/playbook/role/include.py
+++ b/v2/ansible/playbook/role/include.py
@@ -37,13 +37,13 @@ class RoleInclude(RoleDefinition):
     FIXME: docstring
     """
 
-    def __init__(self):
-        super(RoleInclude, self).__init__()
+    def __init__(self, role_path=None):
+        super(RoleInclude, self).__init__(role_path=role_path)
 
     @staticmethod
-    def load(data, parent_role=None, variable_manager=None, loader=None):
+    def load(data, current_role_path=None, parent_role=None, variable_manager=None, loader=None):
         assert isinstance(data, string_types) or isinstance(data, dict)
 
-        ri = RoleInclude()
+        ri = RoleInclude(role_path=current_role_path)
         return ri.load_data(data, variable_manager=variable_manager, loader=loader)
 
diff --git a/v2/ansible/playbook/role/metadata.py b/v2/ansible/playbook/role/metadata.py
index ecddd0e..05ed2f3 100644
--- a/v2/ansible/playbook/role/metadata.py
+++ b/v2/ansible/playbook/role/metadata.py
@@ -19,6 +19,8 @@
 from __future__ import (absolute_import, division, print_function)
 __metaclass__ = type
 
+import os
+
 from six import iteritems, string_types
 
 from ansible.errors import AnsibleParserError
@@ -41,8 +43,8 @@ class RoleMetadata(Base):
     _dependencies     = FieldAttribute(isa='list', default=[])
     _galaxy_info      = FieldAttribute(isa='GalaxyInfo')
 
-    def __init__(self):
-        self._owner = None
+    def __init__(self, owner=None):
+        self._owner = owner
         super(RoleMetadata, self).__init__()
 
     @staticmethod
@@ -54,7 +56,7 @@ class RoleMetadata(Base):
         if not isinstance(data, dict):
             raise AnsibleParserError("the 'meta/main.yml' for role %s is not a dictionary" % owner.get_name())
 
-        m = RoleMetadata().load_data(data, variable_manager=variable_manager, loader=loader)
+        m = RoleMetadata(owner=owner).load_data(data, variable_manager=variable_manager, loader=loader)
         return m
 
     def _load_dependencies(self, attr, ds):
@@ -62,7 +64,12 @@ class RoleMetadata(Base):
         This is a helper loading function for the dependencies list,
         which returns a list of RoleInclude objects
         '''
-        return load_list_of_roles(ds, variable_manager=self._variable_manager, loader=self._loader)
+
+        current_role_path = None
+        if self._owner:
+            current_role_path = os.path.dirname(self._owner._role_path)
+
+        return load_list_of_roles(ds, current_role_path=current_role_path, variable_manager=self._variable_manager, loader=self._loader)
 
     def _load_galaxy_info(self, attr, ds):
         '''
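
In practice this means dependencies listed in a role's meta/main.yml are now also searched relative to the directory containing the owning role. A tiny worked example of the path handed to load_list_of_roles(), with an assumed role location:

    import os

    # if the owning role was loaded from /site/roles/webserver, its dependencies
    # are resolved against /site/roles as well
    owner_role_path   = '/site/roles/webserver'
    current_role_path = os.path.dirname(owner_role_path)   # '/site/roles'
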
diff --git a/v2/ansible/playbook/taggable.py b/v2/ansible/playbook/taggable.py
index 1b2e25e..a1d9f64 100644
--- a/v2/ansible/playbook/taggable.py
+++ b/v2/ansible/playbook/taggable.py
@@ -31,7 +31,11 @@ class Taggable:
         return self._tags[:]
 
     def evaluate_tags(self, only_tags, skip_tags):
-        my_tags = set(self.tags)
+        if self.tags:
+            my_tags = set(self.tags)
+        else:
+            my_tags = set()
+
         if skip_tags:
             skipped_tags = my_tags.intersection(skip_tags)
             if len(skipped_tags) > 0:
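
Combined with the only_tags = set(['all']) default added to ConnectionInformation above, an untagged task is still selected unless --tags narrows the run. A simplified sketch of the intended semantics (not the full evaluate_tags body):

    def should_run(task_tags, only_tags, skip_tags):
        # simplified sketch of the tag-matching logic
        my_tags = set(task_tags) if task_tags else set()
        if skip_tags and my_tags.intersection(skip_tags):
            return False
        if 'all' in only_tags:
            return True
        return bool(my_tags.intersection(only_tags))
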
diff --git a/v2/ansible/playbook/task.py b/v2/ansible/playbook/task.py
index fa9b8cb..5919759 100644
--- a/v2/ansible/playbook/task.py
+++ b/v2/ansible/playbook/task.py
@@ -53,7 +53,7 @@ class Task(Base, Conditional, Taggable):
     # will be used if defined
     # might be possible to define others
 
-    _args                 = FieldAttribute(isa='dict')
+    _args                 = FieldAttribute(isa='dict', default=dict())
     _action               = FieldAttribute(isa='string')
 
     _always_run           = FieldAttribute(isa='bool')
diff --git a/v2/ansible/playbook/task_include.py b/v2/ansible/playbook/task_include.py
index 0b561b0..e30a558 100644
--- a/v2/ansible/playbook/task_include.py
+++ b/v2/ansible/playbook/task_include.py
@@ -49,6 +49,7 @@ class TaskInclude(Base):
     #-----------------------------------------------------------------
     # Attributes
 
+    _name      = FieldAttribute(isa='string')
     _include   = FieldAttribute(isa='string')
     _loop      = FieldAttribute(isa='string', private=True)
     _loop_args = FieldAttribute(isa='list', private=True)
@@ -56,18 +57,19 @@ class TaskInclude(Base):
     _vars      = FieldAttribute(isa='dict', default=dict())
     _when      = FieldAttribute(isa='list', default=[])
 
-    def __init__(self, block=None, role=None, task_include=None):
+    def __init__(self, block=None, role=None, task_include=None, use_handlers=False):
         self._block        = block
         self._role         = role
         self._task_include = task_include
+        self._use_handlers = use_handlers
 
         self._task_blocks  = []
 
         super(TaskInclude, self).__init__()
 
     @staticmethod
-    def load(data, block=None, role=None, task_include=None, variable_manager=None, loader=None):
-        ti = TaskInclude(block=block, role=role, task_include=None)
+    def load(data, block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
+        ti = TaskInclude(block=block, role=role, task_include=None, use_handlers=use_handlers)
         return ti.load_data(data, variable_manager=variable_manager, loader=loader)
 
     def munge(self, ds):
@@ -148,7 +150,7 @@ class TaskInclude(Base):
                                 parent_block=self._block,
                                 task_include=self,
                                 role=self._role,
-                                variable_manager=self._variable_manager,
+                                use_handlers=self._use_handlers,
                                 loader=self._loader
                             )
         return ds
diff --git a/v2/ansible/plugins/action/__init__.py b/v2/ansible/plugins/action/__init__.py
index a093009..c9c5022 100644
--- a/v2/ansible/plugins/action/__init__.py
+++ b/v2/ansible/plugins/action/__init__.py
@@ -23,6 +23,7 @@ import StringIO
 import json
 import os
 import random
+import sys # FIXME: probably not needed
 import tempfile
 import time
 
@@ -249,8 +250,8 @@ class ActionBase:
         data = self._low_level_execute_command(cmd, tmp, sudoable=True)
         # FIXME: implement this function?
         #data2 = utils.last_non_blank_line(data['stdout'])
-        data2 = data['stdout'].strip().splitlines()[-1]
         try:
+            data2 = data['stdout'].strip().splitlines()[-1]
             if data2 == '':
                 # this may happen if the connection to the remote server
                 # failed, so just return "INVALIDCHECKSUM" to avoid errors
@@ -258,6 +259,8 @@ class ActionBase:
             else:
                 return data2.split()[0]
         except IndexError:
+            # FIXME: this should probably not print to sys.stderr, but should instead
+            #        fail in a more normal way?
             sys.stderr.write("warning: Calculating checksum failed unusually, please report this to the list so it can be fixed\n")
             sys.stderr.write("command: %s\n" % cmd)
             sys.stderr.write("----\n")
@@ -331,7 +334,7 @@ class ActionBase:
 
         # a remote tmp path may be necessary and not already created
         remote_module_path = None
-        if self._late_needs_tmp_path(tmp, module_style):
+        if not tmp and self._late_needs_tmp_path(tmp, module_style):
             tmp = self._make_tmp_path()
             remote_module_path = self._shell.join_path(tmp, module_name)
 
@@ -384,9 +387,12 @@ class ActionBase:
 
         # FIXME: in error situations, the stdout may not contain valid data, so we
         #        should check for bad rc codes better to catch this here
-        data = json.loads(self._filter_leading_non_json_lines(res['stdout']))
-        if 'parsed' in data and data['parsed'] == False:
-            data['msg'] += res['stderr']
+        if 'stdout' in res and res['stdout'].strip():
+            data = json.loads(self._filter_leading_non_json_lines(res['stdout']))
+            if 'parsed' in data and data['parsed'] == False:
+                data['msg'] += res['stderr']
+        else:
+            data = dict()
 
         debug("done with _execute_module (%s, %s)" % (module_name, module_args))
         return data
diff --git a/v2/ansible/plugins/action/add_host.py b/v2/ansible/plugins/action/add_host.py
new file mode 100644
index 0000000..e28361b
--- /dev/null
+++ b/v2/ansible/plugins/action/add_host.py
@@ -0,0 +1,62 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+# Copyright 2012, Seth Vidal <skvidal@fedoraproject.org>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible.plugins.action import ActionBase
+
+class ActionModule(ActionBase):
+    ''' Create inventory hosts and groups in the memory inventory'''
+
+    ### We need to be able to modify the inventory
+    BYPASS_HOST_LOOP = True
+    TRANSFERS_FILES = False
+
+    def run(self, tmp=None, task_vars=dict()):
+
+        # FIXME: is this necessary in v2?
+        #if self.runner.noop_on_check(inject):
+        #    return ReturnData(conn=conn, comm_ok=True, result=dict(skipped=True, msg='check mode not supported for this module'))
+
+        # Parse out any hostname:port patterns
+        new_name = self._task.args.get('name', self._task.args.get('hostname', None))
+        #vv("creating host via 'add_host': hostname=%s" % new_name)
+
+        if ":" in new_name:
+            new_name, new_port = new_name.split(":")
+            self._task.args['ansible_ssh_port'] = new_port
+
+        groups = self._task.args.get('groupname', self._task.args.get('groups', self._task.args.get('group', ''))) 
+        # add it to the group if that was specified
+        new_groups = []
+        if groups:
+            for group_name in groups.split(","):
+                if group_name not in new_groups:
+                    new_groups.append(group_name.strip())
+
+        # Add any variables to the new_host
+        host_vars = dict()
+        for k in self._task.args.keys():
+            if not k in [ 'name', 'hostname', 'groupname', 'groups' ]:
+                host_vars[k] = self._task.args[k] 
+
+        return dict(changed=True, add_host=dict(host_name=new_name, groups=new_groups, host_vars=host_vars))
+
+
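
For illustration, a task such as "add_host: name=web1:2222 groups=webservers,staging foo=bar" flows through the code above and returns roughly this structure, which the result process then routes as an 'add_host' message:

    # illustrative return value; ansible_ssh_port is injected by the host:port split above
    result = dict(
        changed=True,
        add_host=dict(
            host_name='web1',
            groups=['webservers', 'staging'],
            host_vars={'foo': 'bar', 'ansible_ssh_port': '2222'},
        ),
    )
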
diff --git a/v2/ansible/plugins/action/assert.py b/v2/ansible/plugins/action/assert.py
index 7b3f2df..e1b2ba1 100644
--- a/v2/ansible/plugins/action/assert.py
+++ b/v2/ansible/plugins/action/assert.py
@@ -25,17 +25,24 @@ class ActionModule(ActionBase):
 
     def run(self, tmp=None, task_vars=dict()):
 
-        # note: the fail module does not need to pay attention to check mode
-        # it always runs.
+        if not 'that' in self._task.args:
+            raise AnsibleError('conditional required in "that" string')
 
         msg = None
         if 'msg' in self._task.args:
             msg = self._task.args['msg']
 
-        if not 'that' in self._task.args:
-            raise AnsibleError('conditional required in "that" string')
-
-        for that in self._task.args['that']:
+        # make sure the 'that' items are a list
+        thats = self._task.args['that']
+        if not isinstance(thats, list):
+            thats = [ thats ]
+
+        # Now we iterate over the that items, temporarily assigning them
+        # to the task's when value so we can evaluate the conditional using
+        # the built in evaluate function. The when has already been evaluated
+        # by this point, and is not used again, so we don't care about mangling
+        # that value now
+        for that in thats:
             self._task.when = [ that ]
             test_result = self._task.evaluate_conditional(all_vars=task_vars)
             if not test_result:
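
Both a bare string and a list are accepted for 'that'; a short runnable illustration of the normalization performed before the loop, with the arg dicts shown as the action would receive them:

    args_single = {'that': "ansible_os_family == 'Debian'"}
    args_list   = {'that': ["foo is defined", "foo == 'bar'"], 'msg': 'foo must be bar'}

    for args in (args_single, args_list):
        thats = args['that']
        if not isinstance(thats, list):
            thats = [thats]
        # each entry in thats is then evaluated via the task's conditional machinery
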
diff --git a/v2/ansible/plugins/action/async.py b/v2/ansible/plugins/action/async.py
new file mode 100644
index 0000000..6fbf93d
--- /dev/null
+++ b/v2/ansible/plugins/action/async.py
@@ -0,0 +1,68 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
+
+import json
+import random
+
+from ansible import constants as C
+from ansible.plugins.action import ActionBase
+
+class ActionModule(ActionBase):
+
+    def run(self, tmp=None, task_vars=dict()):
+        ''' transfer the given module name, plus the async module, then run it '''
+
+        # FIXME: noop stuff needs to be sorted out
+        #if self.runner.noop_on_check(inject):
+        #    return ReturnData(conn=conn, comm_ok=True, result=dict(skipped=True, msg='check mode not supported for this module'))
+
+        if not tmp:
+            tmp = self._make_tmp_path()
+
+        module_name = self._task.action
+        async_module_path  = self._shell.join_path(tmp, 'async_wrapper')
+        remote_module_path = self._shell.join_path(tmp, module_name)
+
+        env_string = self._compute_environment_string()
+
+        # configure, upload, and chmod the target module
+        (module_style, shebang, module_data) = self._configure_module(module_name=module_name, module_args=self._task.args)
+        self._transfer_data(remote_module_path, module_data)
+        self._remote_chmod(tmp, 'a+rx', remote_module_path)
+
+        # configure, upload, and chmod the async_wrapper module
+        (async_module_style, shebang, async_module_data) = self._configure_module(module_name='async_wrapper', module_args=dict())
+        self._transfer_data(async_module_path, async_module_data)
+        self._remote_chmod(tmp, 'a+rx', async_module_path)
+
+        argsfile = self._transfer_data(self._shell.join_path(tmp, 'arguments'), json.dumps(self._task.args))
+
+        async_limit = self._task.async
+        async_jid   = str(random.randint(0, 999999999999))
+
+        async_cmd = " ".join([str(x) for x in [async_module_path, async_jid, async_limit, remote_module_path, argsfile]])
+        result = self._low_level_execute_command(cmd=async_cmd, tmp=None)
+
+        # clean up after
+        if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES:
+            self._remove_tmp_path(tmp)
+
+        result['changed'] = True
+
+        return result
+
+
diff --git a/v2/ansible/plugins/action/copy.py b/v2/ansible/plugins/action/copy.py
index 6e1591c..b7aefeb 100644
--- a/v2/ansible/plugins/action/copy.py
+++ b/v2/ansible/plugins/action/copy.py
@@ -87,8 +87,8 @@ class ActionModule(ActionBase):
                     content_tempfile = self._create_content_tempfile(content)
                 source = content_tempfile
             except Exception, err:
-                result = dict(failed=True, msg="could not write content temp file: %s" % err)
-                return ReturnData(conn=conn, result=result)
+                return dict(failed=True, msg="could not write content temp file: %s" % err)
+
         ###############################################################################################
         # FIXME: first_available_file needs to be reworked?
         ###############################################################################################
@@ -263,7 +263,7 @@ class ActionModule(ActionBase):
                 #if self.runner.no_log:
                 #    new_module_args['NO_LOG'] = True
 
-                module_return = self._execute_module(module_name='copy', module_args=new_module_args, tmp=tmp, delete_remote_tmp=delete_remote_tmp)
+                module_return = self._execute_module(module_name='copy', module_args=new_module_args, delete_remote_tmp=delete_remote_tmp)
                 module_executed = True
 
             else:
@@ -292,7 +292,7 @@ class ActionModule(ActionBase):
                 #    new_module_args['NO_LOG'] = True
 
                 # Execute the file module.
-                module_return = self._execute_module(module_name='file', module_args=new_module_args, tmp=tmp, delete_remote_tmp=delete_remote_tmp)
+                module_return = self._execute_module(module_name='file', module_args=new_module_args, delete_remote_tmp=delete_remote_tmp)
                 module_executed = True
 
             if not module_return.get('checksum'):
diff --git a/v2/ansible/plugins/action/fail.py b/v2/ansible/plugins/action/fail.py
new file mode 100644
index 0000000..a95ccb3
--- /dev/null
+++ b/v2/ansible/plugins/action/fail.py
@@ -0,0 +1,33 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+# (c) 2012, Dag Wieers <dag@wieers.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
+
+from ansible.plugins.action import ActionBase
+
+class ActionModule(ActionBase):
+    ''' Fail with custom message '''
+
+    TRANSFERS_FILES = False
+
+    def run(self, tmp=None, task_vars=dict()):
+
+        msg = 'Failed as requested from task'
+        if self._task.args and 'msg' in self._task.args:
+            msg = self._task.args.get('msg')
+
+        return dict(failed=True, msg=msg)
+
diff --git a/v2/ansible/plugins/action/fetch.py b/v2/ansible/plugins/action/fetch.py
new file mode 100644
index 0000000..0ce33c6
--- /dev/null
+++ b/v2/ansible/plugins/action/fetch.py
@@ -0,0 +1,152 @@
+# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
+
+import os
+import pwd
+import random
+import traceback
+import tempfile
+import base64
+
+from ansible import constants as C
+from ansible.errors import *
+from ansible.plugins.action import ActionBase
+from ansible.utils.boolean import boolean
+from ansible.utils.hashing import checksum, checksum_s, md5, secure_hash
+
+class ActionModule(ActionBase):
+
+    def run(self, tmp=None, task_vars=dict()):
+        ''' handler for fetch operations '''
+
+        # FIXME: is this even required anymore?
+        #if self.runner.noop_on_check(inject):
+        #    return ReturnData(conn=conn, comm_ok=True, result=dict(skipped=True, msg='check mode not (yet) supported for this module'))
+
+        source            = self._task.args.get('src', None)
+        dest              = self._task.args.get('dest', None)
+        flat              = boolean(self._task.args.get('flat'))
+        fail_on_missing   = boolean(self._task.args.get('fail_on_missing'))
+        validate_checksum = boolean(self._task.args.get('validate_checksum', self._task.args.get('validate_md5')))
+
+        if 'validate_md5' in self._task.args and 'validate_checksum' in self._task.args:
+            return dict(failed=True, msg="validate_checksum and validate_md5 cannot both be specified")
+
+        if source is None or dest is None:
+            return dict(failed=True, msg="src and dest are required")
+
+        source = self._shell.join_path(source)
+        source = self._remote_expand_user(source, tmp)
+
+        # calculate checksum for the remote file
+        remote_checksum = self._remote_checksum(tmp, source)
+
+        # use slurp if sudo and permissions are lacking
+        remote_data = None
+        if remote_checksum in ('1', '2') or self._connection_info.sudo:
+            slurpres = self._execute_module(module_name='slurp', module_args=dict(src=source), tmp=tmp)
+            if slurpres.get('rc') == 0:
+                if slurpres['encoding'] == 'base64':
+                    remote_data = base64.b64decode(slurpres['content'])
+                if remote_data is not None:
+                    remote_checksum = checksum_s(remote_data)
+                # the source path may have been expanded on the
+                # target system, so we compare it here and use the
+                # expanded version if it's different
+                remote_source = slurpres.get('source')
+                if remote_source and remote_source != source:
+                    source = remote_source
+            else:
+                # FIXME: should raise an error here? the old code did nothing
+                pass
+
+        # calculate the destination name
+        if os.path.sep not in self._shell.join_path('a', ''):
+            source_local = source.replace('\\', '/')
+        else:
+            source_local = source
+
+        dest = os.path.expanduser(dest)
+        if flat:
+            if dest.endswith("/"):
+                # if the path ends with "/", we'll use the source filename as the
+                # destination filename
+                base = os.path.basename(source_local)
+                dest = os.path.join(dest, base)
+            if not dest.startswith("/"):
+                # if dest does not start with "/", we'll assume a relative path
+                dest = self._loader.path_dwim(dest)
+        else:
+            # files are saved in dest dir, with a subdir for each host, then the filename
+            dest = "%s/%s/%s" % (self._loader.path_dwim(dest), self._connection._host, source_local)
+
+        dest = dest.replace("//","/")
+
+        if remote_checksum in ('0', '1', '2', '3', '4'):
+            # these don't fail because you may want to transfer a log file that
+            # may or may not exist, and keep going to fetch other log files
+            if remote_checksum == '0':
+                result = dict(msg="unable to calculate the checksum of the remote file", file=source, changed=False)
+            elif remote_checksum == '1':
+                if fail_on_missing:
+                    result = dict(failed=True, msg="the remote file does not exist", file=source)
+                else:
+                    result = dict(msg="the remote file does not exist, not transferring, ignored", file=source, changed=False)
+            elif remote_checksum == '2':
+                result = dict(msg="no read permission on remote file, not transferring, ignored", file=source, changed=False)
+            elif remote_checksum == '3':
+                result = dict(msg="remote file is a directory, fetch cannot work on directories", file=source, changed=False)
+            elif remote_checksum == '4':
+                result = dict(msg="python isn't present on the system.  Unable to compute checksum", file=source, changed=False)
+            return result
+
+        # calculate checksum for the local file
+        local_checksum = checksum(dest)
+
+        if remote_checksum != local_checksum:
+            # create the containing directories, if needed
+            if not os.path.isdir(os.path.dirname(dest)):
+                os.makedirs(os.path.dirname(dest))
+
+            # fetch the file and check for changes
+            if remote_data is None:
+                self._connection.fetch_file(source, dest)
+            else:
+                f = open(dest, 'w')
+                f.write(remote_data)
+                f.close()
+            new_checksum = secure_hash(dest)
+            # For backwards compatibility.  We'll return None on FIPS enabled
+            # systems
+            try:
+                new_md5 = md5(dest)
+            except ValueError:
+                new_md5 = None
+
+            if validate_checksum and new_checksum != remote_checksum:
+                return dict(failed=True, md5sum=new_md5, msg="checksum mismatch", file=source, dest=dest, remote_md5sum=None, checksum=new_checksum, remote_checksum=remote_checksum)
+            return dict(changed=True, md5sum=new_md5, dest=dest, remote_md5sum=None, checksum=new_checksum, remote_checksum=remote_checksum)
+        else:
+            # For backwards compatibility.  We'll return None on FIPS enabled
+            # systems
+            try:
+                local_md5 = md5(dest)
+            except ValueError:
+                local_md5 = None
+
+            return dict(changed=False, md5sum=local_md5, file=source, dest=dest, checksum=local_checksum)
+
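
A small runnable restatement of the destination-name logic above, with a hypothetical dwim() helper standing in for self._loader.path_dwim() and assumed to root relative paths at /playbooks:

    import os

    def fetch_dest(dest, source, host, flat, dwim=lambda p: os.path.join('/playbooks', p)):
        dest = os.path.expanduser(dest)
        if flat:
            if dest.endswith('/'):
                dest = os.path.join(dest, os.path.basename(source))
            if not dest.startswith('/'):
                dest = dwim(dest)
        else:
            # non-flat: dest dir, then a subdir per host, then the full remote path
            dest = "%s/%s/%s" % (dwim(dest), host, source)
        return dest.replace('//', '/')

    print(fetch_dest('backups/', '/var/log/syslog', 'db01', flat=False))   # /playbooks/backups/db01/var/log/syslog
    print(fetch_dest('backups/', '/var/log/syslog', 'db01', flat=True))    # /playbooks/backups/syslog
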
diff --git a/v2/ansible/plugins/action/group_by.py b/v2/ansible/plugins/action/group_by.py
new file mode 100644
index 0000000..50e0cc0
--- /dev/null
+++ b/v2/ansible/plugins/action/group_by.py
@@ -0,0 +1,37 @@
+# Copyright 2012, Jeroen Hoekx <jeroen@hoekx.be>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
+
+from ansible.errors import *
+from ansible.plugins.action import ActionBase
+
+class ActionModule(ActionBase):
+    ''' Create inventory groups based on variables '''
+
+    ### We need to be able to modify the inventory
+    BYPASS_HOST_LOOP = True
+    TRANSFERS_FILES = False
+
+    def run(self, tmp=None, task_vars=dict()):
+
+        if not 'key' in self._task.args:
+            return dict(failed=True, msg="the 'key' param is required when using group_by")
+
+        group_name = self._task.args.get('key')
+        group_name = group_name.replace(' ','-')
+
+        return dict(changed=True, add_group=group_name)
+
diff --git a/v2/ansible/plugins/action/pause.py b/v2/ansible/plugins/action/pause.py
new file mode 100644
index 0000000..9c6075e
--- /dev/null
+++ b/v2/ansible/plugins/action/pause.py
@@ -0,0 +1,134 @@
+# Copyright 2012, Tim Bielawa <tbielawa@redhat.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
+
+import datetime
+import sys
+import time
+
+from termios import tcflush, TCIFLUSH
+
+from ansible.errors import *
+from ansible.plugins.action import ActionBase
+
+class ActionModule(ActionBase):
+    ''' pauses execution for a length of time, or until input is received '''
+
+    PAUSE_TYPES = ['seconds', 'minutes', 'prompt', '']
+    BYPASS_HOST_LOOP = True
+
+    def run(self, tmp=None, task_vars=dict()):
+        ''' run the pause action module '''
+
+        duration_unit = 'minutes'
+        prompt = None
+        seconds = None
+        result = dict(
+            changed = False,
+            rc      = 0,
+            stderr  = '',
+            stdout  = '',
+            start   = None,
+            stop    = None,
+            delta   = None,
+        )
+
+        # FIXME: not sure if we can get this info directly like this anymore?
+        #hosts = ', '.join(self.runner.host_set)
+
+        # If 'args' is empty, then this is the default prompted pause
+        if self._task.args is None or len(self._task.args.keys()) == 0:
+            pause_type = 'prompt'
+            #prompt = "[%s]\nPress enter to continue:\n" % hosts
+            prompt = "[%s]\nPress enter to continue:\n" % self._task.get_name().strip()
+
+        # Do 'minutes' or 'seconds' keys exist in 'args'?
+        elif 'minutes' in self._task.args or 'seconds' in self._task.args:
+            try:
+                if 'minutes' in self._task.args:
+                    pause_type = 'minutes'
+                    # The time() command operates in seconds so we need to
+                    # recalculate for minutes=X values.
+                    seconds = int(self._task.args['minutes']) * 60
+                else:
+                    pause_type = 'seconds'
+                    seconds = int(self._task.args['seconds'])
+                    duration_unit = 'seconds'
+
+            except ValueError, e:
+                return dict(failed=True, msg="non-integer value given for prompt duration:\n%s" % str(e))
+
+        # Is 'prompt' a key in 'args'?
+        elif 'prompt' in self._task.args:
+            pause_type = 'prompt'
+            #prompt = "[%s]\n%s:\n" % (hosts, self._task.args['prompt'])
+            prompt = "[%s]\n%s:\n" % (self._task.get_name().strip(), self._task.args['prompt'])
+
+        # I have no idea what you're trying to do. But it's so wrong.
+        else:
+            return dict(failed=True, msg="invalid pause type given. must be one of: %s" % ", ".join(self.PAUSE_TYPES))
+
+        #vv("created 'pause' ActionModule: pause_type=%s, duration_unit=%s, calculated_seconds=%s, prompt=%s" % \
+        #        (self.pause_type, self.duration_unit, self.seconds, self.prompt))
+
+        ########################################################################
+        # Begin the hard work!
+
+        start = time.time()
+        result['start'] = str(datetime.datetime.now())
+
+
+        # FIXME: this is all very broken right now, as prompting from the worker side
+        #        is not really going to be supported, and actions marked as BYPASS_HOST_LOOP
+        #        probably should not be run through the executor engine at all. Also, ctrl+c
+        #        is now captured on the parent thread, so it can't be caught here via the
+        #        KeyboardInterrupt exception.
+
+        try:
+            if not pause_type == 'prompt':
+                print("(^C-c = continue early, ^C-a = abort)")
+                #print("[%s]\nPausing for %s seconds" % (hosts, seconds))
+                print("[%s]\nPausing for %s seconds" % (self._task.get_name().strip(), seconds))
+                time.sleep(seconds)
+            else:
+                # Clear out any unflushed buffered input which would
+                # otherwise be consumed by raw_input() prematurely.
+                #tcflush(sys.stdin, TCIFLUSH)
+                result['user_input'] = raw_input(prompt.encode(sys.stdout.encoding))
+        except KeyboardInterrupt:
+            while True:
+                print('\nAction? (a)bort/(c)ontinue: ')
+                c = getch()
+                if c == 'c':
+                    # continue playbook evaluation
+                    break
+                elif c == 'a':
+                    # abort further playbook evaluation
+                    raise AnsibleError('user requested abort!')
+        finally:
+            duration = time.time() - start
+            result['stop'] = str(datetime.datetime.now())
+            result['delta'] = int(duration)
+
+            if duration_unit == 'minutes':
+                duration = round(duration / 60.0, 2)
+            else:
+                duration = round(duration, 2)
+
+            result['stdout'] = "Paused for %s %s" % (duration, duration_unit)
+
+        return result
+
diff --git a/v2/ansible/plugins/action/raw.py b/v2/ansible/plugins/action/raw.py
new file mode 100644
index 0000000..d1d1b28
--- /dev/null
+++ b/v2/ansible/plugins/action/raw.py
@@ -0,0 +1,39 @@
+# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
+
+import re
+
+from ansible.plugins.action import ActionBase
+
+class ActionModule(ActionBase):
+    TRANSFERS_FILES = False
+
+    def run(self, tmp=None, task_vars=dict()):
+
+        # FIXME: need to rework the noop stuff still
+        #if self.runner.noop_on_check(inject):
+        #    # in --check mode, always skip this module execution
+        #    return ReturnData(conn=conn, comm_ok=True, result=dict(skipped=True))
+
+        executable = self._task.args.get('executable')
+        result = self._low_level_execute_command(self._task.args.get('_raw_params'), tmp=tmp, executable=executable)
+
+        # for some modules (script, raw), the sudo success key
+        # may leak into the stdout due to the way the sudo/su
+        # command is constructed, so we filter that out here
+        if result.get('stdout','').strip().startswith('SUDO-SUCCESS-'):
+            result['stdout'] = re.sub(r'^((\r)?\n)?SUDO-SUCCESS.*(\r)?\n', '', result['stdout'])
+
+        return result
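
The sudo-success filtering above can be exercised in isolation; a quick illustration of the marker being stripped from leaked stdout:

    import re

    stdout = 'SUDO-SUCCESS-abcdef0123456789\nactual module output\n'
    if stdout.strip().startswith('SUDO-SUCCESS-'):
        stdout = re.sub(r'^((\r)?\n)?SUDO-SUCCESS.*(\r)?\n', '', stdout)
    # stdout is now 'actual module output\n'
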
diff --git a/v2/ansible/plugins/action/script.py b/v2/ansible/plugins/action/script.py
new file mode 100644
index 0000000..ceccd71
--- /dev/null
+++ b/v2/ansible/plugins/action/script.py
@@ -0,0 +1,99 @@
+# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
+
+import os
+
+from ansible import constants as C
+from ansible.plugins.action import ActionBase
+
+class ActionModule(ActionBase):
+    TRANSFERS_FILES = True
+
+    def run(self, tmp=None, task_vars=None):
+        ''' handler for file transfer operations '''
+
+        # FIXME: noop stuff still needs to be sorted out
+        #if self.runner.noop_on_check(inject):
+        #    # in check mode, always skip this module
+        #    return ReturnData(conn=conn, comm_ok=True,
+        #                      result=dict(skipped=True, msg='check mode not supported for this module'))
+
+        if not tmp:
+            tmp = self._make_tmp_path()
+
+        creates = self._task.args.get('creates')
+        if creates:
+            # do not run the command if the line contains creates=filename
+            # and the filename already exists. This allows idempotence
+            # of command executions.
+            result = self._execute_module(module_name='stat', module_args=dict(path=creates), tmp=tmp, persist_files=True)
+            stat = result.get('stat', None)
+            if stat and stat.get('exists', False):
+                return dict(skipped=True, msg=("skipped, since %s exists" % creates))
+
+        removes = self._task.args.get('removes')
+        if removes:
+            # do not run the command if the line contains removes=filename
+            # and the filename does not exist. This allows idempotence
+            # of command executions.
+            result = self._execute_module(module_name='stat', module_args=dict(path=removes), tmp=tmp, persist_files=True)
+            stat = result.get('stat', None)
+            if stat and not stat.get('exists', False):
+                return dict(skipped=True, msg=("skipped, since %s does not exist" % removes))
+
+        # the script name is the first item in the raw params, so we split it
+        # out now so we know the file name we need to transfer to the remote,
+        # and everything else is an argument to the script which we need later
+        # to append to the remote command
+        parts  = self._task.args.get('_raw_params', '').strip().split()
+        source = parts[0]
+        args   = ' '.join(parts[1:])
+
+        # FIXME: need to sort out all the _original_file stuff still
+        #if '_original_file' in task_vars:
+        #    source = self._loader.path_dwim_relative(inject['_original_file'], 'files', source, self.runner.basedir)
+        #else:
+        #    source = self._loader.path_dwim(self.runner.basedir, source)
+        source = self._loader.path_dwim(source)
+
+        # transfer the file to a remote tmp location
+        tmp_src = self._shell.join_path(tmp, os.path.basename(source))
+        self._connection.put_file(source, tmp_src)
+
+        sudoable = True
+        # set file permissions, more permissive when the copy is done as a different user
+        if ((self._connection_info.sudo and self._connection_info.sudo_user != 'root') or
+            (self._connection_info.su and self._connection_info.su_user != 'root')):
+            chmod_mode = 'a+rx'
+            sudoable = False
+        else:
+            chmod_mode = '+rx'
+        self._remote_chmod(tmp, chmod_mode, tmp_src, sudoable=sudoable)
+
+        # add preparation steps to one ssh roundtrip executing the script
+        env_string = self._compute_environment_string()
+        script_cmd = ' '.join([env_string, tmp_src, args])
+        
+        result = self._low_level_execute_command(cmd=script_cmd, tmp=None, sudoable=sudoable)
+
+        # clean up after
+        if tmp and "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES:
+            self._remove_tmp_path(tmp)
+
+        result['changed'] = True
+
+        return result
diff --git a/v2/ansible/plugins/action/synchronize.py b/v2/ansible/plugins/action/synchronize.py
new file mode 100644
index 0000000..0699fdf
--- /dev/null
+++ b/v2/ansible/plugins/action/synchronize.py
@@ -0,0 +1,178 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+
+# (c) 2012-2013, Timothy Appnel <tim@appnel.com>
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
+
+import os.path
+
+from ansible.plugins.action import ActionBase
+from ansible.template import Templar
+from ansible.utils.boolean import boolean
+
+class ActionModule(ActionBase):
+
+    def _get_absolute_path(self, path, task_vars):
+        if 'vars' in task_vars:
+            if '_original_file' in task_vars['vars']:
+                # roles
+                original_path = path
+                path = self._loader.path_dwim_relative(task_vars['vars']['_original_file'], 'files', path)
+                if original_path and original_path[-1] == '/' and path[-1] != '/':
+                    # make sure the dwim'd path ends in a trailing "/"
+                    # if the original path did
+                    path += '/'
+
+        return path
+
+    def _process_origin(self, host, path, user, task_vars):
+
+        if host not in ['127.0.0.1', 'localhost']:
+            if user:
+                return '%s@%s:%s' % (user, host, path)
+            else:
+                return '%s:%s' % (host, path)
+        else:
+            if ':' not in path:
+                if not path.startswith('/'):
+                    path = self._get_absolute_path(path=path, task_vars=task_vars)
+            return path
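+    # illustrative example (hypothetical values): _process_origin('web1', '/srv/app/',
+    # 'deploy', task_vars) returns 'deploy@web1:/srv/app/', while a localhost origin
+    # is returned as a plain (possibly dwim'd) local path with no host prefix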
+
+    def _process_remote(self, host, path, user, task_vars):
+        transport = self._connection_info.connection
+        return_data = None
+        if host not in ['127.0.0.1', 'localhost'] or transport != "local":
+            if user:
+                return_data = '%s@%s:%s' % (user, host, path)
+            else:
+                return_data = '%s:%s' % (host, path)
+        else:
+            return_data = path
+
+        if ':' not in return_data:
+            if not return_data.startswith('/'):
+                return_data = self._get_absolute_path(path=return_data, task_vars=task_vars)
+
+        return return_data
+
+    def run(self, tmp=None, task_vars=dict()):
+        ''' generates params and passes them on to the rsync module '''
+
+        original_transport   = task_vars.get('ansible_connection', self._connection_info.connection)
+        original_sudo        = self._connection_info.sudo
+        transport_overridden = False
+        if task_vars.get('delegate_to') is None:
+            task_vars['delegate_to'] = '127.0.0.1'
+            # if the original transport is not local, override the transport and disable sudo
+            if original_transport != 'local':
+                task_vars['ansible_connection'] = 'local'
+                transport_overridden = True
+                self._connection_info.sudo = False
+
+        src  = self._task.args.get('src', None)
+        dest = self._task.args.get('dest', None)
+
+        # FIXME: this doesn't appear to be used anywhere?
+        local_rsync_path = task_vars.get('ansible_rsync_path')
+
+        # from the perspective of the rsync call the delegate is the localhost
+        src_host  = '127.0.0.1'
+        dest_host = task_vars.get('ansible_ssh_host', task_vars.get('inventory_hostname'))
+
+        # allow ansible_ssh_host to be templated
+        # FIXME: does this still need to be templated?
+        #dest_host = template.template(self.runner.basedir, dest_host, task_vars, fail_on_undefined=True)
+        dest_is_local = dest_host in ['127.0.0.1', 'localhost']
+
+        # CHECK FOR NON-DEFAULT SSH PORT
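+        # note: ansible_ssh_port falls back to inventory_hostname below, which acts
+        # as a 'not set' sentinel, so dest_port is only overridden when an explicit
+        # ssh port has actually been defined in inventory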
+        dest_port = self._task.args.get('dest_port')
+        inv_port  = task_vars.get('ansible_ssh_port', task_vars.get('inventory_hostname'))
+        if inv_port != dest_port and inv_port != task_vars.get('inventory_hostname'):
+            dest_port = inv_port
+
+        # edge case: explicit delegate and dest_host are the same
+        if dest_host == task_vars.get('delegate_to'):
+            dest_host = '127.0.0.1'
+
+        # SWITCH SRC AND DEST PER MODE
+        if self._task.args.get('mode', 'push') == 'pull':
+            (dest_host, src_host) = (src_host, dest_host)
+
+        # CHECK DELEGATE HOST INFO
+        use_delegate = False
+        # FIXME: not sure if this is in connection info yet or not...
+        #if conn.delegate != conn.host:
+        #    if 'hostvars' in task_vars:
+        #        if conn.delegate in task_vars['hostvars'] and self.original_transport != 'local':
+        #            # use a delegate host instead of localhost
+        #            use_delegate = True
+
+        # COMPARE DELEGATE, HOST AND TRANSPORT
+        process_args = False
+        if dest_host is not src_host and original_transport != 'local':
+            # interpret src and dest using the remote host info
+            process_args = True
+
+        # MUNGE SRC AND DEST PER REMOTE_HOST INFO
+        if process_args or use_delegate:
+
+            user = None
+            if boolean(self._task.args.get('set_remote_user', 'yes')):
+                if use_delegate:
+                    # FIXME: delegate handling is still stubbed out above, so this
+                    #        branch is not reachable yet
+                    user = task_vars['hostvars'][task_vars.get('delegate_to')].get('ansible_ssh_user')
+
+                if not use_delegate or not user:
+                    user = task_vars.get('ansible_ssh_user', self._connection_info.remote_user)
+
+            if use_delegate:
+                # FIXME: this should use the delegate host's key once delegate
+                #        handling is sorted out
+                private_key = task_vars.get('ansible_ssh_private_key_file', self._connection_info.private_key_file)
+            else:
+                private_key = task_vars.get('ansible_ssh_private_key_file', self._connection_info.private_key_file)
+
+            templar = Templar(basedir=self._loader.get_basedir(), variables=task_vars)
+            private_key = templar.template(private_key)
+
+            if private_key is not None:
+                private_key = os.path.expanduser(private_key)
+
+            # use the mode to define src and dest's url
+            if self._task.args.get('mode', 'push') == 'pull':
+                # src is a remote path: <user>@<host>, dest is a local path
+                src  = self._process_remote(src_host, src, user, task_vars)
+                dest = self._process_origin(dest_host, dest, user, task_vars)
+            else:
+                # src is a local path, dest is a remote path: <user>@<host>
+                src  = self._process_origin(src_host, src, user, task_vars)
+                dest = self._process_remote(dest_host, dest, user, task_vars)
+
+        # Allow custom rsync path argument.
+        rsync_path = self._task.args.get('rsync_path', None)
+
+        # If no rsync_path is set, sudo was originally set, and dest is remote, then add the 'sudo rsync' argument.
+        if not rsync_path and transport_overridden and original_sudo and not dest_is_local:
+            self._task.args['rsync_path'] = 'sudo rsync'
+
+        # make sure the rsync path is quoted and passed on to the module
+        if rsync_path:
+            self._task.args['rsync_path'] = '"%s"' % rsync_path
+
+        # FIXME: noop stuff still needs to be figured out
+        #module_args = ""
+        #if self.runner.noop_on_check(task_vars):
+        #    module_args = "CHECKMODE=True"
+
+        # write the munged src/dest back into the task args, then run the
+        # module and store the result
+        self._task.args['src']  = src
+        self._task.args['dest'] = dest
+        result = self._execute_module(module_name='synchronize', module_args=self._task.args, tmp=tmp)
+
+        return result
+
diff --git a/v2/ansible/plugins/action/template.py b/v2/ansible/plugins/action/template.py
new file mode 100644
index 0000000..82dbedf
--- /dev/null
+++ b/v2/ansible/plugins/action/template.py
@@ -0,0 +1,165 @@
+# (c) 2015, Michael DeHaan <michael.dehaan@gmail.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
+
+import base64
+import os
+
+from ansible.plugins.action import ActionBase
+from ansible.template import Templar
+from ansible.utils.hashing import checksum_s
+
+class ActionModule(ActionBase):
+
+    TRANSFERS_FILES = True
+
+    def run(self, tmp=None, task_vars=dict()):
+        ''' handler for template operations '''
+
+        source = self._task.args.get('src', None)
+        dest   = self._task.args.get('dest', None)
+
+        if (source is None and 'first_available_file' not in task_vars) or dest is None:
+            return dict(failed=True, msg="src and dest are required")
+
+        if tmp is None:
+            tmp = self._make_tmp_path()
+
+        ##################################################################################################
+        # FIXME: this all needs to be sorted out
+        ##################################################################################################
+        # if we have first_available_file in our vars
+        # look up the files and use the first one we find as src
+        #if 'first_available_file' in task_vars:
+        #    found = False
+        #    for fn in self.runner.module_vars.get('first_available_file'):
+        #        fn_orig = fn
+        #        fnt = template.template(self.runner.basedir, fn, task_vars)
+        #        fnd = utils.path_dwim(self.runner.basedir, fnt)
+        #        if not os.path.exists(fnd) and '_original_file' in task_vars:
+        #            fnd = utils.path_dwim_relative(task_vars['_original_file'], 'templates', fnt, self.runner.basedir, check=False)
+        #        if os.path.exists(fnd):
+        #            source = fnd
+        #            found = True
+        #            break
+        #    if not found:
+        #        result = dict(failed=True, msg="could not find src in first_available_file list")
+        #        return ReturnData(conn=conn, comm_ok=False, result=result)
+        #else:
+        #    source = template.template(self.runner.basedir, source, task_vars)
+        #        
+        #    if '_original_file' in task_vars:
+        #        source = utils.path_dwim_relative(task_vars['_original_file'], 'templates', source, self.runner.basedir)
+        #    else:
+        #        source = utils.path_dwim(self.runner.basedir, source)
+        ##################################################################################################
+        source = self._loader.path_dwim(source)
+        ##################################################################################################
+
+        # Expand any user home dir specification
+        dest = self._remote_expand_user(dest, tmp)
+
+        if dest.endswith("/"): # CCTODO: Fix path for Windows hosts.
+            base = os.path.basename(source)
+            dest = os.path.join(dest, base)
+
+        # template the source data locally & get ready to transfer
+        templar = Templar(basedir=self._loader.get_basedir(), variables=task_vars)
+        try:
+            with open(source, 'r') as f:
+                template_data = f.read()
+            resultant = templar.template(template_data, preserve_trailing_newlines=True)
+        except Exception, e:
+            return dict(failed=True, msg=type(e).__name__ + ": " + str(e))
+
+        local_checksum = checksum_s(resultant)
+        remote_checksum = self._remote_checksum(tmp, dest)
+
+        if remote_checksum in ('0', '2', '3', '4'):
+            # Note: 1 means the file is not present which is fine; template will create it
+            return dict(failed=True, msg="failed to checksum remote file. Checksum error code: %s" % remote_checksum)
+
+        if local_checksum != remote_checksum:
+            # if showing diffs, we need to get the remote value
+            dest_contents = ''
+
+            # FIXME: still need to implement diff mechanism
+            #if self.runner.diff:
+            #    # using persist_files to keep the temp directory around to avoid needing to grab another
+            #    dest_result = self.runner._execute_module(conn, tmp, 'slurp', "path=%s" % dest, task_vars=task_vars, persist_files=True)
+            #    if 'content' in dest_result.result:
+            #        dest_contents = dest_result.result['content']
+            #        if dest_result.result['encoding'] == 'base64':
+            #            dest_contents = base64.b64decode(dest_contents)
+            #        else:
+            #            raise Exception("unknown encoding, failed: %s" % dest_result.result)
+ 
+            xfered = self._transfer_data(self._shell.join_path(tmp, 'source'), resultant)
+
+            # fix file permissions when the copy is done as a different user
+            if ((self._connection_info.sudo and self._connection_info.sudo_user != 'root') or
+                (self._connection_info.su and self._connection_info.su_user != 'root')):
+                self._remote_chmod(tmp, 'a+r', xfered)
+
+            # run the copy module
+            new_module_args = self._task.args.copy()
+            new_module_args.update(
+               dict(
+                   src=xfered,
+                   dest=dest,
+                   original_basename=os.path.basename(source),
+                   follow=True,
+                ),
+            )
+
+            # FIXME: noop stuff needs to be sorted out
+            #if self.runner.noop_on_check(task_vars):
+            #    return ReturnData(conn=conn, comm_ok=True, result=dict(changed=True), diff=dict(before_header=dest, after_header=source, before=dest_contents, after=resultant))
+            #else:
+            #    res = self.runner._execute_module(conn, tmp, 'copy', module_args_tmp, task_vars=task_vars, complex_args=complex_args)
+            #    if res.result.get('changed', False):
+            #        res.diff = dict(before=dest_contents, after=resultant)
+            #    return res
+
+            result = self._execute_module(module_name='copy', module_args=new_module_args)
+            if result.get('changed', False):
+                result['diff'] = dict(before=dest_contents, after=resultant)
+            return result
+
+        else:
+            # when running the file module based on the template data, we do
+            # not want the source filename (the name of the template) to be used,
+            # since this would mess up links, so we clear the src param and tell
+            # the module to follow links.  When doing that, we have to set
+            # original_basename to the template just in case the dest is
+            # a directory.
+            new_module_args = self._task.args.copy()
+            new_module_args.update(
+                dict(
+                    src=None,
+                    original_basename=os.path.basename(source),
+                    follow=True,
+                ),
+            )
+
+            # FIXME: this may not be required anymore, as the check mode params
+            #        should be in the regular module args?
+            # be sure to pass the check mode param into the module args and
+            # rely on the file module to report its changed status
+            #if self.runner.noop_on_check(task_vars):
+            #    new_module_args['CHECKMODE'] = True
+
+            return self._execute_module(module_name='file', module_args=new_module_args)
+
diff --git a/v2/ansible/plugins/action/unarchive.py b/v2/ansible/plugins/action/unarchive.py
new file mode 100644
index 0000000..fab0843
--- /dev/null
+++ b/v2/ansible/plugins/action/unarchive.py
@@ -0,0 +1,118 @@
+# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
+# (c) 2013, Dylan Martin <dmartin@seattlecentral.edu>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
+
+import os
+
+from ansible.plugins.action import ActionBase
+from ansible.utils.boolean import boolean
+
+## fixes https://github.com/ansible/ansible/issues/3518
+# http://mypy.pythonblogs.com/12_mypy/archive/1253_workaround_for_python_bug_ascii_codec_cant_encode_character_uxa0_in_position_111_ordinal_not_in_range128.html
+import sys
+reload(sys)
+sys.setdefaultencoding("utf8")
+import pipes
+
+
+class ActionModule(ActionBase):
+
+    TRANSFERS_FILES = True
+
+    def run(self, tmp=None, task_vars=dict()):
+        ''' handler for unarchive operations '''
+
+        source  = self._task.args.get('src', None)
+        dest    = self._task.args.get('dest', None)
+        copy    = boolean(self._task.args.get('copy', True))
+        creates = self._task.args.get('creates', None)
+
+        if source is None or dest is None:
+            return dict(failed=True, msg="src (or content) and dest are required")
+
+        if not tmp:
+            tmp = self._make_tmp_path()
+
+        if creates:
+            # do not run the command if the line contains creates=filename
+            # and the filename already exists. This allows idempotence
+            # of command executions.
+            result = self._execute_module(module_name='stat', module_args=dict(path=creates))
+            stat = result.get('stat', None)
+            if stat and stat.get('exists', False):
+                return dict(skipped=True, msg=("skipped, since %s exists" % creates))
+
+        dest = self._remote_expand_user(dest, tmp) # CCTODO: Fix path for Windows hosts.
+        source = os.path.expanduser(source)
+
+        if copy:
+            # FIXME: the original file stuff needs to be reworked
+            if '_original_file' in task_vars:
+                source = self._loader.path_dwim_relative(task_vars['_original_file'], 'files', source)
+            else:
+                source = self._loader.path_dwim(source)
+
+        remote_checksum = self._remote_checksum(tmp, dest)
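+        # _remote_checksum returns single-character error codes; judging from the
+        # messages below (and the template plugin in this same series), '3' means
+        # the path is an existing directory and '4' means python is missing remotely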
+        if remote_checksum == '4':
+            return dict(failed=True, msg="python isn't present on the system.  Unable to compute checksum")
+        elif remote_checksum != '3':
+            return dict(failed=True, msg="dest '%s' must be an existing dir" % dest)
+
+        if copy:
+            # transfer the file to a remote tmp location
+            tmp_src = self._shell.join_path(tmp, 'source')
+            self._connection.put_file(source, tmp_src)
+
+        # handle diff mode client side
+        # handle check mode client side
+        # fix file permissions when the copy is done as a different user
+        if copy:
+            if ((self._connection_info.sudo and self._connection_info.sudo_user != 'root') or
+                (self._connection_info.su and self._connection_info.su_user != 'root')):
+                # FIXME: noop stuff needs to be reworked
+                #if not self.runner.noop_on_check(task_vars):
+                #    self.runner._remote_chmod(conn, 'a+r', tmp_src, tmp)
+                self._remote_chmod(tmp, 'a+r', tmp_src)
+
+            # Build temporary module_args.
+            new_module_args = self._task.args.copy()
+            new_module_args.update(
+                dict(
+                    src=tmp_src,
+                    original_basename=os.path.basename(source),
+                ),
+            )
+
+            # make sure check mode is passed on correctly
+            # FIXME: noop again, probably doesn't need to be done here anymore?
+            #if self.runner.noop_on_check(task_vars):
+            #    new_module_args['CHECKMODE'] = True
+
+        else:
+            new_module_args = self._task.args.copy()
+            new_module_args.update(
+                dict(
+                    original_basename=os.path.basename(source),
+                ),
+            )
+            # make sure check mode is passed on correctly
+            # FIXME: noop again, probably doesn't need to be done here anymore?
+            #if self.runner.noop_on_check(task_vars):
+            #    module_args += " CHECKMODE=True"
+
+        # execute the unarchive module now, with the updated args
+        return self._execute_module(module_name='unarchive', module_args=new_module_args)
+
diff --git a/v2/ansible/plugins/strategies/__init__.py b/v2/ansible/plugins/strategies/__init__.py
index 4de83d8..cb290dd 100644
--- a/v2/ansible/plugins/strategies/__init__.py
+++ b/v2/ansible/plugins/strategies/__init__.py
@@ -23,6 +23,10 @@ import Queue
 import time
 
 from ansible.errors import *
+
+from ansible.inventory.host import Host
+from ansible.inventory.group import Group
+
 from ansible.playbook.helpers import compile_block_list
 from ansible.playbook.role import ROLE_CACHE
 from ansible.utils.debug import debug
@@ -154,6 +158,19 @@ class StrategyBase:
                             if entry == hashed_entry :
                                 role_obj._had_task_run = True
 
+                elif result[0] == 'add_host':
+                    task_result = result[1]
+                    new_host_info = task_result._result.get('add_host', dict())
+
+                    self._add_host(new_host_info)
+
+                elif result[0] == 'add_group':
+                    task_result = result[1]
+                    host        = task_result._host
+                    group_name  = task_result._result.get('add_group')
+
+                    self._add_group(host, group_name)
+
                 elif result[0] == 'notify_handler':
                     handler_name = result[1]
                     host         = result[2]
@@ -189,6 +206,74 @@ class StrategyBase:
                 break
             time.sleep(0.01)
 
+    def _add_host(self, host_info):
+        '''
+        Helper function to add a new host to inventory based on a task result.
+        '''
+
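+        # expected (assumed) shape of host_info, based on the keys read below and
+        # on what the add_host action plugin reports, with illustrative values:
+        #   {'host_name': 'web1',
+        #    'host_vars': {'ansible_ssh_host': '192.0.2.10'},
+        #    'groups': ['webservers']}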
+        host_name = host_info.get('host_name')
+
+        # Check if host in cache, add if not
+        if host_name in self._inventory._hosts_cache:
+            new_host = self._inventory._hosts_cache[host_name]
+        else:
+            new_host = Host(host_name)
+            self._inventory._hosts_cache[host_name] = new_host
+
+            allgroup = self._inventory.get_group('all')
+            allgroup.add_host(new_host)
+
+        # Set/update the vars for this host
+        # FIXME: probably should have a set vars method for the host?
+        new_vars = host_info.get('host_vars', dict())
+        new_host.vars.update(new_vars)
+
+        new_groups = host_info.get('groups', [])
+        for group_name in new_groups:
+            if not self._inventory.get_group(group_name):
+                new_group = Group(group_name)
+                self._inventory.add_group(new_group)
+                new_group.vars = self._inventory.get_group_variables(group_name)
+            else:
+                new_group = self._inventory.get_group(group_name)
+
+            new_group.add_host(new_host)
+
+            # add this host to the group cache
+            if self._inventory._groups_list is not None:
+                if group_name in self._inventory._groups_list:
+                    if new_host.name not in self._inventory._groups_list[group_name]:
+                        self._inventory._groups_list[group_name].append(new_host.name)
+
+        # clear pattern caching completely since it's unpredictable what
+        # patterns may have referenced the group
+        # FIXME: is this still required?
+        self._inventory.clear_pattern_cache()
+
+    def _add_group(self, host, group_name):
+        '''
+        Helper function to add a group (if it does not exist), and to assign the
+        specified host to that group.
+        '''
+
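+        # illustrative flow (hypothetical values): a group_by task that resolves to
+        # 'debian_hosts' on host 'web1' arrives here as _add_group(<web1>, 'debian_hosts'),
+        # creating the group if needed and attaching the master-inventory host to it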
+        new_group = self._inventory.get_group(group_name)
+        if not new_group:
+            # create the new group and add it to inventory
+            new_group = Group(group_name)
+            self._inventory.add_group(new_group)
+
+            # and add the group to the proper hierarchy
+            allgroup = self._inventory.get_group('all')
+            allgroup.add_child_group(new_group)
+
+        # the host here is from the executor side, which means it was a
+        # serialized/cloned copy and we'll need to look up the proper
+        # host object from the master inventory
+        actual_host = self._inventory.get_host(host.name)
+
+        # and add the host to the group
+        new_group.add_host(actual_host)
+
     def cleanup(self, iterator, connection_info):
         '''
         Iterates through failed hosts and runs any outstanding rescue/always blocks
diff --git a/v2/ansible/template/__init__.py b/v2/ansible/template/__init__.py
index e05ecc3..74b1117 100644
--- a/v2/ansible/template/__init__.py
+++ b/v2/ansible/template/__init__.py
@@ -109,7 +109,7 @@ class Templar:
         assert isinstance(variables, dict)
         self._available_variables = variables.copy()
 
-    def template(self, variable, convert_bare=False):
+    def template(self, variable, convert_bare=False, preserve_trailing_newlines=False):
         '''
         Templates (possibly recursively) any given data as input. If convert_bare is
         set to True, the given data will be wrapped as a jinja2 variable ('{{foo}}')
@@ -123,7 +123,7 @@ class Templar:
             if isinstance(variable, basestring):
                 result = variable
                 if self._contains_vars(variable):
-                    result = self._do_template(variable)
+                    result = self._do_template(variable, preserve_trailing_newlines=preserve_trailing_newlines)
     
                     # if this looks like a dictionary or list, convert it to such using the safe_eval method
                     if (result.startswith("{") and not result.startswith("{{")) or result.startswith("["):
@@ -196,7 +196,7 @@ class Templar:
         else:
             raise AnsibleError("lookup plugin (%s) not found" % name)
 
-    def _do_template(self, data):
+    def _do_template(self, data, preserve_trailing_newlines=False):
 
         try:
             # FIXME: is this even required? it seems to conflict with the lines
@@ -251,6 +251,17 @@ class Templar:
                     )
                 else:
                     raise AnsibleError("an unexpected type error occurred. Error was %s" % te)
+
+            if preserve_trailing_newlines:
+                # The low level calls above do not preserve the newline
+                # characters at the end of the input data, so we calculate the
+                # difference in newlines here and append them to the resulting
+                # output for parity
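+                # e.g. (illustrative): if 'data' ends with "\n\n" but the rendered
+                # 'res' kept only a single "\n", exactly one newline is re-appended below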
+                res_newlines  = self._count_newlines_from_end(res)
+                data_newlines = self._count_newlines_from_end(data)
+                if data_newlines > res_newlines:
+                    res += '\n' * (data_newlines - res_newlines)
+
             return res
         except UndefinedError, AnsibleUndefinedVariable:
             if self._fail_on_undefined_errors:
diff --git a/v2/hacking/env-setup b/v2/hacking/env-setup
index e197041..854d870 100755
--- a/v2/hacking/env-setup
+++ b/v2/hacking/env-setup
@@ -16,7 +16,7 @@ FULL_PATH=`python -c "import os; print(os.path.realpath('$HACKING_DIR'))"`
 ANSIBLE_HOME=`dirname "$FULL_PATH"`
 
 PREFIX_PYTHONPATH="$ANSIBLE_HOME"
-PREFIX_PATH="$ANSIBLE_HOME"
+PREFIX_PATH="$ANSIBLE_HOME/bin"
 PREFIX_MANPATH="$ANSIBLE_HOME/docs/man"
 
 [[ $PYTHONPATH != ${PREFIX_PYTHONPATH}* ]] && export PYTHONPATH=$PREFIX_PYTHONPATH:$PYTHONPATH
diff --git a/v2/test/playbook/test_task.py b/v2/test/playbook/test_task.py
index 0af53c9..b2160e0 100644
--- a/v2/test/playbook/test_task.py
+++ b/v2/test/playbook/test_task.py
@@ -60,7 +60,6 @@ class TestTask(unittest.TestCase):
 
     def test_load_task_kv_form(self):
         t = Task.load(kv_shell_task)
-        print("task action is %s" % t.action)
         self.assertEqual(t.action, 'command')
         self.assertEqual(t.args, dict(_raw_params='echo hi', _uses_shell=True))
 
--
libgit2 0.26.0