diff -Nru ansible-2.3.2.0/bin/ansible ansible-2.4.0.0/bin/ansible --- ansible-2.3.2.0/bin/ansible 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/bin/ansible 2017-09-19 17:10:47.000000000 +0000 @@ -37,9 +37,6 @@ import sys import traceback -# for debug -from multiprocessing import Lock - import ansible.constants as C from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError from ansible.utils.display import Display @@ -47,7 +44,7 @@ ######################################## -### OUTPUT OF LAST RESORT ### +# OUTPUT OF LAST RESORT class LastResort(object): def display(self, msg): print(msg, file=sys.stderr) @@ -71,7 +68,7 @@ sub = None target = me.split('-') if target[-1][0].isdigit(): - # Remove any version or pthon version info as downstreams + # Remove any version or python version info as downstreams # sometimes add that target = target[:-1] @@ -130,7 +127,7 @@ exit_code = 99 except Exception as e: have_cli_options = cli is not None and cli.options is not None - display.error("Unexpected Exception: %s" % to_text(e), wrap_text=False) + display.error("Unexpected Exception, this is probably a bug: %s" % to_text(e), wrap_text=False) if not have_cli_options or have_cli_options and cli.options.verbosity > 2: log_only = False else: diff -Nru ansible-2.3.2.0/bin/ansible-connection ansible-2.4.0.0/bin/ansible-connection --- ansible-2.3.2.0/bin/ansible-connection 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/bin/ansible-connection 2017-09-19 17:10:47.000000000 +0000 @@ -1,6 +1,6 @@ #!/usr/bin/env python -# (c) 2016, Ansible, Inc. +# (c) 2017, Ansible, Inc. # # This file is part of Ansible # @@ -33,21 +33,19 @@ import shlex import signal import socket -import struct import sys import time import traceback -import syslog import datetime -import logging - -from io import BytesIO +import errno from ansible import constants as C -from ansible.module_utils._text import to_bytes, to_native -from ansible.module_utils.six.moves import cPickle, StringIO +from ansible.module_utils._text import to_bytes, to_native, to_text +from ansible.module_utils.six import PY3 +from ansible.module_utils.six.moves import cPickle +from ansible.module_utils.connection import send_data, recv_data from ansible.playbook.play_context import PlayContext -from ansible.plugins import connection_loader +from ansible.plugins.loader import connection_loader from ansible.utils.path import unfrackpath, makedirs_safe from ansible.errors import AnsibleConnectionFailure from ansible.utils.display import Display @@ -62,8 +60,9 @@ pid = os.fork() if pid > 0: return pid - - #os.chdir("/") + # This is done as a 'good practice' for daemons, but we need to keep the cwd + # leaving it here as a note that we KNOW its good practice but are not doing it on purpose. 
+ # os.chdir("/") os.setsid() os.umask(0) @@ -73,11 +72,11 @@ sys.exit(0) if C.DEFAULT_LOG_PATH != '': - out_file = file(C.DEFAULT_LOG_PATH, 'a+') - err_file = file(C.DEFAULT_LOG_PATH, 'a+', 0) + out_file = open(C.DEFAULT_LOG_PATH, 'ab+') + err_file = open(C.DEFAULT_LOG_PATH, 'ab+', 0) else: - out_file = file('/dev/null', 'a+') - err_file = file('/dev/null', 'a+', 0) + out_file = open('/dev/null', 'ab+') + err_file = open('/dev/null', 'ab+', 0) os.dup2(out_file.fileno(), sys.stdout.fileno()) os.dup2(err_file.fileno(), sys.stderr.fileno()) @@ -89,33 +88,11 @@ except OSError as e: sys.exit(1) -def send_data(s, data): - packed_len = struct.pack('!Q',len(data)) - return s.sendall(packed_len + data) - -def recv_data(s): - header_len = 8 # size of a packed unsigned long long - data = b"" - while len(data) < header_len: - d = s.recv(header_len - len(data)) - if not d: - return None - data += d - data_len = struct.unpack('!Q',data[:header_len])[0] - data = data[header_len:] - while len(data) < data_len: - d = s.recv(data_len - len(data)) - if not d: - return None - data += d - return data - class Server(): - def __init__(self, path, play_context): - - self.path = path + def __init__(self, socket_path, play_context): + self.socket_path = socket_path self.play_context = play_context display.display( @@ -124,178 +101,219 @@ log_only=True ) - display.display('control socket path is %s' % path, log_only=True) + display.display('control socket path is %s' % socket_path, log_only=True) display.display('current working directory is %s' % os.getcwd(), log_only=True) self._start_time = datetime.datetime.now() display.display("using connection plugin %s" % self.play_context.connection, log_only=True) - self.conn = connection_loader.get(play_context.connection, play_context, sys.stdin) - self.conn._connect() - if not self.conn.connected: + self.connection = connection_loader.get(play_context.connection, play_context, sys.stdin) + self.connection._connect() + + if not self.connection.connected: raise AnsibleConnectionFailure('unable to connect to remote host %s' % self._play_context.remote_addr) connection_time = datetime.datetime.now() - self._start_time display.display('connection established to %s in %s' % (play_context.remote_addr, connection_time), log_only=True) self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - self.socket.bind(path) + self.socket.bind(self.socket_path) self.socket.listen(1) - - signal.signal(signal.SIGALRM, self.alarm_handler) - - def dispatch(self, obj, name, *args, **kwargs): - meth = getattr(obj, name, None) - if meth: - return meth(*args, **kwargs) - - def alarm_handler(self, signum, frame): - ''' - Alarm handler - ''' - # FIXME: this should also set internal flags for other - # areas of code to check, so they can terminate - # earlier than the socket going back to the accept - # call and failing there. - # - # hooks the connection plugin to handle any cleanup - self.dispatch(self.conn, 'alarm_handler', signum, frame) - self.socket.close() + display.display('local socket is set to listening', log_only=True) def run(self): try: while True: - # set the alarm, if we don't get an accept before it - # goes off we exit (via an exception caused by the socket - # getting closed while waiting on accept()) - # FIXME: is this the best way to exit? 
as noted above in the - # handler we should probably be setting a flag to check - # here and in other parts of the code + signal.signal(signal.SIGALRM, self.connect_timeout) + signal.signal(signal.SIGTERM, self.handler) signal.alarm(C.PERSISTENT_CONNECT_TIMEOUT) - try: - (s, addr) = self.socket.accept() - display.display('incoming request accepted on persistent socket', log_only=True) - # clear the alarm - # FIXME: potential race condition here between the accept and - # time to this call. - signal.alarm(0) - except: - break + + (s, addr) = self.socket.accept() + display.display('incoming request accepted on persistent socket', log_only=True) + signal.alarm(0) while True: data = recv_data(s) if not data: break + signal.signal(signal.SIGALRM, self.command_timeout) signal.alarm(self.play_context.timeout) + op = to_text(data.split(b':')[0]) + display.display('socket operation is %s' % op, log_only=True) + + method = getattr(self, 'do_%s' % op, None) + rc = 255 - try: - if data.startswith(b'EXEC: '): - display.display("socket operation is EXEC", log_only=True) - cmd = data.split(b'EXEC: ')[1] - (rc, stdout, stderr) = self.conn.exec_command(cmd) - elif data.startswith(b'PUT: ') or data.startswith(b'FETCH: '): - (op, src, dst) = shlex.split(to_native(data)) - stdout = stderr = '' - try: - if op == 'FETCH:': - display.display("socket operation is FETCH", log_only=True) - self.conn.fetch_file(src, dst) - elif op == 'PUT:': - display.display("socket operation is PUT", log_only=True) - self.conn.put_file(src, dst) - rc = 0 - except: - pass - elif data.startswith(b'CONTEXT: '): - display.display("socket operation is CONTEXT", log_only=True) - pc_data = data.split(b'CONTEXT: ')[1] - - src = StringIO(pc_data) - pc_data = cPickle.load(src) - src.close() - - pc = PlayContext() - pc.deserialize(pc_data) - - self.dispatch(self.conn, 'update_play_context', pc) - continue - else: - display.display("socket operation is UNKNOWN", log_only=True) - stdout = '' - stderr = 'Invalid action specified' - except: - stdout = '' - stderr = traceback.format_exc() + stdout = stderr = '' + + if not method: + stderr = 'Invalid action specified' + else: + rc, stdout, stderr = method(data) signal.alarm(0) - display.display("socket operation completed with rc %s" % rc, log_only=True) + display.display('socket operation completed with rc %s' % rc, log_only=True) - send_data(s, to_bytes(str(rc))) + send_data(s, to_bytes(rc)) send_data(s, to_bytes(stdout)) send_data(s, to_bytes(stderr)) + s.close() + except Exception as e: - display.display(traceback.format_exec(), log_only=True) + # socket.accept() will raise EINTR if the socket.close() is called + if e.errno != errno.EINTR: + display.display(traceback.format_exc(), log_only=True) + finally: # when done, close the connection properly and cleanup # the socket file so it can be recreated + self.shutdown() end_time = datetime.datetime.now() delta = end_time - self._start_time - display.display('shutting down control socket, connection was active for %s secs' % delta, log_only=True) - try: - self.conn.close() + display.display('shutdown local socket, connection was active for %s secs' % delta, log_only=True) + + def connect_timeout(self, signum, frame): + display.display('persistent connection idle timeout triggered, timeout value is %s secs' % C.PERSISTENT_CONNECT_TIMEOUT, log_only=True) + self.shutdown() + + def command_timeout(self, signum, frame): + display.display('command timeout triggered, timeout value is %s secs' % self.play_context.timeout, log_only=True) + self.shutdown() 
+ + def handler(self, signum, frame): + display.display('signal handler called with signal %s' % signum, log_only=True) + self.shutdown() + + def shutdown(self): + display.display('shutdown persistent connection requested', log_only=True) + + if not os.path.exists(self.socket_path): + display.display('persistent connection is not active', log_only=True) + return + + try: + if self.socket: + display.display('closing local listener', log_only=True) self.socket.close() - except Exception as e: - pass - os.remove(self.path) + if self.connection: + display.display('closing the connection', log_only=True) + self.connection.close() + except: + pass + finally: + if os.path.exists(self.socket_path): + display.display('removing the local control socket', log_only=True) + os.remove(self.socket_path) + + display.display('shutdown complete', log_only=True) + + def do_EXEC(self, data): + cmd = data.split(b'EXEC: ')[1] + display.display('Command executed: %s' % cmd, log_only=True) + return self.connection.exec_command(cmd) + + def do_PUT(self, data): + (op, src, dst) = shlex.split(to_native(data)) + return self.connection.fetch_file(src, dst) + + def do_FETCH(self, data): + (op, src, dst) = shlex.split(to_native(data)) + return self.connection.put_file(src, dst) + + def do_CONTEXT(self, data): + pc_data = data.split(b'CONTEXT: ', 1)[1] + + if PY3: + pc_data = cPickle.loads(pc_data, encoding='bytes') + else: + pc_data = cPickle.loads(pc_data) + + pc = PlayContext() + pc.deserialize(pc_data) + + try: + self.connection.update_play_context(pc) + except AttributeError: + pass + + return (0, 'ok', '') + + def do_RUN(self, data): + timeout = self.play_context.timeout + while bool(timeout): + if os.path.exists(self.socket_path): + break + time.sleep(1) + timeout -= 1 + socket_bytes = to_bytes(self.socket_path, errors='surrogate_or_strict') + return 0, b'\n#SOCKET_PATH#: %s\n' % socket_bytes, '' + + +def communicate(sock, data): + send_data(sock, data) + rc = int(recv_data(sock), 10) + stdout = recv_data(sock) + stderr = recv_data(sock) + return (rc, stdout, stderr) + def main(): + # Need stdin as a byte stream + if PY3: + stdin = sys.stdin.buffer + else: + stdin = sys.stdin try: # read the play context data via stdin, which means depickling it # FIXME: as noted above, we will probably need to deserialize the # connection loader here as well at some point, otherwise this # won't find role- or playbook-based connection plugins - cur_line = sys.stdin.readline() - init_data = '' - while cur_line.strip() != '#END_INIT#': - if cur_line == '': - raise Exception("EOL found before init data was complete") + cur_line = stdin.readline() + init_data = b'' + while cur_line.strip() != b'#END_INIT#': + if cur_line == b'': + raise Exception("EOF found before init data was complete") init_data += cur_line - cur_line = sys.stdin.readline() - src = BytesIO(to_bytes(init_data)) - pc_data = cPickle.load(src) + cur_line = stdin.readline() + if PY3: + pc_data = cPickle.loads(init_data, encoding='bytes') + else: + pc_data = cPickle.loads(init_data) pc = PlayContext() pc.deserialize(pc_data) + except Exception as e: # FIXME: better error message/handling/logging sys.stderr.write(traceback.format_exc()) sys.exit("FAIL: %s" % e) ssh = connection_loader.get('ssh', class_only=True) - m = ssh._create_control_path(pc.remote_addr, pc.port, pc.remote_user) + cp = ssh._create_control_path(pc.remote_addr, pc.port, pc.remote_user, pc.connection) # create the persistent connection dir if need be and create the paths # which we will be using later - 
tmp_path = unfrackpath("$HOME/.ansible/pc") + tmp_path = unfrackpath(C.PERSISTENT_CONTROL_PATH_DIR) makedirs_safe(tmp_path) - lk_path = unfrackpath("%s/.ansible_pc_lock" % tmp_path) - sf_path = unfrackpath(m % dict(directory=tmp_path)) + lock_path = unfrackpath("%s/.ansible_pc_lock" % tmp_path) + socket_path = unfrackpath(cp % dict(directory=tmp_path)) # if the socket file doesn't exist, spin up the daemon process - lock_fd = os.open(lk_path, os.O_RDWR|os.O_CREAT, 0o600) + lock_fd = os.open(lock_path, os.O_RDWR | os.O_CREAT, 0o600) fcntl.lockf(lock_fd, fcntl.LOCK_EX) - if not os.path.exists(sf_path): + + if not os.path.exists(socket_path): pid = do_fork() if pid == 0: rc = 0 try: - server = Server(sf_path, pc) + server = Server(socket_path, pc) except AnsibleConnectionFailure as exc: display.display('connecting to host %s returned an error' % pc.remote_addr, log_only=True) display.display(str(exc), log_only=True) @@ -311,51 +329,57 @@ sys.exit(rc) else: display.display('re-using existing socket for %s@%s:%s' % (pc.remote_user, pc.remote_addr, pc.port), log_only=True) + fcntl.lockf(lock_fd, fcntl.LOCK_UN) os.close(lock_fd) + timeout = pc.timeout + while bool(timeout): + if os.path.exists(socket_path): + display.vvvv('connected to local socket in %s' % (pc.timeout - timeout), pc.remote_addr) + break + time.sleep(1) + timeout -= 1 + else: + raise AnsibleConnectionFailure('timeout waiting for local socket', pc.remote_addr) + # now connect to the daemon process # FIXME: if the socket file existed but the daemonized process was killed, # the connection will timeout here. Need to make this more resilient. - rc = 0 - while rc == 0: - data = sys.stdin.readline() - if data == '': + while True: + data = stdin.readline() + if data == b'': break - if data.strip() == '': + if data.strip() == b'': continue - sf = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) - attempts = 1 - while True: + + sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) + + connect_retry_timeout = C.PERSISTENT_CONNECT_RETRY_TIMEOUT + while bool(connect_retry_timeout): try: - sf.connect(sf_path) + sock.connect(socket_path) break except socket.error: - # FIXME: better error handling/logging/message here - time.sleep(C.PERSISTENT_CONNECT_INTERVAL) - attempts += 1 - if attempts > C.PERSISTENT_CONNECT_RETRIES: - display.display('number of connection attempts exceeded, unable to connect to control socket', pc.remote_addr, pc.remote_user, log_only=True) - display.display('persistent_connect_interval=%s, persistent_connect_retries=%s' % (C.PERSISTENT_CONNECT_INTERVAL, C.PERSISTENT_CONNECT_RETRIES), pc.remote_addr, pc.remote_user, log_only=True) - sys.stderr.write('failed to connect to control socket') - sys.exit(255) + time.sleep(1) + connect_retry_timeout -= 1 + else: + display.display('connect retry timeout expired, unable to connect to control socket', pc.remote_addr, pc.remote_user, log_only=True) + display.display('persistent_connect_retry_timeout is %s secs' % (C.PERSISTENT_CONNECT_RETRY_TIMEOUT), pc.remote_addr, pc.remote_user, log_only=True) + sys.stderr.write('failed to connect to control socket') + sys.exit(255) # send the play_context back into the connection so the connection # can handle any privilege escalation activities - pc_data = 'CONTEXT: %s' % src.getvalue() - send_data(sf, to_bytes(pc_data)) - src.close() - - send_data(sf, to_bytes(data.strip())) - - rc = int(recv_data(sf), 10) - stdout = recv_data(sf) - stderr = recv_data(sf) + pc_data = b'CONTEXT: %s' % init_data + communicate(sock, pc_data) + + rc, stdout, 
stderr = communicate(sock, data.strip()) sys.stdout.write(to_native(stdout)) sys.stderr.write(to_native(stderr)) - sf.close() + sock.close() break sys.exit(rc) diff -Nru ansible-2.3.2.0/bin/ansible-console ansible-2.4.0.0/bin/ansible-console --- ansible-2.3.2.0/bin/ansible-console 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/bin/ansible-console 2017-09-19 17:10:47.000000000 +0000 @@ -37,9 +37,6 @@ import sys import traceback -# for debug -from multiprocessing import Lock - import ansible.constants as C from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError from ansible.utils.display import Display @@ -47,7 +44,7 @@ ######################################## -### OUTPUT OF LAST RESORT ### +# OUTPUT OF LAST RESORT class LastResort(object): def display(self, msg): print(msg, file=sys.stderr) @@ -71,7 +68,7 @@ sub = None target = me.split('-') if target[-1][0].isdigit(): - # Remove any version or pthon version info as downstreams + # Remove any version or python version info as downstreams # sometimes add that target = target[:-1] @@ -130,7 +127,7 @@ exit_code = 99 except Exception as e: have_cli_options = cli is not None and cli.options is not None - display.error("Unexpected Exception: %s" % to_text(e), wrap_text=False) + display.error("Unexpected Exception, this is probably a bug: %s" % to_text(e), wrap_text=False) if not have_cli_options or have_cli_options and cli.options.verbosity > 2: log_only = False else: diff -Nru ansible-2.3.2.0/bin/ansible-doc ansible-2.4.0.0/bin/ansible-doc --- ansible-2.3.2.0/bin/ansible-doc 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/bin/ansible-doc 2017-09-19 17:10:47.000000000 +0000 @@ -37,9 +37,6 @@ import sys import traceback -# for debug -from multiprocessing import Lock - import ansible.constants as C from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError from ansible.utils.display import Display @@ -47,7 +44,7 @@ ######################################## -### OUTPUT OF LAST RESORT ### +# OUTPUT OF LAST RESORT class LastResort(object): def display(self, msg): print(msg, file=sys.stderr) @@ -71,7 +68,7 @@ sub = None target = me.split('-') if target[-1][0].isdigit(): - # Remove any version or pthon version info as downstreams + # Remove any version or python version info as downstreams # sometimes add that target = target[:-1] @@ -130,7 +127,7 @@ exit_code = 99 except Exception as e: have_cli_options = cli is not None and cli.options is not None - display.error("Unexpected Exception: %s" % to_text(e), wrap_text=False) + display.error("Unexpected Exception, this is probably a bug: %s" % to_text(e), wrap_text=False) if not have_cli_options or have_cli_options and cli.options.verbosity > 2: log_only = False else: diff -Nru ansible-2.3.2.0/bin/ansible-galaxy ansible-2.4.0.0/bin/ansible-galaxy --- ansible-2.3.2.0/bin/ansible-galaxy 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/bin/ansible-galaxy 2017-09-19 17:10:47.000000000 +0000 @@ -37,9 +37,6 @@ import sys import traceback -# for debug -from multiprocessing import Lock - import ansible.constants as C from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError from ansible.utils.display import Display @@ -47,7 +44,7 @@ ######################################## -### OUTPUT OF LAST RESORT ### +# OUTPUT OF LAST RESORT class LastResort(object): def display(self, msg): print(msg, file=sys.stderr) @@ -71,7 +68,7 @@ sub = None target = me.split('-') if target[-1][0].isdigit(): - # Remove any version or pthon version 
info as downstreams + # Remove any version or python version info as downstreams # sometimes add that target = target[:-1] @@ -130,7 +127,7 @@ exit_code = 99 except Exception as e: have_cli_options = cli is not None and cli.options is not None - display.error("Unexpected Exception: %s" % to_text(e), wrap_text=False) + display.error("Unexpected Exception, this is probably a bug: %s" % to_text(e), wrap_text=False) if not have_cli_options or have_cli_options and cli.options.verbosity > 2: log_only = False else: diff -Nru ansible-2.3.2.0/bin/ansible-playbook ansible-2.4.0.0/bin/ansible-playbook --- ansible-2.3.2.0/bin/ansible-playbook 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/bin/ansible-playbook 2017-09-19 17:10:47.000000000 +0000 @@ -37,9 +37,6 @@ import sys import traceback -# for debug -from multiprocessing import Lock - import ansible.constants as C from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError from ansible.utils.display import Display @@ -47,7 +44,7 @@ ######################################## -### OUTPUT OF LAST RESORT ### +# OUTPUT OF LAST RESORT class LastResort(object): def display(self, msg): print(msg, file=sys.stderr) @@ -71,7 +68,7 @@ sub = None target = me.split('-') if target[-1][0].isdigit(): - # Remove any version or pthon version info as downstreams + # Remove any version or python version info as downstreams # sometimes add that target = target[:-1] @@ -130,7 +127,7 @@ exit_code = 99 except Exception as e: have_cli_options = cli is not None and cli.options is not None - display.error("Unexpected Exception: %s" % to_text(e), wrap_text=False) + display.error("Unexpected Exception, this is probably a bug: %s" % to_text(e), wrap_text=False) if not have_cli_options or have_cli_options and cli.options.verbosity > 2: log_only = False else: diff -Nru ansible-2.3.2.0/bin/ansible-pull ansible-2.4.0.0/bin/ansible-pull --- ansible-2.3.2.0/bin/ansible-pull 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/bin/ansible-pull 2017-09-19 17:10:47.000000000 +0000 @@ -37,9 +37,6 @@ import sys import traceback -# for debug -from multiprocessing import Lock - import ansible.constants as C from ansible.errors import AnsibleError, AnsibleOptionsError, AnsibleParserError from ansible.utils.display import Display @@ -47,7 +44,7 @@ ######################################## -### OUTPUT OF LAST RESORT ### +# OUTPUT OF LAST RESORT class LastResort(object): def display(self, msg): print(msg, file=sys.stderr) @@ -71,7 +68,7 @@ sub = None target = me.split('-') if target[-1][0].isdigit(): - # Remove any version or pthon version info as downstreams + # Remove any version or python version info as downstreams # sometimes add that target = target[:-1] @@ -130,7 +127,7 @@ exit_code = 99 except Exception as e: have_cli_options = cli is not None and cli.options is not None - display.error("Unexpected Exception: %s" % to_text(e), wrap_text=False) + display.error("Unexpected Exception, this is probably a bug: %s" % to_text(e), wrap_text=False) if not have_cli_options or have_cli_options and cli.options.verbosity > 2: log_only = False else: diff -Nru ansible-2.3.2.0/bin/ansible-vault ansible-2.4.0.0/bin/ansible-vault --- ansible-2.3.2.0/bin/ansible-vault 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/bin/ansible-vault 2017-09-19 17:10:47.000000000 +0000 @@ -37,9 +37,6 @@ import sys import traceback -# for debug -from multiprocessing import Lock - import ansible.constants as C from ansible.errors import AnsibleError, AnsibleOptionsError, 
AnsibleParserError from ansible.utils.display import Display @@ -47,7 +44,7 @@ ######################################## -### OUTPUT OF LAST RESORT ### +# OUTPUT OF LAST RESORT class LastResort(object): def display(self, msg): print(msg, file=sys.stderr) @@ -71,7 +68,7 @@ sub = None target = me.split('-') if target[-1][0].isdigit(): - # Remove any version or pthon version info as downstreams + # Remove any version or python version info as downstreams # sometimes add that target = target[:-1] @@ -130,7 +127,7 @@ exit_code = 99 except Exception as e: have_cli_options = cli is not None and cli.options is not None - display.error("Unexpected Exception: %s" % to_text(e), wrap_text=False) + display.error("Unexpected Exception, this is probably a bug: %s" % to_text(e), wrap_text=False) if not have_cli_options or have_cli_options and cli.options.verbosity > 2: log_only = False else: diff -Nru ansible-2.3.2.0/CHANGELOG.md ansible-2.4.0.0/CHANGELOG.md --- ansible-2.3.2.0/CHANGELOG.md 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/CHANGELOG.md 2017-09-19 17:10:47.000000000 +0000 @@ -1,6 +1,672 @@ Ansible Changes By Release ========================== + + + +## 2.4 "Dancing Days" - ACTIVE DEVELOPMENT + +### Major Changes + +* Support for Python-2.4 and Python-2.5 on the managed system's side was dropped. If you need to manage a system that ships with Python-2.4 or Python-2.5, you'll need to install Python-2.6 or better on the managed system or run Ansible-2.3 until you can upgrade the system. +* New import/include keywords to replace the old bare `include` directives. The use of `static: {yes|no}` on such includes is now deprecated. + - Using `import_*` (`import_playbook`, `import_tasks`, `import_role`) directives are static. + - Using `include_*` (`include_tasks`, `include_role`) directives are dynamic. + This is done to avoid collisions and possible security issues as facts come from the remote targets and they might be compromised. +* New `order` play level keyword that allows the user to change the order in which Ansible processes hosts when dispatching tasks. +* Users can now set group merge priority for groups of the same depth (parent child relationship), using the new `ansible_group_priority` variable, when values are the same or don't exist it will fallback to the previous sorting by name'. +* Inventory has been revamped: + - Inventory classes have been split to allow for better management and deduplication + - Logic that each inventory source duplicated is now common and pushed up to reconciliation + - VariableManager has been updated for better interaction with inventory + - Updated CLI with helper method to initialize base objects for plays + - New inventory plugins for creating inventory + - Old inventory formats are still supported via plugins + - Inline host_list is also an inventory plugin, an example alternative `advanced_host_list` is also provided (it supports ranges) + - New configuration option to list enabled plugins and precedence order: `whitelist_inventory` in ansible.cfg + - vars_plugins have been reworked, they are now run from Vars manager and API has changed (need docs) + - Loading group_vars/host_vars is now a vars plugin and can be overridden + - It is now possible to specify mulitple inventory sources in the command line (-i /etc/hosts1 -i /opt/hosts2) + - Inventory plugins can use the cache plugin (i.e. 
virtualbox) and is affected by `meta: refresh_inventory` + - Group variable precedence is now configurable via new 'precedence' option in ansible.cfg (needs docs) + - Improved warnings and error messages across the board +* Configuration has been changed from a hardcoded listing in the constants module to dynamically loaded from yaml definitions + - Also added an ansible-config CLI to allow for listing config options and dumping current config (including origin) + - TODO: build upon this to add many features detailed in ansible-config proposal https://github.com/ansible/proposals/issues/35 +* Windows modules now support the use of multiple shared module_utils files in the form of Powershell modules (.psm1), via `#Requires -Module Ansible.ModuleUtils.Whatever.psm1` +* Python module argument_spec now supports custom validation logic by accepting a callable as the `type` argument. +* Windows become_method: runas now works across all authtypes and will auto-elevate under UAC if WinRM user has "Act as part of the operating system" privilege + +### Deprecations +* The behaviour when specifying `--tags` (or `--skip-tags`) multiple times on the command line + has changed so that the tags are merged together by default. See the + documentation for how to temporarily use the old behaviour if needed: + https://docs.ansible.com/ansible/intro_configuration.html#merge-multiple-cli-tags +* The `fetch` module's `validate_md5` parameter has been deprecated and will be + removed in 2.8. If you wish to disable post-validation of the downloaded + file, use validate_checksum instead. +* Those using ansible as a library should note that the `ansible.vars.unsafe_proxy` + module is deprecated and slated to go away in 2.8. The functionality has been + moved to `ansible.utils.unsafe_proxy` to avoid a circular import. +* The win_get_url module has the dictionary 'win_get_url' in its results deprecated, + its content is now also available directly in the resulting output, like other modules. + +#### Deprecated Modules (to be removed in 2.8): +* azure: use M(azure_rm_virtualmachine) instead +* cs_nic: replaced by cs_instance_nic_secondaryip, also see new module cs_instance_nic for managing nics +* ec2_facts: replaced by ec2_metadata_facts +* ec2_remote_facts: replaced by ec2_instance_facts +* panos_address: use M(panos_object) instead +* panos_nat_policy: use M(panos_nat_rule) instead +* panos_security_policy: use M(panos_security_rule) instead +* panos_service: use M(panos_object) instead +* s3: replaced by aws_s3 +* win_msi: use M(win_package) instead + +#### Removed Modules (previously deprecated): +* eos_template: use eos_config instead +* ios_template: use ios_config instead +* iosxr_template: use iosxr_config instead +* junos_template: use junos_config instead +* nxos_template: use nxos_config instead +* openswitch +* ops_template: use ops_config instead + + +### Minor Changes +* Now deprecated configuration options issue warnings when set. +* Removed unused and deprecated config option `pattern` +* Updated the copy of six bundled for modules to use from 1.4.1 to 1.10.0 +* The `include_dir` var is not a global anymore, as we now allow multiple inventory sources, it is now host dependant. + This means it cannot be used wherever host vars are not permitted, for example in task/handler names. +* Fixed a cornercase with ini inventory vars. Previously, if an inventory var + was a quoted string with hash marks ("#") in it then the parsed string + included the quotes. Now the string will not be quoted. 
Previously, if the + quoting ended before the string finished and then the hash mark appeared, the + hash mark was included as part of the string. Now it is treated as + a trailing comment: + + # Before: + var1="string#comment" ===> var1: "\"string#comment\"" + var1="string" #comment ===> var1: "\"string\" #comment" + # After: + var1="string#comment" ===> var1: "string#comment" + var1="string" #comment ===> var1: "string" + + The new behaviour mirrors how the variables would appear if there was no hash + mark in the string. +* As of 2.4.0, the fetch module fails if there are errors reading the remote file. + Use `ignore_errors` or `failed_when` in playbooks if you wish to ignore errors. +* Experimentally added pmrun become method. +* Enable the docker connection plugin to use su as a become method +* Add an encoding parameter for the replace module so that it can operate on non-utf-8 files +* By default, Ansible now uses the cryptography module to implement vault instead of the older pycrypto module. +* Changed task state resulting from both `rc` and `failed` fields returned, 'rc' no longer overrides 'failed'. Test plugins have also been updated accordingly. +* The win_unzip module no longer includes dictionary 'win_unzip' in its results, + the content is now directly in the resulting output, like pretty much every other module. +* Rewrite of the copy module so that it handles cornercases with symbolic links + and empty directories. The copy module has a new parameter, `local_follow` + which controls how links on the source system are treated. (The older + parameter, follow is for links on the remote system.) +* Update the handling of symbolic file permissions in file-related mode + parameters to deal with multiple operators. For instance, `mode='u=rw+x-X'` to + set the execute bit on directories, remove it from filea, and set read-write + on both is now supported +* Added better cookie parsing to fetch_url/open_url. Cookies are now in a dictionary named `cookies` + in the fetch_url result. Anything using `open_url` directly can pass a cookie object as a named arg + (`cookies`), and then parse/format the cookies in the result. +* The bundled copy of six in lib/ansible/module_utils/six is now used + unconditionally. The code to fallback on a system six interfered with static + analysis of the code so the cost of using the fallback code became too high. + Distributions which wish to unbundle may do so by replacing the bundled six + in ansible/module_utils/six/__init__.py. Six is tricky to unbundle, however, + so they may want to base their efforts off the code we were using: + * https://github.com/ansible/ansible/blob/2fff690caab6a1c6a81973f704be3fbd0bde2c2f/lib/ansible/module_utils/six/__init__.py +* Update ipaddr Jinja filters to replace existing non RFC compliant ones. Added additional filters for easier use + of handling IP addresses. (PR# 26566) +* datetime filter updated to use default format of datetime.datetime (ISO8601) +* The junit plugin now has an option to report a junit test failure on changes for idempotent testing. +* New 'diff' keyword allows setting diff mode on playbook objects, overriding command line option and config. +* New config settings for inventory to: + - control inventory plugins used + - extensions of files to ignore when using inventory directory + - patterns of flies to ignore when using inventory directory + - option to toggle failed inventory source parsing between an error or a warning +* More fixes for Python 3 across the code base. 
+* win_shell and win_command modules now properly preserve quoted arguments passed on the command-line. Tasks that attempted to work around the issue by adding extra quotes/escaping may need to be reworked. See https://github.com/ansible/ansible/issues/23019 for additional detail. + + +#### New Callbacks: +- full_skip +- profile_roles +- stderr + +#### New Connection plugins: +- buildah +- saltstack + +#### New Filters: +- ipaddr filter gained several new suboptions + - first_usable + - ip/prefix + - ip_netmask + - last_usable + - next_usable + - network_id + - network/prefix + - network_netmask + - network_wildcard + - previous_usable + - range_usable + - size_usable + - wildcard +- next_nth_usable +- network_in_network +- network_in_usable +- previous_nth_usable +- parse_cli +- parse_cli_textfsm +- strftime +- urlsplit + +#### New Inventory Plugins: +- advanced_host_list +- constructed +- host_list +- ini +- openstack +- script +- virtualbox +- yaml + +#### New Inventory scripts: +- lxd + +#### New Lookups: +- chef_databag +- cyberarkpassword +- hiera + +#### New Tests: +- any : true if any element is true +- all: true if all elements are true + +### Module Notes +- By mistake, an early version of elb_classic_lb, elb_instance, and elb_classic_lb_facts modules + were released and marked as stableinterface. These will be marked as preview in 2.4.1 and their + parameters and return values may change in 2.5.0. Part of this mistake included deprecating the + ec2_elb_lb, ec2_lb, and ec2_elb_facts modules prematurely. These modules won't be deprecated + until the replacements above have a stableinterface and the erroneous deprecation will be fixed + in 2.4.1. +- The docker_container module has gained a new option, `working_dir` which allows + specifying the working directory for the command being run in the image. +- The ec2_win_password module now requires the cryptography python module be installed to run +- The stat module added a field, lnk_target. When the file being stated is + a symlink, lnk_target will contain the target of the link. This differs from + lnk_source when the target is specified relative to the symlink. In this + case, lnk_target will remain relative while lnk_source will be expanded to an + absolute path. +- The archive module has a new parameter exclude_path which lists paths to exclude from the archive +- The yum module has a new parameter security which limits state=latest to security updates +- The template module gained a follow parameter to match with copy and file. + Like those modules, template defaults this parameter to False. Previously, + template hardcoded this to true. +- Added a new parameter to command module that lets users specify data to pipe + into the command's stdin. +- The azure_rm modules now accept a `cloud_environment` arg to access regional and private clouds. +- The azure_rm modules and inventory script now require at least version 2.0.0 of the Azure Python SDK. 
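As an aside to the 2.4 Major Changes above, which note that a Python module's `argument_spec` now accepts a callable as the `type` of an option for custom validation: the following is a minimal sketch of that feature, not part of the upstream diff; the module, option, and validator names are purely illustrative.

```python
#!/usr/bin/python
# Illustrative sketch: a callable passed as 'type' receives the raw option
# value and must return the converted value; raising TypeError/ValueError
# makes AnsibleModule report the option as invalid.
from ansible.module_utils.basic import AnsibleModule


def port_number(value):
    port = int(value)  # ValueError here is surfaced as an invalid-argument failure
    if not 0 < port < 65536:
        raise ValueError('%s is not a valid port number' % value)
    return port


def main():
    module = AnsibleModule(
        argument_spec=dict(
            port=dict(type=port_number, required=True),
        ),
        supports_check_mode=True,
    )
    module.exit_json(changed=False, port=module.params['port'])


if __name__ == '__main__':
    main()
```

Because the callable both converts and validates, the same helper can be reused across several options instead of repeating ad-hoc checks after `AnsibleModule` is constructed.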
+ +### New Modules + +#### Cloud +- amazon + * aws_api_gateway + * aws_direct_connect_connection + * aws_direct_connect_link_aggregation_group + * aws_s3 + * aws_s3_bucket_facts + * aws_waf_facts + * data_pipeline + * dynamodb_ttl + * ec2_instance_facts + * ec2_metadata_facts + * ec2_vpc_dhcp_option_facts + * ec2_vpc_endpoint + * ec2_vpc_endpoint_facts + * ec2_vpc_peering_facts + * ecs_attribute + * elb_application_lb + * elb_application_lb_facts + * elb_target_group + * elb_target_group_facts + * iam_group + * iam_managed_policy + * lightsail + * redshift_facts +- azure + * azure_rm_acs + * azure_rm_availabilityset + * azure_rm_availabilityset_facts + * azure_rm_dnsrecordset + * azure_rm_dnsrecordset_facts + * azure_rm_dnszone + * azure_rm_dnszone_facts + * azure_rm_functionapp + * azure_rm_functionapp_facts + * azure_rm_loadbalancer + * azure_rm_loadbalancer_facts + * azure_rm_managed_disk + * azure_rm_managed_disk_facts + * azure_rm_virtualmachine_extension + * azure_rm_virtualmachine_scaleset + * azure_rm_virtualmachine_scaleset_facts +- atomic + * atomic_container +- cloudstack + * cs_instance_nic + * cs_instance_nic_secondaryip + * cs_network_acl + * cs_network_acl_rule + * cs_storage_pool + * cs_vpn_gateway +- digital_ocean + * digital_ocean_floating_ip +- docker + * docker_secret + * docker_volume +- google + * gce_labels + * gcp_backend_service + * gcp_forwarding_rule + * gcp_healthcheck + * gcp_target_proxy + * gcp_url_map +- misc + * helm +- ovirt + * ovirt_host_storage_facts + * ovirt_scheduling_policies_facts + * ovirt_storage_connections +- vmware + * vcenter_license + * vmware_guest_find + * vmware_guest_tools_wait + * vmware_resource_pool + +#### Commands + * telnet + +#### Crypto + * openssl_certificate + * openssl_csr + +#### Files + * xml + +#### Identity +- cyberark + * cyberark_authentication + * cyberark_user +- ipa + * ipa_dnsrecord + +#### Monitoring + * sensu_client + * sensu_handler + * sensu_silence + +#### Network +- aci + * aci_aep + * aci_ap + * aci_bd + * aci_bd_subnet + * aci_bd_to_l3out + * aci_contract + * aci_contract_subject_to_filter + * aci_epg + * aci_epg_monitoring_policy + * aci_epg_to_contract + * aci_epg_to_domain + * aci_filter + * aci_filter_entry + * aci_intf_policy_fc + * aci_intf_policy_l2 + * aci_intf_policy_lldp + * aci_intf_policy_mcp + * aci_intf_policy_port_channel + * aci_intf_policy_port_security + * aci_l3out_route_tag_policy + * aci_rest + * aci_taboo_contract + * aci_tenant + * aci_tenant_action_rule_profile + * aci_tenant_span_dst_group + * aci_vrf +- aireos + * aireos_command + * aireos_config +- aruba + * aruba_command + * aruba_config +- avi + * avi_actiongroupconfig + * avi_alertconfig + * avi_alertemailconfig + * avi_alertscriptconfig + * avi_alertsyslogconfig + * avi_authprofile + * avi_backup + * avi_backupconfiguration + * avi_cloud + * avi_cloudconnectoruser + * avi_cloudproperties + * avi_cluster + * avi_controllerproperties + * avi_dnspolicy + * avi_gslb + * avi_gslbapplicationpersistenceprofile + * avi_gslbgeodbprofile + * avi_gslbhealthmonitor + * avi_gslbservice + * avi_hardwaresecuritymodulegroup + * avi_httppolicyset + * avi_ipaddrgroup + * avi_ipamdnsproviderprofile + * avi_microservicegroup + * avi_network + * avi_networksecuritypolicy + * avi_poolgroupdeploymentpolicy + * avi_prioritylabels + * avi_scheduler + * avi_seproperties + * avi_serverautoscalepolicy + * avi_serviceengine + * avi_serviceenginegroup + * avi_snmptrapprofile + * avi_stringgroup + * avi_trafficcloneprofile + * avi_useraccountprofile + * 
avi_vrfcontext + * avi_vsdatascriptset + * avi_vsvip + * avi_webhook +- bigswitch + * bcf_switch +- cloudengine + * ce_aaa_server + * ce_aaa_server_host + * ce_acl + * ce_acl_advance + * ce_acl_interface + * ce_bfd_global + * ce_bfd_session + * ce_bfd_view + * ce_bgp + * ce_bgp_af + * ce_bgp_neighbor + * ce_bgp_neighbor_af + * ce_config + * ce_dldp + * ce_dldp_interface + * ce_eth_trunk + * ce_evpn_bd_vni + * ce_evpn_bgp + * ce_evpn_bgp_rr + * ce_evpn_global + * ce_facts + * ce_file_copy + * ce_info_center_debug + * ce_info_center_global + * ce_info_center_log + * ce_info_center_trap + * ce_interface + * ce_interface_ospf + * ce_ip_interface + * ce_link_status + * ce_mlag_config + * ce_mlag_interface + * ce_mtu + * ce_netconf + * ce_netstream_aging + * ce_netstream_export + * ce_netstream_global + * ce_netstream_template + * ce_ntp + * ce_ntp_auth + * ce_ospf + * ce_ospf_vrf + * ce_reboot + * ce_rollback + * ce_sflow + * ce_snmp_community + * ce_snmp_contact + * ce_snmp_location + * ce_snmp_target_host + * ce_snmp_traps + * ce_snmp_user + * ce_startup + * ce_static_route + * ce_stp + * ce_switchport + * ce_vlan + * ce_vrf + * ce_vrf_af + * ce_vrf_interface + * ce_vrrp + * ce_vxlan_arp + * ce_vxlan_gateway + * ce_vxlan_global + * ce_vxlan_tunnel + * ce_vxlan_vap +- cloudvision + * cv_server_provision +- eos + * eos_logging + * eos_vlan + * eos_vrf +- f5 + * bigip_command + * bigip_config + * bigip_configsync_actions + * bigip_gtm_pool + * bigip_iapp_service + * bigip_iapp_template + * bigip_monitor_tcp_echo + * bigip_monitor_tcp_half_open + * bigip_provision + * bigip_qkview + * bigip_snmp + * bigip_snmp_trap + * bigip_ucs + * bigip_user + * bigip_virtual_address +- fortios + * fortios_address +- interface + * net_interface + * net_linkagg + * net_lldp_interface +- ios + * ios_interface + * ios_logging + * ios_static_route + * ios_user +- iosxr + * iosxr_banner + * iosxr_interface + * iosxr_logging + * iosxr_user +- junos + * junos_banner + * junos_interface + * junos_l3_interface + * junos_linkagg + * junos_lldp + * junos_lldp_interface + * junos_logging + * junos_static_route + * junos_system + * junos_vlan + * junos_vrf +- layer2 + * net_l2_interface + * net_vlan +- layer3 + * net_l3_interface + * net_vrf +- netscaler + * netscaler_cs_action + * netscaler_cs_policy + * netscaler_cs_vserver + * netscaler_gslb_service + * netscaler_gslb_site + * netscaler_gslb_vserver + * netscaler_lb_monitor + * netscaler_lb_vserver + * netscaler_save_config + * netscaler_server + * netscaler_service + * netscaler_servicegroup + * netscaler_ssl_certkey +- nuage + * nuage_vspk +- nxos + * nxos_banner + * nxos_logging +- panos + * panos_nat_rule + * panos_object + * panos_security_rule +- protocol + * net_lldp +- routing + * net_static_route +- system + * net_banner + * net_logging + * net_system + * net_user +- vyos + * vyos_banner + * vyos_interface + * vyos_l3_interface + * vyos_linkagg + * vyos_lldp + * vyos_lldp_interface + * vyos_logging + * vyos_static_route + * vyos_user + +#### Notification + * bearychat + * catapult + * office_365_connector_card + +#### Remote Management +- hpe + * oneview_fc_network +- imc + * imc_rest +- manageiq + * manageiq_user + +#### Source Control + * github_deploy_key + * github_issue + +#### Storage + * nuage_vpsk +- panos + * panos_sag +- purestorage + * purefa_hg + * purefa_host + * purefa_pg + * purefa_snap + * purefa_volume + +#### System + * aix_lvol + * awall + * dconf + * interfaces_file + +#### Web Infrastructure + * gunicorn + * rundeck_acl_policy + * 
rundeck_project + +#### Windows + * win_defrag + * win_domain_group + * win_domain_user + * win_dsc + * win_eventlog + * win_eventlog_entry + * win_firewall + * win_group_membership + * win_hotfix + * win_mapped_drive + * win_pagefile + * win_power_plan + * win_psmodule + * win_rabbitmq_plugin + * win_route + * win_security_policy + * win_toast + * win_user_right + * win_wait_for + * win_wakeonlan + + + +## 2.3.3 "Ramble On" - TBD + +### Bugfixes +* Fix alternatives module handlling of non existing options +* Fix synchronize traceback with the docker connection plugin +* Do not escape backslashes in the template lookup plugin to mirror what the template module does +* Fix the expires option of the postgresq_user module +* Fix for win_acl when settings permissions on registry objects that use `ALL APPLICATION PACKAGES` and `ALL RESTRICTED APPLICATION PACKAGES` +* Python3 fixes + * asorted azure modules + * pause module + * hacking/env-setup script + * Fix traceback when checking for passwords in logged strings when logging executed commands. + * docker_login module + * Workaround python-libselinux API change in the seboolean module + * digital_ocean_tag module + * Fix the zip filter + * Fix user module combining bytes and text + * Fix for security groups in the amazon efs module + * Fix for the jail connection plugin not finding the named jail + * Fix for blockinfile's parameters insertbefore and insertafter +* ios_config: Fix traceback when the efaults parameter is not set +* iosxr_config: Fixed unicode error when UTF-8 characters are in configs +* Fix check mode in archive module +* Fix UnboundLocalError in check mode in cs_role module +* Fix to always use lowercase hostnames for host keys in known_hosts module +* Added missing return results for win_stat +* Fix rabbitmq modules to give a helpful error if requests is not installed +* Fix yum module not deleting rpms that it downloaded +* Fix yum module failing with a URL to an rpm +* Fix file module inappropriately expanding literal dollar signs in a path read + from the filesystem as an environment variable. +* Fix the ssh "smart" transport setting which automatically selects the best means of + transferring files over ssh (sftp, ssh, piped). +* Fix authentication by api_key parameter in exoscale modules. +* vmware module_utils shared code ssl/validate_certs fixes in connection logic +* allow 'bridge' facts to work for certain containers that create conflicting ones with connection plugins +* Fix for win_get_url to use TLS 1.2/1.1 if it is available on the host +* Fix for the filetree lookup with non-ascii group names +* Better message for invalid keywords/options in task due to undefined expressions +* Fixed check mode for enable on Solaris for service module +* Fix cloudtrail module to allow AWS profiles other than the default +* Fix an encoding issue with secret (password) vars_prompts + + + ## 2.3.2 "Ramble On" - 2017-08-04 ### Bugfixes @@ -59,6 +725,10 @@ * ensure prefix in plugin loading to avoid conflicts * fix for a small number of modules (tempfile, possibly copy) which could fail if the tempdir on the remote box was a symlink ++ fix non-pipelined code paths for Windows (eg, ANSIBLE_KEEP_REMOTE_FILES, non-pipelined connection plugins) +* fix for win_updates where args and check mode were ignored due to common code change + + ## 2.3.1 "Ramble On" - 2017-06-01 @@ -84,8 +754,12 @@ * Handle detection of docker image changes when published ports is changed * Fix for docker_container restarting images when links list is empty. 
+ + ## 2.3 "Ramble On" - 2017-04-12 +Moving to Ansible 2.3 guide http://docs.ansible.com/ansible/porting_guide_2.3.html + ### Major Changes * Documented and renamed the previously released 'single var vaulting' feature, allowing user to use vault encryption for single variables in a normal YAML vars file. * Allow module_utils for custom modules to be placed in site-specific directories and shipped in roles @@ -144,6 +818,7 @@ #### New: lookups - keyring: allows getting password from the 'controller' system's keyrings +- chef_databag: allows querying Chef Databags via pychef library #### New: cache @@ -189,6 +864,8 @@ - bigswitch: * bigmon_chain * bigmon_policy +- cisco + * cisco_spark - cloudengine: * ce_command - cloudscale_server @@ -405,6 +1082,7 @@ * zfs_facts * zpool_facts + ## 2.2.1 "The Battle of Evermore" - 2017-01-16 @@ -431,10 +1109,11 @@ * Improvements and fixes to OpenBSD fact gathering. * Updated `make deb` to use pbuilder. Use `make local_deb` for the previous non-pbuilder build. * Fixed Windows async to avoid blocking due to handle inheritance. -* Fixed bugs in the mount module on older Linux kernels and *BSDs +* Fixed bugs in the mount module on older Linux kernels and BSDs * Various minor fixes for Python 3 * Inserted some checks for jinja2-2.9, which can cause some issues with Ansible currently. + ## 2.2 "The Battle of Evermore" - 2016-11-01 @@ -749,29 +1428,35 @@ * nxos_template * ops_template + + ## 2.1.4 "The Song Remains the Same" - 2017-01-16 * Security fix for CVE-2016-9587 - An attacker with control over a client system being managed by Ansible and the ability to send facts back to the Ansible server could use this flaw to execute arbitrary code on the Ansible server as the user and group Ansible is running as. * Fixed a bug with conditionals in loops, where undefined variables and other errors will defer raising the error until the conditional has been evaluated. * Added a version check for jinja2-2.9, which does not fully work with Ansible currently. + + ## 2.1.3 "The Song Remains the Same" - 2016-11-04 * Security fix for CVE-2016-8628 - Command injection by compromised server via fact variables. In some situations, facts returned by modules could overwrite connection-based facts or some other special variables, leading to injected commands running on the Ansible controller as the user running Ansible (or via escalated permissions). * Security fix for CVE-2016-8614 - apt_key module not properly validating keys in some situations. -###Minor Changes: +### Minor Changes: * The subversion module from core now marks its password parameter as no_log so the password is obscured when logging. * The postgresql_lang and postgresql_ext modules from extras now mark login_password as no_log so the password is obscured when logging. * Fixed several bugs related to locating files relative to role/playbook directories. * Fixed a bug in the way hosts were tested for failed states, resulting in incorrectly skipped block sessions. -* Fixed a bug in the way our custom JSON encoder is used for the to_json* filters. +* Fixed a bug in the way our custom JSON encoder is used for the `to_json*` filters. * Fixed some bugs related to the use of non-ascii characters in become passwords. * Fixed a bug with Azure modules which may be using the latest rc6 library. * Backported some docker_common fixes. + + ## 2.1.2 "The Song Remains the Same" - 2016-09-29 ### Minor Changes @@ -834,9 +1519,11 @@ Use `_fixup_perms2` if support for previous releases is not required. 
Otherwise use `_fixup_perms` with `recursive=False`. + + ## 2.1 "The Song Remains the Same" -###Major Changes: +### Major Changes: * Official support for the networking modules, originally available in 2.0 as a tech preview. * Refactored and expanded support for Docker with new modules and many improvements to existing modules, as well as a new Kubernetes module. @@ -994,12 +1681,12 @@ * issubset * issuperset -####New Inventory scripts: +#### New Inventory scripts: * brook * rackhd * azure_rm -###Minor Changes: +### Minor Changes: * Added support for pipelining mode to more connection plugins, which helps prevent module data from being written to disk. @@ -1014,13 +1701,15 @@ 10-first-callback.py and 20-second-callback.py. * Added (alpha) Centirfy's dzdo as another become meethod (privilege escalation) -###Deprecations: +### Deprecations: * Deprecated the use of "bare" variables in loops (ie. `with_items: foo`, where `foo` is a variable). The full jinja2 variable syntax of `{{foo}}` should always be used instead. This warning will be removed completely in 2.3, after which time it will be an error. * play_hosts magic variable, use ansible_play_batch or ansible_play_hosts instead. + + ## 2.0.2 "Over the Hills and Far Away" * Backport of the 2.1 feature to ensure per-item callbacks are sent as they occur, @@ -1062,10 +1751,12 @@ permissions on the temporary file too leniently on a temporary file that was executed as a script. Addresses CVE-2016-3096 * Fix a bug in the uri module where setting headers via module params that - start with HEADER_ were causing a traceback. + start with `HEADER_` were causing a traceback. * Fix bug in the free strategy that was causing it to synchronize its workers after every task (making it a lot more like linear than it should have been). + + ## 2.0.1 "Over the Hills and Far Away" * Fixes a major compatibility break in the synchronize module shipped with @@ -1107,9 +1798,11 @@ this might cause an error if settings environment on play depending on 'ansible_env' which was previouslly ignored + + ## 2.0 "Over the Hills and Far Away" - Jan 12, 2016 -###Major Changes: +### Major Changes: * Releases are now named after Led Zeppelin songs, 1.9 will be the last Van Halen named release. * The new block/rescue/always directives allow for making task blocks and exception-like semantics @@ -1140,7 +1833,7 @@ * Backslashes used when specifying parameters in jinja2 expressions in YAML dicts sometimes needed to be escaped twice. This has been fixed so that escaping once works. Here's an example of how playbooks need to be modified: - ``` + ```yaml # Syntax in 1.9.x - debug: msg: "{{ 'test1_junk 1\\\\3' | regex_replace('(.*)_junk (.*)', '\\\\1 \\\\2') }}" @@ -1158,7 +1851,7 @@ string will keep the trailing newlines. If you relied on the trailing newline being stripped you can change your playbook like this: - ``` + ```yaml # Syntax in 1.9.2 vars: message: > @@ -1185,7 +1878,7 @@ In fact, even specifying args with variables has been deprecated, and will not be allowed in future versions: - ``` + ```yaml --- - hosts: localhost connection: local @@ -1200,13 +1893,13 @@ with_items: my_dirs ``` -###Plugins +### Plugins * Rewritten dnf module that should be faster and less prone to encountering bugs in cornercases * WinRM connection plugin passes all vars named `ansible_winrm_*` to the underlying pywinrm client. This allows, for instance, `ansible_winrm_server_cert_validation=ignore` to be used with newer versions of pywinrm to disable certificate validation on Python 2.7.9+. 
* WinRM connection plugin put_file is significantly faster and no longer has file size limitations. -####Deprecated Modules (new ones in parens): +#### Deprecated Modules (new ones in parens): * ec2_ami_search (ec2_ami_find) * quantum_network (os_network) @@ -1217,7 +1910,7 @@ * quantum_router_gateway (os_router) * quantum_router_interface (os_router) -####New Modules: +#### New Modules: - amazon * ec2_ami_copy @@ -1433,7 +2126,7 @@ * zabbix_screen - znode -####New Inventory scripts: +#### New Inventory scripts: * cloudstack * fleetctl @@ -1444,27 +2137,27 @@ * rudder * serf -####New Lookups: +#### New Lookups: * credstash * hashi_vault * ini * shelvefile -####New Filters: +#### New Filters: * combine -####New Connection: +#### New Connection: * docker: for talking to docker containers on the ansible controller machine without using ssh. -####New Callbacks: +#### New Callbacks: * logentries: plugin to send play data to logentries service * skippy: same as default but does not display skip messages -###Minor changes: +### Minor changes: * Many more tests. The new API makes things more testable and we took advantage of it. * big_ip modules now support turning off ssl certificate validation (use only for self-signed certificates). @@ -2606,7 +3299,7 @@ * added basename and dirname as Jinja2 filters available to all templates * pip works better when sudoing from unprivileged users * fix for user creation with groups specification reporting 'changed' incorrectly in some cases -* fix for some unicode encoding errors in outputing some data in verbose mode +* fix for some unicode encoding errors in outputting some data in verbose mode * improved FreeBSD, NetBSD and Solaris facts * debug module always outputs data without having to specify -v * fix for sysctl module creating new keys (must specify checks=none) @@ -2980,7 +3673,7 @@ * new LSB facts (release, distro, etc) * pause module -- (pause seconds=10) (pause minutes=1) (pause prompt=foo) -- it's an action plugin * a module for adding entries to the main crontab (though you may still wish to just drop template files into cron.d) -* debug module can be used for outputing messages without using 'shell echo' +* debug module can be used for outputting messages without using 'shell echo' * a fail module is now available for causing errors, you might want to use it with only_if to fail in certain conditions Other module Changes, Upgrades, and Fixes: diff -Nru ansible-2.3.2.0/contrib/inventory/abiquo.py ansible-2.4.0.0/contrib/inventory/abiquo.py --- ansible-2.3.2.0/contrib/inventory/abiquo.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/abiquo.py 2017-09-19 17:10:47.000000000 +0000 @@ -53,64 +53,68 @@ from ansible.module_utils.urls import open_url + def api_get(link, config): try: if link is None: - url = config.get('api','uri') + config.get('api','login_path') - headers = {"Accept": config.get('api','login_type')} + url = config.get('api', 'uri') + config.get('api', 'login_path') + headers = {"Accept": config.get('api', 'login_type')} else: url = link['href'] + '?limit=0' headers = {"Accept": link['type']} - result = open_url(url, headers=headers, url_username=config.get('auth','apiuser').replace('\n', ''), - url_password=config.get('auth','apipass').replace('\n', '')) + result = open_url(url, headers=headers, url_username=config.get('auth', 'apiuser').replace('\n', ''), + url_password=config.get('auth', 'apipass').replace('\n', '')) return json.loads(result.read()) except: return None + def save_cache(data, config): ''' 
saves item to cache ''' - dpath = config.get('cache','cache_dir') + dpath = config.get('cache', 'cache_dir') try: - cache = open('/'.join([dpath,'inventory']), 'w') + cache = open('/'.join([dpath, 'inventory']), 'w') cache.write(json.dumps(data)) cache.close() except IOError as e: - pass # not really sure what to do here + pass # not really sure what to do here def get_cache(cache_item, config): ''' returns cached item ''' - dpath = config.get('cache','cache_dir') + dpath = config.get('cache', 'cache_dir') inv = {} try: - cache = open('/'.join([dpath,'inventory']), 'r') + cache = open('/'.join([dpath, 'inventory']), 'r') inv = cache.read() cache.close() except IOError as e: - pass # not really sure what to do here + pass # not really sure what to do here return inv + def cache_available(config): ''' checks if we have a 'fresh' cache available for item requested ''' - if config.has_option('cache','cache_dir'): - dpath = config.get('cache','cache_dir') + if config.has_option('cache', 'cache_dir'): + dpath = config.get('cache', 'cache_dir') try: - existing = os.stat( '/'.join([dpath,'inventory'])) + existing = os.stat('/'.join([dpath, 'inventory'])) except: # cache doesn't exist or isn't accessible return False if config.has_option('cache', 'cache_max_age'): maxage = config.get('cache', 'cache_max_age') - if ((int(time.time()) - int(existing.st_mtime)) <= int(maxage)): + if (int(time.time()) - int(existing.st_mtime)) <= int(maxage): return True return False -def generate_inv_from_api(enterprise_entity,config): + +def generate_inv_from_api(enterprise_entity, config): try: inventory['all'] = {} inventory['all']['children'] = [] @@ -118,22 +122,22 @@ inventory['_meta'] = {} inventory['_meta']['hostvars'] = {} - enterprise = api_get(enterprise_entity,config) - vms_entity = next(link for link in (enterprise['links']) if (link['rel']=='virtualmachines')) - vms = api_get(vms_entity,config) + enterprise = api_get(enterprise_entity, config) + vms_entity = next(link for link in enterprise['links'] if link['rel'] == 'virtualmachines') + vms = api_get(vms_entity, config) for vmcollection in vms['collection']: for link in vmcollection['links']: if link['rel'] == 'virtualappliance': - vm_vapp = link['title'].replace('[','').replace(']','').replace(' ','_') + vm_vapp = link['title'].replace('[', '').replace(']', '').replace(' ', '_') elif link['rel'] == 'virtualdatacenter': - vm_vdc = link['title'].replace('[','').replace(']','').replace(' ','_') + vm_vdc = link['title'].replace('[', '').replace(']', '').replace(' ', '_') elif link['rel'] == 'virtualmachinetemplate': - vm_template = link['title'].replace('[','').replace(']','').replace(' ','_') + vm_template = link['title'].replace('[', '').replace(']', '').replace(' ', '_') # From abiquo.ini: Only adding to inventory VMs with public IP if config.getboolean('defaults', 'public_ip_only') is True: for link in vmcollection['links']: - if (link['type']=='application/vnd.abiquo.publicip+json' and link['rel']=='ip'): + if link['type'] == 'application/vnd.abiquo.publicip+json' and link['rel'] == 'ip': vm_nic = link['title'] break else: @@ -166,10 +170,10 @@ inventory[vm_template]['children'] = [] inventory[vm_template]['hosts'] = [] if config.getboolean('defaults', 'get_metadata') is True: - meta_entity = next(link for link in (vmcollection['links']) if (link['rel']=='metadata')) + meta_entity = next(link for link in vmcollection['links'] if link['rel'] == 'metadata') try: - metadata = api_get(meta_entity,config) - if (config.getfloat("api","version") >= 3.0): + 
metadata = api_get(meta_entity, config) + if (config.getfloat("api", "version") >= 3.0): vm_metadata = metadata['metadata'] else: vm_metadata = metadata['metadata']['metadata'] @@ -187,7 +191,8 @@ return inventory except Exception as e: # Return empty hosts output - return { 'all': {'hosts': []}, '_meta': { 'hostvars': {} } } + return {'all': {'hosts': []}, '_meta': {'hostvars': {}}} + def get_inventory(enterprise, config): ''' Reads the inventory from cache or Abiquo api ''' @@ -197,11 +202,12 @@ else: default_group = os.path.basename(sys.argv[0]).rstrip('.py') # MAKE ABIQUO API CALLS # - inv = generate_inv_from_api(enterprise,config) + inv = generate_inv_from_api(enterprise, config) save_cache(inv, config) return json.dumps(inv) + if __name__ == '__main__': inventory = {} enterprise = {} @@ -214,8 +220,8 @@ break try: - login = api_get(None,config) - enterprise = next(link for link in (login['links']) if (link['rel']=='enterprise')) + login = api_get(None, config) + enterprise = next(link for link in login['links'] if link['rel'] == 'enterprise') except Exception as e: enterprise = None diff -Nru ansible-2.3.2.0/contrib/inventory/apache-libcloud.py ansible-2.4.0.0/contrib/inventory/apache-libcloud.py --- ansible-2.3.2.0/contrib/inventory/apache-libcloud.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/apache-libcloud.py 2017-09-19 17:10:47.000000000 +0000 @@ -82,7 +82,6 @@ print(data_to_print) - def is_cache_valid(self): ''' Determines if the cache files have expired, or if it is still valid ''' @@ -95,7 +94,6 @@ return False - def read_settings(self): ''' Reads the settings from the libcloud.ini file ''' @@ -108,17 +106,17 @@ raise ValueError('libcloud.ini file must contain a [driver] section') if config.has_option('driver', 'provider'): - self.provider = config.get('driver','provider') + self.provider = config.get('driver', 'provider') else: raise ValueError('libcloud.ini does not have a provider defined') if config.has_option('driver', 'key'): - self.key = config.get('driver','key') + self.key = config.get('driver', 'key') else: raise ValueError('libcloud.ini does not have a key defined') if config.has_option('driver', 'secret'): - self.secret = config.get('driver','secret') + self.secret = config.get('driver', 'secret') else: raise ValueError('libcloud.ini does not have a secret defined') @@ -146,7 +144,6 @@ self.cache_path_index = cache_path + "/ansible-libcloud.index" self.cache_max_age = config.getint('cache', 'cache_max_age') - def parse_cli_args(self): ''' Command line argument processing @@ -154,14 +151,13 @@ parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on libcloud supported providers') parser.add_argument('--list', action='store_true', default=True, - help='List instances (default: True)') + help='List instances (default: True)') parser.add_argument('--host', action='store', - help='Get all the variables about a specific instance') + help='Get all the variables about a specific instance') parser.add_argument('--refresh-cache', action='store_true', default=False, - help='Force refresh of cache by making API requests to libcloud supported providers (default: False - use cache files)') + help='Force refresh of cache by making API requests to libcloud supported providers (default: False - use cache files)') self.args = parser.parse_args() - def do_api_calls_update_cache(self): ''' Do API calls to a location, and save data in cache files @@ -172,7 +168,6 @@ self.write_to_cache(self.inventory, 
self.cache_path_cache) self.write_to_cache(self.index, self.cache_path_index) - def get_nodes(self): ''' Gets the list of all nodes @@ -181,7 +176,6 @@ for node in self.conn.list_nodes(): self.add_node(node) - def get_node(self, node_id): ''' Gets details about a specific node @@ -189,7 +183,6 @@ return [node for node in self.conn.list_nodes() if node.id == node_id][0] - def add_node(self, node): ''' Adds a node to the inventory and index, as long as it is @@ -244,10 +237,10 @@ # Need to load index from cache self.load_index_from_cache() - if not self.args.host in self.index: + if self.args.host not in self.index: # try updating the cache self.do_api_calls_update_cache() - if not self.args.host in self.index: + if self.args.host not in self.index: # host migh not exist anymore return self.json_format_dict({}, True) @@ -255,8 +248,7 @@ node = self.get_node(node_id) instance_vars = {} - for key in vars(instance): - value = getattr(instance, key) + for key, value in vars(node).items(): key = self.to_safe('ec2_' + key) # Handle complex types @@ -283,13 +275,12 @@ else: pass # TODO Product codes if someone finds them useful - #print(key) - #print(type(value)) - #print(value) + # print(key) + # print(type(value)) + # print(value) return self.json_format_dict(instance_vars, True) - def push(self, my_dict, key, element): ''' Pushed an element onto an array that may not have been defined in @@ -301,7 +292,6 @@ else: my_dict[key] = [element] - def get_inventory_from_cache(self): ''' Reads the inventory from the cache file and returns it as a JSON @@ -312,7 +302,6 @@ json_inventory = cache.read() return json_inventory - def load_index_from_cache(self): ''' Reads the index from the cache file sets self.index @@ -322,7 +311,6 @@ json_index = cache.read() self.index = json.loads(json_index) - def write_to_cache(self, data, filename): ''' Writes data in JSON format to a file @@ -333,7 +321,6 @@ cache.write(json_data) cache.close() - def to_safe(self, word): ''' Converts 'bad' characters in a string to underscores so they can be @@ -342,7 +329,6 @@ return re.sub("[^A-Za-z0-9\-]", "_", word) - def json_format_dict(self, data, pretty=False): ''' Converts a dict to a JSON object and dumps it as a formatted @@ -354,6 +340,7 @@ else: return json.dumps(data) + def main(): LibcloudInventory() diff -Nru ansible-2.3.2.0/contrib/inventory/apstra_aos.py ansible-2.4.0.0/contrib/inventory/apstra_aos.py --- ansible-2.3.2.0/contrib/inventory/apstra_aos.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/apstra_aos.py 2017-09-19 17:10:47.000000000 +0000 @@ -27,7 +27,7 @@ To use this: - copy this file over /etc/ansible/hosts and chmod +x the file. 
- - Copy both files (.py and .ini) in your prefered directory + - Copy both files (.py and .ini) in your preferred directory More information about Ansible Dynamic Inventory here http://unix.stackexchange.com/questions/205479/in-ansible-dynamic-inventory-json-can-i-render-hostvars-based-on-the-hostname @@ -49,23 +49,25 @@ Author: Damien Garros (@dgarros) Version: 0.2.0 """ +import json import os -import argparse import re +import sys -from ansible.compat.six.moves import configparser +try: + import argparse + HAS_ARGPARSE = True +except ImportError: + HAS_ARGPARSE = False try: from apstra.aosom.session import Session - HAS_AOS_PYEZ = True except ImportError: HAS_AOS_PYEZ = False -try: - import json -except ImportError: - import simplejson as json +from ansible.module_utils.six.moves import configparser + """ ## @@ -278,10 +280,12 @@ } """ + def fail(msg): sys.stderr.write("%s\n" % msg) sys.exit(1) + class AosInventory(object): def __init__(self): @@ -290,6 +294,8 @@ if not HAS_AOS_PYEZ: raise Exception('aos-pyez is not installed. Please see details here: https://github.com/Apstra/aos-pyez') + if not HAS_ARGPARSE: + raise Exception('argparse is not installed. Please install the argparse library or upgrade to python-2.7') # Initialize inventory self.inventory = dict() # A list of groups and the hosts in that group @@ -303,10 +309,10 @@ # ---------------------------------------------------- # Open session to AOS # ---------------------------------------------------- - aos = Session( server=self.aos_server, - port=self.aos_server_port, - user=self.aos_username, - passwd=self.aos_password) + aos = Session(server=self.aos_server, + port=self.aos_server_port, + user=self.aos_username, + passwd=self.aos_password) aos.login() @@ -314,10 +320,10 @@ self.add_var_to_group('all', 'aos_session', aos.session) # Add the AOS server itself in the inventory - self.add_host_to_group("all", 'aos' ) - self.add_var_to_host("aos", "ansible_ssh_host", self.aos_server ) - self.add_var_to_host("aos", "ansible_ssh_pass", self.aos_password ) - self.add_var_to_host("aos", "ansible_ssh_user", self.aos_username ) + self.add_host_to_group("all", 'aos') + self.add_var_to_host("aos", "ansible_ssh_host", self.aos_server) + self.add_var_to_host("aos", "ansible_ssh_pass", self.aos_password) + self.add_var_to_host("aos", "ansible_ssh_user", self.aos_username) # ---------------------------------------------------- # Build the inventory @@ -336,7 +342,7 @@ for dev_name, dev_id in bp.params['devices'].value.items(): self.add_host_to_group('all', dev_name) - device = aos.Devices.find( uid=dev_id) + device = aos.Devices.find(uid=dev_id) if 'facts' in device.value.keys(): self.add_device_facts_to_var(dev_name, device) @@ -344,7 +350,7 @@ # Define admin State and Status if 'user_config' in device.value.keys(): if 'admin_state' in device.value['user_config'].keys(): - self.add_var_to_host(dev_name, 'admin_state', device.value['user_config']['admin_state'] ) + self.add_var_to_host(dev_name, 'admin_state', device.value['user_config']['admin_state']) self.add_device_status_to_var(dev_name, device) @@ -496,7 +502,6 @@ except: pass - def parse_cli_args(self): """ Command line argument processing """ @@ -554,7 +559,7 @@ 'ansible_ssh_host', device.value['facts']['mgmt_ipaddr']) - self.add_var_to_host(device_name,'id', device.id) + self.add_var_to_host(device_name, 'id', device.id) # self.add_host_to_group('all', device.name) for key, value in device.value['facts'].items(): diff -Nru ansible-2.3.2.0/contrib/inventory/azure_rm.py 
ansible-2.4.0.0/contrib/inventory/azure_rm.py --- ansible-2.3.2.0/contrib/inventory/azure_rm.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/azure_rm.py 2017-09-19 17:10:47.000000000 +0000 @@ -49,6 +49,7 @@ - tenant - ad_user - password + - cloud_environment Environment variables: - AZURE_PROFILE @@ -58,6 +59,7 @@ - AZURE_TENANT - AZURE_AD_USER - AZURE_PASSWORD + - AZURE_CLOUD_ENVIRONMENT Run for Specific Host ----------------------- @@ -190,22 +192,27 @@ import os import re import sys +import inspect +import traceback + from packaging.version import Version from os.path import expanduser +import ansible.module_utils.six.moves.urllib.parse as urlparse HAS_AZURE = True HAS_AZURE_EXC = None try: from msrestazure.azure_exceptions import CloudError + from msrestazure import azure_cloud from azure.mgmt.compute import __version__ as azure_compute_version from azure.common import AzureMissingResourceHttpError, AzureHttpError from azure.common.credentials import ServicePrincipalCredentials, UserPassCredentials - from azure.mgmt.network.network_management_client import NetworkManagementClient - from azure.mgmt.resource.resources.resource_management_client import ResourceManagementClient - from azure.mgmt.compute.compute_management_client import ComputeManagementClient + from azure.mgmt.network import NetworkManagementClient + from azure.mgmt.resource.resources import ResourceManagementClient + from azure.mgmt.compute import ComputeManagementClient except ImportError as exc: HAS_AZURE_EXC = exc HAS_AZURE = False @@ -218,7 +225,8 @@ secret='AZURE_SECRET', tenant='AZURE_TENANT', ad_user='AZURE_AD_USER', - password='AZURE_PASSWORD' + password='AZURE_PASSWORD', + cloud_environment='AZURE_CLOUD_ENVIRONMENT', ) AZURE_CONFIG_SETTINGS = dict( @@ -232,7 +240,7 @@ group_by_tag='AZURE_GROUP_BY_TAG' ) -AZURE_MIN_VERSION = "0.30.0rc5" +AZURE_MIN_VERSION = "2.0.0" def azure_id_to_dict(id): @@ -249,6 +257,7 @@ def __init__(self, args): self._args = args + self._cloud_environment = None self._compute_client = None self._resource_client = None self._network_client = None @@ -262,6 +271,26 @@ self.fail("Failed to get credentials. 
Either pass as parameters, set environment variables, " "or define a profile in ~/.azure/credentials.") + # if cloud_environment specified, look up/build Cloud object + raw_cloud_env = self.credentials.get('cloud_environment') + if not raw_cloud_env: + self._cloud_environment = azure_cloud.AZURE_PUBLIC_CLOUD # SDK default + else: + # try to look up "well-known" values via the name attribute on azure_cloud members + all_clouds = [x[1] for x in inspect.getmembers(azure_cloud) if isinstance(x[1], azure_cloud.Cloud)] + matched_clouds = [x for x in all_clouds if x.name == raw_cloud_env] + if len(matched_clouds) == 1: + self._cloud_environment = matched_clouds[0] + elif len(matched_clouds) > 1: + self.fail("Azure SDK failure: more than one cloud matched for cloud_environment name '{0}'".format(raw_cloud_env)) + else: + if not urlparse.urlparse(raw_cloud_env).scheme: + self.fail("cloud_environment must be an endpoint discovery URL or one of {0}".format([x.name for x in all_clouds])) + try: + self._cloud_environment = azure_cloud.get_cloud_from_metadata_endpoint(raw_cloud_env) + except Exception as e: + self.fail("cloud_environment {0} could not be resolved: {1}".format(raw_cloud_env, e.message)) + if self.credentials.get('subscription_id', None) is None: self.fail("Credentials did not include a subscription_id value.") self.log("setting subscription_id") @@ -272,16 +301,23 @@ self.credentials.get('tenant') is not None: self.azure_credentials = ServicePrincipalCredentials(client_id=self.credentials['client_id'], secret=self.credentials['secret'], - tenant=self.credentials['tenant']) + tenant=self.credentials['tenant'], + cloud_environment=self._cloud_environment) elif self.credentials.get('ad_user') is not None and self.credentials.get('password') is not None: - self.azure_credentials = UserPassCredentials(self.credentials['ad_user'], self.credentials['password']) + tenant = self.credentials.get('tenant') + if not tenant: + tenant = 'common' + self.azure_credentials = UserPassCredentials(self.credentials['ad_user'], + self.credentials['password'], + tenant=tenant, + cloud_environment=self._cloud_environment) else: self.fail("Failed to authenticate with provided credentials. Some attributes were missing. 
" "Credentials must include client_id, secret and tenant or ad_user and password.") def log(self, msg): if self.debug: - print (msg + u'\n') + print(msg + u'\n') def fail(self, msg): raise Exception(msg) @@ -341,6 +377,10 @@ self.log('Received credentials from parameters.') return arg_credentials + if arg_credentials['ad_user'] is not None: + self.log('Received credentials from parameters.') + return arg_credentials + # try environment env_credentials = self._get_env_credentials() if env_credentials: @@ -372,7 +412,12 @@ def network_client(self): self.log('Getting network client') if not self._network_client: - self._network_client = NetworkManagementClient(self.azure_credentials, self.subscription_id) + self._network_client = NetworkManagementClient( + self.azure_credentials, + self.subscription_id, + base_url=self._cloud_environment.endpoints.resource_manager, + api_version='2017-06-01' + ) self._register('Microsoft.Network') return self._network_client @@ -380,14 +425,24 @@ def rm_client(self): self.log('Getting resource manager client') if not self._resource_client: - self._resource_client = ResourceManagementClient(self.azure_credentials, self.subscription_id) + self._resource_client = ResourceManagementClient( + self.azure_credentials, + self.subscription_id, + base_url=self._cloud_environment.endpoints.resource_manager, + api_version='2017-05-10' + ) return self._resource_client @property def compute_client(self): self.log('Getting compute client') if not self._compute_client: - self._compute_client = ComputeManagementClient(self.azure_credentials, self.subscription_id) + self._compute_client = ComputeManagementClient( + self.azure_credentials, + self.subscription_id, + base_url=self._cloud_environment.endpoints.resource_manager, + api_version='2017-03-30' + ) self._register('Microsoft.Compute') return self._compute_client @@ -440,7 +495,7 @@ self.include_powerstate = False self.get_inventory() - print (self._json_format_dict(pretty=self._args.pretty)) + print(self._json_format_dict(pretty=self._args.pretty)) sys.exit(0) def _parse_cli_args(self): @@ -448,13 +503,13 @@ parser = argparse.ArgumentParser( description='Produce an Ansible Inventory file for an Azure subscription') parser.add_argument('--list', action='store_true', default=True, - help='List instances (default: True)') + help='List instances (default: True)') parser.add_argument('--debug', action='store_true', default=False, - help='Send debug messages to STDOUT') + help='Send debug messages to STDOUT') parser.add_argument('--host', action='store', - help='Get all information about an instance') + help='Get all information about an instance') parser.add_argument('--pretty', action='store_true', default=False, - help='Pretty print JSON output(default: False)') + help='Pretty print JSON output(default: False)') parser.add_argument('--profile', action='store', help='Azure profile contained in ~/.azure/credentials') parser.add_argument('--subscription_id', action='store', @@ -465,10 +520,12 @@ help='Azure Client Secret') parser.add_argument('--tenant', action='store', help='Azure Tenant Id') - parser.add_argument('--ad-user', action='store', + parser.add_argument('--ad_user', action='store', help='Active Directory User') parser.add_argument('--password', action='store', help='password') + parser.add_argument('--cloud_environment', action='store', + help='Azure Cloud Environment name or metadata discovery URL') parser.add_argument('--resource-groups', action='store', help='Return inventory for comma separated list of resource 
group names') parser.add_argument('--tags', action='store', @@ -486,8 +543,7 @@ try: virtual_machines = self._compute_client.virtual_machines.list(resource_group) except Exception as exc: - sys.exit("Error: fetching virtual machines for resource group {0} - {1}".format(resource_group, - str(exc))) + sys.exit("Error: fetching virtual machines for resource group {0} - {1}".format(resource_group, str(exc))) if self._args.host or self.tags: selected_machines = self._selected_machines(virtual_machines) self._load_machines(selected_machines) @@ -510,7 +566,7 @@ for machine in machines: id_dict = azure_id_to_dict(machine.id) - #TODO - The API is returning an ID value containing resource group name in ALL CAPS. If/when it gets + # TODO - The API is returning an ID value containing resource group name in ALL CAPS. If/when it gets # fixed, we should remove the .lower(). Opened Issue # #574: https://github.com/Azure/azure-sdk-for-python/issues/574 resource_group = id_dict['resourceGroups'].lower() @@ -538,7 +594,7 @@ mac_address=None, plan=(machine.plan.name if machine.plan else None), virtual_machine_size=machine.hardware_profile.vm_size, - computer_name=machine.os_profile.computer_name, + computer_name=(machine.os_profile.computer_name if machine.os_profile else None), provisioning_state=machine.provisioning_state, ) @@ -559,7 +615,7 @@ ) # Add windows details - if machine.os_profile.windows_configuration is not None: + if machine.os_profile is not None and machine.os_profile.windows_configuration is not None: host_vars['windows_auto_updates_enabled'] = \ machine.os_profile.windows_configuration.enable_automatic_updates host_vars['windows_timezone'] = machine.os_profile.windows_configuration.time_zone @@ -790,13 +846,10 @@ def main(): if not HAS_AZURE: - sys.exit("The Azure python sdk is not installed (try `pip install 'azure>=2.0.0rc5' --upgrade`) - {0}".format(HAS_AZURE_EXC)) - - if Version(azure_compute_version) < Version(AZURE_MIN_VERSION): - sys.exit("Expecting azure.mgmt.compute.__version__ to be {0}. Found version {1} " - "Do you have Azure >= 2.0.0rc5 installed? (try `pip install 'azure>=2.0.0rc5' --upgrade`)".format(AZURE_MIN_VERSION, azure_compute_version)) + sys.exit("The Azure python sdk is not installed (try `pip install 'azure>={0}' --upgrade`) - {1}".format(AZURE_MIN_VERSION, HAS_AZURE_EXC)) AzureInventory() + if __name__ == '__main__': main() diff -Nru ansible-2.3.2.0/contrib/inventory/brook.py ansible-2.4.0.0/contrib/inventory/brook.py --- ansible-2.3.2.0/contrib/inventory/brook.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/brook.py 2017-09-19 17:10:47.000000000 +0000 @@ -32,11 +32,13 @@ The following variables are established for every host. They can be retrieved from the hostvars dictionary. + - brook_pid: str - brook_name: str - brook_description: str - brook_project: str - brook_template: str - brook_region: str + - brook_zone: str - brook_status: str - brook_tags: list(str) - brook_internal_ips: list(str) @@ -70,7 +72,7 @@ This script is tested on Python 2.7 and 3.4. It may work on other versions though. 
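The brook_-prefixed variables listed above are the instance's raw API fields re-exported under a common prefix and published through the inventory's _meta/hostvars section, the same output shape the other contrib scripts in this diff use. A minimal sketch of that namespacing pattern, assuming an illustrative instance dict rather than the actual Brook.io payload:

    import json

    def namespace_hostvars(instance, prefix='brook_'):
        """Return a copy of the raw API fields with each key prefixed."""
        return {prefix + key: value for key, value in instance.items()}

    # Skeleton of the JSON an inventory script emits for --list.
    inventory = {'all': {'hosts': []}, '_meta': {'hostvars': {}}}

    # Illustrative instance data only; not the real Brook.io API response.
    instance = {'pid': 'i-123', 'name': 'web-1', 'region': 'eu-west', 'status': 'running'}
    inventory['all']['hosts'].append(instance['name'])
    inventory['_meta']['hostvars'][instance['name']] = namespace_hostvars(instance)

    print(json.dumps(inventory, indent=2))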
Author: Francisco Ros -Version: 0.1 +Version: 0.2 """ @@ -122,9 +124,8 @@ self.project_id = config.get('brook', 'project_id') if not self.api_token: - print('You must provide (at least) your Brook.io API token to generate the dynamic ' - 'inventory.') - sys.exit(1) + sys.exit('You must provide (at least) your Brook.io API token to generate the dynamic ' + 'inventory.') def get_api_client(self): """Authenticate user via the provided credentials and return the corresponding API client. @@ -168,8 +169,8 @@ for project_id in projects: project = projects_api.show_project(project_id=project_id) for instance in instances_api.index_instances(project_id=project_id): - # Get template used for this instance - template = templates_api.show_template(template_id=instance.template) + # Get template used for this instance if known + template = templates_api.show_template(template_id=instance.template) if instance.template else None # Update hostvars try: @@ -212,11 +213,13 @@ """ hostvars = instance.to_dict() + hostvars['brook_pid'] = hostvars.pop('pid') hostvars['brook_name'] = hostvars.pop('name') hostvars['brook_description'] = hostvars.pop('description') hostvars['brook_project'] = hostvars.pop('project') hostvars['brook_template'] = hostvars.pop('template') hostvars['brook_region'] = hostvars.pop('region') + hostvars['brook_zone'] = hostvars.pop('zone') hostvars['brook_created_at'] = hostvars.pop('created_at') hostvars['brook_updated_at'] = hostvars.pop('updated_at') del hostvars['id'] @@ -227,7 +230,7 @@ # Substitute identifiers for names # hostvars['brook_project'] = project.name - hostvars['brook_template'] = template.name + hostvars['brook_template'] = template.name if template else None # Retrieve instance state # diff -Nru ansible-2.3.2.0/contrib/inventory/cobbler.py ansible-2.4.0.0/contrib/inventory/cobbler.py --- ansible-2.3.2.0/contrib/inventory/cobbler.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/cobbler.py 2017-09-19 17:10:47.000000000 +0000 @@ -110,9 +110,9 @@ if self.args.host: data_to_print += self.get_host_info() else: - self.inventory['_meta'] = { 'hostvars': {} } + self.inventory['_meta'] = {'hostvars': {}} for hostname in self.cache: - self.inventory['_meta']['hostvars'][hostname] = {'cobbler': self.cache[hostname] } + self.inventory['_meta']['hostvars'][hostname] = {'cobbler': self.cache[hostname]} data_to_print += self.json_format_dict(self.inventory, True) print(data_to_print) @@ -179,7 +179,7 @@ for host in data: # Get the FQDN for the host and add it to the right groups - dns_name = host['hostname'] #None + dns_name = host['hostname'] # None ksmeta = None interfaces = host['interfaces'] # hostname is often empty for non-static IP hosts @@ -190,7 +190,7 @@ if this_dns_name is not None and this_dns_name is not "": dns_name = this_dns_name - if dns_name == '': + if dns_name == '' or dns_name is None: continue status = host['status'] @@ -229,11 +229,11 @@ # Need to load index from cache self.load_cache_from_cache() - if not self.args.host in self.cache: + if self.args.host not in self.cache: # try updating the cache self.update_cache() - if not self.args.host in self.cache: + if self.args.host not in self.cache: # host might not exist anymore return self.json_format_dict({}, True) diff -Nru ansible-2.3.2.0/contrib/inventory/collins.py ansible-2.4.0.0/contrib/inventory/collins.py --- ansible-2.3.2.0/contrib/inventory/collins.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/collins.py 2017-09-19 17:10:47.000000000 +0000 @@ 
-74,7 +74,6 @@ import sys from time import time import traceback -import urllib try: import json @@ -82,9 +81,11 @@ import simplejson as json from six import iteritems +from six.moves.urllib.parse import urlencode from ansible.module_utils.urls import open_url + class CollinsDefaults(object): ASSETS_API_ENDPOINT = '%s/api/assets' SPECIAL_ATTRIBUTES = set([ @@ -117,7 +118,7 @@ self.parse_cli_args() logging.basicConfig(format=CollinsDefaults.LOG_FORMAT, - filename=self.log_location) + filename=self.log_location) self.log = logging.getLogger('CollinsInventory') def _asset_get_attribute(self, asset, attrib): @@ -168,14 +169,13 @@ print(data_to_print) return successful - def find_assets(self, attributes = {}, operation = 'AND'): + def find_assets(self, attributes={}, operation='AND'): """ Obtains Collins assets matching the provided attributes. """ # Formats asset search query to locate assets matching attributes, using # the CQL search feature as described here: # http://tumblr.github.io/collins/recipes.html - attributes_query = [ '='.join(attr_pair) - for attr_pair in iteritems(attributes) ] + attributes_query = ['='.join(attr_pair) for attr_pair in iteritems(attributes)] query_parameters = { 'details': ['True'], 'operation': [operation], @@ -190,19 +190,18 @@ # Locates all assets matching the provided query, exhausting pagination. while True: if num_retries == self.collins_max_retries: - raise MaxRetriesError("Maximum of %s retries reached; giving up" % \ - self.collins_max_retries) + raise MaxRetriesError("Maximum of %s retries reached; giving up" % self.collins_max_retries) query_parameters['page'] = cur_page query_url = "%s?%s" % ( (CollinsDefaults.ASSETS_API_ENDPOINT % self.collins_host), - urllib.urlencode(query_parameters, doseq=True) + urlencode(query_parameters, doseq=True) ) try: response = open_url(query_url, - timeout=self.collins_timeout_secs, - url_username=self.collins_username, - url_password=self.collins_password, - force_basic_auth=True) + timeout=self.collins_timeout_secs, + url_username=self.collins_username, + url_password=self.collins_password, + force_basic_auth=True) json_response = json.loads(response.read()) # Adds any assets found to the array of assets. 
assets += json_response['data']['Data'] @@ -212,8 +211,7 @@ cur_page += 1 num_retries = 0 except: - self.log.error("Error while communicating with Collins, retrying:\n%s", - traceback.format_exc()) + self.log.error("Error while communicating with Collins, retrying:\n%s" % traceback.format_exc()) num_retries += 1 return assets @@ -232,19 +230,15 @@ def read_settings(self): """ Reads the settings from the collins.ini file """ - config_loc = os.getenv('COLLINS_CONFIG', - os.path.dirname(os.path.realpath(__file__)) + '/collins.ini') + config_loc = os.getenv('COLLINS_CONFIG', os.path.dirname(os.path.realpath(__file__)) + '/collins.ini') config = ConfigParser.SafeConfigParser() config.read(os.path.dirname(os.path.realpath(__file__)) + '/collins.ini') self.collins_host = config.get('collins', 'host') - self.collins_username = os.getenv('COLLINS_USERNAME', - config.get('collins', 'username')) - self.collins_password = os.getenv('COLLINS_PASSWORD', - config.get('collins', 'password')) - self.collins_asset_type = os.getenv('COLLINS_ASSET_TYPE', - config.get('collins', 'asset_type')) + self.collins_username = os.getenv('COLLINS_USERNAME', config.get('collins', 'username')) + self.collins_password = os.getenv('COLLINS_PASSWORD', config.get('collins', 'password')) + self.collins_asset_type = os.getenv('COLLINS_ASSET_TYPE', config.get('collins', 'asset_type')) self.collins_timeout_secs = config.getint('collins', 'timeout_secs') self.collins_max_retries = config.getint('collins', 'max_retries') @@ -268,16 +262,12 @@ parser = argparse.ArgumentParser( description='Produces an Ansible Inventory file based on Collins') - parser.add_argument('--list', - action='store_true', default=True, help='List instances (default: True)') - parser.add_argument('--host', - action='store', help='Get all the variables about a specific instance') - parser.add_argument('--refresh-cache', - action='store_true', default=False, - help='Force refresh of cache by making API requests to Collins ' \ - '(default: False - use cache files)') - parser.add_argument('--pretty', - action='store_true', default=False, help='Pretty print all JSON output') + parser.add_argument('--list', action='store_true', default=True, help='List instances (default: True)') + parser.add_argument('--host', action='store', help='Get all the variables about a specific instance') + parser.add_argument('--refresh-cache', action='store_true', default=False, + help='Force refresh of cache by making API requests to Collins ' + '(default: False - use cache files)') + parser.add_argument('--pretty', action='store_true', default=False, help='Pretty print all JSON output') self.args = parser.parse_args() def update_cache(self): @@ -290,8 +280,7 @@ try: server_assets = self.find_assets() except: - self.log.error("Error while locating assets from Collins:\n%s", - traceback.format_exc()) + self.log.error("Error while locating assets from Collins:\n%s" % traceback.format_exc()) return False for asset in server_assets: @@ -315,8 +304,7 @@ if self.prefer_hostnames and self._asset_has_attribute(asset, 'HOSTNAME'): asset_identifier = self._asset_get_attribute(asset, 'HOSTNAME') elif 'ADDRESSES' not in asset: - self.log.warning("No IP addresses found for asset '%s', skipping", - asset) + self.log.warning("No IP addresses found for asset '%s', skipping" % asset) continue elif len(asset['ADDRESSES']) < ip_index + 1: self.log.warning( @@ -384,11 +372,11 @@ # Need to load index from cache self.load_cache_from_cache() - if not self.args.host in self.cache: + if self.args.host not in 
self.cache: # try updating the cache self.update_cache() - if not self.args.host in self.cache: + if self.args.host not in self.cache: # host might not exist anymore return self.json_format_dict({}, self.args.pretty) @@ -404,7 +392,7 @@ return True except: self.log.error("Error while loading inventory:\n%s", - traceback.format_exc()) + traceback.format_exc()) self.inventory = {} return False @@ -418,7 +406,7 @@ return True except: self.log.error("Error while loading host cache:\n%s", - traceback.format_exc()) + traceback.format_exc()) self.cache = {} return False diff -Nru ansible-2.3.2.0/contrib/inventory/consul.ini ansible-2.4.0.0/contrib/inventory/consul.ini --- ansible-2.3.2.0/contrib/inventory/consul.ini 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/consul.ini 1970-01-01 00:00:00.000000000 +0000 @@ -1,37 +0,0 @@ -# Ansible Consul external inventory script settings. - -[consul] - -# restrict included nodes to those from this datacenter -#datacenter = nyc1 - -# url of the the consul cluster to query -#url = http://demo.consul.io -url = http://localhost:8500 - -# suffix added to each service to create a group name e.g Service of 'redis' and -# a suffix of '_servers' will add each address to the group name 'redis_servers' -servers_suffix = _servers - -# if specified then the inventory will generate domain names that will resolve -# via Consul's inbuilt DNS. -#domain=consul - -# make groups from service tags. the name of the group is derived from the -# service name and the tag name e.g. a service named nginx with tags ['master', 'v1'] -# will create groups nginx_master and nginx_v1 -tags = true - -# looks up the node name at the given path for a list of groups to which the -# node should be added. -kv_groups=ansible/groups - -# looks up the node name at the given path for a json dictionary of metadata that -# should be attached as metadata for the node -kv_metadata=ansible/metadata - -# looks up the health of each service and adds the node to 'up' and 'down' groups -# based on the service availibility -availability = true -available_suffix = _up -unavailable_suffix = _down diff -Nru ansible-2.3.2.0/contrib/inventory/consul_io.ini ansible-2.4.0.0/contrib/inventory/consul_io.ini --- ansible-2.3.2.0/contrib/inventory/consul_io.ini 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/consul_io.ini 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,37 @@ +# Ansible Consul external inventory script settings. + +[consul] + +# restrict included nodes to those from this datacenter +#datacenter = nyc1 + +# url of the consul cluster to query +#url = http://demo.consul.io +url = http://localhost:8500 + +# suffix added to each service to create a group name e.g Service of 'redis' and +# a suffix of '_servers' will add each address to the group name 'redis_servers' +servers_suffix = _servers + +# if specified then the inventory will generate domain names that will resolve +# via Consul's inbuilt DNS. +#domain=consul + +# make groups from service tags. the name of the group is derived from the +# service name and the tag name e.g. a service named nginx with tags ['master', 'v1'] +# will create groups nginx_master and nginx_v1 +tags = true + +# looks up the node name at the given path for a list of groups to which the +# node should be added. 
+kv_groups=ansible/groups + +# looks up the node name at the given path for a json dictionary of metadata that +# should be attached as metadata for the node +kv_metadata=ansible/metadata + +# looks up the health of each service and adds the node to 'up' and 'down' groups +# based on the service availibility +availability = true +available_suffix = _up +unavailable_suffix = _down diff -Nru ansible-2.3.2.0/contrib/inventory/consul_io.py ansible-2.4.0.0/contrib/inventory/consul_io.py --- ansible-2.3.2.0/contrib/inventory/consul_io.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/consul_io.py 2017-09-19 17:10:47.000000000 +0000 @@ -37,7 +37,7 @@ --datacenter, to restrict the nodes to a single datacenter --host to restrict the inventory to a single named node. (requires datacenter config) -The configuration for this plugin is read from a consul.ini file located in the +The configuration for this plugin is read from a consul_io.ini file located in the same directory as this inventory script. All config options in the config file are optional except the host and port, which must point to a valid agent or server running the http api. For more information on enabling the endpoint see. @@ -133,7 +133,11 @@ import re import argparse import sys -import ConfigParser + +try: + import configparser +except ImportError: + import ConfigParser as configparser def get_log_filename(): @@ -239,7 +243,6 @@ self.current_dc = datacenter self.load_data_for_datacenter(datacenter) - def load_availability_groups(self, node, datacenter): '''check the health of each service on a node and add add the node to either an 'available' or 'unavailable' grouping. The suffix for each group can be @@ -257,8 +260,7 @@ suffix = self.config.get_availability_suffix( 'unavailable_suffix', '_unavailable') self.add_node_to_map(self.nodes_by_availability, - service_name + suffix, node['Node']) - + service_name + suffix, node['Node']) def load_data_for_datacenter(self, datacenter): '''processes all the nodes in a particular datacenter''' @@ -286,7 +288,7 @@ def load_node_metadata_from_kv(self, node_data): ''' load the json dict at the metadata path defined by the kv_metadata value - and the node name add each entry in the dictionary to the the node's + and the node name add each entry in the dictionary to the node's metadata ''' node = node_data['Node'] if self.config.has_config('kv_metadata'): @@ -295,7 +297,7 @@ if metadata and metadata['Value']: try: metadata = json.loads(metadata['Value']) - for k,v in metadata.items(): + for k, v in metadata.items(): self.add_metadata(node_data, k, v) except: pass @@ -337,19 +339,19 @@ tags = service['Tags'] self.add_metadata(node_data, "consul_%s_tags" % service_name, tags) for tag in service['Tags']: - tagname = service_name +'_'+tag + tagname = service_name + '_' + tag self.add_node_to_map(self.nodes_by_tag, tagname, node_data['Node']) def combine_all_results(self): '''prunes and sorts all groupings for combination into the final map''' - self.inventory = {"_meta": { "hostvars" : self.node_metadata}} + self.inventory = {"_meta": {"hostvars": self.node_metadata}} groupings = [self.nodes, self.nodes_by_datacenter, self.nodes_by_service, - self.nodes_by_tag, self.nodes_by_kv, self.nodes_by_availability] + self.nodes_by_tag, self.nodes_by_kv, self.nodes_by_availability] for grouping in groupings: for name, addresses in grouping.items(): self.inventory[name] = sorted(list(set(addresses))) - def add_metadata(self, node_data, key, value, is_list = False): + def 
add_metadata(self, node_data, key, value, is_list=False): ''' Pushed an element onto a metadata dict for the node, creating the dict if it doesn't exist ''' key = self.to_safe(key) @@ -371,16 +373,15 @@ if domain: node_name = node_data['Node'] if self.current_dc: - return '%s.node.%s.%s' % ( node_name, self.current_dc, domain) + return '%s.node.%s.%s' % (node_name, self.current_dc, domain) else: - return '%s.node.%s' % ( node_name, domain) + return '%s.node.%s' % (node_name, domain) else: return node_data['Address'] def add_node_to_map(self, map, name, node): self.push(map, name, self.get_inventory_name(node)) - def push(self, my_dict, key, element): ''' Pushed an element onto an array that may not have been defined in the dict ''' @@ -423,9 +424,12 @@ return False def read_settings(self): - ''' Reads the settings from the consul.ini file ''' - config = ConfigParser.SafeConfigParser() - config.read(os.path.dirname(os.path.realpath(__file__)) + '/consul.ini') + ''' Reads the settings from the consul_io.ini file (or consul.ini for backwards compatibility)''' + config = configparser.SafeConfigParser() + if os.path.isfile(os.path.dirname(os.path.realpath(__file__)) + '/consul_io.ini'): + config.read(os.path.dirname(os.path.realpath(__file__)) + '/consul_io.ini') + else: + config.read(os.path.dirname(os.path.realpath(__file__)) + '/consul.ini') config_options = ['host', 'token', 'datacenter', 'servers_suffix', 'tags', 'kv_metadata', 'kv_groups', 'availability', @@ -439,16 +443,15 @@ def read_cli_args(self): ''' Command line argument processing ''' - parser = argparse.ArgumentParser(description= - 'Produce an Ansible Inventory file based nodes in a Consul cluster') + parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based nodes in a Consul cluster') parser.add_argument('--list', action='store_true', - help='Get all inventory variables from all nodes in the consul cluster') + help='Get all inventory variables from all nodes in the consul cluster') parser.add_argument('--host', action='store', - help='Get all inventory variables about a specific consul node, \ - requires datacenter set in consul.ini.') + help='Get all inventory variables about a specific consul node,' + 'requires datacenter set in consul.ini.') parser.add_argument('--datacenter', action='store', - help='Get all inventory about a specific consul datacenter') + help='Get all inventory about a specific consul datacenter') args = parser.parse_args() arg_names = ['host', 'datacenter'] @@ -462,16 +465,18 @@ return self.has_config(suffix) return default - def get_consul_api(self): '''get an instance of the api based on the supplied configuration''' host = 'localhost' - port = 8500 + port = 8500 token = None scheme = 'http' if hasattr(self, 'url'): - from urlparse import urlparse + try: + from urlparse import urlparse + except ImportError: + from urllib.parse import urlparse o = urlparse(self.url) if o.hostname: host = o.hostname diff -Nru ansible-2.3.2.0/contrib/inventory/digital_ocean.py ansible-2.4.0.0/contrib/inventory/digital_ocean.py --- ansible-2.3.2.0/contrib/inventory/digital_ocean.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/digital_ocean.py 2017-09-19 17:10:47.000000000 +0000 @@ -22,7 +22,7 @@ ---- Configuration is read from `digital_ocean.ini`, then from environment variables, -then and command-line arguments. +and then from command-line arguments. Most notably, the DigitalOcean API Token must be specified. 
It can be specified in the INI file or with the following environment variables: @@ -153,8 +153,7 @@ try: from dopy.manager import DoManager except ImportError as e: - sys.exit("failed=True msg='`dopy` library required for this script'") - + sys.exit("failed=True msg={}".format(e.message)) class DigitalOceanInventory(object): @@ -167,8 +166,8 @@ ''' Main execution path ''' # DigitalOceanInventory data - self.data = {} # All DigitalOcean data - self.inventory = {} # Ansible Inventory + self.data = {} # All DigitalOcean data + self.inventory = {} # Ansible Inventory # Define defaults self.cache_path = '.' @@ -197,7 +196,7 @@ self.cache_filename = self.cache_path + "/ansible-digital_ocean.cache" self.cache_refreshed = False - if self.is_cache_valid: + if self.is_cache_valid(): self.load_from_cache() if len(self.data) == 0: if self.args.force_cache: @@ -244,7 +243,6 @@ print(json.dumps(json_data)) # That's all she wrote... - ########################################################################### # Script configuration ########################################################################### @@ -280,7 +278,6 @@ if os.getenv("DO_API_KEY"): self.api_token = os.getenv("DO_API_KEY") - def read_cli_args(self): ''' Command line argument processing ''' parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on DigitalOcean credentials') @@ -289,23 +286,23 @@ parser.add_argument('--host', action='store', help='Get all Ansible inventory variables about a specific Droplet') parser.add_argument('--all', action='store_true', help='List all DigitalOcean information as JSON') - parser.add_argument('--droplets','-d', action='store_true', help='List Droplets as JSON') + parser.add_argument('--droplets', '-d', action='store_true', help='List Droplets as JSON') parser.add_argument('--regions', action='store_true', help='List Regions as JSON') parser.add_argument('--images', action='store_true', help='List Images as JSON') parser.add_argument('--sizes', action='store_true', help='List Sizes as JSON') parser.add_argument('--ssh-keys', action='store_true', help='List SSH keys as JSON') - parser.add_argument('--domains', action='store_true',help='List Domains as JSON') + parser.add_argument('--domains', action='store_true', help='List Domains as JSON') - parser.add_argument('--pretty','-p', action='store_true', help='Pretty-print results') + parser.add_argument('--pretty', '-p', action='store_true', help='Pretty-print results') parser.add_argument('--cache-path', action='store', help='Path to the cache files (default: .)') parser.add_argument('--cache-max_age', action='store', help='Maximum age of the cached items (default: 0)') parser.add_argument('--force-cache', action='store_true', default=False, help='Only use data from the cache') - parser.add_argument('--refresh-cache','-r', action='store_true', default=False, + parser.add_argument('--refresh-cache', '-r', action='store_true', default=False, help='Force refresh of cache by making API requests to DigitalOcean (default: False - use cache files)') - parser.add_argument('--env','-e', action='store_true', help='Display DO_API_TOKEN') - parser.add_argument('--api-token','-a', action='store', help='DigitalOcean API Token') + parser.add_argument('--env', '-e', action='store_true', help='Display DO_API_TOKEN') + parser.add_argument('--api-token', '-a', action='store', help='DigitalOcean API Token') self.args = parser.parse_args() @@ -319,20 +316,19 @@ not self.args.all and not self.args.host): self.args.list = True - 
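One of the fixes in this hunk replaces "if self.is_cache_valid:" with "if self.is_cache_valid():". Without the parentheses the condition tests the bound method object itself, which is always truthy, so the stale-cache branch could never be skipped. A small standalone illustration of the difference, using a stand-in class rather than the actual inventory object:

    class Cache(object):
        def is_cache_valid(self):
            return False  # pretend the cache file is stale

    c = Cache()

    # The bound method object is truthy regardless of what it would return.
    print(bool(c.is_cache_valid))    # True  -> stale cache would still be used
    # Calling the method gives the intended answer.
    print(c.is_cache_valid())        # False -> cache gets refreshed instead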
########################################################################### # Data Management ########################################################################### def load_from_digital_ocean(self, resource=None): '''Get JSON from DigitalOcean API''' - if self.args.force_cache: + if self.args.force_cache and os.path.isfile(self.cache_filename): return # We always get fresh droplets - if self.is_cache_valid() and not (resource=='droplets' or resource is None): + if self.is_cache_valid() and not (resource == 'droplets' or resource is None): return if self.args.refresh_cache: - resource=None + resource = None if resource == 'droplets' or resource is None: self.data['droplets'] = self.manager.all_active_droplets() @@ -353,24 +349,23 @@ self.data['domains'] = self.manager.all_domains() self.cache_refreshed = True - def build_inventory(self): '''Build Ansible inventory of droplets''' self.inventory = { 'all': { 'hosts': [], 'vars': self.group_variables - }, + }, '_meta': {'hostvars': {}} - } + } # add all droplets by id and name for droplet in self.data['droplets']: - #when using private_networking, the API reports the private one in "ip_address". + # when using private_networking, the API reports the private one in "ip_address". if 'private_networking' in droplet['features'] and not self.use_private_network: for net in droplet['networks']['v4']: - if net['type']=='public': - dest=net['ip_address'] + if net['type'] == 'public': + dest = net['ip_address'] else: continue else: @@ -388,7 +383,7 @@ 'distro_' + self.to_safe(droplet['image']['distribution']), 'status_' + droplet['status']): if group not in self.inventory: - self.inventory[group] = { 'hosts': [ ], 'vars': {} } + self.inventory[group] = {'hosts': [], 'vars': {}} self.inventory[group]['hosts'].append(dest) # groups that are not always present @@ -397,20 +392,19 @@ if group: image = 'image_' + self.to_safe(group) if image not in self.inventory: - self.inventory[image] = { 'hosts': [ ], 'vars': {} } + self.inventory[image] = {'hosts': [], 'vars': {}} self.inventory[image]['hosts'].append(dest) if droplet['tags']: for tag in droplet['tags']: if tag not in self.inventory: - self.inventory[tag] = { 'hosts': [ ], 'vars': {} } + self.inventory[tag] = {'hosts': [], 'vars': {}} self.inventory[tag]['hosts'].append(dest) # hostvars info = self.do_namespace(droplet) self.inventory['_meta']['hostvars'][dest] = info - def load_droplet_variables_for_host(self): '''Generate a JSON response to a --host call''' host = int(self.args.host) @@ -418,8 +412,6 @@ info = self.do_namespace(droplet) return {'droplet': info} - - ########################################################################### # Cache Management ########################################################################### @@ -433,7 +425,6 @@ return True return False - def load_from_cache(self): ''' Reads the data from the cache file and assigns it to member variables as Python Objects''' try: @@ -447,17 +438,15 @@ self.data = data['data'] self.inventory = data['inventory'] - def write_to_cache(self): ''' Writes data in JSON format to a file ''' - data = { 'data': self.data, 'inventory': self.inventory } + data = {'data': self.data, 'inventory': self.inventory} json_data = json.dumps(data, sort_keys=True, indent=2) cache = open(self.cache_filename, 'w') cache.write(json_data) cache.close() - ########################################################################### # Utilities ########################################################################### @@ -469,7 +458,6 @@ else: 
my_dict[key] = [element] - def to_safe(self, word): ''' Converts 'bad' characters in a string to underscores so they can be used as Ansible groups ''' return re.sub("[^A-Za-z0-9\-\.]", "_", word) @@ -478,11 +466,10 @@ ''' Returns a copy of the dictionary with all the keys put in a 'do_' namespace ''' info = {} for k, v in data.items(): - info['do_'+k] = v + info['do_' + k] = v return info - ########################################################################### # Run the script DigitalOceanInventory() diff -Nru ansible-2.3.2.0/contrib/inventory/docker.py ansible-2.4.0.0/contrib/inventory/docker.py --- ansible-2.3.2.0/contrib/inventory/docker.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/docker.py 2017-09-19 17:10:47.000000000 +0000 @@ -371,7 +371,6 @@ HAS_DOCKER_ERROR = False try: - from docker import Client from docker.errors import APIError, TLSParameterError from docker.tls import TLSConfig from docker.constants import DEFAULT_TIMEOUT_SECONDS, DEFAULT_DOCKER_API_VERSION @@ -379,6 +378,19 @@ HAS_DOCKER_ERROR = str(exc) HAS_DOCKER_PY = False +# Client has recently been split into DockerClient and APIClient +try: + from docker import Client +except ImportError as exc: + try: + from docker import APIClient as Client + except ImportError as exc: + HAS_DOCKER_ERROR = str(exc) + HAS_DOCKER_PY = False + + class Client: + pass + DEFAULT_DOCKER_HOST = 'unix://var/run/docker.sock' DEFAULT_TLS = False DEFAULT_TLS_VERIFY = False @@ -779,6 +791,10 @@ if config_path: try: config_file = os.path.abspath(config_path) + # default config path is docker.yml in same directory as this script + # old behaviour is docker.yml in current directory. Handle both. + if not os.path.exists(config_file): + config_file = os.path.abspath(os.path.basename(config_path)) except: config_file = None @@ -813,30 +829,30 @@ # Parse command line arguments basename = os.path.splitext(os.path.basename(__file__))[0] - default_config = basename + '.yml' + default_config = os.path.join(os.path.dirname(__file__), basename + '.yml') parser = argparse.ArgumentParser( description='Return Ansible inventory for one or more Docker hosts.') parser.add_argument('--list', action='store_true', default=True, - help='List all containers (default: True)') + help='List all containers (default: True)') parser.add_argument('--debug', action='store_true', default=False, - help='Send debug messages to STDOUT') + help='Send debug messages to STDOUT') parser.add_argument('--host', action='store', help='Only get information for a specific container.') parser.add_argument('--pretty', action='store_true', default=False, - help='Pretty print JSON output(default: False)') + help='Pretty print JSON output(default: False)') parser.add_argument('--config-file', action='store', default=default_config, help="Name of the config file to use. Default is %s" % (default_config)) parser.add_argument('--docker-host', action='store', default=None, help="The base url or Unix sock path to connect to the docker daemon. Defaults to %s" - % (DEFAULT_DOCKER_HOST)) + % (DEFAULT_DOCKER_HOST)) parser.add_argument('--tls-hostname', action='store', default='localhost', help="Host name to expect in TLS certs. Defaults to 'localhost'") parser.add_argument('--api-version', action='store', default=None, help="Docker daemon API version. Defaults to %s" % (DEFAULT_DOCKER_API_VERSION)) parser.add_argument('--timeout', action='store', default=None, help="Docker connection timeout in seconds. 
Defaults to %s" - % (DEFAULT_TIMEOUT_SECONDS)) + % (DEFAULT_TIMEOUT_SECONDS)) parser.add_argument('--cacert-path', action='store', default=None, help="Path to the TLS certificate authority pem file.") parser.add_argument('--cert-path', action='store', default=None, diff -Nru ansible-2.3.2.0/contrib/inventory/ec2.ini ansible-2.4.0.0/contrib/inventory/ec2.ini --- ansible-2.3.2.0/contrib/inventory/ec2.ini 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/ec2.ini 2017-09-19 17:10:47.000000000 +0000 @@ -10,8 +10,9 @@ # AWS regions to make calls to. Set this to 'all' to make request to all regions # in AWS and merge the results together. Alternatively, set this to a comma -# separated list of regions. E.g. 'us-east-1, us-west-1, us-west-2' -# 'auto' is AWS_REGION or AWS_DEFAULT_REGION environment variable. +# separated list of regions. E.g. 'us-east-1,us-west-1,us-west-2' and do not +# provide the 'regions_exclude' option. If this is set to 'auto', AWS_REGION or +# AWS_DEFAULT_REGION environment variable will be read to determine the region. regions = all regions_exclude = us-gov-west-1, cn-north-1 @@ -134,6 +135,7 @@ group_by_ami_id = True group_by_instance_type = True group_by_instance_state = False +group_by_platform = True group_by_key_pair = True group_by_vpc_id = True group_by_security_group = True @@ -179,6 +181,11 @@ # (ex. webservers15, webservers1a, webservers123 etc) # instance_filters = tag:Name=webservers1* +# An IAM role can be assumed, so all requests are run as that role. +# This can be useful for connecting across different accounts, or to limit user +# access +# iam_role = role-arn + # A boto configuration profile may be used to separate out credentials # see http://boto.readthedocs.org/en/latest/boto_config_tut.html # boto_profile = some-boto-profile-name diff -Nru ansible-2.3.2.0/contrib/inventory/ec2.py ansible-2.4.0.0/contrib/inventory/ec2.py --- ansible-2.3.2.0/contrib/inventory/ec2.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/ec2.py 2017-09-19 17:10:47.000000000 +0000 @@ -12,7 +12,7 @@ export AWS_ACCESS_KEY_ID='AK123' export AWS_SECRET_ACCESS_KEY='abc123' -optional region environement variable if region is 'auto' +optional region environment variable if region is 'auto' This script also assumes there is an ec2.ini file alongside it. 
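The new iam_role option documented above lets the script run its EC2 queries under an assumed role, for example to reach a second AWS account. A condensed sketch of that assumption with boto 2's STS API, using the same calls the script relies on; the region and role ARN here are placeholders:

    from boto import ec2, sts

    region = 'us-east-1'                                   # placeholder region
    role_arn = 'arn:aws:iam::123456789012:role/inventory'  # placeholder role ARN

    # Ask STS for temporary credentials for the role.
    sts_conn = sts.connect_to_region(region)
    role = sts_conn.assume_role(role_arn, 'ansible_dynamic_inventory')

    # Use the temporary credentials for the actual EC2 queries.
    conn = ec2.connect_to_region(
        region,
        aws_access_key_id=role.credentials.access_key,
        aws_secret_access_key=role.credentials.secret_key,
        security_token=role.credentials.session_token,
    )
    print(len(conn.get_all_instances()))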
To specify a different path to ec2.ini, define the EC2_INI_PATH environment variable: @@ -132,6 +132,7 @@ from boto import rds from boto import elasticache from boto import route53 +from boto import sts import six from ansible.module_utils import ec2 as ec2_utils @@ -155,7 +156,7 @@ class Ec2Inventory(object): def _empty_inventory(self): - return {"_meta" : {"hostvars" : {}}} + return {"_meta": {"hostvars": {}}} def __init__(self): ''' Main execution path ''' @@ -204,7 +205,6 @@ print(data_to_print) - def is_cache_valid(self): ''' Determines if the cache files have expired, or if it is still valid ''' @@ -217,7 +217,6 @@ return False - def read_settings(self): ''' Reads the settings from the ec2.ini file ''' @@ -225,8 +224,10 @@ scriptbasename = os.path.basename(scriptbasename) scriptbasename = scriptbasename.replace('.py', '') - defaults = {'ec2': { - 'ini_path': os.path.join(os.path.dirname(__file__), '%s.ini' % scriptbasename) + defaults = { + 'ec2': { + 'ini_fallback': os.path.join(os.path.dirname(__file__), 'ec2.ini'), + 'ini_path': os.path.join(os.path.dirname(__file__), '%s.ini' % scriptbasename) } } @@ -236,6 +237,10 @@ config = configparser.SafeConfigParser() ec2_ini_path = os.environ.get('EC2_INI_PATH', defaults['ec2']['ini_path']) ec2_ini_path = os.path.expanduser(os.path.expandvars(ec2_ini_path)) + + if not os.path.isfile(ec2_ini_path): + ec2_ini_path = os.path.expanduser(defaults['ec2']['ini_fallback']) + config.read(ec2_ini_path) # is eucalyptus? @@ -249,11 +254,11 @@ # Regions self.regions = [] configRegions = config.get('ec2', 'regions') - configRegions_exclude = config.get('ec2', 'regions_exclude') if (configRegions == 'all'): if self.eucalyptus_host: self.regions.append(boto.connect_euca(host=self.eucalyptus_host).region.name, **self.credentials) else: + configRegions_exclude = config.get('ec2', 'regions_exclude') for regionInfo in ec2.regions(): if regionInfo.name not in configRegions_exclude: self.regions.append(regionInfo.name) @@ -263,7 +268,7 @@ env_region = os.environ.get('AWS_REGION') if env_region is None: env_region = os.environ.get('AWS_DEFAULT_REGION') - self.regions = [ env_region ] + self.regions = [env_region] # Destination addresses self.destination_variable = config.get('ec2', 'destination_variable') @@ -421,6 +426,12 @@ else: self.replace_dash_in_groups = True + # IAM role to assume for connection + if config.has_option('ec2', 'iam_role'): + self.iam_role = config.get('ec2', 'iam_role') + else: + self.iam_role = None + # Configure which groups should be created. 
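The new iam_role handling reads a role ARN from ec2.ini and, at connection time, swaps in temporary STS credentials before talking to EC2. A minimal stand-alone sketch of that flow, assuming boto 2 is installed; the region and role ARN below are placeholders, not values taken from the patch:

    from boto import ec2, sts

    region = 'us-east-1'                                    # placeholder
    role_arn = 'arn:aws:iam::123456789012:role/inventory'   # placeholder

    # Assume the role, then hand its temporary credentials to the regional
    # EC2 connection - the same three fields ec2.py copies into connect_args.
    sts_conn = sts.connect_to_region(region)
    role = sts_conn.assume_role(role_arn, 'ansible_dynamic_inventory')
    conn = ec2.connect_to_region(
        region,
        aws_access_key_id=role.credentials.access_key,
        aws_secret_access_key=role.credentials.secret_key,
        security_token=role.credentials.session_token)
    reservations = conn.get_all_instances()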
group_by_options = [ 'group_by_instance_id', @@ -429,6 +440,7 @@ 'group_by_ami_id', 'group_by_instance_type', 'group_by_instance_state', + 'group_by_platform', 'group_by_key_pair', 'group_by_vpc_id', 'group_by_security_group', @@ -495,16 +507,15 @@ parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on EC2') parser.add_argument('--list', action='store_true', default=True, - help='List instances (default: True)') + help='List instances (default: True)') parser.add_argument('--host', action='store', - help='Get all the variables about a specific instance') + help='Get all the variables about a specific instance') parser.add_argument('--refresh-cache', action='store_true', default=False, - help='Force refresh of cache by making API requests to EC2 (default: False - use cache files)') + help='Force refresh of cache by making API requests to EC2 (default: False - use cache files)') parser.add_argument('--profile', '--boto-profile', action='store', dest='boto_profile', - help='Use boto profile for connections to EC2') + help='Use boto profile for connections to EC2') self.args = parser.parse_args() - def do_api_calls_update_cache(self): ''' Do API calls to each region, and save data in cache files ''' @@ -548,6 +559,13 @@ connect_args['profile_name'] = self.boto_profile self.boto_fix_security_token_in_profile(connect_args) + if self.iam_role: + sts_conn = sts.connect_to_region(region, **connect_args) + role = sts_conn.assume_role(self.iam_role, 'ansible_dynamic_inventory') + connect_args['aws_access_key_id'] = role.credentials.access_key + connect_args['aws_secret_access_key'] = role.credentials.secret_key + connect_args['security_token'] = role.credentials.session_token + conn = module.connect_to_region(region, **connect_args) # connect_to_region will fail "silently" by returning None if the region name is wrong or not supported if conn is None: @@ -566,10 +584,10 @@ filters_dict = {} for filter_key, filter_values in self.ec2_instance_filters.items(): filters_dict[filter_key] = filter_values - reservations.extend(conn.get_all_instances(filters = filters_dict)) + reservations.extend(conn.get_all_instances(filters=filters_dict)) else: for filter_key, filter_values in self.ec2_instance_filters.items(): - reservations.extend(conn.get_all_instances(filters = { filter_key : filter_values })) + reservations.extend(conn.get_all_instances(filters={filter_key: filter_values})) else: reservations = conn.get_all_instances() @@ -583,7 +601,7 @@ max_filter_value = 199 tags = [] for i in range(0, len(instance_ids), max_filter_value): - tags.extend(conn.get_all_tags(filters={'resource-type': 'instance', 'resource-id': instance_ids[i:i+max_filter_value]})) + tags.extend(conn.get_all_tags(filters={'resource-type': 'instance', 'resource-id': instance_ids[i:i + max_filter_value]})) tags_by_instance_id = defaultdict(dict) for tag in tags: @@ -609,6 +627,13 @@ ''' Makes an AWS API call to the list of RDS instances in a particular region ''' + if not HAS_BOTO3: + self.fail_with_error("Working with RDS instances requires boto3 - please install boto3 and try again", + "getting RDS instances") + + client = ec2_utils.boto3_inventory_conn('client', 'rds', region, **self.credentials) + db_instances = client.describe_db_instances() + try: conn = self.connect_to_aws(rds, region) if conn: @@ -616,7 +641,14 @@ while True: instances = conn.get_all_dbinstances(marker=marker) marker = instances.marker - for instance in instances: + for index, instance in enumerate(instances): + # Add tags to 
instances. + instance.arn = db_instances['DBInstances'][index]['DBInstanceArn'] + tags = client.list_tags_for_resource(ResourceName=instance.arn)['TagList'] + instance.tags = {} + for tag in tags: + instance.tags[tag['Key']] = tag['Value'] + self.add_rds_instance(instance, region) if not marker: break @@ -625,7 +657,11 @@ if e.error_code == 'AuthFailure': error = self.get_auth_error_message() - if not e.reason == "Forbidden": + elif e.error_code == "OptInRequired": + error = "RDS hasn't been enabled for this account yet. " \ + "You must either log in to the RDS service through the AWS console to enable it, " \ + "or set 'rds = False' in ec2.ini" + elif not e.reason == "Forbidden": error = "Looks like AWS RDS is down:\n%s" % e.message self.fail_with_error(error, 'getting RDS instances') @@ -692,7 +728,7 @@ ''' Makes an AWS API call to the list of ElastiCache clusters (with nodes' info) in a particular region.''' - # ElastiCache boto module doesn't provide a get_all_intances method, + # ElastiCache boto module doesn't provide a get_all_instances method, # that's why we need to call describe directly (it would be called by # the shorthand method anyway...) try: @@ -707,7 +743,11 @@ if e.error_code == 'AuthFailure': error = self.get_auth_error_message() - if not e.reason == "Forbidden": + elif e.error_code == "OptInRequired": + error = "ElastiCache hasn't been enabled for this account yet. " \ + "You must either log in to the ElastiCache service through the AWS console to enable it, " \ + "or set 'elasticache = False' in ec2.ini" + elif not e.reason == "Forbidden": error = "Looks like AWS ElastiCache is down:\n%s" % e.message self.fail_with_error(error, 'getting ElastiCache clusters') @@ -728,7 +768,7 @@ ''' Makes an AWS API call to the list of ElastiCache replication groups in a particular region.''' - # ElastiCache boto module doesn't provide a get_all_intances method, + # ElastiCache boto module doesn't provide a get_all_instances method, # that's why we need to call describe directly (it would be called by # the shorthand method anyway...) 
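The RDS hunk above starts requiring boto3 so that each RDS instance can carry its tags. A rough stand-alone equivalent of that tag lookup, using boto3 directly rather than the module_utils connection helper; the region is a placeholder:

    import boto3

    client = boto3.client('rds', region_name='us-east-1')  # placeholder region
    for db in client.describe_db_instances()['DBInstances']:
        arn = db['DBInstanceArn']
        tag_list = client.list_tags_for_resource(ResourceName=arn)['TagList']
        # Same reshaping the inventory script performs: TagList -> plain dict.
        tags = dict((tag['Key'], tag['Value']) for tag in tag_list)
        print(db['DBInstanceIdentifier'], tags)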
try: @@ -767,7 +807,7 @@ errors.append(' - AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY environment vars found but may not be correct') boto_paths = ['/etc/boto.cfg', '~/.boto', '~/.aws/credentials'] - boto_config_found = list(p for p in boto_paths if os.path.isfile(os.path.expanduser(p))) + boto_config_found = [p for p in boto_paths if os.path.isfile(os.path.expanduser(p))] if len(boto_config_found) > 0: errors.append(" - Boto configs found at '%s', but the credentials contained may not be correct" % ', '.join(boto_config_found)) else: @@ -801,7 +841,7 @@ # Select the best destination address if self.destination_format and self.destination_format_tags: - dest = self.destination_format.format(*[ getattr(instance, 'tags').get(tag, '') for tag in self.destination_format_tags ]) + dest = self.destination_format.format(*[getattr(instance, 'tags').get(tag, '') for tag in self.destination_format_tags]) elif instance.subnet_id: dest = getattr(instance, self.vpc_destination_variable, None) if dest is None: @@ -891,6 +931,16 @@ if self.nested_groups: self.push_group(self.inventory, 'instance_states', state_name) + # Inventory: Group by platform + if self.group_by_platform: + if instance.platform: + platform = self.to_safe('platform_' + instance.platform) + else: + platform = self.to_safe('platform_undefined') + self.push(self.inventory, platform, hostname) + if self.nested_groups: + self.push_group(self.inventory, 'platforms', platform) + # Inventory: Group by key pair if self.group_by_key_pair and instance.key_name: key_name = self.to_safe('key_' + instance.key_name) @@ -915,7 +965,7 @@ self.push_group(self.inventory, 'security_groups', key) except AttributeError: self.fail_with_error('\n'.join(['Package boto seems a bit older.', - 'Please upgrade boto >= 2.3.0.'])) + 'Please upgrade boto >= 2.3.0.'])) # Inventory: Group by AWS account ID if self.group_by_aws_account: @@ -960,8 +1010,7 @@ self.push(self.inventory, 'ec2', hostname) self.inventory["_meta"]["hostvars"][hostname] = self.get_host_info_dict_from_instance(instance) - self.inventory["_meta"]["hostvars"][hostname]['ansible_ssh_host'] = dest - + self.inventory["_meta"]["hostvars"][hostname]['ansible_host'] = dest def add_rds_instance(self, instance, region): ''' Adds an RDS instance to the inventory and index, as long as it is @@ -1040,8 +1089,25 @@ except AttributeError: self.fail_with_error('\n'.join(['Package boto seems a bit older.', - 'Please upgrade boto >= 2.3.0.'])) + 'Please upgrade boto >= 2.3.0.'])) + # Inventory: Group by tag keys + if self.group_by_tag_keys: + for k, v in instance.tags.items(): + if self.expand_csv_tags and v and ',' in v: + values = map(lambda x: x.strip(), v.split(',')) + else: + values = [v] + for v in values: + if v: + key = self.to_safe("tag_" + k + "=" + v) + else: + key = self.to_safe("tag_" + k) + self.push(self.inventory, key, hostname) + if self.nested_groups: + self.push_group(self.inventory, 'tags', self.to_safe("tag_" + k)) + if v: + self.push_group(self.inventory, self.to_safe("tag_" + k), key) # Inventory: Group by engine if self.group_by_rds_engine: @@ -1055,11 +1121,17 @@ if self.nested_groups: self.push_group(self.inventory, 'rds_parameter_groups', self.to_safe("rds_parameter_group_" + instance.parameter_group.name)) + # Global Tag: instances without tags + if self.group_by_tag_none and len(instance.tags) == 0: + self.push(self.inventory, 'tag_none', hostname) + if self.nested_groups: + self.push_group(self.inventory, 'tags', 'tag_none') + # Global Tag: all RDS instances 
self.push(self.inventory, 'rds', hostname) self.inventory["_meta"]["hostvars"][hostname] = self.get_host_info_dict_from_instance(instance) - self.inventory["_meta"]["hostvars"][hostname]['ansible_ssh_host'] = dest + self.inventory["_meta"]["hostvars"][hostname]['ansible_host'] = dest def add_elasticache_cluster(self, cluster, region): ''' Adds an ElastiCache cluster to the inventory and index, as long as @@ -1310,8 +1382,7 @@ r53_conn = route53.Route53Connection() all_zones = r53_conn.get_zones() - route53_zones = [ zone for zone in all_zones if zone.name[:-1] - not in self.route53_excluded_zones ] + route53_zones = [zone for zone in all_zones if zone.name[:-1] not in self.route53_excluded_zones] self.route53_records = {} @@ -1328,14 +1399,13 @@ self.route53_records.setdefault(resource, set()) self.route53_records[resource].add(record_name) - def get_instance_route53_names(self, instance): ''' Check if an instance is referenced in the records we have from Route53. If it is, return the list of domain names pointing to said instance. If nothing points to it, return an empty list. ''' - instance_attributes = [ 'public_dns_name', 'private_dns_name', - 'ip_address', 'private_ip_address' ] + instance_attributes = ['public_dns_name', 'private_dns_name', + 'ip_address', 'private_ip_address'] name_list = set() @@ -1364,7 +1434,7 @@ elif key == 'ec2__previous_state': instance_vars['ec2_previous_state'] = instance.previous_state or '' instance_vars['ec2_previous_state_code'] = instance.previous_state_code - elif type(value) in [int, bool]: + elif isinstance(value, (int, bool)): instance_vars[key] = value elif isinstance(value, six.string_types): instance_vars[key] = value.strip() @@ -1391,13 +1461,13 @@ elif key == 'ec2_block_device_mapping': instance_vars["ec2_block_devices"] = {} for k, v in value.items(): - instance_vars["ec2_block_devices"][ os.path.basename(k) ] = v.volume_id + instance_vars["ec2_block_devices"][os.path.basename(k)] = v.volume_id else: pass # TODO Product codes if someone finds them useful - #print key - #print type(value) - #print value + # print key + # print type(value) + # print value instance_vars[self.to_safe('ec2_account_id')] = self.aws_account_id @@ -1441,9 +1511,9 @@ host_info['ec2_primary_cluster_port'] = node['ReadEndpoint']['Port'] host_info['ec2_primary_cluster_id'] = node['CacheClusterId'] elif node['CurrentRole'] == 'replica': - host_info['ec2_replica_cluster_address_'+ str(replica_count)] = node['ReadEndpoint']['Address'] - host_info['ec2_replica_cluster_port_'+ str(replica_count)] = node['ReadEndpoint']['Port'] - host_info['ec2_replica_cluster_id_'+ str(replica_count)] = node['CacheClusterId'] + host_info['ec2_replica_cluster_address_' + str(replica_count)] = node['ReadEndpoint']['Address'] + host_info['ec2_replica_cluster_port_' + str(replica_count)] = node['ReadEndpoint']['Port'] + host_info['ec2_replica_cluster_id_' + str(replica_count)] = node['CacheClusterId'] replica_count += 1 # Target: Redis Replication Groups @@ -1469,7 +1539,7 @@ # Target: Everything # Preserve booleans and integers - elif type(value) in [int, bool]: + elif isinstance(value, (int, bool)): host_info[key] = value # Target: Everything @@ -1495,10 +1565,10 @@ # Need to load index from cache self.load_index_from_cache() - if not self.args.host in self.index: + if self.args.host not in self.index: # try updating the cache self.do_api_calls_update_cache() - if not self.args.host in self.index: + if self.args.host not in self.index: # host might not exist anymore return 
self.json_format_dict({}, True) diff -Nru ansible-2.3.2.0/contrib/inventory/fleet.py ansible-2.4.0.0/contrib/inventory/fleet.py --- ansible-2.3.2.0/contrib/inventory/fleet.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/fleet.py 2017-09-19 17:10:47.000000000 +0000 @@ -35,7 +35,7 @@ import simplejson as json # Options -#------------------------------ +# ------------------------------ parser = OptionParser(usage="%prog [options] --list | --host ") parser.add_option('--list', default=False, dest="list", action="store_true", @@ -48,6 +48,7 @@ # helper functions # + def get_ssh_config(): configs = [] for box in list_running_boxes(): @@ -55,7 +56,8 @@ configs.append(config) return configs -#list all the running instances in the fleet + +# list all the running instances in the fleet def list_running_boxes(): boxes = [] for line in subprocess.check_output(["fleetctl", "list-machines"]).split('\n'): @@ -65,6 +67,7 @@ return boxes + def get_a_ssh_config(box_name): config = {} config['Host'] = box_name @@ -72,11 +75,12 @@ config['ansible_python_interpreter'] = '/opt/bin/python' return config + # List out servers that vagrant has running -#------------------------------ +# ------------------------------ if options.list: ssh_config = get_ssh_config() - hosts = { 'coreos': []} + hosts = {'coreos': []} for data in ssh_config: hosts['coreos'].append(data['Host']) @@ -85,14 +89,14 @@ sys.exit(1) # Get out the host details -#------------------------------ +# ------------------------------ elif options.host: result = {} ssh_config = get_ssh_config() details = filter(lambda x: (x['Host'] == options.host), ssh_config) if len(details) > 0: - #pass through the port, in case it's non standard. + # pass through the port, in case it's non standard. result = details[0] print(json.dumps(result)) @@ -100,7 +104,7 @@ # Print out help -#------------------------------ +# ------------------------------ else: parser.print_help() sys.exit(1) diff -Nru ansible-2.3.2.0/contrib/inventory/foreman.ini ansible-2.4.0.0/contrib/inventory/foreman.ini --- ansible-2.3.2.0/contrib/inventory/foreman.ini 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/foreman.ini 2017-09-19 17:10:47.000000000 +0000 @@ -68,6 +68,21 @@ # # foreman_hostgroup_myapp_webtier_datacenter1 # +# If the parameter want_hostcollections is set to true, the +# collections each host is in are created as Ansible groups with a +# foreman_hostcollection prefix, all lowercase and problematic +# parameters removed. So e.g. the Foreman host collection +# +# Patch Window Thursday +# +# would turn into the Ansible group: +# +# foreman_hostcollection_patchwindowthursday +# +# If the parameter host_filters is set, it will be used as the +# "search" parameter for the /api/v2/hosts call. This can be used to +# restrict the list of returned host, as shown below. +# # Furthermore Ansible groups can be created on the fly using the # *group_patterns* variable in *foreman.ini* so that you can build up # hierarchies using parameters on the hostgroup and host variables. @@ -108,15 +123,35 @@ password = secret ssl_verify = True +# Retrieve only hosts from the organization "Web Engineering". +# host_filters = organization="Web Engineering" + +# Retrieve only hosts from the organization "Web Engineering" that are +# also in the host collection "Apache Servers". 
+# host_filters = organization="Web Engineering" and host_collection="Apache Servers" + [ansible] group_patterns = ["{app}-{tier}-{color}", "{app}-{color}", "{app}", "{tier}"] group_prefix = foreman_ + # Whether to fetch facts from Foreman and store them on the host want_facts = True +# Whether to create Ansible groups for host collections. Only tested +# with Katello (Red Hat Satellite). Disabled by default to not break +# the script for stand-alone Foreman. +want_hostcollections = False + +# Whether to interpret global parameters value as JSON (if possible, else +# take as is). Only tested with Katello (Red Hat Satellite). +# This allows to define lists and dictionaries (and more complicated structures) +# variables by entering them as JSON string in Foreman parameters. +# Disabled by default as the change would else not be backward compatible. +rich_params = False + [cache] path = . max_age = 60 diff -Nru ansible-2.3.2.0/contrib/inventory/foreman.py ansible-2.4.0.0/contrib/inventory/foreman.py --- ansible-2.3.2.0/contrib/inventory/foreman.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/foreman.py 2017-09-19 17:10:47.000000000 +0000 @@ -46,6 +46,7 @@ from requests.auth import HTTPBasicAuth + def json_format_dict(data, pretty=False): """Converts a dict to a JSON object and dumps it as a formatted string""" @@ -54,6 +55,7 @@ else: return json.dumps(data) + class ForemanInventory(object): def __init__(self): @@ -62,6 +64,7 @@ self.params = dict() # Params of each host self.facts = dict() # Facts of each host self.hostgroups = dict() # host groups + self.hostcollections = dict() # host collections self.session = None # Requests session self.config_paths = [ "/etc/ansible/foreman.ini", @@ -105,6 +108,22 @@ except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): self.want_facts = True + try: + self.want_hostcollections = config.getboolean('ansible', 'want_hostcollections') + except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): + self.want_hostcollections = False + + # Do we want parameters to be interpreted if possible as JSON? 
(no by default) + try: + self.rich_params = config.getboolean('ansible', 'rich_params') + except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): + self.rich_params = False + + try: + self.host_filters = config.get('foreman', 'host_filters') + except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): + self.host_filters = None + # Cache related try: cache_path = os.path.expanduser(config.get('cache', 'path')) @@ -115,10 +134,12 @@ self.cache_path_inventory = cache_path + "/%s.index" % script self.cache_path_params = cache_path + "/%s.params" % script self.cache_path_facts = cache_path + "/%s.facts" % script + self.cache_path_hostcollections = cache_path + "/%s.hostcollections" % script try: self.cache_max_age = config.getint('cache', 'max_age') except (ConfigParser.NoOptionError, ConfigParser.NoSectionError): self.cache_max_age = 60 + return True def parse_cli_args(self): @@ -138,12 +159,17 @@ self.session.verify = self.foreman_ssl_verify return self.session - def _get_json(self, url, ignore_errors=None): + def _get_json(self, url, ignore_errors=None, params=None): + if params is None: + params = {} + params['per_page'] = 250 + page = 1 results = [] s = self._get_session() while True: - ret = s.get(url, params={'page': page, 'per_page': 250}) + params['page'] = page + ret = s.get(url, params=params) if ignore_errors and ret.status_code in ignore_errors: break ret.raise_for_status() @@ -156,7 +182,7 @@ return json['results'] # List of all hosts is returned paginaged results = results + json['results'] - if len(results) >= json['total']: + if len(results) >= json['subtotal']: break page += 1 if len(json['results']) == 0: @@ -167,22 +193,35 @@ return results def _get_hosts(self): - return self._get_json("%s/api/v2/hosts" % self.foreman_url) + url = "%s/api/v2/hosts" % self.foreman_url + + params = {} + if self.host_filters: + params['search'] = self.host_filters - def _get_all_params_by_id(self, hid): + return self._get_json(url, params=params) + + def _get_host_data_by_id(self, hid): url = "%s/api/v2/hosts/%s" % (self.foreman_url, hid) - ret = self._get_json(url, [404]) - if ret == []: - ret = {} - return ret.get('all_parameters', {}) + return self._get_json(url) + + def _get_facts_by_id(self, hid): + url = "%s/api/v2/hosts/%s/facts" % (self.foreman_url, hid) + return self._get_json(url) - def _resolve_params(self, host): - """Fetch host params and convert to dict""" + def _resolve_params(self, host_params): + """Convert host params to dict""" params = {} - for param in self._get_all_params_by_id(host['id']): + for param in host_params: name = param['name'] - params[name] = param['value'] + if self.rich_params: + try: + params[name] = json.loads(param['value']) + except ValueError: + params[name] = param['value'] + else: + params[name] = param['value'] return params @@ -216,6 +255,7 @@ self.write_to_cache(self.inventory, self.cache_path_inventory) self.write_to_cache(self.params, self.cache_path_params) self.write_to_cache(self.facts, self.cache_path_facts) + self.write_to_cache(self.hostcollections, self.cache_path_hostcollections) def to_safe(self, word): '''Converts 'bad' characters in a string to underscores @@ -236,6 +276,9 @@ for host in self._get_hosts(): dns_name = host['name'] + host_data = self._get_host_data_by_id(host['id']) + host_params = host_data.get('all_parameters', {}) + # Create ansible groups for hostgroup group = 'hostgroup' val = host.get('%s_title' % group) or host.get('%s_name' % group) @@ -256,16 +299,13 @@ safe_key = self.to_safe('%s%s_%s' % 
(self.group_prefix, group, val.lower())) self.inventory[safe_key].append(dns_name) - params = self._resolve_params(host) + params = self._resolve_params(host_params) # Ansible groups by parameters in host groups and Foreman host # attributes. - groupby = copy.copy(params) - for k, v in host.items(): - if isinstance(v, str): - groupby[k] = self.to_safe(v) - elif isinstance(v, int): - groupby[k] = v + groupby = dict() + for k, v in params.items(): + groupby[k] = self.to_safe(str(v)) # The name of the ansible groups is given by group_patterns: for pattern in self.group_patterns: @@ -275,6 +315,17 @@ except KeyError: pass # Host not part of this group + if self.want_hostcollections: + hostcollections = host_data.get('host_collections') + + if hostcollections: + # Create Ansible groups for host collections + for hostcollection in hostcollections: + safe_key = self.to_safe('%shostcollection_%s' % (self.group_prefix, hostcollection['name'].lower())) + self.inventory[safe_key].append(dns_name) + + self.hostcollections[dns_name] = hostcollections + self.cache[dns_name] = host self.params[dns_name] = params self.facts[dns_name] = self._get_facts(host) @@ -296,31 +347,36 @@ def load_inventory_from_cache(self): """Read the index from the cache file sets self.index""" - cache = open(self.cache_path_inventory, 'r') - json_inventory = cache.read() - self.inventory = json.loads(json_inventory) + with open(self.cache_path_inventory, 'r') as fp: + self.inventory = json.load(fp) def load_params_from_cache(self): """Read the index from the cache file sets self.index""" - cache = open(self.cache_path_params, 'r') - json_params = cache.read() - self.params = json.loads(json_params) + with open(self.cache_path_params, 'r') as fp: + self.params = json.load(fp) def load_facts_from_cache(self): """Read the index from the cache file sets self.facts""" + if not self.want_facts: return - cache = open(self.cache_path_facts, 'r') - json_facts = cache.read() - self.facts = json.loads(json_facts) + with open(self.cache_path_facts, 'r') as fp: + self.facts = json.load(fp) + + def load_hostcollections_from_cache(self): + """Read the index from the cache file sets self.hostcollections""" + + if not self.want_hostcollections: + return + with open(self.cache_path_hostcollections, 'r') as fp: + self.hostcollections = json.load(fp) def load_cache_from_cache(self): """Read the cache from the cache file sets self.cache""" - cache = open(self.cache_path_cache, 'r') - json_cache = cache.read() - self.cache = json.loads(json_cache) + with open(self.cache_path_cache, 'r') as fp: + self.cache = json.load(fp) def get_inventory(self): if self.args.refresh_cache or not self.is_cache_valid(): @@ -329,6 +385,7 @@ self.load_inventory_from_cache() self.load_params_from_cache() self.load_facts_from_cache() + self.load_hostcollections_from_cache() self.load_cache_from_cache() def get_host_info(self): diff -Nru ansible-2.3.2.0/contrib/inventory/freeipa.py ansible-2.4.0.0/contrib/inventory/freeipa.py --- ansible-2.3.2.0/contrib/inventory/freeipa.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/freeipa.py 2017-09-19 17:10:47.000000000 +0000 @@ -1,8 +1,12 @@ #!/usr/bin/env python +# Copyright (c) 2017 Ansible Project +# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) import argparse from ipalib import api import json +from distutils.version import StrictVersion + def initialize(): ''' @@ -16,11 +20,12 @@ try: api.Backend.rpcclient.connect() except AttributeError: - #FreeIPA < 
4.0 compatibility + # FreeIPA < 4.0 compatibility api.Backend.xmlclient.connect() return api + def list_groups(api): ''' This function prints a list of all host groups. This function requires @@ -28,14 +33,18 @@ ''' inventory = {} - hostvars={} - meta={} + hostvars = {} + ipa_version = api.Command.env()['result']['version'] result = api.Command.hostgroup_find()['result'] for hostgroup in result: # Get direct and indirect members (nested hostgroups) of hostgroup members = [] + if StrictVersion(ipa_version) >= StrictVersion('4.0.0'): + hostgroup_name = hostgroup['cn'][0] + hostgroup = api.Command.hostgroup_show(hostgroup_name)['result'] + if 'member_host' in hostgroup: members = [host for host in hostgroup['member_host']] if 'memberindirect_host' in hostgroup: @@ -51,6 +60,7 @@ return None + def parse_args(): ''' This function parses the arguments that were passed in via the command line. @@ -66,6 +76,7 @@ return parser.parse_args() + def print_host(host): ''' This function is really a stub, it could return variables to be used in @@ -79,6 +90,7 @@ return None + if __name__ == '__main__': args = parse_args() diff -Nru ansible-2.3.2.0/contrib/inventory/gce.ini ansible-2.4.0.0/contrib/inventory/gce.ini --- ansible-2.3.2.0/contrib/inventory/gce.ini 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/gce.ini 2017-09-19 17:10:47.000000000 +0000 @@ -37,13 +37,14 @@ # statement in the inventory script. However, you can specify an absolute # path to the secrets.py file with 'libcloud_secrets' parameter. # This option will be deprecated in a future release. -libcloud_secrets = +libcloud_secrets = # If you are not going to use a 'secrets.py' file, you can set the necessary # authorization parameters here. -gce_service_account_email_address = -gce_service_account_pem_file_path = -gce_project_id = +gce_service_account_email_address = +gce_service_account_pem_file_path = +gce_project_id = +gce_zone = # Filter inventory based on on state. Leave undefined to return instances regardless of state. # example: Uncomment to only return inventory in the running or provisioning state diff -Nru ansible-2.3.2.0/contrib/inventory/gce.py ansible-2.4.0.0/contrib/inventory/gce.py --- ansible-2.3.2.0/contrib/inventory/gce.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/gce.py 2017-09-19 17:10:47.000000000 +0000 @@ -40,6 +40,7 @@ - gce_tags - gce_metadata - gce_network + - gce_subnetwork When run in --list mode, instances are grouped by the following categories: - zone: @@ -73,7 +74,6 @@ Version: 0.0.3 ''' -__requires__ = ['pycrypto>=2.6'] try: import pkg_resources except ImportError: @@ -83,8 +83,8 @@ # library is used. pass -USER_AGENT_PRODUCT="Ansible-gce_inventory_plugin" -USER_AGENT_VERSION="v2" +USER_AGENT_PRODUCT = "Ansible-gce_inventory_plugin" +USER_AGENT_VERSION = "v2" import sys import os @@ -92,7 +92,10 @@ from time import time -import ConfigParser +if sys.version_info >= (3, 0): + import configparser +else: + import ConfigParser as configparser import logging logging.getLogger('libcloud.common.google').addHandler(logging.NullHandler()) @@ -213,10 +216,11 @@ # This provides empty defaults to each key, so that environment # variable configuration (as opposed to INI configuration) is able # to work. 
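A condensed view of how the reworked gce.py resolves project and zone: values from gce.ini or secrets.py (placeholders below) can be overridden by the GCE_PROJECT and GCE_ZONE environment variables, and the zone ends up as libcloud's 'datacenter' keyword argument:

    import os

    # Placeholder starting values standing in for gce.ini / secrets.py entries.
    kwargs = {'project': 'example-project', 'datacenter': 'us-central1-a'}
    kwargs['project'] = os.environ.get('GCE_PROJECT', kwargs['project'])
    kwargs['datacenter'] = os.environ.get('GCE_ZONE', kwargs['datacenter'])
    print(kwargs)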
- config = ConfigParser.SafeConfigParser(defaults={ + config = configparser.SafeConfigParser(defaults={ 'gce_service_account_email_address': '', 'gce_service_account_pem_file_path': '', 'gce_project_id': '', + 'gce_zone': '', 'libcloud_secrets': '', 'inventory_ip_type': '', 'cache_path': '~/.ansible/tmp', @@ -270,10 +274,11 @@ # exists. secrets_path = self.config.get('gce', 'libcloud_secrets') secrets_found = False + try: import secrets - args = list(getattr(secrets, 'GCE_PARAMS', [])) - kwargs = getattr(secrets, 'GCE_KEYWORD_PARAMS', {}) + args = list(secrets.GCE_PARAMS) + kwargs = secrets.GCE_KEYWORD_PARAMS secrets_found = True except: pass @@ -291,18 +296,23 @@ secrets_found = True except: pass + if not secrets_found: args = [ - self.config.get('gce','gce_service_account_email_address'), - self.config.get('gce','gce_service_account_pem_file_path') + self.config.get('gce', 'gce_service_account_email_address'), + self.config.get('gce', 'gce_service_account_pem_file_path') ] - kwargs = {'project': self.config.get('gce', 'gce_project_id')} + kwargs = {'project': self.config.get('gce', 'gce_project_id'), + 'datacenter': self.config.get('gce', 'gce_zone')} # If the appropriate environment variables are set, they override # other configuration; process those into our args and kwargs. args[0] = os.environ.get('GCE_EMAIL', args[0]) args[1] = os.environ.get('GCE_PEM_FILE_PATH', args[1]) + args[1] = os.environ.get('GCE_CREDENTIALS_FILE_PATH', args[1]) + kwargs['project'] = os.environ.get('GCE_PROJECT', kwargs['project']) + kwargs['datacenter'] = os.environ.get('GCE_ZONE', kwargs['datacenter']) # Retrieve and return the GCE driver. gce = get_driver(Provider.GCE)(*args, **kwargs) @@ -315,7 +325,7 @@ '''returns a list of comma separated zones parsed from the GCE_ZONE environment variable. 
If provided, this will be used to filter the results of the grouped_instances call''' import csv - reader = csv.reader([os.environ.get('GCE_ZONE',"")], skipinitialspace=True) + reader = csv.reader([os.environ.get('GCE_ZONE', "")], skipinitialspace=True) zones = [r for r in reader] return [z for z in zones[0]] @@ -325,17 +335,16 @@ parser = argparse.ArgumentParser( description='Produce an Ansible Inventory file based on GCE') parser.add_argument('--list', action='store_true', default=True, - help='List instances (default: True)') + help='List instances (default: True)') parser.add_argument('--host', action='store', - help='Get all information about an instance') + help='Get all information about an instance') parser.add_argument('--pretty', action='store_true', default=False, - help='Pretty format (default: False)') + help='Pretty format (default: False)') parser.add_argument( '--refresh-cache', action='store_true', default=False, help='Force refresh of cache by making API requests (default: False - use cache files)') self.args = parser.parse_args() - def node_to_dict(self, inst): md = {} @@ -347,6 +356,9 @@ md[entry['key']] = entry['value'] net = inst.extra['networkInterfaces'][0]['network'].split('/')[-1] + subnet = None + if 'subnetwork' in inst.extra['networkInterfaces'][0]: + subnet = inst.extra['networkInterfaces'][0]['subnetwork'].split('/')[-1] # default to exernal IP unless user has specified they prefer internal if self.ip_type == 'internal': ssh_host = inst.private_ips[0] @@ -367,6 +379,7 @@ 'gce_tags': inst.extra['tags'], 'gce_metadata': md, 'gce_network': net, + 'gce_subnetwork': subnet, # Hosts don't have a public name, so we add an IP 'ansible_ssh_host': ssh_host } @@ -394,7 +407,7 @@ all_nodes = [] params, more_results = {'maxResults': 500}, True while more_results: - self.driver.connection.gce_params=params + self.driver.connection.gce_params = params all_nodes.extend(self.driver.list_nodes()) more_results = 'pageToken' in params return all_nodes @@ -470,6 +483,13 @@ else: groups[stat] = [name] + for private_ip in node.private_ips: + groups[private_ip] = [name] + + if len(node.public_ips) >= 1: + for public_ip in node.public_ips: + groups[public_ip] = [name] + groups["_meta"] = meta return groups diff -Nru ansible-2.3.2.0/contrib/inventory/jail.py ansible-2.4.0.0/contrib/inventory/jail.py --- ansible-2.3.2.0/contrib/inventory/jail.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/jail.py 2017-09-19 17:10:47.000000000 +0000 @@ -17,7 +17,7 @@ # You should have received a copy of the GNU General Public License # along with Ansible. If not, see . -from subprocess import Popen,PIPE +from subprocess import Popen, PIPE import sys import json diff -Nru ansible-2.3.2.0/contrib/inventory/libvirt_lxc.py ansible-2.4.0.0/contrib/inventory/libvirt_lxc.py --- ansible-2.3.2.0/contrib/inventory/libvirt_lxc.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/libvirt_lxc.py 2017-09-19 17:10:47.000000000 +0000 @@ -17,7 +17,7 @@ # You should have received a copy of the GNU General Public License # along with Ansible. If not, see . 
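The node_to_dict() change in the gce.py hunk further up adds a gce_subnetwork hostvar by taking the last path component of the interface's subnetwork URL when one is present. The same extraction in isolation, with an invented dict standing in for a libcloud node's .extra payload:

    extra = {
        'networkInterfaces': [{
            'network': 'projects/example/global/networks/default',
            'subnetwork': 'projects/example/regions/us-central1/subnetworks/web',
        }]
    }
    interface = extra['networkInterfaces'][0]
    net = interface['network'].split('/')[-1]
    subnet = None
    if 'subnetwork' in interface:
        subnet = interface['subnetwork'].split('/')[-1]
    print(net, subnet)   # -> default web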
-from subprocess import Popen,PIPE +from subprocess import Popen, PIPE import sys import json diff -Nru ansible-2.3.2.0/contrib/inventory/linode.py ansible-2.4.0.0/contrib/inventory/linode.py --- ansible-2.3.2.0/contrib/inventory/linode.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/linode.py 2017-09-19 17:10:47.000000000 +0000 @@ -113,11 +113,15 @@ # Imports for ansible import ConfigParser + class LinodeInventory(object): + def _empty_inventory(self): + return {"_meta": {"hostvars": {}}} + def __init__(self): """Main execution path.""" # Inventory grouped by display group - self.inventory = {} + self.inventory = self._empty_inventory() # Index of label to Linode ID self.index = {} # Local cache of Datacenter objects populated by populate_datacenter_cache() @@ -138,7 +142,7 @@ data_to_print = self.get_host_info() elif self.args.list: # Display list of nodes for inventory - if len(self.inventory) == 0: + if len(self.inventory) == 1: data_to_print = self.get_inventory_from_cache() else: data_to_print = self.json_format_dict(self.inventory, True) @@ -171,11 +175,11 @@ """Command line argument processing""" parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on Linode') parser.add_argument('--list', action='store_true', default=True, - help='List nodes (default: True)') + help='List nodes (default: True)') parser.add_argument('--host', action='store', - help='Get all the variables about a specific node') + help='Get all the variables about a specific node') parser.add_argument('--refresh-cache', action='store_true', default=False, - help='Force refresh of cache by making API requests to Linode (default: False - use cache files)') + help='Force refresh of cache by making API requests to Linode (default: False - use cache files)') self.args = parser.parse_args() def do_api_calls_update_cache(self): @@ -231,9 +235,15 @@ # Inventory: Group by datacenter city self.push(self.inventory, self.get_datacenter_city(node), dest) - # Inventory: Group by dipslay group + # Inventory: Group by display group self.push(self.inventory, node.display_group, dest) + # Inventory: Add a "linode" global tag group + self.push(self.inventory, "linode", dest) + + # Add host info to hostvars + self.inventory["_meta"]["hostvars"][dest] = self.get_host_info(node) + def get_node_public_ip(self, node): """Returns a the public IP address of the node""" return [addr.address for addr in node.ipaddresses if addr.is_public][0] @@ -245,16 +255,19 @@ # Need to load index from cache self.load_index_from_cache() - if not self.args.host in self.index: + if self.args.host not in self.index: # try updating the cache self.do_api_calls_update_cache() - if not self.args.host in self.index: + if self.args.host not in self.index: # host might not exist anymore return self.json_format_dict({}, True) node_id = self.index[self.args.host] - node = self.get_node(node_id) + + return self.json_format_dict(self.get_host_info(node), True) + + def get_host_info(self, node): node_vars = {} for direct_attr in [ "api_id", @@ -295,7 +308,7 @@ if private_ips: node_vars["private_ip"] = private_ips[0] - return self.json_format_dict(node_vars, True) + return node_vars def push(self, my_dict, key, element): """Pushed an element onto an array that may not have been defined in the dict.""" diff -Nru ansible-2.3.2.0/contrib/inventory/lxc_inventory.py ansible-2.4.0.0/contrib/inventory/lxc_inventory.py --- ansible-2.3.2.0/contrib/inventory/lxc_inventory.py 2017-08-08 17:08:31.000000000 +0000 +++ 
ansible-2.4.0.0/contrib/inventory/lxc_inventory.py 2017-09-19 17:10:47.000000000 +0000 @@ -35,6 +35,7 @@ import lxc import json + def build_dict(): """Returns a dictionary keyed to the defined LXC groups. All containers, including the ones not in any group, are included in the @@ -51,7 +52,8 @@ # Create a dictionary for each group (including the 'all' group return dict([(g, {'hosts': [k for k, v in containers.items() if g in v], - 'vars': {'ansible_connection':'lxc'}}) for g in groups]) + 'vars': {'ansible_connection': 'lxc'}}) for g in groups]) + def main(argv): """Returns a JSON dictionary as expected by Ansible""" diff -Nru ansible-2.3.2.0/contrib/inventory/lxd.ini ansible-2.4.0.0/contrib/inventory/lxd.ini --- ansible-2.3.2.0/contrib/inventory/lxd.ini 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/lxd.ini 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,13 @@ +# LXD external inventory script settings + +[lxd] + +# The default resource +#resource = local: + +# The group name to add the hosts to +#group = lxd + +# The connection type to return for these hosts - lxd hasn't been tested yet +#connection = lxd +connection = smart diff -Nru ansible-2.3.2.0/contrib/inventory/lxd.py ansible-2.4.0.0/contrib/inventory/lxd.py --- ansible-2.3.2.0/contrib/inventory/lxd.py 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/lxd.py 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,103 @@ +#!/usr/bin/env python + +# (c) 2013, Michael Scherer +# (c) 2014, Hiroaki Nakamura +# (c) 2016, Andew Clarke +# +# This file is based on https://github.com/ansible/ansible/blob/devel/plugins/inventory/libvirt_lxc.py which is part of Ansible, +# and https://github.com/hnakamur/lxc-ansible-playbooks/blob/master/provisioning/inventory-lxc.py +# +# NOTE, this file has some obvious limitations, improvements welcome +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see . 
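After the linode.py changes above, --list carries full host details in an _meta block so that --host lookups no longer need an extra API call per node. Roughly the shape of the output; group names, host name and variable values are invented for illustration:

    import json

    inventory = {
        "linode": ["web01"],        # new global 'linode' group
        "Newark": ["web01"],        # datacenter city group
        "webservers": ["web01"],    # display group
        "_meta": {
            "hostvars": {
                "web01": {
                    "label": "web01",
                    "datacenter_city": "Newark",
                    "public_ip": "203.0.113.10",
                }
            }
        },
    }
    print(json.dumps(inventory, indent=2))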
+ +import os +from subprocess import Popen, PIPE +import distutils.spawn +import sys +import json +try: + import configparser +except: + from six.moves import configparser + +# Set up defaults +resource = 'local:' +group = 'lxd' +connection = 'lxd' +hosts = {} +result = {} + +# Read the settings from the lxd.ini file +config = configparser.SafeConfigParser() +config.read(os.path.dirname(os.path.realpath(__file__)) + '/lxd.ini') +if config.has_option('lxd', 'resource'): + resource = config.get('lxd', 'resource') +if config.has_option('lxd', 'group'): + group = config.get('lxd', 'group') +if config.has_option('lxd', 'connection'): + connection = config.get('lxd', 'connection') + +# Ensure executable exists +if distutils.spawn.find_executable('lxc'): + + # Set up containers result and hosts array + result[group] = {} + result[group]['hosts'] = [] + + # Run the command and load json result + pipe = Popen(['lxc', 'list', resource, '--format', 'json'], stdout=PIPE, universal_newlines=True) + lxdjson = json.load(pipe.stdout) + + # Iterate the json lxd output + for item in lxdjson: + + # Check state and network + if 'state' in item and item['state'] is not None and 'network' in item['state']: + network = item['state']['network'] + + # Check for eth0 and addresses + if 'eth0' in network and 'addresses' in network['eth0']: + addresses = network['eth0']['addresses'] + + # Iterate addresses + for address in addresses: + + # Only return inet family addresses + if 'family' in address and address['family'] == 'inet': + if 'address' in address: + ip = address['address'] + name = item['name'] + + # Add the host to the results and the host array + result[group]['hosts'].append(name) + hosts[name] = ip + + # Set the other containers result values + result[group]['vars'] = {} + result[group]['vars']['ansible_connection'] = connection + +# Process arguments +if len(sys.argv) == 2 and sys.argv[1] == '--list': + print(json.dumps(result)) +elif len(sys.argv) == 3 and sys.argv[1] == '--host': + if sys.argv[2] == 'localhost': + print(json.dumps({'ansible_connection': 'local'})) + else: + if connection == 'lxd': + print(json.dumps({'ansible_connection': connection})) + else: + print(json.dumps({'ansible_connection': connection, 'ansible_host': hosts[sys.argv[2]]})) +else: + print("Need an argument, either --list or --host ") diff -Nru ansible-2.3.2.0/contrib/inventory/mdt_dynamic_inventory.py ansible-2.4.0.0/contrib/inventory/mdt_dynamic_inventory.py --- ansible-2.3.2.0/contrib/inventory/mdt_dynamic_inventory.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/mdt_dynamic_inventory.py 2017-09-19 17:10:47.000000000 +0000 @@ -32,6 +32,7 @@ except ImportError: import ConfigParser as configparser + class MDTInventory(object): def __init__(self): @@ -95,7 +96,7 @@ ''' Create empty inventory dictionary ''' - return {"_meta" : {"hostvars" : {}}} + return {"_meta": {"hostvars": {}}} def read_settings(self): ''' @@ -119,7 +120,6 @@ if config.has_option('tower', 'groupname'): self.mdt_groupname = config.get('tower', 'groupname') - def parse_cli_args(self): ''' Command line argument processing diff -Nru ansible-2.3.2.0/contrib/inventory/nagios_livestatus.ini ansible-2.4.0.0/contrib/inventory/nagios_livestatus.ini --- ansible-2.3.2.0/contrib/inventory/nagios_livestatus.ini 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/nagios_livestatus.ini 2017-09-19 17:10:47.000000000 +0000 @@ -19,7 +19,7 @@ # default field group for host: groups # Uncomment to override: # group_field=state 
-# default fields retrieved: address, alias, display_name, childs, parents +# default fields retrieved: address, alias, display_name, children, parents # To override, uncomment the following line # fields_to_retrieve=address,alias,display_name # diff -Nru ansible-2.3.2.0/contrib/inventory/nagios_livestatus.py ansible-2.4.0.0/contrib/inventory/nagios_livestatus.py --- ansible-2.3.2.0/contrib/inventory/nagios_livestatus.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/nagios_livestatus.py 2017-09-19 17:10:47.000000000 +0000 @@ -47,6 +47,7 @@ print("Error: mk_livestatus is needed. Try something like: pip install python-mk-livestatus") exit(1) + class NagiosLivestatusInventory(object): def parse_ini_file(self): @@ -80,19 +81,19 @@ # Local unix socket unix_match = re.match('unix:(.*)', livestatus_uri) if unix_match is not None: - backend_definition = { 'connection': unix_match.group(1) } + backend_definition = {'connection': unix_match.group(1)} # Remote tcp connection tcp_match = re.match('tcp:(.*):([^:]*)', livestatus_uri) if tcp_match is not None: - backend_definition = { 'connection': (tcp_match.group(1), int(tcp_match.group(2))) } + backend_definition = {'connection': (tcp_match.group(1), int(tcp_match.group(2)))} # No valid livestatus_uri => exiting if backend_definition is None: raise Exception('livestatus_uri field is invalid (%s). Expected: unix:/path/to/live or tcp:host:port' % livestatus_uri) # Updating backend_definition with current value - backend_definition['name'] = section + backend_definition['name'] = section backend_definition['fields'] = fields_to_retrieve for key, value in section_values.items(): backend_definition[key] = value @@ -101,8 +102,8 @@ def parse_options(self): parser = argparse.ArgumentParser() - parser.add_argument('--host', nargs=1) - parser.add_argument('--list', action='store_true') + parser.add_argument('--host', nargs=1) + parser.add_argument('--list', action='store_true') parser.add_argument('--pretty', action='store_true') self.options = parser.parse_args() @@ -113,7 +114,7 @@ if hostname not in self.result[group]['hosts']: self.result[group]['hosts'].append(hostname) - def query_backend(self, backend, host = None): + def query_backend(self, backend, host=None): '''Query a livestatus backend''' hosts_request = Socket(backend['connection']).hosts.columns(backend['host_field'], backend['group_field']) @@ -127,10 +128,10 @@ hosts = hosts_request.call() for host in hosts: - hostname = host[backend['host_field']] + hostname = host[backend['host_field']] hostgroups = host[backend['group_field']] if not isinstance(hostgroups, list): - hostgroups = [ hostgroups ] + hostgroups = [hostgroups] self.add_host(hostname, 'all') self.add_host(hostname, backend['name']) for group in hostgroups: @@ -166,9 +167,9 @@ self.query_backend(backend, self.options.host) if self.options.host: - print(json.dumps(self.result['_meta']['hostvars'][self.options.host[0]], indent = self.json_indent)) + print(json.dumps(self.result['_meta']['hostvars'][self.options.host[0]], indent=self.json_indent)) elif self.options.list: - print(json.dumps(self.result, indent = self.json_indent)) + print(json.dumps(self.result, indent=self.json_indent)) else: print("usage: --list or --host HOSTNAME [--pretty]") exit(1) diff -Nru ansible-2.3.2.0/contrib/inventory/nagios_ndo.py ansible-2.4.0.0/contrib/inventory/nagios_ndo.py --- ansible-2.3.2.0/contrib/inventory/nagios_ndo.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/nagios_ndo.py 2017-09-19 
17:10:47.000000000 +0000 @@ -42,6 +42,7 @@ print("Error: SQLAlchemy is needed. Try something like: pip install sqlalchemy") exit(1) + class NagiosNDOInventory(object): def read_settings(self): diff -Nru ansible-2.3.2.0/contrib/inventory/nova.py ansible-2.4.0.0/contrib/inventory/nova.py --- ansible-2.3.2.0/contrib/inventory/nova.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/nova.py 2017-09-19 17:10:47.000000000 +0000 @@ -142,7 +142,7 @@ key = 'os_' + re.sub(r"[^A-Za-z0-9\-]", "_", key).lower() # Att value to instance result (exclude manager class) - #TODO: maybe use value.__class__ or similar inside of key_name + # TODO: maybe use value.__class__ or similar inside of key_name if key != 'os_manager': results[key] = value return results diff -Nru ansible-2.3.2.0/contrib/inventory/nsot.py ansible-2.4.0.0/contrib/inventory/nsot.py --- ansible-2.3.2.0/contrib/inventory/nsot.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/nsot.py 2017-09-19 17:10:47.000000000 +0000 @@ -150,6 +150,7 @@ from six import string_types + def warning(*objs): print("WARNING: ", *objs, file=sys.stderr) diff -Nru ansible-2.3.2.0/contrib/inventory/openstack.py ansible-2.4.0.0/contrib/inventory/openstack.py --- ansible-2.3.2.0/contrib/inventory/openstack.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/openstack.py 2017-09-19 17:10:47.000000000 +0000 @@ -29,8 +29,11 @@ # - /etc/openstack/clouds.yaml # - /etc/ansible/openstack.yml # The clouds.yaml file can contain entries for multiple clouds and multiple -# regions of those clouds. If it does, this inventory module will connect to -# all of them and present them as one contiguous inventory. +# regions of those clouds. If it does, this inventory module will by default +# connect to all of them and present them as one contiguous inventory. You +# can limit to one cloud by passing the `--cloud` parameter, or use the +# OS_CLOUD environment variable. If caching is enabled, and a cloud is +# selected, then per-cloud cache folders will be used. # # See the adjacent openstack.yml file for an example config file # There are two ansible inventory specific options that can be set in @@ -44,6 +47,9 @@ # has failed (for example, bad credentials or being offline). # When set to False, the inventory will return hosts from # whichever other clouds it can contact. (Default: True) +# +# Also it is possible to pass the correct user by setting an ansible_user: $myuser +# metadata attribute. 
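The comment block above summarizes the behaviour change; in append_hostvars() it amounts to publishing the interface IP as both ansible_ssh_host and ansible_host and copying an ansible_user entry out of the server metadata. A minimal sketch with an invented server record:

    server = {
        'name': 'web1',
        'interface_ip': '198.51.100.7',
        'metadata': {'ansible_user': 'centos'},
    }

    hostvars = {
        'ansible_ssh_host': server['interface_ip'],
        'ansible_host': server['interface_ip'],
        'openstack': server,
    }
    metadata = server.get('metadata', {})
    if 'ansible_user' in metadata:
        hostvars['ansible_user'] = metadata['ansible_user']
    print(hostvars['ansible_user'])   # -> centos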
import argparse import collections @@ -108,8 +114,8 @@ return groups -def get_host_groups(inventory, refresh=False): - (cache_file, cache_expiration_time) = get_cache_settings() +def get_host_groups(inventory, refresh=False, cloud=None): + (cache_file, cache_expiration_time) = get_cache_settings(cloud) if is_cache_stale(cache_file, cache_expiration_time, refresh=refresh): groups = to_json(get_host_groups_from_cloud(inventory)) open(cache_file, 'w').write(groups) @@ -121,7 +127,13 @@ def append_hostvars(hostvars, groups, key, server, namegroup=False): hostvars[key] = dict( ansible_ssh_host=server['interface_ip'], + ansible_host=server['interface_ip'], openstack=server) + + metadata = server.get('metadata', {}) + if 'ansible_user' in metadata: + hostvars[key]['ansible_user'] = metadata['ansible_user'] + for group in get_groups_from_server(server, namegroup=namegroup): groups[group].append(key) @@ -176,12 +188,14 @@ return True -def get_cache_settings(): +def get_cache_settings(cloud=None): config = os_client_config.config.OpenStackConfig( config_files=os_client_config.config.CONFIG_FILES + CONFIG_FILES) # For inventory-wide caching cache_expiration_time = config.get_cache_expiration_time() cache_path = config.get_cache_path() + if cloud: + cache_path = '{0}_{1}'.format(cache_path, cloud) if not os.path.exists(cache_path): os.makedirs(cache_path) cache_file = os.path.join(cache_path, 'ansible-inventory.cache') @@ -194,6 +208,8 @@ def parse_args(): parser = argparse.ArgumentParser(description='OpenStack Inventory Module') + parser.add_argument('--cloud', default=os.environ.get('OS_CLOUD'), + help='Cloud name (default: None') parser.add_argument('--private', action='store_true', help='Use private address for ansible host') @@ -218,6 +234,7 @@ refresh=args.refresh, config_files=config_files, private=args.private, + cloud=args.cloud, ) if hasattr(shade.inventory.OpenStackInventory, 'extra_config'): inventory_args.update(dict( @@ -232,7 +249,7 @@ inventory = shade.inventory.OpenStackInventory(**inventory_args) if args.list: - output = get_host_groups(inventory, refresh=args.refresh) + output = get_host_groups(inventory, refresh=args.refresh, cloud=args.cloud) elif args.host: output = to_json(inventory.get_host(args.host)) print(output) diff -Nru ansible-2.3.2.0/contrib/inventory/openvz.py ansible-2.4.0.0/contrib/inventory/openvz.py --- ansible-2.3.2.0/contrib/inventory/openvz.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/openvz.py 2017-09-19 17:10:47.000000000 +0000 @@ -26,30 +26,31 @@ # Groups are determined by the description field of openvz guests # multiple groups can be separated by commas: webserver,dbserver -from subprocess import Popen,PIPE +from subprocess import Popen, PIPE import sys import json -#List openvz hosts -vzhosts = ['vzhost1','vzhost2','vzhost3'] -#Add openvz hosts to the inventory and Add "_meta" trick +# List openvz hosts +vzhosts = ['vzhost1', 'vzhost2', 'vzhost3'] +# Add openvz hosts to the inventory and Add "_meta" trick inventory = {'vzhosts': {'hosts': vzhosts}, '_meta': {'hostvars': {}}} -#default group, when description not defined +# default group, when description not defined default_group = ['vzguest'] + def get_guests(): - #Loop through vzhosts + # Loop through vzhosts for h in vzhosts: - #SSH to vzhost and get the list of guests in json - pipe = Popen(['ssh', h,'vzlist','-j'], stdout=PIPE, universal_newlines=True) + # SSH to vzhost and get the list of guests in json + pipe = Popen(['ssh', h, 'vzlist', '-j'], stdout=PIPE, 
universal_newlines=True) - #Load Json info of guests + # Load Json info of guests json_data = json.loads(pipe.stdout.read()) - #loop through guests + # loop through guests for j in json_data: - #Add information to host vars + # Add information to host vars inventory['_meta']['hostvars'][j['hostname']] = { 'ctid': j['ctid'], 'veid': j['veid'], @@ -59,13 +60,13 @@ 'ip': j['ip'] } - #determine group from guest description + # determine group from guest description if j['description'] is not None: groups = j['description'].split(",") else: groups = default_group - #add guest to inventory + # add guest to inventory for g in groups: if g not in inventory: inventory[g] = {'hosts': []} diff -Nru ansible-2.3.2.0/contrib/inventory/ovirt4.py ansible-2.4.0.0/contrib/inventory/ovirt4.py --- ansible-2.3.2.0/contrib/inventory/ovirt4.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/ovirt4.py 2017-09-19 17:10:47.000000000 +0000 @@ -182,9 +182,9 @@ (stat.name, stat.values[0].datum) for stat in stats ), 'devices': dict( - (device.name, [ip.address for ip in device.ips]) for device in devices + (device.name, [ip.address for ip in device.ips]) for device in devices if device.ips ), - 'ansible_host': devices[0].ips[0].address if len(devices) > 0 else None, + 'ansible_host': next((device.ips[0].address for device in devices if device.ips), None) } diff -Nru ansible-2.3.2.0/contrib/inventory/ovirt.py ansible-2.4.0.0/contrib/inventory/ovirt.py --- ansible-2.3.2.0/contrib/inventory/ovirt.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/ovirt.py 2017-09-19 17:10:47.000000000 +0000 @@ -230,7 +230,7 @@ """ return [x.get_name() for x in inst.get_tags().list()] - def get_machine_type(self,inst): + def get_machine_type(self, inst): inst_type = inst.get_instance_type() if inst_type: return self.driver.instancetypes.get(id=inst_type.id).name diff -Nru ansible-2.3.2.0/contrib/inventory/packet_net.ini ansible-2.4.0.0/contrib/inventory/packet_net.ini --- ansible-2.3.2.0/contrib/inventory/packet_net.ini 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/packet_net.ini 2017-09-19 17:10:47.000000000 +0000 @@ -37,6 +37,7 @@ # The packet inventory output can become very large. To manage its size, # configure which groups should be created. group_by_device_id = True +group_by_hostname = True group_by_facility = True group_by_project = True group_by_operating_system = True diff -Nru ansible-2.3.2.0/contrib/inventory/packet_net.py ansible-2.4.0.0/contrib/inventory/packet_net.py --- ansible-2.3.2.0/contrib/inventory/packet_net.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/packet_net.py 2017-09-19 17:10:47.000000000 +0000 @@ -63,10 +63,11 @@ ini_section = 'packet' + class PacketInventory(object): def _empty_inventory(self): - return {"_meta" : {"hostvars" : {}}} + return {"_meta": {"hostvars": {}}} def __init__(self): ''' Main execution path ''' @@ -101,7 +102,6 @@ print(data_to_print) - def is_cache_valid(self): ''' Determines if the cache files have expired, or if it is still valid ''' @@ -175,6 +175,7 @@ # Configure which groups should be created. 
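The ovirt4.py change earlier in this section replaces "first IP of the first device" with "first IP of the first device that actually has IPs". The same pattern in isolation, with stand-in classes instead of oVirt SDK types:

    class Ip(object):
        def __init__(self, address):
            self.address = address

    class Device(object):
        def __init__(self, ips):
            self.ips = ips

    devices = [Device([]), Device([Ip('192.0.2.5')])]
    ansible_host = next((device.ips[0].address for device in devices if device.ips), None)
    print(ansible_host)   # -> 192.0.2.5, or None if no device had an address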
group_by_options = [ 'group_by_device_id', + 'group_by_hostname', 'group_by_facility', 'group_by_project', 'group_by_operating_system', @@ -224,14 +225,13 @@ parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on Packet') parser.add_argument('--list', action='store_true', default=True, - help='List Devices (default: True)') + help='List Devices (default: True)') parser.add_argument('--host', action='store', - help='Get all the variables about a specific device') + help='Get all the variables about a specific device') parser.add_argument('--refresh-cache', action='store_true', default=False, - help='Force refresh of cache by making API requests to Packet (default: False - use cache files)') + help='Force refresh of cache by making API requests to Packet (default: False - use cache files)') self.args = parser.parse_args() - def do_api_calls_update_cache(self): ''' Do API calls to each region, and save data in cache files ''' @@ -244,7 +244,7 @@ def connect(self): ''' create connection to api server''' - token=os.environ.get('PACKET_API_TOKEN') + token = os.environ.get('PACKET_API_TOKEN') if token is None: raise Exception("Error reading token from environment (PACKET_API_TOKEN)!") manager = packet.Manager(auth_token=token) @@ -270,7 +270,7 @@ try: manager = self.connect() - devices = manager.list_devices(project_id=project.id, params = params) + devices = manager.list_devices(project_id=project.id, params=params) for device in devices: self.add_device(device, project) @@ -307,7 +307,6 @@ if ip_address['public'] is True and ip_address['address_family'] == 4: dest = ip_address['address'] - if not dest: # Skip devices we cannot address (e.g. private VPC subnet) return @@ -329,6 +328,12 @@ if self.nested_groups: self.push_group(self.inventory, 'devices', device.id) + # Inventory: Group by device name (hopefully a group of 1) + if self.group_by_hostname: + self.push(self.inventory, device.hostname, dest) + if self.nested_groups: + self.push_group(self.inventory, 'hostnames', project.name) + # Inventory: Group by project if self.group_by_project: self.push(self.inventory, project.name, dest) @@ -373,7 +378,6 @@ self.inventory["_meta"]["hostvars"][dest] = self.get_host_info_dict_from_device(device) - def get_host_info_dict_from_device(self, device): device_vars = {} for key in vars(device): @@ -385,7 +389,7 @@ device_vars[key] = device.state or '' elif key == 'packet_hostname': device_vars[key] = value - elif type(value) in [int, bool]: + elif isinstance(value, (int, bool)): device_vars[key] = value elif isinstance(value, six.string_types): device_vars[key] = value.strip() @@ -403,9 +407,9 @@ device_vars[key] = k else: pass - #print key - #print type(value) - #print value + # print key + # print type(value) + # print value return device_vars @@ -416,10 +420,10 @@ # Need to load index from cache self.load_index_from_cache() - if not self.args.host in self.index: + if self.args.host not in self.index: # try updating the cache self.do_api_calls_update_cache() - if not self.args.host in self.index: + if self.args.host not in self.index: # host might not exist anymore return self.json_format_dict({}, True) @@ -455,7 +459,6 @@ json_inventory = cache.read() return json_inventory - def load_index_from_cache(self): ''' Reads the index from the cache file sets self.index ''' @@ -463,7 +466,6 @@ json_index = cache.read() self.index = json.loads(json_index) - def write_to_cache(self, data, filename): ''' Writes data in JSON format to a file ''' diff -Nru 
ansible-2.3.2.0/contrib/inventory/proxmox.py ansible-2.4.0.0/contrib/inventory/proxmox.py --- ansible-2.3.2.0/contrib/inventory/proxmox.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/proxmox.py 2017-09-19 17:10:47.000000000 +0000 @@ -25,7 +25,6 @@ # # { "groups": ["utility", "databases"], "a": false, "b": true } -import urllib try: import json except ImportError: @@ -35,13 +34,16 @@ from optparse import OptionParser from six import iteritems +from six.moves.urllib.parse import urlencode from ansible.module_utils.urls import open_url + class ProxmoxNodeList(list): def get_names(self): return [node['node'] for node in self] + class ProxmoxVM(dict): def get_variables(self): variables = {} @@ -49,6 +51,7 @@ variables['proxmox_' + key] = value return variables + class ProxmoxVMList(list): def __init__(self, data=[]): for item in data: @@ -68,14 +71,17 @@ return variables + class ProxmoxPoolList(list): def get_names(self): return [pool['poolid'] for pool in self] + class ProxmoxPool(dict): def get_members_name(self): return [member['name'] for member in self['members'] if member['template'] != 1] + class ProxmoxAPI(object): def __init__(self, options): self.options = options @@ -91,7 +97,7 @@ def auth(self): request_path = '{}api2/json/access/ticket'.format(self.options.url) - request_params = urllib.urlencode({ + request_params = urlencode({ 'username': self.options.username, 'password': self.options.password, }) @@ -139,6 +145,7 @@ def pool(self, poolid): return ProxmoxPool(self.get('api2/json/pools/{}'.format(poolid))) + def main_list(options): results = { 'all': { @@ -199,6 +206,7 @@ return results + def main_host(options): proxmox_api = ProxmoxAPI(options) proxmox_api.auth() @@ -211,6 +219,7 @@ return {} + def main(): parser = OptionParser(usage='%prog [options] --list | --host HOSTNAME') parser.add_option('--list', action="store_true", default=False, dest="list") @@ -235,5 +244,6 @@ print(json.dumps(data, indent=indent)) + if __name__ == '__main__': main() diff -Nru ansible-2.3.2.0/contrib/inventory/rackhd.py ansible-2.4.0.0/contrib/inventory/rackhd.py --- ansible-2.3.2.0/contrib/inventory/rackhd.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/rackhd.py 2017-09-19 17:10:47.000000000 +0000 @@ -1,31 +1,46 @@ #!/usr/bin/env python +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see . 
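As context for the proxmox.py hunk above (this sketch is not part of the patch): the change swaps urllib.urlencode for the six.moves import so the same statement works under Python 2 and Python 3, where urlencode lives in urllib.parse. A minimal sketch with made-up values:

#!/usr/bin/env python
# Illustration only: six.moves.urllib.parse.urlencode resolves to
# urllib.urlencode on Python 2 and urllib.parse.urlencode on Python 3.
from six.moves.urllib.parse import urlencode

request_params = urlencode({
    'username': 'root@pam',   # example credentials, not real values
    'password': 'example',
})
print(request_params)         # e.g. username=root%40pam&password=example (order may vary on Python 2)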
+ import json -import requests import os +import requests import argparse -import types RACKHD_URL = 'http://localhost:8080' + class RackhdInventory(object): def __init__(self, nodeids): self._inventory = {} for nodeid in nodeids: self._load_inventory_data(nodeid) inventory = {} - for nodeid,info in self._inventory.items(): - inventory[nodeid]= (self._format_output(nodeid, info)) + for (nodeid, info) in self._inventory.items(): + inventory[nodeid] = (self._format_output(nodeid, info)) print(json.dumps(inventory)) def _load_inventory_data(self, nodeid): info = {} - info['ohai'] = RACKHD_URL + '/api/common/nodes/{0}/catalogs/ohai'.format(nodeid ) + info['ohai'] = RACKHD_URL + '/api/common/nodes/{0}/catalogs/ohai'.format(nodeid) info['lookup'] = RACKHD_URL + '/api/common/lookups/?q={0}'.format(nodeid) results = {} - for key,url in info.items(): - r = requests.get( url, verify=False) + for (key, url) in info.items(): + r = requests.get(url, verify=False) results[key] = r.text self._inventory[nodeid] = results @@ -35,8 +50,8 @@ ipaddress = '' if len(node_info) > 0: ipaddress = node_info[0]['ipAddress'] - output = { 'hosts':[ipaddress],'vars':{}} - for key,result in info.items(): + output = {'hosts': [ipaddress], 'vars': {}} + for (key, result) in info.items(): output['vars'][key] = json.loads(result) output['vars']['ansible_ssh_user'] = 'monorail' except KeyError: @@ -50,11 +65,12 @@ parser.add_argument('--list', action='store_true') return parser.parse_args() + try: - #check if rackhd url(ie:10.1.1.45:8080) is specified in the environment + # check if rackhd url(ie:10.1.1.45:8080) is specified in the environment RACKHD_URL = 'http://' + str(os.environ['RACKHD_URL']) except: - #use default values + # use default values pass # Use the nodeid specified in the environment to limit the data returned @@ -70,7 +86,7 @@ if (parse_args().list): try: url = RACKHD_URL + '/api/common/nodes' - r = requests.get( url, verify=False) + r = requests.get(url, verify=False) data = json.loads(r.text) for entry in data: if entry['type'] == 'compute': diff -Nru ansible-2.3.2.0/contrib/inventory/rax.py ansible-2.4.0.0/contrib/inventory/rax.py --- ansible-2.3.2.0/contrib/inventory/rax.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/rax.py 2017-09-19 17:10:47.000000000 +0000 @@ -168,10 +168,11 @@ from time import time -from ansible.constants import get_config, mk_boolean +from ansible.constants import get_config +from ansible.module_utils.parsing.convert_bool import boolean +from ansible.module_utils.six import text_type - -NON_CALLABLES = (basestring, bool, dict, int, list, type(None)) +NON_CALLABLES = (text_type, str, bool, dict, int, list, type(None)) def load_config_file(): @@ -229,18 +230,18 @@ try: # Ansible 2.3+ networks = get_config(p, 'rax', 'access_network', - 'RAX_ACCESS_NETWORK', 'public', value_type='list') + 'RAX_ACCESS_NETWORK', 'public', value_type='list') except TypeError: # Ansible 2.2.x and below networks = get_config(p, 'rax', 'access_network', - 'RAX_ACCESS_NETWORK', 'public', islist=True) + 'RAX_ACCESS_NETWORK', 'public', islist=True) try: try: ip_versions = map(int, get_config(p, 'rax', 'access_ip_version', - 'RAX_ACCESS_IP_VERSION', 4, value_type='list')) + 'RAX_ACCESS_IP_VERSION', 4, value_type='list')) except TypeError: ip_versions = map(int, get_config(p, 'rax', 'access_ip_version', - 'RAX_ACCESS_IP_VERSION', 4, islist=True)) + 'RAX_ACCESS_IP_VERSION', 4, islist=True)) except: ip_versions = [4] else: @@ -288,7 +289,7 @@ if not cbs_attachments[region]: cbs = 
pyrax.connect_to_cloud_blockstorage(region) for vol in cbs.list(): - if mk_boolean(vol.bootable): + if boolean(vol.bootable, strict=False): for attachment in vol.attachments: metadata = vol.volume_image_metadata server_id = attachment['server_id'] @@ -434,11 +435,11 @@ try: # Ansible 2.3+ region_list = get_config(p, 'rax', 'regions', 'RAX_REGION', 'all', - value_type='list') + value_type='list') except TypeError: # Ansible 2.2.x and below region_list = get_config(p, 'rax', 'regions', 'RAX_REGION', 'all', - islist=True) + islist=True) for region in region_list: region = region.strip().upper() diff -Nru ansible-2.3.2.0/contrib/inventory/rhv.py ansible-2.4.0.0/contrib/inventory/rhv.py --- ansible-2.3.2.0/contrib/inventory/rhv.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/rhv.py 2017-09-19 17:10:47.000000000 +0000 @@ -182,9 +182,9 @@ (stat.name, stat.values[0].datum) for stat in stats ), 'devices': dict( - (device.name, [ip.address for ip in device.ips]) for device in devices + (device.name, [ip.address for ip in device.ips]) for device in devices if device.ips ), - 'ansible_host': devices[0].ips[0].address if len(devices) > 0 else None, + 'ansible_host': next((device.ips[0].address for device in devices if device.ips), None) } diff -Nru ansible-2.3.2.0/contrib/inventory/softlayer.py ansible-2.4.0.0/contrib/inventory/softlayer.py --- ansible-2.3.2.0/contrib/inventory/softlayer.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/softlayer.py 2017-09-19 17:10:47.000000000 +0000 @@ -41,6 +41,7 @@ except: import simplejson as json + class SoftLayerInventory(object): common_items = [ 'id', @@ -52,7 +53,8 @@ 'primaryIpAddress', 'datacenter', 'tagReferences.tag.name', - ] + 'userData.value', + ] vs_items = [ 'lastKnownPowerState.name', @@ -61,16 +63,16 @@ 'maxMemory', 'activeTransaction.transactionStatus[friendlyName,name]', 'status', - ] + ] hw_items = [ 'hardwareStatusId', 'processorPhysicalCoreAmount', 'memoryCapacity', - ] + ] def _empty_inventory(self): - return {"_meta" : {"hostvars" : {}}} + return {"_meta": {"hostvars": {}}} def __init__(self): '''Main path''' @@ -104,9 +106,9 @@ parser = argparse.ArgumentParser(description='Produce an Ansible Inventory file based on SoftLayer') parser.add_argument('--list', action='store_true', default=False, - help='List instances (default: False)') + help='List instances (default: False)') parser.add_argument('--host', action='store', - help='Get all the variables about a specific instance') + help='Get all the variables about a specific instance') self.args = parser.parse_args() def json_format_dict(self, data, pretty=False): @@ -136,6 +138,8 @@ if 'primaryIpAddress' not in instance: return + instance['userData'] = instance['userData'][0]['value'] if instance['userData'] else '' + dest = instance['primaryIpAddress'] self.inventory["_meta"]["hostvars"][dest] = instance @@ -174,7 +178,7 @@ def get_virtual_servers(self): '''Get all the CCI instances''' vs = SoftLayer.VSManager(self.client) - mask = "mask[%s]" % ','.join(itertools.chain(self.common_items,self.vs_items)) + mask = "mask[%s]" % ','.join(itertools.chain(self.common_items, self.vs_items)) instances = vs.list_instances(mask=mask) for instance in instances: @@ -183,7 +187,7 @@ def get_physical_servers(self): '''Get all the hardware instances''' hw = SoftLayer.HardwareManager(self.client) - mask = "mask[%s]" % ','.join(itertools.chain(self.common_items,self.hw_items)) + mask = "mask[%s]" % ','.join(itertools.chain(self.common_items, 
self.hw_items)) instances = hw.list_hardware(mask=mask) for instance in instances: diff -Nru ansible-2.3.2.0/contrib/inventory/spacewalk.py ansible-2.4.0.0/contrib/inventory/spacewalk.py --- ansible-2.3.2.0/contrib/inventory/spacewalk.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/spacewalk.py 2017-09-19 17:10:47.000000000 +0000 @@ -56,11 +56,14 @@ except: import simplejson as json -base_dir = os.path.dirname(os.path.realpath(__file__)) +base_dir = os.path.dirname(os.path.realpath(__file__)) +default_ini_file = os.path.join(base_dir, "spacewalk.ini") + SW_REPORT = '/usr/bin/spacewalk-report' CACHE_DIR = os.path.join(base_dir, ".spacewalk_reports") -CACHE_AGE = 300 # 5min -INI_FILE = os.path.join(base_dir, "spacewalk.ini") +CACHE_AGE = 300 # 5min +INI_FILE = os.path.expanduser(os.path.expandvars(os.environ.get("SPACEWALK_INI_PATH", default_ini_file))) + # Sanity check if not os.path.exists(SW_REPORT): @@ -73,7 +76,8 @@ os.chmod(CACHE_DIR, 0o2775) # Helper functions -#------------------------------ +# ------------------------------ + def spacewalk_report(name): """Yield a dictionary form of each CSV output produced by the specified @@ -91,7 +95,7 @@ lines = open(cache_filename, 'r').readlines() keys = lines[0].strip().split(',') # add 'spacewalk_' prefix to the keys - keys = [ 'spacewalk_' + key for key in keys ] + keys = ['spacewalk_' + key for key in keys] for line in lines[1:]: values = line.strip().split(',') if len(keys) == len(values): @@ -99,7 +103,7 @@ # Options -#------------------------------ +# ------------------------------ parser = OptionParser(usage="%prog [options] --list | --host ") parser.add_option('--list', default=False, dest="list", action="store_true", @@ -117,20 +121,20 @@ # read spacewalk.ini if present -#------------------------------ +# ------------------------------ if os.path.exists(INI_FILE): config = ConfigParser.SafeConfigParser() config.read(INI_FILE) - if config.has_option('spacewalk' , 'cache_age'): - CACHE_AGE = config.get('spacewalk' , 'cache_age') - if not options.org_number and config.has_option('spacewalk' , 'org_number'): - options.org_number = config.get('spacewalk' , 'org_number') - if not options.prefix_org_name and config.has_option('spacewalk' , 'prefix_org_name'): - options.prefix_org_name = config.getboolean('spacewalk' , 'prefix_org_name') + if config.has_option('spacewalk', 'cache_age'): + CACHE_AGE = config.get('spacewalk', 'cache_age') + if not options.org_number and config.has_option('spacewalk', 'org_number'): + options.org_number = config.get('spacewalk', 'org_number') + if not options.prefix_org_name and config.has_option('spacewalk', 'prefix_org_name'): + options.prefix_org_name = config.getboolean('spacewalk', 'prefix_org_name') # Generate dictionary for mapping group_id to org_id -#------------------------------ +# ------------------------------ org_groups = {} try: for group in spacewalk_report('system-groups'): @@ -143,14 +147,14 @@ # List out the known server from Spacewalk -#------------------------------ +# ------------------------------ if options.list: # to build the "_meta"-Group with hostvars first create dictionary for later use host_vars = {} try: for item in spacewalk_report('inventory'): - host_vars[ item['spacewalk_profile_name'] ] = dict( ( key, ( value.split(';') if ';' in value else value) ) for key, value in item.items() ) + host_vars[item['spacewalk_profile_name']] = dict((key, (value.split(';') if ';' in value else value)) for key, value in item.items()) except (OSError) as e: 
print('Problem executing the command "%s inventory": %s' % @@ -158,11 +162,11 @@ sys.exit(2) groups = {} - meta = { "hostvars" : {} } + meta = {"hostvars": {}} try: for system in spacewalk_report('system-groups-systems'): # first get org_id of system - org_id = org_groups[ system['spacewalk_group_id'] ] + org_id = org_groups[system['spacewalk_group_id']] # shall we add the org_id as prefix to the group name: if options.prefix_org_name: @@ -178,16 +182,16 @@ groups[group_name] = set() groups[group_name].add(system['spacewalk_server_name']) - if system['spacewalk_server_name'] in host_vars and not system['spacewalk_server_name'] in meta[ "hostvars" ]: - meta[ "hostvars" ][ system['spacewalk_server_name'] ] = host_vars[ system['spacewalk_server_name'] ] + if system['spacewalk_server_name'] in host_vars and not system['spacewalk_server_name'] in meta["hostvars"]: + meta["hostvars"][system['spacewalk_server_name']] = host_vars[system['spacewalk_server_name']] # or we list all groups and systems: else: if group_name not in groups: groups[group_name] = set() groups[group_name].add(system['spacewalk_server_name']) - if system['spacewalk_server_name'] in host_vars and not system['spacewalk_server_name'] in meta[ "hostvars" ]: - meta[ "hostvars" ][ system['spacewalk_server_name'] ] = host_vars[ system['spacewalk_server_name'] ] + if system['spacewalk_server_name'] in host_vars and not system['spacewalk_server_name'] in meta["hostvars"]: + meta["hostvars"][system['spacewalk_server_name']] = host_vars[system['spacewalk_server_name']] except (OSError) as e: print('Problem executing the command "%s system-groups-systems": %s' % @@ -198,15 +202,15 @@ for group, systems in iteritems(groups): print('[%s]\n%s\n' % (group, '\n'.join(systems))) else: - final = dict( [ (k, list(s)) for k, s in iteritems(groups) ] ) + final = dict([(k, list(s)) for k, s in iteritems(groups)]) final["_meta"] = meta - print(json.dumps( final )) - #print(json.dumps(groups)) + print(json.dumps(final)) + # print(json.dumps(groups)) sys.exit(0) # Return a details information concerning the spacewalk server -#------------------------------ +# ------------------------------ elif options.host: host_details = {} @@ -226,7 +230,7 @@ for k, v in iteritems(host_details): print(' %s: %s' % (k, '\n '.join(v.split(';')))) else: - print( json.dumps( dict( ( key, ( value.split(';') if ';' in value else value) ) for key, value in host_details.items() ) ) ) + print(json.dumps(dict((key, (value.split(';') if ';' in value else value)) for key, value in host_details.items()))) sys.exit(0) else: diff -Nru ansible-2.3.2.0/contrib/inventory/ssh_config.py ansible-2.4.0.0/contrib/inventory/ssh_config.py --- ansible-2.3.2.0/contrib/inventory/ssh_config.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/ssh_config.py 2017-09-19 17:10:47.000000000 +0000 @@ -43,21 +43,24 @@ import argparse import os.path import sys -import paramiko +from collections import MutableSequence try: import json except ImportError: import simplejson as json +import paramiko + + SSH_CONF = '~/.ssh/config' _key = 'ssh_config' _ssh_to_ansible = [('user', 'ansible_ssh_user'), - ('hostname', 'ansible_ssh_host'), - ('identityfile', 'ansible_ssh_private_key_file'), - ('port', 'ansible_ssh_port')] + ('hostname', 'ansible_ssh_host'), + ('identityfile', 'ansible_ssh_private_key_file'), + ('port', 'ansible_ssh_port')] def get_config(): @@ -68,7 +71,7 @@ cfg.parse(f) ret_dict = {} for d in cfg._config: - if type(d['host']) is list: + if isinstance(d['host'], 
MutableSequence): alias = d['host'][0] else: alias = d['host'] @@ -93,7 +96,7 @@ # If the attribute is a list, just take the first element. # Private key is returned in a list for some reason. attr = attributes[ssh_opt] - if type(attr) is list: + if isinstance(attr, MutableSequence): attr = attr[0] tmp_dict[ans_opt] = attr if tmp_dict: diff -Nru ansible-2.3.2.0/contrib/inventory/stacki.py ansible-2.4.0.0/contrib/inventory/stacki.py --- ansible-2.3.2.0/contrib/inventory/stacki.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/stacki.py 2017-09-19 17:10:47.000000000 +0000 @@ -64,9 +64,9 @@ def stack_auth(params): - endpoint = params['stacki_endpoint'] - auth_creds = {'USERNAME': params['stacki_user'], - 'PASSWORD': params['stacki_password']} + endpoint = params['stacki_endpoint'] + auth_creds = {'USERNAME': params['stacki_user'], + 'PASSWORD': params['stacki_password']} client = requests.session() client.get(endpoint) @@ -99,17 +99,18 @@ def stack_host_list(endpoint, header, client): - stack_r = client.post(endpoint, data=json.dumps({ "cmd": "list host"}), + stack_r = client.post(endpoint, data=json.dumps({"cmd": "list host"}), headers=header) return json.loads(stack_r.json()) def stack_net_list(endpoint, header, client): - stack_r = client.post(endpoint, data=json.dumps({ "cmd": "list host interface"}), + stack_r = client.post(endpoint, data=json.dumps({"cmd": "list host interface"}), headers=header) return json.loads(stack_r.json()) + def format_meta(hostdata, intfdata, config): use_hostnames = config['use_hostnames'] meta = dict(all=dict(hosts=list()), @@ -159,7 +160,6 @@ def main(): args = parse_args() - if StrictVersion(requests.__version__) < StrictVersion("2.4.3"): sys.exit('requests>=2.4.3 is required for this inventory script') diff -Nru ansible-2.3.2.0/contrib/inventory/vbox.py ansible-2.4.0.0/contrib/inventory/vbox.py --- ansible-2.3.2.0/contrib/inventory/vbox.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/vbox.py 2017-09-19 17:10:47.000000000 +0000 @@ -16,20 +16,21 @@ # along with Ansible. If not, see . 
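As context for the ssh_config.py hunk above (this sketch is not part of the patch): testing isinstance() against the MutableSequence abstract base class accepts list subclasses and other list-like containers that the old "type(x) is list" check would reject. A minimal illustration:

# Illustration only; on Python 3.3+ the ABC is also available as collections.abc.MutableSequence.
from collections import MutableSequence

class HostAliases(list):   # hypothetical list subclass
    pass

hosts = HostAliases(['web1', 'web1.example.com'])
print(type(hosts) is list)                  # False: subclass fails the strict type test
print(isinstance(hosts, MutableSequence))   # True: still behaves as a mutable sequence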
import sys -from subprocess import Popen,PIPE +from subprocess import Popen, PIPE try: import json except ImportError: import simplejson as json + class SetEncoder(json.JSONEncoder): def default(self, obj): if isinstance(obj, set): return list(obj) return json.JSONEncoder.default(self, obj) -VBOX="VBoxManage" +VBOX = "VBoxManage" def get_hosts(host=None): @@ -39,7 +40,7 @@ if host: p = Popen([VBOX, 'showvminfo', host], stdout=PIPE) else: - returned = { 'all': set(), '_metadata': {} } + returned = {'all': set(), '_metadata': {}} p = Popen([VBOX, 'list', '-l', 'vms'], stdout=PIPE) except: sys.exit(1) @@ -50,7 +51,7 @@ for line in p.stdout.readlines(): try: - k,v = line.split(':',1) + k, v = line.split(':', 1) except: continue @@ -62,11 +63,11 @@ if v not in hostvars: curname = v hostvars[curname] = {} - try: # try to get network info - x = Popen([VBOX, 'guestproperty', 'get', curname,"/VirtualBox/GuestInfo/Net/0/V4/IP"],stdout=PIPE) + try: # try to get network info + x = Popen([VBOX, 'guestproperty', 'get', curname, "/VirtualBox/GuestInfo/Net/0/V4/IP"], stdout=PIPE) ipinfo = x.stdout.read() if 'Value' in ipinfo: - a,ip = ipinfo.split(':',1) + a, ip = ipinfo.split(':', 1) hostvars[curname]['ansible_ssh_host'] = ip.strip() except: pass @@ -83,11 +84,11 @@ returned['all'].add(curname) continue - pref_k = 'vbox_' + k.strip().replace(' ','_') + pref_k = 'vbox_' + k.strip().replace(' ', '_') if k.startswith(' '): if prevkey not in hostvars[curname]: hostvars[curname][prevkey] = {} - hostvars[curname][prevkey][pref_k]= v + hostvars[curname][prevkey][pref_k] = v else: if v != '': hostvars[curname][pref_k] = v diff -Nru ansible-2.3.2.0/contrib/inventory/vmware_inventory.py ansible-2.4.0.0/contrib/inventory/vmware_inventory.py --- ansible-2.3.2.0/contrib/inventory/vmware_inventory.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/vmware_inventory.py 2017-09-19 17:10:47.000000000 +0000 @@ -1,4 +1,19 @@ #!/usr/bin/env python +# +# This file is part of Ansible, +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see . # Requirements # - pyvmomi >= 6.0.0.2016.4 @@ -23,34 +38,33 @@ from __future__ import print_function -import argparse import atexit import datetime import getpass -import jinja2 +import json import os -import six +import re import ssl import sys import uuid - from collections import defaultdict -from six.moves import configparser from time import time -HAS_PYVMOMI = False -try: - from pyVmomi import vim - from pyVim.connect import SmartConnect, Disconnect +import six +from jinja2 import Environment +from six import integer_types, string_types +from six.moves import configparser - HAS_PYVMOMI = True +try: + import argparse except ImportError: - pass + sys.exit('Error: This inventory script required "argparse" python module. 
Please install it or upgrade to python-2.7') try: - import json + from pyVmomi import vim, vmodl + from pyVim.connect import SmartConnect, Disconnect except ImportError: - import simplejson as json + sys.exit("ERROR: This inventory script required 'pyVmomi' Python module, it was not able to load it") hasvcr = False try: @@ -61,6 +75,15 @@ pass +def regex_match(s, pattern): + '''Custom filter for regex matching''' + reg = re.compile(pattern) + if reg.match(s): + return True + else: + return False + + class VMwareMissingHostException(Exception): pass @@ -89,10 +112,7 @@ skip_keys = [] groupby_patterns = [] - if sys.version_info > (3, 0): - safe_types = [int, bool, str, float, None] - else: - safe_types = [int, long, bool, str, float, None] + safe_types = [bool, str, float, None] + list(integer_types) iter_types = [dict, list] bad_types = ['Array', 'disabledMethod', 'declaredAlarmState'] @@ -104,15 +124,17 @@ custom_fields = {} + # use jinja environments to allow for custom filters + env = Environment() + env.filters['regex_match'] = regex_match + # translation table for attributes to fetch for known vim types - if not HAS_PYVMOMI: - vimTable = {} - else: - vimTable = { - vim.Datastore: ['_moId', 'name'], - vim.ResourcePool: ['_moId', 'name'], - vim.HostSystem: ['_moId', 'name'], - } + + vimTable = { + vim.Datastore: ['_moId', 'name'], + vim.ResourcePool: ['_moId', 'name'], + vim.HostSystem: ['_moId', 'name'], + } @staticmethod def _empty_inventory(): @@ -333,13 +355,21 @@ ''' Make API calls ''' instances = [] - si = SmartConnect(**inkwargs) + try: + si = SmartConnect(**inkwargs) + except ssl.SSLError as connection_error: + if '[SSL: CERTIFICATE_VERIFY_FAILED]' in str(connection_error) and self.validate_certs: + sys.exit("Unable to connect to ESXi server due to %s, " + "please specify validate_certs=False and try again" % connection_error) + + except Exception as exc: + self.debugl("Unable to connect to ESXi server due to %s" % exc) + sys.exit("Unable to connect to ESXi server due to %s" % exc) self.debugl('retrieving all instances') if not si: - print("Could not connect to the specified host using specified " - "username and password") - return -1 + sys.exit("Could not connect to the specified host using specified " + "username and password") atexit.register(Disconnect, si) content = si.RetrieveContent() @@ -370,12 +400,16 @@ instance_tuples.append((instance, ifacts)) self.debugl('facts collected for all instances') - cfm = content.customFieldsManager - if cfm is not None and cfm.field: - for f in cfm.field: - if f.managedObjectType == vim.VirtualMachine: - self.custom_fields[f.key] = f.name - self.debugl('%d custom fieds collected' % len(self.custom_fields)) + try: + cfm = content.customFieldsManager + if cfm is not None and cfm.field: + for f in cfm.field: + if f.managedObjectType == vim.VirtualMachine: + self.custom_fields[f.key] = f.name + self.debugl('%d custom fields collected' % len(self.custom_fields)) + except vmodl.RuntimeFault as exc: + self.debugl("Unable to gather custom fields due to %s" % exc.msg) + return instance_tuples def instances_to_inventory(self, instances): @@ -412,7 +446,7 @@ # Reset the inventory keys for k, v in name_mapping.items(): - if not host_mapping or not k in host_mapping: + if not host_mapping or k not in host_mapping: continue # set ansible_host (2.x) @@ -467,7 +501,7 @@ for k, v in inventory['_meta']['hostvars'].items(): if 'customvalue' in v: for tv in v['customvalue']: - if not isinstance(tv['value'], str) and not isinstance(tv['value'], unicode): + if 
not isinstance(tv['value'], string_types): continue newkey = None @@ -498,7 +532,7 @@ mapping = {} for k, v in inventory['_meta']['hostvars'].items(): - t = jinja2.Template(pattern) + t = self.env.from_string(pattern) newkey = None try: newkey = t.render(v) @@ -544,15 +578,27 @@ for idx, x in enumerate(parts): - # if the val wasn't set yet, get it from the parent - if not val: - val = getattr(vm, x) + if isinstance(val, dict): + if x in val: + val = val.get(x) + elif x.lower() in val: + val = val.get(x.lower()) else: - # in a subkey, get the subprop from the previous attrib - try: - val = getattr(val, x) - except AttributeError as e: - self.debugl(e) + # if the val wasn't set yet, get it from the parent + if not val: + try: + val = getattr(vm, x) + except AttributeError as e: + self.debugl(e) + else: + # in a subkey, get the subprop from the previous attrib + try: + val = getattr(val, x) + except AttributeError as e: + self.debugl(e) + + # make sure it serializes + val = self._process_object_types(val) # lowercase keys if requested if self.lowerkeys: @@ -616,7 +662,7 @@ return rdata - def _process_object_types(self, vobj, thisvm=None, inkey=None, level=0): + def _process_object_types(self, vobj, thisvm=None, inkey='', level=0): ''' Serialize an object ''' rdata = {} @@ -640,12 +686,10 @@ rdata = vobj.decode('ascii', 'ignore') elif issubclass(type(vobj), bool) or isinstance(vobj, bool): rdata = vobj - elif issubclass(type(vobj), int) or isinstance(vobj, int): + elif issubclass(type(vobj), integer_types) or isinstance(vobj, integer_types): rdata = vobj elif issubclass(type(vobj), float) or isinstance(vobj, float): rdata = vobj - elif issubclass(type(vobj), long) or isinstance(vobj, long): - rdata = vobj elif issubclass(type(vobj), list) or issubclass(type(vobj), tuple): rdata = [] try: @@ -710,7 +754,7 @@ return self.inventory['_meta']['hostvars'][host] elif self.args.host and self.inventory['_meta']['hostvars']: match = None - for k, v in self.inventory['_meta']['hostvars']: + for k, v in self.inventory['_meta']['hostvars'].items(): if self.inventory['_meta']['hostvars'][k]['name'] == self.args.host: match = k break @@ -725,5 +769,3 @@ if __name__ == "__main__": # Run the script print(VMWareInventory().show()) - - diff -Nru ansible-2.3.2.0/contrib/inventory/vmware.py ansible-2.4.0.0/contrib/inventory/vmware.py --- ansible-2.3.2.0/contrib/inventory/vmware.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/vmware.py 2017-09-19 17:10:47.000000000 +0000 @@ -38,18 +38,20 @@ import ssl import sys import time -import ConfigParser -from six import text_type, string_types +from six import integer_types, text_type, string_types +from six.moves import configparser # Disable logging message trigged by pSphere/suds. 
try: from logging import NullHandler except ImportError: from logging import Handler + class NullHandler(Handler): def emit(self, record): pass + logging.getLogger('psphere').addHandler(NullHandler()) logging.getLogger('suds').addHandler(NullHandler()) @@ -62,11 +64,11 @@ class VMwareInventory(object): def __init__(self, guests_only=None): - self.config = ConfigParser.SafeConfigParser() + self.config = configparser.SafeConfigParser() if os.environ.get('VMWARE_INI', ''): config_files = [os.environ['VMWARE_INI']] else: - config_files = [os.path.abspath(sys.argv[0]).rstrip('.py') + '.ini', 'vmware.ini'] + config_files = [os.path.abspath(sys.argv[0]).rstrip('.py') + '.ini', 'vmware.ini'] for config_file in config_files: if os.path.exists(config_file): self.config.read(config_file) @@ -208,7 +210,7 @@ if obj_info != (): l.append(obj_info) return l - elif isinstance(obj, (type(None), bool, int, long, float, string_types)): + elif isinstance(obj, (type(None), bool, float) + string_types + integer_types): return obj else: return () @@ -362,7 +364,7 @@ # Loop through all VMs on physical host. for vm in host.vm: if prefix_filter: - if vm.name.startswith( prefix_filter ): + if vm.name.startswith(prefix_filter): continue self._add_host(inv, 'all', vm.name) self._add_host(inv, vm_group, vm.name) diff -Nru ansible-2.3.2.0/contrib/inventory/windows_azure.py ansible-2.4.0.0/contrib/inventory/windows_azure.py --- ansible-2.3.2.0/contrib/inventory/windows_azure.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/windows_azure.py 2017-09-19 17:10:47.000000000 +0000 @@ -54,6 +54,7 @@ # Imports for ansible import ConfigParser + class AzureInventory(object): def __init__(self): """Main execution path.""" @@ -171,10 +172,9 @@ parser.add_argument('--list-images', action='store', help='Get all available images.') parser.add_argument('--refresh-cache', - action='store_true', default=False, - help='Force refresh of thecache by making API requests to Azure ' - '(default: False - use cache files)', - ) + action='store_true', default=False, + help='Force refresh of thecache by making API requests to Azure ' + '(default: False - use cache files)') parser.add_argument('--host', action='store', help='Get all information about an instance.') self.args = parser.parse_args() @@ -198,7 +198,7 @@ associated with a cloud service. """ try: - for deployment in self.sms.get_hosted_service_properties(cloud_service.service_name,embed_detail=True).deployments.deployments: + for deployment in self.sms.get_hosted_service_properties(cloud_service.service_name, embed_detail=True).deployments.deployments: self.add_deployment(cloud_service, deployment) except Exception as e: sys.exit("Error: Failed to access deployments - {0}".format(str(e))) diff -Nru ansible-2.3.2.0/contrib/inventory/zabbix.py ansible-2.4.0.0/contrib/inventory/zabbix.py --- ansible-2.3.2.0/contrib/inventory/zabbix.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/zabbix.py 2017-09-19 17:10:47.000000000 +0000 @@ -24,10 +24,12 @@ ======================================== Returns hosts and hostgroups from Zabbix Server. +If you want to run with --limit against a host group with space in the +name, use asterisk. For example --limit="Linux*servers". Configuration is read from `zabbix.ini`. -Tested with Zabbix Server 2.0.6. +Tested with Zabbix Server 2.0.6 and 3.2.3. 
""" from __future__ import print_function @@ -35,7 +37,10 @@ import os import sys import argparse -import ConfigParser +try: + import ConfigParser as configparser +except ImportError: + import configparser try: from zabbix_api import ZabbixAPI @@ -49,10 +54,11 @@ except: import simplejson as json + class ZabbixInventory(object): def read_settings(self): - config = ConfigParser.SafeConfigParser() + config = configparser.SafeConfigParser() conf_path = './zabbix.ini' if not os.path.exists(conf_path): conf_path = os.path.dirname(os.path.realpath(__file__)) + '/zabbix.ini' @@ -96,11 +102,14 @@ for group in host['groups']: groupname = group['name'] - if not groupname in data: + if groupname not in data: data[groupname] = self.hoststub() data[groupname]['hosts'].append(hostname) + # Prevents Ansible from calling this script for each server with --host + data['_meta'] = {'hostvars': self.meta} + return data def __init__(self): @@ -109,6 +118,7 @@ self.zabbix_server = None self.zabbix_username = None self.zabbix_password = None + self.meta = {} self.read_settings() self.read_cli() diff -Nru ansible-2.3.2.0/contrib/inventory/zone.py ansible-2.4.0.0/contrib/inventory/zone.py --- ansible-2.3.2.0/contrib/inventory/zone.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/inventory/zone.py 2017-09-19 17:10:47.000000000 +0000 @@ -17,7 +17,7 @@ # You should have received a copy of the GNU General Public License # along with Ansible. If not, see . -from subprocess import Popen,PIPE +from subprocess import Popen, PIPE import sys import json diff -Nru ansible-2.3.2.0/contrib/README.md ansible-2.4.0.0/contrib/README.md --- ansible-2.3.2.0/contrib/README.md 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/contrib/README.md 2017-09-19 17:10:47.000000000 +0000 @@ -1,16 +1,26 @@ +contrib +------- +Files here provide an extension mechanism for Ansible similar to plugins. They are not maintained by the Ansible core team or installed with Ansible. + + inventory ========= -Inventory scripts allow you to store your hosts, groups, and variables in any way -you like. Examples include discovering inventory from EC2 or pulling it from -Cobbler. These could also be used to interface with LDAP or database. - -`chmod +x` an inventory plugin and either name it `/etc/ansible/hosts` or use `ansible -i /path/to/inventory/script`. You might also need to copy a configuration -file with the same name and/or set environment variables, the scripts or configuration -files have more details. +Before 2.4 introduced inventory plugins, inventory scripts were the only way to provide sources that were not built into Ansible. Inventory scripts allow you to store your hosts, groups, and variables in any way you like. + +Starting with Ansible version 2.4, they are enabled via the 'script' inventory plugin. +Examples of use include discovering inventory from EC2 or pulling it from Cobbler. These could also be used to interface with LDAP or the database. + +`chmod +x` an inventory plugin and either name it `/etc/ansible/hosts` or use `ansible -i /path/to/inventory/script`. You might also need to copy a configuration file with the same name and/or set environment variables. The scripts or configuration files can provide more details. + +vault +===== + +If the file passed to `--vault-password-file` has the executable bit set, Ansible will execute it and use the stdout of that execution as 'the secret'. +Vault scripts provided here use this facility to retrieve the vault secret from a number of sources. 
contributions welcome ===================== -Send in pull requests to add plugins of your own. The sky is the limit! +Send in pull requests to add scripts of your own. The sky is the limit! diff -Nru ansible-2.3.2.0/.coveragerc ansible-2.4.0.0/.coveragerc --- ansible-2.3.2.0/.coveragerc 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/.coveragerc 2017-09-19 17:10:47.000000000 +0000 @@ -20,5 +20,6 @@ omit = */python*/dist-packages/* */python*/site-packages/* - */python*/distutils + */python*/distutils/* + */pyshared/* */pytest diff -Nru ansible-2.3.2.0/debian/changelog ansible-2.4.0.0/debian/changelog --- ansible-2.3.2.0/debian/changelog 2017-08-08 17:09:02.000000000 +0000 +++ ansible-2.4.0.0/debian/changelog 2017-09-19 17:15:24.000000000 +0000 @@ -1,28 +1,14 @@ -ansible (2.3.2.0-1ppa~precise) precise; urgency=low +ansible (2.4.0.0-1ppa~precise) precise; urgency=low - * 2.3.2.0 release + * 2.4.0.0 release - -- Ansible, Inc. Tue, 08 Aug 2017 17:08:46 +0000 + -- Ansible, Inc. Tue, 19 Sep 2017 17:15:06 +0000 -ansible (2.3.2.0) unstable; urgency=low +ansible (2.4.0.0) unstable; urgency=low - * 2.3.2.0 + * 2.4.0.0 - -- Ansible, Inc. Fri, 04 Aug 2017 15:24:07 -0500 - - -ansible (2.3.1.0) unstable; urgency=low - - * 2.3.1.0 - - -- Ansible, Inc. Thu, 01 Jun 2017 10:57:36 -0500 - - -ansible (2.3.0.0) unstable; urgency=low - - * 2.3.0.0 - - -- Ansible, Inc. Wed, 12 Apr 2017 08:56:32 -0500 + -- Ansible, Inc. Mon, 18 Sep 2017 18:22:15 -0700 ansible (2.0.1.0) unstable; urgency=low diff -Nru ansible-2.3.2.0/debian/changeloge ansible-2.4.0.0/debian/changeloge --- ansible-2.3.2.0/debian/changeloge 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/debian/changeloge 2017-09-19 17:10:47.000000000 +0000 @@ -4,25 +4,11 @@ -- Ansible, Inc. %DATE% -ansible (2.3.2.0) unstable; urgency=low +ansible (2.4.0.0) unstable; urgency=low - * 2.3.2.0 + * 2.4.0.0 - -- Ansible, Inc. Fri, 04 Aug 2017 15:24:07 -0500 - - -ansible (2.3.1.0) unstable; urgency=low - - * 2.3.1.0 - - -- Ansible, Inc. Thu, 01 Jun 2017 10:57:36 -0500 - - -ansible (2.3.0.0) unstable; urgency=low - - * 2.3.0.0 - - -- Ansible, Inc. Wed, 12 Apr 2017 08:56:32 -0500 + -- Ansible, Inc. Mon, 18 Sep 2017 18:22:15 -0700 ansible (2.0.1.0) unstable; urgency=low diff -Nru ansible-2.3.2.0/debian/control ansible-2.4.0.0/debian/control --- ansible-2.3.2.0/debian/control 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/debian/control 2017-09-19 17:10:47.000000000 +0000 @@ -3,7 +3,7 @@ Priority: optional Standards-Version: 3.9.3 Maintainer: Ansible, Inc. 
-Build-Depends: cdbs, debhelper (>= 5.0.0), asciidoc, python, dh-python | python-support, python-setuptools +Build-Depends: cdbs, debhelper (>= 5.0.0), asciidoc, python, dh-python | python-support, python-setuptools, lsb-release Homepage: http://ansible.github.com/ Package: ansible diff -Nru ansible-2.3.2.0/debian/rules ansible-2.4.0.0/debian/rules --- ansible-2.3.2.0/debian/rules 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/debian/rules 2017-09-19 17:10:47.000000000 +0000 @@ -5,3 +5,8 @@ include /usr/share/cdbs/1/rules/debhelper.mk include /usr/share/cdbs/1/class/python-distutils.mk + +ifeq ($(shell lsb_release -cs), trusty) + export ANSIBLE_CRYPTO_BACKEND = pycrypto +endif + diff -Nru ansible-2.3.2.0/docs/api/conf.py ansible-2.4.0.0/docs/api/conf.py --- ansible-2.3.2.0/docs/api/conf.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/api/conf.py 2017-09-19 17:10:47.000000000 +0000 @@ -18,7 +18,7 @@ # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('../bin')) +# sys.path.insert(0, os.path.abspath('../bin')) sys.path.insert(0, os.path.abspath('../lib/ansible')) import sphinx_rtd_theme import alabaster @@ -26,7 +26,7 @@ # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' +# needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom @@ -41,8 +41,8 @@ 'alabaster', ] -#autodoc_default_flags = ['members', 'show-inheritance', 'inherited-members', 'undoc-members',] -autodoc_default_flags = ['members', 'show-inheritance', 'undoc-members',] +# autodoc_default_flags = ['members', 'show-inheritance', 'inherited-members', 'undoc-members', ] +autodoc_default_flags = ['members', 'show-inheritance', 'undoc-members', ] autoclass_content = 'both' autodoc_member_order = 'bysource' autodoc_mock_imports = ['xmltodict', 'winrm', 'redis', 'StricRedis'] @@ -56,7 +56,7 @@ source_suffix = '.rst' # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' @@ -84,9 +84,9 @@ # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. @@ -94,27 +94,27 @@ # The reST default role (used for this markup: `text`) to use for all # documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. 
-#modindex_common_prefix = [] +# modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False +# keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True @@ -124,13 +124,13 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -#html_theme = 'alabaster' -#html_theme_path = ['../docsite/_themes'] -#html_theme = 'srtd' +# html_theme = 'alabaster' +# html_theme_path = ['../docsite/_themes'] +# html_theme = 'srtd' html_short_title = 'Ansible Documentation' -#html_theme = "sphinx_rtd_theme" -#html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] +# html_theme = "sphinx_rtd_theme" +# html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] html_theme_path = [alabaster.get_path()] html_theme = 'alabaster' @@ -138,26 +138,26 @@ # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -#html_theme_options = {} +# html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] +# html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". -#html_title = None +# html_title = None # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -#html_logo = None +# html_logo = None # The name of an image file (relative to this directory) to use as a favicon of # the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, @@ -167,62 +167,62 @@ # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. -#html_extra_path = [] +# html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
-#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -#html_search_language = 'en' +# html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # Now only 'ja' uses this config value -#html_search_options = {'type': 'default'} +# html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. -#html_search_scorer = 'scorer.js' +# html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. htmlhelp_basename = 'Ansibledoc' @@ -231,16 +231,16 @@ latex_elements = { # The paper size ('letterpaper' or 'a4paper'). - #'papersize': 'letterpaper', + # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). - #'pointsize': '10pt', + # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. - #'preamble': '', + # 'preamble': '', # Latex figure (float) alignment - #'figure_align': 'htbp', + # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples @@ -253,23 +253,23 @@ # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True +# latex_domain_indices = True # -- Options for manual page output --------------------------------------- @@ -282,7 +282,7 @@ ] # If true, show URL addresses after external links. -#man_show_urls = False +# man_show_urls = False # -- Options for Texinfo output ------------------------------------------- @@ -297,13 +297,13 @@ ] # Documents to append as an appendix to all manuals. -#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. -#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. 
-#texinfo_no_detailmenu = False +# texinfo_no_detailmenu = False diff -Nru ansible-2.3.2.0/docs/api/Makefile ansible-2.4.0.0/docs/api/Makefile --- ansible-2.3.2.0/docs/api/Makefile 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/api/Makefile 2017-09-19 17:10:47.000000000 +0000 @@ -14,14 +14,9 @@ BUILDDIR = _build RSTDIR = rst MODULES_PATH = ../../lib -EXCLUDE_PATHS = ../../lib/ansible/modules ../../lib/ansible/utils/module_docs_fragments ../../lib/ansible/compat/six ../../lib/ansible/module_utils/six.py +EXCLUDE_PATHS = ../../lib/ansible/modules ../../lib/ansible/utils/module_docs_fragments ../../lib/ansible/module_utils/six DOC_PROJECTS = "Ansible API" -# User-friendly check for sphinx-build -ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) -$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) -endif - # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter diff -Nru ansible-2.3.2.0/docs/bin/dump_config.py ansible-2.4.0.0/docs/bin/dump_config.py --- ansible-2.3.2.0/docs/bin/dump_config.py 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/bin/dump_config.py 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,72 @@ +#!/usr/bin/env python + +import optparse +import os +import sys +import yaml + +from jinja2 import Environment, FileSystemLoader + +DEFAULT_TEMPLATE_FILE = 'config.rst.j2' + + +def generate_parser(): + p = optparse.OptionParser( + version='%prog 1.0', + usage='usage: %prog [options]', + description='Generate module documentation from metadata', + ) + p.add_option("-t", "--template-file", action="store", dest="template_file", default=DEFAULT_TEMPLATE_FILE, help="directory containing Jinja2 templates") + p.add_option("-o", "--output-dir", action="store", dest="output_dir", default='/tmp/', help="Output directory for rst files") + p.add_option("-d", "--docs-source", action="store", dest="docs", default=None, help="Source for attribute docs") + + (options, args) = p.parse_args() + + return p + + +def fix_description(config_options): + '''some descriptions are strings, some are lists. 
workaround it...''' + + for config_key in config_options: + description = config_options[config_key].get('description', []) + if isinstance(description, list): + desc_list = description + else: + desc_list = [description] + config_options[config_key]['description'] = desc_list + return config_options + + +def main(args): + + parser = generate_parser() + (options, args) = parser.parse_args() + + output_dir = os.path.abspath(options.output_dir) + template_file_full_path = os.path.abspath(options.template_file) + template_file = os.path.basename(template_file_full_path) + template_dir = os.path.dirname(os.path.abspath(template_file_full_path)) + + if options.docs: + with open(options.docs) as f: + docs = yaml.safe_load(f) + else: + docs = {} + + config_options = docs + config_options = fix_description(config_options) + + env = Environment(loader=FileSystemLoader(template_dir), trim_blocks=True,) + template = env.get_template(template_file) + output_name = os.path.join(output_dir, template_file.replace('.j2', '')) + temp_vars = {'config_options': config_options} + + with open(output_name, 'w') as f: + f.write(template.render(temp_vars).encode('utf-8')) + + return 0 + + +if __name__ == '__main__': + sys.exit(main(sys.argv[:])) diff -Nru ansible-2.3.2.0/docs/bin/dump_keywords.py ansible-2.4.0.0/docs/bin/dump_keywords.py --- ansible-2.3.2.0/docs/bin/dump_keywords.py 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/bin/dump_keywords.py 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,69 @@ +#!/usr/bin/env python + +import optparse +import yaml + +from jinja2 import Environment, FileSystemLoader + +from ansible.playbook import Play +from ansible.playbook.block import Block +from ansible.playbook.role import Role +from ansible.playbook.task import Task + +template_file = 'playbooks_keywords.rst.j2' +oblist = {} +clist = [] +class_list = [Play, Role, Block, Task] + +p = optparse.OptionParser( + version='%prog 1.0', + usage='usage: %prog [options]', + description='Generate module documentation from metadata', +) +p.add_option("-T", "--template-dir", action="store", dest="template_dir", default="../templates", help="directory containing Jinja2 templates") +p.add_option("-o", "--output-dir", action="store", dest="output_dir", default='/tmp/', help="Output directory for rst files") +p.add_option("-d", "--docs-source", action="store", dest="docs", default=None, help="Source for attribute docs") + +(options, args) = p.parse_args() + +for aclass in class_list: + aobj = aclass() + name = type(aobj).__name__ + + if options.docs: + with open(options.docs) as f: + docs = yaml.safe_load(f) + else: + docs = {} + + # build ordered list to loop over and dict with attributes + clist.append(name) + oblist[name] = dict((x, aobj.__dict__['_attributes'][x]) for x in aobj.__dict__['_attributes'] if 'private' not in x or not x.private) + + # pick up docs if they exist + for a in oblist[name]: + if a in docs: + oblist[name][a] = docs[a] + else: + oblist[name][a] = ' UNDOCUMENTED!! ' + + # loop is really with_ for users + if name == 'Task': + oblist[name]['with_'] = 'with_ is how loops are defined, it can use any available lookup plugin to generate the item list' + + # local_action is implicit with action + if 'action' in oblist[name]: + oblist[name]['local_action'] = 'Same as action but also implies ``delegate_to: localhost``' + + # remove unusable (used to be private?) 
+ for nouse in ('loop', 'loop_args'): + if nouse in oblist[name]: + del oblist[name][nouse] + +env = Environment(loader=FileSystemLoader(options.template_dir), trim_blocks=True,) +template = env.get_template(template_file) +outputname = options.output_dir + template_file.replace('.j2', '') +tempvars = {'oblist': oblist, 'clist': clist} + +with open(outputname, 'w') as f: + f.write(template.render(tempvars)) diff -Nru ansible-2.3.2.0/docs/bin/generate_man.py ansible-2.4.0.0/docs/bin/generate_man.py --- ansible-2.3.2.0/docs/bin/generate_man.py 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/bin/generate_man.py 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,280 @@ +#!/usr/bin/env python + +import optparse +import os +import pprint +import sys + +from jinja2 import Environment, FileSystemLoader + +from ansible.module_utils._text import to_bytes + + +def generate_parser(): + p = optparse.OptionParser( + version='%prog 1.0', + usage='usage: %prog [options]', + description='Generate cli documentation from cli docstrings', + ) + + p.add_option("-t", "--template-file", action="store", dest="template_file", default="../templates/man.j2", help="path to jinja2 template") + p.add_option("-o", "--output-dir", action="store", dest="output_dir", default='/tmp/', help="Output directory for rst files") + p.add_option("-f", "--output-format", action="store", dest="output_format", default='man', help="Output format for docs (the default 'man' or 'rst')") + return p + + +# from https://www.python.org/dev/peps/pep-0257/ +def trim_docstring(docstring): + if not docstring: + return '' + # Convert tabs to spaces (following the normal Python rules) + # and split into a list of lines: + lines = docstring.expandtabs().splitlines() + # Determine minimum indentation (first line doesn't count): + indent = sys.maxint + for line in lines[1:]: + stripped = line.lstrip() + if stripped: + indent = min(indent, len(line) - len(stripped)) + # Remove indentation (first line is special): + trimmed = [lines[0].strip()] + if indent < sys.maxint: + for line in lines[1:]: + trimmed.append(line[indent:].rstrip()) + # Strip off trailing and leading blank lines: + while trimmed and not trimmed[-1]: + trimmed.pop() + while trimmed and not trimmed[0]: + trimmed.pop(0) + # Return a single string: + return '\n'.join(trimmed) + + +def get_options(optlist): + ''' get actual options ''' + + opts = [] + for opt in optlist: + res = { + 'desc': opt.help, + 'options': opt._short_opts + opt._long_opts + } + if opt.action == 'store': + res['arg'] = opt.dest.upper() + opts.append(res) + + return opts + + +def get_option_groups(option_parser): + groups = [] + for option_group in option_parser.option_groups: + group_info = {} + group_info['desc'] = option_group.get_description() + group_info['options'] = option_group.option_list + group_info['group_obj'] = option_group + groups.append(group_info) + return groups + + +def opt_doc_list(cli): + ''' iterate over options lists ''' + + results = [] + for option_group in cli.parser.option_groups: + results.extend(get_options(option_group.option_list)) + + results.extend(get_options(cli.parser.option_list)) + + return results + + +# def opts_docs(cli, name): +def opts_docs(cli_class_name, cli_module_name): + ''' generate doc structure from options ''' + + cli_name = 'ansible-%s' % cli_module_name + if cli_module_name == 'adhoc': + cli_name = 'ansible' + + # WIth no action/subcommand + # shared opts set + # instantiate each cli and ask its options + cli_klass = getattr(__import__("ansible.cli.%s" % 
cli_module_name, + fromlist=[cli_class_name]), cli_class_name) + cli = cli_klass([]) + + # parse the common options + try: + cli.parse() + except: + pass + + # base/common cli info + docs = { + 'cli': cli_module_name, + 'cli_name': cli_name, + 'usage': cli.parser.usage, + 'short_desc': cli.parser.description, + 'long_desc': trim_docstring(cli.__doc__), + 'actions': {}, + } + option_info = {'option_names': [], + 'options': [], + 'groups': []} + + for extras in ('ARGUMENTS'): + if hasattr(cli, extras): + docs[extras.lower()] = getattr(cli, extras) + + common_opts = opt_doc_list(cli) + groups_info = get_option_groups(cli.parser) + shared_opt_names = [] + for opt in common_opts: + shared_opt_names.extend(opt.get('options', [])) + + option_info['options'] = common_opts + option_info['option_names'] = shared_opt_names + + option_info['groups'].extend(groups_info) + + docs.update(option_info) + + # now for each action/subcommand + # force populate parser with per action options + + # use class attrs not the attrs on a instance (not that it matters here...) + for action in getattr(cli_klass, 'VALID_ACTIONS', ()): + # instantiate each cli and ask its options + action_cli_klass = getattr(__import__("ansible.cli.%s" % cli_module_name, + fromlist=[cli_class_name]), cli_class_name) + # init with args with action added? + cli = action_cli_klass([]) + cli.args.append(action) + + try: + cli.parse() + except: + pass + + # FIXME/TODO: needed? + # avoid dupe errors + cli.parser.set_conflict_handler('resolve') + + cli.set_action() + + action_info = {'option_names': [], + 'options': []} + # docs['actions'][action] = {} + # docs['actions'][action]['name'] = action + action_info['name'] = action + action_info['desc'] = trim_docstring(getattr(cli, 'execute_%s' % action).__doc__) + + # docs['actions'][action]['desc'] = getattr(cli, 'execute_%s' % action).__doc__.strip() + action_doc_list = opt_doc_list(cli) + + uncommon_options = [] + for action_doc in action_doc_list: + # uncommon_options = [] + + option_aliases = action_doc.get('options', []) + for option_alias in option_aliases: + + if option_alias in shared_opt_names: + continue + + # TODO: use set + if option_alias not in action_info['option_names']: + action_info['option_names'].append(option_alias) + + if action_doc in action_info['options']: + continue + + uncommon_options.append(action_doc) + + action_info['options'] = uncommon_options + + docs['actions'][action] = action_info + + docs['options'] = opt_doc_list(cli) + return docs + + +if __name__ == '__main__': + + parser = generate_parser() + + options, args = parser.parse_args() + + template_file = options.template_file + template_path = os.path.expanduser(template_file) + template_dir = os.path.abspath(os.path.dirname(template_path)) + template_basename = os.path.basename(template_file) + + output_dir = os.path.abspath(options.output_dir) + output_format = options.output_format + + cli_modules = args + + # various cli parsing things checks sys.argv if the 'args' that are passed in are [] + # so just remove any args so the cli modules dont try to parse them resulting in warnings + sys.argv = [sys.argv[0]] + # need to be in right dir + os.chdir(os.path.dirname(__file__)) + + allvars = {} + output = {} + cli_list = [] + cli_bin_name_list = [] + + # for binary in os.listdir('../../lib/ansible/cli'): + for cli_module_name in cli_modules: + binary = os.path.basename(os.path.expanduser(cli_module_name)) + + if not binary.endswith('.py'): + continue + elif binary == '__init__.py': + continue + + cli_name = 
os.path.splitext(binary)[0] + + if cli_name == 'adhoc': + cli_class_name = 'AdHocCLI' + # myclass = 'AdHocCLI' + output[cli_name] = 'ansible.1.asciidoc.in' + cli_bin_name = 'ansible' + else: + # myclass = "%sCLI" % libname.capitalize() + cli_class_name = "%sCLI" % cli_name.capitalize() + output[cli_name] = 'ansible-%s.1.asciidoc.in' % cli_name + cli_bin_name = 'ansible-%s' % cli_name + + # FIXME: + allvars[cli_name] = opts_docs(cli_class_name, cli_name) + cli_bin_name_list.append(cli_bin_name) + + cli_list = allvars.keys() + + doc_name_formats = {'man': '%s.1.asciidoc.in', + 'rst': '%s.rst'} + + for cli_name in cli_list: + + # template it! + env = Environment(loader=FileSystemLoader(template_dir)) + template = env.get_template(template_basename) + + # add rest to vars + tvars = allvars[cli_name] + tvars['cli_list'] = cli_list + tvars['cli_bin_name_list'] = cli_bin_name_list + tvars['cli'] = cli_name + if '-i' in tvars['options']: + print('uses inventory') + + manpage = template.render(tvars) + filename = os.path.join(output_dir, doc_name_formats[output_format] % tvars['cli_name']) + + with open(filename, 'wb') as f: + f.write(to_bytes(manpage)) + print("Wrote doc to %s" % filename) diff -Nru ansible-2.3.2.0/docs/bin/plugin_formatter.py ansible-2.4.0.0/docs/bin/plugin_formatter.py --- ansible-2.3.2.0/docs/bin/plugin_formatter.py 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/bin/plugin_formatter.py 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,526 @@ +#!/usr/bin/env python +# (c) 2012, Jan-Piet Mens +# (c) 2012-2014, Michael DeHaan and others +# (c) 2017 Ansible Project +# +# This file is part of Ansible +# +# Ansible is free software: you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation, either version 3 of the License, or +# (at your option) any later version. +# +# Ansible is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with Ansible. If not, see . + +from __future__ import absolute_import, division, print_function +__metaclass__ = type + + +import datetime +import glob +import optparse +import os +import re +import sys +import warnings +from collections import defaultdict +try: + from html import escape as html_escape +except ImportError: + # Python-3.2 or later + import cgi + + def html_escape(text, quote=True): + return cgi.escape(text, quote) + +import yaml +from jinja2 import Environment, FileSystemLoader +from six import iteritems + +from ansible.errors import AnsibleError +from ansible.module_utils._text import to_bytes +from ansible.utils import plugin_docs + + +##################################################################################### +# constants and paths + +# if a module is added in a version of Ansible older than this, don't print the version added information +# in the module documentation because everyone is assumed to be running something newer than this already. 
+TO_OLD_TO_BE_NOTABLE = 1.3 + +# Get parent directory of the directory this script lives in +MODULEDIR = os.path.abspath(os.path.join( + os.path.dirname(os.path.realpath(__file__)), os.pardir, 'lib', 'ansible', 'modules' +)) + +# The name of the DOCUMENTATION template +EXAMPLE_YAML = os.path.abspath(os.path.join( + os.path.dirname(os.path.realpath(__file__)), os.pardir, 'examples', 'DOCUMENTATION.yml' +)) + +_ITALIC = re.compile(r"I\(([^)]+)\)") +_BOLD = re.compile(r"B\(([^)]+)\)") +_MODULE = re.compile(r"M\(([^)]+)\)") +_URL = re.compile(r"U\(([^)]+)\)") +_CONST = re.compile(r"C\(([^)]+)\)") + +DEPRECATED = b" (D)" + + +def rst_ify(text): + ''' convert symbols like I(this is in italics) to valid restructured text ''' + + try: + t = _ITALIC.sub(r'*' + r"\1" + r"*", text) + t = _BOLD.sub(r'**' + r"\1" + r"**", t) + t = _MODULE.sub(r':ref:`' + r"\1 <\1>" + r"`", t) + t = _URL.sub(r"\1", t) + t = _CONST.sub(r'``' + r"\1" + r"``", t) + except Exception as e: + raise AnsibleError("Could not process (%s) : %s" % (str(text), str(e))) + + return t + + +def html_ify(text): + ''' convert symbols like I(this is in italics) to valid HTML ''' + + t = html_escape(text) + t = _ITALIC.sub("" + r"\1" + "", t) + t = _BOLD.sub("" + r"\1" + "", t) + t = _MODULE.sub("" + r"\1" + "", t) + t = _URL.sub("" + r"\1" + "", t) + t = _CONST.sub("" + r"\1" + "", t) + + return t + + +def rst_fmt(text, fmt): + ''' helper for Jinja2 to do format strings ''' + + return fmt % (text) + + +def rst_xline(width, char="="): + ''' return a restructured text line of a given length ''' + + return char * width + + +def write_data(text, output_dir, outputname, module=None): + ''' dumps module output to a file or the screen, as requested ''' + + if output_dir is not None: + if module: + outputname = outputname % module + fname = os.path.join(output_dir, outputname) + fname = fname.replace(".py", "") + with open(fname, 'wb') as f: + f.write(to_bytes(text)) + else: + print(text) + + +def get_module_info(module_dir, limit_to_modules=None, verbose=False): + ''' + Returns information about modules and the categories that they belong to + + :arg module_dir: file system path to the top of the modules directory + :kwarg limit_to_modules: If given, this is a list of module names to + generate information for. All other modules will be ignored. + :returns: Tuple of two dicts containing module_info, categories, and + aliases and a set listing deprecated modules: + + :module_info: mapping of module names to information about them. The fields of the dict are: + + :path: filesystem path to the module + :deprecated: boolean. True means the module is deprecated otherwise not. + :aliases: set of aliases to this module name + :metadata: The modules metadata (as recorded in the module) + :doc: The documentation structure for the module + :examples: The module's examples + :returndocs: The module's returndocs + + :categories: maps category names to a dict. The dict contains at + least one key, '_modules' which contains a list of module names in + that category. Any other keys in the dict are subcategories with + the same structure. 
+ ''' + + categories = dict() + module_info = defaultdict(dict) + + # * windows powershell modules have documentation stubs in python docstring + # format (they are not executed) so skip the ps1 format files + # * One glob level for every module level that we're going to traverse + files = ( + glob.glob("%s/*.py" % module_dir) + + glob.glob("%s/*/*.py" % module_dir) + + glob.glob("%s/*/*/*.py" % module_dir) + + glob.glob("%s/*/*/*/*.py" % module_dir) + ) + + for module_path in files: + # Do not list __init__.py files + if module_path.endswith('__init__.py'): + continue + + # Do not list blacklisted modules + module = os.path.splitext(os.path.basename(module_path))[0] + if module in plugin_docs.BLACKLIST['MODULE']: + continue + + # If requested, limit module documentation building only to passed-in + # modules. + if limit_to_modules is not None and module.lower() not in limit_to_modules: + continue + + deprecated = False + if module.startswith("_"): + if os.path.islink(module_path): + # Handle aliases + source = os.path.splitext(os.path.basename(os.path.realpath(module_path)))[0] + module = module.replace("_", "", 1) + aliases = module_info[source].get('aliases', set()) + aliases.add(module) + # In case we just created this via get()'s fallback + module_info[source]['aliases'] = aliases + continue + else: + # Handle deprecations + module = module.replace("_", "", 1) + deprecated = True + + # + # Regular module to process + # + + category = categories + + # Start at the second directory because we don't want the "vendor" + mod_path_only = os.path.dirname(module_path[len(module_dir):]) + + # build up the categories that this module belongs to + for new_cat in mod_path_only.split('/')[1:]: + if new_cat not in category: + category[new_cat] = dict() + category[new_cat]['_modules'] = [] + category = category[new_cat] + + category['_modules'].append(module) + + # use ansible core library to parse out doc metadata YAML and plaintext examples + doc, examples, returndocs, metadata = plugin_docs.get_docstring(module_path, verbose=verbose) + + # save all the information + module_info[module] = {'path': module_path, + 'deprecated': deprecated, + 'aliases': set(), + 'metadata': metadata, + 'doc': doc, + 'examples': examples, + 'returndocs': returndocs, + } + + # keep module tests out of becoming module docs + if 'test' in categories: + del categories['test'] + + return module_info, categories + + +def generate_parser(): + ''' generate an optparse parser ''' + + p = optparse.OptionParser( + version='%prog 1.0', + usage='usage: %prog [options] arg1 arg2', + description='Generate module documentation from metadata', + ) + + p.add_option("-A", "--ansible-version", action="store", dest="ansible_version", default="unknown", help="Ansible version number") + p.add_option("-M", "--module-dir", action="store", dest="module_dir", default=MODULEDIR, help="Ansible library path") + p.add_option("-T", "--template-dir", action="store", dest="template_dir", default="hacking/templates", help="directory containing Jinja2 templates") + p.add_option("-t", "--type", action='store', dest='type', choices=['rst'], default='rst', help="Document type") + p.add_option("-v", "--verbose", action='store_true', default=False, help="Verbose") + p.add_option("-o", "--output-dir", action="store", dest="output_dir", default=None, help="Output directory for module files") + p.add_option("-I", "--includes-file", action="store", dest="includes_file", default=None, help="Create a file containing list of processed modules") + p.add_option("-l", 
"--limit-to-modules", action="store", dest="limit_to_modules", default=None, + help="Limit building module documentation to comma-separated list of modules. Specify non-existing module name for no modules.") + p.add_option('-V', action='version', help='Show version number and exit') + return p + + +def jinja2_environment(template_dir, typ): + + env = Environment(loader=FileSystemLoader(template_dir), + variable_start_string="@{", + variable_end_string="}@", + trim_blocks=True) + env.globals['xline'] = rst_xline + + templates = {} + if typ == 'rst': + env.filters['convert_symbols_to_format'] = rst_ify + env.filters['html_ify'] = html_ify + env.filters['fmt'] = rst_fmt + env.filters['xline'] = rst_xline + templates['plugin'] = env.get_template('plugin.rst.j2') + templates['category_list'] = env.get_template('modules_by_category.rst.j2') + templates['support_list'] = env.get_template('modules_by_support.rst.j2') + templates['list_of_CATEGORY_modules'] = env.get_template('list_of_CATEGORY_modules.rst.j2') + outputname = "%s_module.rst" + else: + raise Exception("unknown module format type: %s" % typ) + + return templates, outputname + + +def too_old(added): + if not added: + return False + try: + added_tokens = str(added).split(".") + readded = added_tokens[0] + "." + added_tokens[1] + added_float = float(readded) + except ValueError as e: + warnings.warn("Could not parse %s: %s" % (added, str(e))) + return False + return added_float < TO_OLD_TO_BE_NOTABLE + + +def process_modules(module_map, templates, outputname, output_dir, ansible_version): + for module in module_map: + print("rendering: %s" % module) + + fname = module_map[module]['path'] + + # crash if module is missing documentation and not explicitly hidden from docs index + if module_map[module]['doc'] is None: + sys.exit("*** ERROR: MODULE MISSING DOCUMENTATION: %s, %s ***\n" % (fname, module)) + + # Going to reference this heavily so make a short name to reference it by + doc = module_map[module]['doc'] + + if module_map[module]['deprecated'] and 'deprecated' not in doc: + sys.exit("*** ERROR: DEPRECATED MODULE MISSING 'deprecated' DOCUMENTATION: %s, %s ***\n" % (fname, module)) + + if 'version_added' not in doc: + sys.exit("*** ERROR: missing version_added in: %s ***\n" % module) + + # + # The present template gets everything from doc so we spend most of this + # function moving data into doc for the template to reference + # + + if module_map[module]['aliases']: + doc['aliases'] = module_map[module]['aliases'] + + # don't show version added information if it's too old to be called out + added = 0 + if doc['version_added'] == 'historical': + del doc['version_added'] + else: + added = doc['version_added'] + + # Strip old version_added for the module + if too_old(added): + del doc['version_added'] + + option_names = [] + + if 'options' in doc and doc['options']: + for (k, v) in iteritems(doc['options']): + # Error out if there's no description + if 'description' not in doc['options'][k]: + raise AnsibleError("Missing required description for option %s in %s " % (k, module)) + + # Error out if required isn't a boolean (people have been putting + # information on when something is required in here. Those need + # to go in the description instead). 
+ required_value = doc['options'][k].get('required', False) + if not isinstance(required_value, bool): + raise AnsibleError("Invalid required value '%s' for option '%s' in '%s' (must be truthy)" % (required_value, k, module)) + + # Strip old version_added information for options + if 'version_added' in doc['options'][k] and too_old(doc['options'][k]['version_added']): + del doc['options'][k]['version_added'] + + # Make sure description is a list of lines for later formatting + if not isinstance(doc['options'][k]['description'], list): + doc['options'][k]['description'] = [doc['options'][k]['description']] + + option_names.append(k) + + option_names.sort() + + doc['option_keys'] = option_names + doc['filename'] = fname + doc['docuri'] = doc['module'].replace('_', '-') + doc['now_date'] = datetime.date.today().strftime('%Y-%m-%d') + doc['ansible_version'] = ansible_version + doc['plainexamples'] = module_map[module]['examples'] # plain text + doc['metadata'] = module_map[module]['metadata'] + + if module_map[module]['returndocs']: + try: + doc['returndocs'] = yaml.safe_load(module_map[module]['returndocs']) + except: + print("could not load yaml: %s" % module_map[module]['returndocs']) + raise + else: + doc['returndocs'] = None + + text = templates['plugin'].render(doc) + + write_data(text, output_dir, outputname, module) + + +def process_categories(mod_info, categories, templates, output_dir, output_name): + for category in sorted(categories.keys()): + module_map = categories[category] + category_filename = output_name % category + + print("*** recording category %s in %s ***" % (category, category_filename)) + + # start a new category file + + category = category.replace("_", " ") + category = category.title() + + subcategories = dict((k, v) for k, v in module_map.items() if k != '_modules') + template_data = {'title': category, + 'category': module_map, + 'subcategories': subcategories, + 'module_info': mod_info, + } + + text = templates['list_of_CATEGORY_modules'].render(template_data) + write_data(text, output_dir, category_filename) + + +def process_support_levels(mod_info, templates, output_dir): + supported_by = {'Ansible Core Team': {'slug': 'core_supported', + 'modules': [], + 'output': 'core_maintained.rst', + 'blurb': "These are :doc:`modules maintained by the" + " Ansible Core Team` and will always ship" + " with Ansible itself."}, + 'Ansible Network Team': {'slug': 'network_supported', + 'modules': [], + 'output': 'network_maintained.rst', + 'blurb': "These are :doc:`modules maintained by the" + " Ansible Network Team` in" + " a relationship similar to how the Ansible Core Team" + " maintains the Core modules."}, + 'Ansible Partners': {'slug': 'partner_supported', + 'modules': [], + 'output': 'partner_maintained.rst', + 'blurb': """ +Some examples of :doc:`Certified Modules` are those submitted by other +companies. Maintainers of these types of modules must watch for any issues reported or pull requests +raised against the module. + +The Ansible Core Team will review all modules becoming certified. Core committers will review +proposed changes to existing Certified Modules once the community maintainers of the module have +approved the changes. Core committers will also ensure that any issues that arise due to Ansible +engine changes will be remediated. Also, it is strongly recommended (but not presently required) +for these types of modules to have unit tests. + +These modules are currently shipped with Ansible, but might be shipped separately in the future. 
+"""}, + 'Ansible Community': {'slug': 'community_supported', + 'modules': [], + 'output': 'community_maintained.rst', + 'blurb': """ +These are :doc:`modules maintained by the Ansible Community`. They **are +not** supported by the Ansible Core Team or by companies/partners associated to the module. + +They are still fully usable, but the response rate to issues is purely up to the community. Best +effort support will be provided but is not covered under any support contracts. + +These modules are currently shipped with Ansible, but will most likely be shipped separately in the future. + """}, + } + + # Separate the modules by support_level + for module, info in mod_info.items(): + if info['metadata']['supported_by'] == 'core': + supported_by['Ansible Core Team']['modules'].append(module) + elif info['metadata']['supported_by'] == 'network': + supported_by['Ansible Network Team']['modules'].append(module) + elif info['metadata']['supported_by'] == 'certified': + supported_by['Ansible Partners']['modules'].append(module) + elif info['metadata']['supported_by'] == 'community': + supported_by['Ansible Community']['modules'].append(module) + else: + raise AnsibleError('Unknown supported_by value: %s' % info['metadata']['supported_by']) + + # Render the module lists + for maintainers, data in supported_by.items(): + template_data = {'maintainers': maintainers, + 'modules': data['modules'], + 'slug': data['slug'], + 'module_info': mod_info, + } + text = templates['support_list'].render(template_data) + write_data(text, output_dir, data['output']) + + +def validate_options(options): + ''' validate option parser options ''' + + if not options.module_dir: + sys.exit("--module-dir is required", file=sys.stderr) + if not os.path.exists(options.module_dir): + sys.exit("--module-dir does not exist: %s" % options.module_dir, file=sys.stderr) + if not options.template_dir: + sys.exit("--template-dir must be specified") + + +def main(): + + p = generate_parser() + + (options, args) = p.parse_args() + validate_options(options) + + templates, outputname = jinja2_environment(options.template_dir, options.type) + + # Convert passed-in limit_to_modules to None or list of modules. 
+ if options.limit_to_modules is not None: + options.limit_to_modules = [s.lower() for s in options.limit_to_modules.split(",")] + + mod_info, categories = get_module_info(options.module_dir, limit_to_modules=options.limit_to_modules, + verbose=options.verbose) + + categories['all'] = {'_modules': mod_info.keys()} + + # Transform the data + if options.type == 'rst': + for record in mod_info.values(): + record['doc']['short_description'] = rst_ify(record['doc']['short_description']) + + # Write master category list + category_list_text = templates['category_list'].render(categories=sorted(categories.keys())) + write_data(category_list_text, options.output_dir, 'modules_by_category.rst') + + # Render all the individual module pages + process_modules(mod_info, templates, outputname, options.output_dir, options.ansible_version) + + # Render all the categories for modules + process_categories(mod_info, categories, templates, options.output_dir, "list_of_%s_modules.rst") + + # Render all the categories for modules + process_support_levels(mod_info, templates, options.output_dir) + + +if __name__ == '__main__': + main() diff -Nru ansible-2.3.2.0/docs/bin/testing_formatter.sh ansible-2.4.0.0/docs/bin/testing_formatter.sh --- ansible-2.3.2.0/docs/bin/testing_formatter.sh 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/bin/testing_formatter.sh 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,12 @@ +#!/bin/bash -eu + +cat <<- EOF > ../docsite/rst/dev_guide/testing/sanity/index.rst +Sanity Tests +============ + +The following sanity tests are available as \`\`--test\`\` options for \`\`ansible-test sanity\`\`: + +$(for test in $(../../test/runner/ansible-test sanity --list-tests); do echo "- :doc:\`${test} <${test}>\`"; done) + +This list is also available using \`\`ansible-test sanity --list-tests\`\`. +EOF diff -Nru ansible-2.3.2.0/docs/docsite/conf.py ansible-2.4.0.0/docs/docsite/conf.py --- ansible-2.3.2.0/docs/docsite/conf.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/conf.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,226 +0,0 @@ -# -*- coding: utf-8 -*- -# -# documentation build configuration file, created by -# sphinx-quickstart on Sat Sep 27 13:23:22 2008-2009. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# The contents of this file are pickled, so don't put values in the namespace -# that aren't pickleable (module imports are okay, they're removed -# automatically). -# -# All configuration values have a default value; values that are commented out -# serve to show the default value. - -import sys -import os - -# pip install sphinx_rtd_theme -#import sphinx_rtd_theme -#html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] - -# If your extensions are in another directory, add it here. If the directory -# is relative to the documentation root, use os.path.abspath to make it -# absolute, like shown here. -#sys.path.append(os.path.abspath('some/directory')) -# -sys.path.insert(0, os.path.join('ansible', 'lib')) -sys.path.append(os.path.abspath('_themes')) - -VERSION='2.2' -AUTHOR='Ansible, Inc' - - -# General configuration -# --------------------- - -# Add any Sphinx extension module names here, as strings. -# They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc'] - -# Later on, add 'sphinx.ext.viewcode' to the list if you want to have -# colorized code generated too for references. 
- - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['.templates'] - -# The suffix of source filenames. -source_suffix = '.rst' - -# The master toctree document. -master_doc = 'index' - -# General substitutions. -project = 'Ansible Documentation' -copyright = "2013-2017 Ansible, Inc" - -# The default replacements for |version| and |release|, also used in various -# other places throughout the built documents. -# -# The short X.Y version. -version = VERSION -# The full version, including alpha/beta/rc tags. -release = VERSION - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -today_fmt = '%B %d, %Y' - -# List of documents that shouldn't be included in the build. -#unused_docs = [] - -# List of directories, relative to source directories, that shouldn't be -# searched for source files. -#exclude_dirs = [] - -# A list of glob-style patterns that should be excluded when looking -# for source files. -exclude_patterns = ['modules'] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -highlight_language = 'YAML+Jinja' - -#Substitutions, variables, entities, & shortcuts for text which do not need to link to anything. -#For titles which should be a link, use the intersphinx anchors set at the index, chapter, and section levels, such as qi_start_: -rst_epilog = """ -.. |acapi| replace:: *Ansible Core API Guide* -.. |acrn| replace:: *Ansible Core Release Notes* -.. |ac| replace:: Ansible Core -.. |acversion| replace:: Ansible Core Version 2.1 -.. |acversionshort| replace:: Ansible Core 2.1 -.. |versionshortest| replace:: 2.2 -.. |versiondev| replace:: 2.3 -.. |pubdate| replace:: July 19, 2016 -.. |rhel| replace:: Red Hat Enterprise Linux - -""" - - -# Options for HTML output -# ----------------------- - -html_theme_path = ['_themes'] -html_theme = 'srtd' -html_short_title = 'Ansible Documentation' - -# The style sheet to use for HTML and HTML Help pages. A file of that name -# must exist either in Sphinx' static/ path, or in one of the custom paths -# given in html_static_path. -#html_style = 'solar.css' - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -html_title = 'Ansible Documentation' - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (within the static path) to place at the top of -# the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = 'favicon.ico' - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". 
-#html_static_path = ['.static'] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_use_modindex = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, the reST sources are included in the HTML build as _sources/. -html_copy_source = False - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = '' - -# Output file base name for HTML help builder. -htmlhelp_basename = 'Poseidodoc' - - -# Options for LaTeX output -# ------------------------ - -# The paper size ('letter' or 'a4'). -#latex_paper_size = 'letter' - -# The font size ('10pt', '11pt' or '12pt'). -#latex_font_size = '10pt' - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, document class -# [howto/manual]). -latex_documents = [ - ('index', 'ansible.tex', 'Ansible 2.2 Documentation', AUTHOR, 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# Additional stuff for the LaTeX preamble. -#latex_preamble = '' - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_use_modindex = True - -autoclass_content = 'both' Binary files /tmp/tmpeNoH6g/W6Wsj24ec7/ansible-2.3.2.0/docs/docsite/jinja2-2.9.7.inv and /tmp/tmpeNoH6g/QxW1Nd0MIc/ansible-2.4.0.0/docs/docsite/jinja2-2.9.7.inv differ diff -Nru ansible-2.3.2.0/docs/docsite/keyword_desc.yml ansible-2.4.0.0/docs/docsite/keyword_desc.yml --- ansible-2.3.2.0/docs/docsite/keyword_desc.yml 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/keyword_desc.yml 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,57 @@ +accelerate: DEPRECATED, set to True to use accelerate connection plugin. +accelerate_ipv6: "DEPRECATED, set to True to force accelerate plugin to use ipv6 for it's connection." +accelerate_port: DEPRECATED, set to override default port use for accelerate connection. +action: "The 'action' to execute for a task, it normally translates into a C(module) or action plugin." +args: DEPRECATED, A secondary way to add arguments into a task, it takes a dictionary in which keys map to options and values .. well you get it. +always: List of tasks, in a block, that execute no matter if there is an error in the block or not. +always_run: DEPRECATED, forces a task to run even in check mode, use :term:`check_mode` directive instead. +any_errors_fatal: Force any un-handled task errors on any host to propagate to all hosts and end the play. 
+async: Run a task asyncronouslly if the C(action) supports this. +become: Boolean that controls if privilege escalation is used or not on :term:`Task` execution. +become_flags: A string of flag(s) to pass to the privilege escalation program when :term:`become` is True. +become_method: Which method of privilege escalation to use. i.e. sudo/su/etc. +become_user: "User that you 'become' after using privilege escalation, the remote/login user must have permissions to become this user." +block: List of tasks in a block. +changed_when: "Conditional expression that overrides the task's normal 'changed' status." +check_mode: "A boolean that controls if a task is executed in 'check' mode" +connection: Allows you to change the connection plugin used for tasks to execute on the target. +delay: Number of seconds to delay between retries, this setting is only used in combination with :term:`until`. +delegate_facts: Boolean that allows you to apply facts to delegated host instead of inventory_hostname. +delegate_to: Host to execute task instead of the target (inventory_hostname), connection vars from the delegated host will also be used for the task. +diff: "Toggle to make tasks return 'diff' information or not." +environment: A dictionary that gets converted into environment vars to be provided for the task upon execution. +fact_path: Set the fact path option for the fact gathering plugin controlled by :term:`gather_facts`. +failed_when: "Conditional expression that overrides the task's normal 'failed' status." +force_handlers: Will force notified handler execution for hosts even if they failed during the play, it will not trigger if the play itself fails. +gather_facts: "A boolean that controls if the play will automatically run the 'setup' task to gather facts for the hosts." +gather_subset: Allows you to pass subset options to the fact gathering plugin controlled by :term:`gather_facts`. +gather_timeout: Allows you to set the timeout for the fact gathering plugin controlled by :term:`gather_facts`. +handlers: "A section with tasks that are treated as handlers, these won't get executed normally, only when notified. After each section of tasks is complete." +hosts: "A list of groups, hosts or host pattern that translates into a list of hosts that are the play's target." +ignore_errors: Boolean that allows you to ignore task failures and continue with play. It does not affect connection errors. +loop_control: "Several keys here allow you to modify/set loop behaviour in a task see http://docs.ansible.com/ansible/latest/playbooks_loops.html#loop-control for details." +max_fail_percentage: can be used to abort the run after a given percentage of hosts in the current batch has failed. +name: "It's a name, works mostly for documentation, in the case of tasks/handlers it can be an identifier." +no_log: Boolean that controls information disclosure. +notify: "list of handlers to notify when the task returns a 'changed=True' status." +order: Controls the sorting of hosts as they are used for executing the play. Possible values are inventory (default), sorted, reverse_sorted, reverse_inventory and shuffle. +poll: Sets the polling interval in seconds for async tasks (default 10s). +port: Used to override the default port used in a connection. +post_tasks: A list of tasks to execute after the :term:`tasks` section. +pre_tasks: A list of tasks to execute before :term:`roles`. +remote_user: User used to log into the target via the connection plugin. AKA login user. 
+register: Name of variable that will contain task status and module return data. +rescue: List of tasks in a :term:`block` that run if there is a task error in the main :term:`block` list. +retries: "Number of retries before giving up in a :term:`until` loop. This setting is only used in combination with :term:`until`." +roles: List of roles to be imported into the play +run_once: Boolean that will bypass the host loop, forcing the task to execute on the first host available and will also apply any facts to all active hosts. +serial: Defines the 'batch' of hosts to execute the current play until the end. +strategy: Allows you to choose the connection plugin to use for the play. +tags: Tags applied to the task or included tasks, this allows selecting subsets of tasks from the command line. +tasks: Main list of tasks to execute in the play, they run after :term:`roles` and before :term:`post_tasks`. +until: "This keyword implies a ':term:`retries` loop' that will go on until the condition supplied here is met or we hit the :term:`retries` limit." +vars: Dictionary/map of variables +vars_files: List of files that contain vars to include in the play. +vars_prompt: list of variables to prompt for. +vault_password: Secret used to decrypt vaulted files or variables. +when: Conditional expression, determines if an iteration of a task is run or not. diff -Nru ansible-2.3.2.0/docs/docsite/Makefile ansible-2.4.0.0/docs/docsite/Makefile --- ansible-2.3.2.0/docs/docsite/Makefile 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/Makefile 2017-09-19 17:10:47.000000000 +0000 @@ -1,18 +1,27 @@ OS := $(shell uname -s) SITELIB = $(shell python -c "from distutils.sysconfig import get_python_lib; print get_python_lib()"): -FORMATTER=../../hacking/module_formatter.py -DUMPER=../../hacking/dump_playbook_attributes.py +FORMATTER=../bin/plugin_formatter.py +TESTING_FORMATTER=../bin/testing_formatter.sh +DUMPER=../bin/dump_keywords.py +CONFIG_DUMPER=../bin/dump_config.py +GENERATE_CLI=../bin/generate_man.py ifeq ($(shell echo $(OS) | egrep -ic 'Darwin|FreeBSD|OpenBSD|DragonFly'),1) CPUS ?= $(shell sysctl hw.ncpu|awk '{print $$2}') else CPUS ?= $(shell nproc) endif +assertrst: +ifndef rst + $(error specify document or pattern with rst=somefile.rst) +endif + all: docs docs: clean htmldocs -htmldocs: directives modules staticmin +htmldocs: testing keywords modules staticmin cli config + CPUS=$(CPUS) $(MAKE) -f Makefile.sphinx html webdocs: docs @@ -22,7 +31,7 @@ -rm -rf htmlout -rm -rf _build -rm -f .buildinfo - -rm -f *.inv + -rm -f objects.inv -rm -rf *.doctrees @echo "Cleaning up minified css files" find . 
-type f -name "*.min.css" -delete @@ -35,15 +44,40 @@ -rm rst/list_of_*.rst -rm rst/*_by_category.rst -rm rst/*_module.rst + -rm rst/*_maintained.rst -rm rst/playbooks_directives.rst + -rm rst/playbooks_keywords.rst +# -rm rst/cli/ansible*.rst .PHONEY: docs clean -directives: $(FORMATTER) ../../hacking/templates/rst.j2 - PYTHONPATH=../../lib $(DUMPER) --template-dir=../../hacking/templates --output-dir=rst/ +# TODO: make generate_man output dir cli option +cli: $(GENERATE_CLI) + PYTHONPATH=../../lib $(GENERATE_CLI) --template-file=../templates/cli_rst.j2 --output-dir=rst/ --output-format rst ../../lib/ansible/cli/*.py + +keywords: $(FORMATTER) ../templates/playbooks_keywords.rst.j2 + PYTHONPATH=../../lib $(DUMPER) --template-dir=../templates --output-dir=rst/ -d ./keyword_desc.yml + +config: + PYTHONPATH=../../lib $(CONFIG_DUMPER) --template-file=../templates/config.rst.j2 --output-dir=rst/ -d ../../lib/ansible/config/base.yml + +modules: $(FORMATTER) ../templates/plugin.rst.j2 +# Limit building of module documentation if requested. +ifdef MODULES + PYTHONPATH=../../lib $(FORMATTER) -t rst --template-dir=../templates --module-dir=../../lib/ansible/modules -o rst/ -l $(MODULES) +else + PYTHONPATH=../../lib $(FORMATTER) -t rst --template-dir=../templates --module-dir=../../lib/ansible/modules -o rst/ +endif -modules: $(FORMATTER) ../../hacking/templates/rst.j2 - PYTHONPATH=../../lib $(FORMATTER) -t rst --template-dir=../../hacking/templates --module-dir=../../lib/ansible/modules -o rst/ +testing: + $(TESTING_FORMATTER) staticmin: cat _themes/srtd/static/css/theme.css | sed -e 's/^[ ]*//g; s/[ ]*$$//g; s/\([:{;,]\) /\1/g; s/ {/{/g; s/\/\*.*\*\///g; /^$$/d' | sed -e :a -e '$$!N; s/\n\(.\)/\1/; ta' > _themes/srtd/static/css/theme.min.css + +epub: + (CPUS=$(CPUS) make -f Makefile.sphinx epub) + +htmlsingle: assertrst + sphinx-build -j $(CPUS) -b html -d _build/doctrees ./rst _build/html rst/$(rst) + @echo "Output is in _build/html/$(rst:.rst=.html)" Binary files /tmp/tmpeNoH6g/W6Wsj24ec7/ansible-2.3.2.0/docs/docsite/python2-2.7.13.inv and /tmp/tmpeNoH6g/QxW1Nd0MIc/ansible-2.4.0.0/docs/docsite/python2-2.7.13.inv differ Binary files /tmp/tmpeNoH6g/W6Wsj24ec7/ansible-2.3.2.0/docs/docsite/python3-3.6.2.inv and /tmp/tmpeNoH6g/QxW1Nd0MIc/ansible-2.4.0.0/docs/docsite/python3-3.6.2.inv differ diff -Nru ansible-2.3.2.0/docs/docsite/README.md ansible-2.4.0.0/docs/docsite/README.md --- ansible-2.3.2.0/docs/docsite/README.md 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/README.md 2017-09-19 17:10:47.000000000 +0000 @@ -14,6 +14,12 @@ To include module documentation you'll need to run `make webdocs` at the top level of the repository. The generated html files are in docsite/htmlout/. +To limit module documentation building to a specific module, run `MODULES=NAME +make webdocs` instead. This should make testing module documentation syntax much +faster. Instead of a single module, you can also specify a comma-separated list +of modules. In order to skip building documentation for all modules, specify +non-existing module name, for example `MODULES=none make webdocs`. + If you do not want to learn the reStructuredText format, you can also [file issues] about documentation problems on the Ansible GitHub project. 
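The new docs Makefile targets above tie together the added docs/bin scripts. The keywords target feeds keyword_desc.yml to dump_keywords.py, which overlays those descriptions on the attributes of the Play, Role, Block and Task classes; anything without an entry is flagged as undocumented. A minimal Python sketch of that overlay step (the helper name keyword_docs is illustrative, not part of the patch):

    import yaml

    def keyword_docs(attribute_names, desc_file='keyword_desc.yml'):
        # Overlay YAML descriptions on playbook attribute names, mirroring the
        # loop in docs/bin/dump_keywords.py; attributes missing from the YAML
        # get the same ' UNDOCUMENTED!! ' marker the script uses.
        with open(desc_file) as f:
            docs = yaml.safe_load(f) or {}
        return {name: docs.get(name, ' UNDOCUMENTED!! ') for name in attribute_names}

The MODULES=NAME make webdocs behaviour described in the README hunk above maps onto the new --limit-to-modules handling in docs/bin/plugin_formatter.py: the comma-separated value is lower-cased and any module not named is skipped. A rough sketch, assuming a hypothetical filter_modules helper:

    def filter_modules(all_modules, limit_to_modules=None):
        # Keep only the modules named in an optional comma-separated limit,
        # as plugin_formatter.py does with its -l/--limit-to-modules option.
        if limit_to_modules is None:
            return list(all_modules)
        wanted = [name.lower() for name in limit_to_modules.split(',')]
        return [m for m in all_modules if m.lower() in wanted]

    # MODULES=copy,file keeps just those two modules; MODULES=none matches
    # nothing, which is how the README suggests skipping module docs entirely.
    print(filter_modules(['copy', 'file', 'yum'], 'copy,file'))  # ['copy', 'file']
    print(filter_modules(['copy', 'file', 'yum'], 'none'))       # []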
diff -Nru ansible-2.3.2.0/docs/docsite/rst/command_line_tools.rst ansible-2.4.0.0/docs/docsite/rst/command_line_tools.rst --- ansible-2.3.2.0/docs/docsite/rst/command_line_tools.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/command_line_tools.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,15 @@ +Command Line Tools +================== + + +.. toctree:: :maxdepth: 1 + + ansible + ansible-playbook + ansible-vault + ansible-galaxy + ansible-console + ansible-config + ansible-doc + ansible-inventory + ansible-pull diff -Nru ansible-2.3.2.0/docs/docsite/rst/committer_guidelines.rst ansible-2.4.0.0/docs/docsite/rst/committer_guidelines.rst --- ansible-2.3.2.0/docs/docsite/rst/committer_guidelines.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/committer_guidelines.rst 2017-09-19 17:10:47.000000000 +0000 @@ -12,7 +12,7 @@ Features, High Level Design, and Roadmap ======================================== -As a core team member, you are an integral part of the team that develops the roadmap. Please be engaged, and push for the features and fixes that you want to see. Also keep in mind that Red Hat, as a company, will commit to certain features, fixes, APIs, etc. for various releases. Red Hat, the company, and the Ansible team must get these committed features (etc.) completed and released as scheduled. Obligations to users, the community, and customers must come first. Because of these commitments, a feature you want to develop yourself many not get into a release if it impacts a lot of other parts within Ansible. +As a core team member, you are an integral part of the team that develops the roadmap. Please be engaged, and push for the features and fixes that you want to see. Also keep in mind that Red Hat, as a company, will commit to certain features, fixes, APIs, etc. for various releases. Red Hat, the company, and the Ansible team must get these committed features (etc.) completed and released as scheduled. Obligations to users, the community, and customers must come first. Because of these commitments, a feature you want to develop yourself may not get into a release if it impacts a lot of other parts within Ansible. Any other new features and changes to high level design should go through the proposal process (TBD), to ensure the community and core team have had a chance to review the idea and approve it. The core team has sole responsibility for merging new features based on proposals. @@ -39,7 +39,7 @@ General Rules ============= -Individuals with direct commit access to ansible/ansible (+core, + extras) are entrusted with powers that allow them to do a broad variety of things--probably more than we can write down. Rather than rules, treat these as general *guidelines*, individuals with this power are expected to use their best judgement. +Individuals with direct commit access to ansible/ansible are entrusted with powers that allow them to do a broad variety of things--probably more than we can write down. Rather than rules, treat these as general *guidelines*, individuals with this power are expected to use their best judgement. 
* Don’t @@ -114,11 +114,13 @@ +---------------------+----------------------+--------------------+----------------------+ | Trond Hindenes | trondhindenes | | | +---------------------+----------------------+--------------------+----------------------+ -| Jon Hawkesworth | jhawkseworth | jhawkseworth | | +| Jon Hawkesworth | jhawkesworth | jhawkesworth | | +---------------------+----------------------+--------------------+----------------------+ -| Will Thames | wilthames | willthames | | +| Will Thames | willthames | willthames | | +---------------------+----------------------+--------------------+----------------------+ | Ryan Brown | ryansb | ryansb | | +---------------------+----------------------+--------------------+----------------------+ | Adrian Likins | alikins | alikins | | +---------------------+----------------------+--------------------+----------------------+ +| Dag Wieers | dagwieers | dagwieers | dag@wieers.com | ++---------------------+----------------------+--------------------+----------------------+ diff -Nru ansible-2.3.2.0/docs/docsite/rst/common_return_values.rst ansible-2.4.0.0/docs/docsite/rst/common_return_values.rst --- ansible-2.3.2.0/docs/docsite/rst/common_return_values.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/common_return_values.rst 2017-09-19 17:10:47.000000000 +0000 @@ -3,10 +3,10 @@ .. contents:: Topics -Ansible modules normally return a data structure that can be registered into a variable, or seen directly when output by +Ansible modules normally return a data structure that can be registered into a variable, or seen directly when output by the `ansible` program. Each module can optionally document its own unique return values (visible through ansible-doc and https://docs.ansible.com). -This document covers return values common to all modules. +This document covers return values common to all modules. .. note:: Some of these keys might be set by Ansible itself once it processes the module's return information. @@ -54,7 +54,7 @@ stderr_lines ```````````` -When c(stderr) is returned we also always provide this field which is a list of strings, one item per line from the original. +When `stderr` is returned we also always provide this field which is a list of strings, one item per line from the original. stdout `````` @@ -62,7 +62,7 @@ stdout_lines ```````````` -When c(stdout) is returned, Ansible always provides a list of strings, each containing one item per line from the original output. +When `stdout` is returned, Ansible always provides a list of strings, each containing one item per line from the original output. .. _internal_return_values: diff -Nru ansible-2.3.2.0/docs/docsite/rst/community/code_of_conduct.rst ansible-2.4.0.0/docs/docsite/rst/community/code_of_conduct.rst --- ansible-2.3.2.0/docs/docsite/rst/community/code_of_conduct.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/community/code_of_conduct.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,144 @@ +************************* +Community Code of Conduct +************************* + +.. contents:: Topics + +Every community can be strengthened by a diverse variety of viewpoints, insights, +opinions, skillsets, and skill levels. However, with diversity comes the potential for +disagreement and miscommunication. 
The purpose of this Code of Conduct is to ensure that +disagreements and differences of opinion are conducted respectfully and on their own +merits, without personal attacks or other behavior that might create an unsafe or +unwelcoming environment. + +These policies are not designed to be a comprehensive set of Things You Cannot Do. We ask +that you treat your fellow community members with respect and courtesy, and in general, +Don't Be A Jerk. This Code of Conduct is meant to be followed in spirit as much as in +letter and is not exhaustive. + +All Ansible events and participants therein are governed by this Code of Conduct and +anti-harassment policy. We expect organizers to enforce these guidelines throughout all events, +and we expect attendees, speakers, sponsors, and volunteers to help ensure a safe +environment for our whole community. Specifically, this Code of Conduct covers +participation in all Ansible-related forums and mailing lists, code and documentation +contributions, public IRC channels, private correspondence, and public meetings. + +Ansible community members are... + +**Considerate** + +Contributions of every kind have far-ranging consequences. Just as your work depends on +the work of others, decisions you make surrounding your contributions to the Ansible +community will affect your fellow community members. You are strongly encouraged to take +those consequences into account while making decisions. + +**Patient** + +Asynchronous communication can come with its own frustrations, even in the most responsive +of communities. Please remember that our community is largely built on volunteered time, +and that questions, contributions, and requests for support may take some time to receive +a response. Repeated "bumps" or "reminders" in rapid succession are not good displays of +patience. Additionally, it is considered poor manners to ping a specific person with +general questions. Pose your question to the community as a whole, and wait patiently for +a response. + +**Respectful** + +Every community inevitably has disagreements, but remember that it is +possible to disagree respectfully and courteously. Disagreements are never an excuse for +rudeness, hostility, threatening behavior, abuse (verbal or physical), or personal attacks. + +**Kind** + +Everyone should feel welcome in the Ansible community, regardless of their background. +Please be courteous, respectful and polite to fellow community members. Do not make or +post offensive comments related to skill level, gender, gender identity or expression, +sexual orientation, disability, physical appearance, body size, race, or religion. +Sexualized images or imagery, real or implied violence, intimidation, oppression, +stalking, sustained disruption of activities, publishing the personal information of +others without explicit permission to do so, unwanted physical contact, and unwelcome +sexual attention are all strictly prohibited. Additionally, you are encouraged not to +make assumptions about the background or identity of your fellow community members. + +**Inquisitive** + +The only stupid question is the one that does not get asked. We +encourage our users to ask early and ask often. Rather than asking whether you can ask a +question (the answer is always yes!), instead, simply ask your question. You are +encouraged to provide as many specifics as possible. Code snippets in the form of Gists or +other paste site links are almost always needed in order to get the most helpful answers. 
+Refrain from pasting multiple lines of code directly into the IRC channels - instead use +gist.github.com or another paste site to provide code snippets. + +**Helpful** + +The Ansible community is committed to being a welcoming environment for all users, +regardless of skill level. We were all beginners once upon a time, and our community +cannot grow without an environment where new users feel safe and comfortable asking questions. +It can become frustrating to answer the same questions repeatedly; however, community +members are expected to remain courteous and helpful to all users equally, regardless of +skill or knowledge level. Avoid providing responses that prioritize snideness and snark over +useful information. At the same time, everyone is expected to read the provided +documentation thoroughly. We are happy to answer questions, provide strategic guidance, +and suggest effective workflows, but we are not here to do your job for you. + +Anti-harassment policy +====================== + +Harassment includes (but is not limited to) all of the following behaviors: + +- Offensive comments related to gender (including gender expression and identity), age, sexual orientation, disability, physical appearance, body size, race, and religion +- Derogatory terminology including words commonly known to be slurs +- Posting sexualized images or imagery in public spaces +- Deliberate intimidation +- Stalking +- Posting others' personal information without explicit permission +- Sustained disruption of talks or other events +- Inappropriate physical contact +- Unwelcome sexual attention + +Participants asked to stop any harassing behavior are expected to comply immediately. +Sponsors are also subject to the anti-harassment policy. In particular, sponsors should +not use sexualized images, activities, or other material. Meetup organizing staff and +other volunteer organizers should not use sexualized attire or otherwise create a +sexualized environment at community events. + +In addition to the behaviors outlined above, continuing to behave a certain way after you +have been asked to stop also constitutes harassment, even if that behavior is not +specifically outlined in this policy. It is considerate and respectful to stop doing +something after you have been asked to stop, and all community members are expected to +comply with such requests immediately. + +Policy violations +================= + +Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by +contacting `codeofconduct@ansible.com `_, to any channel +operator in the community IRC channels, or to the local organizers of an event. Meetup +organizers are encouraged to prominently display points of contact for reporting unacceptable +behavior at local events. + +If a participant engages in harassing behavior, the meetup organizers may take any action +they deem appropriate. These actions may include but are not limited to warning the +offender, expelling the offender from the event, and barring the offender from future +community events. + +Organizers will be happy to help participants contact security or local law enforcement, +provide escorts to an alternate location, or otherwise assist those experiencing +harassment to feel safe for the duration of the meetup. We value the safety and well-being +of our community members and want everyone to feel welcome at our events, both online and +offline. 
+ +We expect all participants, organizers, speakers, and attendees to follow these policies at +all of our event venues and event-related social events. + +The Ansible Community Code of Conduct is licensed under the Creative Commons +Attribution-Share Alike 3.0 license. Our Code of Conduct was adapted from Codes of Conduct +of other open source projects, including: + +* Contributor Covenant +* Elastic +* The Fedora Project +* OpenStack +* Puppet Labs +* Ubuntu diff -Nru ansible-2.3.2.0/docs/docsite/rst/community/communication.rst ansible-2.4.0.0/docs/docsite/rst/community/communication.rst --- ansible-2.3.2.0/docs/docsite/rst/community/communication.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/community/communication.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,68 @@ +************* +Communicating +************* + +.. contents:: Topics + +Mailing List Information +======================== + +Ansible has several mailing lists. Your first post to the mailing list will be moderated (to reduce spam), so please allow up to a day or so for your first post to appear. + +`Ansible Project List `_ is for sharing Ansible tips, answering questions, and general user discussion. + +`Ansible Development List `_ is for learning how to develop on Ansible, asking about prospective feature design, or discussions about extending ansible or features in progress. + +`Ansible Announce list `_ is a read-only list that shares information about new releases of Ansible, and also rare infrequent event information, such as announcements about an upcoming AnsibleFest, which is our official conference series. + +`Ansible Container List `_ is for users and developers of the Ansible Container project. + +`Ansible Lockdown List `_ is for all things related to Ansible Lockdown projects, including DISA STIG automation and CIS Benchmarks. + +To subscribe to a group from a non-Google account, you can send an email to the subscription address requesting the subscription. For example: `ansible-devel+subscribe@googlegroups.com` + +IRC Channel +=========== + +Ansible has several IRC channels on Freenode (irc.freenode.net). + +General Channels +---------------- + +- ``#ansible`` - For general use questions and support. +- ``#ansible-devel`` - For discussions on developer topics and code related to features/bugs. +- ``#ansible-meeting`` - For public community meetings. We will generally announce these on one or more of the above mailing lists. See the `meeting schedule and agenda page `_ +- ``#ansible-notices`` - Mostly bot output from things like GitHub, etc. + +Working Group +------------- + +- ``#ansible-aws`` - For discussions on Amazon Web Services. +- ``#ansible-community`` - Channel for discussing Ansible Community related things. +- ``#ansible-container`` - For discussions on Ansible Container. +- ``#ansible-jboss`` - Channel for discussing JBoss and Ansible related things. +- ``#ansible-network`` - Channel for discussing Network and Ansible related things. +- ``#ansible-news`` - Channel for discussing Ansible Communication & News related things. +- ``#ansible-vmware`` - For discussions on Ansible & VMware. +- ``#ansible-windows`` - For discussions on Ansible & Windows. + + +Language specific channels +-------------------------- + +- ``#ansible-es`` - Channel for Spanish speaking Ansible community. +- ``#ansible-fr`` - Channel for French speaking Ansible community. 
+ + +IRC Meetings +------------ + +The Ansible community holds regular IRC meetings on various topics, and anyone who is interested is invited to +participate. For more information about Ansible meetings, consult the `meeting schedule and agenda page `_. + +Tower Support Questions +======================== + +Ansible `Tower `_ is a UI, Server, and REST endpoint for Ansible. + +If you have a question about Ansible Tower, visit `Red Hat support `_ rather than using the IRC channel or the general project mailing list. diff -Nru ansible-2.3.2.0/docs/docsite/rst/community/development_process.rst ansible-2.4.0.0/docs/docsite/rst/community/development_process.rst --- ansible-2.3.2.0/docs/docsite/rst/community/development_process.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/community/development_process.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,113 @@ +The Ansible Development Process +=============================== + +.. contents:: Topics + +This section discusses how the Ansible development and triage process works. + +Road Maps +========= + +The Ansible Core team provides a road map for each upcoming release. These road maps can be found `here `_. + +Pull Requests +============= + +Ansible accepts code via **pull requests** ("PRs" for short). GitHub provides a great overview of `how the pull request process works `_ in general. + +Because Ansible receives many pull requests, we use an automated process to help us through the process of reviewing and merging pull requests. That process is managed by **Ansibullbot**. + +Ansibullbot +=========== + +Overview +-------- + +`Ansibullbot`_ serves many functions: + +- Responds quickly to PR submitters to thank them for submitting their PR +- Identifies the community maintainer responsible for reviewing PRs for any files affected +- Tracks the current status of PRs +- Pings responsible parties to remind them of any PR actions for which they may be responsible +- Provides maintainers with the ability to move PRs through the workflow +- Identifies PRs abandoned by their submitters so that we can close them +- Identifies modules abandoned by their maintainers so that we can find new maintainers + +Community Maintainers +--------------------- + +Each module has at least one assigned maintainer, listed in a `maintainer's file`_: + +.. _Ansibullbot: https://github.com/ansible/ansibullbot/blob/master/ISSUE_HELP.md +.. _maintainer's file: https://github.com/ansible/ansible/blob/devel/.github/BOTMETA.yml + +Some modules have no community maintainers assigned. In this case, the maintainer is listed as ``$team_ansible``. Ultimately, it’s our goal to have at least one community maintainer for every module. + +The maintainer’s job is to review PRs and decide whether that PR should be merged (``shipit``) or revised (``needs_revision``). + +The ultimate goal of any pull request is to reach **shipit** status, where the Core team then decides whether the PR is ready to be merged. Not every PR that reaches the **shipit** label is actually ready to be merged, but the better our reviewers are, and the better our guidelines are, the more likely it will be that a PR that reaches **shipit** will be mergeable. + + + +Workflow +-------- + +Ansibullbot runs continuously. You can generally expect to see changes to your issue or pull request within thirty minutes. 
Ansibullbot examines every open pull request in the repositories, and enforces state roughly according to the following workflow: + +- If a pull request has no workflow labels, it’s considered **new**. Files in the pull request are identified, and the maintainers of those files are pinged by the bot, along with instructions on how to review the pull request. (Note: sometimes we strip labels from a pull request to “reboot” this process.) +- If the module maintainer is not ``$team_ansible``, the pull request then goes into the **community_review** state. +- If the module maintainer is ``$team_ansible``, the pull request then goes into the **core_review** state (and probably sits for a while). +- If the pull request is in **community_review** and has received comments from the maintainer: + + - If the maintainer says ``shipit``, the pull request is labeled **shipit**, whereupon the Core team assesses it for final merge. + - If the maintainer says ``needs_info``, the pull request is labeled **needs_info** and the submitter is asked for more info. + - If the maintainer says ``needs_revision``, the pull request is labeled **needs_revision** and the submitter is asked to fix some things. + +- If the submitter says ``ready_for_review``, the pull request is put back into **community_review** or **core_review** and the maintainer is notified that the pull request is ready to be reviewed again. +- If the pull request is labeled **needs_revision** or **needs_info** and the submitter has not responded lately: + + - The submitter is first politely pinged after two weeks, pinged again after two more weeks and labeled **pending_action**, and the issue or pull request will be closed two weeks after that. + - If the submitter responds at all, the clock is reset. +- If the pull request is labeled **community_review** and the reviewer has not responded lately: + + - The reviewer is first politely pinged after two weeks, pinged again after two more weeks and labeled **pending_action**, and then may be reassigned to ``$team_ansible`` or labeled **core_review**, or often the submitter of the pull request is asked to step up as a maintainer. +- If Shippable tests fail, or if the code is not able to be merged, the pull request is automatically put into **needs_revision** along with a message to the submitter explaining why. + + +There are corner cases and frequent refinements, but this is the workflow in general. + +PR Labels +--------- + +There are generally two types of PR labels: *workflow labels* and *informational labels*. + +Workflow Labels +~~~~~~~~~~~~~~~ + +- **community_review**: Pull requests for modules that are currently awaiting review by their maintainers in the Ansible community. +- **core_review**: Pull requests for modules that are currently awaiting review by their maintainers on the Ansible Core team. +- **needs_info**: Waiting on info from the submitter. +- **needs_rebase**: Waiting on the submitter to rebase. (Note: no longer used by the bot.) +- **needs_revision**: Waiting on the submitter to make changes. +- **shipit**: Waiting for final review by the core team for potential merge. + +Informational Labels +~~~~~~~~~~~~~~~~~~~~ + +- **backport**: this is applied automatically if the PR is requested against any branch that is not devel. The bot immediately assigns the labels backport and ``core_review``. +- **bugfix_pull_request**: applied by the bot based on the templatized description of the PR. +- **cloud**: applied by the bot based on the paths of the modified files.
+- **docs_pull_request**: applied by the bot based on the templatized description of the PR. +- **easyfix**: applied manually, inconsistently used but sometimes useful. +- **feature_pull_request**: applied by the bot based on the templatized description of the PR. +- **networking**: applied by the bot based on the paths of the modified files. +- **owner_pr**: largely deprecated. Formerly workflow, now informational. Originally, PRs submitted by the maintainer would automatically go to **shipit** based on this label. If the submitter is also a maintainer, we notify the other maintainers and still require one of the maintainers (including the submitter) to give a **shipit**. +- **pending_action**: applied by the bot to PRs that are not moving. Reviewed every couple of weeks by the community team, who tries to figure out the appropriate action (closure, asking for new maintainers, etc). + + +Special Labels +~~~~~~~~~~~~~~ + +- **new_plugin**: this is for new modules or plugins that are not yet in Ansible. + + **Note:** `new_plugin` kicks off a completely separate process, and frankly it doesn’t work very well at present. We’re working our best to improve this process. diff -Nru ansible-2.3.2.0/docs/docsite/rst/community/how_can_I_help.rst ansible-2.4.0.0/docs/docsite/rst/community/how_can_I_help.rst --- ansible-2.3.2.0/docs/docsite/rst/community/how_can_I_help.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/community/how_can_I_help.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,71 @@ +How To Help +=========== + +.. contents:: Topics + +There are many ways to help the Ansible project. + +Become a power user +------------------- + +A great way to help the Ansible project is to become a power user: + +* Use Ansible everywhere you can +* Take tutorials and classes +* Read the `official documentation `_ +* Study some of the `many excellent books `_ about Ansible +* `Get certified `_. + +When you become a power user, your ability and opportunities to help the Ansible project in other ways will multiply quickly. + +Ask and answer questions online +------------------------------- + +There are many forums online where Ansible users ask and answer questions. Reach out and communicate with your fellow Ansible users. + +You can find the official :ref:`Ansible communication channels `. + +Participate in your local meetup +-------------------------------- + +There are Ansible meetups `all over the world `_. Join your local meetup. Attend regularly. Ask good questions. Volunteer to give a presentation about how you use Ansible. + +If there isn't a meetup near you, we'll be happy to help you `start one `_. + +File and verify issues +---------------------- + +All software has bugs, and Ansible is no exception. When you find a bug, you can help tremendously by :ref:`telling us about it `. + + +If you should discover that the bug you're trying to file already exists in an issue, you can help by verifying the behavior of the reported bug with a comment in that issue, or by reporting any additional information. + +Review and submit pull requests +------------------------------- + +As you become more familiar with how Ansible works, you may be able to fix issues or develop new features yourself. If you think you've got a solution to a bug you've found in Ansible, or if you've got a new feature that you've written and would like to share with millions of Ansible users, read all about the `Ansible development process ` to learn how to get your code accepted into Ansible. 
+ +Another good way to help is to review pull requests that other Ansible users have submitted. The Ansible community keeps a full list of `open pull requests by file `_, so if there's a particular module or plug-in that particularly interests you, you can easily keep track of all the relevant new pull requests and provide testing or feedback. + +Become a module maintainer +-------------------------- + +Once you've learned about the development process and have contributed code to a particular module, we encourage you to become a maintainer of that module. There are hundreds of different modules in Ansible, and the vast majority of them are written and maintained entirely by members of the Ansible community. + +To learn more about the responsibilities of being an Ansible module maintainer, please read our :ref:`module maintainer guidelines `. + +Join a working group +-------------------- + +Working groups are a way for Ansible community members to self-organize around particular topics of interest. We have working groups around various topics. To join or create a working group, please read the `Ansible working group guidelines `_. + + +Teach Ansible to others +----------------------- + +We're working on a standardized Ansible workshop called `Lightbulb `_ that can provide a good hands-on introduction to Ansible usage and concepts. + +Social media +------------ + +If you like Ansible and just want to spread the good word, feel free to share on your social media platform of choice, and let us know by using ``@ansible`` or ``#ansible``. We'll be looking for you. diff -Nru ansible-2.3.2.0/docs/docsite/rst/community/index.rst ansible-2.4.0.0/docs/docsite/rst/community/index.rst --- ansible-2.3.2.0/docs/docsite/rst/community/index.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/community/index.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,26 @@ +********************* +Community Information +********************* + +Ansible Community Guide +======================= + +Welcome to the Ansible Community Guide! + +The purpose of this guide is to teach you everything you need to know about being a contributing member of the Ansible community. + +To get started, select one of the following topics. + + +.. toctree:: + :maxdepth: 1 + + development_process + reporting_bugs_and_features + how_can_I_help + maintainers + communication + other_tools_and_programs + + + diff -Nru ansible-2.3.2.0/docs/docsite/rst/community/maintainers.rst ansible-2.4.0.0/docs/docsite/rst/community/maintainers.rst --- ansible-2.3.2.0/docs/docsite/rst/community/maintainers.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/community/maintainers.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,71 @@ +**************************** +Module Maintainer Guidelines +**************************** + +.. contents:: Topics + +Thank you for being a maintainer of one Ansible's community modules! This guide provides module maintainers an overview of their responsibilities, resources for additional information, and links to helpful tools. + +In addition to the information below, module maintainers should be familiar with: + +* :ref:`General Ansible community development practices <../community>` +* Documentation on :ref:`module development ` + + +Maintainer Responsibilities +=========================== + +When you contribute a new module to the [ansible/ansible](https://github.com/ansible/ansible) repository, you become the maintainer for that module once it has been merged. 
Maintainership empowers you with the authority to accept, reject, or request revisions to pull requests on your module -- but as they say, "with great power comes great responsibility." + +Maintainers of Ansible modules are expected to provide feedback, responses, or actions on pull requests or issues to the module(s) they maintain in a reasonably timely manner. + +It is also recommended that you occasionally revisit the [contribution guidelines](https://github.com/ansible/ansible/blob/devel/CONTRIBUTING.md), as they are continually refined. Occasionally, you may be requested to update your module to move it closer to the general accepted standard requirements. We hope for this to be infrequent, and will always be a request with a fair amount of lead time (ie: not by tomorrow!). + +Finally, following the [ansible-devel](https://groups.google.com/forum/#!forum/ansible-devel) mailing list can be a great way to participate in the broader Ansible community, and a place where you can influence the overall direction, quality, and goals of Ansible and its modules. If you're not on this relatively low-volume list, please join us here: https://groups.google.com/forum/#!forum/ansible-devel + +The Ansible community hopes that you will find that maintaining your module is as rewarding for you as having the module is for the wider community. + +Pull Requests, Issues, and Workflow +=================================== + +Pull Requests +------------- + +Module pull requests are located in the [main Ansible repository](https://github.com/ansible/ansible/pulls). + +Because of the high volume of pull requests, notification of PRs to specific modules are routed by an automated bot to the appropriate maintainer for handling. It is recommended that you set an appropriate notification process to receive notifications which mention your GitHub ID. + +Issues +------ + +Issues for modules, including bug reports, documentation bug reports, and feature requests, are tracked in the [ansible repository](https://github.com/ansible/ansible/issues). + +Issues for modules are routed to their maintainers via an automated process. This process is still being refined, and currently depends upon the issue creator to provide adequate details (specifically, providing the proper module name) in order to route it correctly. If you are a maintainer of a specific module, it is recommended that you periodically search module issues for issues which mention your module's name (or some variation on that name), as well as setting an appropriate notification process for receiving notification of mentions of your GitHub ID. + +PR Workflow +----------- + +Automated routing of pull requests is handled by a tool called [Ansibot](https://github.com/ansible/ansibullbot). + +Being moderately familiar with how the workflow behind the bot operates can be helpful to you, and -- should things go awry -- your feedback can be helpful to the folks that continually help Ansibullbot to evolve. + +A detailed explanation of the PR workflow can be seen here: https://github.com/ansible/community/blob/master/PR-FLOW.md + +Extras maintainers list +----------------------- + +The full list of maintainers for modules is located here: https://github.com/ansible/ansibullbot/blob/master/MAINTAINERS.txt + +Changing Maintainership +----------------------- + +Communities change over time, and no one maintains a module forever. 
If you'd like to propose an additional maintainer for your module, please submit a PR to the maintainers file with the Github username of the new maintainer. + +If you'd like to step down as a maintainer, please submit a PR to the maintainers file removing your Github ID from the module in question. If that would leave the module with no maintainers, put "ansible" as the maintainer. This will indicate that the module is temporarily without a maintainer, and the Ansible community team will search for a new maintainer. + +Tools and other Resources +------------------------- + +* https://ansible.sivel.net/pr/byfile.html -- a full list of all open Pull Requests, organized by file. +* Ansibullbot: https://github.com/ansible/ansibullbot +* Triage / pull request workflow and information, including definitions for Labels in GitHub: https://github.com/ansible/community/blob/master/PR-FLOW.md diff -Nru ansible-2.3.2.0/docs/docsite/rst/community/other_tools_and_programs.rst ansible-2.4.0.0/docs/docsite/rst/community/other_tools_and_programs.rst --- ansible-2.3.2.0/docs/docsite/rst/community/other_tools_and_programs.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/community/other_tools_and_programs.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,18 @@ +************************ +Other Tools And Programs +************************ + +The Ansible community provides several useful tools for working with the Ansible project. This is a list +of some of the most popular of these tools. + +- `PR by File `_ shows a current list of all open pull requests by individual file. An essential tool for Ansible module maintainers. + +- `Ansible Lint `_ is a widely used, highly configurable best-practices linter for Ansible playbooks. + +- `Ansible Review `_ is an extension of Ansible Lint designed for code review. + +- `jctanner's Ansible Tools `_ is a miscellaneous collection of useful helper scripts for Ansible development. + +- `Ansigenome `_ is a command line tool designed to help you manage your Ansible roles. + +- `Awesome Ansible `_ is a collaboratively curated list of awesome Ansible resources. diff -Nru ansible-2.3.2.0/docs/docsite/rst/community/reporting_bugs_and_features.rst ansible-2.4.0.0/docs/docsite/rst/community/reporting_bugs_and_features.rst --- ansible-2.3.2.0/docs/docsite/rst/community/reporting_bugs_and_features.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/community/reporting_bugs_and_features.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,35 @@ +************************************** +Reporting Bugs And Requesting Features +************************************** + +.. contents:: Topics + +Reporting A Bug +=============== + +Ansible practices responsible disclosure - if this is a security related bug, email `security@ansible.com `_ instead of filing a ticket or posting to the Google Group and you will receive a prompt response. + +Ansible bugs should be reported to `github.com/ansible/ansible/issues `_ after +signing up for a free GitHub account. Before reporting a bug, please use the bug/issue search +to see if the issue has already been reported. This is listed on the bottom of the docs page for any module. + +Knowing your Ansible version and the exact commands you are running, and what you expect, saves time and helps us help everyone with their issues more quickly. For that reason, we provide an issue template; please fill it out as completely and as accurately as possible. 
+ +Do not use the issue tracker for "how do I do this" type questions. These are great candidates for IRC or the mailing list instead where things are likely to be more of a discussion. + +To be respectful of reviewers' time and allow us to help everyone efficiently, please provide minimal well-reduced and well-commented examples versus sharing your entire production playbook. Include playbook snippets and output where possible. + +When sharing YAML in playbooks, formatting can be preserved by using `code blocks `_. + +For multiple-file content, we encourage use of gist.github.com. Online pastebin content can expire, so it's nice to have things around for a longer term if they are referenced in a ticket. + +If you are not sure if something is a bug yet, you are welcome to ask about something on the mailing list or IRC first. + +As we are a very high volume project, if you determine that you do have a bug, please be sure to open the issue yourself to ensure we have a record of it. Don’t rely on someone else in the community to file the bug report for you. + +Requesting a feature +==================== + +The best way to get a feature into Ansible is to submit a pull request. + +The next best way of getting a feature into Ansible is to submit a proposal through the `Ansible proposal process ` . diff -Nru ansible-2.3.2.0/docs/docsite/rst/community/triage_process.rst ansible-2.4.0.0/docs/docsite/rst/community/triage_process.rst --- ansible-2.3.2.0/docs/docsite/rst/community/triage_process.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/community/triage_process.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,8 @@ +************** +Triage Process +************** + +The issue and PR triage processes are driven by the `Ansibot `. Whenever an issue or PR is filed, the Ansibot examines the issue to ensure that all relevant data is present, and handles the routing of the issue as it works its way to eventual completion. + +For details on how Ansibot manages the triage process, please consult the `Ansibot +Issue Guide `. diff -Nru ansible-2.3.2.0/docs/docsite/rst/conf.py ansible-2.4.0.0/docs/docsite/rst/conf.py --- ansible-2.3.2.0/docs/docsite/rst/conf.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/conf.py 2017-09-19 17:10:47.000000000 +0000 @@ -17,19 +17,19 @@ import os # pip install sphinx_rtd_theme -#import sphinx_rtd_theme -#html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] +# import sphinx_rtd_theme +# html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] # If your extensions are in another directory, add it here. If the directory # is relative to the documentation root, use os.path.abspath to make it # absolute, like shown here. -#sys.path.append(os.path.abspath('some/directory')) +# sys.path.append(os.path.abspath('some/directory')) # sys.path.insert(0, os.path.join('ansible', 'lib')) sys.path.append(os.path.abspath('_themes')) -VERSION='2.2' -AUTHOR='Ansible, Inc' +VERSION = '2.4' +AUTHOR = 'Ansible, Inc' # General configuration @@ -38,7 +38,7 @@ # Add any Sphinx extension module names here, as strings. # They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc'] +extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx'] # Later on, add 'sphinx.ext.viewcode' to the list if you want to have # colorized code generated too for references. @@ -55,7 +55,7 @@ # General substitutions. 
project = 'Ansible Documentation' -copyright = "2013-2016 Ansible, Inc" +copyright = "2013-2017 Ansible, Inc" # The default replacements for |version| and |release|, also used in various # other places throughout the built documents. @@ -67,16 +67,16 @@ # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. -#unused_docs = [] +# unused_docs = [] # List of directories, relative to source directories, that shouldn't be # searched for source files. -#exclude_dirs = [] +# exclude_dirs = [] # A list of glob-style patterns that should be excluded when looking # for source files. @@ -84,26 +84,26 @@ # The reST default role (used for this markup: `text`) to use for all # documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' highlight_language = 'YAML+Jinja' -#Substitutions, variables, entities, & shortcuts for text which do not need to link to anything. -#For titles which should be a link, use the intersphinx anchors set at the index, chapter, and section levels, such as qi_start_: +# Substitutions, variables, entities, & shortcuts for text which do not need to link to anything. +# For titles which should be a link, use the intersphinx anchors set at the index, chapter, and section levels, such as qi_start_: rst_epilog = """ .. |acapi| replace:: *Ansible Core API Guide* .. |acrn| replace:: *Ansible Core Release Notes* @@ -128,28 +128,28 @@ # The style sheet to use for HTML and HTML Help pages. A file of that name # must exist either in Sphinx' static/ path, or in one of the custom paths # given in html_static_path. -#html_style = 'solar.css' +# html_style = 'solar.css' # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". html_title = 'Ansible Documentation' # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (within the static path) to place at the top of # the sidebar. -#html_logo = None +# html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = 'favicon.ico' +# html_favicon = 'favicon.ico' # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -#html_static_path = ['.static'] +# html_static_path = ['.static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. @@ -157,23 +157,23 @@ # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. 
-#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. -#html_use_modindex = True +# html_use_modindex = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, the reST sources are included in the HTML build as _sources/. html_copy_source = False @@ -181,10 +181,10 @@ # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = '' +# html_file_suffix = '' # Output file base name for HTML help builder. htmlhelp_basename = 'Poseidodoc' @@ -194,34 +194,37 @@ # ------------------------ # The paper size ('letter' or 'a4'). -#latex_paper_size = 'letter' +# latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). -#latex_font_size = '10pt' +# latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, document class # [howto/manual]). latex_documents = [ - ('index', 'ansible.tex', 'Ansible 1.2 Documentation', - AUTHOR, 'manual'), + ('index', 'ansible.tex', 'Ansible 2.2 Documentation', AUTHOR, 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # Additional stuff for the LaTeX preamble. -#latex_preamble = '' +# latex_preamble = '' # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_use_modindex = True +# latex_use_modindex = True autoclass_content = 'both' + +intersphinx_mapping = {'python': ('https://docs.python.org/2/', (None, '../python2-2.7.13.inv')), + 'python3': ('https://docs.python.org/3/', (None, '../python3-3.6.2.inv')), + 'jinja2': ('http://jinja.pocoo.org/docs/', (None, '../jinja2-2.9.7.inv'))} diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/conf.py ansible-2.4.0.0/docs/docsite/rst/dev_guide/conf.py --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/conf.py 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/conf.py 1970-01-01 00:00:00.000000000 +0000 @@ -1,417 +0,0 @@ -# -*- coding: utf-8 -*- -# -# dev_guide documentation build configuration file, created by -# sphinx-quickstart on Mon Aug 15 13:24:02 2016. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. 
If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -# -# import os -# import sys -# sys.path.insert(0, os.path.abspath('.')) - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -# -# needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -# -# source_suffix = ['.rst', '.md'] -source_suffix = '.rst' - -# The encoding of source files. -# -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'dev_guide' -copyright = u'2016, Ansible' -author = u'Ansible' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = u'1.0' -# The full version, including alpha/beta/rc tags. -release = u'1.0' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# -# today = '' -# -# Else, today_fmt is used as the format for a strftime call. -# -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This patterns also effect to html_static_path and html_extra_path -exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = False - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'alabaster' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -# -# html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. 
-# html_theme_path = [] - -# The name for this set of Sphinx documents. -# " v documentation" by default. -# -# html_title = u'dev_guide v1.0' - -# A shorter title for the navigation bar. Default is the same as html_title. -# -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# -# html_logo = None - -# The name of an image file (relative to this directory) to use as a favicon of -# the docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# -# html_extra_path = [] - -# If not None, a 'Last updated on:' timestamp is inserted at every page -# bottom, using the given strftime format. -# The empty string is equivalent to '%b %d, %Y'. -# -# html_last_updated_fmt = None - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# -# html_additional_pages = {} - -# If false, no module index is generated. -# -# html_domain_indices = True - -# If false, no index is generated. -# -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh' -# -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# 'ja' uses this config value. -# 'zh' user can custom change `jieba` dictionary path. -# -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = 'dev_guidedoc' - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # - # 'papersize': 'letterpaper', - - # The font size ('10pt', '11pt' or '12pt'). 
- # - # 'pointsize': '10pt', - - # Additional stuff for the LaTeX preamble. - # - # 'preamble': '', - - # Latex figure (float) alignment - # - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, 'dev_guide.tex', u'dev\\_guide Documentation', - u'Ansible', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# -# latex_use_parts = False - -# If true, show page references after internal links. -# -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# -# latex_appendices = [] - -# If false, no module index is generated. -# -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - (master_doc, 'dev_guide', u'dev_guide Documentation', - [author], 1) -] - -# If true, show URL addresses after external links. -# -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - (master_doc, 'dev_guide', u'dev_guide Documentation', - author, 'dev_guide', 'One line description of project.', - 'Miscellaneous'), -] - -# Documents to append as an appendix to all manuals. -# -# texinfo_appendices = [] - -# If false, no module index is generated. -# -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# -# texinfo_no_detailmenu = False - - -# -- Options for Epub output ---------------------------------------------- - -# Bibliographic Dublin Core info. -epub_title = project -epub_author = author -epub_publisher = author -epub_copyright = copyright - -# The basename for the epub file. It defaults to the project name. -# epub_basename = project - -# The HTML theme for the epub output. Since the default themes are not -# optimized for small screen space, using the same theme for HTML and epub -# output is usually not wise. This defaults to 'epub', a theme designed to save -# visual space. -# -# epub_theme = 'epub' - -# The language of the text. It defaults to the language option -# or 'en' if the language is not set. -# -# epub_language = '' - -# The scheme of the identifier. Typical schemes are ISBN or URL. -# epub_scheme = '' - -# The unique identifier of the text. This can be a ISBN number -# or the project homepage. -# -# epub_identifier = '' - -# A unique identification for the text. -# -# epub_uid = '' - -# A tuple containing the cover image and cover page html template filenames. -# -# epub_cover = () - -# A sequence of (type, uri, title) tuples for the guide element of content.opf. -# -# epub_guide = () - -# HTML files that should be inserted before the pages created by sphinx. -# The format is a list of tuples containing the path and title. 
-# -# epub_pre_files = [] - -# HTML files that should be inserted after the pages created by sphinx. -# The format is a list of tuples containing the path and title. -# -# epub_post_files = [] - -# A list of files that should not be packed into the epub file. -epub_exclude_files = ['search.html'] - -# The depth of the table of contents in toc.ncx. -# -# epub_tocdepth = 3 - -# Allow duplicate toc entries. -# -# epub_tocdup = True - -# Choose between 'default' and 'includehidden'. -# -# epub_tocscope = 'default' - -# Fix unsupported image types using the Pillow. -# -# epub_fix_images = False - -# Scale large images. -# -# epub_max_image_width = 0 - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# -# epub_show_urls = 'inline' - -# If false, no index is generated. -# -# epub_use_index = True diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_api.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_api.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_api.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_api.rst 2017-09-19 17:10:47.000000000 +0000 @@ -1,6 +1,8 @@ Python API ========== +.. note:: This document is out of date: 'ansible.parsing.dataloader' and 'ansible.runner' are not available in the current version of Ansible. + .. contents:: Topics Please note that while we make this API available it is not intended for direct consumption, it is here diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_modules_checklist.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_modules_checklist.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_modules_checklist.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_modules_checklist.rst 2017-09-19 17:10:47.000000000 +0000 @@ -31,7 +31,7 @@ ANSIBLE_METADATA = {'status': ['preview'], 'supported_by': 'community', - 'metadata_version': '1.0'} + 'metadata_version': '1.1'} The complete module metadata specification is here: `Ansible metadata block `_ @@ -136,79 +136,10 @@ Windows modules checklist ========================= -* Favour native powershell and .net ways of doing things over calls to COM libraries or calls to native executables which may or may not be present in all versions of Windows -* modules are in powershell (.ps1 files) but the docs reside in same name python file (.py) -* look at ansible/lib/ansible/module_utils/powershell.ps1 for common code, avoid duplication -* Ansible uses strictmode version 2.0 so be sure to test with that enabled -All powershell modules must start: +For a checklist and details on how to write Windows modules please see :doc:`developing_modules_general_windows` -.. code-block:: powershell - - #!powershell - - - - # WANT_JSON - # POWERSHELL_COMMON - -To parse all arguments into a variable modules generally use: - -.. code-block:: powershell - - $params = Parse-Args $args - -Arguments ---------- - -* Try and use state present and state absent like other modules -* You need to check that all your mandatory args are present. You can do this using the builtin Get-AnsibleParam function. -* Required arguments: - -.. code-block:: powershell - - $package = Get-AnsibleParam -obj $params -name name -failifempty $true - -Required arguments with name validation: - -.. 
code-block:: powershell - - $state = Get-AnsibleParam -obj $params -name "State" -ValidateSet "Present","Absent" -resultobj $resultobj -failifempty $true - -Optional arguments with name validation ---------------------------------------- - -.. code-block:: powershell - - $state = Get-AnsibleParam -obj $params -name "State" -default "Present" -ValidateSet "Present","Absent" - -* If the "FailIfEmpty" is true, the resultobj parameter is used to specify the object returned to fail-json. You can also override the default message - using $emptyattributefailmessage (for missing required attributes) and $ValidateSetErrorMessage (for attribute validation errors) -* Look at existing modules for more examples of argument checking. - -Results -------- -* The result object should always contain an attribute called changed set to either $true or $false -* Create your result object like this - -.. code-block:: powershell - - $result = New-Object psobject @{ - changed = $false - other_result_attribute = $some_value - }; - - If all is well, exit with a - Exit-Json $result - -* Ensure anything you return, including errors can be converted to json. -* Be aware that because exception messages could contain almost anything. -* ConvertTo-Json will fail if it encounters a trailing \ in a string. -* If all is not well use Fail-Json to exit. - -* Have you tested for powershell 3.0 and 4.0 compliance? - Deprecating and making module aliases ====================================== diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_modules_documenting.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_modules_documenting.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_modules_documenting.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_modules_documenting.rst 2017-09-19 17:10:47.000000000 +0000 @@ -18,12 +18,12 @@ 2. ANSIBLE_METADATA 3. DOCUMENTATION 4. EXAMPLES -5. RETURNS +5. RETURN 6. Python imports .. note:: Why don't the imports go first? - Keen Python programmers may notice that contrary to PEP 8's advice we don't put ``imports`` at the top of the file. This is because the ``ANSIBLE_METADATA`` through ``RETURNS`` sections are not used by the module code itself; they are essentially extra docstrings for the file. The imports are placed after these special variables for the same reason as PEP 8 puts the imports after the introductory comments and docstrings. This keeps the active parts of the code together and the pieces which are purely informational apart. The decision to exclude E402 is based on readability (which is what PEP 8 is about). Documentation strings in a module are much more similar to module level docstrings, than code, and are never utilized by the module itself. Placing the imports below this documentation and closer to the code, consolidates and groups all related code in a congruent manner to improve readability, debugging and understanding. + Keen Python programmers may notice that contrary to PEP 8's advice we don't put ``imports`` at the top of the file. This is because the ``ANSIBLE_METADATA`` through ``RETURN`` sections are not used by the module code itself; they are essentially extra docstrings for the file. The imports are placed after these special variables for the same reason as PEP 8 puts the imports after the introductory comments and docstrings. This keeps the active parts of the code together and the pieces which are purely informational apart. 
The decision to exclude E402 is based on readability (which is what PEP 8 is about). Documentation strings in a module are much more similar to module-level docstrings than code, and are never utilized by the module itself. Placing the imports below this documentation and closer to the code, consolidates and groups all related code in a congruent manner to improve readability, debugging and understanding. .. warning:: Why do some modules have imports at the bottom of the file? @@ -75,7 +75,7 @@ .. code-block:: python - ANSIBLE_METADATA = {'metadata_version': '1.0', + ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} @@ -92,22 +92,22 @@ ANSIBLE_METADATA doesn't look quite right because of this. Module metadata should be fixed before checking it into the repository. -Version 1.0 of the metadata +Version 1.1 of the metadata +++++++++++++++++++++++++++ Structure ````````` ^^^^^^^^^ .. code-block:: python ANSIBLE_METADATA = { - 'metadata_version': '1.0', + 'metadata_version': '1.1', 'supported_by': 'community', 'status': ['preview', 'deprecated'] } Fields `````` ^^^^^^ :metadata_version: An “X.Y” formatted string. X and Y are integers which define the metadata format version. Modules shipped with Ansible are @@ -115,13 +115,17 @@ of the metadata. We’ll increment Y if we add fields or legal values to an existing field. We’ll increment X if we remove fields or values or change the type or meaning of a field. + Current metadata_version is "1.1" :supported_by: This field records who supports the module. Default value is ``community``. Values are: - :core: - :curated: - :community: - + * core + * network + * certified + * community + * curated (Deprecated. Modules in this category should probably be core or + certified instead) + For information on what the support level values entail, please see `Modules Support `_. @@ -142,6 +146,18 @@ kept so that documentation can be built. The documentation helps users port from the removed module to new modules. +Changes from Version 1.0 +++++++++++++++++++++++++ + +:metadata_version: Version updated from 1.0 to 1.1 +:supported_by: All substantive changes were to potential values of the supported_by field + + * Added the certified value + * Deprecated the curated value, modules shipped with Ansible will use + certified instead. Third party modules are encouraged not to use this as + it is meaningless within Ansible proper. + * Added the network value + DOCUMENTATION Block ------------------- @@ -163,6 +179,8 @@ ''' + + The following fields can be used and are all required unless specified otherwise: :module: @@ -205,6 +223,8 @@ * If `required` is false/missing, `default` may be specified (assumed 'null' if missing). * Ensure that the default parameter in the docs matches the default parameter in the code. * The default option must not be listed as part of the description. + * If the option is a boolean value, you can use any of the boolean values recognized by Ansible: + (such as true/false or yes/no). Choose the one that reads better in the context of the option. :choices: List of option values. Should be absent if empty. :type: @@ -272,6 +292,9 @@ Data type :sample: One or more examples. + :version_added: + Only needed if this return was extended after initial Ansible release, i.e. this is greater than the top level `version_added` field. + This is a string, and not a float, i.e. ``version_added: "2.3"``.
:contains: Optional, if you set `type: complex` you can detail the dictionary here by repeating the above elements. @@ -286,6 +309,9 @@ Data type :sample: One or more examples. + :version_added: + Only needed if this return was extended after initial Ansible release, i.e. this is greater than the top level `version_added` field. + This is a string, and not a float, i.e. ``version_added: "2.3"``. For complex nested returns type can be specified as ``type: complex``. @@ -375,6 +401,15 @@ run the command: ``make webdocs``. The new 'modules.html' file will be built in the ``docs/docsite/_build/html/$MODULENAME_module.html`` directory. +In order to speed up the build process, you can limit the documentation build to +only include modules you specify, or no modules at all. To do this, run the command: +``MODULES=$MODULENAME make webdocs``. The ``MODULES`` environment variable +accepts a comma-separated list of module names. To skip building +documentation for all modules, specify a non-existent module name, for example: +``MODULES=none make webdocs``. + +You may also build a single page of the entire docsite. From ``ansible/docs/docsite`` run ``make htmlsingle rst=[relative path to the .rst file]``, for example: ``make htmlsingle rst=dev_guide/developing_modules_documenting.rst`` + To test your documentation against your ``argument_spec`` you can use ``validate-modules``. Note that this option isn't currently enabled in Shippable due to the time it takes to run. .. code-block:: bash @@ -387,3 +422,5 @@ If you're having a problem with the syntax of your YAML you can validate it on the `YAML Lint `_ website. + +For more information in testing, including how to add unit and integration tests, see :doc:`testing`. diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_modules_general_OLD.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_modules_general_OLD.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_modules_general_OLD.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_modules_general_OLD.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,1572 @@ +.. _module_dev_tutorial_sample: + +Building A Simple Module +```````````````````````` + +Let's build a very basic module to get and set the system time. For starters, let's build +a module that just outputs the current time. +Ansible Development by Example +============================== + +- `Why? <#why>`__ +- `What is this? <#what-is-this>`__ +- `Environment setup <#environment-setup>`__ +- `New module development <#new-module-development>`__ +- `Local/direct module testing <#localdirect-module-testing>`__ +- `Playbook module testing <#playbook-module-testing>`__ +- `Debugging (local) <#debugging-local>`__ +- `Debugging (remote) <#debugging-remote>`__ +- `Unit testing <#unit-testing>`__ +- Integration testing (coming soon) +- `Communication and development + support <#communication-and-development-support>`__ +- `Credit <#credit>`__ + +Why? +~~~~ + +Ansible is an awesome tool for configuration management. It is also a +highly utilized one, and there are so many ways to contribute as a +community. + +What is this? +~~~~~~~~~~~~~ + +There is no doubt that Ansible is a complex tool, with lots of +inner-workings, yet it is easy to work with as an end user. But on the +other end of that, contributing to Ansible with code can sometimes be a +daunting task. 
+ +This documentation is a way to show step-by-step how to develop Ansible +modules, both new module development as well as bug fixes and debugging. + +Environment setup +================= + +1. Clone the Ansible repository: + ``$ git clone https://github.com/ansible/ansible.git`` +2. Change directory into the repository root dir: ``$ cd ansible`` +3. Create a virtual environment: ``$ python3 -m venv venv`` (or for + Python 2 ``$ virtualenv venv``. Note, this requires you to install + the virtualenv package: ``$ pip install virtualenv``) +4. Activate the virtual environment: ``$ . venv/bin/activate`` +5. Install development requirements: + ``$ pip install -r requirements.txt`` +6. Run the environment setup script for each new dev shell process: + ``$ . hacking/env-setup`` + +After the initial setup above, every time you are ready to start +developing Ansible you should be able to just run the following from the +root of the Ansible repo: +``$ . venv/bin/activate && . hacking/env-setup`` + +Starting new development now? Fixing a bug? Create a new branch: +``$ git checkout -b my-new-branch``. If you are planning on contributing +back to the main Ansible repostiry, fork the Ansible repository into +your own GitHub account and developing against your new non-devel branch +in your fork. When you believe you have a good working code change, +submit a pull request to the Ansible repository. + +Submitting a new module to the upstream Ansible repo? Run +through sanity checks first: +``$ ansible-test sanity -v --docker --python 2.7 MODULE_NAME`` (this +requires docker to be installed and running. If you'd rather not use a +container for this you can choose to use ``--tox`` instead of +``--docker``) + +New module development +====================== + +If you are creating a new module that doesn't exist, you would start +working on a whole new file. Here is an example: + +- Navigate to the directory that you want to develop your new module + in. E.g. 
``$ cd lib/ansible/modules/cloud/azure/`` +- Create your new module file: ``$ touch my_new_test_module.py`` +- Paste this simple into the new module file: (explanation in comments):: + + #!/usr/bin/python + + ANSIBLE_METADATA = { + 'metadata_version': '1.1', + 'status': ['preview'], + 'supported_by': 'community' + } + + DOCUMENTATION = ''' + --- + module: my_sample_module + + short_description: This is my sample module + + version_added: "2.4" + + description: + - "This is my longer description explaining my sample module" + + options: + name: + description: + - This is the message to send to the sample module + required: true + new: + description: + - Control to demo if the result of this module is changed or not + required: false + + extends_documentation_fragment + - azure + + author: + - Your Name (@yourhandle) + ''' + + EXAMPLES = ''' + # Pass in a message + - name: Test with a message + my_new_test_module: + name: hello world + + # pass in a message and have changed true + - name: Test with a message and changed output + my_new_test_module: + name: hello world + new: true + + # fail the module + - name: Test failure of the module + my_new_test_module: + name: fail me + ''' + + RETURN = ''' + original_message: + description: The original name param that was passed in + type: str + message: + description: The output message that the sample module generates + ''' + + from ansible.module_utils.basic import AnsibleModule + + def run_module(): + # define the available arguments/parameters that a user can pass to + # the module + module_args = dict( + name=dict(type='str', required=True), + new=dict(type='bool', required=False, default=False) + ) + + # seed the result dict in the object + # we primarily care about changed and state + # change is if this module effectively modified the target + # state will include any data that you want your module to pass back + # for consumption, for example, in a subsequent task + result = dict( + changed=False, + original_message='', + message='' + ) + + # the AnsibleModule object will be our abstraction working with Ansible + # this includes instantiation, a couple of common attr would be the + # args/params passed to the execution, as well as if the module + # supports check mode + module = AnsibleModule( + argument_spec=module_args, + supports_check_mode=True + ) + + # if the user is working with this module in only check mode we do not + # want to make any changes to the environment, just return the current + # state with no modifications + if module.check_mode: + return result + + # manipulate or modify the state as needed (this is going to be the + # part where your module will do what it needs to do) + result['original_message'] = module.params['name'] + result['message'] = 'goodbye' + + # use whatever logic you need to determine whether or not this module + # made any modifications to your target + if module.params['new']: + result['changed'] = True + + # during the execution of the module, if there is an exception or a + # conditional state that effectively causes a failure, run + # AnsibleModule.fail_json() to pass in the message and the result + if module.params['name'] == 'fail me': + module.fail_json(msg='You requested this to fail', **result) + + # in the event of a successful module execution, you will want to + # simple AnsibleModule.exit_json(), passing the key/value results + module.exit_json(**result) + + def main(): + run_module() + + if __name__ == '__main__': + main() + +Local/direct module testing +=========================== + 
+You may want to test the module on the local machine without targeting a +remote host. This is a great way to quickly and easily debug a module +that can run locally. + +- Create an arguments file in ``/tmp/args.json`` with the following + content: (explanation below) + + .. code:: json + + { + "ANSIBLE_MODULE_ARGS": { + "name": "hello", + "new": true + } + } + +- If you are using a virtual environment (highly recommended for + development) activate it: ``$ . venv/bin/activate`` +- Setup the environment for development: ``$ . hacking/env-setup`` +- Run your test module locally and directly: + ``$ python ./my_new_test_module.py /tmp/args.json`` + +This should be working output that resembles something like the +following: + +:: + + {"changed": true, "state": {"original_message": "hello", "new_message": "goodbye"}, "invocation": {"module_args": {"name": "hello", "new": true}}} + +The arguments file is just a basic json config file that you can +use to pass the module your parameters to run the module it + +Playbook module testing +======================= + +If you want to test your new module, you can now consume it with an +Ansible playbook. + +- Create a playbook in any directory: ``$ touch testmod.yml`` +- Add the following to the new playbook file \`\`\`yaml --- +- name: test my new module connection: local hosts: localhost + +tasks: - name: run the new module my\_new\_test\_module: name: 'hello' +new: true register: testout + +:: + + - name: dump test output + debug: + msg: '{{ testout }}' + +:: + + - Run the playbook and analyze the output: `$ ansible-playbook ./testmod.yml` + + # Debugging (local) + + If you want to break into a module and step through with the debugger, locally running the module you can do: + + 1. Set a breakpoint in the module: `import pdb; pdb.set_trace()` + 1. Run the module on the local machine: `$ python -m pdb ./my_new_test_module.py ./args.json` + + # Debugging (remote) + + In the event you want to debug a module that is running on a remote target (i.e. not localhost), one way to do this is the following: + + 1. On your controller machine (running Ansible) set `ANSIBLE_KEEP_REMOTE_FILES=1` (this tells Ansible to retain the modules it sends to the remote machine instead of removing them) + 1. Run your playbook targetting the remote machine and specify `-vvvv` (the verbose output will show you many things, including the remote location that Ansible uses for the modules) + 1. Take note of the remote path Ansible used on the remote host + 1. SSH into the remote target after the completion of the playbook + 1. Navigate to the directory (most likely it is going to be your ansible remote user defined or implied from the playbook: `~/.ansible/tmp/ansible-tmp-...`) + 1. Here you should see the module that you executed from your Ansible controller, but this is the zipped file that Ansible sent to the remote host. You can run this by specifying `python my_test_module.py` (not necessary) + 1. To debug, though, we will want to extra this zip out to the original module format: `python my_test_module.py explode` (Ansible will expand the module into `./debug-dir`) + 1. Navigate to `./debug-dir` (notice that unzipping has caused the generation of `ansible_module_my_test_module.py`) + 1. Modify or set a breakpoint in the unzipped module + 1. Ensure that the unzipped module is executable: `$ chmod 755 ansible_module_my_test_module.py` + 1. 
Run the unzipped module directly passing the args file: `$ ./ansible_module_my_test_module.py args` (args is the file that contains the params that were originally passed. Good for repro and debugging) + + # Unit testing + + Unit tests for modules will be appropriately located in `./test/units/modules`. You must first setup your testing environment. In my case, I'm using Python 3.5. + + - Install the requirements (outside of your virtual environment): `$ pip3 install -r ./test/runner/requirements/units.txt` + - To run all tests do the following: `$ ansible-test units --python 3.5` (you must run `. hacking/env-setup` prior to this) + + :bulb: Ansible uses pytest for unit testing + + To run pytest against a single test module, you can do the following (provide the path to the test module appropriately): + +$ pytest -r a --cov=. --cov-report=html --fulltrace --color yes +test/units/modules/.../test\_my\_new\_test\_module.py \`\`\` + +Communication and development support +===================================== + +Join the IRC channel ``#ansible-devel`` on freenode for discussions +surrounding Ansible development. + +For questions and discussions pertaining to using the Ansible product, +use the ``#ansible`` channel. + +Credit +====== + +A *huge* thank you to the Ansible team at Red Hat for providing not only +a great product but also the willingness to help out contributors! +We Ansible Development by Example +============================== + +- `Why? <#why>`__ +- `What is this? <#what-is-this>`__ +- `Environment setup <#environment-setup>`__ +- `New module development <#new-module-development>`__ +- `Local/direct module testing <#localdirect-module-testing>`__ +- `Playbook module testing <#playbook-module-testing>`__ +- `Debugging (local) <#debugging-local>`__ +- `Debugging (remote) <#debugging-remote>`__ +- `Unit testing <#unit-testing>`__ +- Integration testing (coming soon) +- `Communication and development + support <#communication-and-development-support>`__ +- `Credit <#credit>`__ + +Why? +~~~~ + +Ansible is an awesome tool for configuration management. It is also a +highly utilized one, and there are so many ways to contribute as a +community. + +What is this? +~~~~~~~~~~~~~ + +There is no doubt that Ansible is a complex tool, with lots of +inner-workings, yet it is easy to work with as an end user. But on the +other end of that, contributing to Ansible with code can sometimes be a +daunting task. + +This documentation is a way to show step-by-step how to develop Ansible +modules, both new module development as well as bug fixes and debugging. + +Environment setup +================= + +1. Clone the Ansible repository: + ``$ git clone https://github.com/ansible/ansible.git`` +2. Change directory into the repository root dir: ``$ cd ansible`` +3. Create a virtual environment: ``$ python3 -m venv venv`` (or for + Python 2 ``$ virtualenv venv``. Note, this requires you to install + the virtualenv package: ``$ pip install virtualenv``) +4. Activate the virtual environment: ``$ . venv/bin/activate`` +5. Install development requirements: + ``$ pip install -r requirements.txt`` +6. Run the environment setup script for each new dev shell process: + ``$ . hacking/env-setup`` + +After the initial setup above, every time you are ready to start +developing Ansible you should be able to just run the following from the +root of the Ansible repo: +``$ . venv/bin/activate && . hacking/env-setup`` + +Starting new development now? Fixing a bug? Create a new branch: +``$ git checkout -b my-new-branch``. 
If you are planning on contributing +back to the main Ansible repostiry, fork the Ansible repository into +your own GitHub account and developing against your new non-devel branch +in your fork. When you believe you have a good working code change, +submit a pull request to the Ansible repository. + +Submitting a new module to the upstream Ansible repo? Run +through sanity checks first: +``$ ansible-test sanity -v --docker --python 2.7 MODULE_NAME`` (this +requires docker to be installed and running. If you'd rather not use a +container for this you can choose to use ``--tox`` instead of +``--docker``) + +New module development +====================== + +If you are creating a new module that doesn't exist, you would start +working on a whole new file. Here is an example: + +- Navigate to the directory that you want to develop your new module + in. E.g. ``$ cd lib/ansible/modules/cloud/azure/`` +- Create your new module file: ``$ touch my_new_test_module.py`` +- Paste this simple into the new module file: (explanation in comments):: + + #!/usr/bin/python + + ANSIBLE_METADATA = { + 'metadata_version': '1.1', + 'status': ['preview'], + 'supported_by': 'community' + } + + DOCUMENTATION = ''' + --- + module: my_sample_module + + short_description: This is my sample module + + version_added: "2.4" + + description: + - "This is my longer description explaining my sample module" + + options: + name: + description: + - This is the message to send to the sample module + required: true + new: + description: + - Control to demo if the result of this module is changed or not + required: false + + extends_documentation_fragment + - azure + + author: + - Your Name (@yourhandle) + ''' + + EXAMPLES = ''' + # Pass in a message + - name: Test with a message + my_new_test_module: + name: hello world + + # pass in a message and have changed true + - name: Test with a message and changed output + my_new_test_module: + name: hello world + new: true + + # fail the module + - name: Test failure of the module + my_new_test_module: + name: fail me + ''' + + RETURN = ''' + original_message: + description: The original name param that was passed in + type: str + message: + description: The output message that the sample module generates + ''' + + from ansible.module_utils.basic import AnsibleModule + + def run_module(): + # define the available arguments/parameters that a user can pass to + # the module + module_args = dict( + name=dict(type='str', required=True), + new=dict(type='bool', required=False, default=False) + ) + + # seed the result dict in the object + # we primarily care about changed and state + # change is if this module effectively modified the target + # state will include any data that you want your module to pass back + # for consumption, for example, in a subsequent task + result = dict( + changed=False, + original_message='', + message='' + ) + + # the AnsibleModule object will be our abstraction working with Ansible + # this includes instantiation, a couple of common attr would be the + # args/params passed to the execution, as well as if the module + # supports check mode + module = AnsibleModule( + argument_spec=module_args, + supports_check_mode=True + ) + + # if the user is working with this module in only check mode we do not + # want to make any changes to the environment, just return the current + # state with no modifications + if module.check_mode: + return result + + # manipulate or modify the state as needed (this is going to be the + # part where your module will do what it needs to 
do) + result['original_message'] = module.params['name'] + result['message'] = 'goodbye' + + # use whatever logic you need to determine whether or not this module + # made any modifications to your target + if module.params['new']: + result['changed'] = True + + # during the execution of the module, if there is an exception or a + # conditional state that effectively causes a failure, run + # AnsibleModule.fail_json() to pass in the message and the result + if module.params['name'] == 'fail me': + module.fail_json(msg='You requested this to fail', **result) + + # in the event of a successful module execution, you will want to + # simple AnsibleModule.exit_json(), passing the key/value results + module.exit_json(**result) + + def main(): + run_module() + + if __name__ == '__main__': + main() + +Local/direct module testing +=========================== + +You may want to test the module on the local machine without targeting a +remote host. This is a great way to quickly and easily debug a module +that can run locally. + +- Create an arguments file in ``/tmp/args.json`` with the following + content: (explanation below) + + .. code:: json + + { + "ANSIBLE_MODULE_ARGS": { + "name": "hello", + "new": true + } + } + +- If you are using a virtual environment (highly recommended for + development) activate it: ``$ . venv/bin/activate`` +- Setup the environment for development: ``$ . hacking/env-setup`` +- Run your test module locally and directly: + ``$ python ./my_new_test_module.py /tmp/args.json`` + +This should be working output that resembles something like the +following: + +:: + + {"changed": true, "state": {"original_message": "hello", "new_message": "goodbye"}, "invocation": {"module_args": {"name": "hello", "new": true}}} + +The arguments file is just a basic json config file that you can +use to pass the module your parameters to run the module it + +Playbook module testing +======================= + +If you want to test your new module, you can now consume it with an +Ansible playbook. + +- Create a playbook in any directory: ``$ touch testmod.yml`` +- Add the following to the new playbook file \`\`\`yaml --- +- name: test my new module connection: local hosts: localhost + +tasks: - name: run the new module my\_new\_test\_module: name: 'hello' +new: true register: testout + +:: + + - name: dump test output + debug: + msg: '{{ testout }}' + +:: + + - Run the playbook and analyze the output: `$ ansible-playbook ./testmod.yml` + + # Debugging (local) + + If you want to break into a module and step through with the debugger, locally running the module you can do: + + 1. Set a breakpoint in the module: `import pdb; pdb.set_trace()` + 1. Run the module on the local machine: `$ python -m pdb ./my_new_test_module.py ./args.json` + + # Debugging (remote) + + In the event you want to debug a module that is running on a remote target (i.e. not localhost), one way to do this is the following: + + 1. On your controller machine (running Ansible) set `ANSIBLE_KEEP_REMOTE_FILES=1` (this tells Ansible to retain the modules it sends to the remote machine instead of removing them) + 1. Run your playbook targetting the remote machine and specify `-vvvv` (the verbose output will show you many things, including the remote location that Ansible uses for the modules) + 1. Take note of the remote path Ansible used on the remote host + 1. SSH into the remote target after the completion of the playbook + 1. 
Navigate to the directory (most likely it is going to be your ansible remote user defined or implied from the playbook: `~/.ansible/tmp/ansible-tmp-...`) + 1. Here you should see the module that you executed from your Ansible controller, but this is the zipped file that Ansible sent to the remote host. You can run this by specifying `python my_test_module.py` (not necessary) + 1. To debug, though, we will want to extra this zip out to the original module format: `python my_test_module.py explode` (Ansible will expand the module into `./debug-dir`) + 1. Navigate to `./debug-dir` (notice that unzipping has caused the generation of `ansible_module_my_test_module.py`) + 1. Modify or set a breakpoint in the unzipped module + 1. Ensure that the unzipped module is executable: `$ chmod 755 ansible_module_my_test_module.py` + 1. Run the unzipped module directly passing the args file: `$ ./ansible_module_my_test_module.py args` (args is the file that contains the params that were originally passed. Good for repro and debugging) + + # Unit testing + + Unit tests for modules will be appropriately located in `./test/units/modules`. You must first setup your testing environment. In my case, I'm using Python 3.5. + + - Install the requirements (outside of your virtual environment): `$ pip3 install -r ./test/runner/requirements/units.txt` + - To run all tests do the following: `$ ansible-test units --python 3.5` (you must run `. hacking/env-setup` prior to this) + + :bulb: Ansible uses pytest for unit testing + + To run pytest against a single test module, you can do the following (provide the path to the test module appropriately): + +$ pytest -r a --cov=. --cov-report=html --fulltrace --color yes +test/units/modules/.../test\_my\_new\_test\_module.py \`\`\` + +Communication and development support +===================================== + +Join the IRC channel ``#ansible-devel`` on freenode for discussions +surrounding Ansible development. + +For questions and discussions pertaining to using the Ansible product, +use the ``#ansible`` channel. + +Credit +====== + +A *huge* thank you to the Ansible team at Red Hat for providing not only +a great product but also the willingness to help out contributors!are going to use Python here but any language is possible. Only File I/O and outputting to standard +outAnsible Development by Example +============================== + +- `Why? <#why>`__ +- `What is this? <#what-is-this>`__ +- `Environment setup <#environment-setup>`__ +- `New module development <#new-module-development>`__ +- `Local/direct module testing <#localdirect-module-testing>`__ +- `Playbook module testing <#playbook-module-testing>`__ +- `Debugging (local) <#debugging-local>`__ +- `Debugging (remote) <#debugging-remote>`__ +- `Unit testing <#unit-testing>`__ +- Integration testing (coming soon) +- `Communication and development + support <#communication-and-development-support>`__ +- `Credit <#credit>`__ + +Why? +~~~~ + +Ansible is an awesome tool for configuration management. It is also a +highly utilized one, and there are so many ways to contribute as a +community. + +What is this? +~~~~~~~~~~~~~ + +There is no doubt that Ansible is a complex tool, with lots of +inner-workings, yet it is easy to work with as an end user. But on the +other end of that, contributing to Ansible with code can sometimes be a +daunting task. + +This documentation is a way to show step-by-step how to develop Ansible +modules, both new module development as well as bug fixes and debugging. 
+ +Environment setup +================= + +1. Clone the Ansible repository: + ``$ git clone https://github.com/ansible/ansible.git`` +2. Change directory into the repository root dir: ``$ cd ansible`` +3. Create a virtual environment: ``$ python3 -m venv venv`` (or for + Python 2 ``$ virtualenv venv``. Note, this requires you to install + the virtualenv package: ``$ pip install virtualenv``) +4. Activate the virtual environment: ``$ . venv/bin/activate`` +5. Install development requirements: + ``$ pip install -r requirements.txt`` +6. Run the environment setup script for each new dev shell process: + ``$ . hacking/env-setup`` + +After the initial setup above, every time you are ready to start +developing Ansible you should be able to just run the following from the +root of the Ansible repo: +``$ . venv/bin/activate && . hacking/env-setup`` + +Starting new development now? Fixing a bug? Create a new branch: +``$ git checkout -b my-new-branch``. If you are planning on contributing +back to the main Ansible repostiry, fork the Ansible repository into +your own GitHub account and developing against your new non-devel branch +in your fork. When you believe you have a good working code change, +submit a pull request to the Ansible repository. + +Submitting a new module to the upstream Ansible repo? Run +through sanity checks first: +``$ ansible-test sanity -v --docker --python 2.7 MODULE_NAME`` (this +requires docker to be installed and running. If you'd rather not use a +container for this you can choose to use ``--tox`` instead of +``--docker``) + +New module development +====================== + +If you are creating a new module that doesn't exist, you would start +working on a whole new file. Here is an example: + +- Navigate to the directory that you want to develop your new module + in. E.g. 
``$ cd lib/ansible/modules/cloud/azure/`` +- Create your new module file: ``$ touch my_new_test_module.py`` +- Paste this simple into the new module file: (explanation in comments):: + + #!/usr/bin/python + + ANSIBLE_METADATA = { + 'metadata_version': '1.1', + 'status': ['preview'], + 'supported_by': 'community' + } + + DOCUMENTATION = ''' + --- + module: my_sample_module + + short_description: This is my sample module + + version_added: "2.4" + + description: + - "This is my longer description explaining my sample module" + + options: + name: + description: + - This is the message to send to the sample module + required: true + new: + description: + - Control to demo if the result of this module is changed or not + required: false + + extends_documentation_fragment + - azure + + author: + - Your Name (@yourhandle) + ''' + + EXAMPLES = ''' + # Pass in a message + - name: Test with a message + my_new_test_module: + name: hello world + + # pass in a message and have changed true + - name: Test with a message and changed output + my_new_test_module: + name: hello world + new: true + + # fail the module + - name: Test failure of the module + my_new_test_module: + name: fail me + ''' + + RETURN = ''' + original_message: + description: The original name param that was passed in + type: str + message: + description: The output message that the sample module generates + ''' + + from ansible.module_utils.basic import AnsibleModule + + def run_module(): + # define the available arguments/parameters that a user can pass to + # the module + module_args = dict( + name=dict(type='str', required=True), + new=dict(type='bool', required=False, default=False) + ) + + # seed the result dict in the object + # we primarily care about changed and state + # change is if this module effectively modified the target + # state will include any data that you want your module to pass back + # for consumption, for example, in a subsequent task + result = dict( + changed=False, + original_message='', + message='' + ) + + # the AnsibleModule object will be our abstraction working with Ansible + # this includes instantiation, a couple of common attr would be the + # args/params passed to the execution, as well as if the module + # supports check mode + module = AnsibleModule( + argument_spec=module_args, + supports_check_mode=True + ) + + # if the user is working with this module in only check mode we do not + # want to make any changes to the environment, just return the current + # state with no modifications + if module.check_mode: + return result + + # manipulate or modify the state as needed (this is going to be the + # part where your module will do what it needs to do) + result['original_message'] = module.params['name'] + result['message'] = 'goodbye' + + # use whatever logic you need to determine whether or not this module + # made any modifications to your target + if module.params['new']: + result['changed'] = True + + # during the execution of the module, if there is an exception or a + # conditional state that effectively causes a failure, run + # AnsibleModule.fail_json() to pass in the message and the result + if module.params['name'] == 'fail me': + module.fail_json(msg='You requested this to fail', **result) + + # in the event of a successful module execution, you will want to + # simple AnsibleModule.exit_json(), passing the key/value results + module.exit_json(**result) + + def main(): + run_module() + + if __name__ == '__main__': + main() + +Local/direct module testing +=========================== + 
+You may want to test the module on the local machine without targeting a +remote host. This is a great way to quickly and easily debug a module +that can run locally. + +- Create an arguments file in ``/tmp/args.json`` with the following + content: (explanation below) + + .. code:: json + + { + "ANSIBLE_MODULE_ARGS": { + "name": "hello", + "new": true + } + } + +- If you are using a virtual environment (highly recommended for + development) activate it: ``$ . venv/bin/activate`` +- Setup the environment for development: ``$ . hacking/env-setup`` +- Run your test module locally and directly: + ``$ python ./my_new_test_module.py /tmp/args.json`` + +This should be working output that resembles something like the +following: + +:: + + {"changed": true, "state": {"original_message": "hello", "new_message": "goodbye"}, "invocation": {"module_args": {"name": "hello", "new": true}}} + +The arguments file is just a basic json config file that you can +use to pass the module your parameters to run the module it + +Playbook module testing +======================= + +If you want to test your new module, you can now consume it with an +Ansible playbook. + +- Create a playbook in any directory: ``$ touch testmod.yml`` +- Add the following to the new playbook file \`\`\`yaml --- +- name: test my new module connection: local hosts: localhost + +tasks: - name: run the new module my\_new\_test\_module: name: 'hello' +new: true register: testout + +:: + + - name: dump test output + debug: + msg: '{{ testout }}' + +:: + + - Run the playbook and analyze the output: `$ ansible-playbook ./testmod.yml` + + # Debugging (local) + + If you want to break into a module and step through with the debugger, locally running the module you can do: + + 1. Set a breakpoint in the module: `import pdb; pdb.set_trace()` + 1. Run the module on the local machine: `$ python -m pdb ./my_new_test_module.py ./args.json` + + # Debugging (remote) + + In the event you want to debug a module that is running on a remote target (i.e. not localhost), one way to do this is the following: + + 1. On your controller machine (running Ansible) set `ANSIBLE_KEEP_REMOTE_FILES=1` (this tells Ansible to retain the modules it sends to the remote machine instead of removing them) + 1. Run your playbook targetting the remote machine and specify `-vvvv` (the verbose output will show you many things, including the remote location that Ansible uses for the modules) + 1. Take note of the remote path Ansible used on the remote host + 1. SSH into the remote target after the completion of the playbook + 1. Navigate to the directory (most likely it is going to be your ansible remote user defined or implied from the playbook: `~/.ansible/tmp/ansible-tmp-...`) + 1. Here you should see the module that you executed from your Ansible controller, but this is the zipped file that Ansible sent to the remote host. You can run this by specifying `python my_test_module.py` (not necessary) + 1. To debug, though, we will want to extra this zip out to the original module format: `python my_test_module.py explode` (Ansible will expand the module into `./debug-dir`) + 1. Navigate to `./debug-dir` (notice that unzipping has caused the generation of `ansible_module_my_test_module.py`) + 1. Modify or set a breakpoint in the unzipped module + 1. Ensure that the unzipped module is executable: `$ chmod 755 ansible_module_my_test_module.py` + 1. 
Run the unzipped module directly passing the args file: `$ ./ansible_module_my_test_module.py args` (args is the file that contains the params that were originally passed. Good for repro and debugging) + + # Unit testing + + Unit tests for modules will be appropriately located in `./test/units/modules`. You must first setup your testing environment. In my case, I'm using Python 3.5. + + - Install the requirements (outside of your virtual environment): `$ pip3 install -r ./test/runner/requirements/units.txt` + - To run all tests do the following: `$ ansible-test units --python 3.5` (you must run `. hacking/env-setup` prior to this) + + :bulb: Ansible uses pytest for unit testing + + To run pytest against a single test module, you can do the following (provide the path to the test module appropriately): + +$ pytest -r a --cov=. --cov-report=html --fulltrace --color yes +test/units/modules/.../test\_my\_new\_test\_module.py \`\`\` + +Communication and development support +===================================== + +Join the IRC channel ``#ansible-devel`` on freenode for discussions +surrounding Ansible development. + +For questions and discussions pertaining to using the Ansible product, +use the ``#ansible`` channel. + +Credit +====== + +A *huge* thank you to the Ansible team at Red Hat for providing not only +a great product but also the willingness to help out contributors! are required. So, bash, C++, clojure, Python, Ruby, whatever you want +is Ansible Development by Example +============================== + +- `Why? <#why>`__ +- `What is this? <#what-is-this>`__ +- `Environment setup <#environment-setup>`__ +- `New module development <#new-module-development>`__ +- `Local/direct module testing <#localdirect-module-testing>`__ +- `Playbook module testing <#playbook-module-testing>`__ +- `Debugging (local) <#debugging-local>`__ +- `Debugging (remote) <#debugging-remote>`__ +- `Unit testing <#unit-testing>`__ +- Integration testing (coming soon) +- `Communication and development + support <#communication-and-development-support>`__ +- `Credit <#credit>`__ + +Why? +~~~~ + +Ansible is an awesome tool for configuration management. It is also a +highly utilized one, and there are so many ways to contribute as a +community. + +What is this? +~~~~~~~~~~~~~ + +There is no doubt that Ansible is a complex tool, with lots of +inner-workings, yet it is easy to work with as an end user. But on the +other end of that, contributing to Ansible with code can sometimes be a +daunting task. + +This documentation is a way to show step-by-step how to develop Ansible +modules, both new module development as well as bug fixes and debugging. + +Environment setup +================= + +1. Clone the Ansible repository: + ``$ git clone https://github.com/ansible/ansible.git`` +2. Change directory into the repository root dir: ``$ cd ansible`` +3. Create a virtual environment: ``$ python3 -m venv venv`` (or for + Python 2 ``$ virtualenv venv``. Note, this requires you to install + the virtualenv package: ``$ pip install virtualenv``) +4. Activate the virtual environment: ``$ . venv/bin/activate`` +5. Install development requirements: + ``$ pip install -r requirements.txt`` +6. Run the environment setup script for each new dev shell process: + ``$ . hacking/env-setup`` + +After the initial setup above, every time you are ready to start +developing Ansible you should be able to just run the following from the +root of the Ansible repo: +``$ . venv/bin/activate && . hacking/env-setup`` + +Starting new development now? 
Fixing a bug? Create a new branch: +``$ git checkout -b my-new-branch``. If you are planning on contributing +back to the main Ansible repostiry, fork the Ansible repository into +your own GitHub account and developing against your new non-devel branch +in your fork. When you believe you have a good working code change, +submit a pull request to the Ansible repository. + +Submitting a new module to the upstream Ansible repo? Run +through sanity checks first: +``$ ansible-test sanity -v --docker --python 2.7 MODULE_NAME`` (this +requires docker to be installed and running. If you'd rather not use a +container for this you can choose to use ``--tox`` instead of +``--docker``) + +New module development +====================== + +If you are creating a new module that doesn't exist, you would start +working on a whole new file. Here is an example: + +- Navigate to the directory that you want to develop your new module + in. E.g. ``$ cd lib/ansible/modules/cloud/azure/`` +- Create your new module file: ``$ touch my_new_test_module.py`` +- Paste this simple into the new module file: (explanation in comments):: + + #!/usr/bin/python + + ANSIBLE_METADATA = { + 'metadata_version': '1.1', + 'status': ['preview'], + 'supported_by': 'community' + } + + DOCUMENTATION = ''' + --- + module: my_sample_module + + short_description: This is my sample module + + version_added: "2.4" + + description: + - "This is my longer description explaining my sample module" + + options: + name: + description: + - This is the message to send to the sample module + required: true + new: + description: + - Control to demo if the result of this module is changed or not + required: false + + extends_documentation_fragment + - azure + + author: + - Your Name (@yourhandle) + ''' + + EXAMPLES = ''' + # Pass in a message + - name: Test with a message + my_new_test_module: + name: hello world + + # pass in a message and have changed true + - name: Test with a message and changed output + my_new_test_module: + name: hello world + new: true + + # fail the module + - name: Test failure of the module + my_new_test_module: + name: fail me + ''' + + RETURN = ''' + original_message: + description: The original name param that was passed in + type: str + message: + description: The output message that the sample module generates + ''' + + from ansible.module_utils.basic import AnsibleModule + + def run_module(): + # define the available arguments/parameters that a user can pass to + # the module + module_args = dict( + name=dict(type='str', required=True), + new=dict(type='bool', required=False, default=False) + ) + + # seed the result dict in the object + # we primarily care about changed and state + # change is if this module effectively modified the target + # state will include any data that you want your module to pass back + # for consumption, for example, in a subsequent task + result = dict( + changed=False, + original_message='', + message='' + ) + + # the AnsibleModule object will be our abstraction working with Ansible + # this includes instantiation, a couple of common attr would be the + # args/params passed to the execution, as well as if the module + # supports check mode + module = AnsibleModule( + argument_spec=module_args, + supports_check_mode=True + ) + + # if the user is working with this module in only check mode we do not + # want to make any changes to the environment, just return the current + # state with no modifications + if module.check_mode: + return result + + # manipulate or modify the state as needed (this 
is going to be the + # part where your module will do what it needs to do) + result['original_message'] = module.params['name'] + result['message'] = 'goodbye' + + # use whatever logic you need to determine whether or not this module + # made any modifications to your target + if module.params['new']: + result['changed'] = True + + # during the execution of the module, if there is an exception or a + # conditional state that effectively causes a failure, run + # AnsibleModule.fail_json() to pass in the message and the result + if module.params['name'] == 'fail me': + module.fail_json(msg='You requested this to fail', **result) + + # in the event of a successful module execution, you will want to + # simple AnsibleModule.exit_json(), passing the key/value results + module.exit_json(**result) + + def main(): + run_module() + + if __name__ == '__main__': + main() + +Local/direct module testing +=========================== + +You may want to test the module on the local machine without targeting a +remote host. This is a great way to quickly and easily debug a module +that can run locally. + +- Create an arguments file in ``/tmp/args.json`` with the following + content: (explanation below) + + .. code:: json + + { + "ANSIBLE_MODULE_ARGS": { + "name": "hello", + "new": true + } + } + +- If you are using a virtual environment (highly recommended for + development) activate it: ``$ . venv/bin/activate`` +- Setup the environment for development: ``$ . hacking/env-setup`` +- Run your test module locally and directly: + ``$ python ./my_new_test_module.py /tmp/args.json`` + +This should be working output that resembles something like the +following: + +:: + + {"changed": true, "state": {"original_message": "hello", "new_message": "goodbye"}, "invocation": {"module_args": {"name": "hello", "new": true}}} + +The arguments file is just a basic json config file that you can +use to pass the module your parameters to run the module it + +Playbook module testing +======================= + +If you want to test your new module, you can now consume it with an +Ansible playbook. + +- Create a playbook in any directory: ``$ touch testmod.yml`` +- Add the following to the new playbook file \`\`\`yaml --- +- name: test my new module connection: local hosts: localhost + +tasks: - name: run the new module my\_new\_test\_module: name: 'hello' +new: true register: testout + +:: + + - name: dump test output + debug: + msg: '{{ testout }}' + +:: + + - Run the playbook and analyze the output: `$ ansible-playbook ./testmod.yml` + + # Debugging (local) + + If you want to break into a module and step through with the debugger, locally running the module you can do: + + 1. Set a breakpoint in the module: `import pdb; pdb.set_trace()` + 1. Run the module on the local machine: `$ python -m pdb ./my_new_test_module.py ./args.json` + + # Debugging (remote) + + In the event you want to debug a module that is running on a remote target (i.e. not localhost), one way to do this is the following: + + 1. On your controller machine (running Ansible) set `ANSIBLE_KEEP_REMOTE_FILES=1` (this tells Ansible to retain the modules it sends to the remote machine instead of removing them) + 1. Run your playbook targetting the remote machine and specify `-vvvv` (the verbose output will show you many things, including the remote location that Ansible uses for the modules) + 1. Take note of the remote path Ansible used on the remote host + 1. SSH into the remote target after the completion of the playbook + 1. 
Navigate to the directory (most likely it is going to be your ansible remote user defined or implied from the playbook: `~/.ansible/tmp/ansible-tmp-...`) + 1. Here you should see the module that you executed from your Ansible controller, but this is the zipped file that Ansible sent to the remote host. You can run this by specifying `python my_test_module.py` (not necessary) + 1. To debug, though, we will want to extra this zip out to the original module format: `python my_test_module.py explode` (Ansible will expand the module into `./debug-dir`) + 1. Navigate to `./debug-dir` (notice that unzipping has caused the generation of `ansible_module_my_test_module.py`) + 1. Modify or set a breakpoint in the unzipped module + 1. Ensure that the unzipped module is executable: `$ chmod 755 ansible_module_my_test_module.py` + 1. Run the unzipped module directly passing the args file: `$ ./ansible_module_my_test_module.py args` (args is the file that contains the params that were originally passed. Good for repro and debugging) + + # Unit testing + + Unit tests for modules will be appropriately located in `./test/units/modules`. You must first setup your testing environment. In my case, I'm using Python 3.5. + + - Install the requirements (outside of your virtual environment): `$ pip3 install -r ./test/runner/requirements/units.txt` + - To run all tests do the following: `$ ansible-test units --python 3.5` (you must run `. hacking/env-setup` prior to this) + + :bulb: Ansible uses pytest for unit testing + + To run pytest against a single test module, you can do the following (provide the path to the test module appropriately): + +$ pytest -r a --cov=. --cov-report=html --fulltrace --color yes +test/units/modules/.../test\_my\_new\_test\_module.py \`\`\` + +Communication and development support +===================================== + +Join the IRC channel ``#ansible-devel`` on freenode for discussions +surrounding Ansible development. + +For questions and discussions pertaining to using the Ansible product, +use the ``#ansible`` channel. + +Credit +====== + +A *huge* thank you to the Ansible team at Red Hat for providing not only +a great product but also the willingness to help out contributors!fine. +Ansible Development by Example +============================== + +- `Why? <#why>`__ +- `What is this? <#what-is-this>`__ +- `Environment setup <#environment-setup>`__ +- `New module development <#new-module-development>`__ +- `Local/direct module testing <#localdirect-module-testing>`__ +- `Playbook module testing <#playbook-module-testing>`__ +- `Debugging (local) <#debugging-local>`__ +- `Debugging (remote) <#debugging-remote>`__ +- `Unit testing <#unit-testing>`__ +- Integration testing (coming soon) +- `Communication and development + support <#communication-and-development-support>`__ +- `Credit <#credit>`__ + +Why? +~~~~ + +Ansible is an awesome tool for configuration management. It is also a +highly utilized one, and there are so many ways to contribute as a +community. + +What is this? +~~~~~~~~~~~~~ + +There is no doubt that Ansible is a complex tool, with lots of +inner-workings, yet it is easy to work with as an end user. But on the +other end of that, contributing to Ansible with code can sometimes be a +daunting task. + +This documentation is a way to show step-by-step how to develop Ansible +modules, both new module development as well as bug fixes and debugging. + +Environment setup +================= + +1. 
Clone the Ansible repository: + ``$ git clone https://github.com/ansible/ansible.git`` +2. Change directory into the repository root dir: ``$ cd ansible`` +3. Create a virtual environment: ``$ python3 -m venv venv`` (or for + Python 2 ``$ virtualenv venv``. Note, this requires you to install + the virtualenv package: ``$ pip install virtualenv``) +4. Activate the virtual environment: ``$ . venv/bin/activate`` +5. Install development requirements: + ``$ pip install -r requirements.txt`` +6. Run the environment setup script for each new dev shell process: + ``$ . hacking/env-setup`` + +After the initial setup above, every time you are ready to start +developing Ansible you should be able to just run the following from the +root of the Ansible repo: +``$ . venv/bin/activate && . hacking/env-setup`` + +Starting new development now? Fixing a bug? Create a new branch: +``$ git checkout -b my-new-branch``. If you are planning on contributing +back to the main Ansible repostiry, fork the Ansible repository into +your own GitHub account and developing against your new non-devel branch +in your fork. When you believe you have a good working code change, +submit a pull request to the Ansible repository. + +Submitting a new module to the upstream Ansible repo? Run +through sanity checks first: +``$ ansible-test sanity -v --docker --python 2.7 MODULE_NAME`` (this +requires docker to be installed and running. If you'd rather not use a +container for this you can choose to use ``--tox`` instead of +``--docker``) + +New module development +====================== + +If you are creating a new module that doesn't exist, you would start +working on a whole new file. Here is an example: + +- Navigate to the directory that you want to develop your new module + in. E.g. ``$ cd lib/ansible/modules/cloud/azure/`` +- Create your new module file: ``$ touch my_new_test_module.py`` +- Paste this simple into the new module file: (explanation in comments):: + + #!/usr/bin/python + + ANSIBLE_METADATA = { + 'metadata_version': '1.1', + 'status': ['preview'], + 'supported_by': 'community' + } + + DOCUMENTATION = ''' + --- + module: my_sample_module + + short_description: This is my sample module + + version_added: "2.4" + + description: + - "This is my longer description explaining my sample module" + + options: + name: + description: + - This is the message to send to the sample module + required: true + new: + description: + - Control to demo if the result of this module is changed or not + required: false + + extends_documentation_fragment + - azure + + author: + - Your Name (@yourhandle) + ''' + + EXAMPLES = ''' + # Pass in a message + - name: Test with a message + my_new_test_module: + name: hello world + + # pass in a message and have changed true + - name: Test with a message and changed output + my_new_test_module: + name: hello world + new: true + + # fail the module + - name: Test failure of the module + my_new_test_module: + name: fail me + ''' + + RETURN = ''' + original_message: + description: The original name param that was passed in + type: str + message: + description: The output message that the sample module generates + ''' + + from ansible.module_utils.basic import AnsibleModule + + def run_module(): + # define the available arguments/parameters that a user can pass to + # the module + module_args = dict( + name=dict(type='str', required=True), + new=dict(type='bool', required=False, default=False) + ) + + # seed the result dict in the object + # we primarily care about changed and state + # change is 
if this module effectively modified the target + # state will include any data that you want your module to pass back + # for consumption, for example, in a subsequent task + result = dict( + changed=False, + original_message='', + message='' + ) + + # the AnsibleModule object will be our abstraction working with Ansible + # this includes instantiation, a couple of common attr would be the + # args/params passed to the execution, as well as if the module + # supports check mode + module = AnsibleModule( + argument_spec=module_args, + supports_check_mode=True + ) + + # if the user is working with this module in only check mode we do not + # want to make any changes to the environment, just return the current + # state with no modifications + if module.check_mode: + return result + + # manipulate or modify the state as needed (this is going to be the + # part where your module will do what it needs to do) + result['original_message'] = module.params['name'] + result['message'] = 'goodbye' + + # use whatever logic you need to determine whether or not this module + # made any modifications to your target + if module.params['new']: + result['changed'] = True + + # during the execution of the module, if there is an exception or a + # conditional state that effectively causes a failure, run + # AnsibleModule.fail_json() to pass in the message and the result + if module.params['name'] == 'fail me': + module.fail_json(msg='You requested this to fail', **result) + + # in the event of a successful module execution, you will want to + # simple AnsibleModule.exit_json(), passing the key/value results + module.exit_json(**result) + + def main(): + run_module() + + if __name__ == '__main__': + main() + +Local/direct module testing +=========================== + +You may want to test the module on the local machine without targeting a +remote host. This is a great way to quickly and easily debug a module +that can run locally. + +- Create an arguments file in ``/tmp/args.json`` with the following + content: (explanation below) + + .. code:: json + + { + "ANSIBLE_MODULE_ARGS": { + "name": "hello", + "new": true + } + } + +- If you are using a virtual environment (highly recommended for + development) activate it: ``$ . venv/bin/activate`` +- Setup the environment for development: ``$ . hacking/env-setup`` +- Run your test module locally and directly: + ``$ python ./my_new_test_module.py /tmp/args.json`` + +This should be working output that resembles something like the +following: + +:: + + {"changed": true, "state": {"original_message": "hello", "new_message": "goodbye"}, "invocation": {"module_args": {"name": "hello", "new": true}}} + +The arguments file is just a basic json config file that you can +use to pass the module your parameters to run the module it + +Playbook module testing +======================= + +If you want to test your new module, you can now consume it with an +Ansible playbook. + +- Create a playbook in any directory: ``$ touch testmod.yml`` +- Add the following to the new playbook file \`\`\`yaml --- +- name: test my new module connection: local hosts: localhost + +tasks: - name: run the new module my\_new\_test\_module: name: 'hello' +new: true register: testout + +:: + + - name: dump test output + debug: + msg: '{{ testout }}' + +:: + + - Run the playbook and analyze the output: `$ ansible-playbook ./testmod.yml` + + # Debugging (local) + + If you want to break into a module and step through with the debugger, locally running the module you can do: + + 1. 
Set a breakpoint in the module: `import pdb; pdb.set_trace()` + 1. Run the module on the local machine: `$ python -m pdb ./my_new_test_module.py ./args.json` + + # Debugging (remote) + + In the event you want to debug a module that is running on a remote target (i.e. not localhost), one way to do this is the following: + + 1. On your controller machine (running Ansible) set `ANSIBLE_KEEP_REMOTE_FILES=1` (this tells Ansible to retain the modules it sends to the remote machine instead of removing them) + 1. Run your playbook targetting the remote machine and specify `-vvvv` (the verbose output will show you many things, including the remote location that Ansible uses for the modules) + 1. Take note of the remote path Ansible used on the remote host + 1. SSH into the remote target after the completion of the playbook + 1. Navigate to the directory (most likely it is going to be your ansible remote user defined or implied from the playbook: `~/.ansible/tmp/ansible-tmp-...`) + 1. Here you should see the module that you executed from your Ansible controller, but this is the zipped file that Ansible sent to the remote host. You can run this by specifying `python my_test_module.py` (not necessary) + 1. To debug, though, we will want to extra this zip out to the original module format: `python my_test_module.py explode` (Ansible will expand the module into `./debug-dir`) + 1. Navigate to `./debug-dir` (notice that unzipping has caused the generation of `ansible_module_my_test_module.py`) + 1. Modify or set a breakpoint in the unzipped module + 1. Ensure that the unzipped module is executable: `$ chmod 755 ansible_module_my_test_module.py` + 1. Run the unzipped module directly passing the args file: `$ ./ansible_module_my_test_module.py args` (args is the file that contains the params that were originally passed. Good for repro and debugging) + + # Unit testing + + Unit tests for modules will be appropriately located in `./test/units/modules`. You must first setup your testing environment. In my case, I'm using Python 3.5. + + - Install the requirements (outside of your virtual environment): `$ pip3 install -r ./test/runner/requirements/units.txt` + - To run all tests do the following: `$ ansible-test units --python 3.5` (you must run `. hacking/env-setup` prior to this) + + :bulb: Ansible uses pytest for unit testing + + To run pytest against a single test module, you can do the following (provide the path to the test module appropriately): + +$ pytest -r a --cov=. --cov-report=html --fulltrace --color yes +test/units/modules/.../test\_my\_new\_test\_module.py \`\`\` + +Communication and development support +===================================== + +Join the IRC channel ``#ansible-devel`` on freenode for discussions +surrounding Ansible development. + +For questions and discussions pertaining to using the Ansible product, +use the ``#ansible`` channel. + +Credit +====== + +A *huge* thank you to the Ansible team at Red Hat for providing not only +a great product but also the willingness to help out contributors! diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_modules_general.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_modules_general.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_modules_general.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_modules_general.rst 2017-09-19 17:10:47.000000000 +0000 @@ -1,336 +1,316 @@ .. 
_module_dev_tutorial_sample: -Building A Simple Module -```````````````````````` +Ansible Module Development Walkthrough +====================================== -Let's build a very-basic module to get and set the system time. For starters, let's build -a module that just outputs the current time. -We are going to use Python here but any language is possible. Only File I/O and outputting to standard -out are required. So, bash, C++, clojure, Python, Ruby, whatever you want -is fine. +In this section, we will walk through developing, testing, and debugging an Ansible module. -Now Python Ansible modules contain some extremely powerful shortcuts (that all the core modules use) -but first we are going to build a module the very hard way. The reason we do this is because modules -written in any language OTHER than Python are going to have to do exactly this. We'll show the easy -way later. +What's covered in this section: -So, here's an example. You would never really need to build a module to set the system time, -the 'command' module could already be used to do this. +- `Environment setup <#environment-setup>`__ +- `New module development <#new-module-development>`__ +- `Local/direct module testing <#localdirect-module-testing>`__ +- `Playbook module testing <#playbook-module-testing>`__ +- `Debugging (local) <#debugging-local>`__ +- `Debugging (remote) <#debugging-remote>`__ +- `Unit testing <#unit-testing>`__ +- Integration testing (coming soon) +- `Communication and development + support <#communication-and-development-support>`__ +- `Credit <#credit>`__ + + +Environment setup +================= + +1. Clone the Ansible repository: + ``$ git clone https://github.com/ansible/ansible.git`` +2. Change directory into the repository root dir: ``$ cd ansible`` +3. Create a virtual environment: ``$ python3 -m venv venv`` (or for + Python 2 ``$ virtualenv venv``. Note, this requires you to install + the virtualenv package: ``$ pip install virtualenv``) +4. Activate the virtual environment: ``$ . venv/bin/activate`` +5. Install development requirements: + ``$ pip install -r requirements.txt`` +6. Run the environment setup script for each new dev shell process: + ``$ . hacking/env-setup`` + +.. note:: After the initial setup above, every time you are ready to start + developing Ansible you should be able to just run the following from the + root of the Ansible repo: + ``$ . venv/bin/activate && . hacking/env-setup`` + + +New module development +====================== + +If you are creating a new module that doesn't exist, you would start +working on a whole new file. Here is an example: + +- Navigate to the directory that you want to develop your new module + in. E.g. ``$ cd lib/ansible/modules/cloud/azure/`` +- Create your new module file: ``$ touch my_new_test_module.py`` +- Paste this example code into the new module file: (explanation in comments) -Reading the modules that come with Ansible (linked above) is a great way to learn how to write -modules. Keep in mind, though, that some modules in Ansible's source tree are internalisms, -so look at :ref:`service` or :ref:`yum`, and don't stare too close into things like ``async_wrapper`` or -you'll turn to stone. Nobody ever executes ``async_wrapper`` directly. - -Ok, let's get going with an example. We'll use Python. For starters, save this as a file named :file:`timetest.py` - -.. code-block:: python +.. 
code:: python #!/usr/bin/python - import datetime - import json - - date = str(datetime.datetime.now()) - print(json.dumps({ - "time" : date - })) - -.. _module_testing: - -Testing Your Module -``````````````````` - -There's a useful test script in the source checkout for Ansible: - -.. code-block:: shell-session - - git clone git://github.com/ansible/ansible.git --recursive - source ansible/hacking/env-setup - -For instructions on setting up Ansible from source, please see -:doc:`../intro_installation`. - -Let's run the script you just wrote with that: - -.. code-block:: shell-session - - ansible/hacking/test-module -m ./timetest.py - -You should see output that looks something like this: - -.. code-block:: json - - {"time": "2012-03-14 22:13:48.539183"} - -If you did not, you might have a typo in your module, so recheck it and try again. - -.. _reading_input: - -Reading Input -````````````` -Let's modify the module to allow setting the current time. We'll do this by seeing -if a key value pair in the form `time=` is passed in to the module. - -Ansible internally saves arguments to an arguments file. So we must read the file -and parse it. The arguments file is just a string, so any form of arguments are legal. -Here we'll do some basic parsing to treat the input as key=value. - -The example usage we are trying to achieve to set the time is:: - - time time="March 14 22:10" - -If no time parameter is set, we'll just leave the time as is and return the current time. + ANSIBLE_METADATA = { + 'metadata_version': '1.1', + 'status': ['preview'], + 'supported_by': 'community' + } -.. note:: - This is obviously an unrealistic idea for a module. You'd most likely just - use the command module. However, it makes for a decent tutorial. + DOCUMENTATION = ''' + --- + module: my_sample_module + + short_description: This is my sample module + + version_added: "2.4" + + description: + - "This is my longer description explaining my sample module" + + options: + name: + description: + - This is the message to send to the sample module + required: true + new: + description: + - Control to demo if the result of this module is changed or not + required: false + + extends_documentation_fragment: + - azure + + author: + - Your Name (@yourhandle) + ''' + + EXAMPLES = ''' + # Pass in a message + - name: Test with a message + my_new_test_module: + name: hello world + + # pass in a message and have changed true + - name: Test with a message and changed output + my_new_test_module: + name: hello world + new: true + + # fail the module + - name: Test failure of the module + my_new_test_module: + name: fail me + ''' + + RETURN = ''' + original_message: + description: The original name param that was passed in + type: str + message: + description: The output message that the sample module generates + ''' -Let's look at the code. Read the comments as we'll explain as we go. Note that this -is highly verbose because it's intended as an educational example. You can write modules -a lot shorter than this: + from ansible.module_utils.basic import AnsibleModule -.. 
code-block:: python + def run_module(): + # define the available arguments/parameters that a user can pass to + # the module + module_args = dict( + name=dict(type='str', required=True), + new=dict(type='bool', required=False, default=False) + ) - #!/usr/bin/python + # seed the result dict in the object + # we primarily care about changed and state + # change is if this module effectively modified the target + # state will include any data that you want your module to pass back + # for consumption, for example, in a subsequent task + result = dict( + changed=False, + original_message='', + message='' + ) - # import some python modules that we'll use. These are all - # available in Python's core + # the AnsibleModule object will be our abstraction working with Ansible + # this includes instantiation, a couple of common attr would be the + # args/params passed to the execution, as well as if the module + # supports check mode + module = AnsibleModule( + argument_spec=module_args, + supports_check_mode=True + ) - import datetime - import sys - import json - import os - import shlex - - # read the argument string from the arguments file - args_file = sys.argv[1] - args_data = file(args_file).read() - - # For this module, we're going to do key=value style arguments. - # Modules can choose to receive json instead by adding the string: - # WANT_JSON - # Somewhere in the file. - # Modules can also take free-form arguments instead of key-value or json - # but this is not recommended. - - arguments = shlex.split(args_data) - for arg in arguments: - - # ignore any arguments without an equals in it - if "=" in arg: - - (key, value) = arg.split("=") - - # if setting the time, the key 'time' - # will contain the value we want to set the time to - - if key == "time": - - # now we'll affect the change. Many modules - # will strive to be idempotent, generally - # by not performing any actions if the current - # state is the same as the desired state. - # See 'service' or 'yum' in the main git tree - # for an illustrative example. - - rc = os.system("date -s \"%s\"" % value) - - # always handle all possible errors - # - # when returning a failure, include 'failed' - # in the return data, and explain the failure - # in 'msg'. Both of these conventions are - # required however additional keys and values - # can be added. - - if rc != 0: - print(json.dumps({ - "failed" : True, - "msg" : "failed setting the time" - })) - sys.exit(1) - - # when things do not fail, we do not - # have any restrictions on what kinds of - # data are returned, but it's always a - # good idea to include whether or not - # a change was made, as that will allow - # notifiers to be used in playbooks. - - date = str(datetime.datetime.now()) - print(json.dumps({ - "time" : date, - "changed" : True - })) - sys.exit(0) - - # if no parameters are sent, the module may or - # may not error out, this one will just - # return the time - - date = str(datetime.datetime.now()) - print(json.dumps({ - "time" : date - })) - -Let's test that module:: - - ansible/hacking/test-module -m ./timetest.py -a "time=\"March 14 12:23\"" - -This should return something like:: - - {"changed": true, "time": "2012-03-14 12:23:00.000307"} - -.. _binary_module_reading_input: - -Binary Modules Input -++++++++++++++++++++ - -Support for binary modules was added in Ansible 2.2. When Ansible detects a binary module, it will proceed to -supply the argument input as a file on ``argv[1]`` that is formatted as JSON. 
The JSON contents of that file -would resemble something similar to the following payload for a module accepting the same arguments as the -``ping`` module: + # if the user is working with this module in only check mode we do not + # want to make any changes to the environment, just return the current + # state with no modifications + if module.check_mode: + return result + + # manipulate or modify the state as needed (this is going to be the + # part where your module will do what it needs to do) + result['original_message'] = module.params['name'] + result['message'] = 'goodbye' + + # use whatever logic you need to determine whether or not this module + # made any modifications to your target + if module.params['new']: + result['changed'] = True + + # during the execution of the module, if there is an exception or a + # conditional state that effectively causes a failure, run + # AnsibleModule.fail_json() to pass in the message and the result + if module.params['name'] == 'fail me': + module.fail_json(msg='You requested this to fail', **result) + + # in the event of a successful module execution, you will want to + # simple AnsibleModule.exit_json(), passing the key/value results + module.exit_json(**result) -.. code-block:: json + def main(): + run_module() - { - "data": "pong", - "_ansible_verbosity": 4, - "_ansible_diff": false, - "_ansible_debug": false, - "_ansible_check_mode": false, - "_ansible_no_log": false - } + if __name__ == '__main__': + main() -.. _module_provided_facts: +Local/direct module testing +=========================== -Module Provided 'Facts' -```````````````````````` +You may want to test the module on the local machine without targeting a +remote host. This is a great way to quickly and easily debug a module +that can run locally. -The :ref:`setup` module that ships with Ansible provides many variables about a system that can be used in playbooks -and templates. However, it's possible to also add your own facts without modifying the system module. To do -this, just have the module return a `ansible_facts` key, like so, along with other return data: +- Create an arguments file in ``/tmp/args.json`` with the following + content: (explanation below) -.. code-block:: json +.. code:: json { - "changed" : true, - "rc" : 5, - "ansible_facts" : { - "leptons" : 5000, - "colors" : { - "red" : "FF0000", - "white" : "FFFFFF" - } + "ANSIBLE_MODULE_ARGS": { + "name": "hello", + "new": true } } -These 'facts' will be available to all statements called after that module (but not before) in the playbook. -A good idea might be to make a module called 'site_facts' and always call it at the top of each playbook, though -we're always open to improving the selection of core facts in Ansible as well. - -Returning a new fact from a python module could be done like:: - - module.exit_json(msg=message, ansible_facts=dict(leptons=5000, colors=my_colors)) - -.. _common_module_boilerplate: +- If you are using a virtual environment (highly recommended for + development) activate it: ``$ . venv/bin/activate`` +- Setup the environment for development: ``$ . hacking/env-setup`` +- Run your test module locally and directly: + ``$ python ./my_new_test_module.py /tmp/args.json`` -Common Module Boilerplate -````````````````````````` +This should be working output that resembles something like the +following: -As mentioned, if you are writing a module in Python, there are some very powerful shortcuts you can use. 
-Modules are still transferred as one file, but an arguments file is no longer needed, so these are not -only shorter in terms of code, they are actually FASTER in terms of execution time. +.. code:: json -Rather than mention these here, the best way to learn is to read some of the `source of the modules `_ that come with Ansible. + {"changed": true, "state": {"original_message": "hello", "new_message": "goodbye"}, "invocation": {"module_args": {"name": "hello", "new": true}}} -The 'group' and 'user' modules are reasonably non-trivial and showcase what this looks like. +The arguments file is just a basic json config file that you can +use to pass the module your parameters to run the module it -Key parts include always importing the boilerplate code from -:mod:`ansible.module_utils.basic` like this: +Playbook module testing +======================= -.. code-block:: python - - from ansible.module_utils.basic import AnsibleModule - if __name__ == '__main__': - main() +If you want to test your new module, you can now consume it with an +Ansible playbook. -.. note:: - Prior to Ansible-2.1.0, importing only what you used from - :mod:`ansible.module_utils.basic` did not work. You needed to use - a wildcard import like this: +- Create a playbook in any directory: ``$ touch testmod.yml`` +- Add the following to the new playbook file:: -.. code-block:: python + - name: test my new module + connection: local + hosts: localhost + tasks: + - name: run the new module + my_new_test_module: + name: 'hello' + new: true + register: testout + - name: dump test output + debug: + msg: '{{ testout }}' - from ansible.module_utils.basic import * +- Run the playbook and analyze the output: ``$ ansible-playbook ./testmod.yml`` -And instantiating the module class like: +Debugging (local) +================= -.. code-block:: python +If you want to break into a module and step through with the debugger, locally running the module you can do: - def main(): - module = AnsibleModule( - argument_spec = dict( - state = dict(default='present', choices=['present', 'absent']), - name = dict(required=True), - enabled = dict(required=True, type='bool'), - something = dict(aliases=['whatever']) - ) - ) +- Set a breakpoint in the module: ``import pdb; pdb.set_trace()`` +- Run the module on the local machine: ``$ python -m pdb ./my_new_test_module.py ./args.json`` -The :class:`AnsibleModule` provides lots of common code for handling returns, parses your arguments -for you, and allows you to check inputs. +Debugging (remote) +================== -Successful returns are made like this: +In the event you want to debug a module that is running on a remote target (i.e. not localhost), one way to do this is the following: -.. code-block:: python +- On your controller machine (running Ansible) set `ANSIBLE_KEEP_REMOTE_FILES=1` (this tells Ansible to retain the modules it sends to the remote machine instead of removing them) +- Run your playbook targetting the remote machine and specify ``-vvvv`` (the verbose output will show you many things, including the remote location that Ansible uses for the modules) +- Take note of the remote path Ansible used on the remote host +- SSH into the remote target after the completion of the playbook +- Navigate to the directory (most likely it is going to be your ansible remote user defined or implied from the playbook: ``~/.ansible/tmp/ansible-tmp-...``) +- Here you should see the module that you executed from your Ansible controller, but this is the zipped file that Ansible sent to the remote host. 
You can run this by specifying ``python my_test_module.py`` (not necessary) +- To debug, though, we will want to extract this zip out to the original module format: ``python my_test_module.py explode`` (Ansible will expand the module into ``./debug-dir``) +- Navigate to ``./debug-dir`` (notice that unzipping has caused the generation of ``ansible_module_my_test_module.py``) +- Modify or set a breakpoint in the unzipped module +- Ensure that the unzipped module is executable: ``$ chmod 755 ansible_module_my_test_module.py`` +- Run the unzipped module directly passing the args file: ``$ ./ansible_module_my_test_module.py args`` (args is the file that contains the params that were originally passed. Good for repro and debugging) - module.exit_json(changed=True, something_else=12345) +Unit testing +============ -And failures are just as simple (where `msg` is a required parameter to explain the error): +Unit tests for modules will be appropriately located in ``./test/units/modules``. You must first setup your testing environment. In this example, we're using Python 3.5. -.. code-block:: python +- Install the requirements (outside of your virtual environment): ``$ pip3 install -r ./test/runner/requirements/units.txt`` +- To run all tests do the following: ``$ ansible-test units --python 3.5`` (you must run ``. hacking/env-setup`` prior to this) - module.fail_json(msg="Something fatal happened") +.. note:: Ansible uses pytest for unit testing. -There are also other useful functions in the module class, such as :func:`module.sha1(path)`. See -:file:`lib/ansible/module_utils/basic.py` in the source checkout for implementation details. +To run pytest against a single test module, you can do the following (provide the path to the test module appropriately): -Again, modules developed this way are best tested with the :file:`hacking/test-module` script in the git -source checkout. Because of the magic involved, this is really the only way the scripts -can function outside of Ansible. +``$ pytest -r a --cov=. --cov-report=html --fulltrace --color yes +test/units/modules/.../test/my_new_test_module.py`` -If submitting a module to Ansible's core code, which we encourage, use of -:class:`AnsibleModule` is required. +Going Further +============= -.. _developing_for_check_mode: +If you are starting new development or fixing a bug, create a new branch: -Supporting Check Mode -````````````````````` -.. versionadded:: 1.1 +``$ git checkout -b my-new-branch``. -Modules may optionally support `check mode `_. If the user runs Ansible in check mode, a module should try to predict and report whether changes will occur but not actually make any changes (modules that do not support check mode will also take no action, but just will not report what changes they might have made). +If you are planning on contributing +back to the main Ansible repository, fork the Ansible repository into +your own GitHub account and develop against the new non-devel branch +in your fork. When you believe you have a good working code change, +submit a pull request to the Ansible repository. -For your module to support check mode, you must pass ``supports_check_mode=True`` when instantiating the AnsibleModule object. The AnsibleModule.check_mode attribute will evaluate to True when check mode is enabled. For example: +If you want to submit a new module to the upstream Ansible repo, be sure +to run through sanity checks first. For example: -.. 
code-block:: python +``$ ansible-test sanity -v --docker --python 2.7 MODULE_NAME`` - module = AnsibleModule( - argument_spec = dict(...), - supports_check_mode=True - ) +Note that this example requires docker to be installed and running. If you'd rather not use a +container for this, you can choose to use ``--tox`` instead of ``--docker``. - if module.check_mode: - # Check if any changes would be made but don't actually make those changes - module.exit_json(changed=check_if_system_state_would_be_changed()) -Remember that, as module developer, you are responsible for ensuring that no -system state is altered when the user enables check mode. +Communication and development support +===================================== -If your module does not support check mode, when the user runs Ansible in check -mode, your module will simply be skipped. +Join the IRC channel ``#ansible-devel`` on freenode for discussions +surrounding Ansible development. +For questions and discussions pertaining to using the Ansible product, +use the ``#ansible`` channel. +Credit +====== +Thank you to Thomas Stringer (`@tstring `_) for contributing source +material for this topic. diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_modules_general_windows.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_modules_general_windows.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_modules_general_windows.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_modules_general_windows.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,261 @@ +Windows Ansible Module Development Walkthrough +============================================== + +In this section, we will walk through developing, testing, and debugging an +Ansible Windows module. + +Because Windows modules are written in Powershell and need to be run on a +Windows host, this guide differs from the usual development walkthrough guide. + +What's covered in this section: + +.. contents:: Topics + + +Windows environment setup +========================= + +TODO: Add in more information on how to use Vagrant to setup a Windows host. + + +Windows new module development +============================== + +When creating a new module there are a few things to keep in mind: + +- Module code is in Powershell (.ps1) files while the documentation is contained in Python (.py) files of the same name +- Avoid using ``Write-Host/Debug/Verbose/Error`` in the module and add what needs to be returned to the ``$result`` variable +- When trying an exception use ``Fail-Json -obj $result -message "exception message here"`` instead +- Most new modules require check mode and integration tests before they are merged into the main Ansible codebase +- Avoid using try/catch statements over a large code block, rather use them for individual calls so the error message can be more descriptive +- Try and catch specific exceptions when using try/catch statements +- Avoid using PSCustomObjects unless necessary +- Look for common functions in ``./lib/ansible/module_utils/powershell/`` and use the code there instead of duplicating work. 
These can be imported by adding the line ``#Requires -Module *`` where * is the filename to import, and will be automatically included with the module code sent to the Windows target when run via Ansible +- Ensure the code runs under Powershell v3 and higher on Windows Server 2008 and higher; if higher minimum Powershell or OS versions are required, ensure the documentation reflects this clearly +- Ansible runs modules under strictmode version 2.0. Be sure to test with that enabled by putting ``Set-StrictMode -Version 2.0`` at the top of your dev script +- Favour native Powershell cmdlets over executable calls if possible +- If adding an object to ``$result``, ensure any trailing slashes are removed or escaped, as ``ConvertTo-Json`` will fail to convert it +- Use the full cmdlet name instead of aliases, e.g. ``Remove-Item`` over ``rm`` +- Use named parameters with cmdlets, e.g. ``Remove-Item -Path C:\temp`` over ``Remove-Item C:\temp`` + +A very basic powershell module template can be found found below: + +.. code-block:: powershell + + #!powershell + # This file is part of Ansible + + # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) + + #Requires -Module Ansible.ModuleUtils.Legacy.psm1 + + $ErrorActionPreference = 'Stop' + + $params = Parse-Args -arguments $args -supports_check_mode $true + $check_mode = Get-AnsibleParam -obj $params -name "_ansible_check_mode" -type "bool" -default $false + $diff_mode = Get-AnsibleParam -obj $params -name "_ansible_diff" -type "bool" -default $false + + # these are your module parameters, there are various types which can be + # used to format your parameters. You can also set mandatory parameters + # with -failifempty, set defaults with -default and set choices with + # -validateset. + $string = Get-AnsibleParam -obj $params -name "string" -type "str" -failifempty $true + $bool = Get-AnsibleParam -obj $params -name "bool" -type "bool" -default $false + $int = Get-AnsibleParam -obj $params -name "int" -type "int" + $path = Get-AnsibleParam -obj $params -name "path" -type "path" + $list = Get-AnsibleParam -obj $params -name "list" -type "list" + $choices = Get-AnsibleParam -obj $params -name "choices" -type "str" -default "present" -validateset "absent","present" + + $result = @{ + changed = $false + } + + if ($diff_mode) { + $result.diff = @{} + } + + # code goes here + + # you can add/set new result objects with + $result.changed = $true + $result.new_result = "Hi" + + Exit-Json -obj $result + + +When in doubt, look at some of the core modules and see how things have been +implemented there. + +Sometimes there are multiple ways that Windows offers to complete a task; this +is the order to favour when writing modules: + +- Native Powershell cmdlets like ``Remove-Item -Path C:\temp -Recurse`` +- .NET classes like ``[System.IO.Path]::GetRandomFileName()`` +- WMI objects through the ``New-CimInstance`` cmdlet +- COM objects through ``New-Object -ComObject`` cmdlet +- Calls to native executables like ``Secedit.exe`` + + +Windows playbook module testing +=============================== + +To test a module you can do so with an Ansible playbook. + +- Create a playbook in any directory ``touch testmodule.yml`` +- Create an inventory file in the same directory ``touch hosts`` +- Populate the inventory file with the variables required to connect to a Windows host(s). 
+- Add the following to the new playbook file:: + + --- + - name: test out windows module + hosts: windows + tasks: + - name: test out module + win_module: + name: test name + +- Run the playbook ``ansible-playbook -i hosts testmodule.yml`` + +This can be pretty high level and is useful for seeing how Ansible runs with +the new module end to end: but there are better ways to test out the module as +shown below. + + +Windows debugging +================= + +Debugging a module currently can only be done on a Windows host. This is +extremely useful when developing a new module or looking at bug fixes. These +are some steps that need to be followed to set this up. + +- Copy the module script to the Windows server +- Copy ``./lib/ansible/module_utils/powershell/Ansible.ModuleUtils.Legacy.psm1`` to the same directory as the script above +- To stop the script from exiting the editor on a successful run, in ``Ansible.ModuleUtils.Legacy.psm1`` under the function ``Exit-Json``, replace the last two lines of the function with:: + + ConvertTo-Json -InputObject $obj -Depth 99 + +- To stop the script from exiting the editor on a failed run, in ``Ansible.ModuleUtils.Legacy.psm1`` under the function ``Fail-Json``, replace the last two lines of the function with:: + + Write-Error -Message (ConvertTo-Json -InputObject $obj -Depth 99) + +- Add the following to the start of the module script that was copied to the server:: + + ### start setup code + $complex_args = @{ + "_ansible_check_mode" = $false + "_ansible_diff" = $false + "path" = "C:\temp" + "state" = "present" + } + + Import-Module -Name .\Ansible.ModuleUtils.Legacy.psm1 + ### end setup code + +You can add more args to ``$complex_args`` as required by the module. The +module can now be run on the Windows host either directly through Powershell +or through an IDE. + +There are multiple IDEs that can be used to debug a Powershell script, two of +the most popular are + +- `Powershell ISE`_ +- `Visual Studio Code`_ + +.. _Powershell ISE: https://msdn.microsoft.com/en-us/powershell/scripting/core-powershell/ise/how-to-debug-scripts-in-windows-powershell-ise +.. _Visual Studio Code: https://blogs.technet.microsoft.com/heyscriptingguy/2017/02/06/debugging-powershell-script-in-visual-studio-code-part-1/ + +To be able to view the arguments as passed by Ansible to the module follow +these steps. + +- Prefix the Ansible command with :envvar:`ANSIBLE_KEEP_REMOTE_FILES=1` to get Ansible to keep the exec files on the server +- Log onto the Windows server using the same user Ansible executed the module as +- Navigate to ``%TEMP%\..``, there should be a folder starting with ``ansible-tmp-`` +- Inside this folder open up the powershell script for the module +- In this script there is a raw JSON script under ``$json_raw`` which contains the module arguments under ``module_args`` +- These args can be assigned manually to the ``$complex_args`` variable that is defined on your debug script + + +Windows unit testing +==================== + +Currently there is no mechanism to run unit tests for Powershell modules under Ansible CI. +There is work in the pipeline to introduce this in the future, stay tuned. + + +Windows integration testing +=========================== + +Integration tests for Ansible modules are typically written as Ansible roles. The test +roles are located in ``./test/integration/targets``. You must first set up your testing +environment, and configure a test inventory for Ansible to connect to. 
In this example we +will set up a test inventory to connect to two hosts and run the integration +tests for win_stat. + +- Create a copy of ``./test/integration/inventory.winrm.template`` and just call it ``inventory.winrm`` +- Fill in entries under ``[windows]`` and set the required vars that are needed to connect to the host +- To execute the integration tests, run ``ansible-test windows-integration win_stat``- you can replace ``win_stat`` with the role you wish to test + +This will execute all the tests currently defined for that role. You can set +the verbosity level using the ``-v`` argument just as you would with +ansible-playbook. + +When developing tests for a new module, it is recommended to test a scenario in +check mode and 2 times not in check mode. This ensures that check mode +does not make any changes but reports a change, as well as that the 2nd run is +idempotent and does not report changes. Following is an example of one way that this can be done: + +.. code-block:: yaml + + - name: remove a file (check mode) + win_file: + path: C:\temp + state: absent + register: remove_file_check + check_mode: yes + + - name: get result of remove a file (check mode) + win_command: powershell.exe "if (Test-Path -Path 'C:\temp') { 'true' } else { 'false' }" + register: remove_file_actual_check + + - name: assert remove a file (check mode) + assert: + that: + - remove_file_check|changed + - remove_file_actual_check.stdout == 'true\r\n' + + - name: remove a file + win_file: + path: C:\temp + state: absent + register: remove_file + + - name: get result of remove a file + win_command: powershell.exe "if (Test-Path -Path 'C:\temp') { 'true' } else { 'false' }" + register: remove_file_actual + + - name: assert remove a file + assert: + that: + - remove_file|changed + - remove_file_actual.stdout == 'false\r\n' + + - name: remove a file (idempotent) + win_file: + path: C:\temp + state: absent + register: remove_file_again + + - name: assert remove a file (idempotent) + assert: + that: + - not remove_file_again|changed + + +Windows communication and development support +============================================= + +Join the IRC channel ``#ansible-devel`` or ``#ansible-windows`` on freenode for +discussions surrounding Ansible development for Windows. + +For questions and discussions pertaining to using the Ansible product, +use the ``#ansible`` channel. diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_modules_in_groups.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_modules_in_groups.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_modules_in_groups.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_modules_in_groups.rst 2017-09-19 17:10:47.000000000 +0000 @@ -121,11 +121,11 @@ Now that your modules are integrated there are a few bits of housekeeping to be done -**Maintainers** +**Bot Meta** Update `Ansibullbot` so it knows who to notify if/when bugs or PRs are raised against your modules -`MAINTAINERS.txt `_. +`BOTMETA.yml `_. -If there are multiple people that can be notified, please list them. That avoids waiting on a single person who may be unavailable for any reason. Note that in `MAINTAINERS.txt` you can take ownership of an entire directory. +If there are multiple people that can be notified, please list them. That avoids waiting on a single person who may be unavailable for any reason. Note that in `BOTMETA.yml` you can take ownership of an entire directory. 
**Review Module web docs** diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_modules_python3.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_modules_python3.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_modules_python3.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_modules_python3.rst 1970-01-01 00:00:00.000000000 +0000 @@ -1,363 +0,0 @@ -=============================== -Ansible and Porting to Python 3 -=============================== - -Ansible can be divided into three overlapping pieces for the purposes of -porting: - -1. Controller-side code. This is the code which runs on the machine where you - invoke /usr/bin/ansible -2. Modules. This is the code which Ansible transmits over the wire and - invokes on the managed machine. -3. module_utils code. This is code whose primary purpose is to be used by the - modules to perform tasks. However, some controller-side code might use - generic functions from here. - -Much of the knowledge of porting code will be usable on all three of these -pieces but there are some special considerations for some of it as well. - --------------------------------------------- -Minimum Version of Python-3.x and Python-2.x --------------------------------------------- - -In controller side code, we support Python-3.5 or greater and Python-2.6 or -greater. - -For modules (and by extension, module_utils) we support -Python-3.5 and Python-2.4. Python-3.5 was chosen as a minimum because it is the earliest Python-3 version -adopted as the default Python by a Long Term Support (LTS) Linux distribution (in this case, Ubuntu-16.04). -Previous LTS Linux distributions shipped with a Python-2 version which users can rely upon instead of the -Python-3 version. - -For Python-2, the default is for modules to run on Python-2.4. This allows -users with older distributions that are stuck on Python-2.4 to manage their -machines. Modules are allowed to drop support for Python-2.4 when one of -their dependent libraries requires a higher version of Python. This is not an -invitation to add unnecessary dependent libraries in order to force your -module to be usable only with a newer version of Python.; instead it is an -acknowledgment that some libraries (for instance, boto3 and docker-py) will -only function with a newer version of Python. - -.. note:: Python-2.4 Support: - - The only long term supported distro that we know of with Python-2.4 support is - RHEL5 (and its rebuilds like CentOS5), which is supported until April of - 2017. For Ansible, that means Ansible-2.3 will be the last major release - that supports Python-2.4 for modules. Ansible-2.4 will require - Python-2.6 or greater for modules. - ------------------------------------ -Porting Controller Code to Python 3 ------------------------------------ - -Most of the general tips for porting code to be used on both Python-2 and -Python-3 applies to porting controller code. The best place to start learning -to port code is `Lennart Regebro's book: Porting to Python 3 `_. - -The book describes several strategies for porting to Python 3. The one we're -using is `to support Python-2 and Python-3 from a single code base -`_ - -Controller String Strategy -========================== - -Background ----------- - -One of the most essential things to decide upon for porting code to Python-3 -is what string model to use. Strings can be an array of bytes (like in C) or -they can be an array of text. 
Text is what we think of as letters, digits, -numbers, other printable symbols, and a small number of unprintable "symbols" -(control codes). - -In Python-2, the two types for these (:class:`str` for bytes and -:class:`unicode` for text) are often used interchangeably. When dealing only -with ASCII characters, the strings can be combined, compared, and converted -from one type to another automatically. When non-ASCII characters are -introduced, Python starts throwing exceptions due to not knowing what encoding -the non-ASCII characters should be in. - -Python-3 changes this behavior by making the separation between bytes (:class:`bytes`) -and text (:class:`str`) more strict. Python will throw an exception when -trying to combine and compare the two types. The programmer has to explicitly -convert from one type to the other to mix values from each. - -This change makes it immediately apparent to the programmer when code is -mixing the types inappropriately, rather than working until one of their users -causes an exception by entering non-ASCII input. However, it forces the -programmer to proactively define a strategy for working with strings in their -program so that they don't mix text and byte strings unintentionally. - -Unicode Sandwich ----------------- - -In controller-side code we use a strategy known as the Unicode Sandwich (named -after Python-2's :class:`unicode` text type). For Unicode Sandwich we know that -at the border of our code and the outside world (for example, file and network IO, -environment variables, and some library calls) we are going to receive bytes. -We need to transform these bytes into text and use that throughout the -internal portions of our code. When we have to send those strings back out to -the outside world we first convert the text back into bytes. -To visualize this, imagine a 'sandwich' consisting of a top and bottom layer -of bytes, a layer of conversion between, and all text type in the center. - -Common Borders --------------- - -This is a partial list of places where we have to convert to and from bytes. -It's not exhaustive but gives you an idea of where to watch for problems. - -Reading and writing to files -~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -In Python-2, reading from files yields bytes. In Python-3, it can yield text. -To make code that's portable to both we don't make use of Python-3's ability -to yield text but instead do the conversion explicitly ourselves. For example:: - - from ansible.module_utils._text import to_text - - with open('filename-with-utf8-data.txt', 'rb') as my_file: - b_data = my_file.read() - try: - data = to_text(b_data, errors='surrogate_or_strict') - except UnicodeError: - # Handle the exception gracefully -- usually by displaying a good - # user-centric error message that can be traced back to this piece - # of code. - pass - -.. note:: Much of Ansible assumes that all encoded text is UTF-8. At some - point, if there is demand for other encodings we may change that, but for - now it is safe to assume that bytes are UTF-8. - -Writing to files is the opposite process:: - - from ansible.module_utils._text import to_bytes - - with open('filename.txt', 'wb') as my_file: - my_file.write(to_bytes(some_text_string)) - -Note that we don't have to catch :exc:`UnicodeError` here because we're -transforming to UTF-8 and all text strings in Python can be transformed back -to UTF-8. 
- -Filesystem Interaction -~~~~~~~~~~~~~~~~~~~~~~ - -Dealing with filenames often involves dropping back to bytes because on UNIX-like -systems filenames are bytes. On Python-2, if we pass a text string to these -functions, the text string will be converted to a byte string inside of the -function and a traceback will occur if non-ASCII characters are present. In -Python-3, a traceback will only occur if the text string can't be decoded in -the current locale, but it's still good to be explicit and have code which -works on both versions:: - - import os.path - - from ansible.module_utils._text import to_bytes - - filename = u'/var/tmp/ãらã¨ã¿.txt' - f = open(to_bytes(filename), 'wb') - mtime = os.path.getmtime(to_bytes(filename)) - b_filename = os.path.expandvars(to_bytes(filename)) - if os.path.exists(to_bytes(filename)): - pass - -When you are only manipulating a filename as a string without talking to the -filesystem (or a C library which talks to the filesystem) you can often get -away without converting to bytes:: - - import os.path - - os.path.join(u'/var/tmp/café', u'ãらã¨ã¿') - os.path.split(u'/var/tmp/café/ãらã¨ã¿') - -On the other hand, if the code needs to manipulate the filename and also talk -to the filesystem, it can be more convenient to transform to bytes right away -and manipulate in bytes. - -.. warning:: Make sure all variables passed to a function are the same type. - If you're working with something like :func:`os.path.join` which takes - multiple strings and uses them in combination, you need to make sure that - all the types are the same (either all bytes or all text). Mixing - bytes and text will cause tracebacks. - -Interacting with Other Programs -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Interacting with other programs goes through the operating system and -C libraries and operates on things that the UNIX kernel defines. These -interfaces are all byte-oriented so the Python interface is byte oriented as -well. On both Python-2 and Python-3, byte strings should be given to Python's -subprocess library and byte strings should be expected back from it. - -One of the main places in Ansible's controller code that we interact with -other programs is the connection plugins' ``exec_command`` methods. These -methods transform any text strings they receive in the command (and arugments -to the command) to execute into bytes and return stdout and stderr as byte strings -Higher level functions (like action plugins' ``_low_level_execute_command``) -transform the output into text strings. - -Tips, tricks, and idioms to adopt -================================= - -Forwards Compatibility Boilerplate ----------------------------------- - -Use the following boilerplate code at the top of all controller-side modules -to make certain constructs act the same way on Python-2 and Python-3:: - - # Make coding more python3-ish - from __future__ import (absolute_import, division, print_function) - __metaclass__ = type - -``__metaclass__ = type`` makes all classes defined in the file into new-style -classes without explicitly inheriting from :class:`object`. - -The ``__future__`` imports do the following: - -:absolute_import: Makes imports look in :attr:`sys.path` for the modules being - imported, skipping the directory in which the module doing the importing - lives. If the code wants to use the directory in which the module doing - the importing, there's a new dot notation to do so. -:division: Makes division of integers always return a float. 
If you need to - find the quotient use ``x // y`` instead of ``x / y``. -:print_function: Changes :func:`print` from a keyword into a function. - -.. seealso:: - * `PEP 0328: Absolute Imports `_ - * `PEP 0238: Division `_ - * `PEP 3105: Print function `_ - -Prefix byte strings with "b\_" ------------------------------- - -Since mixing text and bytes types leads to tracebacks we want to be clear -about what variables hold text and what variables hold bytes. We do this by -prefixing any variable holding bytes with ``b_``. For instance:: - - filename = u'/var/tmp/café.txt' - b_filename = to_bytes(filename) - with open(b_filename) as f: - data = f.read() - -We do not prefix the text strings instead because we only operate -on byte strings at the borders, so there are fewer variables that need bytes -than text. - ---------------------------- -Porting Modules to Python 3 ---------------------------- - -Ansible modules are not the usual Python-3 porting exercise. There are two -factors that make it harder to port them than most code: - -1. Many modules need to run on Python-2.4 in addition to Python-3. -2. A lot of mocking has to go into unit testing a Python-3 module, so it's - harder to test that your porting has fixed everything or to to make sure that - later commits haven't regressed. - -Module String Strategy -====================== - -There are a large number of modules in Ansible. Most of those are maintained -by the Ansible community at large, not by a centralized team. To make life -easier on them, it was decided not to break backwards compatibility by -mandating that all strings inside of modules are text and converting between -text and bytes at the borders; instead, we're using a native string strategy -for now. - -Tips, tricks, and idioms to adopt -================================= - -Exceptions ----------- - -In code which already needs Python-2.6+ (for instance, because a library it -depends on only runs on Python >= 2.6) it is okay to port directly to the new -exception-catching syntax:: - - try: - a = 2/0 - except ValueError as e: - module.fail_json(msg="Tried to divide by zero!") - -For modules which also run on Python-2.4, we have to use an uglier -construction to make this work under both Python-2.4 and Python-3:: - - from ansible.module_utils.pycompat24 import get_exception - - try: - a = 2/0 - except ValueError: - e = get_exception() - module.fail_json(msg="Tried to divide by zero!") - -Octal numbers -------------- - -In Python-2.4, octal literals are specified as ``0755``. In Python-3, that is -invalid and octals must be specified as ``0o755``. To bridge this gap, -modules should create their octals like this:: - - # Can't use 0755 on Python-3 and can't use 0o755 on Python-2.4 - EXECUTABLE_PERMS = int('0755', 8) - -Bundled six ------------ - -The third-party python-six library exists to help projects create code that -runs on both Python-2 and Python-3. Ansible includes version 1.4.1 in -module_utils so that other modules can use it without requiring that it is -installed on the remote system. To make use of it, import it like this:: - - from ansible.module_utils import six - -.. note:: Why version 1.4.1? - - six-1.4.1 is the last version of python-six to support Python-2.4. As - long as Ansible modules need to run on Python-2.4 we won't be able to - update the bundled copy of six. - -Compile Test ------------- - -We have travis compiling all modules with various versions of Python to check -that the modules conform to the syntax at those versions. 
When you've -ported a module so that its syntax works with Python-3, we need to modify -.travis.yml so that the module is included in the syntax check. Here's the -relevant section of .travis.yml:: - - env: - global: - - PY3_EXCLUDE_LIST="cloud/amazon/cloudformation.py - cloud/amazon/ec2_ami.py - [...] - utilities/logic/wait_for.py" - -The :envvar:`PY3_EXCLUDE_LIST` environment variable is a blacklist of modules -which should not be tested (because we know that they are older modules which -have not yet been ported to pass the Python-3 syntax checks. To get another -old module to compile with Python-3, remove the entry for it from the list. -The goal is to have the LIST be empty. - -------------------------------------- -Porting module_utils code to Python 3 -------------------------------------- - -module_utils code is largely like module code. However, some pieces of it are -used by the controller as well. Because of this, it needs to be usable with -the controller's assumptions. This is most notable in the string strategy. - -Module_utils String Strategy -============================ - -Module_utils **must** use the Native String Strategy. Functions in -module_utils receive either text strings or byte strings and may emit either -the same type as they were given or the native string for the Python version -they are run on depending on which makes the most sense for that function. -Functions which return strings **must** document whether they return text, -byte, or native strings. Module-utils functions are therefore often very -defensive in nature, converting from potential text or bytes at the -beginning of a function and converting to the native string type at the end. - diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_modules.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_modules.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_modules.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_modules.rst 2017-09-19 17:10:47.000000000 +0000 @@ -31,11 +31,11 @@ 1. Does a similar module already exist? -There are a lot of existing modules available. You should check out the list of existing modules at :doc:`../modules` +There are a lot of existing modules available, you should check out the list of existing modules at :doc:`../modules` -2. Has someone already worked on a similar pull request? +2. Has someone already worked on a similar Pull Request? -It's possible that someone has already started developing a similar PR. There are a few ways to find open module pull requests: +It's possible that someone has already started developing a similar PR. There are a few ways to find open module Pull Requests: * `GitHub new module PRs `_ * `All updates to modules `_ @@ -63,6 +63,8 @@ :doc:`developing_modules_general` A general overview of how to develop, debug, and test modules. +:doc:`developing_modules_general_windows` + A general overview of how to develop, debug and test Windows modules. :doc:`developing_modules_documenting` How to include in-line documentation in your module. 
:doc:`developing_modules_best_practices` diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_module_utilities.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_module_utilities.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_module_utilities.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_module_utilities.rst 2017-09-19 17:10:47.000000000 +0000 @@ -9,8 +9,10 @@ The following is a list of module_utils files and a general description. The module utility source code lives in the `./lib/module_utils` directory under your main Ansible path - for more details on any specific module utility, please see the source code. - a10.py - Utilities used by the a10_server module to manage A10 Networks devices. +- aireos.py - Definitions and helper functions for modules that manage Cisco WLC devices. - api.py - Adds shared support for generic API modules. - aos.py - Module support utilities for managing Apstra AOS Server. +- aruba.py - Helper functions for modules working with Aruba networking devices. - asa.py - Module support utilities for managing Cisco ASA network devices. - azure_rm_common.py - Definitions and utilities for Microsoft Azure Resource Manager template deployments. - basic.py - General definitions and helper utilities for Ansible modules. @@ -27,21 +29,24 @@ - ismount.py - Contains single helper function that fixes os.path.ismount - junos.py - Definitions and helper functions for modules that manage Junos networking devices - known_hosts.py - utilities for working with known_hosts file +- manageiq.py - Functions and utilities for modules that work with ManageIQ platform and its resources. - mysql.py - Allows modules to connect to a MySQL instance - netapp.py - Functions and utilities for modules that work with the NetApp storage platforms. - netcfg.py - Configuration utility functions for use by networking modules - netcmd.py - Defines commands and comparison operators for use in networking modules +- netscaler.py - Utilities specifically for the netscaler network modules. - network.py - Functions for running commands on networking devices - nxos.py - Contains definitions and helper functions specific to Cisco NXOS networking devices - openstack.py - Utilities for modules that work with Openstack instances. - openswitch.py - Definitions and helper functions for modules that manage OpenSwitch devices - powershell.ps1 - Utilities for working with Microsoft Windows clients -- pycompat24.py - Exception workaround for Python 2.4 +- pure.py - Functions and utilities for modules that work with the Pure Storage storage platforms. +- pycompat24.py - Exception workaround for Python 2.4. - rax.py - Definitions and helper functions for modules that work with Rackspace resources. - redhat.py - Functions for modules that manage Red Hat Network registration and subscriptions - service.py - Contains utilities to enable modules to work with Linux services (placeholder, not in use). - shell.py - Functions to allow modules to create shells and work with shell commands -- six.py - Module utils for working with the Six Python 2 and 3 compatibility library +- six/__init__.py - Bundled copy of the `Six Python library `_ to aid in writing code compatible with both Python 2 and Python 3. 
- splitter.py - String splitting and manipulation utilities for working with Jinja2 templates - urls.py - Utilities for working with http and https requests - vca.py - Contains utilities for modules that work with VMware vCloud Air diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_plugins.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_plugins.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_plugins.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_plugins.rst 2017-09-19 17:10:47.000000000 +0000 @@ -219,7 +219,7 @@ data into ansible runs that did not come from an inventory, playbook, or command line. Note that variables can also be returned from inventory, so in most cases, you won't need to write or understand vars_plugins. -More documentation on writing vars plugins is pending, though you can jump into `lib/ansible/inventory/vars_plugins `_ and figure +More documentation on writing vars plugins is pending, though you can jump into `lib/ansible/plugins `_ and figure things out pretty easily. If you find yourself wanting to write a vars_plugin, it's more likely you should write an inventory script instead. diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_program_flow_modules.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_program_flow_modules.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_program_flow_modules.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_program_flow_modules.rst 2017-09-19 17:10:47.000000000 +0000 @@ -330,8 +330,8 @@ .. note:: Ansible wraps the zipfile in the Python script for two reasons: - * for compatibility with Python-2.4 and Python-2.6 which have less - featureful versions of Python's ``-m`` command line switch. + * for compatibility with Python-2.6 which has a less + functional version of Python's ``-m`` command line switch. * so that pipelining will function properly. Pipelining needs to pipe the Python module into the Python interpreter on the remote node. Python understands scripts on stdin but does not understand zip files. diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_releases.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_releases.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_releases.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_releases.rst 1970-01-01 00:00:00.000000000 +0000 @@ -1,57 +0,0 @@ -Releases -======== - -.. contents:: Topics - :local: - -.. _support_life: - -Support for older releases -`````````````````````````` - -Ansible supports the two most recent major, stable releases. Security- and bug-related fixes may be implemented in older versions, but this -support is not guaranteed. - -If you are on a release older than the last two major, stable releases, please see our `Porting Guide `_. - -.. _schedule: - -Release schedule -```````````````` -Ansible is on a 'flexible' 4 month release schedule. Sometimes the release cycle can be extended if there is a major change that requires more time (for example, a core rewrite). -Recently the main Ansible repo `merged `_ the separated ansible-modules-core and ansible-modules-extras repos, as such modules get released at the same time as the main Ansible repo. - -The major features and bugs fixed in a release should be reflected in the `CHANGELOG.md `_. Minor features and bug fixes will be shown in the commit history. 
For example, `issue #19057 `_ is reflected only in the commit history. -When a fix orfeature gets added to the `devel` branch it will be part of the next release. Some bugfixes can be backported to previous releases and will be part of a minor point release if such a release is deemed necessary. - -Sometimes a release candidate can be extended by a few days if a bug fix makes a change that can have far-reaching consequences, so users have enough time to find any new issues that may stem from this. - -.. _methods: - -Release methods -```````````````` - -Ansible normally goes through a 'release candidate', issuing an RC1 for a release. If no major bugs are discovered in the release candidate after 5 business days, we'll get a final release. Otherwise, fixes will be applied and an RC2 will be provided for testing. If no bugs are discovered in RC2 after 2 days, the final release will be made, iterating this last step and incrementing the candidate number as we find major bugs. - - -.. _freezing: - -Release feature freeze -`````````````````````` - -During the release candidate process, the focus will be on bugfixes that affect the RC, new features will be delayed while we try to produce a final version. Some bugfixes that are minor or don't affect the RC will also be postponed until after the release is finalized. - -.. seealso:: - - :doc:`developing_api` - Python API to Playbooks and Ad Hoc Task Execution - :doc:`developing_modules` - How to develop modules - :doc:`developing_plugins` - How to develop plugins - `Ansible Tower `_ - REST API endpoint and GUI for Ansible, syncs with dynamic inventory - `Development Mailing List `_ - Mailing list for development topics - `irc.freenode.net `_ - #ansible IRC chat channel diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_test_pr.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_test_pr.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/developing_test_pr.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/developing_test_pr.rst 1970-01-01 00:00:00.000000000 +0000 @@ -1,200 +0,0 @@ -Helping Testing PRs -``````````````````` - -If you're a developer, one of the most valuable things you can do is look at the github issues list and help fix bugs. We almost always prioritize bug fixing over -feature development, so clearing bugs out of the way is one of the best things you can do. - -Even if you're not a developer, helping test pull requests for bug fixes and features is still immensely valuable. - -This goes for testing new features as well as testing bugfixes. - -In many cases, code should add tests that prove it works but that's not ALWAYS possible and tests are not always comprehensive, especially when a user doesn't have access -to a wide variety of platforms, or that is using an API or web service. - -In these cases, live testing against real equipment can be more valuable than automation that runs against simulated interfaces. -In any case, things should always be tested manually the first time too. - -Thankfully helping test ansible is pretty straightforward, assuming you are already used to how ansible works. - -Get Started with A Source Checkout -++++++++++++++++++++++++++++++++++ - -You can do this by checking out ansible, making a test branch off the main one, merging a GitHub issue, testing, -and then commenting on that particular issue on GitHub. Here's how: - -.. 
note:: - Testing source code from GitHub pull requests sent to us does have some inherent risk, as the source code - sent may have mistakes or malicious code that could have a negative impact on your system. We recommend - doing all testing on a virtual machine, whether a cloud instance, or locally. Some users like Vagrant - or Docker for this, but they are optional. It is also useful to have virtual machines of different Linux or - other flavors, since some features (apt vs. yum, for example) are specific to those OS versions. - -First, you will need to configure your testing environment with the necessary tools required to run our test -suites. You will need at least:: - - git - python-nosetests (sometimes named python-nose) - python-passlib - python-mock - -If you want to run the full integration test suite you'll also need the following packages installed:: - - svn - hg - python-pip - gem - -Second, if you haven't already, clone the Ansible source code from GitHub:: - - git clone https://github.com/ansible/ansible.git --recursive - cd ansible/ - -.. note:: - If you have previously forked the repository on GitHub, you could also clone it from there. - -.. note:: - If updating your repo for testing something module related, use "git rebase origin/devel" and then "git submodule update" to fetch - the latest development versions of modules. Skipping the "git submodule update" step will result in versions that will be stale. - -Activating The Source Checkout -++++++++++++++++++++++++++++++ - -The Ansible source includes a script that allows you to use Ansible directly from source without requiring a -full installation, that is frequently used by developers on Ansible. - -Simply source it (to use the Linux/Unix terminology) to begin using it immediately:: - - source ./hacking/env-setup - -This script modifies the PYTHONPATH enviromnent variables (along with a few other things), which will be temporarily -set as long as your shell session is open. - -If you'd like your testing environment to always use the latest source, you could call the command from startup scripts (for example, -`.bash_profile`). - -Finding A Pull Request and Checking It Out On A Branch -++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -Next, find the pull request you'd like to test and make note of the line at the top which describes the source -and destination repositories. It will look something like this:: - - Someuser wants to merge 1 commit into ansible:devel from someuser:feature_branch_name - -.. note:: - It is important that the PR request target be ansible:devel, as we do not accept pull requests into any other branch. Dot releases are cherry-picked manually by ansible staff. - -The username and branch at the end are the important parts, which will be turned into git commands as follows:: - - git checkout -b testing_PRXXXX devel - git pull https://github.com/someuser/ansible.git feature_branch_name - -The first command creates and switches to a new branch named testing_PRXXXX, where the XXXX is the actual issue number associated with the pull request (for example, 1234). This branch is based on the devel branch. The second command pulls the new code from the users feature branch into the newly created branch. - -.. note:: - If the GitHub user interface shows that the pull request will not merge cleanly, we do not recommend proceeding if you are not somewhat familiar with git and coding, as you will have to resolve a merge conflict. This is the responsibility of the original pull request contributor. 
- -.. note:: - Some users do not create feature branches, which can cause problems when they have multiple, un-related commits in their version of `devel`. If the source looks like `someuser:devel`, make sure there is only one commit listed on the pull request. - -Finding a Pull Request for Ansible Modules -++++++++++++++++++++++++++++++++++++++++++ -Ansible modules are in separate repositories, which are managed as Git submodules. Here's a step by step process for checking out a PR for an Ansible extras module, for instance: - -1. git clone https://github.com/ansible/ansible.git -2. cd ansible -3. git submodule init -4. git submodule update --recursive [ fetches the submodules ] -5. cd lib/ansible/modules/extras -6. git fetch origin pull/1234/head:pr/1234 [ fetches the specific PR ] -7. git checkout pr/1234 [ do your testing here ] -8. cd /path/to/ansible/clone -9. git submodule update --recursive - -For Those About To Test, We Salute You -++++++++++++++++++++++++++++++++++++++ - -At this point, you should be ready to begin testing! - -If the PR is a bug-fix pull request, the first things to do are to run the suite of unit and integration tests, to ensure -the pull request does not break current functionality: - -.. code-block:: shell-session - - # Unit Tests - make tests - - # Integration Tests - cd test/integration - make - -.. note:: - Ansible does provide integration tests for cloud-based modules as well, however we do not recommend using them for some users - due to the associated costs from the cloud providers. As such, typically it's better to run specific parts of the integration battery - and skip these tests. - -Integration tests aren't the end all beat all - in many cases what is fixed might not *HAVE* a test, so determining if it works means -checking the functionality of the system and making sure it does what it said it would do. - -Pull requests for bug-fixes should reference the bug issue number they are fixing. - -We encourage users to provide playbook examples for bugs that show how to reproduce the error, and these playbooks should be used to verify the bugfix does resolve -the issue if available. You may wish to also do your own review to poke the corners of the change. - -Since some reproducers can be quite involved, you might wish to create a testing directory with the issue # as a sub- -directory to keep things organized: - -.. code-block:: shell-session - - mkdir -p testing/XXXX # where XXXX is again the issue # for the original issue or PR - cd testing/XXXX - # create files or git clone example playbook repo - -While it should go without saying, be sure to read any playbooks before you run them. VMs help with running untrusted content greatly, -though a playbook could still do something to your computing resources that you'd rather not like. - -Once the files are in place, you can run the provided playbook (if there is one) to test the functionality: - -.. code-block:: shell-session - - ansible-playbook -vvv playbook_name.yml - -If there's no playbook, you may have to copy and paste playbook snippets or run an ad-hoc command that was pasted in. - -Our issue template also included sections for "Expected Output" and "Actual Output", which should be used to gauge the output -from the provided examples. - -If the pull request resolves the issue, please leave a comment on the pull request, showing the following information: - - * "Works for me!" - * The output from `ansible --version`. - -In some cases, you may wish to share playbook output from the test run as well. 
- -Example! - - | Works for me! Tested on `Ansible 1.7.1`. I verified this on CentOS 6.5 and also Ubuntu 14.04. - -If the PR does not resolve the issue, or if you see any failures from the unit/integration tests, just include that output instead: - - | This doesn't work for me. - | - | When I ran this Ubuntu 16.04 it failed with the following: - | - | \``` - | BLARG - | StrackTrace - | RRRARRGGG - | \``` - -When you are done testing a feature branch, you can remove it with the following command: - -.. code-block:: shell-session - - $ git branch -D someuser-feature_branch_name - -We understand some users may be inexperienced with git, or other aspects of -the above procedure, so feel free to stop by ansible-devel list for questions -and we'd be happy to help answer them. - - - diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/index.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/index.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/index.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/index.rst 2017-09-19 17:10:47.000000000 +0000 @@ -18,6 +18,7 @@ overview_architecture developing_modules developing_modules_general + developing_modules_general_windows developing_modules_documenting developing_modules_best_practices developing_modules_checklist @@ -33,6 +34,6 @@ developing_rebasing testing repomerge - developing_releases + ../release_and_maintenance ../committer_guidelines ./style_guide/index diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/basic_rules.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/basic_rules.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/basic_rules.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/basic_rules.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,83 @@ +Basic Rules +`````````````````` +Use Standard American English +--------------------------------------- +Ansible has customers/users all around the globe, but the headquarters is in Durham, NC, in the US. Use Standard American English rather than other variations of the English language. + +Write for a Global Audience +------------------------------------ +The idea behind global writing is that everything you say should be understandable by those of many different backgrounds and cultures. References, therefore, should be as universal as possible. Avoid idioms and regionalism and maintain a neutral tone that cannot be misinterpreted. Avoid attempts at humor. + +Follow Naming Conventions +--------------------------------------- +Always follow naming conventions and trademarks. If you aren't sure how a product should be properly referred to, ask the Engineering Product Manager of that product line (ansible-core or Tower) for information. + +Important Information First +---------------------------------------------- +Important information stated at the beginning of a sentence makes it easier to understand. + + Unclear: + The unwise walking about upon the area near the cliff edge may result in a dangerous fall and therefore it is recommended that one remains a safe distance to maintain personal safety. + + Clearer: + Danger! Stay away from cliff. + +Sentence Structure +----------------------------------------------- +Good sentence structure helps convey information. Try to keep the most important information towards the beginning of the sentence. + + Bad: + Furthermore, large volumes of water are also required for the process of extraction. 
+ + Better: + Extraction also requires large volumes of water. + +Avoid padding +------------------------------- + +When reading a piece of technical writing, the audience does not benefit from elaborate prose. They just need information on how to perform a task. Avoid using padding, or filler. Don't use phrases such as, kind of, sort of, and essentially. + +Avoid redundant prepositional phrases +-------------------------------------------------------------------- +Prepositional phrases, the combination of a preposition with a noun phrase, are among the worst offenders in making text long and tiresome to read. Often, it is possible to replace an entire phrase with a single word. + + Use now instead of at this point in time. + Use suddenly instead of all of the sudden. + +Avoid verbosity +------------------------------------ +Write short, succinct sentences. Never say, "...as has been said before," "..each and every," "...point in time," etc. Avoid "...in order to," especially at the beginning of sentences. Every word must contribute meaning to the sentence. Technical writing is information delivery. + +Avoid pomposity +--------------------------------------- +While it is good to have a wide vocabulary, technical writing is not the place for showing off linguistic abilities. Technical writing is about producing clear, plain instructions for a specific audience. + +Action verbs, menus, and commands +---------------------------------------------------------------- +We interact with computers in a variety of ways. You can select anything on an application user interface by selecting it using a keyboard or mouse. It is important to use action verbs and software terminology correctly. + +The most frequent verbs used in software are: + +- Click +- Double-click +- Select +- Type +- Press + +Use of an action verb in a sentence (**bolded** words): + +1. In the dialog box, click **Open**. +2. **Type** a name in the text box. +3. On the keyboard press **Enter**. + +Use of menu actions and commands in a sentence: + +1. On the **File** menu, click **Open**. +2. **Type** a name in the **User Name** field. +3. In the **Open** dialog box, click **Save**. +4. On the computer keyboard, press **Enter**. +5. On the toolbar, click the **Open File** icon. + +Make users aware of where they are in the application. If there is more than one method to perform an action, use the most common method. Define "what, where, and how" in each step of the task or procedure. Describe menu items for the current task left to right, top-down. + + diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/conf.py ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/conf.py --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/conf.py 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/conf.py 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,369 @@ +# -*- coding: utf-8 -*- +# +# Ansible Style Guide documentation build configuration file, created by +# sphinx-quickstart on Mon May 11 12:41:35 2015. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. 
If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# sys.path.insert(0, os.path.abspath('.')) + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.doctest', + 'sphinx.ext.intersphinx', + 'sphinx.ext.todo', + 'sphinx.ext.coverage', + 'sphinx.ext.ifconfig', +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. +project = u'Ansible Style Guide' +copyright = u'2015, Sandra A Wills' +author = u'Sandra A Wills' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = '1' +# The full version, including alpha/beta/rc tags. +release = '1' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = [] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# html_theme = 'alabaster' +html_theme = 'srtd' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +# html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. 
+# html_theme_path = [] +html_theme_path = ["_themes"] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None +html_title = 'Ansible Style Guide' + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None +html_short_title = 'Style Guide' + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'AnsibleStyleGuidedoc' + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. 
+ # 'preamble': '', + + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'AnsibleStyleGuide.tex', u'Ansible Style Guide Documentation', + u'Sandra A Wills', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'ansiblestyleguide', u'Ansible Style Guide Documentation', + [author], 1) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'AnsibleStyleGuide', u'Ansible Style Guide Documentation', + author, 'AnsibleStyleGuide', 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# -- Options for Epub output ---------------------------------------------- + +# Bibliographic Dublin Core info. +epub_title = project +epub_author = author +epub_publisher = author +epub_copyright = copyright + +# The basename for the epub file. It defaults to the project name. +# epub_basename = project + +# The HTML theme for the epub output. Since the default themes are not optimized +# for small screen space, using the same theme for HTML and epub output is +# usually not wise. This defaults to 'epub', a theme designed to save visual +# space. +# epub_theme = 'epub' + +# The language of the text. It defaults to the language option +# or 'en' if the language is not set. +# epub_language = '' + +# The scheme of the identifier. Typical schemes are ISBN or URL. +# epub_scheme = '' + +# The unique identifier of the text. This can be a ISBN number +# or the project homepage. +# epub_identifier = '' + +# A unique identification for the text. +# epub_uid = '' + +# A tuple containing the cover image and cover page html template filenames. +# epub_cover = () + +# A sequence of (type, uri, title) tuples for the guide element of content.opf. +# epub_guide = () + +# HTML files that should be inserted before the pages created by sphinx. +# The format is a list of tuples containing the path and title. +# epub_pre_files = [] + +# HTML files shat should be inserted after the pages created by sphinx. 
+# The format is a list of tuples containing the path and title. +# epub_post_files = [] + +# A list of files that should not be packed into the epub file. +epub_exclude_files = ['search.html'] + +# The depth of the table of contents in toc.ncx. +# epub_tocdepth = 3 + +# Allow duplicate toc entries. +# epub_tocdup = True + +# Choose between 'default' and 'includehidden'. +# epub_tocscope = 'default' + +# Fix unsupported image types using the Pillow. +# epub_fix_images = False + +# Scale large images. +# epub_max_image_width = 0 + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# epub_show_urls = 'inline' + +# If false, no index is generated. +# epub_use_index = True + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = {'https://docs.python.org/': None} diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/grammar_punctuation.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/grammar_punctuation.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/grammar_punctuation.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/grammar_punctuation.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,201 @@ + +Grammar and Punctuation +`````````````````````````````````````` + +Common Styles and Usage, and Common Mistakes +---------------------------------------------------- + +Ansible +~~~~~~~~~ +* Write "Ansible." Not "Ansible, Inc." or "AnsibleWorks." The only exceptions to this rule are when we're writing legal or financial statements. + +* Never use the logotype by itself in body text. Always keep the same font you are using for the rest of the sentence. + +* A company is singular in the US. In other words, Ansible is an "it," not a "they." + + +Capitalization +~~~~~~~~~~~~~~ +If it's not a real product, service, or department at Ansible, don't capitalize it. Not even if it seems important. Capitalize only the first letter of the first word in headlines. + +Colon +~~~~~~~~~~~~~~~~~ +A colon is generally used before a list or series: +- The Triangle Area consists of three cities: Raleigh, Durham, and Chapel Hill. + +But not if the list is a complement or object of an element in the sentence: +- Before going on vacation, be sure to (1) set the alarm, (2) cancel the newspaper, and (3) ask a neighbor to collect your mail. + +Use a colon after "as follows" and "the following" if the related list comes immediately after: +The steps for changing directories are as follows: + + 1. Open a terminal. + 2. Type cd... + +Use a colon to introduce a bullet list (or dash, or icon/symbol of your choice): + + In the Properties dialog box, you'll find the following entries: + + - Connection name + - Count + - Cost per item + + +Commas +~~~~~~~~~~~ +Use serial commas, the comma before the "and" in a series of three or more items: + +- "Item 1, item 2, and item 3." + + +It's easier to read that way and helps avoid confusion. The primary exception to this that you will see is in PR, where it is traditional not to use serial commas because it is often the style of journalists. + +Commas are always important, considering the vast difference in meanings of the following two statements. + +- Let's eat, Grandma +- Let's eat Grandma. + +Correct punctuation could save Grandma's life. + +If that does not convince you, maybe this will: + +.. image:: images/commas-matter.jpg + + +Contractions +~~~~~~~~~~~~~ +Do not use contractions in Ansible documents.
+ +Em dashes +~~~~~~~~~~ +When possible, use em-dashes with no space on either side. When full em-dashes aren't available, use double-dashes with no spaces on either side--like this. + +A pair of em dashes can be used in place of commas to enhance readability. Note, however, that dashes are always more emphatic than commas. + +A pair of em dashes can replace a pair of parentheses. Dashes are considered less formal than parentheses; they are also more intrusive. If you want to draw attention to the parenthetical content, use dashes. If you want to include the parenthetical content more subtly, use parentheses. + +.. note:: + When dashes are used in place of parentheses, surrounding punctuation should be omitted. Compare the following examples. + +:: + + Upon discovering the errors (all 124 of them), the publisher immediately recalled the books. + + Upon discovering the errors—all 124 of them—the publisher immediately recalled the books. + + +When used in place of parentheses at the end of a sentence, only a single dash is used. + +:: + + After three weeks on set, the cast was fed up with his direction (or, rather, lack of direction). + + After three weeks on set, the cast was fed up with his direction—or, rather, lack of direction. + + +Exclamation points (!) +~~~~~~~~~~~~~~~~~~~~~~~ +Do not use them at the end of sentences. An exclamation point can be used when referring to a command, such as the bang (!) command. + +Gender References +~~~~~~~~~~~~~~~~~~ +Do not use gender-specific pronouns in documentation. It is far less awkward to read a sentence that uses "they" and "their" rather than "he/she" and "his/hers." + +It is fine to use "you" when giving instructions and "the user," "new users," etc. in more general explanations. + +Never use "one" in place of "you" when writing technical documentation. Using "one" is far too formal. + +Never use "we" when writing. "We" aren't doing anything on the user side. Ansible's products are doing the work as requested by the user. + + +Hyphen +~~~~~~~~~~~~~~ +The hyphen’s primary function is the formation of certain compound terms. Do not use a hyphen unless it serves a purpose. If a compound adjective cannot be misread or, as with many psychological terms, its meaning is established, a hyphen is not necessary. + +Use hyphens to avoid ambiguity or confusion: + +:: + + a little-used car + a little used-car + + cross complaint + cross-complaint + + high-school girl + high schoolgirl + + fine-tooth comb (most people do not comb their teeth) + + third-world war + third world war + +.. image:: images/hyphen-funny.jpg + +In professionally printed material (particularly books, magazines, and newspapers), the hyphen is used to divide words between the end of one line and the beginning of the next. This allows for an evenly aligned right margin without highly variable (and distracting) word spacing. + + +Lists +~~~~~~~ +Keep the structure of bulleted lists equivalent and consistent. If one bullet is a verb phrase, they should all be verb phrases. If one is a complete sentence, they should all be complete sentences, etc. + +Capitalize the first word of each bullet. Unless it is obvious that it is just a list of items, such as a list of items like: +* computer +* monitor +* keyboard +* mouse + +When the bulleted list appears within the context of other copy, (unless it's a straight list like the previous example) add periods, even if the bullets are sentence fragments. Part of the reason behind this is that each bullet is said to complete the original sentence. 
+ +In some cases where the bullets are appearing independently, such as in a poster or a homepage promotion, they do not need periods. + +When giving instructional steps, use numbered lists instead of bulleted lists. + + +Months and States +~~~~~~~~~~~~~~~~~~~~ +Abbreviate months and states according to AP. Months are only abbreviated if they are used in conjunction with a day. Example: "The President visited in January 1999." or "The President visited Jan. 12." + +Months: Jan., Feb., March, April, May, June, July, Aug., Sept., Oct., Nov., Dec. + +States: Ala., Ariz., Ark., Calif., Colo., Conn., Del., Fla., Ga., Ill., Ind., Kan., Ky., La., Md., Mass., Mich., Minn., Miss., Mo., Mont., Neb., Nev., NH, NJ, NM, NY, NC, ND, Okla., Ore., Pa., RI, SC, SD, Tenn., Vt., Va., Wash., W.Va., Wis., Wyo. + +Numbers +~~~~~~~~~ +Numbers between one and nine are written out. 10 and above are numerals. The exception to this is writing "4 million" or "4 GB." It's also acceptable to use numerals in tables and charts. + +Phone Numbers ++++++++++++++++ + +Phone number style: 1 (919) 555-0123 x002 and 1 888-GOTTEXT + + +Quotations (Using Quotation Marks and Writing Quotes) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + "Place the punctuation inside the quotes," the editor said. + +Except in rare instances, use only "said" or "says" because anything else just gets in the way of the quote itself, and also tends to editorialize. + +Place the name first right after the quote: + "I like to write first-person because I like to become the character I'm writing," Wally Lamb said. + +Not: + "I like to write first-person because I like to become the character I'm writing," said Wally Lamb. + + +Semicolon +~~~~~~~~~~~~~~~ +Use a semicolon to separate items in a series if the items contain commas: + +- Every day I have coffee, toast, and fruit for breakfast; a salad for lunch; and a peanut butter sandwich, cookies, ice cream, and chocolate cake for dinner. + +Use a semicolon before a conjunctive adverb (however, therefore, otherwise, namely, for example, etc.): +- I think; therefore, I am. + +Spacing after sentences +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Use only a single space after a sentence. + +Time +~~~~~~~~ +* Time of day is written as "4 p.m."
Binary files /tmp/tmpeNoH6g/W6Wsj24ec7/ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/images/commas-matter-2.jpg and /tmp/tmpeNoH6g/QxW1Nd0MIc/ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/images/commas-matter-2.jpg differ Binary files /tmp/tmpeNoH6g/W6Wsj24ec7/ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/images/commas-matter.jpg and /tmp/tmpeNoH6g/QxW1Nd0MIc/ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/images/commas-matter.jpg differ Binary files /tmp/tmpeNoH6g/W6Wsj24ec7/ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/images/hyphen-funny.jpg and /tmp/tmpeNoH6g/QxW1Nd0MIc/ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/images/hyphen-funny.jpg differ Binary files /tmp/tmpeNoH6g/W6Wsj24ec7/ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/images/thenvsthan.jpg and /tmp/tmpeNoH6g/QxW1Nd0MIc/ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/images/thenvsthan.jpg differ diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/index.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/index.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/index.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/index.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,31 @@ +.. Ansible Style Guide documentation master file, created by + sphinx-quickstart on Mon May 11 12:41:35 2015. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +==================================== +Ansible Style Guide +==================================== + +.. Welcome to Ansible Style Guide's documentation! =============================================== + +.. toctree:: + :maxdepth: 1 + :numbered: + + self + why_use + resources + basic_rules + voice_style + trademarks + grammar_punctuation + spelling_word_choice + + +.. Indices and tables +.. ================== + +.. * :ref:`genindex` +.. * :ref:`modindex` +.. * :ref:`search` diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/Makefile ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/Makefile --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/Makefile 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/Makefile 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,192 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = build + +# User-friendly check for sphinx-build +ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) +$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) +endif + +# Internal variables. 
+PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source + +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest coverage gettext + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " applehelp to make an Apple Help Book" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " xml to make Docutils-native XML files" + @echo " pseudoxml to make pseudoxml-XML files for display purposes" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + @echo " coverage to run coverage check of the documentation (if enabled)" + +clean: + rm -rf $(BUILDDIR)/* + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/AnsibleStyleGuide.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/AnsibleStyleGuide.qhc" + +applehelp: + $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp + @echo + @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." + @echo "N.B. 
You won't be able to view it unless you put it in" \ + "~/Library/Documentation/Help or install it in your application" \ + "bundle." + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/AnsibleStyleGuide" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/AnsibleStyleGuide" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." + +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +coverage: + $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage + @echo "Testing of coverage in the sources finished, look at the " \ + "results in $(BUILDDIR)/coverage/python.txt." + +xml: + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + +pseudoxml: + $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/resources.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/resources.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/resources.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/resources.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,18 @@ +Resources +```````````````` +Internal resources +------------------------- +- http://docs.ansible.com/ +- https://sites.google.com/a/ansibleworks.com/ansible-intranet/ +- ??? intranet Engineering doc??? + + +External Resources +------------------------- +- www.apstylebook.com +- www.chicagomanualofstyle.org—home.html +- www.crockford.com—style.html +- orwell.ru—index.htm +- www.sun.com—sun_tech_pub.xml +- webopedia.internet.com +- www.computeruser.com—index.html diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/spelling_word_choice.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/spelling_word_choice.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/spelling_word_choice.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/spelling_word_choice.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,326 @@ +Spelling - Word Usage - Common Words and Phrases to Use and Avoid +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Acronyms +++++++++++++++++ + +Always uppercase. An acronym is a word formed from the initial letters of a name, such as ROM for Read-only memory, +SaaS for Software as a Service, or by combining initial letters or part of a series of words, such as LILO for LInux +LOader. + +Spell out the acronym before using it in alone text, such as "The Embedded DevKit (EDK)..." + +Applications ++++++++++++++++++++ +When used as a proper name, use the capitalization of the product, such as GNUPro, Source-Navigator, and Ansible Tower. When used as a command, use lowercase as appropriate, such as "To start GCC, type ``gcc``." + +.. note:: + + "vi" is always lowercase. + +As +++++++++ +This is often used to mean "because", but has other connotations, for example, parallel or simultaneous actions. If you mean "because", say "because". + +Asks for +++++++++++++++++ +Use "requests" instead. + +Assure/Ensure/Insure +++++++++++++++++++++++++++++ +Assure implies a sort of mental comfort. As in "I assured my husband that I would eventually bring home beer." + +Ensure means "to make sure." + +Insure relates to monetary insurance. + + +Back up +++++++++++++++ +This is a verb. You "back up" files; you do not "backup" files. + +Backup +++++++++++ +This is a noun. You create "backup" files; you do not create "back up" files. + +Backward +++++++++++++++ +Correct. Avoid using backwards unless you are stating that something has "backwards compatibility." + +Backwards compatibility +++++++++++++++++++++++++ +Correct as is. + +By way of +++++++++++++++++++ +Use "using" instead. + +Can/May +++++++++++++++ +Use "can" to describe actions or conditions that are possible. Use "may" only to describe situations where permission is being given. If either "can," "could," or "may" apply, use "can" because it's less tentative. + +CD or cd ++++++++++++++++ +When referring to a compact disk, use CD, such as "Insert the CD into the CD-ROM drive." When referring to the change directory command, use cd. + +CD-ROM ++++++++++++++ +Correct. Do not use "cdrom," "CD-Rom," "CDROM," "cd-rom" or any other variation. 
When referring to the drive, use CD-ROM drive, such as "Insert the CD into the CD-ROM drive." The plural is "CD-ROMs." + + +Command line +++++++++++++++++++ +Correct. Do not use "command-line" or "commandline." + +Use to describe where to place options for a command, but not where to type the command. Use "shell prompt" instead to describe where to type commands. The line on the display screen where a command is expected. Generally, the command line is the line that contains the most recently displayed command prompt. + + +Daylight saving time (DST) ++++++++++++++++++++++++++++++++ + +Correct. Do not use daylight savings time. Daylight Saving Time (DST) is often misspelled "Daylight Savings", with an "s" at the end. Other common variations are "Summer Time" and "Daylight-Saving Time". (http://www.timeanddate.com/time/dst/daylight-savings-time.html) + + +Download +++++++++++++++++ +Correct. Do not use "down load" or "down-load." + +e.g. +++++++++++ +Spell it out: "For example." + +Failover ++++++++++++++++ +When used as a noun, a failover is a backup operation that automatically switches to a standby database, server or network if the primary system fails or is temporarily shut down for servicing. Failover is an important fault tolerance function of mission-critical systems that rely on constant accessibility. Failover automatically and transparently to the user redirects requests from the failed or down system to the backup system that mimics the operations of the primary system. + +Fail over +++++++++++++ +When used as a verb, fail over is two words since there can be different tenses such as failed over. + +Fewer ++++++++++++++++++++ +Fewer is used with plural nouns. Think things you could count. Time, money, distance, and weight are often listed as exceptions to the traditional "can you count it" rule, often thought of as singular amounts (the work will take less than 5 hours, for example). + +File name ++++++++++++++ +Correct. Do not use "filename." + +File system ++++++++++++++++++++ +Correct. Do not use "filesystem." The system that an operating system or program uses to organize and keep track of files. For example, a hierarchical file system is one that uses directories to organize files into a tree structure. Although the operating system provides its own file management system, you can buy separate file management systems. These systems interact smoothly with the operating system but provide more features, such as improved backup procedures and stricter file protection. + +For instance +++++++++++++++ +Use "For example," instead. + +For further/additional/whatever information +++++++++++++++++++++++++++++++++++++++++++++++ +Use "For more information." + +For this reason +++++++++++++++++++ +Use "therefore". + +Forward +++++++++++++++ +Correct. Avoid using "forwards." + +Gigabyte (GB) +++++++++++++++ +2 to the 30th power (1,073,741,824) bytes. One gigabyte is equal to 1,024 megabytes. Gigabyte is often abbreviated as G or GB. + +Got +++++++++++++++ +Avoid. Use "must" instead. + +High-availability +++++++++++++++++++ +Correct. Do not use "high availability." + +Highly available +++++++++++++++++++ +Correct. Do not use "highly-available." + +Hostname ++++++++++++++++++ +Correct. Do not use host name. + +i.e. +++++++++++++++ +Spell it out: "That is." + +Installer +++++++++++++++ +Avoid. Use "installation program" instead. + +It's and its +++++++++++++++ +"It's" is a contraction for "it is;" use "it is" instead of "it's."
Use "its" as a possessive pronoun (for example, "the store is known for its low prices"). + +Less +++++++++++++ +Less is used with singular nouns. For example "View less details" wouldn't be correct but "View less detail" works. Use fewer when you have plural nouns (things you can count). + +Linux +++++++++++++++ +Correct. Do not use "LINUX" or "linux" unless referring to a command, such as "To start Linux, type linux." Linux is a registered trademark of Linus Torvalds. + +Login +++++++++++++++ +A noun used to refer to the login prompt, such as "At the login prompt, enter your username." + +Log in +++++++++++++++ +A verb used to refer to the act of logging in. Do not use "login," "loggin," "logon," and other variants. For example, "When starting your computer, you are requested to log in..." + +Log on +++++++++++++++ +To make a computer system or network recognize you so that you can begin a computer session. Most personal computers have no log-on procedure -- you just turn the machine on and begin working. For larger systems and networks, however, you usually need to enter a username and password before the computer system will allow you to execute programs. + +Lots of +++++++++++++++ +Use "Several" or something equivalent instead. + +Make sure +++++++++++++++ +This means "be careful to remember, attend to, or find out something." For example, "...make sure that the rhedk group is listed in the output." +Try to use verify or ensure instead. + +Manual/man page +++++++++++++++++++ +Correct. Two words. Do not use "manpage" + +MB +++++++++ +(1) When spelled MB, short for megabyte (1,000,000 or 1,048,576 bytes, depending on the context). +(2) When spelled Mb, short for megabit. + +MBps +++++++++++++++ +Short for megabytes per second, a measure of data transfer speed. Mass storage devices are generally measured in MBps. + +MySQL +++++++++++++++ +Common open source database server and client package. Do not use "MYSQL" or "mySQL." + +Need to +++++++++++++++ +Avoid. Use "must" instead. + +Read-only +++++++++++++ +Correct. Use when referring to the access permissions of files or directories. + +Real time/real-time +++++++++++++++++++++++ +Depends. If used as a noun, it is the actual time during which something takes place. For example, "The computer may partly analyze the data in real time (as it comes in) -- R. H. March." If used as an adjective, "real-time" is appropriate. For example, "XEmacs is a self-documenting, customizable, extensible, real-time display editor." + +Refer to +++++++++++++++ +Use to indicate a reference (within a manual or website) or a cross-reference (to another manual or documentation source). + +See +++++++++++++++ +Don't use. Use "Refer to" instead. + +Since +++++++++ +This is often used to mean "because", but "since" has connotations of time, so be careful. If you mean "because", say "because". + +Tells +++++++++++++++ +Use "Instructs" instead. + +That/which +++++++++++++++ +"That" introduces a restrictive clause-a clause that must be there for the sentence to make sense. A restrictive clause often defines the noun or phrase preceding it. "Which" introduces a non-restrictive, parenthetical clause-a clause that could be omitted without affecting the meaning of the sentence. For example: The car was travelling at a speed that would endanger lives. The car, which was traveling at a speed that would endanger lives, swerved onto the sidewalk. Use "who" or "whom," rather than "that" or "which," when referring to a person. 
+ +Then/than +++++++++++++++ + "Then" refers to a time in the past or the next step in a sequence. "Than" is used for comparisons. + +.. image:: images/thenvsthan.jpg + +Third-party +++++++++++++++ +Correct. Do not use "third party". + +Troubleshoot +++++++++++++++ +Correct. Do not use "trouble shoot" or "trouble-shoot." To isolate the source of a problem and fix it. In the case of computer systems, the term troubleshoot is usually used when the problem is suspected to be hardware-related. If the problem is known to be in software, the term debug is more commonly used. + +UK +++++++++++++++ +Correct as is, no periods. + +UNIX® +++++++++++++++ +Correct. Do not use "Unix" or "unix." UNIX® is a registered trademark of The Open Group. + +Unset +++++++++++++++ +Don't use. Use Clear. + +US +++++++++++++++ +Correct as is, no periods. + +User +++++++++++++++ +When referring to the reader, use "you" instead of "user." For example, "The user must..." is incorrect. Use "You must..." instead. If referring to more than one user, calling the collection "users" is acceptable, such as "Other users may wish to access your database." + +Username +++++++++++++++ +Correct. Do not use "user name." + +View +++++++++++++++ +When using as a reference ("View the documentation available online."), do not use View. Use "Refer to" instead. + +Within +++++++++++++++ +Don't use to refer to a file that exists in a directory. Use "In". + +World Wide Web +++++++++++++++ +Correct. Capitalize each word. Abbreviate as "WWW" or "Web." + +Webpage +++++++++++++++ +Correct. Do not use "web page" or "Web page." + +Web server +++++++++++++++ +Correct. Do not use "webserver". For example, "The Apache HTTP Server is the default Web server..." + +Website +++++++++++++++ +Correct. Do not use "web site" or "Web site." For example, "The Ansible website contains ..." + +Who/whom +++++++++++++++ +Use the pronoun "who" as a subject. Use the pronoun "whom" as a direct object, an indirect object, or the object of a preposition. For example: Who owns this? To whom does this belong? + +Will +++++++++++++++ +Do not use future tense unless it is absolutely necessary. For instance, do not use the sentence, "The next section will describe the process in more detail." Instead, use the sentence, "The next section describes the process in more detail." + +Wish +++++++++++++++ +Use "need" instead of "desire" and "wish." Use "want" when the reader's actions are optional (that is, they may not "need" something but may still "want" something). + +x86 +++++++++++++++ +Correct. Do not capitalize the "x." + +x86_64 +++++++++++++++ +Do not use. Do not use "Hammer". Always use "AMD64 and Intel® EM64T" when referring to this architecture. + +You +++++++++++++++ +Correct. Do not use "I," "he," or "she." + +You may +++++++++++++++ +Try to avoid using this. For example, "you may" can be eliminated from this sentence "You may double-click on the desktop..." + diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/breadcrumbs.html ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/breadcrumbs.html --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/breadcrumbs.html 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/breadcrumbs.html 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,6 @@ + +
+ diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/footer.html ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/footer.html --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/footer.html 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/footer.html 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,25 @@ +
+ {% if next or prev %} + + {% endif %} + +
+ +

+ © Copyright 2015 Ansible, Inc. + + {%- if last_updated %} + {% trans last_updated=last_updated|e %}Last updated on {{ last_updated }}.{% endtrans %} + {%- endif %} +

+ +Ansible docs are generated from GitHub sources using Sphinx, with a theme provided by Read the Docs. {% if pagename.endswith("_module") %} Module documentation is not edited directly, but is generated from the source code for the modules. To submit an update to module docs, edit the 'DOCUMENTATION' metadata in the core and extras modules source repositories. {% endif %} + +
diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/__init__.py ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/__init__.py --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/__init__.py 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/__init__.py 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,17 @@ +"""Sphinx ReadTheDocs theme. + +From https://github.com/ryan-roemer/sphinx-bootstrap-theme. + +""" +import os + +VERSION = (0, 1, 5) + +__version__ = ".".join(str(v) for v in VERSION) +__version_full__ = __version__ + + +def get_html_theme_path(): + """Return list of HTML theme paths.""" + cur_dir = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) + return cur_dir diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/layout.html ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/layout.html --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/layout.html 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/layout.html 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,238 @@ +{# TEMPLATE VAR SETTINGS #} +{%- set url_root = pathto('', 1) %} +{%- if url_root == '#' %}{% set url_root = '' %}{% endif %} +{%- if not embedded and docstitle %} + {%- set titlesuffix = " — "|safe + docstitle|e %} +{%- else %} + {%- set titlesuffix = "" %} +{%- endif %} + + + + + + + + + + + + + {% block htmltitle %} + {{ title|striptags|e }}{{ titlesuffix }} + {% endblock %} + + {# FAVICON #} + {% if favicon %} + + {% endif %} + + {# CSS #} + + + + {# JS #} + {% if not embedded %} + + + {%- for scriptfile in script_files %} + + {%- endfor %} + + {% if use_opensearch %} + + {% endif %} + + {% endif %} + + {# RTD hosts these file themselves, so just load on non RTD builds #} + {% if not READTHEDOCS %} + + + {% endif %} + + {% for cssfile in css_files %} + + {% endfor %} + + {%- block linktags %} + {%- if hasdoc('about') %} + + {%- endif %} + {%- if hasdoc('genindex') %} + + {%- endif %} + {%- if hasdoc('search') %} + + {%- endif %} + {%- if hasdoc('copyright') %} + + {%- endif %} + + {%- if parents %} + + {%- endif %} + {%- if next %} + + {%- endif %} + {%- if prev %} + + {%- endif %} + {%- endblock %} + {%- block extrahead %} {% endblock %} + + + + + + + + + + + + + + + + + + + + +
+ + {# SIDE NAV, TOGGLES ON MOBILE #} + + +
+ + {# MOBILE NAV, TRIGGLES SIDE NAV ON TOGGLE #} + + + + {# PAGE CONTENT #} +
+
+ + +
+ + + + + + +
 
+
 
+
+ + {% include "breadcrumbs.html" %} +
+ {% block body %}{% endblock %} +
+
+ {% include "footer.html" %} +
+
+ +
+ +
+ {% include "versions.html" %} + + + + + + + diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/layout_old.html ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/layout_old.html --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/layout_old.html 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/layout_old.html 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,205 @@ +{# + basic/layout.html + ~~~~~~~~~~~~~~~~~ + + Master layout template for Sphinx themes. + + :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS. + :license: BSD, see LICENSE for details. +#} +{%- block doctype -%} + +{%- endblock %} +{%- set reldelim1 = reldelim1 is not defined and ' »' or reldelim1 %} +{%- set reldelim2 = reldelim2 is not defined and ' |' or reldelim2 %} +{%- set render_sidebar = (not embedded) and (not theme_nosidebar|tobool) and + (sidebars != []) %} +{%- set url_root = pathto('', 1) %} +{# XXX necessary? #} +{%- if url_root == '#' %}{% set url_root = '' %}{% endif %} +{%- if not embedded and docstitle %} + {%- set titlesuffix = " — "|safe + docstitle|e %} +{%- else %} + {%- set titlesuffix = "" %} +{%- endif %} + +{%- macro relbar() %} + +{%- endmacro %} + +{%- macro sidebar() %} + {%- if render_sidebar %} +
+
+ {%- block sidebarlogo %} + {%- if logo %} + + {%- endif %} + {%- endblock %} + {%- if sidebars != None %} + {#- new style sidebar: explicitly include/exclude templates #} + {%- for sidebartemplate in sidebars %} + {%- include sidebartemplate %} + {%- endfor %} + {%- else %} + {#- old style sidebars: using blocks -- should be deprecated #} + {%- block sidebartoc %} + {%- include "localtoc.html" %} + {%- endblock %} + {%- block sidebarrel %} + {%- include "relations.html" %} + {%- endblock %} + {%- block sidebarsourcelink %} + {%- include "sourcelink.html" %} + {%- endblock %} + {%- if customsidebar %} + {%- include customsidebar %} + {%- endif %} + {%- block sidebarsearch %} + {%- include "searchbox.html" %} + {%- endblock %} + {%- endif %} +
+
+ {%- endif %} +{%- endmacro %} + +{%- macro script() %} + + {%- for scriptfile in script_files %} + + {%- endfor %} +{%- endmacro %} + +{%- macro css() %} + + + {%- for cssfile in css_files %} + + {%- endfor %} +{%- endmacro %} + + + + + {{ metatags }} + {%- block htmltitle %} + {{ title|striptags|e }}{{ titlesuffix }} + {%- endblock %} + {{ css() }} + {%- if not embedded %} + {{ script() }} + {%- if use_opensearch %} + + {%- endif %} + {%- if favicon %} + + {%- endif %} + {%- endif %} +{%- block linktags %} + {%- if hasdoc('about') %} + + {%- endif %} + {%- if hasdoc('genindex') %} + + {%- endif %} + {%- if hasdoc('search') %} + + {%- endif %} + {%- if hasdoc('copyright') %} + + {%- endif %} + + {%- if parents %} + + {%- endif %} + {%- if next %} + + {%- endif %} + {%- if prev %} + + {%- endif %} +{%- endblock %} +{%- block extrahead %} {% endblock %} + + +{%- block header %}{% endblock %} + +{%- block relbar1 %}{{ relbar() }}{% endblock %} + +{%- block content %} + {%- block sidebar1 %} {# possible location for sidebar #} {% endblock %} + +
+ {%- block document %} +
+ {%- if render_sidebar %} +
+ {%- endif %} +
+ {% block body %} {% endblock %} +
+ {%- if render_sidebar %} +
+ {%- endif %} +
+ {%- endblock %} + + {%- block sidebar2 %}{{ sidebar() }}{% endblock %} +
+
+{%- endblock %} + +{%- block relbar2 %}{{ relbar() }}{% endblock %} + +{%- block footer %} + +


+{%- endblock %} + + + diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/searchbox.html ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/searchbox.html --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/searchbox.html 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/searchbox.html 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,61 @@ + + + + +
+ + + +
+ + + + diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/search.html ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/search.html --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/search.html 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/search.html 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,50 @@ +{# + basic/search.html + ~~~~~~~~~~~~~~~~~ + + Template for the search page. + + :copyright: Copyright 2007-2013 by the Sphinx team, see AUTHORS. + :license: BSD, see LICENSE for details. +#} +{%- extends "layout.html" %} +{% set title = _('Search') %} +{% set script_files = script_files + ['_static/searchtools.js'] %} +{% block extrahead %} + + {# this is used when loading the search index using $.ajax fails, + such as on Chrome for documents on localhost #} + + {{ super() }} +{% endblock %} +{% block body %} + + + {% if search_performed %} +

{{ _('Search Results') }}

+ {% if not search_results %} +

{{ _('Your search did not match any documents. Please make sure that all words are spelled correctly and that you\'ve selected enough categories.') }}

+ {% endif %} + {% endif %} +
+ {% if search_results %} +
    + {% for href, caption, context in search_results %} +
  • + {{ caption }} +

    {{ context|e }}

    +
  • + {% endfor %} +
+ {% endif %} +
+{% endblock %} diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/css/badge_only.css ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/css/badge_only.css --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/css/badge_only.css 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/css/badge_only.css 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1 @@ +.font-smooth,.icon:before{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:fontawesome-webfont;font-weight:normal;font-style:normal;src:url("../font/fontawesome_webfont.eot");src:url("../font/fontawesome_webfont.eot?#iefix") format("embedded-opentype"),url("../font/fontawesome_webfont.woff") format("woff"),url("../font/fontawesome_webfont.ttf") format("truetype"),url("../font/fontawesome_webfont.svg#fontawesome-webfont") format("svg")}.icon:before{display:inline-block;font-family:fontawesome-webfont;font-style:normal;font-weight:normal;line-height:1;text-decoration:inherit}a .icon{display:inline-block;text-decoration:inherit}li .icon{display:inline-block}li .icon-large:before,li .icon-large:before{width:1.875em}ul.icons{list-style-type:none;margin-left:2em;text-indent:-0.8em}ul.icons li .icon{width:0.8em}ul.icons li .icon-large:before,ul.icons li .icon-large:before{vertical-align:baseline}.icon-book:before{content:"\f02d"}.icon-caret-down:before{content:"\f0d7"}.icon-caret-up:before{content:"\f0d8"}.icon-caret-left:before{content:"\f0d9"}.icon-caret-right:before{content:"\f0da"}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;border-top:solid 10px #343131;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;z-index:400}.rst-versions a{color:#2980b9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27ae60;*zoom:1}.rst-versions .rst-current-version:before,.rst-versions .rst-current-version:after{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-versions .rst-current-version .icon{color:#fcfcfc}.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#e74c3c;color:#fff}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:gray;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:solid 1px #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px}.rst-versions.rst-badge .icon-book{float:none}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge .rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width: 768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}img{width:100%;height:auto}} diff -Nru 
ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/css/old-theme.css ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/css/old-theme.css --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/css/old-theme.css 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/css/old-theme.css 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,4636 @@ +* { + -webkit-box-sizing: border-box; + -moz-box-sizing: border-box; + box-sizing: border-box; +} + +article, aside, details, figcaption, figure, footer, header, hgroup, nav, section { + display: block; +} + +audio, canvas, video { + display: inline-block; + *display: inline; + *zoom: 1; +} + +audio:not([controls]) { + display: none; +} + +[hidden] { + display: none; +} + +* { + -webkit-box-sizing: border-box; + -moz-box-sizing: border-box; + box-sizing: border-box; +} + +html { + font-size: 100%; + -webkit-text-size-adjust: 100%; + -ms-text-size-adjust: 100%; +} + +body { + margin: 0; +} + +a:hover, a:active { + outline: 0; +} + +abbr[title] { + border-bottom: 1px dotted; +} + +b, strong { + font-weight: bold; +} + +blockquote { + margin: 0; +} + +dfn { + font-style: italic; +} + +hr { + display: block; + height: 1px; + border: 0; + border-top: 1px solid #ccc; + margin: 20px 0; + padding: 0; +} + +ins { + background: #ff9; + color: #000; + text-decoration: none; +} + +mark { + background: #ff0; + color: #000; + font-style: italic; + font-weight: bold; +} + +pre, code, .rst-content tt, kbd, samp { + font-family: monospace, serif; + _font-family: "courier new", monospace; + font-size: 1em; +} + +pre { + white-space: pre; +} + +q { + quotes: none; +} + +q:before, q:after { + content: ""; + content: none; +} + +small { + font-size: 85%; +} + +sub, sup { + font-size: 75%; + line-height: 0; + position: relative; + vertical-align: baseline; +} + +sup { + top: -0.5em; +} + +sub { + bottom: -0.25em; +} + +ul, ol, dl { + margin: 0; + padding: 0; + list-style: none; + list-style-image: none; +} + +li { + list-style: none; +} + +dd { + margin: 0; +} + +img { + border: 0; + -ms-interpolation-mode: bicubic; + vertical-align: middle; + max-width: 100%; +} + +svg:not(:root) { + overflow: hidden; +} + +figure { + margin: 0; +} + +form { + margin: 0; +} + +fieldset { + border: 0; + margin: 0; + padding: 0; +} + +label { + cursor: pointer; +} + +legend { + border: 0; + *margin-left: -7px; + padding: 0; + white-space: normal; +} + +button, input, select, textarea { + font-size: 100%; + margin: 0; + vertical-align: baseline; + *vertical-align: middle; +} + +button, input { + line-height: normal; +} + +button, input[type="button"], input[type="reset"], input[type="submit"] { + cursor: pointer; + -webkit-appearance: button; + *overflow: visible; +} + +button[disabled], input[disabled] { + cursor: default; +} + +input[type="checkbox"], input[type="radio"] { + box-sizing: border-box; + padding: 0; + *width: 13px; + *height: 13px; +} + +input[type="search"] { + -webkit-appearance: textfield; + -moz-box-sizing: content-box; + -webkit-box-sizing: content-box; + box-sizing: content-box; +} + +input[type="search"]::-webkit-search-decoration, input[type="search"]::-webkit-search-cancel-button { + -webkit-appearance: none; +} + +button::-moz-focus-inner, input::-moz-focus-inner { + border: 0; + padding: 0; +} + +textarea { + overflow: auto; + vertical-align: top; + resize: vertical; +} + +table { + border-collapse: collapse; + border-spacing: 0; +} + +td { 
+ vertical-align: top; +} + +.chromeframe { + margin: 0.2em 0; + background: #ccc; + color: #000; + padding: 0.2em 0; +} + +.ir { + display: block; + border: 0; + text-indent: -999em; + overflow: hidden; + background-color: transparent; + background-repeat: no-repeat; + text-align: left; + direction: ltr; + *line-height: 0; +} + +.ir br { + display: none; +} + +.hidden { + display: none !important; + visibility: hidden; +} + +.visuallyhidden { + border: 0; + clip: rect(0 0 0 0); + height: 1px; + margin: -1px; + overflow: hidden; + padding: 0; + position: absolute; + width: 1px; +} + +.visuallyhidden.focusable:active, .visuallyhidden.focusable:focus { + clip: auto; + height: auto; + margin: 0; + overflow: visible; + position: static; + width: auto; +} + +.invisible { + visibility: hidden; +} + +.relative { + position: relative; +} + +big, small { + font-size: 100%; +} + +@media print { + html, body, section { + background: none !important; + } + + * { + box-shadow: none !important; + text-shadow: none !important; + filter: none !important; + -ms-filter: none !important; + } + + a, a:visited { + text-decoration: underline; + } + + .ir a:after, a[href^="javascript:"]:after, a[href^="#"]:after { + content: ""; + } + + pre, blockquote { + page-break-inside: avoid; + } + + thead { + display: table-header-group; + } + + tr, img { + page-break-inside: avoid; + } + + img { + max-width: 100% !important; + } + + @page { + margin: 0.5cm; + } + + p, h2, h3 { + orphans: 3; + widows: 3; + } + + h2, h3 { + page-break-after: avoid; + } +} + +.font-smooth, .icon:before, .wy-inline-validate.wy-inline-validate-success .wy-input-context:before, .wy-inline-validate.wy-inline-validate-danger .wy-input-context:before, .wy-inline-validate.wy-inline-validate-warning .wy-input-context:before, .wy-inline-validate.wy-inline-validate-info .wy-input-context:before, .wy-tag-input-group .wy-tag .wy-tag-remove:before, .rst-content .admonition-title:before, .rst-content h1 .headerlink:before, .rst-content h2 .headerlink:before, .rst-content h3 .headerlink:before, .rst-content h4 .headerlink:before, .rst-content h5 .headerlink:before, .rst-content h6 .headerlink:before, .rst-content dl dt .headerlink:before, .wy-alert, .rst-content .note, .rst-content .attention, .rst-content .caution, .rst-content .danger, .rst-content .error, .rst-content .hint, .rst-content .important, .rst-content .tip, .rst-content .warning, .btn, input[type="text"], input[type="password"], input[type="email"], input[type="url"], input[type="date"], input[type="month"], input[type="time"], input[type="datetime"], input[type="datetime-local"], input[type="week"], input[type="number"], input[type="search"], input[type="tel"], input[type="color"], select, textarea, .wy-tag-input-group, .wy-menu-vertical li.on a, .wy-menu-vertical li.current>a, .wy-side-nav-search>a, .wy-side-nav-search .wy-dropdown>a, .wy-nav-top a { + -webkit-font-smoothing: antialiased; +} + +.clearfix { + *zoom: 1; +} + +.clearfix:before, .clearfix:after { + display: table; + content: ""; +} + +.clearfix:after { + clear: both; +} + +@font-face { + font-family: fontawesome-webfont; + font-weight: normal; + font-style: normal; + src: url("../font/fontawesome_webfont.eot"); + src: url("../font/fontawesome_webfont.eot?#iefix") format("embedded-opentype"), url("../font/fontawesome_webfont.woff") format("woff"), url("../font/fontawesome_webfont.ttf") format("truetype"), url("../font/fontawesome_webfont.svg#fontawesome-webfont") format("svg"); +} + +.icon:before, 
.wy-inline-validate.wy-inline-validate-success .wy-input-context:before, .wy-inline-validate.wy-inline-validate-danger .wy-input-context:before, .wy-inline-validate.wy-inline-validate-warning .wy-input-context:before, .wy-inline-validate.wy-inline-validate-info .wy-input-context:before, .wy-tag-input-group .wy-tag .wy-tag-remove:before, .rst-content .admonition-title:before, .rst-content h1 .headerlink:before, .rst-content h2 .headerlink:before, .rst-content h3 .headerlink:before, .rst-content h4 .headerlink:before, .rst-content h5 .headerlink:before, .rst-content h6 .headerlink:before, .rst-content dl dt .headerlink:before { + display: inline-block; + font-family: fontawesome-webfont; + font-style: normal; + font-weight: normal; + line-height: 1; + text-decoration: inherit; +} + +a .icon, a .wy-inline-validate.wy-inline-validate-success .wy-input-context, .wy-inline-validate.wy-inline-validate-success a .wy-input-context, a .wy-inline-validate.wy-inline-validate-danger .wy-input-context, .wy-inline-validate.wy-inline-validate-danger a .wy-input-context, a .wy-inline-validate.wy-inline-validate-warning .wy-input-context, .wy-inline-validate.wy-inline-validate-warning a .wy-input-context, a .wy-inline-validate.wy-inline-validate-info .wy-input-context, .wy-inline-validate.wy-inline-validate-info a .wy-input-context, a .wy-tag-input-group .wy-tag .wy-tag-remove, .wy-tag-input-group .wy-tag a .wy-tag-remove, a .rst-content .admonition-title, .rst-content a .admonition-title, a .rst-content h1 .headerlink, .rst-content h1 a .headerlink, a .rst-content h2 .headerlink, .rst-content h2 a .headerlink, a .rst-content h3 .headerlink, .rst-content h3 a .headerlink, a .rst-content h4 .headerlink, .rst-content h4 a .headerlink, a .rst-content h5 .headerlink, .rst-content h5 a .headerlink, a .rst-content h6 .headerlink, .rst-content h6 a .headerlink, a .rst-content dl dt .headerlink, .rst-content dl dt a .headerlink { + display: inline-block; + text-decoration: inherit; +} + +.icon-large:before { + vertical-align: -10%; + font-size: 1.33333em; +} + +.btn .icon, .btn .wy-inline-validate.wy-inline-validate-success .wy-input-context, .wy-inline-validate.wy-inline-validate-success .btn .wy-input-context, .btn .wy-inline-validate.wy-inline-validate-danger .wy-input-context, .wy-inline-validate.wy-inline-validate-danger .btn .wy-input-context, .btn .wy-inline-validate.wy-inline-validate-warning .wy-input-context, .wy-inline-validate.wy-inline-validate-warning .btn .wy-input-context, .btn .wy-inline-validate.wy-inline-validate-info .wy-input-context, .wy-inline-validate.wy-inline-validate-info .btn .wy-input-context, .btn .wy-tag-input-group .wy-tag .wy-tag-remove, .wy-tag-input-group .wy-tag .btn .wy-tag-remove, .btn .rst-content .admonition-title, .rst-content .btn .admonition-title, .btn .rst-content h1 .headerlink, .rst-content h1 .btn .headerlink, .btn .rst-content h2 .headerlink, .rst-content h2 .btn .headerlink, .btn .rst-content h3 .headerlink, .rst-content h3 .btn .headerlink, .btn .rst-content h4 .headerlink, .rst-content h4 .btn .headerlink, .btn .rst-content h5 .headerlink, .rst-content h5 .btn .headerlink, .btn .rst-content h6 .headerlink, .rst-content h6 .btn .headerlink, .btn .rst-content dl dt .headerlink, .rst-content dl dt .btn .headerlink, .nav .icon, .nav .wy-inline-validate.wy-inline-validate-success .wy-input-context, .wy-inline-validate.wy-inline-validate-success .nav .wy-input-context, .nav .wy-inline-validate.wy-inline-validate-danger .wy-input-context, 
.wy-inline-validate.wy-inline-validate-danger .nav .wy-input-context, .nav .wy-inline-validate.wy-inline-validate-warning .wy-input-context, .wy-inline-validate.wy-inline-validate-warning .nav .wy-input-context, .nav .wy-inline-validate.wy-inline-validate-info .wy-input-context, .wy-inline-validate.wy-inline-validate-info .nav .wy-input-context, .nav .wy-tag-input-group .wy-tag .wy-tag-remove, .wy-tag-input-group .wy-tag .nav .wy-tag-remove, .nav .rst-content .admonition-title, .rst-content .nav .admonition-title, .nav .rst-content h1 .headerlink, .rst-content h1 .nav .headerlink, .nav .rst-content h2 .headerlink, .rst-content h2 .nav .headerlink, .nav .rst-content h3 .headerlink, .rst-content h3 .nav .headerlink, .nav .rst-content h4 .headerlink, .rst-content h4 .nav .headerlink, .nav .rst-content h5 .headerlink, .rst-content h5 .nav .headerlink, .nav .rst-content h6 .headerlink, .rst-content h6 .nav .headerlink, .nav .rst-content dl dt .headerlink, .rst-content dl dt .nav .headerlink { + display: inline; +} + +.btn .icon.icon-large, .btn .wy-inline-validate.wy-inline-validate-success .icon-large.wy-input-context, .wy-inline-validate.wy-inline-validate-success .btn .icon-large.wy-input-context, .btn .wy-inline-validate.wy-inline-validate-danger .icon-large.wy-input-context, .wy-inline-validate.wy-inline-validate-danger .btn .icon-large.wy-input-context, .btn .wy-inline-validate.wy-inline-validate-warning .icon-large.wy-input-context, .wy-inline-validate.wy-inline-validate-warning .btn .icon-large.wy-input-context, .btn .wy-inline-validate.wy-inline-validate-info .icon-large.wy-input-context, .wy-inline-validate.wy-inline-validate-info .btn .icon-large.wy-input-context, .btn .wy-tag-input-group .wy-tag .icon-large.wy-tag-remove, .wy-tag-input-group .wy-tag .btn .icon-large.wy-tag-remove, .btn .rst-content .icon-large.admonition-title, .rst-content .btn .icon-large.admonition-title, .btn .rst-content h1 .icon-large.headerlink, .rst-content h1 .btn .icon-large.headerlink, .btn .rst-content h2 .icon-large.headerlink, .rst-content h2 .btn .icon-large.headerlink, .btn .rst-content h3 .icon-large.headerlink, .rst-content h3 .btn .icon-large.headerlink, .btn .rst-content h4 .icon-large.headerlink, .rst-content h4 .btn .icon-large.headerlink, .btn .rst-content h5 .icon-large.headerlink, .rst-content h5 .btn .icon-large.headerlink, .btn .rst-content h6 .icon-large.headerlink, .rst-content h6 .btn .icon-large.headerlink, .btn .rst-content dl dt .icon-large.headerlink, .rst-content dl dt .btn .icon-large.headerlink, .nav .icon.icon-large, .nav .wy-inline-validate.wy-inline-validate-success .icon-large.wy-input-context, .wy-inline-validate.wy-inline-validate-success .nav .icon-large.wy-input-context, .nav .wy-inline-validate.wy-inline-validate-danger .icon-large.wy-input-context, .wy-inline-validate.wy-inline-validate-danger .nav .icon-large.wy-input-context, .nav .wy-inline-validate.wy-inline-validate-warning .icon-large.wy-input-context, .wy-inline-validate.wy-inline-validate-warning .nav .icon-large.wy-input-context, .nav .wy-inline-validate.wy-inline-validate-info .icon-large.wy-input-context, .wy-inline-validate.wy-inline-validate-info .nav .icon-large.wy-input-context, .nav .wy-tag-input-group .wy-tag .icon-large.wy-tag-remove, .wy-tag-input-group .wy-tag .nav .icon-large.wy-tag-remove, .nav .rst-content .icon-large.admonition-title, .rst-content .nav .icon-large.admonition-title, .nav .rst-content h1 .icon-large.headerlink, .rst-content h1 .nav .icon-large.headerlink, .nav .rst-content h2 
.icon-large.headerlink, .rst-content h2 .nav .icon-large.headerlink, .nav .rst-content h3 .icon-large.headerlink, .rst-content h3 .nav .icon-large.headerlink, .nav .rst-content h4 .icon-large.headerlink, .rst-content h4 .nav .icon-large.headerlink, .nav .rst-content h5 .icon-large.headerlink, .rst-content h5 .nav .icon-large.headerlink, .nav .rst-content h6 .icon-large.headerlink, .rst-content h6 .nav .icon-large.headerlink, .nav .rst-content dl dt .icon-large.headerlink, .rst-content dl dt .nav .icon-large.headerlink { + line-height: 0.9em; +} + +.btn .icon.icon-spin, .btn .wy-inline-validate.wy-inline-validate-success .icon-spin.wy-input-context, .wy-inline-validate.wy-inline-validate-success .btn .icon-spin.wy-input-context, .btn .wy-inline-validate.wy-inline-validate-danger .icon-spin.wy-input-context, .wy-inline-validate.wy-inline-validate-danger .btn .icon-spin.wy-input-context, .btn .wy-inline-validate.wy-inline-validate-warning .icon-spin.wy-input-context, .wy-inline-validate.wy-inline-validate-warning .btn .icon-spin.wy-input-context, .btn .wy-inline-validate.wy-inline-validate-info .icon-spin.wy-input-context, .wy-inline-validate.wy-inline-validate-info .btn .icon-spin.wy-input-context, .btn .wy-tag-input-group .wy-tag .icon-spin.wy-tag-remove, .wy-tag-input-group .wy-tag .btn .icon-spin.wy-tag-remove, .btn .rst-content .icon-spin.admonition-title, .rst-content .btn .icon-spin.admonition-title, .btn .rst-content h1 .icon-spin.headerlink, .rst-content h1 .btn .icon-spin.headerlink, .btn .rst-content h2 .icon-spin.headerlink, .rst-content h2 .btn .icon-spin.headerlink, .btn .rst-content h3 .icon-spin.headerlink, .rst-content h3 .btn .icon-spin.headerlink, .btn .rst-content h4 .icon-spin.headerlink, .rst-content h4 .btn .icon-spin.headerlink, .btn .rst-content h5 .icon-spin.headerlink, .rst-content h5 .btn .icon-spin.headerlink, .btn .rst-content h6 .icon-spin.headerlink, .rst-content h6 .btn .icon-spin.headerlink, .btn .rst-content dl dt .icon-spin.headerlink, .rst-content dl dt .btn .icon-spin.headerlink, .nav .icon.icon-spin, .nav .wy-inline-validate.wy-inline-validate-success .icon-spin.wy-input-context, .wy-inline-validate.wy-inline-validate-success .nav .icon-spin.wy-input-context, .nav .wy-inline-validate.wy-inline-validate-danger .icon-spin.wy-input-context, .wy-inline-validate.wy-inline-validate-danger .nav .icon-spin.wy-input-context, .nav .wy-inline-validate.wy-inline-validate-warning .icon-spin.wy-input-context, .wy-inline-validate.wy-inline-validate-warning .nav .icon-spin.wy-input-context, .nav .wy-inline-validate.wy-inline-validate-info .icon-spin.wy-input-context, .wy-inline-validate.wy-inline-validate-info .nav .icon-spin.wy-input-context, .nav .wy-tag-input-group .wy-tag .icon-spin.wy-tag-remove, .wy-tag-input-group .wy-tag .nav .icon-spin.wy-tag-remove, .nav .rst-content .icon-spin.admonition-title, .rst-content .nav .icon-spin.admonition-title, .nav .rst-content h1 .icon-spin.headerlink, .rst-content h1 .nav .icon-spin.headerlink, .nav .rst-content h2 .icon-spin.headerlink, .rst-content h2 .nav .icon-spin.headerlink, .nav .rst-content h3 .icon-spin.headerlink, .rst-content h3 .nav .icon-spin.headerlink, .nav .rst-content h4 .icon-spin.headerlink, .rst-content h4 .nav .icon-spin.headerlink, .nav .rst-content h5 .icon-spin.headerlink, .rst-content h5 .nav .icon-spin.headerlink, .nav .rst-content h6 .icon-spin.headerlink, .rst-content h6 .nav .icon-spin.headerlink, .nav .rst-content dl dt .icon-spin.headerlink, .rst-content dl dt .nav .icon-spin.headerlink { + 
display: inline-block; +} + +.btn.icon:before, .wy-inline-validate.wy-inline-validate-success .btn.wy-input-context:before, .wy-inline-validate.wy-inline-validate-danger .btn.wy-input-context:before, .wy-inline-validate.wy-inline-validate-warning .btn.wy-input-context:before, .wy-inline-validate.wy-inline-validate-info .btn.wy-input-context:before, .wy-tag-input-group .wy-tag .btn.wy-tag-remove:before, .rst-content .btn.admonition-title:before, .rst-content h1 .btn.headerlink:before, .rst-content h2 .btn.headerlink:before, .rst-content h3 .btn.headerlink:before, .rst-content h4 .btn.headerlink:before, .rst-content h5 .btn.headerlink:before, .rst-content h6 .btn.headerlink:before, .rst-content dl dt .btn.headerlink:before { + opacity: 0.5; + -webkit-transition: opacity 0.05s ease-in; + -moz-transition: opacity 0.05s ease-in; + transition: opacity 0.05s ease-in; +} + +.btn.icon:hover:before, .wy-inline-validate.wy-inline-validate-success .btn.wy-input-context:hover:before, .wy-inline-validate.wy-inline-validate-danger .btn.wy-input-context:hover:before, .wy-inline-validate.wy-inline-validate-warning .btn.wy-input-context:hover:before, .wy-inline-validate.wy-inline-validate-info .btn.wy-input-context:hover:before, .wy-tag-input-group .wy-tag .btn.wy-tag-remove:hover:before, .rst-content .btn.admonition-title:hover:before, .rst-content h1 .btn.headerlink:hover:before, .rst-content h2 .btn.headerlink:hover:before, .rst-content h3 .btn.headerlink:hover:before, .rst-content h4 .btn.headerlink:hover:before, .rst-content h5 .btn.headerlink:hover:before, .rst-content h6 .btn.headerlink:hover:before, .rst-content dl dt .btn.headerlink:hover:before { + opacity: 1; +} + +.btn-mini .icon:before, .btn-mini .wy-inline-validate.wy-inline-validate-success .wy-input-context:before, .wy-inline-validate.wy-inline-validate-success .btn-mini .wy-input-context:before, .btn-mini .wy-inline-validate.wy-inline-validate-danger .wy-input-context:before, .wy-inline-validate.wy-inline-validate-danger .btn-mini .wy-input-context:before, .btn-mini .wy-inline-validate.wy-inline-validate-warning .wy-input-context:before, .wy-inline-validate.wy-inline-validate-warning .btn-mini .wy-input-context:before, .btn-mini .wy-inline-validate.wy-inline-validate-info .wy-input-context:before, .wy-inline-validate.wy-inline-validate-info .btn-mini .wy-input-context:before, .btn-mini .wy-tag-input-group .wy-tag .wy-tag-remove:before, .wy-tag-input-group .wy-tag .btn-mini .wy-tag-remove:before, .btn-mini .rst-content .admonition-title:before, .rst-content .btn-mini .admonition-title:before, .btn-mini .rst-content h1 .headerlink:before, .rst-content h1 .btn-mini .headerlink:before, .btn-mini .rst-content h2 .headerlink:before, .rst-content h2 .btn-mini .headerlink:before, .btn-mini .rst-content h3 .headerlink:before, .rst-content h3 .btn-mini .headerlink:before, .btn-mini .rst-content h4 .headerlink:before, .rst-content h4 .btn-mini .headerlink:before, .btn-mini .rst-content h5 .headerlink:before, .rst-content h5 .btn-mini .headerlink:before, .btn-mini .rst-content h6 .headerlink:before, .rst-content h6 .btn-mini .headerlink:before, .btn-mini .rst-content dl dt .headerlink:before, .rst-content dl dt .btn-mini .headerlink:before { + font-size: 14px; + vertical-align: -15%; +} + +li .icon, li .wy-inline-validate.wy-inline-validate-success .wy-input-context, .wy-inline-validate.wy-inline-validate-success li .wy-input-context, li .wy-inline-validate.wy-inline-validate-danger .wy-input-context, .wy-inline-validate.wy-inline-validate-danger li 
.wy-input-context, li .wy-inline-validate.wy-inline-validate-warning .wy-input-context, .wy-inline-validate.wy-inline-validate-warning li .wy-input-context, li .wy-inline-validate.wy-inline-validate-info .wy-input-context, .wy-inline-validate.wy-inline-validate-info li .wy-input-context, li .wy-tag-input-group .wy-tag .wy-tag-remove, .wy-tag-input-group .wy-tag li .wy-tag-remove, li .rst-content .admonition-title, .rst-content li .admonition-title, li .rst-content h1 .headerlink, .rst-content h1 li .headerlink, li .rst-content h2 .headerlink, .rst-content h2 li .headerlink, li .rst-content h3 .headerlink, .rst-content h3 li .headerlink, li .rst-content h4 .headerlink, .rst-content h4 li .headerlink, li .rst-content h5 .headerlink, .rst-content h5 li .headerlink, li .rst-content h6 .headerlink, .rst-content h6 li .headerlink, li .rst-content dl dt .headerlink, .rst-content dl dt li .headerlink { + display: inline-block; +} + +li .icon-large:before, li .icon-large:before { + width: 1.875em; +} + +ul.icons { + list-style-type: none; + margin-left: 2em; + text-indent: -0.8em; +} + +ul.icons li .icon, ul.icons li .wy-inline-validate.wy-inline-validate-success .wy-input-context, .wy-inline-validate.wy-inline-validate-success ul.icons li .wy-input-context, ul.icons li .wy-inline-validate.wy-inline-validate-danger .wy-input-context, .wy-inline-validate.wy-inline-validate-danger ul.icons li .wy-input-context, ul.icons li .wy-inline-validate.wy-inline-validate-warning .wy-input-context, .wy-inline-validate.wy-inline-validate-warning ul.icons li .wy-input-context, ul.icons li .wy-inline-validate.wy-inline-validate-info .wy-input-context, .wy-inline-validate.wy-inline-validate-info ul.icons li .wy-input-context, ul.icons li .wy-tag-input-group .wy-tag .wy-tag-remove, .wy-tag-input-group .wy-tag ul.icons li .wy-tag-remove, ul.icons li .rst-content .admonition-title, .rst-content ul.icons li .admonition-title, ul.icons li .rst-content h1 .headerlink, .rst-content h1 ul.icons li .headerlink, ul.icons li .rst-content h2 .headerlink, .rst-content h2 ul.icons li .headerlink, ul.icons li .rst-content h3 .headerlink, .rst-content h3 ul.icons li .headerlink, ul.icons li .rst-content h4 .headerlink, .rst-content h4 ul.icons li .headerlink, ul.icons li .rst-content h5 .headerlink, .rst-content h5 ul.icons li .headerlink, ul.icons li .rst-content h6 .headerlink, .rst-content h6 ul.icons li .headerlink, ul.icons li .rst-content dl dt .headerlink, .rst-content dl dt ul.icons li .headerlink { + width: 0.8em; +} + +ul.icons li .icon-large:before, ul.icons li .icon-large:before { + vertical-align: baseline; +} + +.icon-glass:before { + content: "\f000"; +} + +.icon-music:before { + content: "\f001"; +} + +.icon-search:before { + content: "\f002"; +} + +.icon-envelope-alt:before { + content: "\f003"; +} + +.icon-heart:before { + content: "\f004"; +} + +.icon-star:before { + content: "\f005"; +} + +.icon-star-empty:before { + content: "\f006"; +} + +.icon-user:before { + content: "\f007"; +} + +.icon-film:before { + content: "\f008"; +} + +.icon-th-large:before { + content: "\f009"; +} + +.icon-th:before { + content: "\f00a"; +} + +.icon-th-list:before { + content: "\f00b"; +} + +.icon-ok:before { + content: "\f00c"; +} + +.icon-remove:before, .wy-tag-input-group .wy-tag .wy-tag-remove:before { + content: "\f00d"; +} + +.icon-zoom-in:before { + content: "\f00e"; +} + +.icon-zoom-out:before { + content: "\f010"; +} + +.icon-power-off:before, .icon-off:before { + content: "\f011"; +} + +.icon-signal:before { + content: 
"\f012"; +} + +.icon-gear:before, .icon-cog:before { + content: "\f013"; +} + +.icon-trash:before { + content: "\f014"; +} + +.icon-home:before { + content: "\f015"; +} + +.icon-file-alt:before { + content: "\f016"; +} + +.icon-time:before { + content: "\f017"; +} + +.icon-road:before { + content: "\f018"; +} + +.icon-download-alt:before { + content: "\f019"; +} + +.icon-download:before { + content: "\f01a"; +} + +.icon-upload:before { + content: "\f01b"; +} + +.icon-inbox:before { + content: "\f01c"; +} + +.icon-play-circle:before { + content: "\f01d"; +} + +.icon-rotate-right:before, .icon-repeat:before { + content: "\f01e"; +} + +.icon-refresh:before { + content: "\f021"; +} + +.icon-list-alt:before { + content: "\f022"; +} + +.icon-lock:before { + content: "\f023"; +} + +.icon-flag:before { + content: "\f024"; +} + +.icon-headphones:before { + content: "\f025"; +} + +.icon-volume-off:before { + content: "\f026"; +} + +.icon-volume-down:before { + content: "\f027"; +} + +.icon-volume-up:before { + content: "\f028"; +} + +.icon-qrcode:before { + content: "\f029"; +} + +.icon-barcode:before { + content: "\f02a"; +} + +.icon-tag:before { + content: "\f02b"; +} + +.icon-tags:before { + content: "\f02c"; +} + +.icon-book:before { + content: "\f02d"; +} + +.icon-bookmark:before { + content: "\f02e"; +} + +.icon-print:before { + content: "\f02f"; +} + +.icon-camera:before { + content: "\f030"; +} + +.icon-font:before { + content: "\f031"; +} + +.icon-bold:before { + content: "\f032"; +} + +.icon-italic:before { + content: "\f033"; +} + +.icon-text-height:before { + content: "\f034"; +} + +.icon-text-width:before { + content: "\f035"; +} + +.icon-align-left:before { + content: "\f036"; +} + +.icon-align-center:before { + content: "\f037"; +} + +.icon-align-right:before { + content: "\f038"; +} + +.icon-align-justify:before { + content: "\f039"; +} + +.icon-list:before { + content: "\f03a"; +} + +.icon-indent-left:before { + content: "\f03b"; +} + +.icon-indent-right:before { + content: "\f03c"; +} + +.icon-facetime-video:before { + content: "\f03d"; +} + +.icon-picture:before { + content: "\f03e"; +} + +.icon-pencil:before { + content: "\f040"; +} + +.icon-map-marker:before { + content: "\f041"; +} + +.icon-adjust:before { + content: "\f042"; +} + +.icon-tint:before { + content: "\f043"; +} + +.icon-edit:before { + content: "\f044"; +} + +.icon-share:before { + content: "\f045"; +} + +.icon-check:before { + content: "\f046"; +} + +.icon-move:before { + content: "\f047"; +} + +.icon-step-backward:before { + content: "\f048"; +} + +.icon-fast-backward:before { + content: "\f049"; +} + +.icon-backward:before { + content: "\f04a"; +} + +.icon-play:before { + content: "\f04b"; +} + +.icon-pause:before { + content: "\f04c"; +} + +.icon-stop:before { + content: "\f04d"; +} + +.icon-forward:before { + content: "\f04e"; +} + +.icon-fast-forward:before { + content: "\f050"; +} + +.icon-step-forward:before { + content: "\f051"; +} + +.icon-eject:before { + content: "\f052"; +} + +.icon-chevron-left:before { + content: "\f053"; +} + +.icon-chevron-right:before { + content: "\f054"; +} + +.icon-plus-sign:before { + content: "\f055"; +} + +.icon-minus-sign:before { + content: "\f056"; +} + +.icon-remove-sign:before, .wy-inline-validate.wy-inline-validate-danger .wy-input-context:before { + content: "\f057"; +} + +.icon-ok-sign:before { + content: "\f058"; +} + +.icon-question-sign:before { + content: "\f059"; +} + +.icon-info-sign:before { + content: "\f05a"; +} + +.icon-screenshot:before { + content: 
"\f05b"; +} + +.icon-remove-circle:before { + content: "\f05c"; +} + +.icon-ok-circle:before { + content: "\f05d"; +} + +.icon-ban-circle:before { + content: "\f05e"; +} + +.icon-arrow-left:before { + content: "\f060"; +} + +.icon-arrow-right:before { + content: "\f061"; +} + +.icon-arrow-up:before { + content: "\f062"; +} + +.icon-arrow-down:before { + content: "\f063"; +} + +.icon-mail-forward:before, .icon-share-alt:before { + content: "\f064"; +} + +.icon-resize-full:before { + content: "\f065"; +} + +.icon-resize-small:before { + content: "\f066"; +} + +.icon-plus:before { + content: "\f067"; +} + +.icon-minus:before { + content: "\f068"; +} + +.icon-asterisk:before { + content: "\f069"; +} + +.icon-exclamation-sign:before, .wy-inline-validate.wy-inline-validate-warning .wy-input-context:before, .wy-inline-validate.wy-inline-validate-info .wy-input-context:before, .rst-content .admonition-title:before { + content: "\f06a"; +} + +.icon-gift:before { + content: "\f06b"; +} + +.icon-leaf:before { + content: "\f06c"; +} + +.icon-fire:before { + content: "\f06d"; +} + +.icon-eye-open:before { + content: "\f06e"; +} + +.icon-eye-close:before { + content: "\f070"; +} + +.icon-warning-sign:before { + content: "\f071"; +} + +.icon-plane:before { + content: "\f072"; +} + +.icon-calendar:before { + content: "\f073"; +} + +.icon-random:before { + content: "\f074"; +} + +.icon-comment:before { + content: "\f075"; +} + +.icon-magnet:before { + content: "\f076"; +} + +.icon-chevron-up:before { + content: "\f077"; +} + +.icon-chevron-down:before { + content: "\f078"; +} + +.icon-retweet:before { + content: "\f079"; +} + +.icon-shopping-cart:before { + content: "\f07a"; +} + +.icon-folder-close:before { + content: "\f07b"; +} + +.icon-folder-open:before { + content: "\f07c"; +} + +.icon-resize-vertical:before { + content: "\f07d"; +} + +.icon-resize-horizontal:before { + content: "\f07e"; +} + +.icon-bar-chart:before { + content: "\f080"; +} + +.icon-twitter-sign:before { + content: "\f081"; +} + +.icon-facebook-sign:before { + content: "\f082"; +} + +.icon-camera-retro:before { + content: "\f083"; +} + +.icon-key:before { + content: "\f084"; +} + +.icon-gears:before, .icon-cogs:before { + content: "\f085"; +} + +.icon-comments:before { + content: "\f086"; +} + +.icon-thumbs-up-alt:before { + content: "\f087"; +} + +.icon-thumbs-down-alt:before { + content: "\f088"; +} + +.icon-star-half:before { + content: "\f089"; +} + +.icon-heart-empty:before { + content: "\f08a"; +} + +.icon-signout:before { + content: "\f08b"; +} + +.icon-linkedin-sign:before { + content: "\f08c"; +} + +.icon-pushpin:before { + content: "\f08d"; +} + +.icon-external-link:before { + content: "\f08e"; +} + +.icon-signin:before { + content: "\f090"; +} + +.icon-trophy:before { + content: "\f091"; +} + +.icon-github-sign:before { + content: "\f092"; +} + +.icon-upload-alt:before { + content: "\f093"; +} + +.icon-lemon:before { + content: "\f094"; +} + +.icon-phone:before { + content: "\f095"; +} + +.icon-unchecked:before, .icon-check-empty:before { + content: "\f096"; +} + +.icon-bookmark-empty:before { + content: "\f097"; +} + +.icon-phone-sign:before { + content: "\f098"; +} + +.icon-twitter:before { + content: "\f099"; +} + +.icon-facebook:before { + content: "\f09a"; +} + +.icon-github:before { + content: "\f09b"; +} + +.icon-unlock:before { + content: "\f09c"; +} + +.icon-credit-card:before { + content: "\f09d"; +} + +.icon-rss:before { + content: "\f09e"; +} + +.icon-hdd:before { + content: "\f0a0"; +} + +.icon-bullhorn:before 
{ + content: "\f0a1"; +} + +.icon-bell:before { + content: "\f0a2"; +} + +.icon-certificate:before { + content: "\f0a3"; +} + +.icon-hand-right:before { + content: "\f0a4"; +} + +.icon-hand-left:before { + content: "\f0a5"; +} + +.icon-hand-up:before { + content: "\f0a6"; +} + +.icon-hand-down:before { + content: "\f0a7"; +} + +.icon-circle-arrow-left:before { + content: "\f0a8"; +} + +.icon-circle-arrow-right:before { + content: "\f0a9"; +} + +.icon-circle-arrow-up:before { + content: "\f0aa"; +} + +.icon-circle-arrow-down:before { + content: "\f0ab"; +} + +.icon-globe:before { + content: "\f0ac"; +} + +.icon-wrench:before { + content: "\f0ad"; +} + +.icon-tasks:before { + content: "\f0ae"; +} + +.icon-filter:before { + content: "\f0b0"; +} + +.icon-briefcase:before { + content: "\f0b1"; +} + +.icon-fullscreen:before { + content: "\f0b2"; +} + +.icon-group:before { + content: "\f0c0"; +} + +.icon-link:before { + content: "\f0c1"; +} + +.icon-cloud:before { + content: "\f0c2"; +} + +.icon-beaker:before { + content: "\f0c3"; +} + +.icon-cut:before { + content: "\f0c4"; +} + +.icon-copy:before { + content: "\f0c5"; +} + +.icon-paperclip:before, .icon-paper-clip:before { + content: "\f0c6"; +} + +.icon-save:before { + content: "\f0c7"; +} + +.icon-sign-blank:before { + content: "\f0c8"; +} + +.icon-reorder:before { + content: "\f0c9"; +} + +.icon-list-ul:before { + content: "\f0ca"; +} + +.icon-list-ol:before { + content: "\f0cb"; +} + +.icon-strikethrough:before { + content: "\f0cc"; +} + +.icon-underline:before { + content: "\f0cd"; +} + +.icon-table:before { + content: "\f0ce"; +} + +.icon-magic:before { + content: "\f0d0"; +} + +.icon-truck:before { + content: "\f0d1"; +} + +.icon-pinterest:before { + content: "\f0d2"; +} + +.icon-pinterest-sign:before { + content: "\f0d3"; +} + +.icon-google-plus-sign:before { + content: "\f0d4"; +} + +.icon-google-plus:before { + content: "\f0d5"; +} + +.icon-money:before { + content: "\f0d6"; +} + +.icon-caret-down:before { + content: "\f0d7"; +} + +.icon-caret-up:before { + content: "\f0d8"; +} + +.icon-caret-left:before { + content: "\f0d9"; +} + +.icon-caret-right:before { + content: "\f0da"; +} + +.icon-columns:before { + content: "\f0db"; +} + +.icon-sort:before { + content: "\f0dc"; +} + +.icon-sort-down:before { + content: "\f0dd"; +} + +.icon-sort-up:before { + content: "\f0de"; +} + +.icon-envelope:before { + content: "\f0e0"; +} + +.icon-linkedin:before { + content: "\f0e1"; +} + +.icon-rotate-left:before, .icon-undo:before { + content: "\f0e2"; +} + +.icon-legal:before { + content: "\f0e3"; +} + +.icon-dashboard:before { + content: "\f0e4"; +} + +.icon-comment-alt:before { + content: "\f0e5"; +} + +.icon-comments-alt:before { + content: "\f0e6"; +} + +.icon-bolt:before { + content: "\f0e7"; +} + +.icon-sitemap:before { + content: "\f0e8"; +} + +.icon-umbrella:before { + content: "\f0e9"; +} + +.icon-paste:before { + content: "\f0ea"; +} + +.icon-lightbulb:before { + content: "\f0eb"; +} + +.icon-exchange:before { + content: "\f0ec"; +} + +.icon-cloud-download:before { + content: "\f0ed"; +} + +.icon-cloud-upload:before { + content: "\f0ee"; +} + +.icon-user-md:before { + content: "\f0f0"; +} + +.icon-stethoscope:before { + content: "\f0f1"; +} + +.icon-suitcase:before { + content: "\f0f2"; +} + +.icon-bell-alt:before { + content: "\f0f3"; +} + +.icon-coffee:before { + content: "\f0f4"; +} + +.icon-food:before { + content: "\f0f5"; +} + +.icon-file-text-alt:before { + content: "\f0f6"; +} + +.icon-building:before { + content: "\f0f7"; +} + 
+.icon-hospital:before { + content: "\f0f8"; +} + +.icon-ambulance:before { + content: "\f0f9"; +} + +.icon-medkit:before { + content: "\f0fa"; +} + +.icon-fighter-jet:before { + content: "\f0fb"; +} + +.icon-beer:before { + content: "\f0fc"; +} + +.icon-h-sign:before { + content: "\f0fd"; +} + +.icon-plus-sign-alt:before { + content: "\f0fe"; +} + +.icon-double-angle-left:before { + content: "\f100"; +} + +.icon-double-angle-right:before { + content: "\f101"; +} + +.icon-double-angle-up:before { + content: "\f102"; +} + +.icon-double-angle-down:before { + content: "\f103"; +} + +.icon-angle-left:before { + content: "\f104"; +} + +.icon-angle-right:before { + content: "\f105"; +} + +.icon-angle-up:before { + content: "\f106"; +} + +.icon-angle-down:before { + content: "\f107"; +} + +.icon-desktop:before { + content: "\f108"; +} + +.icon-laptop:before { + content: "\f109"; +} + +.icon-tablet:before { + content: "\f10a"; +} + +.icon-mobile-phone:before { + content: "\f10b"; +} + +.icon-circle-blank:before { + content: "\f10c"; +} + +.icon-quote-left:before { + content: "\f10d"; +} + +.icon-quote-right:before { + content: "\f10e"; +} + +.icon-spinner:before { + content: "\f110"; +} + +.icon-circle:before { + content: "\f111"; +} + +.icon-mail-reply:before, .icon-reply:before { + content: "\f112"; +} + +.icon-github-alt:before { + content: "\f113"; +} + +.icon-folder-close-alt:before { + content: "\f114"; +} + +.icon-folder-open-alt:before { + content: "\f115"; +} + +.icon-expand-alt:before { + content: "\f116"; +} + +.icon-collapse-alt:before { + content: "\f117"; +} + +.icon-smile:before { + content: "\f118"; +} + +.icon-frown:before { + content: "\f119"; +} + +.icon-meh:before { + content: "\f11a"; +} + +.icon-gamepad:before { + content: "\f11b"; +} + +.icon-keyboard:before { + content: "\f11c"; +} + +.icon-flag-alt:before { + content: "\f11d"; +} + +.icon-flag-checkered:before { + content: "\f11e"; +} + +.icon-terminal:before { + content: "\f120"; +} + +.icon-code:before { + content: "\f121"; +} + +.icon-reply-all:before { + content: "\f122"; +} + +.icon-mail-reply-all:before { + content: "\f122"; +} + +.icon-star-half-full:before, .icon-star-half-empty:before { + content: "\f123"; +} + +.icon-location-arrow:before { + content: "\f124"; +} + +.icon-crop:before { + content: "\f125"; +} + +.icon-code-fork:before { + content: "\f126"; +} + +.icon-unlink:before { + content: "\f127"; +} + +.icon-question:before { + content: "\f128"; +} + +.icon-info:before { + content: "\f129"; +} + +.icon-exclamation:before { + content: "\f12a"; +} + +.icon-superscript:before { + content: "\f12b"; +} + +.icon-subscript:before { + content: "\f12c"; +} + +.icon-eraser:before { + content: "\f12d"; +} + +.icon-puzzle-piece:before { + content: "\f12e"; +} + +.icon-microphone:before { + content: "\f130"; +} + +.icon-microphone-off:before { + content: "\f131"; +} + +.icon-shield:before { + content: "\f132"; +} + +.icon-calendar-empty:before { + content: "\f133"; +} + +.icon-fire-extinguisher:before { + content: "\f134"; +} + +.icon-rocket:before { + content: "\f135"; +} + +.icon-maxcdn:before { + content: "\f136"; +} + +.icon-chevron-sign-left:before { + content: "\f137"; +} + +.icon-chevron-sign-right:before { + content: "\f138"; +} + +.icon-chevron-sign-up:before { + content: "\f139"; +} + +.icon-chevron-sign-down:before { + content: "\f13a"; +} + +.icon-html5:before { + content: "\f13b"; +} + +.icon-css3:before { + content: "\f13c"; +} + +.icon-anchor:before { + content: "\f13d"; +} + +.icon-unlock-alt:before { + 
content: "\f13e"; +} + +.icon-bullseye:before { + content: "\f140"; +} + +.icon-ellipsis-horizontal:before { + content: "\f141"; +} + +.icon-ellipsis-vertical:before { + content: "\f142"; +} + +.icon-rss-sign:before { + content: "\f143"; +} + +.icon-play-sign:before { + content: "\f144"; +} + +.icon-ticket:before { + content: "\f145"; +} + +.icon-minus-sign-alt:before { + content: "\f146"; +} + +.icon-check-minus:before { + content: "\f147"; +} + +.icon-level-up:before { + content: "\f148"; +} + +.icon-level-down:before { + content: "\f149"; +} + +.icon-check-sign:before, .wy-inline-validate.wy-inline-validate-success .wy-input-context:before { + content: "\f14a"; +} + +.icon-edit-sign:before { + content: "\f14b"; +} + +.icon-external-link-sign:before { + content: "\f14c"; +} + +.icon-share-sign:before { + content: "\f14d"; +} + +.icon-compass:before { + content: "\f14e"; +} + +.icon-collapse:before { + content: "\f150"; +} + +.icon-collapse-top:before { + content: "\f151"; +} + +.icon-expand:before { + content: "\f152"; +} + +.icon-euro:before, .icon-eur:before { + content: "\f153"; +} + +.icon-gbp:before { + content: "\f154"; +} + +.icon-dollar:before, .icon-usd:before { + content: "\f155"; +} + +.icon-rupee:before, .icon-inr:before { + content: "\f156"; +} + +.icon-yen:before, .icon-jpy:before { + content: "\f157"; +} + +.icon-renminbi:before, .icon-cny:before { + content: "\f158"; +} + +.icon-won:before, .icon-krw:before { + content: "\f159"; +} + +.icon-bitcoin:before, .icon-btc:before { + content: "\f15a"; +} + +.icon-file:before { + content: "\f15b"; +} + +.icon-file-text:before { + content: "\f15c"; +} + +.icon-sort-by-alphabet:before { + content: "\f15d"; +} + +.icon-sort-by-alphabet-alt:before { + content: "\f15e"; +} + +.icon-sort-by-attributes:before { + content: "\f160"; +} + +.icon-sort-by-attributes-alt:before { + content: "\f161"; +} + +.icon-sort-by-order:before { + content: "\f162"; +} + +.icon-sort-by-order-alt:before { + content: "\f163"; +} + +.icon-thumbs-up:before { + content: "\f164"; +} + +.icon-thumbs-down:before { + content: "\f165"; +} + +.icon-youtube-sign:before { + content: "\f166"; +} + +.icon-youtube:before { + content: "\f167"; +} + +.icon-xing:before { + content: "\f168"; +} + +.icon-xing-sign:before { + content: "\f169"; +} + +.icon-youtube-play:before { + content: "\f16a"; +} + +.icon-dropbox:before { + content: "\f16b"; +} + +.icon-stackexchange:before { + content: "\f16c"; +} + +.icon-instagram:before { + content: "\f16d"; +} + +.icon-flickr:before { + content: "\f16e"; +} + +.icon-adn:before { + content: "\f170"; +} + +.icon-bitbucket:before { + content: "\f171"; +} + +.icon-bitbucket-sign:before { + content: "\f172"; +} + +.icon-tumblr:before { + content: "\f173"; +} + +.icon-tumblr-sign:before { + content: "\f174"; +} + +.icon-long-arrow-down:before { + content: "\f175"; +} + +.icon-long-arrow-up:before { + content: "\f176"; +} + +.icon-long-arrow-left:before { + content: "\f177"; +} + +.icon-long-arrow-right:before { + content: "\f178"; +} + +.icon-apple:before { + content: "\f179"; +} + +.icon-windows:before { + content: "\f17a"; +} + +.icon-android:before { + content: "\f17b"; +} + +.icon-linux:before { + content: "\f17c"; +} + +.icon-dribbble:before { + content: "\f17d"; +} + +.icon-skype:before { + content: "\f17e"; +} + +.icon-foursquare:before { + content: "\f180"; +} + +.icon-trello:before { + content: "\f181"; +} + +.icon-female:before { + content: "\f182"; +} + +.icon-male:before { + content: "\f183"; +} + +.icon-gittip:before { + 
content: "\f184"; +} + +.icon-sun:before { + content: "\f185"; +} + +.icon-moon:before { + content: "\f186"; +} + +.icon-archive:before { + content: "\f187"; +} + +.icon-bug:before { + content: "\f188"; +} + +.icon-vk:before { + content: "\f189"; +} + +.icon-weibo:before { + content: "\f18a"; +} + +.icon-renren:before { + content: "\f18b"; +} + +.wy-alert, .rst-content .note, .rst-content .attention, .rst-content .caution, .rst-content .danger, .rst-content .error, .rst-content .hint, .rst-content .important, .rst-content .tip, .rst-content .warning { + padding: 24px; + line-height: 24px; + margin-bottom: 24px; + border-left: solid 3px transparent; +} + +.wy-alert strong, .rst-content .note strong, .rst-content .attention strong, .rst-content .caution strong, .rst-content .danger strong, .rst-content .error strong, .rst-content .hint strong, .rst-content .important strong, .rst-content .tip strong, .rst-content .warning strong, .wy-alert a, .rst-content .note a, .rst-content .attention a, .rst-content .caution a, .rst-content .danger a, .rst-content .error a, .rst-content .hint a, .rst-content .important a, .rst-content .tip a, .rst-content .warning a { + color: #fff; +} + +.wy-alert.wy-alert-danger, .rst-content .wy-alert-danger.note, .rst-content .wy-alert-danger.attention, .rst-content .wy-alert-danger.caution, .rst-content .danger, .rst-content .error, .rst-content .wy-alert-danger.hint, .rst-content .wy-alert-danger.important, .rst-content .wy-alert-danger.tip, .rst-content .wy-alert-danger.warning { + background: #e74c3c; + color: #fff; + border-color: #d62c1a; +} + +.wy-alert.wy-alert-warning, .rst-content .wy-alert-warning.note, .rst-content .attention, .rst-content .caution, .rst-content .wy-alert-warning.danger, .rst-content .wy-alert-warning.error, .rst-content .wy-alert-warning.hint, .rst-content .wy-alert-warning.important, .rst-content .wy-alert-warning.tip, .rst-content .warning { + background: #e67e22; + color: #fff; + border-color: #bf6516; +} + +.wy-alert.wy-alert-info, .rst-content .note, .rst-content .wy-alert-info.attention, .rst-content .wy-alert-info.caution, .rst-content .wy-alert-info.danger, .rst-content .wy-alert-info.error, .rst-content .hint, .rst-content .important, .rst-content .tip, .rst-content .wy-alert-info.warning { + background: #2980b9; + color: #fff; + border-color: #20638f; +} + +.wy-alert.wy-alert-success, .rst-content .wy-alert-success.note, .rst-content .wy-alert-success.attention, .rst-content .wy-alert-success.caution, .rst-content .wy-alert-success.danger, .rst-content .wy-alert-success.error, .rst-content .wy-alert-success.hint, .rst-content .wy-alert-success.important, .rst-content .wy-alert-success.tip, .rst-content .wy-alert-success.warning { + background: #27ae60; + color: #fff; + border-color: #1e8449; +} + +.wy-alert.wy-alert-neutral, .rst-content .wy-alert-neutral.note, .rst-content .wy-alert-neutral.attention, .rst-content .wy-alert-neutral.caution, .rst-content .wy-alert-neutral.danger, .rst-content .wy-alert-neutral.error, .rst-content .wy-alert-neutral.hint, .rst-content .wy-alert-neutral.important, .rst-content .wy-alert-neutral.tip, .rst-content .wy-alert-neutral.warning { + background: #f3f6f6; + border-color: #e1e4e5; +} + +.wy-alert.wy-alert-neutral strong, .rst-content .wy-alert-neutral.note strong, .rst-content .wy-alert-neutral.attention strong, .rst-content .wy-alert-neutral.caution strong, .rst-content .wy-alert-neutral.danger strong, .rst-content .wy-alert-neutral.error strong, .rst-content .wy-alert-neutral.hint strong, 
.rst-content .wy-alert-neutral.important strong, .rst-content .wy-alert-neutral.tip strong, .rst-content .wy-alert-neutral.warning strong { + color: #404040; +} + +.wy-alert.wy-alert-neutral a, .rst-content .wy-alert-neutral.note a, .rst-content .wy-alert-neutral.attention a, .rst-content .wy-alert-neutral.caution a, .rst-content .wy-alert-neutral.danger a, .rst-content .wy-alert-neutral.error a, .rst-content .wy-alert-neutral.hint a, .rst-content .wy-alert-neutral.important a, .rst-content .wy-alert-neutral.tip a, .rst-content .wy-alert-neutral.warning a { + color: #2980b9; +} + +.wy-tray-container { + position: fixed; + top: -50px; + left: 0; + width: 100%; + -webkit-transition: top 0.2s ease-in; + -moz-transition: top 0.2s ease-in; + transition: top 0.2s ease-in; +} + +.wy-tray-container.on { + top: 0; +} + +.wy-tray-container li { + display: none; + width: 100%; + background: #343131; + padding: 12px 24px; + color: #fff; + margin-bottom: 6px; + text-align: center; + box-shadow: 0 5px 5px 0 rgba(0, 0, 0, 0.1), 0px -1px 2px -1px rgba(255, 255, 255, 0.5) inset; +} + +.wy-tray-container li.wy-tray-item-success { + background: #27ae60; +} + +.wy-tray-container li.wy-tray-item-info { + background: #2980b9; +} + +.wy-tray-container li.wy-tray-item-warning { + background: #e67e22; +} + +.wy-tray-container li.wy-tray-item-danger { + background: #e74c3c; +} + +.btn { + display: inline-block; + *display: inline; + zoom: 1; + line-height: normal; + white-space: nowrap; + vertical-align: baseline; + text-align: center; + cursor: pointer; + -webkit-user-drag: none; + -webkit-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; + font-size: 100%; + padding: 6px 12px; + color: #fff; + border: 1px solid rgba(0, 0, 0, 0.1); + border-bottom: solid 3px rgba(0, 0, 0, 0.1); + background-color: #27ae60; + text-decoration: none; + font-weight: 500; + box-shadow: 0px 1px 2px -1px rgba(255, 255, 255, 0.5) inset; + -webkit-transition: all 0.1s linear; + -moz-transition: all 0.1s linear; + transition: all 0.1s linear; + outline-none: false; +} + +.btn-hover { + background: #2e8ece; + color: #fff; +} + +.btn:hover { + background: #2cc36b; + color: #fff; +} + +.btn:focus { + background: #2cc36b; + color: #fff; + outline: 0; +} + +.btn:active { + border-top: solid 3px rgba(0, 0, 0, 0.1); + border-bottom: solid 1px rgba(0, 0, 0, 0.1); + box-shadow: 0px 1px 2px -1px rgba(0, 0, 0, 0.5) inset; +} + +.btn[disabled] { + background-image: none; + filter: progid:DXImageTransform.Microsoft.gradient(enabled = false); + filter: alpha(opacity=40); + opacity: 0.4; + cursor: not-allowed; + box-shadow: none; +} + +.btn-disabled { + background-image: none; + filter: progid:DXImageTransform.Microsoft.gradient(enabled = false); + filter: alpha(opacity=40); + opacity: 0.4; + cursor: not-allowed; + box-shadow: none; +} + +.btn-disabled:hover, .btn-disabled:focus, .btn-disabled:active { + background-image: none; + filter: progid:DXImageTransform.Microsoft.gradient(enabled = false); + filter: alpha(opacity=40); + opacity: 0.4; + cursor: not-allowed; + box-shadow: none; +} + +.btn::-moz-focus-inner { + padding: 0; + border: 0; +} + +.btn-small { + font-size: 80%; +} + +.btn-info { + background-color: #2980b9 !important; +} + +.btn-info:hover { + background-color: #2e8ece !important; +} + +.btn-neutral { + background-color: #f3f6f6 !important; + color: #404040 !important; +} + +.btn-neutral:hover { + background-color: #e5ebeb !important; + color: #404040; +} + +.btn-danger { + background-color: #e74c3c 
!important; +} + +.btn-danger:hover { + background-color: #ea6153 !important; +} + +.btn-warning { + background-color: #e67e22 !important; +} + +.btn-warning:hover { + background-color: #e98b39 !important; +} + +.btn-invert { + background-color: #343131; +} + +.btn-invert:hover { + background-color: #413d3d !important; +} + +.btn-link { + background-color: transparent !important; + color: #2980b9; + border-color: transparent; +} + +.btn-link:hover { + background-color: transparent !important; + color: #409ad5; + border-color: transparent; +} + +.btn-link:active { + background-color: transparent !important; + border-color: transparent; + border-top: solid 1px transparent; + border-bottom: solid 3px transparent; +} + +.wy-btn-group .btn, .wy-control .btn { + vertical-align: middle; +} + +.wy-btn-group { + margin-bottom: 24px; + *zoom: 1; +} + +.wy-btn-group:before, .wy-btn-group:after { + display: table; + content: ""; +} + +.wy-btn-group:after { + clear: both; +} + +.wy-dropdown { + position: relative; + display: inline-block; +} + +.wy-dropdown:hover .wy-dropdown-menu { + display: block; +} + +.wy-dropdown .caret:after { + font-family: fontawesome-webfont; + content: "\f0d7"; + font-size: 70%; +} + +.wy-dropdown-menu { + position: absolute; + top: 100%; + left: 0; + display: none; + float: left; + min-width: 100%; + background: #fcfcfc; + z-index: 100; + border: solid 1px #cfd7dd; + box-shadow: 0 5px 5px 0 rgba(0, 0, 0, 0.1); + padding: 12px; +} + +.wy-dropdown-menu>dd>a { + display: block; + clear: both; + color: #404040; + white-space: nowrap; + font-size: 90%; + padding: 0 12px; +} + +.wy-dropdown-menu>dd>a:hover { + background: #2980b9; + color: #fff; +} + +.wy-dropdown-menu>dd.divider { + border-top: solid 1px #cfd7dd; + margin: 6px 0; +} + +.wy-dropdown-menu>dd.search { + padding-bottom: 12px; +} + +.wy-dropdown-menu>dd.search input[type="search"] { + width: 100%; +} + +.wy-dropdown-menu>dd.call-to-action { + background: #e3e3e3; + text-transform: uppercase; + font-weight: 500; + font-size: 80%; +} + +.wy-dropdown-menu>dd.call-to-action:hover { + background: #e3e3e3; +} + +.wy-dropdown-menu>dd.call-to-action .btn { + color: #fff; +} + +.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu { + background: #fcfcfc; + margin-top: 2px; +} + +.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a { + padding: 6px 12px; +} + +.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a:hover { + background: #2980b9; + color: #fff; +} + +.wy-dropdown.wy-dropdown-left .wy-dropdown-menu { + right: 0; + text-align: right; +} + +.wy-dropdown-arrow:before { + content: " "; + border-bottom: 5px solid #f5f5f5; + border-left: 5px solid transparent; + border-right: 5px solid transparent; + position: absolute; + display: block; + top: -4px; + left: 50%; + margin-left: -3px; +} + +.wy-dropdown-arrow.wy-dropdown-arrow-left:before { + left: 11px; +} + +.wy-form-stacked select { + display: block; +} + +.wy-form-aligned input, .wy-form-aligned textarea, .wy-form-aligned select, .wy-form-aligned .wy-help-inline, .wy-form-aligned label { + display: inline-block; + *display: inline; + *zoom: 1; + vertical-align: middle; +} + +.wy-form-aligned .wy-control-group>label { + display: inline-block; + vertical-align: middle; + width: 10em; + margin: 0.5em 1em 0 0; + float: left; +} + +.wy-form-aligned .wy-control { + float: left; +} + +.wy-form-aligned .wy-control label { + display: block; +} + +.wy-form-aligned .wy-control select { + margin-top: 0.5em; +} + +fieldset { + border: 0; + margin: 0; + padding: 0; +} + +legend { + 
display: block; + width: 100%; + border: 0; + padding: 0; + white-space: normal; + margin-bottom: 24px; + font-size: 150%; + *margin-left: -7px; +} + +label { + display: block; + margin: 0 0 0.3125em 0; + color: #999; + font-size: 90%; +} + +button, input, select, textarea { + font-size: 100%; + margin: 0; + vertical-align: baseline; + *vertical-align: middle; +} + +button, input { + line-height: normal; +} + +button { + -webkit-appearance: button; + cursor: pointer; + *overflow: visible; +} + +button::-moz-focus-inner, input::-moz-focus-inner { + border: 0; + padding: 0; +} + +button[disabled] { + cursor: default; +} + +input[type="button"], input[type="reset"], input[type="submit"] { + -webkit-appearance: button; + cursor: pointer; + *overflow: visible; +} + +input[type="text"], input[type="password"], input[type="email"], input[type="url"], input[type="date"], input[type="month"], input[type="time"], input[type="datetime"], input[type="datetime-local"], input[type="week"], input[type="number"], input[type="search"], input[type="tel"], input[type="color"] { + -webkit-appearance: none; + padding: 6px; + display: inline-block; + border: 1px solid #ccc; + font-size: 80%; + font-family: "Lato", "proxima-nova", "Helvetica Neue", Arial, sans-serif; + box-shadow: inset 0 1px 3px #ddd; + border-radius: 0; + -webkit-transition: border 0.3s linear; + -moz-transition: border 0.3s linear; + transition: border 0.3s linear; +} + +input[type="datetime-local"] { + padding: 0.34375em 0.625em; +} + +input[disabled] { + cursor: default; +} + +input[type="checkbox"], input[type="radio"] { + -webkit-box-sizing: border-box; + -moz-box-sizing: border-box; + box-sizing: border-box; + padding: 0; + margin-right: 0.3125em; + *height: 13px; + *width: 13px; +} + +input[type="search"] { + -webkit-box-sizing: border-box; + -moz-box-sizing: border-box; + box-sizing: border-box; +} + +input[type="search"]::-webkit-search-cancel-button, input[type="search"]::-webkit-search-decoration { + -webkit-appearance: none; +} + +input[type="text"]:focus, input[type="password"]:focus, input[type="email"]:focus, input[type="url"]:focus, input[type="date"]:focus, input[type="month"]:focus, input[type="time"]:focus, input[type="datetime"]:focus, input[type="datetime-local"]:focus, input[type="week"]:focus, input[type="number"]:focus, input[type="search"]:focus, input[type="tel"]:focus, input[type="color"]:focus { + outline: 0; + outline: thin dotted \9; + border-color: #2980b9; +} + +input.no-focus:focus { + border-color: #ccc !important; +} + +input[type="file"]:focus, input[type="radio"]:focus, input[type="checkbox"]:focus { + outline: thin dotted #333; + outline: 1px auto #129fea; +} + +input[type="text"][disabled], input[type="password"][disabled], input[type="email"][disabled], input[type="url"][disabled], input[type="date"][disabled], input[type="month"][disabled], input[type="time"][disabled], input[type="datetime"][disabled], input[type="datetime-local"][disabled], input[type="week"][disabled], input[type="number"][disabled], input[type="search"][disabled], input[type="tel"][disabled], input[type="color"][disabled] { + cursor: not-allowed; + background-color: #f3f6f6; + color: #cad2d3; +} + +input:focus:invalid, textarea:focus:invalid, select:focus:invalid { + color: #e74c3c; + border: 1px solid #e74c3c; +} + +input:focus:invalid:focus, textarea:focus:invalid:focus, select:focus:invalid:focus { + border-color: #e9322d; +} + +input[type="file"]:focus:invalid:focus, input[type="radio"]:focus:invalid:focus, 
input[type="checkbox"]:focus:invalid:focus { + outline-color: #e9322d; +} + +input.wy-input-large { + padding: 12px; + font-size: 100%; +} + +textarea { + overflow: auto; + vertical-align: top; + width: 100%; +} + +select, textarea { + padding: 0.5em 0.625em; + display: inline-block; + border: 1px solid #ccc; + font-size: 0.8em; + box-shadow: inset 0 1px 3px #ddd; + -webkit-transition: border 0.3s linear; + -moz-transition: border 0.3s linear; + transition: border 0.3s linear; +} + +select { + border: 1px solid #ccc; + background-color: #fff; +} + +select[multiple] { + height: auto; +} + +select:focus, textarea:focus { + outline: 0; +} + +select[disabled], textarea[disabled], input[readonly], select[readonly], textarea[readonly] { + cursor: not-allowed; + background-color: #fff; + color: #cad2d3; + border-color: transparent; +} + +.wy-checkbox, .wy-radio { + margin: 0.5em 0; + color: #404040 !important; + display: block; +} + +.wy-form-message-inline { + display: inline-block; + *display: inline; + *zoom: 1; + vertical-align: middle; +} + +.wy-input-prefix, .wy-input-suffix { + white-space: nowrap; +} + +.wy-input-prefix .wy-input-context, .wy-input-suffix .wy-input-context { + padding: 6px; + display: inline-block; + font-size: 80%; + background-color: #f3f6f6; + border: solid 1px #ccc; + color: #999; +} + +.wy-input-suffix .wy-input-context { + border-left: 0; +} + +.wy-input-prefix .wy-input-context { + border-right: 0; +} + +.wy-inline-validate { + white-space: nowrap; +} + +.wy-inline-validate .wy-input-context { + padding: 0.5em 0.625em; + display: inline-block; + font-size: 80%; +} + +.wy-inline-validate.wy-inline-validate-success .wy-input-context { + color: #27ae60; +} + +.wy-inline-validate.wy-inline-validate-danger .wy-input-context { + color: #e74c3c; +} + +.wy-inline-validate.wy-inline-validate-warning .wy-input-context { + color: #e67e22; +} + +.wy-inline-validate.wy-inline-validate-info .wy-input-context { + color: #2980b9; +} + +.wy-control-group { + margin-bottom: 24px; + *zoom: 1; +} + +.wy-control-group:before, .wy-control-group:after { + display: table; + content: ""; +} + +.wy-control-group:after { + clear: both; +} + +.wy-control-group.wy-control-group-error .wy-form-message, .wy-control-group.wy-control-group-error label { + color: #e74c3c; +} + +.wy-control-group.wy-control-group-error input[type="text"], .wy-control-group.wy-control-group-error input[type="password"], .wy-control-group.wy-control-group-error input[type="email"], .wy-control-group.wy-control-group-error input[type="url"], .wy-control-group.wy-control-group-error input[type="date"], .wy-control-group.wy-control-group-error input[type="month"], .wy-control-group.wy-control-group-error input[type="time"], .wy-control-group.wy-control-group-error input[type="datetime"], .wy-control-group.wy-control-group-error input[type="datetime-local"], .wy-control-group.wy-control-group-error input[type="week"], .wy-control-group.wy-control-group-error input[type="number"], .wy-control-group.wy-control-group-error input[type="search"], .wy-control-group.wy-control-group-error input[type="tel"], .wy-control-group.wy-control-group-error input[type="color"] { + border: solid 2px #e74c3c; +} + +.wy-control-group.wy-control-group-error textarea { + border: solid 2px #e74c3c; +} + +.wy-control-group.fluid-input input[type="text"], .wy-control-group.fluid-input input[type="password"], .wy-control-group.fluid-input input[type="email"], .wy-control-group.fluid-input input[type="url"], .wy-control-group.fluid-input 
input[type="date"], .wy-control-group.fluid-input input[type="month"], .wy-control-group.fluid-input input[type="time"], .wy-control-group.fluid-input input[type="datetime"], .wy-control-group.fluid-input input[type="datetime-local"], .wy-control-group.fluid-input input[type="week"], .wy-control-group.fluid-input input[type="number"], .wy-control-group.fluid-input input[type="search"], .wy-control-group.fluid-input input[type="tel"], .wy-control-group.fluid-input input[type="color"] { + width: 100%; +} + +.wy-form-message-inline { + display: inline-block; + padding-left: 0.3em; + color: #666; + vertical-align: middle; + font-size: 90%; +} + +.wy-form-message { + display: block; + color: #ccc; + font-size: 70%; + margin-top: 0.3125em; + font-style: italic; +} + +.wy-tag-input-group { + padding: 4px 4px 0px 4px; + display: inline-block; + border: 1px solid #ccc; + font-size: 80%; + font-family: "Lato", "proxima-nova", "Helvetica Neue", Arial, sans-serif; + box-shadow: inset 0 1px 3px #ddd; + -webkit-transition: border 0.3s linear; + -moz-transition: border 0.3s linear; + transition: border 0.3s linear; +} + +.wy-tag-input-group .wy-tag { + display: inline-block; + background-color: rgba(0, 0, 0, 0.1); + padding: 0.5em 0.625em; + border-radius: 2px; + position: relative; + margin-bottom: 4px; +} + +.wy-tag-input-group .wy-tag .wy-tag-remove { + color: #ccc; + margin-left: 5px; +} + +.wy-tag-input-group .wy-tag .wy-tag-remove:hover { + color: #e74c3c; +} + +.wy-tag-input-group label { + margin-left: 5px; + display: inline-block; + margin-bottom: 0; +} + +.wy-tag-input-group input { + border: none; + font-size: 100%; + margin-bottom: 4px; + box-shadow: none; +} + +.wy-form-upload { + border: solid 1px #ccc; + border-bottom: solid 3px #ccc; + background-color: #fff; + padding: 24px; + display: inline-block; + text-align: center; + cursor: pointer; + color: #404040; + -webkit-transition: border-color 0.1s ease-in; + -moz-transition: border-color 0.1s ease-in; + transition: border-color 0.1s ease-in; + *zoom: 1; +} + +.wy-form-upload:before, .wy-form-upload:after { + display: table; + content: ""; +} + +.wy-form-upload:after { + clear: both; +} + +@media screen and (max-width: 480px) { + .wy-form-upload { + width: 100%; + } +} + +.wy-form-upload .image-drop { + display: none; +} + +.wy-form-upload .image-desktop { + display: none; +} + +.wy-form-upload .image-loading { + display: none; +} + +.wy-form-upload .wy-form-upload-icon { + display: block; + font-size: 32px; + color: #b3b3b3; +} + +.wy-form-upload .image-drop .wy-form-upload-icon { + color: #27ae60; +} + +.wy-form-upload p { + font-size: 90%; +} + +.wy-form-upload .wy-form-upload-image { + float: left; + margin-right: 24px; +} + +@media screen and (max-width: 480px) { + .wy-form-upload .wy-form-upload-image { + width: 100%; + margin-bottom: 24px; + } +} + +.wy-form-upload img { + max-width: 125px; + max-height: 125px; + opacity: 0.9; + -webkit-transition: opacity 0.1s ease-in; + -moz-transition: opacity 0.1s ease-in; + transition: opacity 0.1s ease-in; +} + +.wy-form-upload .wy-form-upload-content { + float: left; +} + +@media screen and (max-width: 480px) { + .wy-form-upload .wy-form-upload-content { + width: 100%; + } +} + +.wy-form-upload:hover { + border-color: #b3b3b3; + color: #404040; +} + +.wy-form-upload:hover .image-desktop { + display: block; +} + +.wy-form-upload:hover .image-drag { + display: none; +} + +.wy-form-upload:hover img { + opacity: 1; +} + +.wy-form-upload:active { + border-top: solid 3px #ccc; + border-bottom: 
solid 1px #ccc; +} + +.wy-form-upload.wy-form-upload-big { + width: 100%; + text-align: center; + padding: 72px; +} + +.wy-form-upload.wy-form-upload-big .wy-form-upload-content { + float: none; +} + +.wy-form-upload.wy-form-upload-file p { + margin-bottom: 0; +} + +.wy-form-upload.wy-form-upload-file .wy-form-upload-icon { + display: inline-block; + font-size: inherit; +} + +.wy-form-upload.wy-form-upload-drop { + background-color: #ddf7e8; +} + +.wy-form-upload.wy-form-upload-drop .image-drop { + display: block; +} + +.wy-form-upload.wy-form-upload-drop .image-desktop { + display: none; +} + +.wy-form-upload.wy-form-upload-drop .image-drag { + display: none; +} + +.wy-form-upload.wy-form-upload-loading .image-drag { + display: none; +} + +.wy-form-upload.wy-form-upload-loading .image-desktop { + display: none; +} + +.wy-form-upload.wy-form-upload-loading .image-loading { + display: block; +} + +.wy-form-upload.wy-form-upload-loading .wy-input-prefix { + display: none; +} + +.wy-form-upload.wy-form-upload-loading p { + margin-bottom: 0; +} + +.rotate-90 { + -webkit-transform: rotate(90deg); + -moz-transform: rotate(90deg); + -ms-transform: rotate(90deg); + -o-transform: rotate(90deg); + transform: rotate(90deg); +} + +.rotate-180 { + -webkit-transform: rotate(180deg); + -moz-transform: rotate(180deg); + -ms-transform: rotate(180deg); + -o-transform: rotate(180deg); + transform: rotate(180deg); +} + +.rotate-270 { + -webkit-transform: rotate(270deg); + -moz-transform: rotate(270deg); + -ms-transform: rotate(270deg); + -o-transform: rotate(270deg); + transform: rotate(270deg); +} + +.mirror { + -webkit-transform: scaleX(-1); + -moz-transform: scaleX(-1); + -ms-transform: scaleX(-1); + -o-transform: scaleX(-1); + transform: scaleX(-1); +} + +.mirror.rotate-90 { + -webkit-transform: scaleX(-1) rotate(90deg); + -moz-transform: scaleX(-1) rotate(90deg); + -ms-transform: scaleX(-1) rotate(90deg); + -o-transform: scaleX(-1) rotate(90deg); + transform: scaleX(-1) rotate(90deg); +} + +.mirror.rotate-180 { + -webkit-transform: scaleX(-1) rotate(180deg); + -moz-transform: scaleX(-1) rotate(180deg); + -ms-transform: scaleX(-1) rotate(180deg); + -o-transform: scaleX(-1) rotate(180deg); + transform: scaleX(-1) rotate(180deg); +} + +.mirror.rotate-270 { + -webkit-transform: scaleX(-1) rotate(270deg); + -moz-transform: scaleX(-1) rotate(270deg); + -ms-transform: scaleX(-1) rotate(270deg); + -o-transform: scaleX(-1) rotate(270deg); + transform: scaleX(-1) rotate(270deg); +} + +.wy-form-gallery-manage { + margin-left: -12px; + margin-right: -12px; +} + +.wy-form-gallery-manage li { + float: left; + padding: 12px; + width: 20%; + cursor: pointer; +} + +@media screen and (max-width: 768px) { + .wy-form-gallery-manage li { + width: 25%; + } +} + +@media screen and (max-width: 480px) { + .wy-form-gallery-manage li { + width: 50%; + } +} + +.wy-form-gallery-manage li:active { + cursor: move; +} + +.wy-form-gallery-manage li>a { + padding: 12px; + background-color: #fff; + border: solid 1px #e1e4e5; + border-bottom: solid 3px #e1e4e5; + display: inline-block; + -webkit-transition: all 0.1s ease-in; + -moz-transition: all 0.1s ease-in; + transition: all 0.1s ease-in; +} + +.wy-form-gallery-manage li>a:active { + border: solid 1px #ccc; + border-top: solid 3px #ccc; +} + +.wy-form-gallery-manage img { + width: 100%; + -webkit-transition: all 0.05s ease-in; + -moz-transition: all 0.05s ease-in; + transition: all 0.05s ease-in; +} + +li.wy-form-gallery-edit { + position: relative; + color: #fff; + padding: 24px; + 
width: 100%; + display: block; + background-color: #343131; + border-radius: 4px; +} + +li.wy-form-gallery-edit .arrow { + position: absolute; + display: block; + top: -50px; + left: 50%; + margin-left: -25px; + z-index: 500; + height: 0; + width: 0; + border-color: transparent; + border-style: solid; + border-width: 25px; + border-bottom-color: #343131; +} + +@media only screen and (max-width: 480px) { + .wy-form button[type="submit"] { + margin: 0.7em 0 0; + } + + .wy-form input[type="text"], .wy-form input[type="password"], .wy-form input[type="email"], .wy-form input[type="url"], .wy-form input[type="date"], .wy-form input[type="month"], .wy-form input[type="time"], .wy-form input[type="datetime"], .wy-form input[type="datetime-local"], .wy-form input[type="week"], .wy-form input[type="number"], .wy-form input[type="search"], .wy-form input[type="tel"], .wy-form input[type="color"] { + margin-bottom: 0.3em; + display: block; + } + + .wy-form label { + margin-bottom: 0.3em; + display: block; + } + + .wy-form input[type="password"], .wy-form input[type="email"], .wy-form input[type="url"], .wy-form input[type="date"], .wy-form input[type="month"], .wy-form input[type="time"], .wy-form input[type="datetime"], .wy-form input[type="datetime-local"], .wy-form input[type="week"], .wy-form input[type="number"], .wy-form input[type="search"], .wy-form input[type="tel"], .wy-form input[type="color"] { + margin-bottom: 0; + } + + .wy-form-aligned .wy-control-group label { + margin-bottom: 0.3em; + text-align: left; + display: block; + width: 100%; + } + + .wy-form-aligned .wy-controls { + margin: 1.5em 0 0 0; + } + + .wy-form .wy-help-inline, .wy-form-message-inline, .wy-form-message { + display: block; + font-size: 80%; + padding: 0.2em 0 0.8em; + } +} + +@media screen and (max-width: 768px) { + .tablet-hide { + display: none; + } +} + +@media screen and (max-width: 480px) { + .mobile-hide { + display: none; + } +} + +.float-left { + float: left; +} + +.float-right { + float: right; +} + +.full-width { + width: 100%; +} + +.wy-grid-one-col { + *zoom: 1; + max-width: 68em; + margin-left: auto; + margin-right: auto; + max-width: 1066px; + margin-top: 1.618em; +} + +.wy-grid-one-col:before, .wy-grid-one-col:after { + display: table; + content: ""; +} + +.wy-grid-one-col:after { + clear: both; +} + +.wy-grid-one-col section { + display: block; + float: left; + margin-right: 2.35765%; + width: 100%; + background: #fff; + padding: 1.618em; + margin-right: 0; +} + +.wy-grid-one-col section:last-child { + margin-right: 0; +} + +.wy-grid-index-card { + *zoom: 1; + max-width: 68em; + margin-left: auto; + margin-right: auto; + max-width: 460px; + margin-top: 1.618em; + background: #fff; + padding: 1.618em; +} + +.wy-grid-index-card:before, .wy-grid-index-card:after { + display: table; + content: ""; +} + +.wy-grid-index-card:after { + clear: both; +} + +.wy-grid-index-card header, .wy-grid-index-card section, .wy-grid-index-card aside { + display: block; + float: left; + margin-right: 2.35765%; + width: 100%; +} + +.wy-grid-index-card header:last-child, .wy-grid-index-card section:last-child, .wy-grid-index-card aside:last-child { + margin-right: 0; +} + +.wy-grid-index-card.twocol { + max-width: 768px; +} + +.wy-grid-index-card.twocol section { + display: block; + float: left; + margin-right: 2.35765%; + width: 48.82117%; +} + +.wy-grid-index-card.twocol section:last-child { + margin-right: 0; +} + +.wy-grid-index-card.twocol aside { + display: block; + float: left; + margin-right: 2.35765%; + width: 
48.82117%; +} + +.wy-grid-index-card.twocol aside:last-child { + margin-right: 0; +} + +.wy-grid-search-filter { + *zoom: 1; + max-width: 68em; + margin-left: auto; + margin-right: auto; + margin-bottom: 24px; +} + +.wy-grid-search-filter:before, .wy-grid-search-filter:after { + display: table; + content: ""; +} + +.wy-grid-search-filter:after { + clear: both; +} + +.wy-grid-search-filter .wy-grid-search-filter-input { + display: block; + float: left; + margin-right: 2.35765%; + width: 74.41059%; +} + +.wy-grid-search-filter .wy-grid-search-filter-input:last-child { + margin-right: 0; +} + +.wy-grid-search-filter .wy-grid-search-filter-btn { + display: block; + float: left; + margin-right: 2.35765%; + width: 23.23176%; +} + +.wy-grid-search-filter .wy-grid-search-filter-btn:last-child { + margin-right: 0; +} + +.wy-table, .rst-content table.docutils, .rst-content table.field-list { + border-collapse: collapse; + border-spacing: 0; + empty-cells: show; + margin-bottom: 24px; +} + +.wy-table caption, .rst-content table.docutils caption, .rst-content table.field-list caption { + color: #000; + font: italic 85%/1 arial, sans-serif; + padding: 1em 0; + text-align: center; +} + +.wy-table td, .rst-content table.docutils td, .rst-content table.field-list td, .wy-table th, .rst-content table.docutils th, .rst-content table.field-list th { + font-size: 90%; + margin: 0; + overflow: visible; + padding: 8px 16px; +} + +.wy-table td:first-child, .rst-content table.docutils td:first-child, .rst-content table.field-list td:first-child, .wy-table th:first-child, .rst-content table.docutils th:first-child, .rst-content table.field-list th:first-child { + border-left-width: 0; +} + +.wy-table thead, .rst-content table.docutils thead, .rst-content table.field-list thead { + color: #000; + text-align: left; + vertical-align: bottom; + white-space: nowrap; +} + +.wy-table thead th, .rst-content table.docutils thead th, .rst-content table.field-list thead th { + font-weight: bold; + border-bottom: solid 2px #e1e4e5; +} + +.wy-table td, .rst-content table.docutils td, .rst-content table.field-list td { + background-color: transparent; + vertical-align: middle; +} + +.wy-table td p, .rst-content table.docutils td p, .rst-content table.field-list td p { + line-height: 18px; + margin-bottom: 0; +} + +.wy-table .wy-table-cell-min, .rst-content table.docutils .wy-table-cell-min, .rst-content table.field-list .wy-table-cell-min { + width: 1%; + padding-right: 0; +} + +.wy-table .wy-table-cell-min input[type=checkbox], .rst-content table.docutils .wy-table-cell-min input[type=checkbox], .rst-content table.field-list .wy-table-cell-min input[type=checkbox], .wy-table .wy-table-cell-min input[type=checkbox], .rst-content table.docutils .wy-table-cell-min input[type=checkbox], .rst-content table.field-list .wy-table-cell-min input[type=checkbox] { + margin: 0; +} + +.wy-table-secondary { + color: gray; + font-size: 90%; +} + +.wy-table-tertiary { + color: gray; + font-size: 80%; +} + +.wy-table-odd td, .wy-table-striped tr:nth-child(2n-1) td, .rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td { + background-color: #f3f6f6; +} + +.wy-table-backed { + background-color: #f3f6f6; +} + +.wy-table-bordered-all, .rst-content table.docutils { + border: 1px solid #e1e4e5; +} + +.wy-table-bordered-all td, .rst-content table.docutils td { + border-bottom: 1px solid #e1e4e5; + border-left: 1px solid #e1e4e5; +} + +.wy-table-bordered-all tbody>tr:last-child td, .rst-content table.docutils tbody>tr:last-child td { + 
border-bottom-width: 0; +} + +.wy-table-bordered { + border: 1px solid #e1e4e5; +} + +.wy-table-bordered-rows td { + border-bottom: 1px solid #e1e4e5; +} + +.wy-table-bordered-rows tbody>tr:last-child td { + border-bottom-width: 0; +} + +.wy-table-horizontal tbody>tr:last-child td { + border-bottom-width: 0; +} + +.wy-table-horizontal td, .wy-table-horizontal th { + border-width: 0 0 1px 0; + border-bottom: 1px solid #e1e4e5; +} + +.wy-table-horizontal tbody>tr:last-child td { + border-bottom-width: 0; +} + +.wy-table-responsive { + margin-bottom: 24px; + max-width: 100%; + overflow: auto; +} + +.wy-table-responsive table { + margin-bottom: 0 !important; +} + +.wy-table-responsive table td, .wy-table-responsive table th { + white-space: nowrap; +} + +html { + height: 100%; + overflow-x: hidden; +} + +body { + font-family: "Lato", "proxima-nova", "Helvetica Neue", Arial, sans-serif; + font-weight: normal; + color: #404040; + min-height: 100%; + overflow-x: hidden; + background: #edf0f2; +} + +a { + color: #2980b9; + text-decoration: none; +} + +a:hover { + color: #3091d1; +} + +.link-danger { + color: #e74c3c; +} + +.link-danger:hover { + color: #d62c1a; +} + +.text-left { + text-align: left; +} + +.text-center { + text-align: center; +} + +.text-right { + text-align: right; +} + +h1, h2, h3, h4, h5, h6, legend { + margin-top: 0; + font-weight: 700; + font-family: "Roboto Slab", "ff-tisa-web-pro", "Georgia", Arial, sans-serif; +} + +p { + line-height: 24px; + margin: 0; + font-size: 16px; + margin-bottom: 24px; +} + +h1 { + font-size: 175%; +} + +h2 { + font-size: 150%; +} + +h3 { + font-size: 125%; +} + +h4 { + font-size: 115%; +} + +h5 { + font-size: 110%; +} + +h6 { + font-size: 100%; +} + +small { + font-size: 80%; +} + +code, .rst-content tt { + white-space: nowrap; + max-width: 100%; + background: #fff; + border: solid 1px #e1e4e5; + font-size: 75%; + padding: 0 5px; + font-family: "Inconsolata", "Consolata", "Monaco", monospace; + color: #e74c3c; + overflow-x: auto; +} + +code.code-large, .rst-content tt.code-large { + font-size: 90%; +} + +.full-width { + width: 100%; +} + +.wy-plain-list-disc, .rst-content .section ul, .rst-content .toctree-wrapper ul { + list-style: disc; + line-height: 24px; + margin-bottom: 24px; +} + +.wy-plain-list-disc li, .rst-content .section ul li, .rst-content .toctree-wrapper ul li { + list-style: disc; + margin-left: 24px; +} + +.wy-plain-list-disc li ul, .rst-content .section ul li ul, .rst-content .toctree-wrapper ul li ul { + margin-bottom: 0; +} + +.wy-plain-list-disc li li, .rst-content .section ul li li, .rst-content .toctree-wrapper ul li li { + list-style: circle; +} + +.wy-plain-list-disc li li li, .rst-content .section ul li li li, .rst-content .toctree-wrapper ul li li li { + list-style: square; +} + +.wy-plain-list-decimal, .rst-content .section ol, .rst-content ol.arabic { + list-style: decimal; + line-height: 24px; + margin-bottom: 24px; +} + +.wy-plain-list-decimal li, .rst-content .section ol li, .rst-content ol.arabic li { + list-style: decimal; + margin-left: 24px; +} + +.wy-type-large { + font-size: 120%; +} + +.wy-type-normal { + font-size: 100%; +} + +.wy-type-small { + font-size: 100%; +} + +.wy-type-strike { + text-decoration: line-through; +} + +.wy-text-warning { + color: #e67e22 !important; +} + +a.wy-text-warning:hover { + color: #eb9950 !important; +} + +.wy-text-info { + color: #2980b9 !important; +} + +a.wy-text-info:hover { + color: #409ad5 !important; +} + +.wy-text-success { + color: #27ae60 !important; +} + 
+a.wy-text-success:hover { + color: #36d278 !important; +} + +.wy-text-danger { + color: #e74c3c !important; +} + +a.wy-text-danger:hover { + color: #ed7669 !important; +} + +.wy-text-neutral { + color: #404040 !important; +} + +a.wy-text-neutral:hover { + color: #595959 !important; +} + +.codeblock-example { + border: 1px solid #e1e4e5; + border-bottom: none; + padding: 24px; + padding-top: 48px; + font-weight: 500; + background: #fff; + position: relative; +} + +.codeblock-example:after { + content: "Example"; + position: absolute; + top: 0px; + left: 0px; + background: #9b59b6; + color: #fff; + padding: 6px 12px; +} + +.codeblock-example.prettyprint-example-only { + border: 1px solid #e1e4e5; + margin-bottom: 24px; +} + +.codeblock, div[class^='highlight'] { + border: 1px solid #e1e4e5; + padding: 0px; + overflow-x: auto; + background: #fff; + margin: 1px 0 24px 0; +} + +.codeblock div[class^='highlight'], div[class^='highlight'] div[class^='highlight'] { + border: none; + background: none; + margin: 0; +} + +.linenodiv pre { + border-right: solid 1px #e6e9ea; + margin: 0; + padding: 12px 12px; + font-family: "Inconsolata", "Consolata", "Monaco", monospace; + font-size: 12px; + line-height: 1.5; + color: #d9d9d9; +} + +div[class^='highlight'] pre { + white-space: pre; + margin: 0; + padding: 12px 12px; + font-family: "Inconsolata", "Consolata", "Monaco", monospace; + font-size: 12px; + line-height: 1.5; + display: block; + overflow: auto; + color: #404040; +} + +pre.literal-block { + @extends .codeblock; +} + +@media print { + .codeblock, div[class^='highlight'], div[class^='highlight'] pre { + white-space: pre-wrap; + } +} + +.hll { + background-color: #f8f8f8; + border: 1px solid #ccc; + padding: 1.5px 5px; +} + +.c { + color: #998; + font-style: italic; +} + +.err { + color: #a61717; + background-color: #e3d2d2; +} + +.k { + font-weight: bold; +} + +.o { + font-weight: bold; +} + +.cm { + color: #998; + font-style: italic; +} + +.cp { + color: #999; + font-weight: bold; +} + +.c1 { + color: #998; + font-style: italic; +} + +.cs { + color: #999; + font-weight: bold; + font-style: italic; +} + +.gd { + color: #000; + background-color: #fdd; +} + +.gd .x { + color: #000; + background-color: #faa; +} + +.ge { + font-style: italic; +} + +.gr { + color: #a00; +} + +.gh { + color: #999; +} + +.gi { + color: #000; + background-color: #dfd; +} + +.gi .x { + color: #000; + background-color: #afa; +} + +.go { + color: #888; +} + +.gp { + color: #555; +} + +.gs { + font-weight: bold; +} + +.gu { + color: purple; + font-weight: bold; +} + +.gt { + color: #a00; +} + +.kc { + font-weight: bold; +} + +.kd { + font-weight: bold; +} + +.kn { + font-weight: bold; +} + +.kp { + font-weight: bold; +} + +.kr { + font-weight: bold; +} + +.kt { + color: #458; + font-weight: bold; +} + +.m { + color: #099; +} + +.s { + color: #d14; +} + +.n { + color: #333; +} + +.na { + color: teal; +} + +.nb { + color: #0086b3; +} + +.nc { + color: #458; + font-weight: bold; +} + +.no { + color: teal; +} + +.ni { + color: purple; +} + +.ne { + color: #900; + font-weight: bold; +} + +.nf { + color: #900; + font-weight: bold; +} + +.nn { + color: #555; +} + +.nt { + color: navy; +} + +.nv { + color: teal; +} + +.ow { + font-weight: bold; +} + +.w { + color: #bbb; +} + +.mf { + color: #099; +} + +.mh { + color: #099; +} + +.mi { + color: #099; +} + +.mo { + color: #099; +} + +.sb { + color: #d14; +} + +.sc { + color: #d14; +} + +.sd { + color: #d14; +} + +.s2 { + color: #d14; +} + +.se { + color: #d14; +} + +.sh { + color: 
#d14; +} + +.si { + color: #d14; +} + +.sx { + color: #d14; +} + +.sr { + color: #009926; +} + +.s1 { + color: #d14; +} + +.ss { + color: #990073; +} + +.bp { + color: #999; +} + +.vc { + color: teal; +} + +.vg { + color: teal; +} + +.vi { + color: teal; +} + +.il { + color: #099; +} + +.gc { + color: #999; + background-color: #eaf2f5; +} + +.wy-breadcrumbs li { + display: inline-block; +} + +.wy-breadcrumbs li.wy-breadcrumbs-aside { + float: right; +} + +.wy-breadcrumbs li a { + display: inline-block; + padding: 5px; +} + +.wy-breadcrumbs li a:first-child { + padding-left: 0; +} + +.wy-breadcrumbs-extra { + margin-bottom: 0; + color: #b3b3b3; + font-size: 80%; + display: inline-block; +} + +@media screen and (max-width: 480px) { + .wy-breadcrumbs-extra { + display: none; + } + + .wy-breadcrumbs li.wy-breadcrumbs-aside { + display: none; + } +} + +@media print { + .wy-breadcrumbs li.wy-breadcrumbs-aside { + display: none; + } +} + +.wy-affix { + position: fixed; + top: 1.618em; +} + +.wy-menu a:hover { + text-decoration: none; +} + +.wy-menu-horiz { + *zoom: 1; +} + +.wy-menu-horiz:before, .wy-menu-horiz:after { + display: table; + content: ""; +} + +.wy-menu-horiz:after { + clear: both; +} + +.wy-menu-horiz ul, .wy-menu-horiz li { + display: inline-block; +} + +.wy-menu-horiz li:hover { + background: rgba(255, 255, 255, 0.1); +} + +.wy-menu-horiz li.divide-left { + border-left: solid 1px #404040; +} + +.wy-menu-horiz li.divide-right { + border-right: solid 1px #404040; +} + +.wy-menu-horiz a { + height: 32px; + display: inline-block; + line-height: 32px; + padding: 0 16px; +} + +.wy-menu-vertical header { + height: 32px; + display: inline-block; + line-height: 32px; + padding: 0 1.618em; + display: block; + font-weight: bold; + text-transform: uppercase; + font-size: 80%; + color: #2980b9; + white-space: nowrap; +} + +.wy-menu-vertical ul { + margin-bottom: 0; +} + +.wy-menu-vertical li.divide-top { + border-top: solid 1px #404040; +} + +.wy-menu-vertical li.divide-bottom { + border-bottom: solid 1px #404040; +} + +.wy-menu-vertical li.current { + background: #e3e3e3; +} + +.wy-menu-vertical li.current a { + color: gray; + border-right: solid 1px #c9c9c9; + padding: 0.4045em 2.427em; +} + +.wy-menu-vertical li.current a:hover { + background: #d6d6d6; +} + +.wy-menu-vertical li.on a, .wy-menu-vertical li.current>a { + color: #404040; + padding: 0.4045em 1.618em; + font-weight: bold; + position: relative; + background: #fcfcfc; + border: none; + border-bottom: solid 1px #c9c9c9; + border-top: solid 1px #c9c9c9; + padding-left: 1.618em -4px; +} + +.wy-menu-vertical li.on a:hover, .wy-menu-vertical li.current>a:hover { + background: #fcfcfc; +} + +.wy-menu-vertical li.tocktree-l2.current>a { + background: #c9c9c9; +} + +.wy-menu-vertical li.current ul { + display: block; +} + +.wy-menu-vertical li ul { + margin-bottom: 0; + display: none; +} + +.wy-menu-vertical li ul li a { + margin-bottom: 0; + color: #b3b3b3; + font-weight: normal; +} + +.wy-menu-vertical a { + display: inline-block; + line-height: 18px; + padding: 0.4045em 1.618em; + display: block; + position: relative; + font-size: 90%; + color: #b3b3b3; +} + +.wy-menu-vertical a:hover { + background-color: #4e4a4a; + cursor: pointer; +} + +.wy-menu-vertical a:active { + background-color: #2980b9; + cursor: pointer; + color: #fff; +} + +.wy-side-nav-search { + z-index: 200; + background-color: #2980b9; + text-align: center; + padding: 0.809em; + display: block; + color: #fcfcfc; + margin-bottom: 0.809em; +} + +.wy-side-nav-search 
input[type=text] { + width: 100%; + border-radius: 50px; + padding: 6px 12px; + border-color: #2472a4; +} + +.wy-side-nav-search img { + display: block; + margin: auto auto 0.809em auto; + height: 214px; + width: 26px; + background-color: #2980b9; + padding: 5px; +/* border-radius: 100%; */ +} + +.wy-side-nav-search>a, .wy-side-nav-search .wy-dropdown>a { + color: #fcfcfc; + font-size: 100%; + font-weight: bold; + display: inline-block; + padding: 4px 6px; + margin-bottom: 0.809em; +} + +.wy-side-nav-search>a:hover, .wy-side-nav-search .wy-dropdown>a:hover { + background: rgba(255, 255, 255, 0.1); +} + +.wy-nav .wy-menu-vertical header { + color: #2980b9; +} + +.wy-nav .wy-menu-vertical a { + color: #b3b3b3; +} + +.wy-nav .wy-menu-vertical a:hover { + background-color: #2980b9; + color: #fff; +} + +[data-menu-wrap] { + -webkit-transition: all 0.2s ease-in; + -moz-transition: all 0.2s ease-in; + transition: all 0.2s ease-in; + position: absolute; + opacity: 1; + width: 100%; + opacity: 0; +} + +[data-menu-wrap].move-center { + left: 0; + right: auto; + opacity: 1; +} + +[data-menu-wrap].move-left { + right: auto; + left: -100%; + opacity: 0; +} + +[data-menu-wrap].move-right { + right: -100%; + left: auto; + opacity: 0; +} + +.wy-body-for-nav { + background: left repeat-y #fff; + background-image: url(data:image/png; + base64, iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyRpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDoxOERBMTRGRDBFMUUxMUUzODUwMkJCOThDMEVFNURFMCIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDoxOERBMTRGRTBFMUUxMUUzODUwMkJCOThDMEVFNURFMCI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOjE4REExNEZCMEUxRTExRTM4NTAyQkI5OEMwRUU1REUwIiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOjE4REExNEZDMEUxRTExRTM4NTAyQkI5OEMwRUU1REUwIi8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+EwrlwAAAAA5JREFUeNpiMDU0BAgwAAE2AJgB9BnaAAAAAElFTkSuQmCC); + background-size: 300px 1px; +} + +.wy-grid-for-nav { + position: absolute; + width: 100%; + height: 100%; +} + +.wy-nav-side { + position: absolute; + top: 0; + left: 0; + width: 300px; + overflow: hidden; + min-height: 100%; + background: #343131; + z-index: 200; +} + +.wy-nav-top { + display: none; + background: #2980b9; + color: #fff; + padding: 0.4045em 0.809em; + position: relative; + line-height: 50px; + text-align: center; + font-size: 100%; + *zoom: 1; +} + +.wy-nav-top:before, .wy-nav-top:after { + display: table; + content: ""; +} + +.wy-nav-top:after { + clear: both; +} + +.wy-nav-top a { + color: #fff; + font-weight: bold; +} + +.wy-nav-top img { + margin-right: 12px; + height: 45px; + width: 45px; + background-color: #2980b9; + padding: 5px; + border-radius: 100%; +} + +.wy-nav-top i { + font-size: 30px; + float: left; + cursor: pointer; +} + +.wy-nav-content-wrap { + margin-left: 300px; + background: #fff; + min-height: 100%; +} + 
+.wy-nav-content { + padding: 1.618em 3.236em; + height: 100%; + max-width: 1140px; + margin: auto; +} + +.wy-body-mask { + position: fixed; + width: 100%; + height: 100%; + background: rgba(0, 0, 0, 0.2); + display: none; + z-index: 499; +} + +.wy-body-mask.on { + display: block; +} + +footer { + color: #999; +} + +footer p { + margin-bottom: 12px; +} + +.rst-footer-buttons { + *zoom: 1; +} + +.rst-footer-buttons:before, .rst-footer-buttons:after { + display: table; + content: ""; +} + +.rst-footer-buttons:after { + clear: both; +} + +#search-results .search li { + margin-bottom: 24px; + border-bottom: solid 1px #e1e4e5; + padding-bottom: 24px; +} + +#search-results .search li:first-child { + border-top: solid 1px #e1e4e5; + padding-top: 24px; +} + +#search-results .search li a { + font-size: 120%; + margin-bottom: 12px; + display: inline-block; +} + +#search-results .context { + color: gray; + font-size: 90%; +} + +@media screen and (max-width: 768px) { + .wy-body-for-nav { + background: #fff; + } + + .wy-nav-top { + display: block; + } + + .wy-nav-side { + left: -300px; + } + + .wy-nav-side.shift { + width: 85%; + left: 0; + } + + .wy-nav-content-wrap { + margin-left: 0; + } + + .wy-nav-content-wrap .wy-nav-content { + padding: 1.618em; + } + + .wy-nav-content-wrap.shift { + position: fixed; + min-width: 100%; + left: 85%; + top: 0; + height: 100%; + overflow: hidden; + } +} + +@media screen and (min-width: 1400px) { + .wy-nav-content-wrap { + background: #fff; + } + + .wy-nav-content { + margin: 0; + background: #fff; + } +} + +@media print { + .wy-nav-side { + display: none; + } + + .wy-nav-content-wrap { + margin-left: 0; + } +} + +.rst-versions { + position: fixed; + bottom: 0; + left: 0; + width: 300px; + color: #fcfcfc; + background: #1f1d1d; + border-top: solid 10px #343131; + font-family: "Lato", "proxima-nova", "Helvetica Neue", Arial, sans-serif; + z-index: 400; +} + +.rst-versions a { + color: #2980b9; + text-decoration: none; +} + +.rst-versions .rst-badge-small { + display: none; +} + +.rst-versions .rst-current-version { + padding: 12px; + background-color: #272525; + display: block; + text-align: right; + font-size: 90%; + cursor: pointer; + color: #27ae60; + *zoom: 1; +} + +.rst-versions .rst-current-version:before, .rst-versions .rst-current-version:after { + display: table; + content: ""; +} + +.rst-versions .rst-current-version:after { + clear: both; +} + +.rst-versions .rst-current-version .icon, .rst-versions .rst-current-version .wy-inline-validate.wy-inline-validate-success .wy-input-context, .wy-inline-validate.wy-inline-validate-success .rst-versions .rst-current-version .wy-input-context, .rst-versions .rst-current-version .wy-inline-validate.wy-inline-validate-danger .wy-input-context, .wy-inline-validate.wy-inline-validate-danger .rst-versions .rst-current-version .wy-input-context, .rst-versions .rst-current-version .wy-inline-validate.wy-inline-validate-warning .wy-input-context, .wy-inline-validate.wy-inline-validate-warning .rst-versions .rst-current-version .wy-input-context, .rst-versions .rst-current-version .wy-inline-validate.wy-inline-validate-info .wy-input-context, .wy-inline-validate.wy-inline-validate-info .rst-versions .rst-current-version .wy-input-context, .rst-versions .rst-current-version .wy-tag-input-group .wy-tag .wy-tag-remove, .wy-tag-input-group .wy-tag .rst-versions .rst-current-version .wy-tag-remove, .rst-versions .rst-current-version .rst-content .admonition-title, .rst-content .rst-versions .rst-current-version .admonition-title, 
.rst-versions .rst-current-version .rst-content h1 .headerlink, .rst-content h1 .rst-versions .rst-current-version .headerlink, .rst-versions .rst-current-version .rst-content h2 .headerlink, .rst-content h2 .rst-versions .rst-current-version .headerlink, .rst-versions .rst-current-version .rst-content h3 .headerlink, .rst-content h3 .rst-versions .rst-current-version .headerlink, .rst-versions .rst-current-version .rst-content h4 .headerlink, .rst-content h4 .rst-versions .rst-current-version .headerlink, .rst-versions .rst-current-version .rst-content h5 .headerlink, .rst-content h5 .rst-versions .rst-current-version .headerlink, .rst-versions .rst-current-version .rst-content h6 .headerlink, .rst-content h6 .rst-versions .rst-current-version .headerlink, .rst-versions .rst-current-version .rst-content dl dt .headerlink, .rst-content dl dt .rst-versions .rst-current-version .headerlink { + color: #fcfcfc; +} + +.rst-versions .rst-current-version .icon-book { + float: left; +} + +.rst-versions .rst-current-version.rst-out-of-date { + background-color: #e74c3c; + color: #fff; +} + +.rst-versions.shift-up .rst-other-versions { + display: block; +} + +.rst-versions .rst-other-versions { + font-size: 90%; + padding: 12px; + color: gray; + display: none; +} + +.rst-versions .rst-other-versions hr { + display: block; + height: 1px; + border: 0; + margin: 20px 0; + padding: 0; + border-top: solid 1px #413d3d; +} + +.rst-versions .rst-other-versions dd { + display: inline-block; + margin: 0; +} + +.rst-versions .rst-other-versions dd a { + display: inline-block; + padding: 6px; + color: #fcfcfc; +} + +.rst-versions.rst-badge { + width: auto; + bottom: 20px; + right: 20px; + left: auto; + border: none; + max-width: 300px; +} + +.rst-versions.rst-badge .icon-book { + float: none; +} + +.rst-versions.rst-badge.shift-up .rst-current-version { + text-align: right; +} + +.rst-versions.rst-badge.shift-up .rst-current-version .icon-book { + float: left; +} + +.rst-versions.rst-badge .rst-current-version { + width: auto; + height: 30px; + line-height: 30px; + padding: 0 6px; + display: block; + text-align: center; +} + +@media screen and (max-width: 768px) { + .rst-versions { + width: 85%; + display: none; + } + + .rst-versions.shift { + display: block; + } + + img { + width: 100%; + height: auto; + } +} + +.rst-content img { + max-width: 100%; + height: auto !important; +} + +.rst-content .section>img { + margin-bottom: 24px; +} + +.rst-content a.reference.external:after { + font-family: fontawesome-webfont; + content: " \f08e "; + color: #b3b3b3; + vertical-align: super; + font-size: 60%; +} + +.rst-content blockquote { + margin-left: 24px; + line-height: 24px; + margin-bottom: 24px; +} + +.rst-content .note .last, .rst-content .note p.first, .rst-content .attention .last, .rst-content .attention p.first, .rst-content .caution .last, .rst-content .caution p.first, .rst-content .danger .last, .rst-content .danger p.first, .rst-content .error .last, .rst-content .error p.first, .rst-content .hint .last, .rst-content .hint p.first, .rst-content .important .last, .rst-content .important p.first, .rst-content .tip .last, .rst-content .tip p.first, .rst-content .warning .last, .rst-content .warning p.first { + margin-bottom: 0; +} + +.rst-content .admonition-title { + font-weight: bold; +} + +.rst-content .admonition-title:before { + margin-right: 4px; +} + +.rst-content .admonition table { + border-color: rgba(0, 0, 0, 0.1); +} + +.rst-content .admonition table td, .rst-content .admonition table th { + 
background: transparent !important; + border-color: rgba(0, 0, 0, 0.1) !important; +} + +.rst-content .section ol.loweralpha, .rst-content .section ol.loweralpha li { + list-style: lower-alpha; +} + +.rst-content .section ol.upperalpha, .rst-content .section ol.upperalpha li { + list-style: upper-alpha; +} + +.rst-content .section ol p, .rst-content .section ul p { + margin-bottom: 12px; +} + +.rst-content .line-block { + margin-left: 24px; +} + +.rst-content .topic-title { + font-weight: bold; + margin-bottom: 12px; +} + +.rst-content .toc-backref { + color: #404040; +} + +.rst-content .align-right { + float: right; + margin: 0px 0px 24px 24px; +} + +.rst-content .align-left { + float: left; + margin: 0px 24px 24px 0px; +} + +.rst-content h1 .headerlink, .rst-content h2 .headerlink, .rst-content h3 .headerlink, .rst-content h4 .headerlink, .rst-content h5 .headerlink, .rst-content h6 .headerlink, .rst-content dl dt .headerlink { + display: none; + visibility: hidden; + font-size: 14px; +} + +.rst-content h1 .headerlink:after, .rst-content h2 .headerlink:after, .rst-content h3 .headerlink:after, .rst-content h4 .headerlink:after, .rst-content h5 .headerlink:after, .rst-content h6 .headerlink:after, .rst-content dl dt .headerlink:after { + visibility: visible; + content: "\f0c1"; + font-family: fontawesome-webfont; + display: inline-block; +} + +.rst-content h1:hover .headerlink, .rst-content h2:hover .headerlink, .rst-content h3:hover .headerlink, .rst-content h4:hover .headerlink, .rst-content h5:hover .headerlink, .rst-content h6:hover .headerlink, .rst-content dl dt:hover .headerlink { + display: inline-block; +} + +.rst-content .sidebar { + float: right; + width: 40%; + display: block; + margin: 0 0 24px 24px; + padding: 24px; + background: #f3f6f6; + border: solid 1px #e1e4e5; +} + +.rst-content .sidebar p, .rst-content .sidebar ul, .rst-content .sidebar dl { + font-size: 90%; +} + +.rst-content .sidebar .last { + margin-bottom: 0; +} + +.rst-content .sidebar .sidebar-title { + display: block; + font-family: "Roboto Slab", "ff-tisa-web-pro", "Georgia", Arial, sans-serif; + font-weight: bold; + background: #e1e4e5; + padding: 6px 12px; + margin: -24px; + margin-bottom: 24px; + font-size: 100%; +} + +.rst-content .highlighted { + background: #f1c40f; + display: inline-block; + font-weight: bold; + padding: 0 6px; +} + +.rst-content .footnote-reference, .rst-content .citation-reference { + vertical-align: super; + font-size: 90%; +} + +.rst-content table.docutils.citation, .rst-content table.docutils.footnote { + background: none; + border: none; + color: #999; +} + +.rst-content table.docutils.citation td, .rst-content table.docutils.citation tr, .rst-content table.docutils.footnote td, .rst-content table.docutils.footnote tr { + border: none; + background-color: transparent !important; + white-space: normal; +} + +.rst-content table.docutils.citation td.label, .rst-content table.docutils.footnote td.label { + padding-left: 0; + padding-right: 0; + vertical-align: top; +} + +.rst-content table.field-list { + border: none; +} + +.rst-content table.field-list td { + border: none; +} + +.rst-content table.field-list .field-name { + padding-right: 10px; + text-align: left; +} + +.rst-content table.field-list .field-body { + text-align: left; + padding-left: 0; +} + +.rst-content tt { + color: #000; +} + +.rst-content tt big, .rst-content tt em { + font-size: 100% !important; + line-height: normal; +} + +.rst-content tt .xref, a .rst-content tt { + font-weight: bold; +} + +.rst-content dl { 
+ margin-bottom: 24px; +} + +.rst-content dl dt { + font-weight: bold; +} + +.rst-content dl p, .rst-content dl table, .rst-content dl ul, .rst-content dl ol { + margin-bottom: 12px !important; +} + +.rst-content dl dd { + margin: 0 0 12px 24px; +} + +.rst-content dl:not(.docutils) { + margin-bottom: 24px; +} + +.rst-content dl:not(.docutils) dt { + display: inline-block; + margin: 6px 0; + font-size: 90%; + line-height: normal; + background: #e7f2fa; + color: #2980b9; + border-top: solid 3px #6ab0de; + padding: 6px; + position: relative; +} + +.rst-content dl:not(.docutils) dt:before { + color: #6ab0de; +} + +.rst-content dl:not(.docutils) dt .headerlink { + color: #404040; + font-size: 100% !important; +} + +.rst-content dl:not(.docutils) dl dt { + margin-bottom: 6px; + border: none; + border-left: solid 3px #ccc; + background: #f0f0f0; + color: gray; +} + +.rst-content dl:not(.docutils) dl dt .headerlink { + color: #404040; + font-size: 100% !important; +} + +.rst-content dl:not(.docutils) dt:first-child { + margin-top: 0; +} + +.rst-content dl:not(.docutils) tt { + font-weight: bold; +} + +.rst-content dl:not(.docutils) tt.descname, .rst-content dl:not(.docutils) tt.descclassname { + background-color: transparent; + border: none; + padding: 0; + font-size: 100% !important; +} + +.rst-content dl:not(.docutils) tt.descname { + font-weight: bold; +} + +.rst-content dl:not(.docutils) .viewcode-link { + display: inline-block; + color: #27ae60; + font-size: 80%; + padding-left: 24px; +} + +.rst-content dl:not(.docutils) .optional { + display: inline-block; + padding: 0 4px; + color: #000; + font-weight: bold; +} + +.rst-content dl:not(.docutils) .property { + display: inline-block; + padding-right: 8px; +} + +@media screen and (max-width: 480px) { + .rst-content .sidebar { + width: 100%; + } +} + +span[id*='MathJax-Span'] { + color: #404040; +} + +.admonition.note span[id*='MathJax-Span'] { + color: #fff; +} + +.admonition.warning span[id*='MathJax-Span'] { + color: #fff; +} + +.search-reset-start { + color: #463E3F; + float: right; + position: relative; + top: -25px; + left: -10px; + z-index: 10; +} + +.search-reset-start:hover { + cursor: pointer; + color: #2980B9; +} + +#search-box-id { + padding-right: 25px; +} diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/css/theme.css ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/css/theme.css --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/css/theme.css 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/css/theme.css 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,4636 @@ +* { + -webkit-box-sizing: border-box; + -moz-box-sizing: border-box; + box-sizing: border-box; +} + +article, aside, details, figcaption, figure, footer, header, hgroup, nav, section { + display: block; +} + +audio, canvas, video { + display: inline-block; + *display: inline; + *zoom: 1; +} + +audio:not([controls]) { + display: none; +} + +[hidden] { + display: none; +} + +* { + -webkit-box-sizing: border-box; + -moz-box-sizing: border-box; + box-sizing: border-box; +} + +html { + font-size: 100%; + -webkit-text-size-adjust: 100%; + -ms-text-size-adjust: 100%; +} + +body { + margin: 0; +} + +a:hover, a:active { + outline: 0; +} + +abbr[title] { + border-bottom: 1px dotted; +} + +b, strong { + font-weight: bold; +} + +blockquote { + margin: 0; +} + +dfn { + font-style: italic; +} + +hr { + display: block; + height: 1px; + 
border: 0; + border-top: 1px solid #ccc; + margin: 20px 0; + padding: 0; +} + +ins { + background: #ff9; + color: #000; + text-decoration: none; +} + +mark { + background: #ff0; + color: #000; + font-style: italic; + font-weight: bold; +} + +pre, code, .rst-content tt, kbd, samp { + font-family: monospace, serif; + _font-family: "courier new", monospace; + font-size: 1em; +} + +pre { + white-space: pre; +} + +q { + quotes: none; +} + +q:before, q:after { + content: ""; + content: none; +} + +small { + font-size: 85%; +} + +sub, sup { + font-size: 75%; + line-height: 0; + position: relative; + vertical-align: baseline; +} + +sup { + top: -0.5em; +} + +sub { + bottom: -0.25em; +} + +ul, ol, dl { + margin: 0; + padding: 0; + list-style: none; + list-style-image: none; +} + +li { + list-style: none; +} + +dd { + margin: 0; +} + +img { + border: 0; + -ms-interpolation-mode: bicubic; + vertical-align: middle; + max-width: 100%; +} + +svg:not(:root) { + overflow: hidden; +} + +figure { + margin: 0; +} + +form { + margin: 0; +} + +fieldset { + border: 0; + margin: 0; + padding: 0; +} + +label { + cursor: pointer; +} + +legend { + border: 0; + *margin-left: -7px; + padding: 0; + white-space: normal; +} + +button, input, select, textarea { + font-size: 100%; + margin: 0; + vertical-align: baseline; + *vertical-align: middle; +} + +button, input { + line-height: normal; +} + +button, input[type="button"], input[type="reset"], input[type="submit"] { + cursor: pointer; + -webkit-appearance: button; + *overflow: visible; +} + +button[disabled], input[disabled] { + cursor: default; +} + +input[type="checkbox"], input[type="radio"] { + box-sizing: border-box; + padding: 0; + *width: 13px; + *height: 13px; +} + +input[type="search"] { + -webkit-appearance: textfield; + -moz-box-sizing: content-box; + -webkit-box-sizing: content-box; + box-sizing: content-box; +} + +input[type="search"]::-webkit-search-decoration, input[type="search"]::-webkit-search-cancel-button { + -webkit-appearance: none; +} + +button::-moz-focus-inner, input::-moz-focus-inner { + border: 0; + padding: 0; +} + +textarea { + overflow: auto; + vertical-align: top; + resize: vertical; +} + +table { + border-collapse: collapse; + border-spacing: 0; +} + +td { + vertical-align: top; +} + +.chromeframe { + margin: 0.2em 0; + background: #ccc; + color: #000; + padding: 0.2em 0; +} + +.ir { + display: block; + border: 0; + text-indent: -999em; + overflow: hidden; + background-color: transparent; + background-repeat: no-repeat; + text-align: left; + direction: ltr; + *line-height: 0; +} + +.ir br { + display: none; +} + +.hidden { + display: none !important; + visibility: hidden; +} + +.visuallyhidden { + border: 0; + clip: rect(0 0 0 0); + height: 1px; + margin: -1px; + overflow: hidden; + padding: 0; + position: absolute; + width: 1px; +} + +.visuallyhidden.focusable:active, .visuallyhidden.focusable:focus { + clip: auto; + height: auto; + margin: 0; + overflow: visible; + position: static; + width: auto; +} + +.invisible { + visibility: hidden; +} + +.relative { + position: relative; +} + +big, small { + font-size: 100%; +} + +@media print { + html, body, section { + background: none !important; + } + + * { + box-shadow: none !important; + text-shadow: none !important; + filter: none !important; + -ms-filter: none !important; + } + + a, a:visited { + text-decoration: underline; + } + + .ir a:after, a[href^="javascript:"]:after, a[href^="#"]:after { + content: ""; + } + + pre, blockquote { + page-break-inside: avoid; + } + + thead { + 
display: table-header-group; + } + + tr, img { + page-break-inside: avoid; + } + + img { + max-width: 100% !important; + } + + @page { + margin: 0.5cm; + } + + p, h2, h3 { + orphans: 3; + widows: 3; + } + + h2, h3 { + page-break-after: avoid; + } +} + +.font-smooth, .icon:before, .wy-inline-validate.wy-inline-validate-success .wy-input-context:before, .wy-inline-validate.wy-inline-validate-danger .wy-input-context:before, .wy-inline-validate.wy-inline-validate-warning .wy-input-context:before, .wy-inline-validate.wy-inline-validate-info .wy-input-context:before, .wy-tag-input-group .wy-tag .wy-tag-remove:before, .rst-content .admonition-title:before, .rst-content h1 .headerlink:before, .rst-content h2 .headerlink:before, .rst-content h3 .headerlink:before, .rst-content h4 .headerlink:before, .rst-content h5 .headerlink:before, .rst-content h6 .headerlink:before, .rst-content dl dt .headerlink:before, .wy-alert, .rst-content .note, .rst-content .attention, .rst-content .caution, .rst-content .danger, .rst-content .error, .rst-content .hint, .rst-content .important, .rst-content .tip, .rst-content .warning, .btn, input[type="text"], input[type="password"], input[type="email"], input[type="url"], input[type="date"], input[type="month"], input[type="time"], input[type="datetime"], input[type="datetime-local"], input[type="week"], input[type="number"], input[type="search"], input[type="tel"], input[type="color"], select, textarea, .wy-tag-input-group, .wy-menu-vertical li.on a, .wy-menu-vertical li.current>a, .wy-side-nav-search>a, .wy-side-nav-search .wy-dropdown>a, .wy-nav-top a { + -webkit-font-smoothing: antialiased; +} + +.clearfix { + *zoom: 1; +} + +.clearfix:before, .clearfix:after { + display: table; + content: ""; +} + +.clearfix:after { + clear: both; +} + +@font-face { + font-family: fontawesome-webfont; + font-weight: normal; + font-style: normal; + src: url("../font/fontawesome_webfont.eot"); + src: url("../font/fontawesome_webfont.eot?#iefix") format("embedded-opentype"), url("../font/fontawesome_webfont.woff") format("woff"), url("../font/fontawesome_webfont.ttf") format("truetype"), url("../font/fontawesome_webfont.svg#fontawesome-webfont") format("svg"); +} + +.icon:before, .wy-inline-validate.wy-inline-validate-success .wy-input-context:before, .wy-inline-validate.wy-inline-validate-danger .wy-input-context:before, .wy-inline-validate.wy-inline-validate-warning .wy-input-context:before, .wy-inline-validate.wy-inline-validate-info .wy-input-context:before, .wy-tag-input-group .wy-tag .wy-tag-remove:before, .rst-content .admonition-title:before, .rst-content h1 .headerlink:before, .rst-content h2 .headerlink:before, .rst-content h3 .headerlink:before, .rst-content h4 .headerlink:before, .rst-content h5 .headerlink:before, .rst-content h6 .headerlink:before, .rst-content dl dt .headerlink:before { + display: inline-block; + font-family: fontawesome-webfont; + font-style: normal; + font-weight: normal; + line-height: 1; + text-decoration: inherit; +} + +a .icon, a .wy-inline-validate.wy-inline-validate-success .wy-input-context, .wy-inline-validate.wy-inline-validate-success a .wy-input-context, a .wy-inline-validate.wy-inline-validate-danger .wy-input-context, .wy-inline-validate.wy-inline-validate-danger a .wy-input-context, a .wy-inline-validate.wy-inline-validate-warning .wy-input-context, .wy-inline-validate.wy-inline-validate-warning a .wy-input-context, a .wy-inline-validate.wy-inline-validate-info .wy-input-context, .wy-inline-validate.wy-inline-validate-info a 
.wy-input-context, a .wy-tag-input-group .wy-tag .wy-tag-remove, .wy-tag-input-group .wy-tag a .wy-tag-remove, a .rst-content .admonition-title, .rst-content a .admonition-title, a .rst-content h1 .headerlink, .rst-content h1 a .headerlink, a .rst-content h2 .headerlink, .rst-content h2 a .headerlink, a .rst-content h3 .headerlink, .rst-content h3 a .headerlink, a .rst-content h4 .headerlink, .rst-content h4 a .headerlink, a .rst-content h5 .headerlink, .rst-content h5 a .headerlink, a .rst-content h6 .headerlink, .rst-content h6 a .headerlink, a .rst-content dl dt .headerlink, .rst-content dl dt a .headerlink { + display: inline-block; + text-decoration: inherit; +} + +.icon-large:before { + vertical-align: -10%; + font-size: 1.33333em; +} + +.btn .icon, .btn .wy-inline-validate.wy-inline-validate-success .wy-input-context, .wy-inline-validate.wy-inline-validate-success .btn .wy-input-context, .btn .wy-inline-validate.wy-inline-validate-danger .wy-input-context, .wy-inline-validate.wy-inline-validate-danger .btn .wy-input-context, .btn .wy-inline-validate.wy-inline-validate-warning .wy-input-context, .wy-inline-validate.wy-inline-validate-warning .btn .wy-input-context, .btn .wy-inline-validate.wy-inline-validate-info .wy-input-context, .wy-inline-validate.wy-inline-validate-info .btn .wy-input-context, .btn .wy-tag-input-group .wy-tag .wy-tag-remove, .wy-tag-input-group .wy-tag .btn .wy-tag-remove, .btn .rst-content .admonition-title, .rst-content .btn .admonition-title, .btn .rst-content h1 .headerlink, .rst-content h1 .btn .headerlink, .btn .rst-content h2 .headerlink, .rst-content h2 .btn .headerlink, .btn .rst-content h3 .headerlink, .rst-content h3 .btn .headerlink, .btn .rst-content h4 .headerlink, .rst-content h4 .btn .headerlink, .btn .rst-content h5 .headerlink, .rst-content h5 .btn .headerlink, .btn .rst-content h6 .headerlink, .rst-content h6 .btn .headerlink, .btn .rst-content dl dt .headerlink, .rst-content dl dt .btn .headerlink, .nav .icon, .nav .wy-inline-validate.wy-inline-validate-success .wy-input-context, .wy-inline-validate.wy-inline-validate-success .nav .wy-input-context, .nav .wy-inline-validate.wy-inline-validate-danger .wy-input-context, .wy-inline-validate.wy-inline-validate-danger .nav .wy-input-context, .nav .wy-inline-validate.wy-inline-validate-warning .wy-input-context, .wy-inline-validate.wy-inline-validate-warning .nav .wy-input-context, .nav .wy-inline-validate.wy-inline-validate-info .wy-input-context, .wy-inline-validate.wy-inline-validate-info .nav .wy-input-context, .nav .wy-tag-input-group .wy-tag .wy-tag-remove, .wy-tag-input-group .wy-tag .nav .wy-tag-remove, .nav .rst-content .admonition-title, .rst-content .nav .admonition-title, .nav .rst-content h1 .headerlink, .rst-content h1 .nav .headerlink, .nav .rst-content h2 .headerlink, .rst-content h2 .nav .headerlink, .nav .rst-content h3 .headerlink, .rst-content h3 .nav .headerlink, .nav .rst-content h4 .headerlink, .rst-content h4 .nav .headerlink, .nav .rst-content h5 .headerlink, .rst-content h5 .nav .headerlink, .nav .rst-content h6 .headerlink, .rst-content h6 .nav .headerlink, .nav .rst-content dl dt .headerlink, .rst-content dl dt .nav .headerlink { + display: inline; +} + +.btn .icon.icon-large, .btn .wy-inline-validate.wy-inline-validate-success .icon-large.wy-input-context, .wy-inline-validate.wy-inline-validate-success .btn .icon-large.wy-input-context, .btn .wy-inline-validate.wy-inline-validate-danger .icon-large.wy-input-context, .wy-inline-validate.wy-inline-validate-danger .btn 
.icon-large.wy-input-context, .btn .wy-inline-validate.wy-inline-validate-warning .icon-large.wy-input-context, .wy-inline-validate.wy-inline-validate-warning .btn .icon-large.wy-input-context, .btn .wy-inline-validate.wy-inline-validate-info .icon-large.wy-input-context, .wy-inline-validate.wy-inline-validate-info .btn .icon-large.wy-input-context, .btn .wy-tag-input-group .wy-tag .icon-large.wy-tag-remove, .wy-tag-input-group .wy-tag .btn .icon-large.wy-tag-remove, .btn .rst-content .icon-large.admonition-title, .rst-content .btn .icon-large.admonition-title, .btn .rst-content h1 .icon-large.headerlink, .rst-content h1 .btn .icon-large.headerlink, .btn .rst-content h2 .icon-large.headerlink, .rst-content h2 .btn .icon-large.headerlink, .btn .rst-content h3 .icon-large.headerlink, .rst-content h3 .btn .icon-large.headerlink, .btn .rst-content h4 .icon-large.headerlink, .rst-content h4 .btn .icon-large.headerlink, .btn .rst-content h5 .icon-large.headerlink, .rst-content h5 .btn .icon-large.headerlink, .btn .rst-content h6 .icon-large.headerlink, .rst-content h6 .btn .icon-large.headerlink, .btn .rst-content dl dt .icon-large.headerlink, .rst-content dl dt .btn .icon-large.headerlink, .nav .icon.icon-large, .nav .wy-inline-validate.wy-inline-validate-success .icon-large.wy-input-context, .wy-inline-validate.wy-inline-validate-success .nav .icon-large.wy-input-context, .nav .wy-inline-validate.wy-inline-validate-danger .icon-large.wy-input-context, .wy-inline-validate.wy-inline-validate-danger .nav .icon-large.wy-input-context, .nav .wy-inline-validate.wy-inline-validate-warning .icon-large.wy-input-context, .wy-inline-validate.wy-inline-validate-warning .nav .icon-large.wy-input-context, .nav .wy-inline-validate.wy-inline-validate-info .icon-large.wy-input-context, .wy-inline-validate.wy-inline-validate-info .nav .icon-large.wy-input-context, .nav .wy-tag-input-group .wy-tag .icon-large.wy-tag-remove, .wy-tag-input-group .wy-tag .nav .icon-large.wy-tag-remove, .nav .rst-content .icon-large.admonition-title, .rst-content .nav .icon-large.admonition-title, .nav .rst-content h1 .icon-large.headerlink, .rst-content h1 .nav .icon-large.headerlink, .nav .rst-content h2 .icon-large.headerlink, .rst-content h2 .nav .icon-large.headerlink, .nav .rst-content h3 .icon-large.headerlink, .rst-content h3 .nav .icon-large.headerlink, .nav .rst-content h4 .icon-large.headerlink, .rst-content h4 .nav .icon-large.headerlink, .nav .rst-content h5 .icon-large.headerlink, .rst-content h5 .nav .icon-large.headerlink, .nav .rst-content h6 .icon-large.headerlink, .rst-content h6 .nav .icon-large.headerlink, .nav .rst-content dl dt .icon-large.headerlink, .rst-content dl dt .nav .icon-large.headerlink { + line-height: 0.9em; +} + +.btn .icon.icon-spin, .btn .wy-inline-validate.wy-inline-validate-success .icon-spin.wy-input-context, .wy-inline-validate.wy-inline-validate-success .btn .icon-spin.wy-input-context, .btn .wy-inline-validate.wy-inline-validate-danger .icon-spin.wy-input-context, .wy-inline-validate.wy-inline-validate-danger .btn .icon-spin.wy-input-context, .btn .wy-inline-validate.wy-inline-validate-warning .icon-spin.wy-input-context, .wy-inline-validate.wy-inline-validate-warning .btn .icon-spin.wy-input-context, .btn .wy-inline-validate.wy-inline-validate-info .icon-spin.wy-input-context, .wy-inline-validate.wy-inline-validate-info .btn .icon-spin.wy-input-context, .btn .wy-tag-input-group .wy-tag .icon-spin.wy-tag-remove, .wy-tag-input-group .wy-tag .btn .icon-spin.wy-tag-remove, .btn .rst-content 
.icon-spin.admonition-title, .rst-content .btn .icon-spin.admonition-title, .btn .rst-content h1 .icon-spin.headerlink, .rst-content h1 .btn .icon-spin.headerlink, .btn .rst-content h2 .icon-spin.headerlink, .rst-content h2 .btn .icon-spin.headerlink, .btn .rst-content h3 .icon-spin.headerlink, .rst-content h3 .btn .icon-spin.headerlink, .btn .rst-content h4 .icon-spin.headerlink, .rst-content h4 .btn .icon-spin.headerlink, .btn .rst-content h5 .icon-spin.headerlink, .rst-content h5 .btn .icon-spin.headerlink, .btn .rst-content h6 .icon-spin.headerlink, .rst-content h6 .btn .icon-spin.headerlink, .btn .rst-content dl dt .icon-spin.headerlink, .rst-content dl dt .btn .icon-spin.headerlink, .nav .icon.icon-spin, .nav .wy-inline-validate.wy-inline-validate-success .icon-spin.wy-input-context, .wy-inline-validate.wy-inline-validate-success .nav .icon-spin.wy-input-context, .nav .wy-inline-validate.wy-inline-validate-danger .icon-spin.wy-input-context, .wy-inline-validate.wy-inline-validate-danger .nav .icon-spin.wy-input-context, .nav .wy-inline-validate.wy-inline-validate-warning .icon-spin.wy-input-context, .wy-inline-validate.wy-inline-validate-warning .nav .icon-spin.wy-input-context, .nav .wy-inline-validate.wy-inline-validate-info .icon-spin.wy-input-context, .wy-inline-validate.wy-inline-validate-info .nav .icon-spin.wy-input-context, .nav .wy-tag-input-group .wy-tag .icon-spin.wy-tag-remove, .wy-tag-input-group .wy-tag .nav .icon-spin.wy-tag-remove, .nav .rst-content .icon-spin.admonition-title, .rst-content .nav .icon-spin.admonition-title, .nav .rst-content h1 .icon-spin.headerlink, .rst-content h1 .nav .icon-spin.headerlink, .nav .rst-content h2 .icon-spin.headerlink, .rst-content h2 .nav .icon-spin.headerlink, .nav .rst-content h3 .icon-spin.headerlink, .rst-content h3 .nav .icon-spin.headerlink, .nav .rst-content h4 .icon-spin.headerlink, .rst-content h4 .nav .icon-spin.headerlink, .nav .rst-content h5 .icon-spin.headerlink, .rst-content h5 .nav .icon-spin.headerlink, .nav .rst-content h6 .icon-spin.headerlink, .rst-content h6 .nav .icon-spin.headerlink, .nav .rst-content dl dt .icon-spin.headerlink, .rst-content dl dt .nav .icon-spin.headerlink { + display: inline-block; +} + +.btn.icon:before, .wy-inline-validate.wy-inline-validate-success .btn.wy-input-context:before, .wy-inline-validate.wy-inline-validate-danger .btn.wy-input-context:before, .wy-inline-validate.wy-inline-validate-warning .btn.wy-input-context:before, .wy-inline-validate.wy-inline-validate-info .btn.wy-input-context:before, .wy-tag-input-group .wy-tag .btn.wy-tag-remove:before, .rst-content .btn.admonition-title:before, .rst-content h1 .btn.headerlink:before, .rst-content h2 .btn.headerlink:before, .rst-content h3 .btn.headerlink:before, .rst-content h4 .btn.headerlink:before, .rst-content h5 .btn.headerlink:before, .rst-content h6 .btn.headerlink:before, .rst-content dl dt .btn.headerlink:before { + opacity: 0.5; + -webkit-transition: opacity 0.05s ease-in; + -moz-transition: opacity 0.05s ease-in; + transition: opacity 0.05s ease-in; +} + +.btn.icon:hover:before, .wy-inline-validate.wy-inline-validate-success .btn.wy-input-context:hover:before, .wy-inline-validate.wy-inline-validate-danger .btn.wy-input-context:hover:before, .wy-inline-validate.wy-inline-validate-warning .btn.wy-input-context:hover:before, .wy-inline-validate.wy-inline-validate-info .btn.wy-input-context:hover:before, .wy-tag-input-group .wy-tag .btn.wy-tag-remove:hover:before, .rst-content .btn.admonition-title:hover:before, .rst-content h1 
.btn.headerlink:hover:before, .rst-content h2 .btn.headerlink:hover:before, .rst-content h3 .btn.headerlink:hover:before, .rst-content h4 .btn.headerlink:hover:before, .rst-content h5 .btn.headerlink:hover:before, .rst-content h6 .btn.headerlink:hover:before, .rst-content dl dt .btn.headerlink:hover:before { + opacity: 1; +} + +.btn-mini .icon:before, .btn-mini .wy-inline-validate.wy-inline-validate-success .wy-input-context:before, .wy-inline-validate.wy-inline-validate-success .btn-mini .wy-input-context:before, .btn-mini .wy-inline-validate.wy-inline-validate-danger .wy-input-context:before, .wy-inline-validate.wy-inline-validate-danger .btn-mini .wy-input-context:before, .btn-mini .wy-inline-validate.wy-inline-validate-warning .wy-input-context:before, .wy-inline-validate.wy-inline-validate-warning .btn-mini .wy-input-context:before, .btn-mini .wy-inline-validate.wy-inline-validate-info .wy-input-context:before, .wy-inline-validate.wy-inline-validate-info .btn-mini .wy-input-context:before, .btn-mini .wy-tag-input-group .wy-tag .wy-tag-remove:before, .wy-tag-input-group .wy-tag .btn-mini .wy-tag-remove:before, .btn-mini .rst-content .admonition-title:before, .rst-content .btn-mini .admonition-title:before, .btn-mini .rst-content h1 .headerlink:before, .rst-content h1 .btn-mini .headerlink:before, .btn-mini .rst-content h2 .headerlink:before, .rst-content h2 .btn-mini .headerlink:before, .btn-mini .rst-content h3 .headerlink:before, .rst-content h3 .btn-mini .headerlink:before, .btn-mini .rst-content h4 .headerlink:before, .rst-content h4 .btn-mini .headerlink:before, .btn-mini .rst-content h5 .headerlink:before, .rst-content h5 .btn-mini .headerlink:before, .btn-mini .rst-content h6 .headerlink:before, .rst-content h6 .btn-mini .headerlink:before, .btn-mini .rst-content dl dt .headerlink:before, .rst-content dl dt .btn-mini .headerlink:before { + font-size: 14px; + vertical-align: -15%; +} + +li .icon, li .wy-inline-validate.wy-inline-validate-success .wy-input-context, .wy-inline-validate.wy-inline-validate-success li .wy-input-context, li .wy-inline-validate.wy-inline-validate-danger .wy-input-context, .wy-inline-validate.wy-inline-validate-danger li .wy-input-context, li .wy-inline-validate.wy-inline-validate-warning .wy-input-context, .wy-inline-validate.wy-inline-validate-warning li .wy-input-context, li .wy-inline-validate.wy-inline-validate-info .wy-input-context, .wy-inline-validate.wy-inline-validate-info li .wy-input-context, li .wy-tag-input-group .wy-tag .wy-tag-remove, .wy-tag-input-group .wy-tag li .wy-tag-remove, li .rst-content .admonition-title, .rst-content li .admonition-title, li .rst-content h1 .headerlink, .rst-content h1 li .headerlink, li .rst-content h2 .headerlink, .rst-content h2 li .headerlink, li .rst-content h3 .headerlink, .rst-content h3 li .headerlink, li .rst-content h4 .headerlink, .rst-content h4 li .headerlink, li .rst-content h5 .headerlink, .rst-content h5 li .headerlink, li .rst-content h6 .headerlink, .rst-content h6 li .headerlink, li .rst-content dl dt .headerlink, .rst-content dl dt li .headerlink { + display: inline-block; +} + +li .icon-large:before, li .icon-large:before { + width: 1.875em; +} + +ul.icons { + list-style-type: none; + margin-left: 2em; + text-indent: -0.8em; +} + +ul.icons li .icon, ul.icons li .wy-inline-validate.wy-inline-validate-success .wy-input-context, .wy-inline-validate.wy-inline-validate-success ul.icons li .wy-input-context, ul.icons li .wy-inline-validate.wy-inline-validate-danger .wy-input-context, 
.wy-inline-validate.wy-inline-validate-danger ul.icons li .wy-input-context, ul.icons li .wy-inline-validate.wy-inline-validate-warning .wy-input-context, .wy-inline-validate.wy-inline-validate-warning ul.icons li .wy-input-context, ul.icons li .wy-inline-validate.wy-inline-validate-info .wy-input-context, .wy-inline-validate.wy-inline-validate-info ul.icons li .wy-input-context, ul.icons li .wy-tag-input-group .wy-tag .wy-tag-remove, .wy-tag-input-group .wy-tag ul.icons li .wy-tag-remove, ul.icons li .rst-content .admonition-title, .rst-content ul.icons li .admonition-title, ul.icons li .rst-content h1 .headerlink, .rst-content h1 ul.icons li .headerlink, ul.icons li .rst-content h2 .headerlink, .rst-content h2 ul.icons li .headerlink, ul.icons li .rst-content h3 .headerlink, .rst-content h3 ul.icons li .headerlink, ul.icons li .rst-content h4 .headerlink, .rst-content h4 ul.icons li .headerlink, ul.icons li .rst-content h5 .headerlink, .rst-content h5 ul.icons li .headerlink, ul.icons li .rst-content h6 .headerlink, .rst-content h6 ul.icons li .headerlink, ul.icons li .rst-content dl dt .headerlink, .rst-content dl dt ul.icons li .headerlink { + width: 0.8em; +} + +ul.icons li .icon-large:before, ul.icons li .icon-large:before { + vertical-align: baseline; +} + +.icon-glass:before { + content: "\f000"; +} + +.icon-music:before { + content: "\f001"; +} + +.icon-search:before { + content: "\f002"; +} + +.icon-envelope-alt:before { + content: "\f003"; +} + +.icon-heart:before { + content: "\f004"; +} + +.icon-star:before { + content: "\f005"; +} + +.icon-star-empty:before { + content: "\f006"; +} + +.icon-user:before { + content: "\f007"; +} + +.icon-film:before { + content: "\f008"; +} + +.icon-th-large:before { + content: "\f009"; +} + +.icon-th:before { + content: "\f00a"; +} + +.icon-th-list:before { + content: "\f00b"; +} + +.icon-ok:before { + content: "\f00c"; +} + +.icon-remove:before, .wy-tag-input-group .wy-tag .wy-tag-remove:before { + content: "\f00d"; +} + +.icon-zoom-in:before { + content: "\f00e"; +} + +.icon-zoom-out:before { + content: "\f010"; +} + +.icon-power-off:before, .icon-off:before { + content: "\f011"; +} + +.icon-signal:before { + content: "\f012"; +} + +.icon-gear:before, .icon-cog:before { + content: "\f013"; +} + +.icon-trash:before { + content: "\f014"; +} + +.icon-home:before { + content: "\f015"; +} + +.icon-file-alt:before { + content: "\f016"; +} + +.icon-time:before { + content: "\f017"; +} + +.icon-road:before { + content: "\f018"; +} + +.icon-download-alt:before { + content: "\f019"; +} + +.icon-download:before { + content: "\f01a"; +} + +.icon-upload:before { + content: "\f01b"; +} + +.icon-inbox:before { + content: "\f01c"; +} + +.icon-play-circle:before { + content: "\f01d"; +} + +.icon-rotate-right:before, .icon-repeat:before { + content: "\f01e"; +} + +.icon-refresh:before { + content: "\f021"; +} + +.icon-list-alt:before { + content: "\f022"; +} + +.icon-lock:before { + content: "\f023"; +} + +.icon-flag:before { + content: "\f024"; +} + +.icon-headphones:before { + content: "\f025"; +} + +.icon-volume-off:before { + content: "\f026"; +} + +.icon-volume-down:before { + content: "\f027"; +} + +.icon-volume-up:before { + content: "\f028"; +} + +.icon-qrcode:before { + content: "\f029"; +} + +.icon-barcode:before { + content: "\f02a"; +} + +.icon-tag:before { + content: "\f02b"; +} + +.icon-tags:before { + content: "\f02c"; +} + +.icon-book:before { + content: "\f02d"; +} + +.icon-bookmark:before { + content: "\f02e"; +} + +.icon-print:before { + 
content: "\f02f"; +} + +.icon-camera:before { + content: "\f030"; +} + +.icon-font:before { + content: "\f031"; +} + +.icon-bold:before { + content: "\f032"; +} + +.icon-italic:before { + content: "\f033"; +} + +.icon-text-height:before { + content: "\f034"; +} + +.icon-text-width:before { + content: "\f035"; +} + +.icon-align-left:before { + content: "\f036"; +} + +.icon-align-center:before { + content: "\f037"; +} + +.icon-align-right:before { + content: "\f038"; +} + +.icon-align-justify:before { + content: "\f039"; +} + +.icon-list:before { + content: "\f03a"; +} + +.icon-indent-left:before { + content: "\f03b"; +} + +.icon-indent-right:before { + content: "\f03c"; +} + +.icon-facetime-video:before { + content: "\f03d"; +} + +.icon-picture:before { + content: "\f03e"; +} + +.icon-pencil:before { + content: "\f040"; +} + +.icon-map-marker:before { + content: "\f041"; +} + +.icon-adjust:before { + content: "\f042"; +} + +.icon-tint:before { + content: "\f043"; +} + +.icon-edit:before { + content: "\f044"; +} + +.icon-share:before { + content: "\f045"; +} + +.icon-check:before { + content: "\f046"; +} + +.icon-move:before { + content: "\f047"; +} + +.icon-step-backward:before { + content: "\f048"; +} + +.icon-fast-backward:before { + content: "\f049"; +} + +.icon-backward:before { + content: "\f04a"; +} + +.icon-play:before { + content: "\f04b"; +} + +.icon-pause:before { + content: "\f04c"; +} + +.icon-stop:before { + content: "\f04d"; +} + +.icon-forward:before { + content: "\f04e"; +} + +.icon-fast-forward:before { + content: "\f050"; +} + +.icon-step-forward:before { + content: "\f051"; +} + +.icon-eject:before { + content: "\f052"; +} + +.icon-chevron-left:before { + content: "\f053"; +} + +.icon-chevron-right:before { + content: "\f054"; +} + +.icon-plus-sign:before { + content: "\f055"; +} + +.icon-minus-sign:before { + content: "\f056"; +} + +.icon-remove-sign:before, .wy-inline-validate.wy-inline-validate-danger .wy-input-context:before { + content: "\f057"; +} + +.icon-ok-sign:before { + content: "\f058"; +} + +.icon-question-sign:before { + content: "\f059"; +} + +.icon-info-sign:before { + content: "\f05a"; +} + +.icon-screenshot:before { + content: "\f05b"; +} + +.icon-remove-circle:before { + content: "\f05c"; +} + +.icon-ok-circle:before { + content: "\f05d"; +} + +.icon-ban-circle:before { + content: "\f05e"; +} + +.icon-arrow-left:before { + content: "\f060"; +} + +.icon-arrow-right:before { + content: "\f061"; +} + +.icon-arrow-up:before { + content: "\f062"; +} + +.icon-arrow-down:before { + content: "\f063"; +} + +.icon-mail-forward:before, .icon-share-alt:before { + content: "\f064"; +} + +.icon-resize-full:before { + content: "\f065"; +} + +.icon-resize-small:before { + content: "\f066"; +} + +.icon-plus:before { + content: "\f067"; +} + +.icon-minus:before { + content: "\f068"; +} + +.icon-asterisk:before { + content: "\f069"; +} + +.icon-exclamation-sign:before, .wy-inline-validate.wy-inline-validate-warning .wy-input-context:before, .wy-inline-validate.wy-inline-validate-info .wy-input-context:before, .rst-content .admonition-title:before { + content: "\f06a"; +} + +.icon-gift:before { + content: "\f06b"; +} + +.icon-leaf:before { + content: "\f06c"; +} + +.icon-fire:before { + content: "\f06d"; +} + +.icon-eye-open:before { + content: "\f06e"; +} + +.icon-eye-close:before { + content: "\f070"; +} + +.icon-warning-sign:before { + content: "\f071"; +} + +.icon-plane:before { + content: "\f072"; +} + +.icon-calendar:before { + content: "\f073"; +} + 
+.icon-random:before { + content: "\f074"; +} + +.icon-comment:before { + content: "\f075"; +} + +.icon-magnet:before { + content: "\f076"; +} + +.icon-chevron-up:before { + content: "\f077"; +} + +.icon-chevron-down:before { + content: "\f078"; +} + +.icon-retweet:before { + content: "\f079"; +} + +.icon-shopping-cart:before { + content: "\f07a"; +} + +.icon-folder-close:before { + content: "\f07b"; +} + +.icon-folder-open:before { + content: "\f07c"; +} + +.icon-resize-vertical:before { + content: "\f07d"; +} + +.icon-resize-horizontal:before { + content: "\f07e"; +} + +.icon-bar-chart:before { + content: "\f080"; +} + +.icon-twitter-sign:before { + content: "\f081"; +} + +.icon-facebook-sign:before { + content: "\f082"; +} + +.icon-camera-retro:before { + content: "\f083"; +} + +.icon-key:before { + content: "\f084"; +} + +.icon-gears:before, .icon-cogs:before { + content: "\f085"; +} + +.icon-comments:before { + content: "\f086"; +} + +.icon-thumbs-up-alt:before { + content: "\f087"; +} + +.icon-thumbs-down-alt:before { + content: "\f088"; +} + +.icon-star-half:before { + content: "\f089"; +} + +.icon-heart-empty:before { + content: "\f08a"; +} + +.icon-signout:before { + content: "\f08b"; +} + +.icon-linkedin-sign:before { + content: "\f08c"; +} + +.icon-pushpin:before { + content: "\f08d"; +} + +.icon-external-link:before { + content: "\f08e"; +} + +.icon-signin:before { + content: "\f090"; +} + +.icon-trophy:before { + content: "\f091"; +} + +.icon-github-sign:before { + content: "\f092"; +} + +.icon-upload-alt:before { + content: "\f093"; +} + +.icon-lemon:before { + content: "\f094"; +} + +.icon-phone:before { + content: "\f095"; +} + +.icon-unchecked:before, .icon-check-empty:before { + content: "\f096"; +} + +.icon-bookmark-empty:before { + content: "\f097"; +} + +.icon-phone-sign:before { + content: "\f098"; +} + +.icon-twitter:before { + content: "\f099"; +} + +.icon-facebook:before { + content: "\f09a"; +} + +.icon-github:before { + content: "\f09b"; +} + +.icon-unlock:before { + content: "\f09c"; +} + +.icon-credit-card:before { + content: "\f09d"; +} + +.icon-rss:before { + content: "\f09e"; +} + +.icon-hdd:before { + content: "\f0a0"; +} + +.icon-bullhorn:before { + content: "\f0a1"; +} + +.icon-bell:before { + content: "\f0a2"; +} + +.icon-certificate:before { + content: "\f0a3"; +} + +.icon-hand-right:before { + content: "\f0a4"; +} + +.icon-hand-left:before { + content: "\f0a5"; +} + +.icon-hand-up:before { + content: "\f0a6"; +} + +.icon-hand-down:before { + content: "\f0a7"; +} + +.icon-circle-arrow-left:before { + content: "\f0a8"; +} + +.icon-circle-arrow-right:before { + content: "\f0a9"; +} + +.icon-circle-arrow-up:before { + content: "\f0aa"; +} + +.icon-circle-arrow-down:before { + content: "\f0ab"; +} + +.icon-globe:before { + content: "\f0ac"; +} + +.icon-wrench:before { + content: "\f0ad"; +} + +.icon-tasks:before { + content: "\f0ae"; +} + +.icon-filter:before { + content: "\f0b0"; +} + +.icon-briefcase:before { + content: "\f0b1"; +} + +.icon-fullscreen:before { + content: "\f0b2"; +} + +.icon-group:before { + content: "\f0c0"; +} + +.icon-link:before { + content: "\f0c1"; +} + +.icon-cloud:before { + content: "\f0c2"; +} + +.icon-beaker:before { + content: "\f0c3"; +} + +.icon-cut:before { + content: "\f0c4"; +} + +.icon-copy:before { + content: "\f0c5"; +} + +.icon-paperclip:before, .icon-paper-clip:before { + content: "\f0c6"; +} + +.icon-save:before { + content: "\f0c7"; +} + +.icon-sign-blank:before { + content: "\f0c8"; +} + +.icon-reorder:before { + 
content: "\f0c9"; +} + +.icon-list-ul:before { + content: "\f0ca"; +} + +.icon-list-ol:before { + content: "\f0cb"; +} + +.icon-strikethrough:before { + content: "\f0cc"; +} + +.icon-underline:before { + content: "\f0cd"; +} + +.icon-table:before { + content: "\f0ce"; +} + +.icon-magic:before { + content: "\f0d0"; +} + +.icon-truck:before { + content: "\f0d1"; +} + +.icon-pinterest:before { + content: "\f0d2"; +} + +.icon-pinterest-sign:before { + content: "\f0d3"; +} + +.icon-google-plus-sign:before { + content: "\f0d4"; +} + +.icon-google-plus:before { + content: "\f0d5"; +} + +.icon-money:before { + content: "\f0d6"; +} + +.icon-caret-down:before { + content: "\f0d7"; +} + +.icon-caret-up:before { + content: "\f0d8"; +} + +.icon-caret-left:before { + content: "\f0d9"; +} + +.icon-caret-right:before { + content: "\f0da"; +} + +.icon-columns:before { + content: "\f0db"; +} + +.icon-sort:before { + content: "\f0dc"; +} + +.icon-sort-down:before { + content: "\f0dd"; +} + +.icon-sort-up:before { + content: "\f0de"; +} + +.icon-envelope:before { + content: "\f0e0"; +} + +.icon-linkedin:before { + content: "\f0e1"; +} + +.icon-rotate-left:before, .icon-undo:before { + content: "\f0e2"; +} + +.icon-legal:before { + content: "\f0e3"; +} + +.icon-dashboard:before { + content: "\f0e4"; +} + +.icon-comment-alt:before { + content: "\f0e5"; +} + +.icon-comments-alt:before { + content: "\f0e6"; +} + +.icon-bolt:before { + content: "\f0e7"; +} + +.icon-sitemap:before { + content: "\f0e8"; +} + +.icon-umbrella:before { + content: "\f0e9"; +} + +.icon-paste:before { + content: "\f0ea"; +} + +.icon-lightbulb:before { + content: "\f0eb"; +} + +.icon-exchange:before { + content: "\f0ec"; +} + +.icon-cloud-download:before { + content: "\f0ed"; +} + +.icon-cloud-upload:before { + content: "\f0ee"; +} + +.icon-user-md:before { + content: "\f0f0"; +} + +.icon-stethoscope:before { + content: "\f0f1"; +} + +.icon-suitcase:before { + content: "\f0f2"; +} + +.icon-bell-alt:before { + content: "\f0f3"; +} + +.icon-coffee:before { + content: "\f0f4"; +} + +.icon-food:before { + content: "\f0f5"; +} + +.icon-file-text-alt:before { + content: "\f0f6"; +} + +.icon-building:before { + content: "\f0f7"; +} + +.icon-hospital:before { + content: "\f0f8"; +} + +.icon-ambulance:before { + content: "\f0f9"; +} + +.icon-medkit:before { + content: "\f0fa"; +} + +.icon-fighter-jet:before { + content: "\f0fb"; +} + +.icon-beer:before { + content: "\f0fc"; +} + +.icon-h-sign:before { + content: "\f0fd"; +} + +.icon-plus-sign-alt:before { + content: "\f0fe"; +} + +.icon-double-angle-left:before { + content: "\f100"; +} + +.icon-double-angle-right:before { + content: "\f101"; +} + +.icon-double-angle-up:before { + content: "\f102"; +} + +.icon-double-angle-down:before { + content: "\f103"; +} + +.icon-angle-left:before { + content: "\f104"; +} + +.icon-angle-right:before { + content: "\f105"; +} + +.icon-angle-up:before { + content: "\f106"; +} + +.icon-angle-down:before { + content: "\f107"; +} + +.icon-desktop:before { + content: "\f108"; +} + +.icon-laptop:before { + content: "\f109"; +} + +.icon-tablet:before { + content: "\f10a"; +} + +.icon-mobile-phone:before { + content: "\f10b"; +} + +.icon-circle-blank:before { + content: "\f10c"; +} + +.icon-quote-left:before { + content: "\f10d"; +} + +.icon-quote-right:before { + content: "\f10e"; +} + +.icon-spinner:before { + content: "\f110"; +} + +.icon-circle:before { + content: "\f111"; +} + +.icon-mail-reply:before, .icon-reply:before { + content: "\f112"; +} + 
+.icon-github-alt:before { + content: "\f113"; +} + +.icon-folder-close-alt:before { + content: "\f114"; +} + +.icon-folder-open-alt:before { + content: "\f115"; +} + +.icon-expand-alt:before { + content: "\f116"; +} + +.icon-collapse-alt:before { + content: "\f117"; +} + +.icon-smile:before { + content: "\f118"; +} + +.icon-frown:before { + content: "\f119"; +} + +.icon-meh:before { + content: "\f11a"; +} + +.icon-gamepad:before { + content: "\f11b"; +} + +.icon-keyboard:before { + content: "\f11c"; +} + +.icon-flag-alt:before { + content: "\f11d"; +} + +.icon-flag-checkered:before { + content: "\f11e"; +} + +.icon-terminal:before { + content: "\f120"; +} + +.icon-code:before { + content: "\f121"; +} + +.icon-reply-all:before { + content: "\f122"; +} + +.icon-mail-reply-all:before { + content: "\f122"; +} + +.icon-star-half-full:before, .icon-star-half-empty:before { + content: "\f123"; +} + +.icon-location-arrow:before { + content: "\f124"; +} + +.icon-crop:before { + content: "\f125"; +} + +.icon-code-fork:before { + content: "\f126"; +} + +.icon-unlink:before { + content: "\f127"; +} + +.icon-question:before { + content: "\f128"; +} + +.icon-info:before { + content: "\f129"; +} + +.icon-exclamation:before { + content: "\f12a"; +} + +.icon-superscript:before { + content: "\f12b"; +} + +.icon-subscript:before { + content: "\f12c"; +} + +.icon-eraser:before { + content: "\f12d"; +} + +.icon-puzzle-piece:before { + content: "\f12e"; +} + +.icon-microphone:before { + content: "\f130"; +} + +.icon-microphone-off:before { + content: "\f131"; +} + +.icon-shield:before { + content: "\f132"; +} + +.icon-calendar-empty:before { + content: "\f133"; +} + +.icon-fire-extinguisher:before { + content: "\f134"; +} + +.icon-rocket:before { + content: "\f135"; +} + +.icon-maxcdn:before { + content: "\f136"; +} + +.icon-chevron-sign-left:before { + content: "\f137"; +} + +.icon-chevron-sign-right:before { + content: "\f138"; +} + +.icon-chevron-sign-up:before { + content: "\f139"; +} + +.icon-chevron-sign-down:before { + content: "\f13a"; +} + +.icon-html5:before { + content: "\f13b"; +} + +.icon-css3:before { + content: "\f13c"; +} + +.icon-anchor:before { + content: "\f13d"; +} + +.icon-unlock-alt:before { + content: "\f13e"; +} + +.icon-bullseye:before { + content: "\f140"; +} + +.icon-ellipsis-horizontal:before { + content: "\f141"; +} + +.icon-ellipsis-vertical:before { + content: "\f142"; +} + +.icon-rss-sign:before { + content: "\f143"; +} + +.icon-play-sign:before { + content: "\f144"; +} + +.icon-ticket:before { + content: "\f145"; +} + +.icon-minus-sign-alt:before { + content: "\f146"; +} + +.icon-check-minus:before { + content: "\f147"; +} + +.icon-level-up:before { + content: "\f148"; +} + +.icon-level-down:before { + content: "\f149"; +} + +.icon-check-sign:before, .wy-inline-validate.wy-inline-validate-success .wy-input-context:before { + content: "\f14a"; +} + +.icon-edit-sign:before { + content: "\f14b"; +} + +.icon-external-link-sign:before { + content: "\f14c"; +} + +.icon-share-sign:before { + content: "\f14d"; +} + +.icon-compass:before { + content: "\f14e"; +} + +.icon-collapse:before { + content: "\f150"; +} + +.icon-collapse-top:before { + content: "\f151"; +} + +.icon-expand:before { + content: "\f152"; +} + +.icon-euro:before, .icon-eur:before { + content: "\f153"; +} + +.icon-gbp:before { + content: "\f154"; +} + +.icon-dollar:before, .icon-usd:before { + content: "\f155"; +} + +.icon-rupee:before, .icon-inr:before { + content: "\f156"; +} + +.icon-yen:before, .icon-jpy:before { 
+ content: "\f157"; +} + +.icon-renminbi:before, .icon-cny:before { + content: "\f158"; +} + +.icon-won:before, .icon-krw:before { + content: "\f159"; +} + +.icon-bitcoin:before, .icon-btc:before { + content: "\f15a"; +} + +.icon-file:before { + content: "\f15b"; +} + +.icon-file-text:before { + content: "\f15c"; +} + +.icon-sort-by-alphabet:before { + content: "\f15d"; +} + +.icon-sort-by-alphabet-alt:before { + content: "\f15e"; +} + +.icon-sort-by-attributes:before { + content: "\f160"; +} + +.icon-sort-by-attributes-alt:before { + content: "\f161"; +} + +.icon-sort-by-order:before { + content: "\f162"; +} + +.icon-sort-by-order-alt:before { + content: "\f163"; +} + +.icon-thumbs-up:before { + content: "\f164"; +} + +.icon-thumbs-down:before { + content: "\f165"; +} + +.icon-youtube-sign:before { + content: "\f166"; +} + +.icon-youtube:before { + content: "\f167"; +} + +.icon-xing:before { + content: "\f168"; +} + +.icon-xing-sign:before { + content: "\f169"; +} + +.icon-youtube-play:before { + content: "\f16a"; +} + +.icon-dropbox:before { + content: "\f16b"; +} + +.icon-stackexchange:before { + content: "\f16c"; +} + +.icon-instagram:before { + content: "\f16d"; +} + +.icon-flickr:before { + content: "\f16e"; +} + +.icon-adn:before { + content: "\f170"; +} + +.icon-bitbucket:before { + content: "\f171"; +} + +.icon-bitbucket-sign:before { + content: "\f172"; +} + +.icon-tumblr:before { + content: "\f173"; +} + +.icon-tumblr-sign:before { + content: "\f174"; +} + +.icon-long-arrow-down:before { + content: "\f175"; +} + +.icon-long-arrow-up:before { + content: "\f176"; +} + +.icon-long-arrow-left:before { + content: "\f177"; +} + +.icon-long-arrow-right:before { + content: "\f178"; +} + +.icon-apple:before { + content: "\f179"; +} + +.icon-windows:before { + content: "\f17a"; +} + +.icon-android:before { + content: "\f17b"; +} + +.icon-linux:before { + content: "\f17c"; +} + +.icon-dribbble:before { + content: "\f17d"; +} + +.icon-skype:before { + content: "\f17e"; +} + +.icon-foursquare:before { + content: "\f180"; +} + +.icon-trello:before { + content: "\f181"; +} + +.icon-female:before { + content: "\f182"; +} + +.icon-male:before { + content: "\f183"; +} + +.icon-gittip:before { + content: "\f184"; +} + +.icon-sun:before { + content: "\f185"; +} + +.icon-moon:before { + content: "\f186"; +} + +.icon-archive:before { + content: "\f187"; +} + +.icon-bug:before { + content: "\f188"; +} + +.icon-vk:before { + content: "\f189"; +} + +.icon-weibo:before { + content: "\f18a"; +} + +.icon-renren:before { + content: "\f18b"; +} + +.wy-alert, .rst-content .note, .rst-content .attention, .rst-content .caution, .rst-content .danger, .rst-content .error, .rst-content .hint, .rst-content .important, .rst-content .tip, .rst-content .warning { + padding: 24px; + line-height: 24px; + margin-bottom: 24px; + border-left: solid 3px transparent; +} + +.wy-alert strong, .rst-content .note strong, .rst-content .attention strong, .rst-content .caution strong, .rst-content .danger strong, .rst-content .error strong, .rst-content .hint strong, .rst-content .important strong, .rst-content .tip strong, .rst-content .warning strong, .wy-alert a, .rst-content .note a, .rst-content .attention a, .rst-content .caution a, .rst-content .danger a, .rst-content .error a, .rst-content .hint a, .rst-content .important a, .rst-content .tip a, .rst-content .warning a { + color: #fff; +} + +.wy-alert.wy-alert-danger, .rst-content .wy-alert-danger.note, .rst-content .wy-alert-danger.attention, .rst-content 
.wy-alert-danger.caution, .rst-content .danger, .rst-content .error, .rst-content .wy-alert-danger.hint, .rst-content .wy-alert-danger.important, .rst-content .wy-alert-danger.tip, .rst-content .wy-alert-danger.warning { + background: #e74c3c; + color: #fff; + border-color: #d62c1a; +} + +.wy-alert.wy-alert-warning, .rst-content .wy-alert-warning.note, .rst-content .attention, .rst-content .caution, .rst-content .wy-alert-warning.danger, .rst-content .wy-alert-warning.error, .rst-content .wy-alert-warning.hint, .rst-content .wy-alert-warning.important, .rst-content .wy-alert-warning.tip, .rst-content .warning { + background: #e67e22; + color: #fff; + border-color: #ff5850; +} + +.wy-alert.wy-alert-info, .rst-content .note, .rst-content .wy-alert-info.attention, .rst-content .wy-alert-info.caution, .rst-content .wy-alert-info.danger, .rst-content .wy-alert-info.error, .rst-content .hint, .rst-content .important, .rst-content .tip, .rst-content .wy-alert-info.warning { + background: #64c5c7; + color: #fff; + border-color: #64c5c7; +} + +.wy-alert.wy-alert-success, .rst-content .wy-alert-success.note, .rst-content .wy-alert-success.attention, .rst-content .wy-alert-success.caution, .rst-content .wy-alert-success.danger, .rst-content .wy-alert-success.error, .rst-content .wy-alert-success.hint, .rst-content .wy-alert-success.important, .rst-content .wy-alert-success.tip, .rst-content .wy-alert-success.warning { + background: #27ae60; + color: #fff; + border-color: #1e8449; +} + +.wy-alert.wy-alert-neutral, .rst-content .wy-alert-neutral.note, .rst-content .wy-alert-neutral.attention, .rst-content .wy-alert-neutral.caution, .rst-content .wy-alert-neutral.danger, .rst-content .wy-alert-neutral.error, .rst-content .wy-alert-neutral.hint, .rst-content .wy-alert-neutral.important, .rst-content .wy-alert-neutral.tip, .rst-content .wy-alert-neutral.warning { + background: #f3f6f6; + border-color: #e1e4e5; +} + +.wy-alert.wy-alert-neutral strong, .rst-content .wy-alert-neutral.note strong, .rst-content .wy-alert-neutral.attention strong, .rst-content .wy-alert-neutral.caution strong, .rst-content .wy-alert-neutral.danger strong, .rst-content .wy-alert-neutral.error strong, .rst-content .wy-alert-neutral.hint strong, .rst-content .wy-alert-neutral.important strong, .rst-content .wy-alert-neutral.tip strong, .rst-content .wy-alert-neutral.warning strong { + color: #404040; +} + +.wy-alert.wy-alert-neutral a, .rst-content .wy-alert-neutral.note a, .rst-content .wy-alert-neutral.attention a, .rst-content .wy-alert-neutral.caution a, .rst-content .wy-alert-neutral.danger a, .rst-content .wy-alert-neutral.error a, .rst-content .wy-alert-neutral.hint a, .rst-content .wy-alert-neutral.important a, .rst-content .wy-alert-neutral.tip a, .rst-content .wy-alert-neutral.warning a { + color: #64c5c7; +} + +.wy-tray-container { + position: fixed; + top: -50px; + left: 0; + width: 100%; + -webkit-transition: top 0.2s ease-in; + -moz-transition: top 0.2s ease-in; + transition: top 0.2s ease-in; +} + +.wy-tray-container.on { + top: 0; +} + +.wy-tray-container li { + display: none; + width: 100%; + background: #343131; + padding: 12px 24px; + color: #fff; + margin-bottom: 6px; + text-align: center; + box-shadow: 0 5px 5px 0 rgba(0, 0, 0, 0.1), 0px -1px 2px -1px rgba(255, 255, 255, 0.5) inset; +} + +.wy-tray-container li.wy-tray-item-success { + background: #27ae60; +} + +.wy-tray-container li.wy-tray-item-info { + background: #2980b9; +} + +.wy-tray-container li.wy-tray-item-warning { + background: #e67e22; +} + 
+.wy-tray-container li.wy-tray-item-danger { + background: #e74c3c; +} + +.btn { + display: inline-block; + *display: inline; + zoom: 1; + line-height: normal; + white-space: nowrap; + vertical-align: baseline; + text-align: center; + cursor: pointer; + -webkit-user-drag: none; + -webkit-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; + font-size: 100%; + padding: 6px 12px; + color: #fff; + border: 1px solid rgba(0, 0, 0, 0.1); + border-bottom: solid 3px rgba(0, 0, 0, 0.1); + background-color: #27ae60; + text-decoration: none; + font-weight: 500; + box-shadow: 0px 1px 2px -1px rgba(255, 255, 255, 0.5) inset; + -webkit-transition: all 0.1s linear; + -moz-transition: all 0.1s linear; + transition: all 0.1s linear; + outline-none: false; +} + +.btn-hover { + background: #2e8ece; + color: #fff; +} + +.btn:hover { + background: #2cc36b; + color: #fff; +} + +.btn:focus { + background: #2cc36b; + color: #fff; + outline: 0; +} + +.btn:active { + border-top: solid 3px rgba(0, 0, 0, 0.1); + border-bottom: solid 1px rgba(0, 0, 0, 0.1); + box-shadow: 0px 1px 2px -1px rgba(0, 0, 0, 0.5) inset; +} + +.btn[disabled] { + background-image: none; + filter: progid:DXImageTransform.Microsoft.gradient(enabled = false); + filter: alpha(opacity=40); + opacity: 0.4; + cursor: not-allowed; + box-shadow: none; +} + +.btn-disabled { + background-image: none; + filter: progid:DXImageTransform.Microsoft.gradient(enabled = false); + filter: alpha(opacity=40); + opacity: 0.4; + cursor: not-allowed; + box-shadow: none; +} + +.btn-disabled:hover, .btn-disabled:focus, .btn-disabled:active { + background-image: none; + filter: progid:DXImageTransform.Microsoft.gradient(enabled = false); + filter: alpha(opacity=40); + opacity: 0.4; + cursor: not-allowed; + box-shadow: none; +} + +.btn::-moz-focus-inner { + padding: 0; + border: 0; +} + +.btn-small { + font-size: 80%; +} + +.btn-info { + background-color: #64c5c7 !important; +} + +.btn-info:hover { + background-color: #2e8ece !important; +} + +.btn-neutral { + background-color: #f3f6f6 !important; + color: #404040 !important; +} + +.btn-neutral:hover { + background-color: #e5ebeb !important; + color: #404040; +} + +.btn-danger { + background-color: #e74c3c !important; +} + +.btn-danger:hover { + background-color: #ea6153 !important; +} + +.btn-warning { + background-color: #e67e22 !important; +} + +.btn-warning:hover { + background-color: #e98b39 !important; +} + +.btn-invert { + background-color: #343131; +} + +.btn-invert:hover { + background-color: #413d3d !important; +} + +.btn-link { + background-color: transparent !important; + color: #2980b9; + border-color: transparent; +} + +.btn-link:hover { + background-color: transparent !important; + color: #409ad5; + border-color: transparent; +} + +.btn-link:active { + background-color: transparent !important; + border-color: transparent; + border-top: solid 1px transparent; + border-bottom: solid 3px transparent; +} + +.wy-btn-group .btn, .wy-control .btn { + vertical-align: middle; +} + +.wy-btn-group { + margin-bottom: 24px; + *zoom: 1; +} + +.wy-btn-group:before, .wy-btn-group:after { + display: table; + content: ""; +} + +.wy-btn-group:after { + clear: both; +} + +.wy-dropdown { + position: relative; + display: inline-block; +} + +.wy-dropdown:hover .wy-dropdown-menu { + display: block; +} + +.wy-dropdown .caret:after { + font-family: fontawesome-webfont; + content: "\f0d7"; + font-size: 70%; +} + +.wy-dropdown-menu { + position: absolute; + top: 100%; + left: 0; + display: none; 
+ float: left; + min-width: 100%; + background: #fcfcfc; + z-index: 100; + border: solid 1px #cfd7dd; + box-shadow: 0 5px 5px 0 rgba(0, 0, 0, 0.1); + padding: 12px; +} + +.wy-dropdown-menu>dd>a { + display: block; + clear: both; + color: #404040; + white-space: nowrap; + font-size: 90%; + padding: 0 12px; +} + +.wy-dropdown-menu>dd>a:hover { + background: #2980b9; + color: #fff; +} + +.wy-dropdown-menu>dd.divider { + border-top: solid 1px #cfd7dd; + margin: 6px 0; +} + +.wy-dropdown-menu>dd.search { + padding-bottom: 12px; +} + +.wy-dropdown-menu>dd.search input[type="search"] { + width: 100%; +} + +.wy-dropdown-menu>dd.call-to-action { + background: #e3e3e3; + text-transform: uppercase; + font-weight: 500; + font-size: 80%; +} + +.wy-dropdown-menu>dd.call-to-action:hover { + background: #e3e3e3; +} + +.wy-dropdown-menu>dd.call-to-action .btn { + color: #fff; +} + +.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu { + background: #fcfcfc; + margin-top: 2px; +} + +.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a { + padding: 6px 12px; +} + +.wy-dropdown.wy-dropdown-bubble .wy-dropdown-menu a:hover { + background: #2980b9; + color: #fff; +} + +.wy-dropdown.wy-dropdown-left .wy-dropdown-menu { + right: 0; + text-align: right; +} + +.wy-dropdown-arrow:before { + content: " "; + border-bottom: 5px solid #f5f5f5; + border-left: 5px solid transparent; + border-right: 5px solid transparent; + position: absolute; + display: block; + top: -4px; + left: 50%; + margin-left: -3px; +} + +.wy-dropdown-arrow.wy-dropdown-arrow-left:before { + left: 11px; +} + +.wy-form-stacked select { + display: block; +} + +.wy-form-aligned input, .wy-form-aligned textarea, .wy-form-aligned select, .wy-form-aligned .wy-help-inline, .wy-form-aligned label { + display: inline-block; + *display: inline; + *zoom: 1; + vertical-align: middle; +} + +.wy-form-aligned .wy-control-group>label { + display: inline-block; + vertical-align: middle; + width: 10em; + margin: 0.5em 1em 0 0; + float: left; +} + +.wy-form-aligned .wy-control { + float: left; +} + +.wy-form-aligned .wy-control label { + display: block; +} + +.wy-form-aligned .wy-control select { + margin-top: 0.5em; +} + +fieldset { + border: 0; + margin: 0; + padding: 0; +} + +legend { + display: block; + width: 100%; + border: 0; + padding: 0; + white-space: normal; + margin-bottom: 24px; + font-size: 150%; + *margin-left: -7px; +} + +label { + display: block; + margin: 0 0 0.3125em 0; + color: #999; + font-size: 90%; +} + +button, input, select, textarea { + font-size: 100%; + margin: 0; + vertical-align: baseline; + *vertical-align: middle; +} + +button, input { + line-height: normal; +} + +button { + -webkit-appearance: button; + cursor: pointer; + *overflow: visible; +} + +button::-moz-focus-inner, input::-moz-focus-inner { + border: 0; + padding: 0; +} + +button[disabled] { + cursor: default; +} + +input[type="button"], input[type="reset"], input[type="submit"] { + -webkit-appearance: button; + cursor: pointer; + *overflow: visible; +} + +input[type="text"], input[type="password"], input[type="email"], input[type="url"], input[type="date"], input[type="month"], input[type="time"], input[type="datetime"], input[type="datetime-local"], input[type="week"], input[type="number"], input[type="search"], input[type="tel"], input[type="color"] { + -webkit-appearance: none; + padding: 6px; + display: inline-block; + border: 1px solid #ccc; + font-size: 80%; + font-family: "Lato", "proxima-nova", "Helvetica Neue", Arial, sans-serif; + box-shadow: inset 0 1px 3px #ddd; + 
border-radius: 0; + -webkit-transition: border 0.3s linear; + -moz-transition: border 0.3s linear; + transition: border 0.3s linear; +} + +input[type="datetime-local"] { + padding: 0.34375em 0.625em; +} + +input[disabled] { + cursor: default; +} + +input[type="checkbox"], input[type="radio"] { + -webkit-box-sizing: border-box; + -moz-box-sizing: border-box; + box-sizing: border-box; + padding: 0; + margin-right: 0.3125em; + *height: 13px; + *width: 13px; +} + +input[type="search"] { + -webkit-box-sizing: border-box; + -moz-box-sizing: border-box; + box-sizing: border-box; +} + +input[type="search"]::-webkit-search-cancel-button, input[type="search"]::-webkit-search-decoration { + -webkit-appearance: none; +} + +input[type="text"]:focus, input[type="password"]:focus, input[type="email"]:focus, input[type="url"]:focus, input[type="date"]:focus, input[type="month"]:focus, input[type="time"]:focus, input[type="datetime"]:focus, input[type="datetime-local"]:focus, input[type="week"]:focus, input[type="number"]:focus, input[type="search"]:focus, input[type="tel"]:focus, input[type="color"]:focus { + outline: 0; + outline: thin dotted \9; + border-color: #2980b9; +} + +input.no-focus:focus { + border-color: #ccc !important; +} + +input[type="file"]:focus, input[type="radio"]:focus, input[type="checkbox"]:focus { + outline: thin dotted #333; + outline: 1px auto #129fea; +} + +input[type="text"][disabled], input[type="password"][disabled], input[type="email"][disabled], input[type="url"][disabled], input[type="date"][disabled], input[type="month"][disabled], input[type="time"][disabled], input[type="datetime"][disabled], input[type="datetime-local"][disabled], input[type="week"][disabled], input[type="number"][disabled], input[type="search"][disabled], input[type="tel"][disabled], input[type="color"][disabled] { + cursor: not-allowed; + background-color: #f3f6f6; + color: #cad2d3; +} + +input:focus:invalid, textarea:focus:invalid, select:focus:invalid { + color: #e74c3c; + border: 1px solid #e74c3c; +} + +input:focus:invalid:focus, textarea:focus:invalid:focus, select:focus:invalid:focus { + border-color: #e9322d; +} + +input[type="file"]:focus:invalid:focus, input[type="radio"]:focus:invalid:focus, input[type="checkbox"]:focus:invalid:focus { + outline-color: #e9322d; +} + +input.wy-input-large { + padding: 12px; + font-size: 100%; +} + +textarea { + overflow: auto; + vertical-align: top; + width: 100%; +} + +select, textarea { + padding: 0.5em 0.625em; + display: inline-block; + border: 1px solid #ccc; + font-size: 0.8em; + box-shadow: inset 0 1px 3px #ddd; + -webkit-transition: border 0.3s linear; + -moz-transition: border 0.3s linear; + transition: border 0.3s linear; +} + +select { + border: 1px solid #ccc; + background-color: #fff; +} + +select[multiple] { + height: auto; +} + +select:focus, textarea:focus { + outline: 0; +} + +select[disabled], textarea[disabled], input[readonly], select[readonly], textarea[readonly] { + cursor: not-allowed; + background-color: #fff; + color: #cad2d3; + border-color: transparent; +} + +.wy-checkbox, .wy-radio { + margin: 0.5em 0; + color: #404040 !important; + display: block; +} + +.wy-form-message-inline { + display: inline-block; + *display: inline; + *zoom: 1; + vertical-align: middle; +} + +.wy-input-prefix, .wy-input-suffix { + white-space: nowrap; +} + +.wy-input-prefix .wy-input-context, .wy-input-suffix .wy-input-context { + padding: 6px; + display: inline-block; + font-size: 80%; + background-color: #f3f6f6; + border: solid 1px #ccc; + color: #999; 
+} + +.wy-input-suffix .wy-input-context { + border-left: 0; +} + +.wy-input-prefix .wy-input-context { + border-right: 0; +} + +.wy-inline-validate { + white-space: nowrap; +} + +.wy-inline-validate .wy-input-context { + padding: 0.5em 0.625em; + display: inline-block; + font-size: 80%; +} + +.wy-inline-validate.wy-inline-validate-success .wy-input-context { + color: #27ae60; +} + +.wy-inline-validate.wy-inline-validate-danger .wy-input-context { + color: #e74c3c; +} + +.wy-inline-validate.wy-inline-validate-warning .wy-input-context { + color: #e67e22; +} + +.wy-inline-validate.wy-inline-validate-info .wy-input-context { + color: #2980b9; +} + +.wy-control-group { + margin-bottom: 24px; + *zoom: 1; +} + +.wy-control-group:before, .wy-control-group:after { + display: table; + content: ""; +} + +.wy-control-group:after { + clear: both; +} + +.wy-control-group.wy-control-group-error .wy-form-message, .wy-control-group.wy-control-group-error label { + color: #e74c3c; +} + +.wy-control-group.wy-control-group-error input[type="text"], .wy-control-group.wy-control-group-error input[type="password"], .wy-control-group.wy-control-group-error input[type="email"], .wy-control-group.wy-control-group-error input[type="url"], .wy-control-group.wy-control-group-error input[type="date"], .wy-control-group.wy-control-group-error input[type="month"], .wy-control-group.wy-control-group-error input[type="time"], .wy-control-group.wy-control-group-error input[type="datetime"], .wy-control-group.wy-control-group-error input[type="datetime-local"], .wy-control-group.wy-control-group-error input[type="week"], .wy-control-group.wy-control-group-error input[type="number"], .wy-control-group.wy-control-group-error input[type="search"], .wy-control-group.wy-control-group-error input[type="tel"], .wy-control-group.wy-control-group-error input[type="color"] { + border: solid 2px #e74c3c; +} + +.wy-control-group.wy-control-group-error textarea { + border: solid 2px #e74c3c; +} + +.wy-control-group.fluid-input input[type="text"], .wy-control-group.fluid-input input[type="password"], .wy-control-group.fluid-input input[type="email"], .wy-control-group.fluid-input input[type="url"], .wy-control-group.fluid-input input[type="date"], .wy-control-group.fluid-input input[type="month"], .wy-control-group.fluid-input input[type="time"], .wy-control-group.fluid-input input[type="datetime"], .wy-control-group.fluid-input input[type="datetime-local"], .wy-control-group.fluid-input input[type="week"], .wy-control-group.fluid-input input[type="number"], .wy-control-group.fluid-input input[type="search"], .wy-control-group.fluid-input input[type="tel"], .wy-control-group.fluid-input input[type="color"] { + width: 100%; +} + +.wy-form-message-inline { + display: inline-block; + padding-left: 0.3em; + color: #666; + vertical-align: middle; + font-size: 90%; +} + +.wy-form-message { + display: block; + color: #ccc; + font-size: 70%; + margin-top: 0.3125em; + font-style: italic; +} + +.wy-tag-input-group { + padding: 4px 4px 0px 4px; + display: inline-block; + border: 1px solid #ccc; + font-size: 80%; + font-family: "Lato", "proxima-nova", "Helvetica Neue", Arial, sans-serif; + box-shadow: inset 0 1px 3px #ddd; + -webkit-transition: border 0.3s linear; + -moz-transition: border 0.3s linear; + transition: border 0.3s linear; +} + +.wy-tag-input-group .wy-tag { + display: inline-block; + background-color: rgba(0, 0, 0, 0.1); + padding: 0.5em 0.625em; + border-radius: 2px; + position: relative; + margin-bottom: 4px; +} + 
+.wy-tag-input-group .wy-tag .wy-tag-remove { + color: #ccc; + margin-left: 5px; +} + +.wy-tag-input-group .wy-tag .wy-tag-remove:hover { + color: #e74c3c; +} + +.wy-tag-input-group label { + margin-left: 5px; + display: inline-block; + margin-bottom: 0; +} + +.wy-tag-input-group input { + border: none; + font-size: 100%; + margin-bottom: 4px; + box-shadow: none; +} + +.wy-form-upload { + border: solid 1px #ccc; + border-bottom: solid 3px #ccc; + background-color: #fff; + padding: 24px; + display: inline-block; + text-align: center; + cursor: pointer; + color: #404040; + -webkit-transition: border-color 0.1s ease-in; + -moz-transition: border-color 0.1s ease-in; + transition: border-color 0.1s ease-in; + *zoom: 1; +} + +.wy-form-upload:before, .wy-form-upload:after { + display: table; + content: ""; +} + +.wy-form-upload:after { + clear: both; +} + +@media screen and (max-width: 480px) { + .wy-form-upload { + width: 100%; + } +} + +.wy-form-upload .image-drop { + display: none; +} + +.wy-form-upload .image-desktop { + display: none; +} + +.wy-form-upload .image-loading { + display: none; +} + +.wy-form-upload .wy-form-upload-icon { + display: block; + font-size: 32px; + color: #b3b3b3; +} + +.wy-form-upload .image-drop .wy-form-upload-icon { + color: #27ae60; +} + +.wy-form-upload p { + font-size: 90%; +} + +.wy-form-upload .wy-form-upload-image { + float: left; + margin-right: 24px; +} + +@media screen and (max-width: 480px) { + .wy-form-upload .wy-form-upload-image { + width: 100%; + margin-bottom: 24px; + } +} + +.wy-form-upload img { + max-width: 125px; + max-height: 125px; + opacity: 0.9; + -webkit-transition: opacity 0.1s ease-in; + -moz-transition: opacity 0.1s ease-in; + transition: opacity 0.1s ease-in; +} + +.wy-form-upload .wy-form-upload-content { + float: left; +} + +@media screen and (max-width: 480px) { + .wy-form-upload .wy-form-upload-content { + width: 100%; + } +} + +.wy-form-upload:hover { + border-color: #b3b3b3; + color: #404040; +} + +.wy-form-upload:hover .image-desktop { + display: block; +} + +.wy-form-upload:hover .image-drag { + display: none; +} + +.wy-form-upload:hover img { + opacity: 1; +} + +.wy-form-upload:active { + border-top: solid 3px #ccc; + border-bottom: solid 1px #ccc; +} + +.wy-form-upload.wy-form-upload-big { + width: 100%; + text-align: center; + padding: 72px; +} + +.wy-form-upload.wy-form-upload-big .wy-form-upload-content { + float: none; +} + +.wy-form-upload.wy-form-upload-file p { + margin-bottom: 0; +} + +.wy-form-upload.wy-form-upload-file .wy-form-upload-icon { + display: inline-block; + font-size: inherit; +} + +.wy-form-upload.wy-form-upload-drop { + background-color: #ddf7e8; +} + +.wy-form-upload.wy-form-upload-drop .image-drop { + display: block; +} + +.wy-form-upload.wy-form-upload-drop .image-desktop { + display: none; +} + +.wy-form-upload.wy-form-upload-drop .image-drag { + display: none; +} + +.wy-form-upload.wy-form-upload-loading .image-drag { + display: none; +} + +.wy-form-upload.wy-form-upload-loading .image-desktop { + display: none; +} + +.wy-form-upload.wy-form-upload-loading .image-loading { + display: block; +} + +.wy-form-upload.wy-form-upload-loading .wy-input-prefix { + display: none; +} + +.wy-form-upload.wy-form-upload-loading p { + margin-bottom: 0; +} + +.rotate-90 { + -webkit-transform: rotate(90deg); + -moz-transform: rotate(90deg); + -ms-transform: rotate(90deg); + -o-transform: rotate(90deg); + transform: rotate(90deg); +} + +.rotate-180 { + -webkit-transform: rotate(180deg); + -moz-transform: 
rotate(180deg); + -ms-transform: rotate(180deg); + -o-transform: rotate(180deg); + transform: rotate(180deg); +} + +.rotate-270 { + -webkit-transform: rotate(270deg); + -moz-transform: rotate(270deg); + -ms-transform: rotate(270deg); + -o-transform: rotate(270deg); + transform: rotate(270deg); +} + +.mirror { + -webkit-transform: scaleX(-1); + -moz-transform: scaleX(-1); + -ms-transform: scaleX(-1); + -o-transform: scaleX(-1); + transform: scaleX(-1); +} + +.mirror.rotate-90 { + -webkit-transform: scaleX(-1) rotate(90deg); + -moz-transform: scaleX(-1) rotate(90deg); + -ms-transform: scaleX(-1) rotate(90deg); + -o-transform: scaleX(-1) rotate(90deg); + transform: scaleX(-1) rotate(90deg); +} + +.mirror.rotate-180 { + -webkit-transform: scaleX(-1) rotate(180deg); + -moz-transform: scaleX(-1) rotate(180deg); + -ms-transform: scaleX(-1) rotate(180deg); + -o-transform: scaleX(-1) rotate(180deg); + transform: scaleX(-1) rotate(180deg); +} + +.mirror.rotate-270 { + -webkit-transform: scaleX(-1) rotate(270deg); + -moz-transform: scaleX(-1) rotate(270deg); + -ms-transform: scaleX(-1) rotate(270deg); + -o-transform: scaleX(-1) rotate(270deg); + transform: scaleX(-1) rotate(270deg); +} + +.wy-form-gallery-manage { + margin-left: -12px; + margin-right: -12px; +} + +.wy-form-gallery-manage li { + float: left; + padding: 12px; + width: 20%; + cursor: pointer; +} + +@media screen and (max-width: 768px) { + .wy-form-gallery-manage li { + width: 25%; + } +} + +@media screen and (max-width: 480px) { + .wy-form-gallery-manage li { + width: 50%; + } +} + +.wy-form-gallery-manage li:active { + cursor: move; +} + +.wy-form-gallery-manage li>a { + padding: 12px; + background-color: #fff; + border: solid 1px #e1e4e5; + border-bottom: solid 3px #e1e4e5; + display: inline-block; + -webkit-transition: all 0.1s ease-in; + -moz-transition: all 0.1s ease-in; + transition: all 0.1s ease-in; +} + +.wy-form-gallery-manage li>a:active { + border: solid 1px #ccc; + border-top: solid 3px #ccc; +} + +.wy-form-gallery-manage img { + width: 100%; + -webkit-transition: all 0.05s ease-in; + -moz-transition: all 0.05s ease-in; + transition: all 0.05s ease-in; +} + +li.wy-form-gallery-edit { + position: relative; + color: #fff; + padding: 24px; + width: 100%; + display: block; + background-color: #343131; + border-radius: 4px; +} + +li.wy-form-gallery-edit .arrow { + position: absolute; + display: block; + top: -50px; + left: 50%; + margin-left: -25px; + z-index: 500; + height: 0; + width: 0; + border-color: transparent; + border-style: solid; + border-width: 25px; + border-bottom-color: #343131; +} + +@media only screen and (max-width: 480px) { + .wy-form button[type="submit"] { + margin: 0.7em 0 0; + } + + .wy-form input[type="text"], .wy-form input[type="password"], .wy-form input[type="email"], .wy-form input[type="url"], .wy-form input[type="date"], .wy-form input[type="month"], .wy-form input[type="time"], .wy-form input[type="datetime"], .wy-form input[type="datetime-local"], .wy-form input[type="week"], .wy-form input[type="number"], .wy-form input[type="search"], .wy-form input[type="tel"], .wy-form input[type="color"] { + margin-bottom: 0.3em; + display: block; + } + + .wy-form label { + margin-bottom: 0.3em; + display: block; + } + + .wy-form input[type="password"], .wy-form input[type="email"], .wy-form input[type="url"], .wy-form input[type="date"], .wy-form input[type="month"], .wy-form input[type="time"], .wy-form input[type="datetime"], .wy-form input[type="datetime-local"], .wy-form input[type="week"], .wy-form 
input[type="number"], .wy-form input[type="search"], .wy-form input[type="tel"], .wy-form input[type="color"] { + margin-bottom: 0; + } + + .wy-form-aligned .wy-control-group label { + margin-bottom: 0.3em; + text-align: left; + display: block; + width: 100%; + } + + .wy-form-aligned .wy-controls { + margin: 1.5em 0 0 0; + } + + .wy-form .wy-help-inline, .wy-form-message-inline, .wy-form-message { + display: block; + font-size: 80%; + padding: 0.2em 0 0.8em; + } +} + +@media screen and (max-width: 768px) { + .tablet-hide { + display: none; + } +} + +@media screen and (max-width: 480px) { + .mobile-hide { + display: none; + } +} + +.float-left { + float: left; +} + +.float-right { + float: right; +} + +.full-width { + width: 100%; +} + +.wy-grid-one-col { + *zoom: 1; + max-width: 68em; + margin-left: auto; + margin-right: auto; + max-width: 1066px; + margin-top: 1.618em; +} + +.wy-grid-one-col:before, .wy-grid-one-col:after { + display: table; + content: ""; +} + +.wy-grid-one-col:after { + clear: both; +} + +.wy-grid-one-col section { + display: block; + float: left; + margin-right: 2.35765%; + width: 100%; + background: #fff; + padding: 1.618em; + margin-right: 0; +} + +.wy-grid-one-col section:last-child { + margin-right: 0; +} + +.wy-grid-index-card { + *zoom: 1; + max-width: 68em; + margin-left: auto; + margin-right: auto; + max-width: 460px; + margin-top: 1.618em; + background: #fff; + padding: 1.618em; +} + +.wy-grid-index-card:before, .wy-grid-index-card:after { + display: table; + content: ""; +} + +.wy-grid-index-card:after { + clear: both; +} + +.wy-grid-index-card header, .wy-grid-index-card section, .wy-grid-index-card aside { + display: block; + float: left; + margin-right: 2.35765%; + width: 100%; +} + +.wy-grid-index-card header:last-child, .wy-grid-index-card section:last-child, .wy-grid-index-card aside:last-child { + margin-right: 0; +} + +.wy-grid-index-card.twocol { + max-width: 768px; +} + +.wy-grid-index-card.twocol section { + display: block; + float: left; + margin-right: 2.35765%; + width: 48.82117%; +} + +.wy-grid-index-card.twocol section:last-child { + margin-right: 0; +} + +.wy-grid-index-card.twocol aside { + display: block; + float: left; + margin-right: 2.35765%; + width: 48.82117%; +} + +.wy-grid-index-card.twocol aside:last-child { + margin-right: 0; +} + +.wy-grid-search-filter { + *zoom: 1; + max-width: 68em; + margin-left: auto; + margin-right: auto; + margin-bottom: 24px; +} + +.wy-grid-search-filter:before, .wy-grid-search-filter:after { + display: table; + content: ""; +} + +.wy-grid-search-filter:after { + clear: both; +} + +.wy-grid-search-filter .wy-grid-search-filter-input { + display: block; + float: left; + margin-right: 2.35765%; + width: 74.41059%; +} + +.wy-grid-search-filter .wy-grid-search-filter-input:last-child { + margin-right: 0; +} + +.wy-grid-search-filter .wy-grid-search-filter-btn { + display: block; + float: left; + margin-right: 2.35765%; + width: 23.23176%; +} + +.wy-grid-search-filter .wy-grid-search-filter-btn:last-child { + margin-right: 0; +} + +.wy-table, .rst-content table.docutils, .rst-content table.field-list { + border-collapse: collapse; + border-spacing: 0; + empty-cells: show; + margin-bottom: 24px; +} + +.wy-table caption, .rst-content table.docutils caption, .rst-content table.field-list caption { + color: #000; + font: italic 85%/1 arial, sans-serif; + padding: 1em 0; + text-align: center; +} + +.wy-table td, .rst-content table.docutils td, .rst-content table.field-list td, .wy-table th, .rst-content 
table.docutils th, .rst-content table.field-list th { + font-size: 90%; + margin: 0; + overflow: visible; + padding: 8px 16px; +} + +.wy-table td:first-child, .rst-content table.docutils td:first-child, .rst-content table.field-list td:first-child, .wy-table th:first-child, .rst-content table.docutils th:first-child, .rst-content table.field-list th:first-child { + border-left-width: 0; +} + +.wy-table thead, .rst-content table.docutils thead, .rst-content table.field-list thead { + color: #000; + text-align: left; + vertical-align: bottom; + white-space: nowrap; +} + +.wy-table thead th, .rst-content table.docutils thead th, .rst-content table.field-list thead th { + font-weight: bold; + border-bottom: solid 2px #e1e4e5; +} + +.wy-table td, .rst-content table.docutils td, .rst-content table.field-list td { + background-color: transparent; + vertical-align: middle; +} + +.wy-table td p, .rst-content table.docutils td p, .rst-content table.field-list td p { + line-height: 18px; + margin-bottom: 0; +} + +.wy-table .wy-table-cell-min, .rst-content table.docutils .wy-table-cell-min, .rst-content table.field-list .wy-table-cell-min { + width: 1%; + padding-right: 0; +} + +.wy-table .wy-table-cell-min input[type=checkbox], .rst-content table.docutils .wy-table-cell-min input[type=checkbox], .rst-content table.field-list .wy-table-cell-min input[type=checkbox], .wy-table .wy-table-cell-min input[type=checkbox], .rst-content table.docutils .wy-table-cell-min input[type=checkbox], .rst-content table.field-list .wy-table-cell-min input[type=checkbox] { + margin: 0; +} + +.wy-table-secondary { + color: gray; + font-size: 90%; +} + +.wy-table-tertiary { + color: gray; + font-size: 80%; +} + +.wy-table-odd td, .wy-table-striped tr:nth-child(2n-1) td, .rst-content table.docutils:not(.field-list) tr:nth-child(2n-1) td { + background-color: #f3f6f6; +} + +.wy-table-backed { + background-color: #f3f6f6; +} + +.wy-table-bordered-all, .rst-content table.docutils { + border: 1px solid #e1e4e5; +} + +.wy-table-bordered-all td, .rst-content table.docutils td { + border-bottom: 1px solid #e1e4e5; + border-left: 1px solid #e1e4e5; +} + +.wy-table-bordered-all tbody>tr:last-child td, .rst-content table.docutils tbody>tr:last-child td { + border-bottom-width: 0; +} + +.wy-table-bordered { + border: 1px solid #e1e4e5; +} + +.wy-table-bordered-rows td { + border-bottom: 1px solid #e1e4e5; +} + +.wy-table-bordered-rows tbody>tr:last-child td { + border-bottom-width: 0; +} + +.wy-table-horizontal tbody>tr:last-child td { + border-bottom-width: 0; +} + +.wy-table-horizontal td, .wy-table-horizontal th { + border-width: 0 0 1px 0; + border-bottom: 1px solid #e1e4e5; +} + +.wy-table-horizontal tbody>tr:last-child td { + border-bottom-width: 0; +} + +.wy-table-responsive { + margin-bottom: 24px; + max-width: 100%; + overflow: auto; +} + +.wy-table-responsive table { + margin-bottom: 0 !important; +} + +.wy-table-responsive table td, .wy-table-responsive table th { + white-space: nowrap; +} + +html { + height: 100%; + overflow-x: hidden; +} + +body { + font-family: "Lato", "proxima-nova", "Helvetica Neue", Arial, sans-serif; + font-weight: normal; + color: #404040; + min-height: 100%; + overflow-x: hidden; + background: #edf0f2; +} + +a { + color: #2980b9; + text-decoration: none; +} + +a:hover { + color: #3091d1; +} + +.link-danger { + color: #e74c3c; +} + +.link-danger:hover { + color: #d62c1a; +} + +.text-left { + text-align: left; +} + +.text-center { + text-align: center; +} + +.text-right { + text-align: right; +} + 
+h1, h2, h3, h4, h5, h6, legend { + margin-top: 0; + font-weight: 700; + font-family: "Roboto Slab", "ff-tisa-web-pro", "Georgia", Arial, sans-serif; +} + +p { + line-height: 24px; + margin: 0; + font-size: 16px; + margin-bottom: 24px; +} + +h1 { + font-size: 175%; +} + +h2 { + font-size: 150%; +} + +h3 { + font-size: 125%; +} + +h4 { + font-size: 115%; +} + +h5 { + font-size: 110%; +} + +h6 { + font-size: 100%; +} + +small { + font-size: 80%; +} + +code, .rst-content tt { + white-space: nowrap; + max-width: 100%; + background: #fff; + border: solid 1px #e1e4e5; + font-size: 75%; + padding: 0 5px; + font-family: "Inconsolata", "Consolata", "Monaco", monospace; + color: #e74c3c; + overflow-x: auto; +} + +code.code-large, .rst-content tt.code-large { + font-size: 90%; +} + +.full-width { + width: 100%; +} + +.wy-plain-list-disc, .rst-content .section ul, .rst-content .toctree-wrapper ul { + list-style: disc; + line-height: 24px; + margin-bottom: 24px; +} + +.wy-plain-list-disc li, .rst-content .section ul li, .rst-content .toctree-wrapper ul li { + list-style: disc; + margin-left: 24px; +} + +.wy-plain-list-disc li ul, .rst-content .section ul li ul, .rst-content .toctree-wrapper ul li ul { + margin-bottom: 0; +} + +.wy-plain-list-disc li li, .rst-content .section ul li li, .rst-content .toctree-wrapper ul li li { + list-style: circle; +} + +.wy-plain-list-disc li li li, .rst-content .section ul li li li, .rst-content .toctree-wrapper ul li li li { + list-style: square; +} + +.wy-plain-list-decimal, .rst-content .section ol, .rst-content ol.arabic { + list-style: decimal; + line-height: 24px; + margin-bottom: 24px; +} + +.wy-plain-list-decimal li, .rst-content .section ol li, .rst-content ol.arabic li { + list-style: decimal; + margin-left: 24px; +} + +.wy-type-large { + font-size: 120%; +} + +.wy-type-normal { + font-size: 100%; +} + +.wy-type-small { + font-size: 100%; +} + +.wy-type-strike { + text-decoration: line-through; +} + +.wy-text-warning { + color: #e67e22 !important; +} + +a.wy-text-warning:hover { + color: #eb9950 !important; +} + +.wy-text-info { + color: #2980b9 !important; +} + +a.wy-text-info:hover { + color: #409ad5 !important; +} + +.wy-text-success { + color: #27ae60 !important; +} + +a.wy-text-success:hover { + color: #36d278 !important; +} + +.wy-text-danger { + color: #e74c3c !important; +} + +a.wy-text-danger:hover { + color: #ed7669 !important; +} + +.wy-text-neutral { + color: #404040 !important; +} + +a.wy-text-neutral:hover { + color: #595959 !important; +} + +.codeblock-example { + border: 1px solid #e1e4e5; + border-bottom: none; + padding: 24px; + padding-top: 48px; + font-weight: 500; + background: #fff; + position: relative; +} + +.codeblock-example:after { + content: "Example"; + position: absolute; + top: 0px; + left: 0px; + background: #9b59b6; + color: #fff; + padding: 6px 12px; +} + +.codeblock-example.prettyprint-example-only { + border: 1px solid #e1e4e5; + margin-bottom: 24px; +} + +.codeblock, div[class^='highlight'] { + border: 1px solid #e1e4e5; + padding: 0px; + overflow-x: auto; + background: #fff; + margin: 1px 0 24px 0; +} + +.codeblock div[class^='highlight'], div[class^='highlight'] div[class^='highlight'] { + border: none; + background: none; + margin: 0; +} + +.linenodiv pre { + border-right: solid 1px #e6e9ea; + margin: 0; + padding: 12px 12px; + font-family: "Inconsolata", "Consolata", "Monaco", monospace; + font-size: 12px; + line-height: 1.5; + color: #d9d9d9; +} + +div[class^='highlight'] pre { + white-space: pre; + margin: 0; + 
padding: 12px 12px; + font-family: "Inconsolata", "Consolata", "Monaco", monospace; + font-size: 12px; + line-height: 1.5; + display: block; + overflow: auto; + color: #404040; +} + +pre.literal-block { + @extends .codeblock; +} + +@media print { + .codeblock, div[class^='highlight'], div[class^='highlight'] pre { + white-space: pre-wrap; + } +} + +.hll { + background-color: #f8f8f8; + border: 1px solid #ccc; + padding: 1.5px 5px; +} + +.c { + color: #998; + font-style: italic; +} + +.err { + color: #a61717; + background-color: #e3d2d2; +} + +.k { + font-weight: bold; +} + +.o { + font-weight: bold; +} + +.cm { + color: #998; + font-style: italic; +} + +.cp { + color: #999; + font-weight: bold; +} + +.c1 { + color: #998; + font-style: italic; +} + +.cs { + color: #999; + font-weight: bold; + font-style: italic; +} + +.gd { + color: #000; + background-color: #fdd; +} + +.gd .x { + color: #000; + background-color: #faa; +} + +.ge { + font-style: italic; +} + +.gr { + color: #a00; +} + +.gh { + color: #999; +} + +.gi { + color: #000; + background-color: #dfd; +} + +.gi .x { + color: #000; + background-color: #afa; +} + +.go { + color: #888; +} + +.gp { + color: #555; +} + +.gs { + font-weight: bold; +} + +.gu { + color: purple; + font-weight: bold; +} + +.gt { + color: #a00; +} + +.kc { + font-weight: bold; +} + +.kd { + font-weight: bold; +} + +.kn { + font-weight: bold; +} + +.kp { + font-weight: bold; +} + +.kr { + font-weight: bold; +} + +.kt { + color: #458; + font-weight: bold; +} + +.m { + color: #099; +} + +.s { + color: #d14; +} + +.n { + color: #333; +} + +.na { + color: teal; +} + +.nb { + color: #0086b3; +} + +.nc { + color: #458; + font-weight: bold; +} + +.no { + color: teal; +} + +.ni { + color: purple; +} + +.ne { + color: #900; + font-weight: bold; +} + +.nf { + color: #900; + font-weight: bold; +} + +.nn { + color: #555; +} + +.nt { + color: navy; +} + +.nv { + color: teal; +} + +.ow { + font-weight: bold; +} + +.w { + color: #bbb; +} + +.mf { + color: #099; +} + +.mh { + color: #099; +} + +.mi { + color: #099; +} + +.mo { + color: #099; +} + +.sb { + color: #d14; +} + +.sc { + color: #d14; +} + +.sd { + color: #d14; +} + +.s2 { + color: #d14; +} + +.se { + color: #d14; +} + +.sh { + color: #d14; +} + +.si { + color: #d14; +} + +.sx { + color: #d14; +} + +.sr { + color: #009926; +} + +.s1 { + color: #d14; +} + +.ss { + color: #990073; +} + +.bp { + color: #999; +} + +.vc { + color: teal; +} + +.vg { + color: teal; +} + +.vi { + color: teal; +} + +.il { + color: #099; +} + +.gc { + color: #999; + background-color: #eaf2f5; +} + +.wy-breadcrumbs li { + display: inline-block; +} + +.wy-breadcrumbs li.wy-breadcrumbs-aside { + float: right; +} + +.wy-breadcrumbs li a { + display: inline-block; + padding: 5px; +} + +.wy-breadcrumbs li a:first-child { + padding-left: 0; +} + +.wy-breadcrumbs-extra { + margin-bottom: 0; + color: #b3b3b3; + font-size: 80%; + display: inline-block; +} + +@media screen and (max-width: 480px) { + .wy-breadcrumbs-extra { + display: none; + } + + .wy-breadcrumbs li.wy-breadcrumbs-aside { + display: none; + } +} + +@media print { + .wy-breadcrumbs li.wy-breadcrumbs-aside { + display: none; + } +} + +.wy-affix { + position: fixed; + top: 1.618em; +} + +.wy-menu a:hover { + text-decoration: none; +} + +.wy-menu-horiz { + *zoom: 1; +} + +.wy-menu-horiz:before, .wy-menu-horiz:after { + display: table; + content: ""; +} + +.wy-menu-horiz:after { + clear: both; +} + +.wy-menu-horiz ul, .wy-menu-horiz li { + display: inline-block; +} + +.wy-menu-horiz li:hover { 
+ background: rgba(255, 255, 255, 0.1); +} + +.wy-menu-horiz li.divide-left { + border-left: solid 1px #404040; +} + +.wy-menu-horiz li.divide-right { + border-right: solid 1px #404040; +} + +.wy-menu-horiz a { + height: 32px; + display: inline-block; + line-height: 32px; + padding: 0 16px; +} + +.wy-menu-vertical header { + height: 32px; + display: inline-block; + line-height: 32px; + padding: 0 1.618em; + display: block; + font-weight: bold; + text-transform: uppercase; + font-size: 80%; + color: #2980b9; + white-space: nowrap; +} + +.wy-menu-vertical ul { + margin-bottom: 0; +} + +.wy-menu-vertical li.divide-top { + border-top: solid 1px #404040; +} + +.wy-menu-vertical li.divide-bottom { + border-bottom: solid 1px #404040; +} + +.wy-menu-vertical li.current { + background: #e3e3e3; +} + +.wy-menu-vertical li.current a { + color: gray; + border-right: solid 1px #c9c9c9; + padding: 0.4045em 2.427em; +} + +.wy-menu-vertical li.current a:hover { + background: #d6d6d6; +} + +.wy-menu-vertical li.on a, .wy-menu-vertical li.current>a { + color: #404040; + padding: 0.4045em 1.618em; + font-weight: bold; + position: relative; + background: #fcfcfc; + border: none; + border-bottom: solid 1px #c9c9c9; + border-top: solid 1px #c9c9c9; + padding-left: 1.618em -4px; +} + +.wy-menu-vertical li.on a:hover, .wy-menu-vertical li.current>a:hover { + background: #fcfcfc; +} + +.wy-menu-vertical li.tocktree-l2.current>a { + background: #c9c9c9; +} + +.wy-menu-vertical li.current ul { + display: block; +} + +.wy-menu-vertical li ul { + margin-bottom: 0; + display: none; +} + +.wy-menu-vertical li ul li a { + margin-bottom: 0; + color: #b3b3b3; + font-weight: normal; +} + +.wy-menu-vertical a { + display: inline-block; + line-height: 18px; + padding: 0.4045em 1.618em; + display: block; + position: relative; + font-size: 90%; + color: #b3b3b3; +} + +.wy-menu-vertical a:hover { + background-color: #4e4a4a; + cursor: pointer; +} + +.wy-menu-vertical a:active { + background-color: #2980b9; + cursor: pointer; + color: #fff; +} + +.wy-side-nav-search { + z-index: 200; + background-color: #2980b9; + text-align: center; + padding: 0.809em; + display: block; + color: #fcfcfc; + margin-bottom: 0.809em; +} + +.wy-side-nav-search input[type=text] { + width: 100%; + border-radius: 50px; + padding: 6px 12px; + border-color: #2472a4; +} + +.wy-side-nav-search img { + display: block; + margin: auto auto 0.809em auto; + height: 214px; + width: 26px; + background-color: #2980b9; + padding: 5px; +/* border-radius: 100%; */ +} + +.wy-side-nav-search>a, .wy-side-nav-search .wy-dropdown>a { + color: #fcfcfc; + font-size: 100%; + font-weight: bold; + display: inline-block; + padding: 4px 6px; + margin-bottom: 0.809em; +} + +.wy-side-nav-search>a:hover, .wy-side-nav-search .wy-dropdown>a:hover { + background: rgba(255, 255, 255, 0.1); +} + +.wy-nav .wy-menu-vertical header { + color: #2980b9; +} + +.wy-nav .wy-menu-vertical a { + color: #b3b3b3; +} + +.wy-nav .wy-menu-vertical a:hover { + background-color: #2980b9; + color: #fff; +} + +[data-menu-wrap] { + -webkit-transition: all 0.2s ease-in; + -moz-transition: all 0.2s ease-in; + transition: all 0.2s ease-in; + position: absolute; + opacity: 1; + width: 100%; + opacity: 0; +} + +[data-menu-wrap].move-center { + left: 0; + right: auto; + opacity: 1; +} + +[data-menu-wrap].move-left { + right: auto; + left: -100%; + opacity: 0; +} + +[data-menu-wrap].move-right { + right: -100%; + left: auto; + opacity: 0; +} + +.wy-body-for-nav { + background: left repeat-y #fff; + 
background-image: url(data:image/png; + base64, iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAAGXRFWHRTb2Z0d2FyZQBBZG9iZSBJbWFnZVJlYWR5ccllPAAAAyRpVFh0WE1MOmNvbS5hZG9iZS54bXAAAAAAADw/eHBhY2tldCBiZWdpbj0i77u/IiBpZD0iVzVNME1wQ2VoaUh6cmVTek5UY3prYzlkIj8+IDx4OnhtcG1ldGEgeG1sbnM6eD0iYWRvYmU6bnM6bWV0YS8iIHg6eG1wdGs9IkFkb2JlIFhNUCBDb3JlIDUuMy1jMDExIDY2LjE0NTY2MSwgMjAxMi8wMi8wNi0xNDo1NjoyNyAgICAgICAgIj4gPHJkZjpSREYgeG1sbnM6cmRmPSJodHRwOi8vd3d3LnczLm9yZy8xOTk5LzAyLzIyLXJkZi1zeW50YXgtbnMjIj4gPHJkZjpEZXNjcmlwdGlvbiByZGY6YWJvdXQ9IiIgeG1sbnM6eG1wPSJodHRwOi8vbnMuYWRvYmUuY29tL3hhcC8xLjAvIiB4bWxuczp4bXBNTT0iaHR0cDovL25zLmFkb2JlLmNvbS94YXAvMS4wL21tLyIgeG1sbnM6c3RSZWY9Imh0dHA6Ly9ucy5hZG9iZS5jb20veGFwLzEuMC9zVHlwZS9SZXNvdXJjZVJlZiMiIHhtcDpDcmVhdG9yVG9vbD0iQWRvYmUgUGhvdG9zaG9wIENTNiAoTWFjaW50b3NoKSIgeG1wTU06SW5zdGFuY2VJRD0ieG1wLmlpZDoxOERBMTRGRDBFMUUxMUUzODUwMkJCOThDMEVFNURFMCIgeG1wTU06RG9jdW1lbnRJRD0ieG1wLmRpZDoxOERBMTRGRTBFMUUxMUUzODUwMkJCOThDMEVFNURFMCI+IDx4bXBNTTpEZXJpdmVkRnJvbSBzdFJlZjppbnN0YW5jZUlEPSJ4bXAuaWlkOjE4REExNEZCMEUxRTExRTM4NTAyQkI5OEMwRUU1REUwIiBzdFJlZjpkb2N1bWVudElEPSJ4bXAuZGlkOjE4REExNEZDMEUxRTExRTM4NTAyQkI5OEMwRUU1REUwIi8+IDwvcmRmOkRlc2NyaXB0aW9uPiA8L3JkZjpSREY+IDwveDp4bXBtZXRhPiA8P3hwYWNrZXQgZW5kPSJyIj8+EwrlwAAAAA5JREFUeNpiMDU0BAgwAAE2AJgB9BnaAAAAAElFTkSuQmCC); + background-size: 300px 1px; +} + +.wy-grid-for-nav { + position: absolute; + width: 100%; + height: 100%; +} + +.wy-nav-side { + position: absolute; + top: 0; + left: 0; + width: 300px; + overflow: hidden; + min-height: 100%; + background: #343131; + z-index: 200; +} + +.wy-nav-top { + display: none; + background: #2980b9; + color: #fff; + padding: 0.4045em 0.809em; + position: relative; + line-height: 50px; + text-align: center; + font-size: 100%; + *zoom: 1; +} + +.wy-nav-top:before, .wy-nav-top:after { + display: table; + content: ""; +} + +.wy-nav-top:after { + clear: both; +} + +.wy-nav-top a { + color: #fff; + font-weight: bold; +} + +.wy-nav-top img { + margin-right: 12px; + height: 45px; + width: 45px; + background-color: #2980b9; + padding: 5px; + border-radius: 100%; +} + +.wy-nav-top i { + font-size: 30px; + float: left; + cursor: pointer; +} + +.wy-nav-content-wrap { + margin-left: 300px; + background: #fff; + min-height: 100%; +} + +.wy-nav-content { + padding: 1.618em 3.236em; + height: 100%; + max-width: 1140px; + margin: auto; +} + +.wy-body-mask { + position: fixed; + width: 100%; + height: 100%; + background: rgba(0, 0, 0, 0.2); + display: none; + z-index: 499; +} + +.wy-body-mask.on { + display: block; +} + +footer { + color: #999; +} + +footer p { + margin-bottom: 12px; +} + +.rst-footer-buttons { + *zoom: 1; +} + +.rst-footer-buttons:before, .rst-footer-buttons:after { + display: table; + content: ""; +} + +.rst-footer-buttons:after { + clear: both; +} + +#search-results .search li { + margin-bottom: 24px; + border-bottom: solid 1px #e1e4e5; + padding-bottom: 24px; +} + +#search-results .search li:first-child { + border-top: solid 1px #e1e4e5; + padding-top: 24px; +} + +#search-results .search li a { + font-size: 120%; + margin-bottom: 12px; + display: inline-block; +} + +#search-results .context { + color: gray; + font-size: 90%; +} + +@media screen and (max-width: 768px) { + .wy-body-for-nav { + background: #fff; + } + + .wy-nav-top { + display: block; + } + + .wy-nav-side { + left: -300px; + } + + .wy-nav-side.shift { + width: 85%; + left: 0; + } + + .wy-nav-content-wrap { + margin-left: 0; + } + + .wy-nav-content-wrap .wy-nav-content { + padding: 1.618em; + } + + .wy-nav-content-wrap.shift { + position: 
fixed; + min-width: 100%; + left: 85%; + top: 0; + height: 100%; + overflow: hidden; + } +} + +@media screen and (min-width: 1400px) { + .wy-nav-content-wrap { + background: #fff; + } + + .wy-nav-content { + margin: 0; + background: #fff; + } +} + +@media print { + .wy-nav-side { + display: none; + } + + .wy-nav-content-wrap { + margin-left: 0; + } +} + +.rst-versions { + position: fixed; + bottom: 0; + left: 0; + width: 300px; + color: #fcfcfc; + background: #1f1d1d; + border-top: solid 10px #343131; + font-family: "Lato", "proxima-nova", "Helvetica Neue", Arial, sans-serif; + z-index: 400; +} + +.rst-versions a { + color: #2980b9; + text-decoration: none; +} + +.rst-versions .rst-badge-small { + display: none; +} + +.rst-versions .rst-current-version { + padding: 12px; + background-color: #272525; + display: block; + text-align: right; + font-size: 90%; + cursor: pointer; + color: #27ae60; + *zoom: 1; +} + +.rst-versions .rst-current-version:before, .rst-versions .rst-current-version:after { + display: table; + content: ""; +} + +.rst-versions .rst-current-version:after { + clear: both; +} + +.rst-versions .rst-current-version .icon, .rst-versions .rst-current-version .wy-inline-validate.wy-inline-validate-success .wy-input-context, .wy-inline-validate.wy-inline-validate-success .rst-versions .rst-current-version .wy-input-context, .rst-versions .rst-current-version .wy-inline-validate.wy-inline-validate-danger .wy-input-context, .wy-inline-validate.wy-inline-validate-danger .rst-versions .rst-current-version .wy-input-context, .rst-versions .rst-current-version .wy-inline-validate.wy-inline-validate-warning .wy-input-context, .wy-inline-validate.wy-inline-validate-warning .rst-versions .rst-current-version .wy-input-context, .rst-versions .rst-current-version .wy-inline-validate.wy-inline-validate-info .wy-input-context, .wy-inline-validate.wy-inline-validate-info .rst-versions .rst-current-version .wy-input-context, .rst-versions .rst-current-version .wy-tag-input-group .wy-tag .wy-tag-remove, .wy-tag-input-group .wy-tag .rst-versions .rst-current-version .wy-tag-remove, .rst-versions .rst-current-version .rst-content .admonition-title, .rst-content .rst-versions .rst-current-version .admonition-title, .rst-versions .rst-current-version .rst-content h1 .headerlink, .rst-content h1 .rst-versions .rst-current-version .headerlink, .rst-versions .rst-current-version .rst-content h2 .headerlink, .rst-content h2 .rst-versions .rst-current-version .headerlink, .rst-versions .rst-current-version .rst-content h3 .headerlink, .rst-content h3 .rst-versions .rst-current-version .headerlink, .rst-versions .rst-current-version .rst-content h4 .headerlink, .rst-content h4 .rst-versions .rst-current-version .headerlink, .rst-versions .rst-current-version .rst-content h5 .headerlink, .rst-content h5 .rst-versions .rst-current-version .headerlink, .rst-versions .rst-current-version .rst-content h6 .headerlink, .rst-content h6 .rst-versions .rst-current-version .headerlink, .rst-versions .rst-current-version .rst-content dl dt .headerlink, .rst-content dl dt .rst-versions .rst-current-version .headerlink { + color: #fcfcfc; +} + +.rst-versions .rst-current-version .icon-book { + float: left; +} + +.rst-versions .rst-current-version.rst-out-of-date { + background-color: #e74c3c; + color: #fff; +} + +.rst-versions.shift-up .rst-other-versions { + display: block; +} + +.rst-versions .rst-other-versions { + font-size: 90%; + padding: 12px; + color: gray; + display: none; +} + +.rst-versions 
.rst-other-versions hr { + display: block; + height: 1px; + border: 0; + margin: 20px 0; + padding: 0; + border-top: solid 1px #413d3d; +} + +.rst-versions .rst-other-versions dd { + display: inline-block; + margin: 0; +} + +.rst-versions .rst-other-versions dd a { + display: inline-block; + padding: 6px; + color: #fcfcfc; +} + +.rst-versions.rst-badge { + width: auto; + bottom: 20px; + right: 20px; + left: auto; + border: none; + max-width: 300px; +} + +.rst-versions.rst-badge .icon-book { + float: none; +} + +.rst-versions.rst-badge.shift-up .rst-current-version { + text-align: right; +} + +.rst-versions.rst-badge.shift-up .rst-current-version .icon-book { + float: left; +} + +.rst-versions.rst-badge .rst-current-version { + width: auto; + height: 30px; + line-height: 30px; + padding: 0 6px; + display: block; + text-align: center; +} + +@media screen and (max-width: 768px) { + .rst-versions { + width: 85%; + display: none; + } + + .rst-versions.shift { + display: block; + } + + img { + width: 100%; + height: auto; + } +} + +.rst-content img { + max-width: 100%; + height: auto !important; +} + +.rst-content .section>img { + margin-bottom: 24px; +} + +.rst-content a.reference.external:after { + font-family: fontawesome-webfont; + content: " \f08e "; + color: #b3b3b3; + vertical-align: super; + font-size: 60%; +} + +.rst-content blockquote { + margin-left: 24px; + line-height: 24px; + margin-bottom: 24px; +} + +.rst-content .note .last, .rst-content .note p.first, .rst-content .attention .last, .rst-content .attention p.first, .rst-content .caution .last, .rst-content .caution p.first, .rst-content .danger .last, .rst-content .danger p.first, .rst-content .error .last, .rst-content .error p.first, .rst-content .hint .last, .rst-content .hint p.first, .rst-content .important .last, .rst-content .important p.first, .rst-content .tip .last, .rst-content .tip p.first, .rst-content .warning .last, .rst-content .warning p.first { + margin-bottom: 0; +} + +.rst-content .admonition-title { + font-weight: bold; +} + +.rst-content .admonition-title:before { + margin-right: 4px; +} + +.rst-content .admonition table { + border-color: rgba(0, 0, 0, 0.1); +} + +.rst-content .admonition table td, .rst-content .admonition table th { + background: transparent !important; + border-color: rgba(0, 0, 0, 0.1) !important; +} + +.rst-content .section ol.loweralpha, .rst-content .section ol.loweralpha li { + list-style: lower-alpha; +} + +.rst-content .section ol.upperalpha, .rst-content .section ol.upperalpha li { + list-style: upper-alpha; +} + +.rst-content .section ol p, .rst-content .section ul p { + margin-bottom: 12px; +} + +.rst-content .line-block { + margin-left: 24px; +} + +.rst-content .topic-title { + font-weight: bold; + margin-bottom: 12px; +} + +.rst-content .toc-backref { + color: #404040; +} + +.rst-content .align-right { + float: right; + margin: 0px 0px 24px 24px; +} + +.rst-content .align-left { + float: left; + margin: 0px 24px 24px 0px; +} + +.rst-content h1 .headerlink, .rst-content h2 .headerlink, .rst-content h3 .headerlink, .rst-content h4 .headerlink, .rst-content h5 .headerlink, .rst-content h6 .headerlink, .rst-content dl dt .headerlink { + display: none; + visibility: hidden; + font-size: 14px; +} + +.rst-content h1 .headerlink:after, .rst-content h2 .headerlink:after, .rst-content h3 .headerlink:after, .rst-content h4 .headerlink:after, .rst-content h5 .headerlink:after, .rst-content h6 .headerlink:after, .rst-content dl dt .headerlink:after { + visibility: visible; + content: 
"\f0c1"; + font-family: fontawesome-webfont; + display: inline-block; +} + +.rst-content h1:hover .headerlink, .rst-content h2:hover .headerlink, .rst-content h3:hover .headerlink, .rst-content h4:hover .headerlink, .rst-content h5:hover .headerlink, .rst-content h6:hover .headerlink, .rst-content dl dt:hover .headerlink { + display: inline-block; +} + +.rst-content .sidebar { + float: right; + width: 40%; + display: block; + margin: 0 0 24px 24px; + padding: 24px; + background: #f3f6f6; + border: solid 1px #e1e4e5; +} + +.rst-content .sidebar p, .rst-content .sidebar ul, .rst-content .sidebar dl { + font-size: 90%; +} + +.rst-content .sidebar .last { + margin-bottom: 0; +} + +.rst-content .sidebar .sidebar-title { + display: block; + font-family: "Roboto Slab", "ff-tisa-web-pro", "Georgia", Arial, sans-serif; + font-weight: bold; + background: #e1e4e5; + padding: 6px 12px; + margin: -24px; + margin-bottom: 24px; + font-size: 100%; +} + +.rst-content .highlighted { + background: #f1c40f; + display: inline-block; + font-weight: bold; + padding: 0 6px; +} + +.rst-content .footnote-reference, .rst-content .citation-reference { + vertical-align: super; + font-size: 90%; +} + +.rst-content table.docutils.citation, .rst-content table.docutils.footnote { + background: none; + border: none; + color: #999; +} + +.rst-content table.docutils.citation td, .rst-content table.docutils.citation tr, .rst-content table.docutils.footnote td, .rst-content table.docutils.footnote tr { + border: none; + background-color: transparent !important; + white-space: normal; +} + +.rst-content table.docutils.citation td.label, .rst-content table.docutils.footnote td.label { + padding-left: 0; + padding-right: 0; + vertical-align: top; +} + +.rst-content table.field-list { + border: none; +} + +.rst-content table.field-list td { + border: none; +} + +.rst-content table.field-list .field-name { + padding-right: 10px; + text-align: left; +} + +.rst-content table.field-list .field-body { + text-align: left; + padding-left: 0; +} + +.rst-content tt { + color: #000; +} + +.rst-content tt big, .rst-content tt em { + font-size: 100% !important; + line-height: normal; +} + +.rst-content tt .xref, a .rst-content tt { + font-weight: bold; +} + +.rst-content dl { + margin-bottom: 24px; +} + +.rst-content dl dt { + font-weight: bold; +} + +.rst-content dl p, .rst-content dl table, .rst-content dl ul, .rst-content dl ol { + margin-bottom: 12px !important; +} + +.rst-content dl dd { + margin: 0 0 12px 24px; +} + +.rst-content dl:not(.docutils) { + margin-bottom: 24px; +} + +.rst-content dl:not(.docutils) dt { + display: inline-block; + margin: 6px 0; + font-size: 90%; + line-height: normal; + background: #e7f2fa; + color: #2980b9; + border-top: solid 3px #6ab0de; + padding: 6px; + position: relative; +} + +.rst-content dl:not(.docutils) dt:before { + color: #6ab0de; +} + +.rst-content dl:not(.docutils) dt .headerlink { + color: #404040; + font-size: 100% !important; +} + +.rst-content dl:not(.docutils) dl dt { + margin-bottom: 6px; + border: none; + border-left: solid 3px #ccc; + background: #f0f0f0; + color: gray; +} + +.rst-content dl:not(.docutils) dl dt .headerlink { + color: #404040; + font-size: 100% !important; +} + +.rst-content dl:not(.docutils) dt:first-child { + margin-top: 0; +} + +.rst-content dl:not(.docutils) tt { + font-weight: bold; +} + +.rst-content dl:not(.docutils) tt.descname, .rst-content dl:not(.docutils) tt.descclassname { + background-color: transparent; + border: none; + padding: 0; + font-size: 100% 
!important; +} + +.rst-content dl:not(.docutils) tt.descname { + font-weight: bold; +} + +.rst-content dl:not(.docutils) .viewcode-link { + display: inline-block; + color: #27ae60; + font-size: 80%; + padding-left: 24px; +} + +.rst-content dl:not(.docutils) .optional { + display: inline-block; + padding: 0 4px; + color: #000; + font-weight: bold; +} + +.rst-content dl:not(.docutils) .property { + display: inline-block; + padding-right: 8px; +} + +@media screen and (max-width: 480px) { + .rst-content .sidebar { + width: 100%; + } +} + +span[id*='MathJax-Span'] { + color: #404040; +} + +.admonition.note span[id*='MathJax-Span'] { + color: #fff; +} + +.admonition.warning span[id*='MathJax-Span'] { + color: #fff; +} + +.search-reset-start { + color: #463E3F; + float: right; + position: relative; + top: -25px; + left: -10px; + z-index: 10; +} + +.search-reset-start:hover { + cursor: pointer; + color: #2980B9; +} + +#search-box-id { + padding-right: 25px; +} Binary files /tmp/tmpeNoH6g/W6Wsj24ec7/ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/font/fontawesome_webfont.eot and /tmp/tmpeNoH6g/QxW1Nd0MIc/ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/font/fontawesome_webfont.eot differ diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/font/fontawesome_webfont.svg ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/font/fontawesome_webfont.svg --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/font/fontawesome_webfont.svg 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/font/fontawesome_webfont.svg 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,399 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file Binary files /tmp/tmpeNoH6g/W6Wsj24ec7/ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/font/fontawesome_webfont.ttf and /tmp/tmpeNoH6g/QxW1Nd0MIc/ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/font/fontawesome_webfont.ttf differ Binary files /tmp/tmpeNoH6g/W6Wsj24ec7/ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/font/fontawesome_webfont.woff and /tmp/tmpeNoH6g/QxW1Nd0MIc/ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/font/fontawesome_webfont.woff differ diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/js/theme.js ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/js/theme.js --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/js/theme.js 1970-01-01 00:00:00.000000000 +0000 +++ 
ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/static/js/theme.js 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,16 @@ +$( document ).ready(function() { + // Shift nav in mobile when clicking the menu. + $("[data-toggle='wy-nav-top']").click(function() { + $("[data-toggle='wy-nav-shift']").toggleClass("shift"); + $("[data-toggle='rst-versions']").toggleClass("shift"); + }); + // Close menu when you click a link. + $(".wy-menu-vertical .current ul li a").click(function() { + $("[data-toggle='wy-nav-shift']").removeClass("shift"); + $("[data-toggle='rst-versions']").toggleClass("shift"); + }); + $("[data-toggle='rst-current-version']").click(function() { + $("[data-toggle='rst-versions']").toggleClass("shift-up"); + }); + $("table.docutils:not(.field-list)").wrap("
<div class='wy-table-responsive'></div>"); +}); diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/theme.conf ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/theme.conf --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/theme.conf 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/theme.conf 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,7 @@ +[theme] +inherit = basic +stylesheet = css/theme.min.css + +[options] +typekit_id = hiw1hhg +analytics_id = diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/versions.html ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/versions.html --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/versions.html 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/_themes/srtd/versions.html 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,37 @@ +{% if READTHEDOCS %} +{# Add rst-badge after rst-versions for small badge style. #} +
+ + Read the Docs + v: {{ current_version }} + + +
+
+
Versions
+ {% for slug, url in versions %} +
{{ slug }}
+ {% endfor %} +
+
+
Downloads
+ {% for type, url in downloads %} +
{{ type }}
+ {% endfor %} +
+
+
On Read the Docs
+
+ Project Home +
+
+ Builds +
+
+
+ Free document hosting provided by Read the Docs. + +
+
+{% endif %} + diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/trademarks.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/trademarks.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/trademarks.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/trademarks.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,96 @@ + +Trademark Usage +`````````````````````````````````````` +Why is it important to use the TM, SM, and ® for our registered marks? + +Before a trademark is registered with the United States Patent and Trademark Office it is appropriate to use the TM or SM symbol depending whether the product is for goods or services. It is important to use the TM or SM as it is notification to the public that Ansible claims rights to the mark even though it has not yet been registered. + +Once the trademark is registered, it is appropriate to use the symbol in place of the TM or SM. The symbol designation must be used in conjunction with the trademark if Ansible is to fully protect its rights. If we don't protect these marks, we run the risk of losing them in the way of Aspirin or Trampoline or Escalator. + +General Rules: ++++++++++++++++ + +Trademarks should be used on 1st references on a page or within a section. + +Use Ansible Tower® or Ansible®, on first reference when referring to products. + +Use "Ansible" alone as the company name, as in "Ansible announced quarterly results," which is not marked. + +Also add the trademark disclaimer. +* When using Ansible trademarks in the body of written text, you should use the following credit line in a prominent place, usually a footnote. + + For Registered Trademarks: + - [Name of Trademark] is a registered trademark of Ansible, Inc. in the United States and other countries. + + For Unregistered Trademarks (TMs/SMs): + - [Name of Trademark] is a trademark of Ansible, Inc. in the United States and other countries. + + For registered and unregistered trademarks: + - [Name of Trademark] is a registered trademark and [Name of Trademark] is a trademark of Ansible, Inc. in the United States and other countries. + +Guidelines for the proper use of trademarks: ++++++++++++++++++++++++++++++++++++++++++++++ + + Always distinguish trademarks from surround text with at least initial capital letters or in all capital letters. + +Always use proper trademark form and spelling. + +Never use a trademark as a noun. Always use a trademark as an adjective modifying the noun. + + Correct: + Ansible Tower® system performance is incredible. + + Incorrect: + Ansible's performance is incredible. + +Never use a trademark as a verb. Trademarks are products or services, never actions. + + Correct: + "Orchestrate your entire network using Ansible Tower®." + + Incorrect: + "Ansible your entire network." + +Never modify a trademark to a plural form. Instead, change the generic word from the singular to the plural. + + Correct: + "Corporate demand for Ansible Tower® configuration software is surging." + + Incorrect: + "Corporate demand for Ansible is surging." + +Never modify a trademark from its possessive form, or make a trademark possessive. Always use it in the form it has been registered. + +Never translate a trademark into another language. + +Never use trademarks to coin new words or names. + +Never use trademarks to create a play on words. + +Never alter a trademark in any way including through unapproved fonts or visual identifiers. 
+ +Never abbreviate or use any Ansible trademarks as an acronym. + +The importance of Ansible trademarks +++++++++++++++++++++++++++++++++++++++++++++++++ + +The Ansible trademark and the "A" logo in a shaded circle are our most valuable assets. The value of these trademarks encompass the Ansible Brand. Effective trademark use is more than just a name, it defines the level of quality the customer will receive and it ties a product or service to a corporate image. A trademark may serve as the basis for many of our everyday decisions and choices. The Ansible Brand is about how we treat customers and each other. In order to continue to build a stronger more valuable Brand we must use it in a clear and consistent manner. + +The mark consists of the letter "A" in a shaded circle. As of 5/11/15, this was a pending trademark (registration in process). + +Common Ansible Trademarks ++++++++++++++++++++++++++++++++++++++++ +* Ansible® +* Ansible Tower® + +Other Common Trademarks and Resource Sites: +++++++++++++++++++++++++++++++++++++++++++++++++ +- Linux is a registered trademark of Linus Torvalds. +- UNIX® is a registered trademark of The Open Group. +- Microsoft, Windows, Vista, XP, and NT are registered trademarks or trademarks of Microsoft Corporation in the United States and/or other countries. http://members.microsoft.com—en-us.mspx +- Apple, Mac, Mac OS, Macintosh, Pages and TrueType are either registered trademarks or trademarks of Apple Computer, Inc. in the United States and/or other countries. http://www.apple.com—appletmlist.html +- Adobe, Acrobat, GoLive, InDesign, Illustrator, PostScript , PhotoShop and the OpenType logo are either registered trademarks or trademarks of Adobe Systems Incorporated in the United States and/or other countries. htto:// www.adobe.com—trade.html +- Macromedia and Macromedia Flash are trademarks of Macromedia, Inc. http://www.adobe.com—trademarkguideline.html +- IBM is a registered trademark of International Business Machines Corporation. http://www.ibm.com—copytrade.shtml +- Celeron, Celeron Inside, Centrino, Centrino logo, Core Inside, Intel Core, Intel Inside, Intel Inside logo, Itanium, Itanium Inside, Pentium, Pentium Inside,VTune, Xeon, and Xeon Inside are trademarks or registered trademarks of Intel Corporation or its subsidiaries in the United States and other countries. http://www.intel.com—tradmarx.htm + diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/voice_style.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/voice_style.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/voice_style.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/voice_style.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,20 @@ + +Voice Style +````````````````````` +The essence of the Ansible writing style is short sentences that flow naturally together. Mix up sentence structures. Vary sentence subjects. Address the reader directly. Ask a question. And when the reader adjusts to the pace of shorter sentences, write a longer one. + +- Write how real people speak... +- ...but try to avoid slang and colloquialisms that might not translate well into other languages. +- Say big things with small words. +- Be direct. Tell the reader exactly what you want them to do. +- Be honest. +- Short sentences show confidence. +- Grammar rules are meant to be bent, but only if the reader knows you are doing this. +- Choose words with fewer syllables for faster reading and better understanding. 
+- Think of copy as one-on-one conversations rather than as a speech. It's more difficult to ignore someone who is speaking to you directly. +- When possible, start task-oriented sentences (those that direct a user to do something) with action words. For example: Find software... Contact support... Install the media.... and so forth. + +Active Voice +------------------ +Use the active voice ("Start Linuxconf by typing...") rather than passive ("Linuxconf can be started by typing...") whenever possible. Active voice makes for more lively, interesting reading. +Also avoid future tense (or using the term "will") whenever possible. For example, future tense ("The screen will display...") does not read as well as an active voice ("The screen displays"). Remember, the users you are writing for most often refer to the documentation while they are using the system, not after or in advance of using the system. diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/why_use.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/why_use.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/style_guide/why_use.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/style_guide/why_use.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,21 @@ +Why Use a Style Guide? +````````````````````````````````` + +Style guides are important because they ensure consistency in the content, look, and feel of a book or a website. + +Remember, a style guide is only useful if it is used, updated, and enforced. Style Guides are useful for engineering-related documentation, sales and marketing materials, support docs, community contributions, and more. + +As changes are made to the overall Ansible site design, be sure to update this style guide with those changes. Or, should other resources listed below have major revisions, consider including company information here for ease of reference. + +This style guide incorporates current Ansible resources and information so that overall site and documentation consistency can be met. + +.. raw:: html + +
+ + “If you don't find it in the index, look very carefully through the entire catalogue.” + ― Sears, Roebuck and Co., 1897 Sears Roebuck & Co. Catalogue + +.. raw:: html + +
diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/compile/index.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/compile/index.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/compile/index.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/compile/index.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,4 @@ +Compile Tests +============= + +See :doc:`../../testing_compile` for more information. diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/ansible-doc.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/ansible-doc.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/ansible-doc.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/ansible-doc.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,4 @@ +Sanity Tests » ansible-doc +========================== + +Verifies that ``ansible-doc`` can parse module documentation on all supported Python versions. diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/ansible-var-precedence-check.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/ansible-var-precedence-check.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/ansible-var-precedence-check.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/ansible-var-precedence-check.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,4 @@ +Sanity Tests » ansible-var-precedence-check +=========================================== + +Check the order of precedence for Ansible variables against :ref:`variable_precedence`. diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/boilerplate.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/boilerplate.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/boilerplate.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/boilerplate.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,10 @@ +Sanity Tests » boilerplate +========================== + +Most Python files should include the following boilerplate: + +.. code-block:: python + + from __future__ import (absolute_import, division, print_function) + __metaclass__ = type + diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/configure-remoting-ps1.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/configure-remoting-ps1.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/configure-remoting-ps1.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/configure-remoting-ps1.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,5 @@ +Sanity Tests » configure-remoting-ps1 +===================================== + +The file ``examples/scripts/ConfigureRemotingForAnsible.ps1`` is required and must be a regular file. +It is used by external automated processes and cannot be moved, renamed or replaced with a symbolic link. 
diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/empty-init.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/empty-init.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/empty-init.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/empty-init.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,7 @@ +Sanity Tests » empty-init +========================= + +The ``__init__.py`` files under the following directories must be empty: + +- ``lib/ansible/modules/`` +- ``test/units/`` diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/import.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/import.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/import.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/import.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,5 @@ +Sanity Tests » import +===================== + +All Python imports in ``lib/ansible/modules/`` and ``lib/ansible/module_utils/`` which are not from the Python standard library +must be imported in a try/except ImportError block. diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/integration-aliases.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/integration-aliases.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/integration-aliases.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/integration-aliases.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,36 @@ +Sanity Tests » integration-aliases +================================== + +Each integration test must have an ``aliases`` file to control test execution. + +If the tests cannot be run as part of CI (requires external services, unsupported dependencies, etc.), +then they most likely belong in ``test/integration/roles/`` instead of ``test/integration/targets/``. +In that case, do not add an ``aliases`` file. Instead, just relocate the tests. + +In some cases tests requiring external resources can be run as a part of CI. +This is often true when those resources can be provided by a docker container. + +However, if you think that the tests should be able to be supported by CI, please discuss test +organization with @mattclay or @gundalow on GitHub or #ansible-devel on IRC. + +If the tests can be run as part of CI, you'll need to add an appropriate CI alias, such as: + +- ``posix/ci/group1`` +- ``windows/ci/group2`` + +The CI groups are used to balance tests across multiple jobs to minimize test run time. +Using the relevant ``group1`` entry is fine in most cases. Groups can be changed later to redistribute tests. + +Aliases can also be used to express test requirements: + +- ``needs/privileged`` +- ``needs/root`` +- ``needs/ssh`` + +Other aliases are used to skip tests under certain conditions: + +- ``skip/freebsd`` +- ``skip/osx`` +- ``skip/python3`` + +Take a look at existing ``aliases`` files to see what aliases are available and how they're used. 
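A minimal sketch of the guarded-import pattern that the ``import`` sanity test above requires; ``boto3`` and the ``HAS_BOTO3`` flag are illustrative placeholders, not names taken from this diff.

.. code-block:: python

    # Non-stdlib imports under lib/ansible/modules/ and lib/ansible/module_utils/
    # must not raise at import time when the library is missing.
    try:
        import boto3  # hypothetical third-party dependency
        HAS_BOTO3 = True
    except ImportError:
        HAS_BOTO3 = False

    # The module can then report the missing dependency cleanly at runtime,
    # for example: module.fail_json(msg='boto3 is required for this module')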
diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/line-endings.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/line-endings.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/line-endings.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/line-endings.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,4 @@ +Sanity Tests » line-endings +=========================== + +All files must use ``\n`` for line endings instead of ``\r\n``. diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/no-basestring.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/no-basestring.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/no-basestring.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/no-basestring.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,11 @@ +Sanity Tests » no-basestring +============================ + +Do not use ``isinstance(s, basestring)`` as basestring has been removed in +Python3. You can import ``string_types``, ``binary_type``, or ``text_type`` +from ``ansible.module_utils.six`` and then use ``isinstance(s, string_types)`` +or ``isinstance(s, (binary_type, text_type))`` instead. + +If this is part of code to convert a string to a particular type, +``ansible.module_utils._text`` contains several functions that may be even +better for you: ``to_text``, ``to_bytes``, and ``to_native``. diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/no-dict-iteritems.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/no-dict-iteritems.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/no-dict-iteritems.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/no-dict-iteritems.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,16 @@ +Sanity Tests » no-dict-iteritems +================================ + +The ``dict.iteritems`` method has been removed in Python 3. There are two recommended alternatives: + +.. code-block:: python + + for KEY, VALUE in DICT.items(): + pass + +.. code-block:: python + + from ansible.module_utils.six import iteritems + + for KEY, VALUE in iteritems(DICT): + pass diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/no-dict-iterkeys.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/no-dict-iterkeys.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/no-dict-iterkeys.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/no-dict-iterkeys.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,9 @@ +Sanity Tests » no-dict-iterkeys +=============================== + +The ``dict.iterkeys`` method has been removed in Python 3. Use the following instead: + +.. code-block:: python + + for KEY in DICT: + pass diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/no-dict-itervalues.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/no-dict-itervalues.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/no-dict-itervalues.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/no-dict-itervalues.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,16 @@ +Sanity Tests » no-dict-itervalues +================================= + +The ``dict.itervalues`` method has been removed in Python 3. There are two recommended alternatives: + +.. 
code-block:: python + + for VALUE in DICT.values(): + pass + +.. code-block:: python + + from ansible.module_utils.six import itervalues + + for VALUE in itervalues(DICT): + pass diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/no-get-exception.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/no-get-exception.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/no-get-exception.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/no-get-exception.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,28 @@ +Sanity Tests » no-get-exception +=============================== + +We created a function, ``ansible.module_utils.pycompat24.get_exception``, to +help retrieve exceptions in a manner compatible with Python-2.4 through +Python-3.6. We no longer support Python-2.4 and Python-2.5 so this is +extraneous and we want to deprecate the function. Porting code should look +something like this: + +.. code-block:: python + + # Unfixed code: + try: + raise IOError('test') + except IOError: + e = get_exception() + do_something(e) + except: + e = get_exception() + do_something_else(e) + + # After fixing: + try: + raise IOError('test') + except IOError as e: + do_something(e) + except Exception as e: + do_something_else(e) diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/no-underscore-variable.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/no-underscore-variable.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/no-underscore-variable.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/no-underscore-variable.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,28 @@ +Sanity Tests » no-underscore-variable +===================================== + +In the future, Ansible may use the identifier ``_`` to internationalize its +message strings. To be ready for that, we need to make sure that there are +no conflicting identifiers defined in the code base. + +In common practice, ``_`` is frequently used as a dummy variable (a variable +to receive a value from a function where the value is useless and never used). +In Ansible, we're using the identifier ``dummy`` for this purpose instead. + +Example of unfixed code: + +.. code-block:: python + + for _ in range(0, retries): + success = retry_thing() + if success: + break + +Example of fixed code: + +.. code-block:: python + + for dummy in range(0, retries): + success = retry_thing() + if success: + break diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/no-unicode-literals.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/no-unicode-literals.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/no-unicode-literals.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/no-unicode-literals.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,16 @@ +Sanity Tests » no-unicode_literals +================================== + +The use of :code:`from __future__ import unicode_literals` has been deemed an anti-pattern. The +problems with it are: + +* It makes it so one can't jump into the middle of a file and know whether a bare literal string is + a byte string or text string. The programmer has to first check the top of the file to see if the + import is there.
+* It removes the ability to define native strings (a string which should be a byte string on python2 + and a text string on python3) via a string literal. +* It makes for more context switching. A programmer could be reading one file which has + `unicode_literals` and know that bare string literals are text strings but then switch to another + file (perhaps tracing program execution into a third party library) and have to switch their + understanding of what bare string literals are. + diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/no-wildcard-import.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/no-wildcard-import.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/no-wildcard-import.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/no-wildcard-import.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,29 @@ +Sanity Tests » no-wildcard-import +================================= + +Using :code:`import *` is a bad habit which pollutes your namespace, hinders +debugging, and interferes with static analysis of code. For those reasons, we +do want to limit the use of :code:`import *` in the ansible code. Change our +code to import the specific names that you need instead. + +Examples of unfixed code: + +.. code-block:: python + + from ansible.module_utils.six import * + if isinstance(variable, string_types): + do_something(variable) + + from ansible.module_utils.basic import * + module = AnsibleModule() + +Examples of fixed code: + +.. code-block:: python + + from ansible.module_utils import six + if isinstance(variable, six.string_types): + do_something(variable) + + from ansible.module_utils.basic import AnsibleModule + module = AnsibleModule() diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/pep8.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/pep8.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/pep8.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/pep8.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,6 @@ +Sanity Tests » pep8 +=================== + +Python static analysis for PEP 8 style guideline compliance. + +See :doc:`../../testing_pep8` for more information. diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/pylint-ansible-test.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/pylint-ansible-test.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/pylint-ansible-test.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/pylint-ansible-test.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,6 @@ +Sanity Tests » pylint-ansible-test +================================== + +Python static analysis for common programming errors. + +A more strict set of rules applied to ``ansible-test``. diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/pylint.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/pylint.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/pylint.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/pylint.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,4 @@ +Sanity Tests » pylint +===================== + +Python static analysis for common programming errors. 
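The native-string point in the ``no-unicode_literals`` entry above is easier to see with a small sketch (illustrative only, not code from this changeset): once the future import is active, a bare literal is always text, so a string that must stay a native ``str`` on both Python 2 and Python 3 has to be spelled explicitly.

.. code-block:: python

    from __future__ import unicode_literals

    HEADER_TEXT = 'Content-Type'          # text (unicode) on Python 2 and Python 3
    HEADER_NATIVE = str('Content-Type')   # native str on both, now needs an explicit wrapper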
diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/replace-urlopen.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/replace-urlopen.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/replace-urlopen.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/replace-urlopen.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,4 @@ +Sanity Tests » replace-urlopen +============================== + +Use ``open_url`` from ``module_utils`` instead of ``urlopen``. diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/required-and-default-attributes.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/required-and-default-attributes.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/required-and-default-attributes.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/required-and-default-attributes.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,5 @@ +Sanity Tests » required-and-default-attributes +============================================== + +Use only one of ``default`` or ``required`` with ``FieldAttribute``. + diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/rstcheck.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/rstcheck.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/rstcheck.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/rstcheck.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,4 @@ +Sanity Tests » rstcheck +======================= + +Check reStructuredText files for syntax and formatting issues. diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/sanity-docs.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/sanity-docs.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/sanity-docs.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/sanity-docs.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,4 @@ +Sanity Tests » sanity-docs +========================== + +Documentation for each ``ansible-test sanity`` test is required. diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/shebang.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/shebang.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/shebang.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/shebang.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,16 @@ +Sanity Tests » shebang +====================== + +Most executable files should only use one of the following shebangs: + +- ``#!/bin/sh`` +- ``#!/bin/bash`` +- ``#!/usr/bin/make`` +- ``#!/usr/bin/env python`` +- ``#!/usr/bin/env bash`` + +NOTE: For ``#!/bin/bash``, any of the options ``eux`` may also be used, such as ``#!/bin/bash -eux``. + +This does not apply to Ansible modules, which should not be executable and must always use ``#!/usr/bin/python``. + +Some exceptions are permitted. Ask if you have questions. 
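As a rough sketch of what the ``replace-urlopen`` test above asks for (the URL and variable names are placeholders, and the keyword arguments shown are common ones rather than the full signature):

.. code-block:: python

    from ansible.module_utils.urls import open_url

    # Fetch a URL through the bundled helper instead of urllib's urlopen;
    # open_url integrates with Ansible's proxy, TLS validation and auth options.
    response = open_url('https://example.com/status', method='GET', timeout=10)
    body = response.read()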
diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/shellcheck.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/shellcheck.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/shellcheck.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/shellcheck.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,4 @@ +Sanity Tests » shellcheck +========================= + +Static code analysis for shell scripts using the excellent `shellcheck `_ tool. diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/test-constraints.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/test-constraints.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/test-constraints.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/test-constraints.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,4 @@ +Sanity Tests » test-constraints +=============================== + +Constraints for test requirements should be in ``test/runner/requirements/constraints.txt``. diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/use-compat-six.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/use-compat-six.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/use-compat-six.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/use-compat-six.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,4 @@ +Sanity Tests » use-compat-six +============================= + +Use ``six`` from ``module_utils`` instead of ``six``. diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/validate-modules.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/validate-modules.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/validate-modules.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/validate-modules.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,6 @@ +Sanity Tests » validate-modules +=============================== + +Analyze modules for common issues in code and documentation. + +See :doc:`../../testing_validate-modules` for more information. diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/yamllint.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/yamllint.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing/sanity/yamllint.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing/sanity/yamllint.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,4 @@ +Sanity Tests » yamllint +======================= + +Check YAML files for syntax and formatting issues. 
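The ``use-compat-six`` entry above is terse; the intent is to import the copy of ``six`` bundled under ``module_utils`` rather than a separately installed ``six`` package. A minimal sketch under that reading (the ``value`` name is a placeholder):

.. code-block:: python

    # Instead of:  import six
    from ansible.module_utils import six
    # or pull in just the helpers you need:
    from ansible.module_utils.six import string_types

    if isinstance(value, string_types):
        pass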
diff -Nru ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing_validate-modules.rst ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing_validate-modules.rst --- ansible-2.3.2.0/docs/docsite/rst/dev_guide/testing_validate-modules.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/dev_guide/testing_validate-modules.rst 2017-09-19 17:10:47.000000000 +0000 @@ -57,127 +57,86 @@ Errors ------ -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| code | sample message | -+=========+============================================================================================================================================+ -| **1xx** | **Locations** | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 101 | Interpreter line is not ``#!/usr/bin/python`` | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 102 | Interpreter line is not ``#!powershell`` | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 103 | Did not find a call to ``main()`` | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 104 | Call to ``main()`` not the last line | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 105 | GPLv3 license header not found | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 106 | Import found before documentation variables. All imports must appear below ``DOCUMENTATION``/``EXAMPLES``/``RETURN``/``ANSIBLE_METADATA`` | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 107 | Imports should be directly below ``DOCUMENTATION``/``EXAMPLES``/``RETURN``/``ANSIBLE_METADATA`` | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| **2xx** | **Imports** | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 201 | Did not find a ``module_utils`` import | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 203 | ``requests`` import found, should use ``ansible.module_utils.urls`` instead | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 204 | ``boto`` import found, new modules should use ``boto3`` | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 205 | ``sys.exit()`` call found. 
Should be ``exit_json``/``fail_json`` | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 206 | ``WANT_JSON`` not found in module | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 207 | ``REPLACER_WINDOWS`` not found in module | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 208 | ``module_utils`` imports should import specific components, not ``*`` | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| **3xx** | **Documentation** | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 301 | No ``DOCUMENTATION`` provided | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 302 | ``DOCUMENTATION`` is not valid YAML | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 303 | ``DOCUMENTATION`` fragment missing | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 304 | Unknown ``DOCUMENTATION`` error | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 305 | Invalid ``DOCUMENTATION`` schema | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 306 | Module level ``version_added`` is not a valid version number | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 307 | Module level ``version_added`` is incorrect | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 308 | ``version_added`` for new option is not a valid version number | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 309 | ``version_added`` for new option is incorrect | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 310 | No ``EXAMPLES`` provided | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 311 | ``EXAMPLES`` is not valid YAML | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 312 | No ``RETURN`` documentation provided | 
-+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 313 | ``RETURN`` is not valid YAML | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 314 | No ``ANSIBLE_METADATA`` provided | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 315 | ``ANSIBLE_METADATA`` is not valid YAML | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 316 | Invalid ``ANSIBLE_METADATA`` schema | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 317 | option is marked as required but specifies a default. Arguments with a default should not be marked as required | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 318 | Module deprecated, but DOCUMENTATION.deprecated is missing | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 319 | ``RETURN`` fragments missing or invalid | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| **4xx** | **Syntax** | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 401 | Python ``SyntaxError`` while parsing module | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 402 | Indentation contains tabs | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 403 | Type comparison using ``type()`` found. 
Use ``isinstance()`` instead | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| **5xx** | **Naming** | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 501 | Official Ansible modules must have a ``.py`` extension for python modules or a ``.ps1`` for powershell modules | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 502 | Ansible module subdirectories must contain an ``__init__.py`` | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 503 | Missing python documentation file | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ +========= =================== + code sample message +--------- ------------------- + **1xx** **Locations** + 101 Interpreter line is not ``#!/usr/bin/python`` + 102 Interpreter line is not ``#!powershell`` + 103 Did not find a call to ``main()`` + 104 Call to ``main()`` not the last line + 105 GPLv3 license header not found + 106 Import found before documentation variables. All imports must appear below + ``DOCUMENTATION``/``EXAMPLES``/``RETURN``/``ANSIBLE_METADATA`` + 107 Imports should be directly below ``DOCUMENTATION``/``EXAMPLES``/``RETURN``/``ANSIBLE_METADATA`` + .. +--------- ------------------- + **2xx** **Imports** + 201 Did not find a ``module_utils`` import + 203 ``requests`` import found, should use ``ansible.module_utils.urls`` instead + 204 ``boto`` import found, new modules should use ``boto3`` + 205 ``sys.exit()`` call found. Should be ``exit_json``/``fail_json`` + 206 ``WANT_JSON`` not found in module + 207 ``REPLACER_WINDOWS`` not found in module + 208 ``module_utils`` imports should import specific components, not ``*`` + 209 Only the following ``from __future__`` imports are allowed: + ``absolute_import``, ``division``, and ``print_function``. + .. +--------- ------------------- + **3xx** **Documentation** + 301 No ``DOCUMENTATION`` provided + 302 ``DOCUMENTATION`` is not valid YAML + 303 ``DOCUMENTATION`` fragment missing + 304 Unknown ``DOCUMENTATION`` error + 305 Invalid ``DOCUMENTATION`` schema + 306 Module level ``version_added`` is not a valid version number + 307 Module level ``version_added`` is incorrect + 308 ``version_added`` for new option is not a valid version number + 309 ``version_added`` for new option is incorrect + 310 No ``EXAMPLES`` provided + 311 ``EXAMPLES`` is not valid YAML + 312 No ``RETURN`` documentation provided + 313 ``RETURN`` is not valid YAML + 314 No ``ANSIBLE_METADATA`` provided + 315 ``ANSIBLE_METADATA`` is not valid YAML + 316 Invalid ``ANSIBLE_METADATA`` schema + 317 option is marked as required but specifies a default. + Arguments with a default should not be marked as required + 318 Module deprecated, but DOCUMENTATION.deprecated is missing + 319 ``RETURN`` fragments missing or invalid + .. 
+--------- ------------------- + **4xx** **Syntax** + 401 Python ``SyntaxError`` while parsing module + 402 Indentation contains tabs + 403 Type comparison using ``type()`` found. Use ``isinstance()`` instead + .. +--------- ------------------- + **5xx** **Naming** + 501 Official Ansible modules must have a ``.py`` extension for python + modules or a ``.ps1`` for powershell modules + 502 Ansible module subdirectories must contain an ``__init__.py`` + 503 Missing python documentation file +========= =================== Warnings -------- -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| code | sample message | -+=========+============================================================================================================================================+ -| **1xx** | **Locations** | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 107 | Imports should be directly below ``DOCUMENTATION``/``EXAMPLES``/``RETURN``/``ANSIBLE_METADATA`` for legacy modules | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| **2xx** | **Imports** | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 208 | ``module_utils`` imports should import specific components for legacy module, not ``*`` | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 291 | Try/Except ``HAS_`` expression missing | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 292 | Did not find ``ansible.module_utils.basic`` import | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| **3xx** | **Documentation** | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 312 | No ``RETURN`` documentation provided for legacy module | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 391 | Unknown pre-existing ``DOCUMENTATION`` error | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ -| 392 | Pre-existing ``DOCUMENTATION`` fragment missing | -+---------+--------------------------------------------------------------------------------------------------------------------------------------------+ +========= =================== + code sample message +--------- ------------------- + **1xx** **Locations** + 107 Imports should be directly below ``DOCUMENTATION``/``EXAMPLES``/``RETURN``/``ANSIBLE_METADATA`` for legacy 
modules + .. +--------- ------------------- + **2xx** **Imports** + 208 ``module_utils`` imports should import specific components for legacy module, not ``*`` + 291 Try/Except ``HAS_`` expression missing + 292 Did not find ``ansible.module_utils.basic`` import + .. +--------- ------------------- + **3xx** **Documentation** + 312 No ``RETURN`` documentation provided for legacy module + 391 Unknown pre-existing ``DOCUMENTATION`` error + 392 Pre-existing ``DOCUMENTATION`` fragment missing +========= =================== diff -Nru ansible-2.3.2.0/docs/docsite/rst/faq.rst ansible-2.4.0.0/docs/docsite/rst/faq.rst --- ansible-2.3.2.0/docs/docsite/rst/faq.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/faq.rst 2017-09-19 17:10:47.000000000 +0000 @@ -1,7 +1,7 @@ Frequently Asked Questions ========================== -Here are some commonly-asked questions and their answers. +Here are some commonly asked questions and their answers. .. _set_environment: @@ -9,7 +9,7 @@ How can I set the PATH or any other environment variable for a task or entire playbook? +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -Setting environment variables can be done with the `environment` keyword. It can be used at task or play level:: +Setting environment variables can be done with the `environment` keyword. It can be used at the task or the play level:: environment: PATH: "{{ ansible_env.PATH }}:/thingy/bin" @@ -111,10 +111,10 @@ While you can write ansible modules in any language, most ansible modules are written in Python, and some of these are important core ones. -By default Ansible assumes it can find a /usr/bin/python on your remote system that is a 2.X version of Python, specifically -2.4 or higher. +By default, Ansible assumes it can find a /usr/bin/python on your remote system that is a 2.X version of Python, specifically +2.6 or higher. -Setting of an inventory variable 'ansible_python_interpreter' on any host will allow Ansible to auto-replace the interpreter +Setting the inventory variable 'ansible_python_interpreter' on any host will allow Ansible to auto-replace the interpreter used when executing python modules. Thus, you can point to any python you want on the system if /usr/bin/python on your system does not point to a Python 2.X interpreter. @@ -149,7 +149,7 @@ ++++++++++++++++++++++++ If cowsay is installed, Ansible takes it upon itself to make your day happier when running playbooks. If you decide -that you would like to work in a professional cow-free environment, you can either uninstall cowsay, or set an environment variable: +that you would like to work in a professional cow-free environment, you can either uninstall cowsay, or set the :envvar:`ANSIBLE_NOCOWS` environment variable: .. code-block:: shell-session @@ -173,7 +173,7 @@ How do I see all the inventory vars defined for my host? ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ -You can see the resulting vars you define in inventory running the following command: +By running the following command, you can see vars resulting from what you've defined in the inventory: .. code-block:: shell-session @@ -229,7 +229,7 @@ What happens if we want the ip address of the first webserver in the webservers group? Well, we can do that too. Note that if we are using dynamic inventory, which host is the 'first' may not be consistent, so you wouldn't want to do this unless your inventory -was static and predictable. 
(If you are using :doc:`tower`, it will use database order, so this isn't a problem even if you are using cloud +is static and predictable. (If you are using :doc:`tower`, it will use database order, so this isn't a problem even if you are using cloud based inventory scripts). Anyway, here's the trick: @@ -252,7 +252,7 @@ How do I copy files recursively onto a target host? +++++++++++++++++++++++++++++++++++++++++++++++++++ -The "copy" module has a recursive parameter, though if you want to do something more efficient for a large number of files, take a look at the "synchronize" module instead, which wraps rsync. See the module index for info on both of these modules. +The "copy" module has a recursive parameter. However, take a look at the "synchronize" module if you want to do something more efficient for a large number of files. The "synchronize" module wraps rsync. See the module index for info on both of these modules. .. _shell_env: @@ -260,7 +260,7 @@ ++++++++++++++++++++++++++++++++++++++++++++ If you just need to access existing variables, use the 'env' lookup plugin. For example, to access the value of the HOME -environment variable on management machine:: +environment variable on the management machine:: --- # ... @@ -269,7 +269,7 @@ If you need to set environment variables, see the Advanced Playbooks section about environments. -Ansible 1.4 will also make remote environment variables available via facts in the 'ansible_env' variable: +Starting with Ansible 1.4, remote environment variables are available via facts in the 'ansible_env' variable: .. code-block:: jinja @@ -325,7 +325,7 @@ How do I submit a change to the documentation? ++++++++++++++++++++++++++++++++++++++++++++++ -Great question! Documentation for Ansible is kept in the main project git repository, and complete instructions for contributing can be found in the docs README `viewable on GitHub `_. Thanks! +Great question! Documentation for Ansible is kept in the main project git repository, and complete instructions for contributing can be found in the docs README `viewable on GitHub `_. Thanks! .. _keep_secret_data: @@ -350,7 +350,7 @@ Though this will make the play somewhat difficult to debug. It's recommended that this be applied to single tasks only, once a playbook is completed. Note that the use of the no_log attribute does not prevent data from being shown when debugging Ansible itself via -the ANSIBLE_DEBUG environment variable. +the :envvar:`ANSIBLE_DEBUG` environment variable. .. _when_to_use_brackets: @@ -364,7 +364,7 @@ Conditionals are always run through Jinja2 as to resolve the expression, so `when:`, `failed_when:` and `changed_when:` are always templated and you should avoid adding `{{}}`. -In most other cases you should always use the brackets, even if previouslly you could use variables without specifying (like `with_` clauses), +In most other cases you should always use the brackets, even if previously you could use variables without specifying (like `with_` clauses), as this made it hard to distinguish between an undefined variable and a string. Another rule is 'moustaches don't stack'. 
We often see this: diff -Nru ansible-2.3.2.0/docs/docsite/rst/galaxy.rst ansible-2.4.0.0/docs/docsite/rst/galaxy.rst --- ansible-2.3.2.0/docs/docsite/rst/galaxy.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/galaxy.rst 2017-09-19 17:10:47.000000000 +0000 @@ -1,7 +1,7 @@ Ansible Galaxy ++++++++++++++ -*Ansible Galaxy* refers to the `Galaxy `_ website where users can share roles, and to a command line tool for installing, +*Ansible Galaxy* refers to the `Galaxy `_ website where users can share roles, and to a command line tool for installing, creating and managing roles. .. contents:: Topics @@ -9,11 +9,11 @@ The Website ``````````` -`Galaxy `_, is a free site for finding, downloading, and sharing community developed roles. Downloading roles from Galaxy is +`Galaxy `_, is a free site for finding, downloading, and sharing community developed roles. Downloading roles from Galaxy is a great way to jumpstart your automation projects. -You can also use the site to share roles that you create. By authenticating with the site using your GitHub account, you're able to *import* roles, making -them available to the Ansible community. Imported roles become available in the Galaxy search index and visible on the site, allowing users to +You can also use the site to share roles that you create. By authenticating with the site using your GitHub account, you're able to *import* roles, making +them available to the Ansible community. Imported roles become available in the Galaxy search index and visible on the site, allowing users to discover and download them. Learn more by viewing `the About page `_. @@ -21,7 +21,7 @@ The command line tool ````````````````````` -The ``ansible-galaxy`` command comes bundled with Ansible, and you can use it to install roles from Galaxy or directly from a git based SCM. You can +The ``ansible-galaxy`` command comes bundled with Ansible, and you can use it to install roles from Galaxy or directly from a git based SCM. You can also use it to create a new role, remove roles, or perform tasks on the Galaxy website. The command line tool by default communicates with the Galaxy website API using the server address *https://galaxy.ansible.com*. Since the `Galaxy project `_ @@ -41,7 +41,7 @@ roles_path ========== -Be aware that by default Ansible downloads roles to the path specified by the environment variable *ANSIBLE_ROLES_PATH*. This can be set to a series of +Be aware that by default Ansible downloads roles to the path specified by the environment variable :envvar:`ANSIBLE_ROLES_PATH`. This can be set to a series of directories (i.e. */etc/ansible/roles:~/.ansible/roles*), in which case the first writable path will be used. When Ansible is first installed it defaults to */etc/ansible/roles*, which requires *root* privileges. @@ -55,7 +55,7 @@ .. seealso:: :doc:`intro_configuration` - All about configuration files + All about configuration files version ======= @@ -64,7 +64,7 @@ :: - $ ansible-galaxy install geerlingguy.apache,v1.0.0 + $ ansible-galaxy install geerlingguy.apache,v1.0.0 It's also possible to point directly to the git repository and specify a branch name or commit hash as the version. For example, the following will install a specific commit: @@ -77,7 +77,7 @@ Installing multiple roles from a file ===================================== -Beginning with Ansible 1.8 it is possible to install multiple roles by including the roles in a *requirements.yml* file. 
The format of the file is YAML, and the +Beginning with Ansible 1.8 it is possible to install multiple roles by including the roles in a *requirements.yml* file. The format of the file is YAML, and the file extension must be either *.yml* or *.yaml*. Use the following command to install roles included in *requirements.yml*: @@ -93,14 +93,14 @@ src The source of the role. Use the format *username.role_name*, if downloading from Galaxy; otherwise, provide a URL pointing - to a repository within a git based SCM. See the examples below. This is a required attribute. + to a repository within a git based SCM. See the examples below. This is a required attribute. scm - Specify the SCM. As of this writing only *git* or *hg* are supported. See the examples below. Defaults to *git*. + Specify the SCM. As of this writing only *git* or *hg* are supported. See the examples below. Defaults to *git*. version: The version of the role to download. Provide a release tag value, commit hash, or branch name. Defaults to *master*. name: Download the role to a specific name. Defaults to the Galaxy name when downloading from Galaxy, otherwise it defaults - to the name of the repository. + to the name of the repository. Use the following example as a guide for specifying roles in *requirements.yml*: @@ -116,7 +116,7 @@ - src: https://github.com/bennojoy/nginx version: master name: nginx_role - + # from a webserver, where the role is packaged in a tar.gz - src: https://some.webserver.example.com/files/master.tar.gz name: http-role @@ -128,46 +128,82 @@ # from Bitbucket, alternative syntax and caveats - src: http://bitbucket.org/willthames/hg-ansible-galaxy scm: hg - + # from GitLab or other git-based scm - src: git@gitlab.company.com:mygroup/ansible-base.git scm: git version: "0.1" # quoted, so YAML doesn't parse this as a floating-point value +Installing multiple roles from multiple files +============================================= + +At a basic level, including requirements files allows you to break up bits of roles into smaller files. Role includes pull in roles from other files. + +Use the following command to install the roles included in *requirements.yml* + *webserver.yml* + +:: + + ansible-galaxy install -r requirements.yml + +Content of the *requirements.yml* file: + +:: + + # from galaxy + - src: yatesr.timezone + + - import_tasks: /webserver.yml + + +Content of the *webserver.yml* file: + +:: + + # from github + - src: https://github.com/bennojoy/nginx + + # from Bitbucket + - src: git+http://bitbucket.org/willthames/git-ansible-galaxy + version: v1.4 + Dependencies ============ Roles can also be dependent on other roles, and when you install a role that has dependencies, those dependencies will automatically be installed. -You specify role dependencies in the *meta/main.yml* file by providing a list of roles. If the source of a role is Galaxy, you can simply specify the role in -the format *username.role_name*. The more complex format used in *requirements.yml* is also supported, allowing you to provide src, scm, version and name. +You specify role dependencies in the ``meta/main.yml`` file by providing a list of roles. If the source of a role is Galaxy, you can simply specify the role in +the format ``username.role_name``. The more complex format used in ``requirements.yml`` is also supported, allowing you to provide ``src``, ``scm``, ``version``, and ``name``. + +Tags are inherited *down* the dependency chain. 
In order for tags to be applied to a role and all its dependencies, the tag should be applied to the role, not to all the tasks within a role. + +Roles listed as dependencies are subject to conditionals and tag filtering, and may not execute fully depending on +what tags and conditionals are applied. Dependencies found in Galaxy can be specified as follows: -:: +:: dependencies: - - geerlingguy.apache + - geerlingguy.apache - geerlingguy.ansible -The complex form can also be used as follows: +The complex form can also be used as follows: :: dependencies: - src: geerlingguy.ansible - - src: git+https://github.com/geerlingguy/ansible-role-composer.git + - src: git+https://github.com/geerlingguy/ansible-role-composer.git version: 775396299f2da1f519f0d8885022ca2d6ee80ee8 name: composer -When dependencies are encountered by ``ansible-galaxy``, it will automatically install each dependency to the *roles_path*. To understand how dependencies -are handled during play execution, see :doc:`playbooks_roles`. +When dependencies are encountered by ``ansible-galaxy``, it will automatically install each dependency to the ``roles_path``. To understand how dependencies are handled during play execution, see :doc:`playbooks_reuse_roles`. .. note:: - At the time of this writing, the Galaxy website expects all role dependencies to exist in Galaxy, and therefore dependencies to be specified in the - *username.role_name* format. If you import a role with a dependency where the *src* value is a URL, the import process will fail. + At the time of this writing, the Galaxy website expects all role dependencies to exist in Galaxy, and therefore dependencies to be specified in the + ``username.role_name`` format. If you import a role with a dependency where the ``src`` value is a URL, the import process will fail. Create roles ------------ @@ -179,7 +215,7 @@ $ ansible-galaxy init role_name The above will create the following directory structure in the current working directory: - + :: README.md @@ -201,7 +237,7 @@ Force ===== -If a directory matching the name of the role already exists in the current working directory, the init command will result in an error. To ignore the error +If a directory matching the name of the role already exists in the current working directory, the init command will result in an error. To ignore the error use the *--force* option. Force will create the above subdirectories and files, replacing anything that matches. Container Enabled @@ -247,7 +283,7 @@ The search command will return a list of the first 1000 results matching your search: :: - + Found 2 roles matching your search: Name Description @@ -326,7 +362,7 @@ ------------------------ Using the ``import``, ``delete`` and ``setup`` commands to manage your roles on the Galaxy website requires authentication, and the ``login`` command -can be used to do just that. Before you can use the ``login`` command, you must create an account on the Galaxy website. +can be used to do just that. Before you can use the ``login`` command, you must create an account on the Galaxy website. The ``login`` command requires using your GitHub credentials. You can use your username and password, or you can create a `personal access token `_. If you choose to create a token, grant minimal access to the token, as it is used just to verify identity. @@ -346,11 +382,11 @@ Password for dsmith: Successfully logged into Galaxy as dsmith -When you choose to use your username and password, your password is not sent to Galaxy. 
It is used to authenticates with GitHub and create a personal access token. +When you choose to use your username and password, your password is not sent to Galaxy. It is used to authenticate with GitHub and create a personal access token. It then sends the token to Galaxy, which in turn verifies your identity and returns a Galaxy access token. After authentication completes the GitHub token is -destroyed. +destroyed. -If you do not wish to use your GitHub password, or if you have two-factor authentication enabled with GitHub, use the *--github-token* option to pass a personal access token +If you do not wish to use your GitHub password, or if you have two-factor authentication enabled with GitHub, use the *--github-token* option to pass a personal access token that you create. @@ -358,7 +394,7 @@ ------------- The ``import`` command requires that you first authenticate using the ``login`` command. Once authenticated you can import any GitHub repository that you own or have -been granted access. +been granted access to. Use the following to import a role: @@ -417,20 +453,20 @@ Travis integrations ------------------- -You can create an integration or connection between a role in Galaxy and `Travis `_. Once the connection is established, a build in Travis will -automatically trigger an import in Galaxy, updating the search index with the latest information about the role. +You can create an integration or connection between a role in Galaxy and `Travis `_. Once the connection is established, a build in Travis will +automatically trigger an import in Galaxy, updating the search index with the latest information about the role. -You create the integration using the ``setup`` command, but before an integration can be created, you must first authenticate using the ``login`` command; you will -also need an account in Travis, and your Travis token. Once you're ready, use the following command to create the integration: +You create the integration using the ``setup`` command, but before an integration can be created, you must first authenticate using the ``login`` command; you will +also need an account in Travis, and your Travis token. Once you're ready, use the following command to create the integration: :: $ ansible-galaxy setup travis github_user github_repo xxx-travis-token-xxx -The setup command requires your Travis token, however the token is not stored in Galaxy. It is used along with the GitHub username and repo to create a hash as described +The setup command requires your Travis token, however the token is not stored in Galaxy. It is used along with the GitHub username and repo to create a hash as described in `the Travis documentation `_. The hash is stored in Galaxy and used to verify notifications received from Travis. -The setup command enables Galaxy to respond to notifications. To configure Travis to run a build on your repository and send a notification, follow the +The setup command enables Galaxy to respond to notifications. To configure Travis to run a build on your repository and send a notification, follow the `Travis getting started guide `_. To instruct Travis to notify Galaxy when a build completes, add the following to your .travis.yml file: @@ -471,7 +507,7 @@ .. seealso:: - :doc:`playbooks_roles` + :doc:`playbooks_reuse_roles` All about ansible roles `Mailing List `_ Questions? Help? Ideas? 
Stop by the list on Google Groups diff -Nru ansible-2.3.2.0/docs/docsite/rst/glossary.rst ansible-2.4.0.0/docs/docsite/rst/glossary.rst --- ansible-2.3.2.0/docs/docsite/rst/glossary.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/glossary.rst 2017-09-19 17:10:47.000000000 +0000 @@ -436,7 +436,7 @@ labeled ``ntp``, and then run just the ``ntp`` steps to reconfigure the time server information on a remote host. - Tasks + Task :term:`Playbooks` exist to run tasks. Tasks combine an :term:`action` (a module and its arguments) with a name and optionally some other keywords (like :term:`looping directives `). :term:`Handlers` @@ -444,6 +444,9 @@ unless they are notified by name when a task reports an underlying change on a remote system. + Tasks + A list of :term:`Task`. + Templates Ansible can easily transfer files to remote systems but often it is desirable to substitute variables in other files. Variables may come diff -Nru ansible-2.3.2.0/docs/docsite/rst/guide_aws.rst ansible-2.4.0.0/docs/docsite/rst/guide_aws.rst --- ansible-2.3.2.0/docs/docsite/rst/guide_aws.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/guide_aws.rst 2017-09-19 17:10:47.000000000 +0000 @@ -117,7 +117,7 @@ add_host: hostname={{ item.public_ip }} groups=ec2hosts with_items: "{{ ec2.instances }}" -With the host group now created, a second play at the bottom of the the same provisioning playbook file might now have some configuration steps:: +With the host group now created, a second play at the bottom of the same provisioning playbook file might now have some configuration steps:: # demo_setup.yml diff -Nru ansible-2.3.2.0/docs/docsite/rst/guide_azure.rst ansible-2.4.0.0/docs/docsite/rst/guide_azure.rst --- ansible-2.3.2.0/docs/docsite/rst/guide_azure.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/guide_azure.rst 2017-09-19 17:10:47.000000000 +0000 @@ -7,14 +7,19 @@ Requirements ------------ -Using the Azure Resource Manager modules requires having `Azure Python SDK `_ -installed on the host running Ansible. You will need to have == v2.0.0RC5 installed. The simplest way to install the -SDK is via pip: +Using the Azure Resource Manager modules requires having specific Azure SDK modules +installed on the host running Ansible. .. code-block:: bash - $ pip install "azure==2.0.0rc5" + $ pip install ansible[azure] +If you are running Ansible from source, you can install the dependencies from the +root directory of the Ansible repo. + +.. code-block:: bash + + $ pip install .[azure] Authenticating with Azure ------------------------- @@ -114,6 +119,14 @@ * subscription_id +Other Cloud Environments +------------------------ + +To use an Azure Cloud other than the default public cloud (eg, Azure China Cloud, Azure US Government Cloud, Azure Stack), +pass the "cloud_environment" argument to modules, configure it in a credential profile, or set the "AZURE_CLOUD_ENVIRONMENT" +environment variable. The value is either a cloud name as defined by the Azure Python SDK (eg, "AzureChinaCloud", +"AzureUSGovernment"; defaults to "AzureCloud") or an Azure metadata discovery URL (for Azure Stack). 
+ Creating Virtual Machines ------------------------- @@ -156,7 +169,7 @@ name: publicip001 - name: Create security group that allows SSH - azure_rm_securitygroup: + azure_rm_securitygroup: resource_group: Testing name: secgroup001 rules: diff -Nru ansible-2.3.2.0/docs/docsite/rst/guide_cloudstack.rst ansible-2.4.0.0/docs/docsite/rst/guide_cloudstack.rst --- ansible-2.3.2.0/docs/docsite/rst/guide_cloudstack.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/guide_cloudstack.rst 2017-09-19 17:10:47.000000000 +0000 @@ -13,7 +13,7 @@ Prerequisites ````````````` -Prerequisites for using the CloudStack modules are minimal. In addition to ansible itself, all of the modules require the python library ``cs`` https://pypi.python.org/pypi/cs. +Prerequisites for using the CloudStack modules are minimal. In addition to Ansible itself, all of the modules require the python library ``cs`` https://pypi.python.org/pypi/cs. You'll need this Python module installed on the execution host, usually your workstation. @@ -21,11 +21,17 @@ $ pip install cs +Or alternatively starting with Debian 9 and Ubuntu 16.04: + +.. code-block:: bash + + $ sudo apt install python-cs + .. note:: cs also includes a command line interface for ad-hoc interaction with the CloudStack API e.g. ``$ cs listVirtualMachines state=Running``. Limitations and Known Issues ```````````````````````````` -VPC support is not yet fully implemented and tested. The community is working on the VPC integration. +VPC support has been improved since Ansible 2.3 but is still not yet fully implemented. The community is working on the VPC integration. Credentials File ```````````````` @@ -46,9 +52,36 @@ endpoint = https://cloud.example.com/client/api key = api key secret = api secret + timeout = 30 .. Note:: The section ``[cloudstack]`` is the default section. ``CLOUDSTACK_REGION`` environment variable can be used to define the default section. +.. versionadded:: 2.4 + +The ENV variables support ``CLOUDSTACK_*`` as written in the documentation of the library ``cs``, like e.g ``CLOUDSTACK_TIMEOUT``, ``CLOUDSTACK_METHOD``, etc. has been implemented into Ansible. It is even possible to have some incomplete config in your cloudstack.ini: + +.. code-block:: bash + + $ cat $HOME/.cloudstack.ini + [cloudstack] + endpoint = https://cloud.example.com/client/api + timeout = 30 + +and fulfill the missing data by either setting ENV variables or tasks params: + +.. code-block:: yaml + + --- + - name: provision our VMs + hosts: cloud-vm + connection: local + tasks: + - name: ensure VMs are created and running + cs_instance: + api_key: your api key + api_secret: your api secret + ... + Regions ``````` If you use more than one CloudStack region, you can define as many sections as you want and name them as you like, e.g.: diff -Nru ansible-2.3.2.0/docs/docsite/rst/guide_gce.rst ansible-2.4.0.0/docs/docsite/rst/guide_gce.rst --- ansible-2.3.2.0/docs/docsite/rst/guide_gce.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/guide_gce.rst 2017-09-19 17:10:47.000000000 +0000 @@ -95,7 +95,7 @@ .. code-block:: python GCE_PARAMS = ('', '') - GCE_KEYWORD_PARAMS = {'project': 'project_id'} + GCE_KEYWORD_PARAMS = {'project': 'project_id', 'datacenter': ''} Configuring Modules with Environment Variables `````````````````````````````````````````````` @@ -168,7 +168,7 @@ exit 1 fi - export SSL_CERT_FILE=$(pwd)/cacert.cer + export SSL_CERT_FILE=$(pwd)/cacert.pem export ANSIBLE_HOST_KEY_CHECKING=False if [[ ! 
-f "$SSL_CERT_FILE" ]]; then diff -Nru ansible-2.3.2.0/docs/docsite/rst/guide_kubernetes.rst ansible-2.4.0.0/docs/docsite/rst/guide_kubernetes.rst --- ansible-2.3.2.0/docs/docsite/rst/guide_kubernetes.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/guide_kubernetes.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,56 @@ +Getting Started with Kubernetes and OpenShift +============================================= + +Modules for interacting with the Kubernetes (K8s) and OpenShift API are under development, and can be used in preview mode. To use them, review the requirements, and then follow the installation and use instructions. + +Requirements +------------ + +To use the modules, you'll need the following: + +- Run Ansible from source. For assistance, view `running from source <./intro_installation.html/#running-from-source>`_ +- `OpenShift Rest Client `_ installed on the host that will execute the modules + + +Installation and use +-------------------- + +The individual modules, as of this writing, are not part of the Ansible repository, but they can be accessed by installing the role, `ansible.kubernetes-modules `_, and including it in a playbook. + +To install, run the following: + +.. code-block:: bash + + $ ansible-galaxy install ansible.kubernetes-modules + +Next, include it in a playbook, as follows: + +.. code-block:: bash + + --- + - hosts: localhost + remote_user: root + roles: + - role: ansible.kubernetes-modules + - role: hello-world + +Because the role is referenced, ``hello-world`` is able to access the modules, and use them to deploy an application. + +The modules are found in the ``library`` folder of the role. Each includes full documentation for parameters and the returned data structure. However, not all modules include examples, only those where `testing data `_ has been created. + +Authenticating with the API +--------------------------- + +By default the OpenShift Rest Client will look for ``~/.kube/config``, and if found, connect using the active context. You can override the location of the file using the``kubeconfig`` parameter, and the context, using the ``context`` parameter. + +Basic authentication is also supported using the ``username`` and ``password`` options. You can override the URL using the ``host`` parameter. Certificate authentication works through the ``ssl_ca_cert``, ``cert_file``, and ``key_file`` parameters, and for token authentication, use the ``api_key`` parameter. + +To disable SSL certificate verification, set ``verify_ssl`` to false. + +Filing issues +````````````` + +If you find a bug or have a suggestion regarding individual modules or the role, please file issues at `OpenShift Rest Client issues `_. + +There is also a utility module, k8s_common.py, that is part of the `Ansible `_ repo. If you find a bug or have suggestions regarding it, please file issues at `Ansible issues `_. + diff -Nru ansible-2.3.2.0/docs/docsite/rst/guide_rolling_upgrade.rst ansible-2.4.0.0/docs/docsite/rst/guide_rolling_upgrade.rst --- ansible-2.3.2.0/docs/docsite/rst/guide_rolling_upgrade.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/guide_rolling_upgrade.rst 2017-09-19 17:10:47.000000000 +0000 @@ -100,7 +100,7 @@ all systems, and then a series of application-specific roles that install and configure particular parts of the site. Roles can have variables and dependencies, and you can pass in parameters to roles to modify their behavior. -You can read more about roles in the :doc:`playbooks_roles` section. 
+You can read more about roles in the :doc:`playbooks_reuse_roles` section. .. _lamp_group_variables: @@ -276,7 +276,7 @@ The lamp_haproxy example discussed here. :doc:`playbooks` An introduction to playbooks - :doc:`playbooks_roles` + :doc:`playbooks_reuse_roles` An introduction to playbook roles :doc:`playbooks_variables` An introduction to Ansible variables diff -Nru ansible-2.3.2.0/docs/docsite/rst/index.rst ansible-2.4.0.0/docs/docsite/rst/index.rst --- ansible-2.3.2.0/docs/docsite/rst/index.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/index.rst 2017-09-19 17:10:47.000000000 +0000 @@ -7,7 +7,7 @@ Welcome to the Ansible documentation! Ansible is an IT automation tool. It can configure systems, deploy software, and orchestrate more advanced IT tasks -such as continuous deployments or zero downtime rolling updates. +such as continuous deployments or zero downtime rolling updates. Ansible's main goals are simplicity and ease-of-use. It also has a strong focus on security and reliability, featuring a minimum of moving parts, usage of OpenSSH for transport (with an accelerated socket mode and pull modes as alternatives), and a language that is designed around auditability by humans--even those not familiar with the program. @@ -16,7 +16,7 @@ Ansible manages machines in an agent-less manner. There is never a question of how to upgrade remote daemons or the problem of not being able to manage systems because daemons are uninstalled. Because OpenSSH is one of the most peer-reviewed open source components, security exposure is greatly reduced. Ansible is decentralized--it relies on your existing OS credentials to control access to remote machines. If needed, Ansible can easily connect with Kerberos, LDAP, and other centralized authentication management systems. -This documentation covers the current released version of Ansible (|version|) and also some development version features (|versiondev|). For recent features, we note in each section the version of Ansible where the feature was added. +This documentation covers the current released version of Ansible (2.4) and also some development version features ('devel'). For recent features, we note in each section the version of Ansible where the feature was added. Ansible, Inc. releases a new major release of Ansible approximately every two months. The core application evolves somewhat conservatively, valuing simplicity in language design and setup. However, the community around new modules and plugins being developed and contributed moves very quickly, typically adding 20 or so new modules in each release. @@ -31,6 +31,8 @@ playbooks_special_topics modules modules_by_category + vault + command_line_tools guides dev_guide/index tower @@ -38,8 +40,9 @@ galaxy test_strategies faq + config glossary YAMLSyntax - porting_guide_2.0 + porting_guides python_3_support - + release_and_maintenance diff -Nru ansible-2.3.2.0/docs/docsite/rst/intro_configuration.rst ansible-2.4.0.0/docs/docsite/rst/intro_configuration.rst --- ansible-2.3.2.0/docs/docsite/rst/intro_configuration.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/intro_configuration.rst 2017-09-19 17:10:47.000000000 +0000 @@ -402,6 +402,42 @@ forks = 5 +.. _fact_caching: + +fact_caching +============ + +This option allows you to configure fact caching. When a fact cache +is enabled and there is valid data for a host, Ansible will use that rather than running an implicit ``setup`` job on a remote host. 
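[Editor's illustration] The fact caching option introduced above works together with the two related settings documented in the next entries, ``fact_caching_connection`` and ``fact_caching_timeout``. A minimal sketch of how the three might be combined in ``ansible.cfg``, assuming the ``jsonfile`` plugin and an illustrative cache directory:

.. code-block:: ini

    [defaults]
    # use the jsonfile cache plugin instead of re-running the implicit setup task
    fact_caching = jsonfile
    # plugin-dependent location; for jsonfile this is a local directory (illustrative path)
    fact_caching_connection = /tmp/ansible_facts_cache
    # keep cached facts for one day (in seconds); 0 disables expiry
    fact_caching_timeout = 86400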
+ +The value of this option should be the name of a cache plugin. +Current versions of Ansible include ``redis`` and ``jsonfile``:: + + fact_caching = jsonfile + +.. _fact_caching_connection: + +fact_caching_connection +======================= + +This option tells Ansible where to cache facts. The value is plugin +dependent. For the ``jsonfile`` plugin, it should be a path to a +local directory. For the ``redis`` plugin, the value is a +``host:port:database`` triplet:: + + fact_caching_connection = localhost:6379:0 + +.. _fact_caching_timeout: + +fact_caching_timeout +==================== + +This option tells Ansible when to expire values from the cache. +Setting this value to 0 effectively disables expiry, and a positive +value is a TTL in seconds:: + + fact_caching_timeout = 86400 + .. _fact_path: fact_path @@ -521,12 +557,12 @@ inventory_ignore_extensions =========================== -Coma-separated list of file extension patterns to ignore when Ansible inventory +Comma-separated list of file extension patterns to ignore when Ansible inventory is a directory with multiple sources (static and dynamic):: inventory_ignore_extensions = ~, .orig, .bak, .ini, .cfg, .retry, .pyc, .pyo -This option can be overridden by setting ``ANSIBLE_INVENTORY_IGNORE`` +This option can be overridden by setting :envvar:`ANSIBLE_INVENTORY_IGNORE` environment variable. .. _jinja2_extensions: @@ -583,7 +619,7 @@ itself. The default location is a subdirectory of the user's home directory. If you'd like to change that, you can do so by altering this setting:: - local_tmp = $HOME/.ansible/tmp + local_tmp = ~/.ansible/tmp Ansible will then choose a random directory name inside this location. @@ -621,15 +657,16 @@ .. versionadded:: 2.3 -This allows changing how multiple --tags and --skip-tags arguments are handled -on the command line. In Ansible up to and including 2.3, specifying --tags -more than once will only take the last value of --tags. Setting this config -value to True will mean that all of the --tags options will be merged -together. The same holds true for --skip-tags. +This allows changing how multiple :option:`--tags` and :option:`--skip-tags` +arguments are handled on the command line. Specifying :option:`--tags` more +than once merges all of the :option:`--tags` options together. If you want +the pre-2.4.x behaviour where only the last value of :option:`--tags` is used, +then set this to False. The same holds true for :option:`--skip-tags`. .. note:: The default value for this in 2.3 is False. In 2.4, the - default value will be True. After 2.4, the option is going away. - Multiple --tags and multiple --skip-tags will always be merged together. + default value is True. After 2.8, the option will be removed. + Multiple :option:`--tags` and multiple :option:`--skip-tags` will always + be merged together. .. _module_lang: @@ -761,7 +798,7 @@ cases, you may not wish to use the default location and would like to change the path. You can do so by altering this setting:: - remote_tmp = $HOME/.ansible/tmp + remote_tmp = ~/.ansible/tmp The default is to use a subdirectory of the user's home directory. Ansible will then choose a random directory name inside this location. @@ -776,6 +813,27 @@ remote_user = root + +.. _restrict_facts_namespace: + +restrict_facts_namespace +======================== + +.. versionadded:: 2.4 + +This allows restricting facts in their own namespace (under ansible_facts) instead of pushing them into the main. +False by default. 
Can also be set via the environment variable :envvar:`ANSIBLE_RESTRICT_FACTS`. Using `ansible_system` as an example: + +When False:: + + - debug: var=ansible_system + + +When True:: + + - debug: var=ansible_facts.ansible_system + + .. _retry_files_enabled: retry_files_enabled @@ -1098,7 +1156,16 @@ It is the ``%(directory)s`` part of the ``control_path`` option. This defaults to:: - control_path_dir=$HOME/.ansible/cp + control_path_dir=~/.ansible/cp + +.. _retries: + +retries +======= + +Adds the option to retry failed ssh executions if the failure is encountered in ssh itself, not the remote command. This can be helpful if there are transient network issues. Enabled by setting retries to an integer greater than 1. Defaults to:: + + retries = 0 .. _scp_if_ssh: diff -Nru ansible-2.3.2.0/docs/docsite/rst/intro_dynamic_inventory.rst ansible-2.4.0.0/docs/docsite/rst/intro_dynamic_inventory.rst --- ansible-2.3.2.0/docs/docsite/rst/intro_dynamic_inventory.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/intro_dynamic_inventory.rst 2017-09-19 17:10:47.000000000 +0000 @@ -247,7 +247,7 @@ If you use an OpenStack based cloud, instead of manually maintaining your own inventory file, you can use the openstack.py dynamic inventory to pull information about your compute instances directly from OpenStack. -You can download the latest version of the OpenStack inventory script at: https://raw.githubusercontent.com/ansible/ansible/devel/contrib/inventory/openstack.py +You can download the latest version of the OpenStack inventory script `here `_ You can use the inventory script explicitly (by passing the `-i openstack.py` argument to Ansible) or implicitly (by placing the script at `/etc/ansible/hosts`). @@ -265,13 +265,13 @@ .. note:: - An OpenStack RC file contains the environment variables required by the client tools to establish a connection with the cloud provider, such as the authentication URL, user name, password and region name. For more information on how to download, create or source an OpenStack RC file, please refer to `Set environment variables using the OpenStack RC file `_. + An OpenStack RC file contains the environment variables required by the client tools to establish a connection with the cloud provider, such as the authentication URL, user name, password and region name. For more information on how to download, create or source an OpenStack RC file, please refer to `Set environment variables using the OpenStack RC file `_. You can confirm the file has been successfully sourced by running a simple command, such as `nova list` and ensuring it return no errors. .. note:: - The OpenStack command line clients are required to run the `nova list` command. For more information on how to install them, please refer to `Install the OpenStack command-line clients `_. + The OpenStack command line clients are required to run the `nova list` command. For more information on how to install them, please refer to `Install the OpenStack command-line clients `_. You can test the OpenStack dynamic inventory script manually to confirm it is working as expected:: @@ -348,7 +348,7 @@ ~, .orig, .bak, .ini, .cfg, .retry, .pyc, .pyo -You can replace this list with your own selection by configuring an ``inventory_ignore_extensions`` list in ansible.cfg, or setting the ANSIBLE_INVENTORY_IGNORE environment variable. The value in either case should be a comma-separated list of patterns, as shown above. 
+You can replace this list with your own selection by configuring an ``inventory_ignore_extensions`` list in ansible.cfg, or setting the :envvar:`ANSIBLE_INVENTORY_IGNORE` environment variable. The value in either case should be a comma-separated list of patterns, as shown above. Any ``group_vars`` and ``host_vars`` subdirectories in an inventory directory will be interpreted as expected, making inventory directories a powerful way to organize different sets of configurations. diff -Nru ansible-2.3.2.0/docs/docsite/rst/intro_getting_started.rst ansible-2.4.0.0/docs/docsite/rst/intro_getting_started.rst --- ansible-2.3.2.0/docs/docsite/rst/intro_getting_started.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/intro_getting_started.rst 2017-09-19 17:10:47.000000000 +0000 @@ -8,14 +8,13 @@ Foreword ```````` -Now that you've read :doc:`intro_installation` and installed Ansible, it's time to dig in and get -started with some commands. +Now that you've read :doc:`intro_installation` and installed Ansible, it's time to get +started with some ad-hoc commands. What we are showing first are not the powerful configuration/deployment/orchestration features of Ansible. These features are handled by playbooks which are covered in a separate section. -This section is about how to initially get going. Once you have these concepts down, read :doc:`intro_adhoc` for some more -detail, and then you'll be ready to dive into playbooks and explore the most interesting parts! +This section is about how to initially get Ansible running. Once you understand these concepts, read :doc:`intro_adhoc` for some more detail, and then you'll be ready to begin learning about playbooks and explore the most interesting parts! .. _remote_connection_information: @@ -136,7 +135,7 @@ [defaults] host_key_checking = False -Alternatively this can be set by an environment variable: +Alternatively this can be set by the :envvar:`ANSIBLE_HOST_KEY_CHECKING` environment variable: .. code-block:: bash diff -Nru ansible-2.3.2.0/docs/docsite/rst/intro_installation.rst ansible-2.4.0.0/docs/docsite/rst/intro_installation.rst --- ansible-2.3.2.0/docs/docsite/rst/intro_installation.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/intro_installation.rst 2017-09-19 17:10:47.000000000 +0000 @@ -3,14 +3,6 @@ .. contents:: Topics -.. _getting_ansible: - -Getting Ansible -``````````````` - -You may also wish to follow the `GitHub project `_ if -you have a GitHub account. This is also where we keep the issue tracker for sharing -bugs and feature ideas. .. _what_will_be_installed: @@ -45,10 +37,10 @@ Control Machine Requirements ```````````````````````````` -Currently Ansible can be run from any machine with Python 2.6 or 2.7 installed (Windows isn't supported for the control machine). +Currently Ansible can be run from any machine with Python 2 (versions 2.6 or 2.7) or Python 3 (versions 3.5 and higher) installed (Windows isn't supported for the control machine). .. note:: - Ansible 2.2 introduces a tech preview of support for Python 3. For more information, see `Python 3 Support `_. + Ansible 2.2 introduces a tech preview of support for Python 3 (versions 3.5 and higher). For more information, see `Python 3 Support `_. This includes Red Hat, Debian, CentOS, OS X, any of the BSDs, and so on. @@ -66,10 +58,9 @@ Managed Node Requirements ````````````````````````` -On the managed nodes, you need a way to communicate, which is normally ssh. By default this uses sftp. 
If that's not available, you can switch to scp in ansible.cfg. -You also need Python 2.4 or later. If you are running less than Python 2.5 on the remotes, you will also need: - -* ``python-simplejson`` +On the managed nodes, you need a way to communicate, which is normally ssh. By +default this uses sftp. If that's not available, you can switch to scp in +:file:`ansible.cfg`. You also need Python 2.6 or later. .. note:: @@ -91,7 +82,7 @@ Ansible 2.2 introduces a tech preview of support for Python 3. For more information, see `Python 3 Support `_. By default, Ansible uses Python 2 in order to maintain compatibility with older distributions - such as RHEL 5 and RHEL 6. However, some Linux distributions (Gentoo, Arch) may not have a + such as RHEL 6. However, some Linux distributions (Gentoo, Arch) may not have a Python 2.X interpreter installed by default. On those systems, you should install one, and set the 'ansible_python_interpreter' variable in inventory (see :doc:`intro_inventory`) to point at your 2.X Python. Distributions like Red Hat Enterprise Linux, CentOS, Fedora, and Ubuntu all have a 2.X interpreter installed @@ -113,25 +104,27 @@ Latest Release Via Yum ++++++++++++++++++++++ -RPMs are available from yum for `EPEL -`_ 6, 7, and currently supported -Fedora distributions. +.. note:: We’ve changed how the Ansible community packages are distributed. + For users of RHEL/CentOS/Scientific Linux version 7, the Ansible community RPM + package will transition from the EPEL repository to the Extras channel. There will be no + change for version 6 of RHEL/CentOS/Scientific Linux since Extras is not a part of version 6. -Ansible itself can manage earlier operating -systems that contain Python 2.4 or higher (so also EL5). +RPMs for RHEL7 are available from `the Extras channel `_. -Fedora users can install Ansible directly, though if you are using RHEL or CentOS and have not already done so, `configure EPEL `_ +RPMs for RHEL6 are available from yum for `EPEL +`_ 6 and currently supported +Fedora distributions. -.. code-block:: bash +Ansible will also have RPMs/YUM-repo available at ``_ with the release version. + + + .. _from_source: Running From Source @@ -305,13 +301,15 @@ If you are intending to use Tower as the Control Machine, do not use a source install. Please use OS package manager (like ``apt/yum``) or ``pip`` to install a stable version. -To install from source. +To install from source, clone the Ansible git repository: .. code-block:: bash $ git clone git://github.com/ansible/ansible.git --recursive $ cd ./ansible +Once git has cloned the Ansible repository, setup the Ansible environment: + Using Bash: .. code-block:: bash @@ -374,6 +372,16 @@ You can also use "sudo make install". +.. _getting_ansible: + +Ansible on GitHub +````````````````` + +You may also wish to follow the `GitHub project `_ if +you have a GitHub account. This is also where we keep the issue tracker for sharing +bugs and feature ideas. + + .. seealso:: :doc:`intro_adhoc` diff -Nru ansible-2.3.2.0/docs/docsite/rst/intro_inventory.rst ansible-2.4.0.0/docs/docsite/rst/intro_inventory.rst --- ansible-2.3.2.0/docs/docsite/rst/intro_inventory.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/intro_inventory.rst 2017-09-19 17:10:47.000000000 +0000 @@ -5,22 +5,24 @@ .. contents:: Topics -Ansible works against multiple systems in your infrastructure at the -same time. 
It does this by selecting portions of systems listed in -Ansible's inventory file, which defaults to being saved in -the location ``/etc/ansible/hosts``. You can specify a different inventory file using the -``-i `` option on the command line. - -Not only is this inventory configurable, but you can also use -multiple inventory files at the same time (explained below) and also -pull inventory from dynamic or cloud sources, as described in :doc:`intro_dynamic_inventory`. +Ansible works against multiple systems in your infrastructure at the same time. +It does this by selecting portions of systems listed in Ansible's inventory, +which defaults to being saved in the location ``/etc/ansible/hosts``. +You can specify a different inventory file using the ``-i `` option on the command line. + +Not only is this inventory configurable, but you can also use multiple inventory files at the same time and +pull inventory from dynamic or cloud sources or different formats (YAML, ini, etc), as described in :doc:`intro_dynamic_inventory`. +Introduced in version 2.4, Ansible has inventory plugins to make this flexible and customizable. .. _inventoryformat: Hosts and Groups ++++++++++++++++ -The format for ``/etc/ansible/hosts`` is an INI-like format and looks like this:: +The inventory file can be in one of many formats, depending on the inventory plugins you have. +For this example, the format for ``/etc/ansible/hosts`` is an INI-like (one of Ansible's defaults) and looks like this: + +.. code-block:: ini mail.example.com @@ -36,31 +38,73 @@ The headings in brackets are group names, which are used in classifying systems and deciding what systems you are controlling at what times and for what purpose. +A YAML version would look like: + +.. code-block:: yaml + + all: + hosts: + mail.example.com + children: + webservers: + hosts: + foo.example.com: + bar.example.com: + dbservers: + hosts: + one.example.com: + two.example.com: + three.example.com: + + It is ok to put systems in more than one group, for instance a server could be both a webserver and a dbserver. If you do, note that variables will come from all of the groups they are a member of. Variable precedence is detailed in a later chapter. -If you have hosts that run on non-standard SSH ports you can put the port number -after the hostname with a colon. Ports listed in your SSH config file won't be used with the `paramiko` -connection but will be used with the `openssh` connection. +If you have hosts that run on non-standard SSH ports you can put the port number after the hostname with a colon. +Ports listed in your SSH config file won't be used with the `paramiko` connection but will be used with the `openssh` connection. + +To make things explicit, it is suggested that you set them if things are not running on the default port: -To make things explicit, it is suggested that you set them if things are not running on the default port:: +.. code-block:: ini badwolf.example.com:5309 -Suppose you have just static IPs and want to set up some aliases that live in your host file, or you are connecting through tunnels. You can also describe hosts like this:: +Suppose you have just static IPs and want to set up some aliases that live in your host file, or you are connecting through tunnels. +You can also describe hosts via variables: + +In INI: + +.. 
code-block:: ini jumper ansible_port=5555 ansible_host=192.0.2.50 -In the above example, trying to ansible against the host alias "jumper" (which may not even be a real hostname) will contact 192.0.2.50 on port 5555. Note that this is using a feature of the inventory file to define some special variables. Generally speaking this is not the best -way to define variables that describe your system policy, but we'll share suggestions on doing this later. We're just getting started. +In YAML: -Adding a lot of hosts? If you have a lot of hosts following similar patterns you can do this rather than listing each hostname:: +.. code-block:: yaml + hosts: + jumper: + ansible_port: 5555 + ansible_host: 192.0.2.50 + +In the above example, trying to ansible against the host alias "jumper" (which may not even be a real hostname) will contact 192.0.2.50 on port 5555. +Note that this is using a feature of the inventory file to define some special variables. +Generally speaking, this is not the best way to define variables that describe your system policy, but we'll share suggestions on doing this later. + +.. note:: Values passed in the INI format using the ``key=value`` syntax are not interpreted as Python literal structure + (strings, numbers, tuples, lists, dicts, booleans, None), but as a string. For example ``var=FALSE`` would create a string equal to 'FALSE'. + Do not rely on types set during definition, always make sure you specify type with a filter when needed when consuming the variable. + +If you are adding a lot of hosts following similar patterns, you can do this rather than listing each hostname: + +.. code-block:: ini [webservers] www[01:50].example.com -For numeric patterns, leading zeros can be included or removed, as desired. Ranges are inclusive. You can also define alphabetic ranges:: +For numeric patterns, leading zeros can be included or removed, as desired. Ranges are inclusive. You can also define alphabetic ranges: + +.. code-block:: ini [databases] db-[a:f].example.com @@ -70,7 +114,7 @@ You can also select the connection type and user on a per host basis: -:: +.. code-block:: ini [targets] @@ -78,15 +122,16 @@ other1.example.com ansible_connection=ssh ansible_user=mpdehaan other2.example.com ansible_connection=ssh ansible_user=mdehaan -As mentioned above, setting these in the inventory file is only a shorthand, and we'll discuss how to store them in individual files -in the 'host_vars' directory a bit later on. +As mentioned above, setting these in the inventory file is only a shorthand, and we'll discuss how to store them in individual files in the 'host_vars' directory a bit later on. .. _host_variables: Host Variables ++++++++++++++ -As alluded to above, it is easy to assign variables to hosts that will be used later in playbooks:: +As described above, it is easy to assign variables to hosts that will be used later in playbooks: + +.. code-block:: ini [atlanta] host1 http_port=80 maxRequestsPerChild=808 @@ -97,7 +142,11 @@ Group Variables +++++++++++++++ -Variables can also be applied to an entire group at once:: +Variables can also be applied to an entire group at once: + +The INI way: + +.. code-block:: ini [atlanta] host1 @@ -107,12 +156,30 @@ ntp_server=ntp.atlanta.example.com proxy=proxy.atlanta.example.com +The YAML version: + +.. 
code-block:: yaml + + atlanta: + hosts: + host1: + host2: + vars: + ntp_server: ntp.atlanta.example.com + proxy: proxy.atlanta.example.com + +Be aware that this is only a convenient way to apply variables to multiple hosts at once; even though you can target hosts by group, **variables are always flattened to the host level** before a play is executed. + .. _subgroups: Groups of Groups, and Group Variables +++++++++++++++++++++++++++++++++++++ -It is also possible to make groups of groups using the ``:children`` suffix. Just like above, you can apply variables using ``:vars``:: +It is also possible to make groups of groups using the ``:children`` suffix in INI or the ``children:`` entry in YAML. +You can apply variables using ``:vars`` or ``vars:``: + + +.. code-block:: ini [atlanta] host1 @@ -138,8 +205,38 @@ southwest northwest -If you need to store lists or hash data, or prefer to keep host and group specific variables -separate from the inventory file, see the next section. +.. code-block:: yaml + + all: + children: + usa: + children: + southeast: + children: + atlanta: + hosts: + host1: + host2: + raleigh: + hosts: + host2: + host3: + vars: + some_server: foo.southeast.example.com + halon_system_timeout: 30 + self_destruct_countdown: 60 + escape_pods: 2 + northeast: + northwest: + southwest: + +If you need to store lists or hash data, or prefer to keep host and group specific variables separate from the inventory file, see the next section. +Child groups have a couple of properties to note: + + - Any host that is member of a child group is automatically a member of the parent group. + - A child group's variables will have higher precedence (override) a parent group's variables. + - Groups can have multiple parents and children, but not circular relationships. + - Hosts can also be in multiple groups, but there will only be **one** instance of a host, merging the data from the multiple groups. .. _default_groups: @@ -148,20 +245,20 @@ There are two default groups: ``all`` and ``ungrouped``. ``all`` contains every host. ``ungrouped`` contains all hosts that don't have another group aside from ``all``. +Every host will always belong to at least 2 groups. +Though ``all`` and ``ungrouped`` are always present, they can be implicit and not appear in group listings like ``group_names``. .. _splitting_out_vars: Splitting Out Host and Group Specific Data ++++++++++++++++++++++++++++++++++++++++++ -The preferred practice in Ansible is actually not to store variables in the main inventory file. +The preferred practice in Ansible is to not store variables in the main inventory file. -In addition to storing variables directly in the INI file, host -and group variables can be stored in individual files relative to the -inventory file. +In addition to storing variables directly in the inventory file, host and group variables can be stored in individual files relative to the inventory file (not directory, it is always the file). -These variable files are in YAML format. Valid file extensions include '.yml', '.yaml', '.json', -or no file extension. See :doc:`YAMLSyntax` if you are new to YAML. +These variable files are in YAML format. Valid file extensions include '.yml', '.yaml', '.json', or no file extension. +See :doc:`YAMLSyntax` if you are new to YAML. Assuming the inventory file path is:: @@ -182,9 +279,9 @@ ntp_server: acme.example.org database_server: storage.example.org -It is ok if these files do not exist, as this is an optional feature. 
+It is okay if these files do not exist, as this is an optional feature. -As an advanced use-case, you can create *directories* named after your groups or hosts, and +As an advanced use case, you can create *directories* named after your groups or hosts, and Ansible will read all the files in these directories. An example with the 'raleigh' group:: /etc/ansible/group_vars/raleigh/db_settings @@ -217,7 +314,7 @@ .. include:: ../rst_common/ansible_ssh_changes_note.rst -SSH connection: +General for all connections: ansible_host The name of the host to connect to, if different from the alias you wish to give to it. @@ -225,6 +322,10 @@ The ssh port number, if not 22 ansible_user The default ssh user name to use. + + +Specific to the SSH connection: + ansible_ssh_pass The ssh password to use (never store this variable in plain text; always use a vault. See :ref:`best_practices_for_variables_and_vaults`) ansible_ssh_private_key_file @@ -241,10 +342,7 @@ This setting is always appended to the default :command:`ssh` command line. ansible_ssh_pipelining Determines whether or not to use SSH pipelining. This can override the ``pipelining`` setting in :file:`ansible.cfg`. - -.. versionadded:: 2.2 - -ansible_ssh_executable +ansible_ssh_executable (added in version 2.2) This setting overrides the default behavior to use the system :command:`ssh`. This can override the ``ssh_executable`` setting in :file:`ansible.cfg`. @@ -258,6 +356,10 @@ Equivalent to ``ansible_sudo_user`` or ``ansible_su_user``, allows to set the user you become through privilege escalation ansible_become_pass Equivalent to ``ansible_sudo_pass`` or ``ansible_su_pass``, allows you to set the privilege escalation password (never store this variable in plain text; always use a vault. See :ref:`best_practices_for_variables_and_vaults`) +ansible_become_exe + Equivalent to ``ansible_sudo_exe`` or ``ansible_su_exe``, allows you to set the executable for the escalation method selected +ansible_become_flags + Equivalent to ``ansible_sudo_flags`` or ``ansible_su_flags``, allows you to set the flags passed to the selected escalation method. This can be also set globally in :file:`ansible.cfg` in the ``sudo_flags`` option Remote host environment parameters: @@ -284,7 +386,7 @@ to use :command:`/bin/sh` (i.e. :command:`/bin/sh` is not installed on the target machine or cannot be run from sudo.). -Examples from a host file:: +Examples from an Ansible-INI host file:: some_host ansible_port=2222 ansible_user=manager aws_host ansible_ssh_private_key_file=/home/example/.ssh/aws.pem @@ -337,6 +439,9 @@ path: "/var/jenkins_home/.ssh/jupiter" state: directory +.. note:: If you're reading the docs from the beginning, this may be the first example you've seen of an Ansible playbook. This is not an inventory file. + Playbooks will be covered in great detail later in the docs. + .. seealso:: :doc:`intro_dynamic_inventory` @@ -349,3 +454,4 @@ Questions? Help? Ideas? 
Stop by the list on Google Groups `irc.freenode.net `_ #ansible IRC chat channel + diff -Nru ansible-2.3.2.0/docs/docsite/rst/intro_networking.rst ansible-2.4.0.0/docs/docsite/rst/intro_networking.rst --- ansible-2.3.2.0/docs/docsite/rst/intro_networking.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/intro_networking.rst 2017-09-19 17:10:47.000000000 +0000 @@ -140,11 +140,15 @@ The following environment variables are available to Ansible networking modules: -username ANSIBLE_NET_USERNAME -password ANSIBLE_NET_PASSWORD -ssh_keyfile ANSIBLE_NET_SSH_KEYFILE -authorize ANSIBLE_NET_AUTHORIZE -auth_pass ANSIBLE_NET_AUTH_PASS +username :envvar:`ANSIBLE_NET_USERNAME` + +password :envvar:`ANSIBLE_NET_PASSWORD` + +ssh_keyfile :envvar:`ANSIBLE_NET_SSH_KEYFILE` + +authorize :envvar:`ANSIBLE_NET_AUTHORIZE` + +auth_pass :envvar:`ANSIBLE_NET_AUTH_PASS` Variables are evaulated in the following order, listed from lowest to highest priority: @@ -171,7 +175,7 @@ Conditional statements evaluate the results from the commands that are executed remotely on the device. Once the task executes the command -set, the wait_for argument can be used to evaluate the results before +set, the waitfor argument can be used to evaluate the results before returning control to the Ansible playbook. For example:: diff -Nru ansible-2.3.2.0/docs/docsite/rst/intro.rst ansible-2.4.0.0/docs/docsite/rst/intro.rst --- ansible-2.3.2.0/docs/docsite/rst/intro.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/intro.rst 2017-09-19 17:10:47.000000000 +0000 @@ -1,7 +1,7 @@ Introduction ============ -Before we dive into the really fun parts -- playbooks, configuration management, deployment, and orchestration -- we'll learn how to get Ansible installed and cover some basic concepts. We'll also go over how to execute ad-hoc commands in parallel across your nodes using /usr/bin/ansible, and see what sort of modules are available in Ansible's core (you can also write your own, which is covered later). +Before we start exploring the main components of Ansible -- playbooks, configuration management, deployment, and orchestration -- we'll learn how to get Ansible installed and cover some basic concepts. We'll also go over how to execute ad-hoc commands in parallel across your nodes using /usr/bin/ansible, and see what modules are available in Ansible's core (you can also write your own, which is covered later). .. toctree:: :maxdepth: 1 diff -Nru ansible-2.3.2.0/docs/docsite/rst/intro_windows.rst ansible-2.4.0.0/docs/docsite/rst/intro_windows.rst --- ansible-2.3.2.0/docs/docsite/rst/intro_windows.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/intro_windows.rst 2017-09-19 17:10:47.000000000 +0000 @@ -14,6 +14,7 @@ native PowerShell remoting, rather than SSH. Ansible will still be run from a Linux control machine, and uses the "winrm" Python module to talk to remote hosts. +While not supported by Microsoft or Ansible, this Linux control machine can be a Windows Subsystem for Linux (WSL) bash shell. No additional software needs to be installed on the remote machines for Ansible to manage them, it still maintains the agentless properties that make it popular on Linux/Unix. @@ -31,6 +32,44 @@ .. Note:: on distributions with multiple python versions, use pip2 or pip2.x, where x matches the python minor version Ansible is running under. +.. 
_windows_control_machine: + +Using a Windows control machine +``````````````````````````````` +A Linux control machine is required to manage Windows hosts. This Linux control machine can be a Windows Subsystem for Linux (WSL) bash shell. + + +.. Note:: Running Ansible from a Windows control machine directly is not a goal of the project. Refrain from asking for this feature, as it limits what technologies, features, and code we can use in the main project in the future. + +.. Note:: The Windows Subsystem for Linux (Beta) is not supported by Microsoft or Ansible and should not be used for production systems. + +If you would like to experiment with the Windows Subsystem for Linux (WSL), first enable the Windows Subsystem for Linux using +`these instructions `_. +This requires a reboot. + +Once WSL is enabled, you can open the Bash terminal. At the prompt, you can quickly start using the latest Ansible release by running the following commands:: + + sudo apt-get update + sudo apt-get install python-pip git libffi-dev libssl-dev -y + pip install ansible pywinrm + + # this step is only necessary for Windows builds earlier than 16188, and must be repeated each time bash is launched, + # unless bash is launched as ``bash --login`` + # see https://github.com/Microsoft/BashOnWindows/issues/2148 and + # https://github.com/Microsoft/BashOnWindows/issues/816#issuecomment-301216901 for details + source ~/.profile + +After you've successfully run these commands, you can start to create your inventory, write example playbooks and start targeting systems using the plethora of available Windows modules. + +If you want to run Ansible from source for development purposes, simply uninstall the pip-installed version (which will leave all the necessary dependencies behind), then clone the Ansible source, and run the hacking script to configure it to run from source:: + + pip uninstall ansible -y + git clone https://github.com/ansible/ansible.git + source ansible/hacking/env-setup + +.. Note:: Ansible is also reported to "work" on Cygwin, but installation is more cumbersome, and will incur sporadic failures due to Cygwin's implementation of ``fork()``. + + Authentication Options `````````````````````` @@ -149,6 +188,15 @@ klist +Automatic kerberos ticket management +------------------------------------ + +Ansible defaults to automatically managing kerberos tickets (as of Ansible 2.3) when both username and password are specified for a host that's configured for kerberos. A new ticket is created in a temporary credential cache for each host, before each task executes (to minimize the chance of ticket expiration). The temporary credential caches are deleted after each task, and will not interfere with the default credential cache. + +To disable automatic ticket management (e.g., to use an existing SSO ticket or call ``kinit`` manually to populate the default credential cache), set ``ansible_winrm_kinit_mode=manual`` via inventory. + +Automatic ticket management requires a standard ``kinit`` binary on the control host system path. To specify a different location or binary name, set the ``ansible_winrm_kinit_cmd`` inventory var to the fully-qualified path to an MIT krbv5 ``kinit``-compatible binary. + Troubleshooting kerberos connections ------------------------------------ @@ -252,8 +300,10 @@ * ``ansible_winrm_path``: Specify an alternate path to the WinRM endpoint. Ansible uses ``/wsman`` by default. * ``ansible_winrm_realm``: Specify the realm to use for Kerberos authentication. 
If the username contains ``@``, Ansible will use the part of the username after ``@`` by default. * ``ansible_winrm_transport``: Specify one or more transports as a comma-separated list. By default, Ansible will use ``kerberos,plaintext`` if the ``kerberos`` module is installed and a realm is defined, otherwise ``plaintext``. -* ``ansible_winrm_server_cert_validation``: Specify the server certificate validation mode (``ignore`` or ``validate``). Ansible defaults to ``validate`` on Python 2.7.9 and higher, which will result in certificate validation errors against the Windows self-signed certificates. Unless verifiable certificates have been configured on the WinRM listeners, this should be set to ``ignore`` +* ``ansible_winrm_server_cert_validation``: Specify the server certificate validation mode (``ignore`` or ``validate``). Ansible defaults to ``validate`` on Python 2.7.9 and higher, which will result in certificate validation errors against the Windows self-signed certificates. Unless verifiable certificates have been configured on the WinRM listeners, this should be set to ``ignore``. * ``ansible_winrm_kerberos_delegation``: Set to ``true`` to enable delegation of commands on the remote host when using kerberos. +* ``ansible_winrm_operation_timeout_sec``: Increase the default timeout for WinRM operations (default: ``20``). +* ``ansible_winrm_read_timeout_sec``: Increase the WinRM read timeout if you experience read timeout errors (default: ``30``), e.g. intermittent network issues. * ``ansible_winrm_*``: Any additional keyword arguments supported by ``winrm.Protocol`` may be provided. .. _windows_system_prep: @@ -317,20 +367,24 @@ * add_host * assert -* async +* async_status * debug * fail * fetch * group_by +* include +* include_role * include_vars * meta * pause * raw * script * set_fact +* set_stats * setup * slurp * template (also: win_template) +* wait_for_connection Some modules can be utilised in playbooks that target windows by delegating to localhost, depending on what you are attempting to achieve. For example, ``assemble`` can be used to create a file on your ansible controller that is then @@ -375,17 +429,6 @@ What modules you see in ``windows/`` are just a start. Additional modules may be submitted as pull requests to github. -.. _windows_and_linux_control_machine: - -Reminder: You Must Have a Linux Control Machine -``````````````````````````````````````````````` - -Note running Ansible from a Windows control machine is NOT a goal of the project. Refrain from asking for this feature, -as it limits what technologies, features, and code we can use in the main project in the future. A Linux control machine -will be required to manage Windows hosts. - -Cygwin is not supported, so please do not ask questions about Ansible running from Cygwin. - .. _windows_facts: Windows Facts @@ -436,7 +479,7 @@ - name: Move file on remote Windows Server from one location to another win_command: Powershell.exe "Move-Item C:\teststuff\myfile.conf C:\builds\smtp.conf" -Bear in mind that using ``win_command`` or ``win_shell`` will always report ``changed``, and it is your responsiblity to ensure PowerShell will need to handle idempotency as appropriate (the move examples above are inherently not idempotent), so where possible use (or write) a module. 
+Bear in mind that using ``win_command`` or ``win_shell`` will always report ``changed``, and it is your responsibility to ensure PowerShell will need to handle idempotency as appropriate (the move examples above are inherently not idempotent), so where possible use (or write) a module. Here's an example of how to use the ``win_stat`` module to test for file existence. Note that the data returned by the ``win_stat`` module is slightly different than what is provided by the Linux equivalent:: diff -Nru ansible-2.3.2.0/docs/docsite/rst/modules_support.rst ansible-2.4.0.0/docs/docsite/rst/modules_support.rst --- ansible-2.3.2.0/docs/docsite/rst/modules_support.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/modules_support.rst 2017-09-19 17:10:47.000000000 +0000 @@ -1,44 +1,50 @@ -Module Support --------------- +Module Maintenance & Support +---------------------------- .. toctree:: :maxdepth: 1 -Ansible has many modules, but not all of them are maintained by the core project commiters. Each module should have associated metadata that indicates which of the following categories they fall into. This should be visible in each module's documentation. - -Documentation updates for each module can also be edited directly in the module and by submitting a pull request to the module source code; just look for the "DOCUMENTATION" block in the source tree. - -If you believe you have found a bug in a module and are already running the latest stable or development version of Ansible, first look in the `issue tracker at github.com/ansible/ansible `_ to see if a bug has already been filed. If not, we would be grateful if you would file one. - -Should you have a question rather than a bug report, inquiries are welcome on the `ansible-project google group `_ or on Ansible's "#ansible" channel, located on irc.freenode.net. - -For development-oriented topics, use the `ansible-devel google group `_ or Ansible's "#ansible" and "#ansible-devel" channels, located on irc.freenode.net. You should also read :doc:`community`, :doc:`dev_guide/developing_test_pr` and :doc:`dev_guide/developing_modules`. - -The modules are hosted on GitHub in a subdirectory of the `ansible `_ repo. +To help identify maintainers and understand how the included modules are officially supported, each module now has associated metadata that provides additional clarity for maintenance and support. Core ```` -These are modules that the core ansible team maintains and will always ship with ansible itself. -They will also receive slightly higher priority for all requests. Non-core modules are still fully usable. +:doc:`Core modules are maintained by the Ansible Engineering Team`. +These modules are integral to the basic foundations of the Ansible distribution. -Curated +Network ``````` -Some examples of Curated modules are submitted by other companies or maintained by the community. Maintainers of these types of modules must watch for any issues reported or pull requests raised against the module. +:doc:`Network modules are maintained by the Ansible Network Team`. Please note there are :doc:`additional networking modules` that are categorized as Certified or Community not maintained by Ansible. + -Core Committers will review all modules becoming Curated. Core Committers will review proposed changes to existing Curated modules once the community maintainers of the module have approved the changes. Core committers will also ensure that any issues that arise due to Ansible engine changes will be remediated. 
-Also, it is strongly recommended (but not presently required) for these types of modules to have unit tests. +Certified +````````` -These modules are currently shipped with Ansible, but might be shipped separately in the future. +Certified modules are part of a future planned program currently in development. Community ````````` -These modules **are not** supported by Core Committers or by companies/partners associated to the module. They are maintained by the community. -They are still fully usable, but the response rate to issues is purely up to the community. Best effort support will be provided but is not covered under any support contracts. +:doc:`Community modules are submitted and maintained by the Ansible community`. These modules are not maintained by Ansible, and are included as a convenience. -These modules are currently shipped with Ansible, but will most likely be shipped separately in the future. +Issue Reporting +``````````````` + +If you believe you have found a bug in a module and are already running the latest stable or development version of Ansible, first look at the `issue tracker in the Ansible repo `_ to see if an issue has already been filed. If not, please file one. + +Should you have a question rather than a bug report, inquiries are welcome on the `ansible-project Google group `_ or on Ansible’s “#ansible” channel, located on irc.freenode.net. + +For development-oriented topics, use the `ansible-devel Google group `_ or Ansible’s #ansible and #ansible-devel channels, located on irc.freenode.net. You should also read :doc:`Community Information & Contributing `, :doc:`Testing Ansible `, and :doc:`Developing Modules `. + +The modules are hosted on GitHub in a subdirectory of the `Ansible `_ repo. + +NOTE: If you have a Red Hat Ansible Engine product subscription, please follow the standard issue reporting process via the Red Hat Customer Portal. + +Support +``````` +For more information on how included Ansible modules are supported by Red Hat, +please refer to the following `knowledgebase article `_ as well as other resources on the `Red Hat Customer Portal. `_ .. seealso:: diff -Nru ansible-2.3.2.0/docs/docsite/rst/network_debug_troubleshooting.rst ansible-2.4.0.0/docs/docsite/rst/network_debug_troubleshooting.rst --- ansible-2.3.2.0/docs/docsite/rst/network_debug_troubleshooting.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/network_debug_troubleshooting.rst 2017-09-19 17:10:47.000000000 +0000 @@ -36,7 +36,7 @@ * Not using ``connection: local`` -.. warning: ``unable to open shell` +.. warning:: ``unable to open shell`` The ``unable to open shell`` message is new in Ansible 2.3, it means that the ``ansible-connection`` daemon has not been able to successfully talk to the remote network device. This generally means that there is an authentication issue. See the "Authentication and connection issues" section @@ -51,7 +51,7 @@ Ansible 2.3 features improved logging to help diagnose and troubleshoot issues regarding Ansible Networking modules. -Because logging is very verbose it is disabled by default. It can be enabled via the ``ANSIBLE_LOG_PATH`` and ``ANISBLE_DEBUG`` options:: +Because logging is very verbose it is disabled by default. It can be enabled via the :envvar:`ANSIBLE_LOG_PATH` and :envvar:`ANSIBLE_DEBUG` options:: # Specify the location for the log file export ANSIBLE_LOG_PATH=~/ansible.log @@ -143,7 +143,7 @@ **Platforms:** Any -The ``unable to open shell`` message is new in Ansible 2.3.
This message means that the ``ansible-connection`` daemon has not been able to successfully talk to the remote network device. This generally means that there is an authentication issue. It is a "catch all" message, meaning you need to enable ``ANSIBLE_LOG_PATH`` to find the underlying issues. +The ``unable to open shell`` message is new in Ansible 2.3. This message means that the ``ansible-connection`` daemon has not been able to successfully talk to the remote network device. This generally means that there is an authentication issue. It is a "catch all" message, meaning you need to enable :ref:`log_path` to find the underlying issues. @@ -166,7 +166,6 @@ "changed": false, "failed": true, "msg": "unable to open shell", - "rc": 255 } Suggestions to resolve: @@ -223,7 +222,7 @@ Suggestions to resolve: -If you are specifying credentials via ``password:`` (either directly or via ``provider:``) or the environment variable ``ANSIBLE_NET_PASSWORD`` it is possible that ``paramiko`` (the Python SSH library that Ansible uses) is using ssh keys, and therefore the credentials you are specifying are being ignored. To find out if this is the case, disable "look for keys". This can be done like this: +If you are specifying credentials via ``password:`` (either directly or via ``provider:``) or the environment variable :envvar:`ANSIBLE_NET_PASSWORD` it is possible that ``paramiko`` (the Python SSH library that Ansible uses) is using ssh keys, and therefore the credentials you are specifying are being ignored. To find out if this is the case, disable "look for keys". This can be done like this: .. code-block:: yaml @@ -277,7 +276,7 @@ ansible.cfg -.. code-block: ini +.. code-block:: ini [paramiko_connection] host_key_auto_add = True @@ -301,8 +300,8 @@ 2017-04-04 12:19:05,670 p=18591 u=fred | using connection plugin network_cli 2017-04-04 12:19:06,606 p=18591 u=fred | connecting to host veos01 returned an error 2017-04-04 12:19:06,606 p=18591 u=fred | No authentication methods available - 2017-04-04 12:19:35,708 p=18591 u=fred | number of connection attempts exceeded, unable to connect to control socket - 2017-04-04 12:19:35,709 p=18591 u=fred | persistent_connect_interval=1, persistent_connect_retries=30 + 2017-04-04 12:19:35,708 p=18591 u=fred | connect retry timeout expired, unable to connect to control socket + 2017-04-04 12:19:35,709 p=18591 u=fred | persistent_connect_retry_timeout is 15 secs Suggestions to resolve: @@ -328,16 +327,62 @@ Timeouts -------- +Persistent connection idle timeout: + +For example: + +.. code-block:: yaml -All network modules support a timeout value that can be set on a per task -basis. The timeout value controls the amount of time in seconds before the + 2017-04-04 12:19:05,670 p=18591 u=fred | persistent connection idle timeout triggered, timeout value is 30 secs + +Suggestions to resolve: + +Increase the value of the persistent connection idle timeout. +.. code-block:: yaml + + export ANSIBLE_PERSISTENT_CONNECT_TIMEOUT=60 + +To make this a permanent change, add the following to your ``ansible.cfg`` file: + +.. code-block:: ini + + [persistent_connection] + connect_timeout = 60 + +Command timeout: +For example: + +.. code-block:: yaml + + 2017-04-04 12:19:05,670 p=18591 u=fred | command timeout triggered, timeout value is 10 secs + +Suggestions to resolve: + +Option 1: +Increase the value of the command timeout in the configuration file or by setting an environment variable. +Note: This value should be less than the persistent connection idle timeout, i.e. ``connect_timeout`` + +..
code-block:: yaml + + export ANSIBLE_PERSISTENT_COMMAND_TIMEOUT=30 + +To make this a permanent change, add the following to your ``ansible.cfg`` file: + +.. code-block:: ini + + [persistent_connection] + command_timeout = 30 + +Option 2: +Increase the command timeout on a per-task basis. All network modules support a +timeout value that can be set on a per task basis. +The timeout value controls the amount of time in seconds before the task will fail if the command has not returned. For example: .. FIXME: Detail error here - Suggestions to resolve: .. code-block:: yaml @@ -353,6 +398,33 @@ In this case, changing the timeout value form the default 10 seconds to 30 seconds will prevent the task from failing before the command completes successfully. +Note: This value should be less than the persistent connection idle timeout, i.e. ``connect_timeout`` + +Persistent socket connect timeout: +For example: + +.. code-block:: yaml + + 2017-04-04 12:19:35,708 p=18591 u=fred | connect retry timeout expired, unable to connect to control socket + 2017-04-04 12:19:35,709 p=18591 u=fred | persistent_connect_retry_timeout is 15 secs + +Suggestions to resolve: + +Increase the value of the persistent connection retry timeout. +Note: This value should be greater than the SSH timeout, i.e. the ``timeout`` value under the defaults +section in the configuration file, and less than the value of the persistent +connection idle timeout (``connect_timeout``) + +.. code-block:: yaml + + export ANSIBLE_PERSISTENT_CONNECT_RETRY_TIMEOUT=30 + +To make this a permanent change, add the following to your ``ansible.cfg`` file: + +.. code-block:: ini + + [persistent_connection] + connect_retry_timeout = 30 @@ -403,7 +475,6 @@ "changed": false, "failed": true, "msg": "unable to enter configuration mode", - "rc": 255 } Suggestions to resolve: @@ -419,7 +490,7 @@ authorize: yes register: result -If the user requires a password to go into privileged mode, this can be specified with ``auth_pass``; if ``auth_pass`` isn't set, the environment variable ``ANSIBLE_NET_AUTHORIZE`` will be used instead. +If the user requires a password to go into privileged mode, this can be specified with ``auth_pass``; if ``auth_pass`` isn't set, the environment variable :envvar:`ANSIBLE_NET_AUTHORIZE` will be used instead. Add `authorize: yes` to the task. For example: @@ -437,39 +508,38 @@ .. delete_to not honoured ---------------------- - + FIXME Do we get an error message - + FIXME Link to howto - - - - + + + + fixmes ====== - + Error: "number of connection attempts exceeded, unable to connect to control socket" ------------------------------------------------------------------------------------ - + **Platforms:** Any - + This occurs when Ansible wasn't able to connect to the remote device and obtain a shell with the timeout. - - - This information is available when ``ANSIBLE_LOG_PATH`` is set see (FIXMELINKTOSECTION): - + + + This information is available when :ref:`DEFAULT_LOG_PATH` is set see (FIXMELINKTOSECTION): + .. code-block:: yaml - + less $ANSIBLE_LOG_PATH - 2017-03-10 15:32:06,173 p=19677 u=fred | number of connection attempts exceeded, unable to connect to control socket - 2017-03-10 15:32:06,174 p=19677 u=fred | persistent_connect_interval=1, persistent_connect_retries=10 + 2017-03-10 15:32:06,173 p=19677 u=fred | connect retry timeout expired, unable to connect to control socket + 2017-03-10 15:32:06,174 p=19677 u=fred | persistent_connect_retry_timeout is 15 secs 2017-03-10 15:32:06,222 p=19669 u=fred | fatal: [veos01]: FAILED!
=> { - + Suggestions to resolve: - + Do stuff For example: - + .. code-block:: yaml - + Example stuff - diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbook_pathing.rst ansible-2.4.0.0/docs/docsite/rst/playbook_pathing.rst --- ansible-2.3.2.0/docs/docsite/rst/playbook_pathing.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbook_pathing.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,41 @@ +======================= +Search paths in Ansible +======================= + +Absolute paths are not an issue as they always have a known start, but relative paths ... well, they are relative. + +Config paths +============ + +By default these should be relative to the config file, some are specifically relative to the 'cwd' or the playbook and should have this noted in their description. Things like ssh keys are left to use 'cwd' because it mirrors how the underlying tools would use it. + + +Task paths +========== + +Here things start getting complicated, there are 2 different scopes to consider, task evalution (paths are all local, like in lookups) and task execution, which is normally on the remote, unless an action plugin is involved. + +Some tasks that require 'local' resources use action plugins (template and copy are examples of these), in which case the path is also local. + +The magic of 'local' paths +-------------------------- + +Lookups and action plugins both use a special 'search magic' to find things, taking the current play into account, it uses from most specific to most general playbook dir in which a task is contained (this includes roles and includes). + +Using this magic, relative paths get attempted first with a 'files|templates|vars' appended (if not already present), depending on action being taken, 'files' is the default. (i.e include_vars will use vars/). The paths will be searched from most specific to most general (i.e role before play). +dependent roles WILL be traversed (i.e task is in role2, role2 is a dependency of role1, role2 will be looked at first, then role1, then play). +i.e :: + + role search path is rolename/{files|vars|templates}/, rolename/tasks/. + play search path is playdir/{files|vars|templates}/, playdir/. + + +The current working directory (cwd) is not searched. If you see it, it just happens to coincide with one of the paths above. +If you `include` a task file from a role, it will NOT trigger role behavior, this only happens when running as a role, `include_role` will work. +A new variable `ansible_search_path` var will have the search path used, in order (but without the appended subdirs). Using 5 "v"s (`-vvvvv`) should show the detail of the search as it happens. + +As for includes, they try the path of the included file first and fall back to the play/role that includes them. + + + +.. note: The 'cwd' might vary depending on the connection plugin and if the action is local or remote. For the remote it is normally the directory on which the login shell puts the user. For local it is either the directory you executed ansible from or in some cases the playbook directory. diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_acceleration.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_acceleration.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_acceleration.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_acceleration.rst 2017-09-19 17:10:47.000000000 +0000 @@ -3,30 +3,30 @@ .. versionadded:: 1.3 -.. note:: +.. note:: Accelerated mode is deprecated. 
Consider using SSH with ControlPersist and pipelining enabled instead. This feature will be removed in a future release. Deprecation warnings can be disabled by setting :code:`deprecation_warnings=False` in :code:`ansible.cfg`. You Might Not Need This! ```````````````````````` -Are you running Ansible 1.5 or later? If so, you may not need accelerated mode due to a new feature called "SSH pipelining" and should read the :ref:`pipelining` section of the documentation. +Are you running Ansible 1.5 or later? If so, you may not need accelerated mode due to a new feature called "SSH pipelining" and should read the :ref:`pipelining` section of the documentation. For users on 1.5 and later, accelerated mode only makes sense if you (A) are managing from an Enterprise Linux 6 or earlier host and still are on paramiko, or (B) can't enable TTYs with sudo as described in the pipelining docs. If you can use pipelining, Ansible will reduce the amount of files transferred over the wire, -making everything much more efficient, and performance will be on par with accelerated mode in nearly all cases, possibly excluding very large file transfer. Because less moving parts are involved, pipelining is better than accelerated mode for nearly all use cases. +making everything much more efficient, and performance will be on par with accelerated mode in nearly all cases, possibly excluding very large file transfer. Because less moving parts are involved, pipelining is better than accelerated mode for nearly all use cases. -Accelerated moded remains around in support of EL6 +Accelerated mode remains around in support of EL6 control machines and other constrained environments. Accelerated Mode Details ```````````````````````` While OpenSSH using the ControlPersist feature is quite fast and scalable, there is a certain small amount of overhead involved in -using SSH connections. While many people will not encounter a need, if you are running on a platform that doesn't have ControlPersist support (such as an EL6 control machine), you'll probably be even more interested in tuning options. +using SSH connections. While many people will not encounter a need, if you are running on a platform that doesn't have ControlPersist support (such as an EL6 control machine), you'll probably be even more interested in tuning options. -Accelerated mode is there to help connections work faster, but still uses SSH for initial secure key exchange. There is no +Accelerated mode is there to help connections work faster, but still uses SSH for initial secure key exchange. There is no additional public key infrastructure to manage, and this does not require things like NTP or even DNS. Accelerated mode can be anywhere from 2-6x faster than SSH with ControlPersist enabled, and 10x faster than paramiko. 
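For readers who take the advice above and rely on SSH pipelining rather than accelerated mode, pipelining is normally switched on in ``ansible.cfg``. The following is a minimal sketch, not part of the original text; it assumes the standard OpenSSH connection plugin and that ``requiretty`` is disabled in sudoers on the managed hosts, as described in the pipelining docs:

.. code-block:: ini

    # ansible.cfg -- enable SSH pipelining (sketch)
    [ssh_connection]
    # reduces the number of SSH operations required per task
    pipelining = True

The same effect can be obtained for a single run by exporting ``ANSIBLE_SSH_PIPELINING=True`` in the environment before invoking ansible-playbook.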
@@ -62,7 +62,7 @@ # default port is 5099 accelerate_port: 10000 -The `accelerate_port` option can also be specified in the environment variable ACCELERATE_PORT, or in your `ansible.cfg` configuration:: +The `accelerate_port` option can also be specified in the environment variable :envvar:`ACCELERATE_PORT`, or in your `ansible.cfg` configuration:: [accelerate] accelerate_port = 5099 diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_async.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_async.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_async.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_async.rst 2017-09-19 17:10:47.000000000 +0000 @@ -79,6 +79,47 @@ "check on it later" task to fail because the temporary status file that the ``async_status:`` is looking for will not have been written or no longer exist +If you would like to run multiple asynchronous tasks while limiting the amount +of tasks running concurrently, you can do it this way:: + + ##################### + # main.yml + ##################### + - name: Run items asynchronously in batch of two items + vars: + sleep_durations: + - 1 + - 2 + - 3 + - 4 + - 5 + durations: "{{ item }}" + include_tasks: execute_batch.yml + with_items: + - "{{ sleep_durations | batch(2) | list }}" + + ##################### + # execute_batch.yml + ##################### + - name: Async sleeping for batched_items + command: sleep {{ async_item }} + async: 45 + poll: 0 + with_items: "{{ durations }}" + loop_control: + loop_var: "async_item" + register: async_results + + - name: Check sync status + async_status: + jid: "{{ async_result_item.ansible_job_id }}" + with_items: "{{ async_results.results }}" + loop_control: + loop_var: "async_result_item" + register: async_poll_results + until: async_poll_results.finished + retries: 30 + .. seealso:: :doc:`playbooks` diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_best_practices.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_best_practices.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_best_practices.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_best_practices.rst 2017-09-19 17:10:47.000000000 +0000 @@ -16,9 +16,8 @@ Your usage of Ansible should fit your needs, however, not ours, so feel free to modify this approach and organize as you see fit. -One thing you will definitely want to do though, is use the "roles" organization feature, which is documented as part -of the main playbooks page. See :doc:`playbooks_roles`. You absolutely should be using roles. Roles are great. Use roles. Roles! -Did we say that enough? Roles are great. +One crucial way to organize your playbook content is Ansible's "roles" organization feature, which is documented as part +of the main playbooks page. You should take the time to read and understand the roles documentation which is available here: :doc:`playbooks_reuse_roles`. .. 
_directory_layout: @@ -38,6 +37,7 @@ hostname2 # "" library/ # if any custom modules, put them here (optional) + module_utils/ # if any custom module_utils to support modules, put them here (optional) filter_plugins/ # if any custom filter plugins, put them here (optional) site.yml # master playbook @@ -62,6 +62,7 @@ meta/ # main.yml # <-- role dependencies library/ # roles can also include custom modules + module_utils/ # roles can also include custom module_utils lookup_plugins/ # or other types of plugins, like lookup in this case webtier/ # same kind of structure as "common" was above, done for the webtier role @@ -97,6 +98,7 @@ stagehost2 # "" library/ + module_utils/ filter_plugins/ site.yml @@ -214,15 +216,15 @@ Top Level Playbooks Are Separated By Role ````````````````````````````````````````` -In site.yml, we include a playbook that defines our entire infrastructure. Note this is SUPER short, because it's just including -some other playbooks. Remember, playbooks are nothing more than lists of plays:: +In site.yml, we import a playbook that defines our entire infrastructure. This is a very short example, because it's just importing +some other playbooks:: --- # file: site.yml - - include: webservers.yml - - include: dbservers.yml + - import_plays: webservers.yml + - import_plays: dbservers.yml -In a file like webservers.yml (also at the top level), we simply map the configuration of the webservers group to the roles performed by the webservers group. Also notice this is incredibly short. For example:: +In a file like webservers.yml (also at the top level), we map the configuration of the webservers group to the roles performed by the webservers group:: --- # file: webservers.yml @@ -269,7 +271,7 @@ - name: restart ntpd service: name=ntpd state=restarted -See :doc:`playbooks_roles` for more information. +See :doc:`playbooks_reuse_roles` for more information. .. _organization_examples: @@ -297,8 +299,8 @@ What about just the first 10, and then the next 10?:: - ansible-playbook -i production webservers.yml --limit boston[1-10] - ansible-playbook -i production webservers.yml --limit boston[11-20] + ansible-playbook -i production webservers.yml --limit boston[1:10] + ansible-playbook -i production webservers.yml --limit boston[11:20] And of course just basic ad-hoc stuff is also possible.:: @@ -367,7 +369,7 @@ This allows playbooks to target machines based on role, as well as to assign role specific variables using the group variable system. -See :doc:`playbooks_roles`. +See :doc:`playbooks_reuse_roles`. .. _os_variance: @@ -422,7 +424,7 @@ Bundling Ansible Modules With Playbooks +++++++++++++++++++++++++++++++++++++++ -If a playbook has a "./library" directory relative to its YAML file, this directory can be used to add ansible modules that will +If a playbook has a :file:`./library` directory relative to its YAML file, this directory can be used to add ansible modules that will automatically be in the ansible module path. This is a great way to keep modules that go with a playbook together. This is shown in the directory structure example at the start of this section. @@ -471,7 +473,7 @@ For general maintenance, it is often easier to use ``grep``, or similar tools, to find variables in your Ansible setup. Since vaults obscure these variables, it is best to work with a layer of indirection. When running a playbook, Ansible finds the variables in the unencrypted file and all sensitive variables come from the encrypted file. 
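A minimal sketch of that layer of indirection, following the convention spelled out in the next paragraph (the group name ``webservers`` and the variable names here are illustrative only):

.. code-block:: yaml

    # group_vars/webservers/vars  -- unencrypted, safe to grep
    db_password: "{{ vault_db_password }}"

    # group_vars/webservers/vault -- encrypted with ansible-vault
    vault_db_password: "s3cr3t-example"

Playbooks and templates reference only ``db_password``; the real value lives in the vaulted file.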
-A best practice approach for this is to start with a ``group_vars/`` subdirectory named after the group. Inside of this subdirectory, create two files named ``vars`` and ``vault``. Inside of the ``vars`` file, define all of the variables needed, including any sensitive ones. Next, copy all of the sensitive variables over to the ``vault`` file and prefix these variables with ``vault_``. You should adjust the variables in the ``vars`` file to point to the matching ``vault_`` variables and ensure that the ``vault`` file is vault encrypted. +A best practice approach for this is to start with a ``group_vars/`` subdirectory named after the group. Inside of this subdirectory, create two files named ``vars`` and ``vault``. Inside of the ``vars`` file, define all of the variables needed, including any sensitive ones. Next, copy all of the sensitive variables over to the ``vault`` file and prefix these variables with ``vault_``. You should adjust the variables in the ``vars`` file to point to the matching ``vault_`` variables using jinja2 syntax, and ensure that the ``vault`` file is vault encrypted. This best practice has no limit on the amount of variable and vault files or their names. diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_blocks.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_blocks.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_blocks.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_blocks.rst 2017-09-19 17:10:47.000000000 +0000 @@ -9,20 +9,18 @@ .. code-block:: YAML - :emphasize-lines: 2 + :emphasize-lines: 3 :caption: Block example tasks: - - block: + - name: Install Apache + block: - yum: name={{ item }} state=installed with_items: - httpd - memcached - - template: src=templates/src.j2 dest=/etc/foo.conf - - service: name=bar state=started enabled=True - when: ansible_distribution == 'CentOS' become: true become_user: root @@ -32,6 +30,9 @@ and evaluating it in the task's context. Also they inherit the privilege escalation directives enabling "become to root" for all the enclosed tasks. +.. versionadded:: 2.3 + + The ``name:`` keyword for ``blocks:`` was added in Ansible 2.3. .. _block_error_handling: @@ -41,20 +42,21 @@ Blocks also introduce the ability to handle errors in a way similar to exceptions in most programming languages. .. code-block:: YAML - :emphasize-lines: 2,6,10 + :emphasize-lines: 3,7,11 :caption: Block error handling example - tasks: - - block: - - debug: msg='i execute normally' - - command: /bin/false - - debug: msg='i never execute, cause ERROR!' - rescue: - - debug: msg='I caught an error' - - command: /bin/false - - debug: msg='I also never execute :-(' - always: - - debug: msg="this always executes" + tasks: + - name: Attempt and gracefull roll back demo + block: + - debug: msg='I execute normally' + - command: /bin/false + - debug: msg='I never execute, due to the above task failing' + rescue: + - debug: msg='I caught an error' + - command: /bin/false + - debug: msg='I also never execute :-(' + always: + - debug: msg="this always executes" The tasks in the ``block`` would execute normally, if there is any error the ``rescue`` section would get executed @@ -65,26 +67,27 @@ Another example is how to run handlers after an error occurred : .. 
code-block:: YAML - :emphasize-lines: 4,8 + :emphasize-lines: 5,9 :caption: Block run handlers in error handling tasks: - - block: - - debug: msg='i execute normally' - notify: run me even after an error - - command: /bin/false - rescue: - - name: make sure all handlers run - meta: flush_handlers - handlers: - - name: run me even after an error - debug: msg='this handler runs even on error' + - name: Attempt and gracefull roll back demo + block: + - debug: msg='I execute normally' + notify: run me even after an error + - command: /bin/false + rescue: + - name: make sure all handlers run + meta: flush_handlers + handlers: + - name: run me even after an error + debug: msg='this handler runs even on error' .. seealso:: :doc:`playbooks` An introduction to playbooks - :doc:`playbooks_roles` + :doc:`playbooks_reuse_roles` Playbook organization by roles `User Mailing List `_ Have a question? Stop by the google group! @@ -93,4 +96,3 @@ - diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_checkmode.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_checkmode.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_checkmode.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_checkmode.rst 2017-09-19 17:10:47.000000000 +0000 @@ -33,7 +33,7 @@ 1. Force a task to **run in check mode**, even when the playbook is called **without** ``--check``. This is called ``check_mode: yes``. 2. Force a task to **run in normal mode** and make changes to the system, even when the playbook is called **with** ``--check``. This is called ``check_mode: no``. -.. note:: Prior to version 2.2 only the the equivalent of ``check_mode: no`` existed. The notation for that was ``always_run: yes``. +.. note:: Prior to version 2.2 only the equivalent of ``check_mode: no`` existed. The notation for that was ``always_run: yes``. Instead of ``yes``/``no`` you can use a Jinja2 expression, just like the ``when`` clause. @@ -51,7 +51,7 @@ Running single tasks with ``check_mode: yes`` can be useful to write tests for -ansible modules, either to test the module itself or to the the conditions under +ansible modules, either to test the module itself or to the conditions under which a module would make changes. With ``register`` (see :doc:`playbooks_conditionals`) you can check the potential changes. @@ -86,9 +86,10 @@ .. versionadded:: 1.1 -The ``--diff`` option to ansible-playbook works great with ``--check`` (detailed above) but can also be used by itself. When this flag is supplied, if any templated files on the remote system are changed, and the ansible-playbook CLI will report back -the textual changes made to the file (or, if used with ``--check``, the changes that would have been made). Since the diff -feature produces a large amount of output, it is best used when checking a single host at a time, like so:: +The ``--diff`` option to ansible-playbook works great with ``--check`` (detailed above) but can also be used by itself. +When this flag is supplied and the module supports this, Ansible will report back the changes made or, if used with ``--check``, the changes that would have been made. +This is mostly used in modules that manipulate files (i.e. template) but other modules might also show 'before and after' information (i.e. user). +Since the diff feature produces a large amount of output, it is best used when checking a single host at a time. 
For example:: ansible-playbook foo.yml --check --diff --limit foo.example.com diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_conditionals.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_conditionals.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_conditionals.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_conditionals.rst 2017-09-19 17:10:47.000000000 +0000 @@ -154,13 +154,13 @@ .. _when_roles_and_includes: -Applying 'when' to roles and includes -````````````````````````````````````` +Applying 'when' to roles, imports, and includes +``````````````````````````````````````````````` Note that if you have several tasks that all share the same conditional statement, you can affix the conditional to a task include statement as below. All the tasks get evaluated, but the conditional is applied to each and every task:: - - include: tasks/sometasks.yml + - import_tasks: tasks/sometasks.yml when: "'reticulating splines' in output" .. note:: In versions prior to 2.0 this worked with task includes but not playbook includes. 2.0 allows it to work with both. @@ -174,6 +174,24 @@ You will note a lot of 'skipped' output by default in Ansible when using this approach on systems that don't match the criteria. Read up on the 'group_by' module in the :doc:`modules` docs for a more streamlined way to accomplish the same thing. +When used with `include_*` tasks instead of imports, the conditional is applied _only_ to the include task itself and not any other +tasks within the included file(s). A common situation where this distinction is important is as follows:: + + # include a file to define a variable when it is not already defined + + # main.yml + - include_tasks: other_tasks.yml + when: x is not defined + + # other_tasks.yml + - set_fact: + x: foo + - debug: + var: x + +In the above example, if ``import_tasks`` had been used instead both included tasks would have also been skipped. With ``include_tasks`` +instead, the tasks are executed as expected because the conditional is not applied to them. + .. _conditional_imports: Conditional Imports @@ -310,7 +328,7 @@ :doc:`playbooks` An introduction to playbooks - :doc:`playbooks_roles` + :doc:`playbooks_reuse_roles` Playbook organization by roles :doc:`playbooks_best_practices` Best practices in playbooks diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_delegation.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_delegation.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_delegation.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_delegation.rst 2017-09-19 17:10:47.000000000 +0000 @@ -11,9 +11,11 @@ This section covers all of these features. For examples of these items in use, `please see the ansible-examples repository `_. There are quite a few examples of zero-downtime update procedures for different kinds of applications. -You should also consult the :doc:`modules` section, various modules like 'ec2_elb', 'nagios', and 'bigip_pool', and 'netscaler' dovetail neatly with the concepts mentioned here. +You should also consult the :doc:`modules` section, various modules like 'ec2_elb', 'nagios', and 'bigip_pool', and 'netscaler' dovetail neatly with the concepts mentioned here. -You'll also want to read up on :doc:`playbooks_roles`, as the 'pre_task' and 'post_task' concepts are the places where you would typically call these modules. 
+You'll also want to read up on :doc:`playbooks_reuse_roles`, as the 'pre_task' and 'post_task' concepts are the places where you would typically call these modules. + +Be aware that certain tasks are impossible to delegate, i.e. `include`, `add_host`, `debug`, etc as they always execute on the controller. .. _rolling_update_batch_size: @@ -54,7 +56,7 @@ In the above example, the first batch would contain a single host, the next would contain 5 hosts, and (if there are any hosts left), every following batch would contain 10 hosts until all available hosts are used. -It is also possible to list multiple batche sizes as percentages:: +It is also possible to list multiple batch sizes as percentages:: - name: test play hosts: webservers @@ -108,9 +110,9 @@ This isn't actually rolling update specific but comes up frequently in those cases. If you want to perform a task on one host with reference to other hosts, use the 'delegate_to' keyword on a task. -This is ideal for placing nodes in a load balanced pool, or removing them. It is also very useful for controlling -outage windows. Using this with the 'serial' keyword to control the number of hosts executing at one time is also -a good idea:: +This is ideal for placing nodes in a load balanced pool, or removing them. It is also very useful for controlling outage windows. +Be aware that it does not make sense to delegate all tasks, debug, add_host, include, etc always get executed on the controller. +Using this with the 'serial' keyword to control the number of hosts executing at one time is also a good idea:: --- @@ -160,6 +162,20 @@ Note that you must have passphrase-less SSH keys or an ssh-agent configured for this to work, otherwise rsync will need to ask for a passphrase. +In case you have to specify more arguments you can use the following syntax:: + + --- + # ... + tasks: + + - name: Send summary mail + local_action: + module: mail + subject: "Summary Mail" + to: "{{ mail_recipient }}" + body: "{{ mail_body }}" + run_once: True + The `ansible_host` variable (`ansible_ssh_host` in 1.x or specific to ssh/paramiko plugins) reflects the host a task is delegated to. .. _delegate_facts: diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_error_handling.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_error_handling.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_error_handling.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_error_handling.rst 2017-09-19 17:10:47.000000000 +0000 @@ -71,15 +71,22 @@ Suppose the error code of a command is meaningless and to tell if there is a failure what really matters is the output of the command, for instance -if the string "FAILED" is in the output. +if the string "FAILED" is in the output. 
Ansible in 1.4 and later provides a way to specify this behavior as follows:: - - name: this command prints FAILED when it fails + - name: Fail task when the command error output prints FAILED command: /usr/bin/example-command -x -y -z register: command_result failed_when: "'FAILED' in command_result.stderr" +or based on the return code:: + + - name: Fail task when both files are identical + raw: diff foo/file1 bar/file2 + register: diff_cmd + failed_when: diff_cmd.rc == 0 or diff_cmd.rc >= 2 + In previous version of Ansible, this can be still be accomplished as follows:: - name: this command prints FAILED when it fails diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_filters_ipaddr.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_filters_ipaddr.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_filters_ipaddr.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_filters_ipaddr.rst 2017-09-19 17:10:47.000000000 +0000 @@ -496,7 +496,7 @@ All about variables :doc:`playbooks_loops` Looping in playbooks - :doc:`playbooks_roles` + :doc:`playbooks_reuse_roles` Playbook organization by roles :doc:`playbooks_best_practices` Best practices in playbooks diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_filters.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_filters.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_filters.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_filters.rst 2017-09-19 17:10:47.000000000 +0000 @@ -6,7 +6,7 @@ Filters in Ansible are from Jinja2, and are used for transforming data inside a template expression. Jinja2 ships with many filters. See `builtin filters`_ in the official Jinja2 template documentation. -Take into account that templating happens on the the Ansible controller, **not** on the task's target host, so filters also execute on the controller as they manipulate local data. +Take into account that templating happens on the Ansible controller, **not** on the task's target host, so filters also execute on the controller as they manipulate local data. In addition the ones provided by Jinja2, Ansible ships with it's own and allows users to add their own custom filters. @@ -318,6 +318,99 @@ More information about ``ipaddr`` filter and complete usage guide can be found in :doc:`playbooks_filters_ipaddr`. +.. _network_filters: + +Network CLI filters +``````````````````` + +.. versionadded:: 2.4 + +To convert the output of a network device CLI command into structured JSON +output, use the ``parse_cli`` filter:: + + {{ output | parse_cli('path/to/spec') }} + +The ``parse_cli`` filter will load the spec file and pass the command output +through, it returning JSON output. The spec file is a YAML yaml that defines +how to parse the CLI output. + +The spec file should be valid formatted YAML. It defines how to parse the CLI +output and return JSON data. Below is an example of a valid spec file that +will parse the output from the ``show vlan`` command.:: + + --- + vars: + vlan: + vlan_id: "{{ item.vlan_id }}" + name: "{{ item.name }}" + enabled: "{{ item.state != 'act/lshut' }}" + state: "{{ item.state }}" + + keys: + vlans: + type: list + value: "{{ vlan }}" + items: "^(?P\\d+)\\s+(?P\\w+)\\s+(?Pactive|act/lshut|suspended)" + state_static: + value: present + +The spec file above will return a JSON data structure that is a list of hashes +with the parsed VLAN information. + +The same command could be parsed into a hash by using the key and values +directives. 
Here is an example of how to parse the output into a hash +value using the same ``show vlan`` command.:: + + --- + vars: + vlan: + key: "{{ item.vlan_id }}" + values: + vlan_id: "{{ item.vlan_id }}" + name: "{{ item.name }}" + enabled: "{{ item.state != 'act/lshut' }}" + state: "{{ item.state }}" + + keys: + vlans: + type: list + value: "{{ vlan }}" + items: "^(?P\\d+)\\s+(?P\\w+)\\s+(?Pactive|act/lshut|suspended)" + state_static: + value: present + +Another common use case for parsing CLI commands is to break a large command +into blocks that can parsed. This can be done using the ``start_block`` and +``end_block`` directives to break the command into blocks that can be parsed.:: + + --- + vars: + interface: + name: "{{ item[0].match[0] }}" + state: "{{ item[1].state }}" + mode: "{{ item[2].match[0] }}" + + keys: + interfaces: + value: "{{ interface }}" + start_block: "^Ethernet.*$" + end_block: "^$" + items: + - "^(?PEthernet\\d\\/\\d*)" + - "admin state is (?P.+)," + - "Port mode is (.+)" + + +The example above will parse the output of ``show interface`` into a list of +hashes. + +The network filters also support parsing the output of a CLI command using the +TextFSM library. To parse the CLI output with TextFSM use the following +filter:: + + {{ output | parse_cli_textfsm('path/to/fsm') }} + +Use of the TextFSM filter requires the TextFSM library to be installed. .. _hash_filters: @@ -352,7 +445,7 @@ Hash types available depend on the master system running ansible, -'hash' depends on hashlib password_hash depends on crypt. +'hash' depends on hashlib password_hash depends on passlib (http://passlib.readthedocs.io/en/stable/lib/passlib.hash.html). .. _combine_filter: @@ -501,6 +594,98 @@ .. _other_useful_filters: +URL Split Filter +````````````````` + +.. versionadded:: 2.4 + +The ``urlsplit`` filter extracts the fragment, hostname, netloc, password, path, port, query, scheme, and username from an URL. 
With no arguments, returns a dictionary of all the fields:: + + {{ "http://user:password@www.acme.com:9000/dir/index.html?query=term#frament" | urlsplit('hostname') }} + # => 'www.acme.com' + + {{ "http://user:password@www.acme.com:9000/dir/index.html?query=term#frament" | urlsplit('netloc') }} + # => 'user:password@www.acme.com:9000' + + {{ "http://user:password@www.acme.com:9000/dir/index.html?query=term#frament" | urlsplit('username') }} + # => 'user' + + {{ "http://user:password@www.acme.com:9000/dir/index.html?query=term#frament" | urlsplit('password') }} + # => 'password' + + {{ "http://user:password@www.acme.com:9000/dir/index.html?query=term#frament" | urlsplit('path') }} + # => '/dir/index.html' + + {{ "http://user:password@www.acme.com:9000/dir/index.html?query=term#frament" | urlsplit('port') }} + # => '9000' + + {{ "http://user:password@www.acme.com:9000/dir/index.html?query=term#frament" | urlsplit('scheme') }} + # => 'http' + + {{ "http://user:password@www.acme.com:9000/dir/index.html?query=term#frament" | urlsplit('query') }} + # => 'query=term' + + {{ "http://user:password@www.acme.com:9000/dir/index.html?query=term#frament" | urlsplit('fragment') }} + # => 'fragment' + + {{ "http://user:password@www.acme.com:9000/dir/index.html?query=term#frament" | urlsplit }} + # => + # { + # "fragment": "fragment", + # "hostname": "www.acme.com", + # "netloc": "user:password@www.acme.com:9000", + # "password": "password", + # "path": "/dir/index.html", + # "port": 9000, + # "query": "query=term", + # "scheme": "http", + # "username": "user" + # } + + +Regular Expression Filters +`````````````````````````` + +To search a string with a regex, use the "regex_search" filter:: + + # search for "foo" in "foobar" + {{ 'foobar' | regex_search('(foo)') }} + + # will return empty if it cannot find a match + {{ 'ansible' | regex_search('(foobar)') }} + + +To search for all occurrences of regex matches, use the "regex_findall" filter:: + + # Return a list of all IPv4 addresses in the string + {{ 'Some DNS servers are 8.8.8.8 and 8.8.4.4' | regex_findall('\b(?:[0-9]{1,3}\.){3}[0-9]{1,3}\b') }} + + +To replace text in a string with regex, use the "regex_replace" filter:: + + # convert "ansible" to "able" + {{ 'ansible' | regex_replace('^a.*i(.*)$', 'a\\1') }} + + # convert "foobar" to "bar" + {{ 'foobar' | regex_replace('^f.*o(.*)$', '\\1') }} + + # convert "localhost:80" to "localhost, 80" using named groups + {{ 'localhost:80' | regex_replace('^(?P.+):(?P\\d+)$', '\\g, \\g') }} + + # convert "localhost:80" to "localhost" + {{ 'localhost:80' | regex_replace(':80') }} + +.. note:: Prior to ansible 2.0, if "regex_replace" filter was used with variables inside YAML arguments (as opposed to simpler 'key=value' arguments), + then you needed to escape backreferences (e.g. ``\\1``) with 4 backslashes (``\\\\``) instead of 2 (``\\``). + +.. versionadded:: 2.0 + +To escape special characters within a regex, use the "regex_escape" filter:: + + # convert '^f.*o(.*)$' to '\^f\.\*o\(\.\*\)\$' + {{ '^f.*o(.*)$' | regex_escape() }} + + Other Useful Filters ```````````````````` @@ -578,30 +763,6 @@ .. 
versionadded:: 1.6 -To replace text in a string with regex, use the "regex_replace" filter:: - - # convert "ansible" to "able" - {{ 'ansible' | regex_replace('^a.*i(.*)$', 'a\\1') }} - - # convert "foobar" to "bar" - {{ 'foobar' | regex_replace('^f.*o(.*)$', '\\1') }} - - # convert "localhost:80" to "localhost, 80" using named groups - {{ 'localhost:80' | regex_replace('^(?P.+):(?P\\d+)$', '\\g, \\g') }} - - # convert "localhost:80" to "localhost" - {{ 'localhost:80' | regex_replace(':80') }} - -.. note:: Prior to ansible 2.0, if "regex_replace" filter was used with variables inside YAML arguments (as opposed to simpler 'key=value' arguments), - then you needed to escape backreferences (e.g. ``\\1``) with 4 backslashes (``\\\\``) instead of 2 (``\\``). - -.. versionadded:: 2.0 - -To escape special characters within a regex, use the "regex_escape" filter:: - - # convert '^f.*o(.*)$' to '\^f\.\*o\(\.\*\)\$' - {{ '^f.*o(.*)$' | regex_escape() }} - To make use of one attribute from each item in a list of complex variables, use the "map" filter (see the `Jinja2 map() docs`_ for more):: # get a comma-separated list of the mount points (e.g. "/,/mnt/stuff") on a host @@ -609,8 +770,8 @@ To get date object from string use the `to_datetime` filter, (new in version in 2.2):: - # get amount of seconds between two dates, default date format is %Y-%d-%m %H:%M:%S but you can pass your own one - {{ (("2016-08-04 20:00:12"|to_datetime) - ("2015-10-06"|to_datetime('%Y-%d-%m'))).seconds }} + # get amount of seconds between two dates, default date format is %Y-%m-%d %H:%M:%S but you can pass your own one + {{ (("2016-08-14 20:00:12"|to_datetime) - ("2015-12-25"|to_datetime('%Y-%m-%d'))).seconds }} Combination Filters @@ -624,29 +785,48 @@ - name: give me largest permutations (order matters) debug: msg="{{ [1,2,3,4,5]|permutations|list }}" - - name: give me permutations of sets of 3 + - name: give me permutations of sets of three debug: msg="{{ [1,2,3,4,5]|permutations(3)|list }}" Combinations always require a set size:: - - name: give me combinations for sets of 2 + - name: give me combinations for sets of two debug: msg="{{ [1,2,3,4,5]|combinations(2)|list }}" To get a list combining the elements of other lists use ``zip``:: - - name: give me list combo of 2 lists + - name: give me list combo of two lists debug: msg="{{ [1,2,3,4,5]|zip(['a','b','c','d','e','f'])|list }}" - - name: give me shortest combo of 2 lists + - name: give me shortest combo of two lists debug: msg="{{ [1,2,3]|zip(['a','b','c','d','e','f'])|list }}" To always exhaust all list use ``zip_longest``:: - - name: give me longest combo of 3 lists , fill with X + - name: give me longest combo of three lists , fill with X debug: msg="{{ [1,2,3]|zip_longest(['a','b','c','d','e','f'], [21, 22, 23], fillvalue='X')|list }}" +.. versionadded:: 2.4 + +To format a date using a string (like with the shell date command), use the "strftime" filter:: + + # Display year-month-day + {{ '%Y-%m-%d' | strftime }} + + # Display hour:min:sec + {{ '%H:%M:%S' | strftime }} + + # Use ansible_date_time.epoch fact + {{ '%Y-%m-%d %H:%M:%S' | strftime(ansible_date_time.epoch) }} + + # Use arbitrary epoch value + {{ '%Y-%m-%d' | strftime(0) }} # => 1970-01-01 + {{ '%Y-%m-%d' | strftime(1441357287) }} # => 2015-09-04 + +.. 
note:: To get all string possibilities, check https://docs.python.org/2/library/time.html#time.strftime + Debugging Filters ````````````````` @@ -677,7 +857,7 @@ All about variables :doc:`playbooks_loops` Looping in playbooks - :doc:`playbooks_roles` + :doc:`playbooks_reuse_roles` Playbook organization by roles :doc:`playbooks_best_practices` Best practices in playbooks diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_intro.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_intro.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_intro.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_intro.rst 2017-09-19 17:10:47.000000000 +0000 @@ -229,6 +229,33 @@ not come into play. Ansible also takes care to not log password parameters. + +.. _order: + +.. versionadded:: 2.4 + +You can also control the order in which hosts are run. The default is to follow the order supplied by the inventory:: + + - hosts: all + order: sorted + gather_facts: False + tasks: + - debug: var=inventory_hostname + +Possible values for order are: + +inventory: + The default. The order is 'as provided' by the inventory +reverse_inventory: + As the name implies, this reverses the order 'as provided' by the inventory +sorted: + Hosts are alphabetically sorted by name +reverse_sorted: + Hosts are sorted by name in reverse alphabetical order +shuffle: + Hosts are randomly ordered each run + + .. _tasks_list: Tasks list @@ -282,8 +309,8 @@ them work as simply as you would expect:: tasks: - - name: disable selinux - command: /sbin/setenforce 0 + - name: enable selinux + command: /sbin/setenforce 1 The **command** and **shell** module care about return codes, so if you have a command whose successful exit code is not zero, you may wish to do this:: @@ -318,7 +345,7 @@ Those same variables are usable in templates, which we'll get to later. Now in a very basic playbook all the tasks will be listed directly in that play, though it will usually -make more sense to break up tasks using the ``include:`` directive. We'll show that a bit later. +make more sense to break up tasks as described in :doc:`playbooks_reuse`. .. _action_shorthand: diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_lookups.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_lookups.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_lookups.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_lookups.rst 2017-09-19 17:10:47.000000000 +0000 @@ -45,10 +45,10 @@ A great alternative to the password lookup plugin, if you don't need to generate random passwords on a per-host basis, would be to use :doc:`playbooks_vault`. Read the documentation there and consider using it first, it will be more desirable for most applications. ``password`` generates a random plaintext password and stores it in -a file at a given filepath. +a file at a given filepath. (Docs about crypted save modes are pending) - + If the file exists previously, it will retrieve its contents, behaving just like with_file. Usage of variables like "{{ inventory_hostname }}" in the filepath can be used to set up random passwords per host (which simplifies password management in 'host_vars' variables). @@ -109,40 +109,42 @@ .. versionadded:: 2.3 The ``passwordstore`` lookup enables Ansible to retrieve, create or update passwords from -the passwordstore.org ``pass`` utility. It also retrieves YAML style keys stored as multilines +the passwordstore.org_ ``pass`` utility. It also retrieves YAML style keys stored as multilines in the passwordfile. 
+.. _passwordstore.org: https://www.passwordstore.org + Examples -------- Basic lookup. Fails if example/test doesn't exist:: - password="{{ lookup('passwordstore', 'example/test')}}` + password="{{ lookup('passwordstore', 'example/test')}}" Create pass with random 16 character password. If password exists just give the password:: - password="{{ lookup('passwordstore', 'example/test create=true')}}` + password="{{ lookup('passwordstore', 'example/test create=true')}}" Different size password:: - password="{{ lookup('passwordstore', 'example/test create=true length=42')}}` + password="{{ lookup('passwordstore', 'example/test create=true length=42')}}" Create password and overwrite the password if it exists. As a bonus, this module includes the old password inside the pass file:: - password="{{ lookup('passwordstore', 'example/test create=true overwrite=true')}}` + password="{{ lookup('passwordstore', 'example/test create=true overwrite=true')}}" Return the value for user in the KV pair user: username:: - password="{{ lookup('passwordstore', 'example/test subkey=user')}}` + password="{{ lookup('passwordstore', 'example/test subkey=user')}}" Return the entire password file content:: - password="{{ lookup('passwordstore', 'example/test returnall=true')}}` + password="{{ lookup('passwordstore', 'example/test returnall=true')}}" The location of the password-store directory can be specified in the following ways: - Default is ~/.password-store - Can be overruled by PASSWORD_STORE_DIR environment variable - Can be overruled by 'passwordstore: path/to/.password-store' ansible setting - - Can be overrules by 'directory=path' argument in the lookup call + - Can be overruled by 'directory=path' argument in the lookup call .. _csvfile_lookup: @@ -251,6 +253,7 @@ file ansible.ini Name of the file to load section global Default section where to lookup for key. re False The key is a regexp. +encoding utf-8 Text encoding to use. default empty string return value if the key is not in the ini file ========== ============ ========================================================================================= @@ -473,7 +476,7 @@ #optional query parameters #we accept any parameter from the normal mongodb query. - # the offical documentation is here + # the official documentation is here # https://api.mongodb.org/python/current/api/pymongo/collection.html?highlight=find#pymongo.collection.Collection.find # filter: { "hostname": "batman" } projection: { "pid": True , "_id" : False , "hostname" : True } @@ -553,6 +556,9 @@ - debug: msg="{{ lookup('template', './some_template.j2') }} is a value from evaluation of this template" + # Since 2.4, you can pass in variables during evaluation + - debug: msg="{{ lookup('template', './some_template.j2', template_vars=dict(x=42)) }} is evaluated with x=42" + - name: loading a json file from a template as a string debug: msg="{{ lookup('template', './some_json.json.j2', convert_data=False) }} is a value from evaluation of this template" @@ -563,6 +569,7 @@ - debug: msg="{{ lookup('shelvefile', 'file=path_to_some_shelve_file.db key=key_to_retrieve') }} # The following lookups were added in 1.9 + # url lookup splits lines by default, an option to disable this was added in 2.4 - debug: msg="{{item}}" with_url: - 'https://github.com/gremlin.keys' @@ -602,6 +609,3 @@ Have a question? Stop by the google group! 
`irc.freenode.net `_ #ansible IRC chat channel - - - diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_loops.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_loops.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_loops.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_loops.rst 2017-09-19 17:10:47.000000000 +0000 @@ -59,6 +59,31 @@ Loops are actually a combination of things `with_` + `lookup()`, so any lookup plugin can be used as a source for a loop, 'items' is lookup. +Please note that ``with_items`` flattens the first depth of the list it is +provided and can yield unexpected results if you pass a list which is composed +of lists. You can work around this by wrapping your nested list inside a list:: + + # This will run debug three times since the list is flattened + - debug: + msg: "{{ item }}" + vars: + nested_list: + - - one + - two + - three + with_items: "{{ nested_list }}" + + # This will run debug once with the three items + - debug: + msg: "{{ item }}" + vars: + nested_list: + - - one + - two + - three + with_items: + - "{{ nested_list }}" + .. _nested_loops: Nested Loops @@ -112,8 +137,7 @@ - name: Print phone records debug: msg: "User {{ item.key }} is {{ item.value.name }} ({{ item.value.telephone }})" - with_dict: - - "{{ users }}" + with_dict: "{{ users }}" .. _looping_over_fileglobs: @@ -171,24 +195,87 @@ src: "{{ item }}" dest: "/etc/fooapp/" owner: "root" - mode: 600 + mode: 0600 with_fileglob: - "/playbooks/files/fooapp/*" .. note:: When using a relative path with ``with_fileglob`` in a role, Ansible resolves the path relative to the `roles//files` directory. + +Looping over Filetrees +`````````````````````` + +``with_filetree`` recursively matches all files in a directory tree, enabling you to template a complete tree of files on a target system while retaining permissions and ownership. + +The ``filetree`` lookup-plugin supports directories, files and symlinks, including SELinux and other file properties. Here is a complete list of what each file object consists of: + +* src +* root +* path +* mode +* state +* owner +* group +* seuser +* serole +* setype +* selevel +* uid +* gid +* size +* mtime +* ctime + +If you provide more than one path, it will implement a ``with_first_found`` logic, and will not process entries it already processed in previous paths. This enables the user to merge different trees in order of importance, or add role_vars specific paths to influence different instances of the same role. + +Here is an example of how we use with_filetree within a role. 
The ``web/`` path is relative to either ``roles//files/`` or ``files/``:: + + --- + - name: Create directories + file: + path: /web/{{ item.path }} + state: directory + mode: '{{ item.mode }}' + with_filetree: web/ + when: item.state == 'directory' + + - name: Template files + template: + src: '{{ item.src }}' + dest: /web/{{ item.path }} + mode: '{{ item.mode }}' + with_filetree: web/ + when: item.state == 'file' + + - name: Recreate symlinks + file: + src: '{{ item.src }}' + dest: /web/{{ item.path }} + state: link + force: yes + mode: '{{ item.mode }}' + with_filetree: web/ + when: item.state == 'link' + + +The following properties are also available: + +* ``root``: allows filtering by original location +* ``path``: contains the relative path to root +* ``uidi``, ``gid``: force-create by exact id, rather than by name +* ``size``, ``mtime``, ``ctime``: filter out files by size, mtime or ctime + + Looping over Parallel Sets of Data `````````````````````````````````` -.. note:: This is an uncommon thing to want to do, but we're documenting it for completeness. You probably won't be reaching for this one often. - -Suppose you have the following variable data was loaded in via somewhere:: +Suppose you have the following variable data:: --- alpha: [ 'a', 'b', 'c', 'd' ] numbers: [ 1, 2, 3, 4 ] -And you want the set of '(a, 1)' and '(b, 2)' and so on. Use 'with_together' to get this:: +...and you want the set of '(a, 1)' and '(b, 2)'. Use 'with_together' to get this:: tasks: - debug: @@ -203,7 +290,7 @@ Suppose you want to do something like loop over a list of users, creating them, and allowing them to login by a certain set of SSH keys. -How might that be accomplished? Let's assume you had the following defined and loaded in via "vars_files" or maybe a "group_vars/all" file:: +In this example, we'll assume you have the following defined and loaded in via "vars_files" or maybe a "group_vars/all" file:: --- users: @@ -232,7 +319,7 @@ - "*.*:SELECT" - "DB2.*:ALL" -It might happen like so:: +You could loop over these subelements like this:: - name: Create User user: @@ -260,7 +347,7 @@ priv: "{{ item.0.mysql.privs | join('/') }}" with_subelements: - "{{ users }}" - - "{{ mysql.hosts }}" + - mysql.hosts Subelements walks a list of hashes (aka dictionaries) and then traverses a list with a given (nested sub-)key inside of those records. @@ -278,10 +365,12 @@ Looping over Integer Sequences `````````````````````````````` -``with_sequence`` generates a sequence of items in ascending numerical order. You -can specify a start, end, and an optional step value. +``with_sequence`` generates a sequence of items. You +can specify a start value, an end value, an optional "stride" value that specifies the number of steps to increment the sequence, and an optional printf-style format string. + +Arguments should be specified as key=value pair strings. -Arguments should be specified in key=value pairs. If supplied, the 'format' is a printf style string. +A simple shortcut form of the arguments string is also accepted: ``[start-]end[/stride][:format]``. Numerical values can be specified in decimal, hexadecimal (0x3f8) or octal (0600). Negative numbers are not supported. 
This works as follows:: @@ -304,27 +393,20 @@ name: "{{ item }}" state: present groups: "evens" - with_sequence: - - start: 0 - - end: 32 - - format: testuser%02x + with_sequence: start=0 end=32 format=testuser%02x # create a series of directories with even numbers for some reason - file: dest: "/var/stuff/{{ item }}" state: directory - with_sequence: - - start: 4 - - end: 16 - - stride: 2 + with_sequence: start=4 end=16 stride=2 # a simpler way to use the sequence plugin # create 4 groups - group: name: "group{{ item }}" state: present - with_sequence: - count: 4 + with_sequence: count=4 .. _random_choice: @@ -355,8 +437,7 @@ Sometimes you would want to retry a task until a certain condition is met. Here's an example:: - - action: - shell /usr/bin/foo + - shell: /usr/bin/foo register: result until: result.stdout.find("all systems go") != -1 retries: 5 @@ -368,6 +449,8 @@ The task returns the results returned by the last task run. The results of individual retries can be viewed by -vv option. The registered variable will also have a new key "attempts" which will have the number of the retries for the task. +.. note:: If the "until" parameter isn't defined, the value for the "retries" parameter is forced to 1. + .. _with_first_found: Finding First Matched Files @@ -618,7 +701,7 @@ `````````````````````````` If you wish to loop over the inventory, or just a subset of it, there is multiple ways. -One can use a regular ``with_items`` with the ``play_hosts`` or ``groups`` variables, like this:: +One can use a regular ``with_items`` with the ``ansible_play_batch`` or ``groups`` variables, like this:: # show all the hosts in the inventory - debug: @@ -630,7 +713,7 @@ - debug: msg: "{{ item }}" with_items: - - "{{ play_hosts }}" + - "{{ ansible_play_batch }}" There is also a specific lookup plugin ``inventory_hostnames`` that can be used like this:: @@ -660,7 +743,7 @@ As of Ansible 2.1, the `loop_control` option can be used to specify the name of the variable to be used for the loop:: # main.yml - - include: inner.yml + - include_tasks: inner.yml with_items: - 1 - 2 @@ -724,7 +807,7 @@ for `item`:: # main.yml - - include: inner.yml + - include_tasks: inner.yml with_items: - 1 - 2 @@ -754,7 +837,7 @@ :doc:`playbooks` An introduction to playbooks - :doc:`playbooks_roles` + :doc:`playbooks_reuse_roles` Playbook organization by roles :doc:`playbooks_best_practices` Best practices in playbooks diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_prompts.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_prompts.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_prompts.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_prompts.rst 2017-09-19 17:10:47.000000000 +0000 @@ -27,6 +27,9 @@ - name: "favcolor" prompt: "what is your favorite color?" +.. note:: + Prompts for individual ``vars_prompt`` variables will be skipped for any variable that is already defined through the command line ``--extra-vars`` option, or when running from a non-interactive session (such as cron or Ansible Tower). See :ref:`_passing_variables_on_the_command_line` in the /Variables/ chapter. + If you have a variable that changes infrequently, it might make sense to provide a default value that can be overridden. 
This can be accomplished using the default argument:: diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_python_version.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_python_version.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_python_version.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_python_version.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,69 @@ +.. _pb-py-compat: + +Python Version and Templating +============================= + +Jinja2 templates leverage Python data types and standard functions. This +makes for a rich set of operations that can be performed on data. However, +this also means that certain specifics of the underlying Python becomes +visible to template authors. Since Ansible playbooks use Jinja2 for templates +and variables, this means that playbook authors need to be aware of these +specifics as well. + +Unless otherwise noted, these differences are only of interest when running +Ansible in Python2 versus Python3. Changes within Python2 and Python3 are +generally small enough that they are not visible at the jinja2 level. + +.. _pb-py-compat-dict-views: + +Dictionary Views +---------------- + +In Python2, the :meth:`dict.keys`, :meth:`dict.values`, and :meth:`dict.items` +methods returns a list. Jinja2 returns that to Ansible via a string +representation that Ansible can turn back into a list. In Python3, those +methods return a :ref:`dictionary view ` object. The +string representation that Jinja2 returns for dictionary views cannot be parsed back +into a list by Ansible. It is, however, easy to make this portable by +using the :func:`list ` filter whenever using :meth:`dict.keys`, +:meth:`dict.values`, or :meth:`dict.items`:: + + vars: + hosts: + testhost1: 127.0.0.2 + testhost2: 127.0.0.3 + tasks: + - debug: + msg: '{{ item }}' + # Only works with Python 2 + #with_items: "{{ hosts.keys() }}" + # Works with both Python 2 and Python 3 + with_items: "{{ hosts.keys() | list }}" + +.. _pb-py-compat-iteritems: + +dict.iteritems() +---------------- + +In Python2, dictionaries have :meth:`~dict.iterkeys`, +:meth:`~dict.itervalues`, and :meth:`~dict.iteritems` methods. These methods +have been removed in Python3. Playbooks and Jinja2 templates should use +:meth:`dict.keys`, :meth:`dict.values`, and :meth:`dict.items` in order to be +compatible with both Python2 and Python3:: + + vars: + hosts: + testhost1: 127.0.0.2 + testhost2: 127.0.0.3 + tasks: + - debug: + msg: '{{ item }}' + # Only works with Python 2 + #with_items: "{{ hosts.iteritems() }}" + # Works with both Python 2 and Python 3 + with_items: "{{ hosts.items() | list }}" + +.. seealso:: + * The :ref:`pb-py-compat-dict-views` entry for information on + why the :func:`list filter ` is necessary + here. diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_reuse_includes.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_reuse_includes.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_reuse_includes.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_reuse_includes.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,125 @@ +Including and Importing +======================= + +.. contents:: Topics + +Includes vs. Imports +```````````````````` + +As noted in :doc:`playbooks_reuse`, include and import statements are very similar, however the Ansible executor engine treats them very differently. + +- All ``import*`` statements are pre-processed at the time playbooks are parsed. 
+- All ``include*`` statements are processed as they encountered during the execution of the playbook. + +Please refer to :doc:`playbooks_reuse` for documentation concerning the trade-offs one may encounter when using each type. + +Also be aware that this behaviour changed in 2.4; prior to that Ansible version only ``include`` was available, and it behaved differently depending on context. + +.. versionadded:: 2.4 + +Importing Playbooks +``````````````````` + +It is possible to include playbooks inside a master playbook. For example:: + + --- + - import_playbook: webservers.yml + - import_playbook: databases.yml + +The plays and tasks in each playbook listed will be run in the order they are listed, just as if they had been defined here directly. + +Prior to 2.4 only ``include`` was available and worked for both playbooks and tasks as both import and include. + + +.. versionadded:: 2.4 + +Including and Importing Task Files +`````````````````````````````````` + +Use of included task lists is a great way to define a role that system is going to fulfill. A task include file simply contains a flat list of tasks:: + + # common_tasks.yml + --- + - name: placeholder foo + command: /bin/foo + - name: placeholder bar + command: /bin/bar + +You can then use ``import_tasks`` or ``include_tasks`` to include this file in your main task list:: + + tasks: + - import_tasks: common_tasks.yml + # or + - include_tasks: common_tasks.yml + +You can also pass variables into imports and includes:: + + tasks: + - import_tasks: wordpress.yml wp_user=timmy + - import_tasks: wordpress.yml wp_user=alice + - import_tasks: wordpress.yml wp_user=bob + +Variables can also be passed to include files using an alternative syntax, which also supports structured variables like dictionaries and lists:: + + tasks: + - include_tasks: wordpress.yml + vars: + wp_user: timmy + ssh_keys: + - "{{ lookup('file', 'keys/one.pub') }}" + - "{{ lookup('file', 'keys/two.pub') }}" + +Using either syntax, variables passed in can then be used in the included files. These variables will only be available to tasks within the included file. See :doc:`variable_precedence` for more details on variable inheritance and precedence. + +Task include statements can be used at arbitrary depth. + +.. note:: + Static and dynamic can be mixed, however this is not recommended as it may lead to difficult-to-diagnose bugs in your playbooks. + +Includes and imports can also be used in the ``handlers:`` section; for instance, if you want to define how to restart apache, you only have to do that once for all of your playbooks. You might make a handlers.yml that looks like:: + + # more_handlers.yml + --- + - name: restart apache + service: name=apache state=restarted + +And in your main playbook file:: + + handlers: + - include_tasks: more_handlers.yml + # or + - import_tasks: more_handlers.yml + +.. note:: + Be sure to refer to the limitations/trade-offs for handlers noted in :doc:`playbooks_reuse`. + +You can mix in includes along with your regular non-included tasks and handlers. + +Including and Importing Roles +````````````````````````````` + +Please refer to :doc:`playbooks_reuse_roles` for details on including and importing roles. + +.. 
seealso:: + + :doc:`YAMLSyntax` + Learn about YAML syntax + :doc:`playbooks` + Review the basic Playbook language features + :doc:`playbooks_best_practices` + Various tips about managing playbooks in the real world + :doc:`playbooks_variables` + All about variables in playbooks + :doc:`playbooks_conditionals` + Conditionals in playbooks + :doc:`playbooks_loops` + Loops in playbooks + :doc:`modules` + Learn about available modules + :doc:`dev_guide/developing_modules` + Learn how to extend Ansible by writing your own modules + `GitHub Ansible examples `_ + Complete playbook files from the GitHub project source + `Mailing List `_ + Questions? Help? Ideas? Stop by the list on Google Groups + diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_reuse_roles.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_reuse_roles.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_reuse_roles.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_reuse_roles.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,379 @@ +Roles +===== + +.. contents:: Topics + +.. versionadded:: 1.2 + +Roles are ways of automatically loading certain vars_files, tasks, and handlers based on a known file structure. Grouping content by roles also allows easy sharing of roles with other users. + +Role Directory Structure +```````````````````````` + +Example project structure:: + + site.yml + webservers.yml + fooservers.yml + roles/ + common/ + tasks/ + handlers/ + files/ + templates/ + vars/ + defaults/ + meta/ + webservers/ + tasks/ + defaults/ + meta/ + +Roles expect files to be in certain directory names. Roles must include at least one of these directories, however it is perfectly fine to exclude any which are not being used. When in use, each directory must contain a ``main.yml`` file, which contains the relevant content: + +- ``tasks`` - contains the main list of tasks to be executed by the role. +- ``handlers`` - contains handlers, which may be used by this role or even anywhere outside this role. +- ``defaults`` - default variables for the role (see :doc:`Variables` for more information). +- ``vars`` - other variables for the role (see :doc:`Variables` for more information). +- ``files`` - contains files which can be deployed via this role. +- ``templates`` - contains templates which can be deployed via this role. +- ``meta`` - defines some meta data for this role. See below for more details. + +Other YAML files may be included in certain directories. For example, it is common practice to have platform-specific tasks included from the ``tasks/main.yml`` file:: + + # roles/example/tasks/main.yml + - name: added in 2.4, previouslly you used 'include' + import_tasks: redhat.yml + when: ansible_os_platform|lower == 'redhat' + - import_tasks: debian.yml + when: ansible_os_platform|lower == 'debian' + + # roles/example/tasks/redhat.yml + - yum: + name: "httpd" + state: present + + # roles/example/tasks/debian.yml + - apt: + name: "apache2" + state: present + +Roles may also include modules and other plugin types. For more information, please refer to the :doc:`Embedding Modules and Plugins In Roles` section below. + +Using Roles +``````````` + +The classic (original) way to use roles is via the ``roles:`` option for a given play:: + + --- + - hosts: webservers + roles: + - common + - webservers + +This designates the following behaviors, for each role 'x': + +- If roles/x/tasks/main.yml exists, tasks listed therein will be added to the play. 
+- If roles/x/handlers/main.yml exists, handlers listed therein will be added to the play. +- If roles/x/vars/main.yml exists, variables listed therein will be added to the play. +- If roles/x/defaults/main.yml exists, variables listed therein will be added to the play. +- If roles/x/meta/main.yml exists, any role dependencies listed therein will be added to the list of roles (1.3 and later). +- Any copy, script, template or include tasks (in the role) can reference files in roles/x/{files,templates,tasks}/ (dir depends on task) without having to path them relatively or absolutely. + +When used in this manner, the order of execution for your playbook is as follows: + +- Any ``pre_tasks`` defined in the play. +- Any handlers triggered so far will be run. +- Each role listed in ``roles`` will execute in turn. Any role dependencies defined in the roles ``meta/main.yml`` will be run first, subject to tag filtering and conditionals. +- Any ``tasks`` defined in the play. +- Any handlers triggered so far will be run. +- Any ``post_tasks`` defined in the play. +- Any handlers triggered so far will be run. + +.. note:: + See below for more information regarding role dependencies. + +.. note:: + If using tags with tasks (described later as a means of only running part of a playbook), be sure to also tag your pre_tasks, post_tasks, and role dependencies and pass those along as well, especially if the pre/post tasks and role dependencies are used for monitoring outage window control or load balancing. + +As of Ansible 2.4, you can now use roles inline with any other tasks using ``import_role`` or ``include_role``:: + + --- + + - hosts: webservers + tasks: + - debug: + msg: "before we run our role" + - import_role: + name: example + - include_role: + name: example + - debug: + msg: "after we ran our role" + +When roles are defined in the classic manner, they are treated as static imports and processed during playbook parsing. + +.. note:: + The ``include_role`` option was introduced in Ansible 2.3. The usage has changed slightly as of Ansible 2.4 to match the include (dynamic) vs. import (static) usage. See :doc:`Dynamic vs. Static` for more details. + +The name used for the role can be a simple name (see :doc:`Role Search Path` below), or it can be a fully qualified path:: + + --- + + - hosts: webservers + roles: + - { role: '/path/to/my/roles/common' } + +Roles can accept parameters:: + + --- + + - hosts: webservers + roles: + - common + - { role: foo_app_instance, dir: '/opt/a', app_port: 5000 } + - { role: foo_app_instance, dir: '/opt/b', app_port: 5001 } + +Or, using the newer syntax:: + + --- + + - hosts: webservers + tasks: + - include_role: + name: foo_app_instance + vars: + dir: '/opt/a' + app_port: 5000 + ... + +You can conditionally execute a role. This is not generally recommended with the classic syntax, but is common when using ``import_role`` or ``include_role``:: + + --- + + - hosts: webservers + tasks: + - include_role: + name: some_role + when: "ansible_os_family == 'RedHat'" + +Finally, you may wish to assign tags to the roles you specify. You can do so inline:: + + --- + + - hosts: webservers + roles: + - { role: foo, tags: ["bar", "baz"] } + +Or, again, using the newer syntax:: + + --- + + - hosts: webservers + tasks: + - import_role: + name: foo + tags: + - bar + - baz + +.. note:: + This *tags all of the tasks in that role with the tags specified*, appending to any tags that are specified inside the role. 
The tags in this example will *not* be added to tasks inside an ``include_role``. Tag the ``include_role`` task directly in order to apply tags to tasks in included roles. If you find yourself building a role with lots of tags and you want to call subsets of the role at different times, you should consider just splitting that role into multiple roles.
+
+Role Duplication and Execution
+``````````````````````````````
+
+Ansible will only allow a role to execute once, even if defined multiple times, unless the parameters defined on each definition are different. For example::
+
+    ---
+    - hosts: webservers
+      roles:
+        - foo
+        - foo
+
+Given the above, the role ``foo`` will only be run once.
+
+To make roles run more than once, there are two options:
+
+1. Pass different parameters in each role definition.
+2. Add ``allow_duplicates: true`` to the ``meta/main.yml`` file for the role.
+
+Example 1 - passing different parameters::
+
+    ---
+    - hosts: webservers
+      roles:
+        - { role: foo, message: "first" }
+        - { role: foo, message: "second" }
+
+In this example, because each role definition has different parameters, ``foo`` will run twice.
+
+Example 2 - using ``allow_duplicates: true``::
+
+    # playbook.yml
+    ---
+    - hosts: webservers
+      roles:
+        - foo
+        - foo
+
+    # roles/foo/meta/main.yml
+    ---
+    allow_duplicates: true
+
+In this example, ``foo`` will run twice because we have explicitly enabled it to do so.
+
+Role Default Variables
+``````````````````````
+
+.. versionadded:: 1.3
+
+Role default variables allow you to set default variables for included or dependent roles (see below). To create
+defaults, simply add a ``defaults/main.yml`` file in your role directory. These variables will have the lowest priority
+of any variables available, and can be easily overridden by any other variable, including inventory variables.
+
+Role Dependencies
+`````````````````
+
+.. versionadded:: 1.3
+
+Role dependencies allow you to automatically pull in other roles when using a role. Role dependencies are stored in the ``meta/main.yml`` file contained within the role directory, as noted above. This file should contain a list of roles and parameters to insert before the specified role, such as the following in an example ``roles/myapp/meta/main.yml``::
+
+    ---
+    dependencies:
+      - { role: common, some_parameter: 3 }
+      - { role: apache, apache_port: 80 }
+      - { role: postgres, dbname: blarg, other_parameter: 12 }
+
+.. note::
+    Role dependencies must use the classic role definition style.
+
+Role dependencies are always executed before the role that includes them, and may be recursive. Dependencies also follow the duplication rules specified above: if another role also lists it as a dependency, it will not be run again.
+
+.. note::
+    Always remember that when using ``allow_duplicates: true``, it needs to be in the dependent role's ``meta/main.yml``, not the parent.
+
+For example, a role named ``car`` depends on a role named ``wheel`` as follows::
+
+    ---
+    dependencies:
+      - { role: wheel, n: 1 }
+      - { role: wheel, n: 2 }
+      - { role: wheel, n: 3 }
+      - { role: wheel, n: 4 }
+
+And the ``wheel`` role depends on two roles: ``tire`` and ``brake``.
The ``meta/main.yml`` for wheel would then contain the following:: + + --- + dependencies: + - { role: tire } + - { role: brake } + +And the ``meta/main.yml`` for ``tire`` and ``brake`` would contain the following:: + + --- + allow_duplicates: true + + +The resulting order of execution would be as follows:: + + tire(n=1) + brake(n=1) + wheel(n=1) + tire(n=2) + brake(n=2) + wheel(n=2) + ... + car + +Note that we did not have to use ``allow_duplicates: true`` for ``wheel``, because each instance defined by ``car`` uses different parameter values. + +.. note:: + Variable inheritance and scope are detailed in the :doc:`playbooks_variables`. + +Embedding Modules and Plugins In Roles +`````````````````````````````````````` + +This is an advanced topic that should not be relevant for most users. + +If you write a custom module (see :doc:`dev_guide/developing_modules`) or a plugin (see :doc:`dev_guide/developing_plugins`), you may wish to distribute it as part of a role. +Generally speaking, Ansible as a project is very interested in taking high-quality modules into ansible core for inclusion, so this shouldn't be the norm, but it's quite easy to do. + +A good example for this is if you worked at a company called AcmeWidgets, and wrote an internal module that helped configure your internal software, and you wanted other +people in your organization to easily use this module -- but you didn't want to tell everyone how to configure their Ansible library path. + +Alongside the 'tasks' and 'handlers' structure of a role, add a directory named 'library'. In this 'library' directory, then include the module directly inside of it. + +Assuming you had this:: + + roles/ + my_custom_modules/ + library/ + module1 + module2 + +The module will be usable in the role itself, as well as any roles that are called *after* this role, as follows:: + + + - hosts: webservers + roles: + - my_custom_modules + - some_other_role_using_my_custom_modules + - yet_another_role_using_my_custom_modules + +This can also be used, with some limitations, to modify modules in Ansible's core distribution, such as to use development versions of modules before they are released in production releases. This is not always advisable as API signatures may change in core components, however, and is not always guaranteed to work. It can be a handy way of carrying a patch against a core module, however, should you have good reason for this. Naturally the project prefers that contributions be directed back to github whenever possible via a pull request. + +The same mechanism can be used to embed and distribute plugins in a role, using the same schema. For example, for a filter plugin:: + + roles/ + my_custom_filter/ + filter_plugins + filter1 + filter2 + +They can then be used in a template or a jinja template in any role called after 'my_custom_filter' + +Role Search Path +```````````````` + +Ansible will search for roles in the following way: + +- A ``roles/`` directory, relative to the playbook file. +- By default, in ``/etc/ansible/roles`` + +In Ansible 1.4 and later you can configure an additional roles_path to search for roles. Use this to check all of your common roles out to one location, and share them easily between multiple playbook projects. See :doc:`intro_configuration` for details about how to set this up in ansible.cfg. 
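+
+As a hedged illustration only (the shared directory shown is a placeholder, not an Ansible default), such a common location can be configured in ``ansible.cfg``::
+
+    # ansible.cfg
+    [defaults]
+    # searched for roles after the 'roles/' directory next to the playbook
+    roles_path = /opt/shared/ansible-roles:/etc/ansible/roles
+
+The same value can also be supplied through the ``ANSIBLE_ROLES_PATH`` environment variable.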
+ +Ansible Galaxy +`````````````` + +`Ansible Galaxy `_ is a free site for finding, downloading, rating, and reviewing all kinds of community developed Ansible roles and can be a great way to get a jumpstart on your automation projects. + +You can sign up with social auth, and the download client 'ansible-galaxy' is included in Ansible 1.4.2 and later. + +Read the "About" page on the Galaxy site for more information. + +.. seealso:: + + :doc:`galaxy` + How to share roles on galaxy, role management + :doc:`YAMLSyntax` + Learn about YAML syntax + :doc:`playbooks` + Review the basic Playbook language features + :doc:`playbooks_best_practices` + Various tips about managing playbooks in the real world + :doc:`playbooks_variables` + All about variables in playbooks + :doc:`playbooks_conditionals` + Conditionals in playbooks + :doc:`playbooks_loops` + Loops in playbooks + :doc:`modules` + Learn about available modules + :doc:`dev_guide/developing_modules` + Learn how to extend Ansible by writing your own modules + `GitHub Ansible examples `_ + Complete playbook files from the GitHub project source + `Mailing List `_ + Questions? Help? Ideas? Stop by the list on Google Groups + diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_reuse.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_reuse.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_reuse.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_reuse.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,83 @@ +Creating Reusable Playbooks +=========================== + +.. toctree:: + :maxdepth: 1 + + playbooks_reuse_includes + playbooks_reuse_roles + +While it is possible to write a playbook in one very large file (and you might start out learning playbooks this way), eventually you'll want to reuse files and start to organize things. In Ansible, there are three ways to do this: includes, imports, and roles. + +Includes and imports (added in 2.4) allow users to break up large playbooks into smaller files, which can be used across multiple parent playbooks or even multiple times within the same Playbook. + +Roles allow more than just tasks to be packaged together and can include variables, handlers, or even modules and other plugins. Unlike includes and imports, roles can also be uploaded and shared via Ansible Galaxy. + +Dynamic vs. Static +`````````````````` + +Ansible has two modes of operation for reusable content: dynamic and static. + +In Ansible 2.0, the concept of *dynamic* includes was introduced. Due to some limitations with making all includes dynamic in this way, the ability to force includes to be *static* was introduced in Ansible 2.1. Because the *include* task became overloaded to encompass both static and dynamic syntaxes, and because the default behavior of an include could change based on other options set on the Task, Ansible 2.4 introduces the concept of ``include`` vs. ``import``. + +If you use any ``import*`` Task (``import_playbook``, ``import_tasks``, etc.), it will be *static*. +If you use any ``include*`` Task (``include_tasks``, ``include_role``, etc.), it will be *dynamic*. + +The bare ``include`` task (which was used for both Task files and Playbook-level includes) is still available, however it is now considered *deprecated*. + +Differences Between Static and Dynamic +`````````````````````````````````````` + +The two modes of operation are pretty simple: + +* Ansible pre-processes all static imports during Playbook parsing time. 
+* Dynamic includes are processed during runtime at the point in which that task is encountered. + +When it comes to Ansible task options like ``tags`` and conditional statements (``when:``): + +* For static imports, the parent task options will be copied to all child tasks contained within the import. +* For dynamic includes, the task options will *only* apply to the dynamic task as it is evaluated, and will not be copied to child tasks. + +.. note:: + Roles are a somewhat special case. Prior to Ansible 2.3, roles were always statically included via the special ``roles:`` option for a given play and were always executed first before any other play tasks (unless ``pre_tasks`` were used). Roles can still be used this way, however, Ansible 2.3 introduced the ``include_role`` option to allow roles to be executed inline with other tasks. + +Tradeoffs and Pitfalls Between Includes and Imports +``````````````````````````````````````````````````` + +Using ``include*`` vs. ``import*`` has some advantages as well as some tradeoffs which users should consider when choosing to use each: + +The primary advantage of using ``include*`` statements is looping. When a loop is used with an include, the included tasks or role will be executed once for each item in the loop. + +Using ``include*`` does have some limitations when compared to ``import*`` statements: + +* Tags which only exist inside a dynamic include will not show up in --list-tags output. +* Tasks which only exist inside a dynamic include will not show up in --list-tasks output. +* You cannot use ``notify`` to trigger a handler name which comes from inside a dynamic include (see note below). +* You cannot use ``--start-at-task`` to begin execution at a task inside a dynamic include. + +Using ``import*`` can also have some limitations when compared to dynamic includes: + +* As noted above, loops cannot be used with imports at all. +* When using variables for the target file or role name, variables from inventory sources (host/group vars, etc.) cannot be used. + +.. note:: + Regarding the use of ``notify`` for dynamic tasks: it is still possible to trigger the dynamic include itself, which would result in all tasks within the include being run. + +.. seealso:: + + :doc:`playbooks` + Review the basic Playbook language features + :doc:`playbooks_variables` + All about variables in playbooks + :doc:`playbooks_conditionals` + Conditionals in playbooks + :doc:`playbooks_loops` + Loops in playbooks + :doc:`playbooks_best_practices` + Various tips about managing playbooks in the real world + :doc:`galaxy` + How to share roles on galaxy, role management + `GitHub Ansible examples `_ + Complete playbook files from the GitHub project source + `Mailing List `_ + Questions? Help? Ideas? Stop by the list on Google Groups diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_roles.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_roles.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_roles.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_roles.rst 2017-09-19 17:10:47.000000000 +0000 @@ -3,515 +3,15 @@ .. contents:: Topics -Introduction -```````````` -While it is possible to write a playbook in one very large file (and you might start out learning playbooks this way), -eventually you'll want to reuse files and start to organize things. - -At a basic level, including task files allows you to break up bits of -configuration policy into smaller files. Task includes pull in tasks from other -files. 
Since handlers are tasks too, you can also include handler files from -the 'handler' section. - -See :doc:`playbooks` if you need a review of these concepts. - -Playbooks can also include plays from other playbook files. When that is done, the plays will be inserted into the playbook to form -a longer list of plays. - -When you start to think about it -- tasks, handlers, variables, and so on -- begin to form larger concepts. You start to think about modeling -what something is, rather than how to make something look like something. It's no longer "apply this handful of THINGS to these hosts", you say "these hosts are dbservers" or "these hosts are webservers". In programming, we might call that "encapsulating" how things work. For instance, -you can drive a car without knowing how the engine works. - -Roles in Ansible build on the idea of include files and combine them to form clean, reusable abstractions -- they allow you to focus -more on the big picture and only dive down into the details when needed. - -We'll start with understanding includes so roles make more sense, but our ultimate goal should be understanding roles -- roles -are great and you should use them every time you write playbooks. - -See the `ansible-examples `_ repository on GitHub for lots of examples of all of this -put together. You may wish to have this open in a separate tab as you dive in. - -Task versus Play includes -````````````````````````` -Tasks and plays both use the `include` keyword, but implement the keyword differently. The difference between them is determined by their positioning and content. If the include is inside a play it can only be a 'task' include and include a list of tasks; if it is at the top level, it can only include plays. For example:: - - # this is a 'play' include - - include: listofplays - - - name: another play - hosts: all - tasks: - - debug: msg=hello - - # this is a 'task' include - - include: stuff.yml - -A 'task' include can appear anywhere a task can, but a 'play' include cannot be inside other plays only alongside them at the same level. -While 'task' includes can take other parameters and have the included tasks inherit them, 'play' includes are very limited and most directives do not work. - - -Task Include Files And Encouraging Reuse -```````````````````````````````````````` - -Suppose you want to reuse lists of tasks between plays or playbooks. You can use -include files to do this. Use of included task lists is a great way to define a role -that system is going to fulfill. Remember, the goal of a play in a playbook is to map -a group of systems into multiple roles. Let's see what this looks like... - -A task include file simply contains a flat list of tasks, like so:: - - --- - # possibly saved as tasks/foo.yml - - - name: placeholder foo - command: /bin/foo - - - name: placeholder bar - command: /bin/bar - -Include directives look like this, and can be mixed in with regular tasks in a playbook:: - - tasks: - - - include: tasks/foo.yml - -You can also pass variables into includes. We call this a 'parameterized include'. 
- -For instance, to deploy to multiple wordpress instances, I could -encapsulate all of my wordpress tasks in a single wordpress.yml file, and use -it like so:: - - tasks: - - include: wordpress.yml wp_user=timmy - - include: wordpress.yml wp_user=alice - - include: wordpress.yml wp_user=bob - -Starting in 1.0, variables can also be passed to include files using an alternative syntax, -which also supports structured variables:: - - tasks: - - - include: wordpress.yml - vars: - wp_user: timmy - ssh_keys: - - keys/one.txt - - keys/two.txt - -Using either syntax, variables passed in can then be used in the included files. We'll cover them in :doc:`playbooks_variables`. -You can reference them like this:: - - {{ wp_user }} - -(In addition to the explicitly passed-in parameters, all variables from -the vars section are also available for use here as well.) - - -.. note:: - As of 1.0, task include statements can be used at arbitrary depth. - They were previously limited to a single level, so task includes - could not include other files containing task includes. - -Includes can also be used in the 'handlers' section, for instance, if you -want to define how to restart apache, you only have to do that once for all -of your playbooks. You might make a handlers.yml that looks like:: - - --- - # this might be in a file like handlers/handlers.yml - - name: restart apache - service: name=apache state=restarted - -And in your main playbook file, just include it like so, at the bottom -of a play:: - - handlers: - - include: handlers/handlers.yml - -You can mix in includes along with your regular non-included tasks and handlers. - -Includes can also be used to import one playbook file into another. This allows -you to define a top-level playbook that is composed of other playbooks. - -For example:: - - - name: this is a play at the top level of a file - hosts: all - remote_user: root - - tasks: - - - name: say hi - tags: foo - shell: echo "hi..." - - - include: load_balancers.yml - - include: webservers.yml - - include: dbservers.yml - -Note that you cannot do variable substitution when including one playbook -inside another. - -.. note:: - You can not conditionally pass the location to an include file, - like you can with 'vars_files'. If you find yourself needing to do - this, consider how you can restructure your playbook to be more - class/role oriented. This is to say you cannot use a 'fact' to - decide what include file to use. All hosts contained within the - play are going to get the same tasks. ('*when*' provides some - ability for hosts to conditionally skip tasks). - - -.. _dynamic_static: - -Dynamic versus Static Includes -`````````````````````````````` - -In Ansible 2.0 there were changes on how 'task' includes are processed. The 'play' includes are still 'static' or unchanged. - -In previous versions of Ansible, all includes acted as a pre-processor statement and were read during playbook parsing time. -This created problems with things like inventory variables (like group and host vars, which are not available during the parsing time) were used in the included file name. - -After Ansible 2.0, 'task' includes can be 'dynamic', meaning they are not evaluated until the include task is reached during the play execution. 
-This change allows the reintroduction of loops on include statements, -such as the following:: - - - include: foo.yml param={{item}} - with_items: - - 1 - - 2 - - 3 - -It is also possible to use variables from any source with a dynamic include:: - - - include: "{{inventory_hostname}}.yml" - -Starting in 2.1, Ansible attempts to detect when a 'task' include should be dynamic (read below for details on how detection works). - -.. note:: - When an include statement loads different tasks for different hosts, - the ``linear`` strategy keeps the hosts in lock-step by alternating - which hosts are executing tasks while doing a ``noop`` for all other - hosts. For example, if you had hostA, hostB and hostC with the above - example, hostA would execute all of the tasks in hostA.yml while hostB - and hostC waited. It is generally better to do the above with the - ``free`` strategy, which does not force hosts to execute in lock-step. - -.. note:: - In Ansible 2.0 task includes were always considered dynamic, but since this - created problems in existing playbooks we changed the default in 2.1. - Continue reading below for more details. - -Dynamic includes introduced some other limitations due to the fact that the included -file is not read in until that task is reached during the execution of the play. When using dynamic includes, -it is important to keep these limitations in mind: - -* You cannot use ``notify`` to trigger a handler name which comes from a dynamic include. -* You cannot use ``--start-at-task`` to begin execution at a task inside a dynamic include. -* Tags which only exist inside a dynamic include will not show up in --list-tags output. -* Tasks which only exist inside a dynamic include will not show up in --list-tasks output. - -.. note:: - In Ansible 1.9.x and earlier, an error would be raised if a tag name was - used with ``--tags`` or ``--skip-tags``. This error was disabled in Ansible - 2.0 to prevent incorrect failures with tags which only existed inside of - dynamic includes. - -To work around these limitations, Ansible 2.1 introduces the ``static`` option for includes:: - - - include: foo.yml - static: - -By default, starting in Ansible 2.1, 'task' includes are automatically treated as static rather than -dynamic when the include meets the following conditions: - -* The include does not use any loops -* The included file name does not use any variables -* The ``static`` option is not explicitly disabled (``static: no`` is not present) -* The ansible.cfg options to force static includes (see below) are disabled - -Two options are available in the ansible.cfg configuration for static includes: - -* ``task_includes_static`` - forces all includes in tasks sections to be static. -* ``handler_includes_static`` - forces all includes in handlers sections to be static. - -These options allow users to force playbooks to behave exactly as they did in 1.9.x and before. - -One example on how 'static' vs 'dynamic' behaviour can impact your tasks:: - - - include: "stuff.yml" - static: no - when: verto is defined - -If this task were 'static' the `when` would be inherited by the tasks included, but forcing it to be dynamic, the `when` is now applied to the include task itself. - -.. _roles: - -Roles -````` - -.. versionadded:: 1.2 - -Now that you have learned about tasks and handlers, what is the best way to organize your playbooks? -The short answer is to use roles! Roles are ways of automatically loading certain vars_files, tasks, and -handlers based on a known file structure. 
Grouping content by roles also allows easy sharing of roles with other users. - -Roles are just automation around 'include' directives as described above, and really don't contain much -additional magic beyond some improvements to search path handling for referenced files. However, that can be a big thing! - -Example project structure:: - - site.yml - webservers.yml - fooservers.yml - roles/ - common/ - files/ - templates/ - tasks/ - handlers/ - vars/ - defaults/ - meta/ - webservers/ - files/ - templates/ - tasks/ - handlers/ - vars/ - defaults/ - meta/ - -In a playbook, it would look like this:: - - --- - - hosts: webservers - roles: - - common - - webservers - -This designates the following behaviors, for each role 'x': - -- If roles/x/tasks/main.yml exists, tasks listed therein will be added to the play -- If roles/x/handlers/main.yml exists, handlers listed therein will be added to the play -- If roles/x/vars/main.yml exists, variables listed therein will be added to the play -- If roles/x/defaults/main.yml exists, variables listed therein will be added to the play -- If roles/x/meta/main.yml exists, any role dependencies listed therein will be added to the list of roles (1.3 and later) -- Any copy, script, template or include tasks (in the role) can reference files in roles/x/{files,templates,tasks}/ (dir depends on task) without having to path them relatively or absolutely - -In Ansible 1.4 and later you can configure a roles_path to search for roles. Use this to check all of your common roles out to one location, and share -them easily between multiple playbook projects. See :doc:`intro_configuration` for details about how to set this up in ansible.cfg. - -.. note:: - Role dependencies are discussed below. - -If any files are not present, they are just ignored. So it's ok to not have a 'vars/' subdirectory for the role, -for instance. - -Note, you are still allowed to list tasks, vars_files, and handlers "loose" in playbooks without using roles, -but roles are a good organizational feature and are highly recommended. If there are loose things in the playbook, -the roles are evaluated first. - -Also, should you wish to parameterize roles, by adding variables, you can do so, like this:: - - --- - - - hosts: webservers - roles: - - common - - { role: foo_app_instance, dir: '/opt/a', app_port: 5000 } - - { role: foo_app_instance, dir: '/opt/b', app_port: 5001 } - -While it's probably not something you should do often, you can also conditionally apply roles like so:: - - --- - - - hosts: webservers - roles: - - { role: some_role, when: "ansible_os_family == 'RedHat'" } - -This works by applying the conditional to every task in the role. Conditionals are covered later on in -the documentation. - -Finally, you may wish to assign tags to the roles you specify. You can do so inline:: - - --- - - - hosts: webservers - roles: - - { role: foo, tags: ["bar", "baz"] } - -Note that this *tags all of the tasks in that role with the tags specified*, overriding any tags that are specified inside the role. If you find yourself building a role with lots of tags and you want to call subsets of the role at different times, you should consider just splitting that role into multiple roles. - -If the play still has a 'tasks' section, those tasks are executed after roles are applied. 
- -If you want to define certain tasks to happen before AND after roles are applied, you can do this:: - - --- - - - hosts: webservers - - pre_tasks: - - shell: echo 'hello' - - roles: - - { role: some_role } - - tasks: - - shell: echo 'still busy' - - post_tasks: - - shell: echo 'goodbye' - -.. note:: - If using tags with tasks (described later as a means of only running part of a playbook), - be sure to also tag your pre_tasks and post_tasks and pass those along as well, especially if the pre - and post tasks are used for monitoring outage window control or load balancing. - -Role Default Variables -`````````````````````` - -.. versionadded:: 1.3 - -Role default variables allow you to set default variables for included or dependent roles (see below). To create -defaults, simply add a `defaults/main.yml` file in your role directory. These variables will have the lowest priority -of any variables available, and can be easily overridden by any other variable, including inventory variables. - -Role Dependencies -````````````````` - -.. versionadded:: 1.3 - -Role dependencies allow you to automatically pull in other roles when using a role. Role dependencies are stored in the -`meta/main.yml` file contained within the role directory. This file should contain -a list of roles and parameters to insert before the specified role, such as the following in an example -`roles/myapp/meta/main.yml`:: - - --- - dependencies: - - { role: common, some_parameter: 3 } - - { role: apache, apache_port: 80 } - - { role: postgres, dbname: blarg, other_parameter: 12 } - -Role dependencies can also be specified as a full path, just like top level roles:: - - --- - dependencies: - - { role: '/path/to/common/roles/foo', x: 1 } - -Role dependencies can also be installed from source control repos or tar files (via `galaxy`) using comma separated format of path, an optional version (tag, commit, branch etc) and optional friendly role name (an attempt is made to derive a role name from the repo name or archive filename). Both through the command line or via a requirements.yml passed to ansible-galaxy. - - -Roles dependencies are always executed before the role that includes them, and are recursive. By default, -roles can also only be added as a dependency once - if another role also lists it as a dependency it will -not be run again. This behavior can be overridden by adding `allow_duplicates: yes` to the `meta/main.yml` file. -For example, a role named 'car' could add a role named 'wheel' to its dependencies as follows:: - - --- - dependencies: - - { role: wheel, n: 1 } - - { role: wheel, n: 2 } - - { role: wheel, n: 3 } - - { role: wheel, n: 4 } - -And the `meta/main.yml` for wheel contained the following:: - - --- - allow_duplicates: yes - dependencies: - - { role: tire } - - { role: brake } - -The resulting order of execution would be as follows:: - - tire(n=1) - brake(n=1) - wheel(n=1) - tire(n=2) - brake(n=2) - wheel(n=2) - ... - car - -.. note:: - Variable inheritance and scope are detailed in the :doc:`playbooks_variables`. - -Embedding Modules and Plugins In Roles -`````````````````````````````````````` - -This is an advanced topic that should not be relevant for most users. - -If you write a custom module (see :doc:`dev_guide/developing_modules`) or a plugin (see :doc:`dev_guide/developing_plugins`), you may wish to distribute it as part of a role. 
-Generally speaking, Ansible as a project is very interested in taking high-quality modules into ansible core for inclusion, so this shouldn't be the norm, but it's quite easy to do. - -A good example for this is if you worked at a company called AcmeWidgets, and wrote an internal module that helped configure your internal software, and you wanted other -people in your organization to easily use this module -- but you didn't want to tell everyone how to configure their Ansible library path. - -Alongside the 'tasks' and 'handlers' structure of a role, add a directory named 'library'. In this 'library' directory, then include the module directly inside of it. - -Assuming you had this:: - - roles/ - my_custom_modules/ - library/ - module1 - module2 - -The module will be usable in the role itself, as well as any roles that are called *after* this role, as follows:: - - - - hosts: webservers - roles: - - my_custom_modules - - some_other_role_using_my_custom_modules - - yet_another_role_using_my_custom_modules - -This can also be used, with some limitations, to modify modules in Ansible's core distribution, such as to use development versions of modules before they are released -in production releases. This is not always advisable as API signatures may change in core components, however, and is not always guaranteed to work. It can be a handy -way of carrying a patch against a core module, however, should you have good reason for this. Naturally the project prefers that contributions be directed back -to github whenever possible via a pull request. - -The same mechanism can be used to embed and distribute plugins in a role, using the same schema. For example, for a filter plugin:: - - roles/ - my_custom_filter/ - filter_plugins - filter1 - filter2 - -They can then be used in a template or a jinja template in any role called after 'my_custom_filter' - -Ansible Galaxy -`````````````` - -`Ansible Galaxy `_ is a free site for finding, downloading, rating, and reviewing all kinds of community developed Ansible roles and can be a great way to get a jumpstart on your automation projects. - -You can sign up with social auth, and the download client 'ansible-galaxy' is included in Ansible 1.4.2 and later. - -Read the "About" page on the Galaxy site for more information. +The documentation regarding roles and includes for playbooks have moved. Their new location is here: :doc:`playbooks_reuse`. Please update any links you may have made directly to this page. .. seealso:: :doc:`galaxy` How to share roles on galaxy, role management - :doc:`YAMLSyntax` - Learn about YAML syntax :doc:`playbooks` Review the basic Playbook language features - :doc:`playbooks_best_practices` - Various tips about managing playbooks in the real world - :doc:`playbooks_variables` - All about variables in playbooks - :doc:`playbooks_conditionals` - Conditionals in playbooks - :doc:`playbooks_loops` - Loops in playbooks - :doc:`modules` - Learn about available modules - :doc:`dev_guide/developing_modules` - Learn how to extend Ansible by writing your own modules - `GitHub Ansible examples `_ - Complete playbook files from the GitHub project source - `Mailing List `_ - Questions? Help? Ideas? Stop by the list on Google Groups + :doc:`playbooks_reuse` + Creating reusable Playbooks. 
diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks.rst ansible-2.4.0.0/docs/docsite/rst/playbooks.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks.rst 2017-09-19 17:10:47.000000000 +0000 @@ -19,7 +19,7 @@ :maxdepth: 2 playbooks_intro - playbooks_roles + playbooks_reuse playbooks_variables playbooks_templating playbooks_conditionals diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_special_topics.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_special_topics.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_special_topics.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_special_topics.rst 2017-09-19 17:10:47.000000000 +0000 @@ -21,4 +21,4 @@ playbooks_tags playbooks_vault playbooks_startnstep - playbooks_directives + playbooks_keywords diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_strategies.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_strategies.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_strategies.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_strategies.rst 2017-09-19 17:10:47.000000000 +0000 @@ -32,7 +32,7 @@ :doc:`playbooks` An introduction to playbooks - :doc:`playbooks_roles` + :doc:`playbooks_reuse_roles` Playbook organization by roles `User Mailing List `_ Have a question? Stop by the google group! diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_tags.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_tags.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_tags.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_tags.rst 2017-09-19 17:10:47.000000000 +0000 @@ -4,7 +4,7 @@ If you have a large playbook it may become useful to be able to run a specific part of the configuration without running the whole playbook. Both plays and tasks support a "tags:" attribute for this reason. -You can **ONLY** filter tasks based on tags from the command line with `--tags` or `--skip-tags`. +You can **ONLY** filter tasks based on tags from the command line with ``--tags`` or ``--skip-tags``. Adding "tags:" in any part of a play (including roles) adds those tags to the contained tasks. Example:: @@ -35,7 +35,7 @@ Tag Reuse ``````````````` -You can apply the same tag name to more than one task, in the same file +You can apply the same tag name to more than one task, in the same file or included files. This will run all tasks with that tag. Example:: @@ -62,8 +62,7 @@ Tag Inheritance ``````````````` -You can apply tags to more than tasks, but they ONLY affect the tasks themselves. Applying tags anywhere else is just a -convenience so you don't have to write it on every task:: +You can apply tags to more than tasks, but they ONLY affect the tasks themselves. Applying tags anywhere else is just a convenience so you don't have to write it on every task:: - hosts: all tags: @@ -81,21 +80,31 @@ roles: - { role: webserver, port: 5000, tags: [ 'web', 'foo' ] } -And include statements:: +And import/include statements:: - - include: foo.yml + - import_tasks: foo.yml + tags: [web,foo] + +or:: + + - include_tasks: foo.yml tags: [web,foo] All of these apply the specified tags to EACH task inside the play, included file, or role, so that these tasks can be selectively run when the playbook is invoked with the corresponding tags. +Tags are inherited *down* the dependency chain. 
In order for tags to be applied to a role and all its dependencies, +the tag should be applied to the role, not to all the tasks within a role. + +You can see which tags are applied to tasks by running ``ansible-playbook`` with the ``--list-tasks`` option. You can display all tags using the ``--list-tags`` option. + .. _special_tags: Special Tags ```````````` -There is a special 'always' tag that will always run a task, unless specifically skipped (--skip-tags always) +There is a special ``always`` tag that will always run a task, unless specifically skipped (``--skip-tags always``) Example:: @@ -109,17 +118,17 @@ tags: - tag1 -There are another 3 special keywords for tags, 'tagged', 'untagged' and 'all', which run only tagged, only untagged +There are another 3 special keywords for tags, ``tagged``, ``untagged`` and ``all``, which run only tagged, only untagged and all tasks respectively. -By default ansible runs as if '--tags all' had been specified. +By default ansible runs as if ``--tags all`` had been specified. .. seealso:: :doc:`playbooks` An introduction to playbooks - :doc:`playbooks_roles` + :doc:`playbooks_reuse_roles` Playbook organization by roles `User Mailing List `_ Have a question? Stop by the google group! diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_templating.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_templating.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_templating.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_templating.rst 2017-09-19 17:10:47.000000000 +0000 @@ -14,7 +14,7 @@ playbooks_filters playbooks_tests playbooks_lookups - + playbooks_python_version .. seealso:: @@ -25,7 +25,7 @@ Conditional statements in playbooks :doc:`playbooks_loops` Looping in playbooks - :doc:`playbooks_roles` + :doc:`playbooks_reuse_roles` Playbook organization by roles :doc:`playbooks_best_practices` Best practices in playbooks diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_tests.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_tests.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_tests.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_tests.rst 2017-09-19 17:10:47.000000000 +0000 @@ -83,6 +83,27 @@ .. _path_tests: +.. versionadded:: 2.4 + +You can use `any` and `all` to check if any or all elements in a list are true or not:: + + vars: + mylist: + - 1 + - 3 == 3 + - True + myotherlist: + - False + - True + tasks: + + - debug: msg="all are true!" + when: mylist is all + + - debug: msg="at least one is true" + when: myotherlist|any + + Testing paths ````````````` @@ -106,7 +127,7 @@ when: mypath|samefile(path2) - debug: msg="path is a mount" - when: mypath|ismount + when: mypath|is_mount .. _test_task_results: @@ -154,7 +175,7 @@ All about variables :doc:`playbooks_loops` Looping in playbooks - :doc:`playbooks_roles` + :doc:`playbooks_reuse_roles` Playbook organization by roles :doc:`playbooks_best_practices` Best practices in playbooks diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_variables.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_variables.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_variables.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_variables.rst 2017-09-19 17:10:47.000000000 +0000 @@ -90,7 +90,7 @@ It turns out we've already talked about variables in another place too. 
-As described in :doc:`playbooks_roles`, variables can also be included in the playbook via include files, which may or may +As described in :doc:`playbooks_reuse_roles`, variables can also be included in the playbook via include files, which may or may not be part of an "Ansible Role". Usage of roles is preferred as it provides a nice organizational system. .. _about_jinja2: @@ -124,6 +124,11 @@ pieces of files, or to have other ecosystem tools read Ansible files. Not everyone will need this but it can unlock possibilities. +.. seealso:: + + :doc:`playbooks_templating` + More information about Jinja2 templating + .. _jinja2_filters: Jinja2 Filters @@ -830,12 +835,12 @@ In 2.x, we have made the order of precedence more specific (with the last listed variables winning prioritization): * role defaults [1]_ - * inventory INI or script group vars [2]_ + * inventory file or script group vars [2]_ * inventory group_vars/all * playbook group_vars/all * inventory group_vars/* * playbook group_vars/* - * inventory INI or script host vars [2]_ + * inventory file or script host vars [2]_ * inventory host_vars/* * playbook host_vars/* * host facts @@ -897,8 +902,8 @@ Ansible has 3 main scopes: * Global: this is set by config, environment variables and the command line - * Play: each play and contained structures, vars entries, include_vars, role defaults and vars. - * Host: variables directly associated to a host, like inventory, facts or registered task outputs + * Play: each play and contained structures, vars entries (vars; vars_files; vars_prompt), role defaults and vars. + * Host: variables directly associated to a host, like inventory, include_vars, facts or registered task outputs .. _variable_examples: @@ -941,7 +946,7 @@ Ok, so if you are writing a redistributable role with reasonable defaults, put those in the ``roles/x/defaults/main.yml`` file. This means the role will bring along a default value but ANYTHING in Ansible will override it. It's just a default. That's why it says "defaults" :) -See :doc:`playbooks_roles` for more info about this:: +See :doc:`playbooks_reuse_roles` for more info about this:: --- # file: roles/x/defaults/main.yml @@ -1024,7 +1029,7 @@ Jinja2 filters and their uses :doc:`playbooks_loops` Looping in playbooks - :doc:`playbooks_roles` + :doc:`playbooks_reuse_roles` Playbook organization by roles :doc:`playbooks_best_practices` Best practices in playbooks diff -Nru ansible-2.3.2.0/docs/docsite/rst/playbooks_vault.rst ansible-2.4.0.0/docs/docsite/rst/playbooks_vault.rst --- ansible-2.3.2.0/docs/docsite/rst/playbooks_vault.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/playbooks_vault.rst 2017-09-19 17:10:47.000000000 +0000 @@ -1,5 +1,5 @@ -Vault -===== +Using Vault in playbooks +======================== .. contents:: Topics @@ -9,86 +9,6 @@ For best practices advice, refer to :ref:`best_practices_for_variables_and_vaults`. -.. _what_can_be_encrypted_with_vault: - -What Can Be Encrypted With Vault -```````````````````````````````` - -The vault feature can encrypt any structured data file used by Ansible. This can include "group_vars/" or "host_vars/" inventory variables, variables loaded by "include_vars" or "vars_files", or variable files passed on the ansible-playbook command line with "-e @file.yml" or "-e @file.json". Role variables and defaults are also included! - -Ansible tasks, handlers, and so on are also data so these can be encrypted with vault as well. 
To hide the names of variables that you're using, you can encrypt the task files in their entirety. However, that might be a little too much and could annoy your coworkers :) - -The vault feature can also encrypt arbitrary files, even binary files. If a vault-encrypted file is given as the `src` argument to the `copy` module, the file will be placed at the destination on the target host decrypted (assuming a valid vault password is supplied when running the play). - -As of version 2.3, Ansible also supports encrypting single values inside a YAML file, using the `!vault` tag to let YAML and Ansible know it uses special processing. This feature is covered in more details below. - -.. _creating_files: - -Creating Encrypted Files -```````````````````````` - -To create a new encrypted data file, run the following command:: - - ansible-vault create foo.yml - -First you will be prompted for a password. The password used with vault currently must be the same for all files you wish to use together at the same time. - -After providing a password, the tool will launch whatever editor you have defined with $EDITOR, and defaults to vi (before 2.1 the default was vim). Once you are done with the editor session, the file will be saved as encrypted data. - -The default cipher is AES (which is shared-secret based). - -.. _editing_encrypted_files: - -Editing Encrypted Files -``````````````````````` - -To edit an encrypted file in place, use the `ansible-vault edit` command. -This command will decrypt the file to a temporary file and allow you to edit -the file, saving it back when done and removing the temporary file:: - - ansible-vault edit foo.yml - -.. _rekeying_files: - -Rekeying Encrypted Files -```````````````````````` - -Should you wish to change your password on a vault-encrypted file or files, you can do so with the rekey command:: - - ansible-vault rekey foo.yml bar.yml baz.yml - -This command can rekey multiple data files at once and will ask for the original -password and also the new password. - -.. _encrypting_files: - -Encrypting Unencrypted Files -```````````````````````````` - -If you have existing files that you wish to encrypt, use the `ansible-vault encrypt` command. This command can operate on multiple files at once:: - - ansible-vault encrypt foo.yml bar.yml baz.yml - -.. _decrypting_files: - -Decrypting Encrypted Files -`````````````````````````` - -If you have existing files that you no longer want to keep encrypted, you can permanently decrypt them by running the `ansible-vault decrypt` command. This command will save them unencrypted to the disk, so be sure you do not want `ansible-vault edit` instead:: - - ansible-vault decrypt foo.yml bar.yml baz.yml - -.. _viewing_files: - -Viewing Encrypted Files -``````````````````````` - -*Available since Ansible 1.8* - -If you want to view the contents of an encrypted file without editing it, you can use the `ansible-vault view` command:: - - ansible-vault view foo.yml bar.yml baz.yml - .. _running_a_playbook_with_vault: Running a Playbook With Vault @@ -109,7 +29,7 @@ The password should be a string stored as a single line in the file. .. note:: - You can also set ``ANSIBLE_VAULT_PASSWORD_FILE`` environment variable, e.g. ``ANSIBLE_VAULT_PASSWORD_FILE=~/.vault_pass.txt`` and Ansible will automatically search for the password in that file. + You can also set :envvar:`ANSIBLE_VAULT_PASSWORD_FILE` environment variable, e.g. ``ANSIBLE_VAULT_PASSWORD_FILE=~/.vault_pass.txt`` and Ansible will automatically search for the password in that file. 
If you are using a script instead of a flat file, ensure that it is marked as executable, and that the password is printed to standard output. If your script needs to prompt for data, prompts can be sent to standard error. @@ -118,7 +38,7 @@ (The `--vault-password-file` option can also be used with the :ref:`ansible-pull` command if you wish, though this would require distributing the keys to your nodes, so understand the implications -- vault is more intended for push mode). -.. _single_encryptd_variable: +.. _single_encrypted_variable: Single Encrypted Variable ````````````````````````` @@ -135,34 +55,17 @@ 34623731376664623134383463316265643436343438623266623965636363326136 other_plain_text: othervalue +To create a vaulted variable, use the :ref:`ansible-vault encrypt_string` command. See :ref:`encrypt_string` for details. -This vaulted variable be decrypted with the supplied vault secret and used as a normal variable. The `ansible-vault` command line supports `STDIN` and `STDOUT` for encrypting data on the fly, which can be used from your favorite editor to create these vaulted variables; you just have to be sure to add the `!vault` tag so both Ansible and YAML are aware of the need to decrypt. The `|` is also required, as vault encryption results in a multi-line string. The leading spaces will be ignored and some indentation is required for it to be valid YAML. - -As of version 2.3, one way to generate the inline secret is to use `ansible-vault encrypt_string` which will output the secret to `STDOUT`:: - - $ ansible-vault encrypt_string "42" - !vault-encrypted | - $ANSIBLE_VAULT;1.1;AES256 - - - $ ansible-vault encrypt_string "42" --stdin-name "the_answer" - the_answer: !vault-encrypted | - $ANSIBLE_VAULT;1.1;AES256 - - - $ echo -n "the plaintext to encrypt" | ansible-vault encrypt_string - !vault-encrypted | - $ANSIBLE_VAULT;1.1;AES256 - - -Note the use of `echo -n`. If you use just `echo` the encrypted string will have a new line (`\n`) on the end. +This vaulted variable will be decrypted with the supplied vault secret and used as a normal variable. The ``ansible-vault`` command line supports stdin and stdout for encrypting data on the fly, which can be used from your favorite editor to create these vaulted variables; you just have to be sure to add the ``!vault`` tag so both Ansible and YAML are aware of the need to decrypt. The ``|`` is also required, as vault encryption results in a multi-line string. -.. _speeding_up_vault: +.. _encrypt_string: -Speeding Up Vault Operations -```````````````````````````` +Using encrypt_string +```````````````````` -By default, Ansible uses PyCrypto to encrypt and decrypt vault files. If you have many encrypted files, decrypting them at startup may cause a perceptible delay. To speed this up, install the cryptography package:: +This command will output a string in the above format ready to be included in a YAML file. +The string to encrypt can be provided via stdin, command line args, or via an interactive prompt. - pip install cryptography +See :ref:`encrypt_string_for_use_in_yaml`. diff -Nru ansible-2.3.2.0/docs/docsite/rst/porting_guide_2.0.rst ansible-2.4.0.0/docs/docsite/rst/porting_guide_2.0.rst --- ansible-2.3.2.0/docs/docsite/rst/porting_guide_2.0.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/porting_guide_2.0.rst 2017-09-19 17:10:47.000000000 +0000 @@ -1,14 +1,26 @@ -Porting Guide -============= +.. 
_porting_2.0_guide: +************************* +Ansible 2.0 Porting Guide +************************* + +This section discusses the behavioral changes between Ansible 1.x and Ansible 2.0. + +It is intended to assist in updating your playbooks, plugins and other parts of your Ansible infrastructure so they will work with this version of Ansible. + + +We suggest you read this page along with `Ansible Changelog `_ to understand what updates you may need to make. + +This document is part of a collection on porting. The complete list of porting guides can be found at :ref:`porting guides `. + +.. contents:: Topics Playbook --------- +======== -* backslash escapes When specifying parameters in jinja2 expressions in YAML - dicts, backslashes sometimes needed to be escaped twice. This has been fixed - in 2.0.x so that escaping once works. The following example shows how - playbooks must be modified:: +This section discusses any changes you may need to make to your playbooks. + +.. code-block:: yaml # Syntax in 1.9.x - debug: @@ -83,21 +95,21 @@ * templating (variables in playbooks and template lookups) has improved with regard to keeping the original instead of turning everything into a string. If you need the old behavior, quote the value to pass it around as a string. * Empty variables and variables set to null in yaml are no longer converted to empty strings. They will retain the value of `None`. - You can override the `null_representation` setting to an empty string in your config file by setting the `ANSIBLE_NULL_REPRESENTATION` environment variable. + You can override the `null_representation` setting to an empty string in your config file by setting the :envvar:`ANSIBLE_NULL_REPRESENTATION` environment variable. * Extras callbacks must be whitelisted in ansible.cfg. Copying is no longer necessary but whitelisting in ansible.cfg must be completed. * dnf module has been rewritten. Some minor changes in behavior may be observed. * win_updates has been rewritten and works as expected now. * from 2.0.1 onwards, the implicit setup task from gather_facts now correctly inherits everything from play, but this might cause issues for those setting - `environment` at the play level and depending on `ansible_env` existing. Previouslly this was ignored but now might issue an 'Undefined' error. + `environment` at the play level and depending on `ansible_env` existing. Previously this was ignored but now might issue an 'Undefined' error. Deprecated -~~~~~~~~~~ +---------- While all items listed here will show a deprecation warning message, they still work as they did in 1.9.x. Please note that they will be removed in 2.2 (Ansible always waits two major releases to remove a deprecated feature). -* Bare variables in `with_` loops should instead use the “{{var}}†syntax, which helps eliminate ambiguity. +* Bare variables in ``with_`` loops should instead use the ``"{ {var }}"`` syntax, which helps eliminate ambiguity. * The ansible-galaxy text format requirements file. Users should use the YAML format for requirements instead. -* Undefined variables within a `with_` loop’s list currently do not interrupt the loop, but they do issue a warning; in the future, they will issue an error. +* Undefined variables within a ``with_`` loop’s list currently do not interrupt the loop, but they do issue a warning; in the future, they will issue an error. * Using dictionary variables to set all task parameters is unsafe and will be removed in a future version. 
For example:: - hosts: localhost @@ -126,12 +138,12 @@ * Specifying variables at the top level of a task include statement is no longer supported. For example:: - - include: foo.yml + - include_tasks: foo.yml a: 1 Should now be:: - - include: foo.yml + - include_tasks: foo.yml vars: a: 1 @@ -140,20 +152,20 @@ * Tags (or any directive) should no longer be specified with other parameters in a task include. Instead, they should be specified as an option on the task. For example:: - - include: foo.yml tags=a,b,c + - include_tasks: foo.yml tags=a,b,c Should be:: - - include: foo.yml + - include_tasks: foo.yml tags: [a, b, c] * The first_available_file option on tasks has been deprecated. Users should use the with_first_found option or lookup (‘first_found’, …) plugin. Other caveats -~~~~~~~~~~~~~ +------------- -Here are some corner cases encountered when updating, these are mostly caused by the more stringent parser validation and the capture of errors that were previouslly ignored. +Here are some corner cases encountered when updating. These are mostly caused by the more stringent parser validation and the capture of errors that were previously ignored. * Bad variable composition:: @@ -161,9 +173,7 @@ This worked 'by accident' as the errors were retemplated and ended up resolving the variable, it was never intended as valid syntax and now properly returns an error, use the following instead.:: - with_items: "{{vars['myvar_' + res_of_name]}}" - - Or `hostvars[inventory_hostname]['myvar_' + rest_of_name]` if appropriate. + hostvars[inventory_hostname]['myvar_' + rest_of_name] * Misspelled directives:: @@ -201,32 +211,33 @@ with_items: "{{var1 + var2}}" - The bare feature itself is deprecated as an undefined variable is indistiguishable from a string which makes it difficult to display a proper error. + The bare feature itself is deprecated as an undefined variable is indistinguishable from a string which makes it difficult to display a proper error. Porting plugins ---------------- +=============== In ansible-1.9.x, you would generally copy an existing plugin to create a new one. Simply implementing the methods and attributes that the caller of the plugin expected made it a plugin of that type. In ansible-2.0, most plugins are implemented by subclassing a base class for each plugin type. This way the custom plugin does not need to contain methods which are not customized. Lookup plugins -~~~~~~~~~~~~~~ +-------------- + * lookup plugins ; import version Connection plugins -~~~~~~~~~~~~~~~~~~ +------------------ * connection plugins Action plugins -~~~~~~~~~~~~~~ +-------------- * action plugins Callback plugins -~~~~~~~~~~~~~~~~ +---------------- Although Ansible 2.0 provides a new callback API the old one continues to work for most callback plugins. However, if your callback plugin makes use of @@ -262,15 +273,15 @@ Connection plugins -~~~~~~~~~~~~~~~~~~ +------------------ * connection plugins Hybrid plugins --------------- +============== -In specific cases you may want a plugin that supports both ansible-1.9.x *and* ansible-2.0. Much like porting plugins from v1 to v2, you need to understand how plugins work in each version and support both requirements. It may mean playing tricks on Ansible. +In specific cases you may want a plugin that supports both ansible-1.9.x *and* ansible-2.0. Much like porting plugins from v1 to v2, you need to understand how plugins work in each version and support both requirements. 
Since the ansible-2.0 plugin system is more advanced, it is easier to adapt your plugin to provide similar pieces (subclasses, methods) for ansible-1.9.x as ansible-2.0 expects. This way your code will look a lot cleaner. @@ -290,8 +301,9 @@ Lookup plugins -~~~~~~~~~~~~~~ -As a simple example we are going to make a hybrid ``fileglob`` lookup plugin. The ``fileglob`` lookup plugin is pretty simple to understand +-------------- + +As a simple example we are going to make a hybrid ``fileglob`` lookup plugin. .. code-block:: python @@ -357,28 +369,28 @@ Connection plugins -~~~~~~~~~~~~~~~~~~ +------------------ * connection plugins Action plugins -~~~~~~~~~~~~~~ +-------------- * action plugins Callback plugins -~~~~~~~~~~~~~~~~ +---------------- * callback plugins Connection plugins -~~~~~~~~~~~~~~~~~~ +------------------ * connection plugins Porting custom scripts ----------------------- +====================== -Custom scripts that used the ``ansible.runner.Runner`` API in 1.x have to be ported in 2.x. Please refer to: :doc:`dev_guide//developing_api` +Custom scripts that used the ``ansible.runner.Runner`` API in 1.x have to be ported in 2.x. Please refer to: :doc:`dev_guide/developing_api` diff -Nru ansible-2.3.2.0/docs/docsite/rst/porting_guide_2.3.rst ansible-2.4.0.0/docs/docsite/rst/porting_guide_2.3.rst --- ansible-2.3.2.0/docs/docsite/rst/porting_guide_2.3.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/porting_guide_2.3.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,246 @@ +.. _porting_2.3_guide: + +************************* +Ansible 2.3 Porting Guide +************************* + +This section discusses the behavioral changes between Ansible 2.2 and Ansible 2.3. + +It is intended to assist in updating your playbooks, plugins and other parts of your Ansible infrastructure so they will work with this version of Ansible. + + +We suggest you read this page along with `Ansible Changelog `_ to understand what updates you may need to make. + +This document is part of a collection on porting. The complete list of porting guides can be found at :ref:`porting guides `. + +.. contents:: Topics + +Playbook +======== + +Restructured async to work with action plugins +---------------------------------------------- + +In Ansible 2.2 (and possibly earlier), the `async:` keyword could not be used in conjunction with action plugins such as `service`. This limitation has been removed in Ansible 2.3. + +**NEW** In Ansible 2.3: + + +.. code-block:: yaml + + - name: Install nginx asynchronously + service: + name: nginx + state: restarted + async: 45 + + +OpenBSD version facts +--------------------- + +The `ansible_distribution_release` and `ansible_distribution_version` facts on OpenBSD hosts were reversed in Ansible 2.2 and earlier. This has been changed so that the version fact has the numeric portion and the release fact has the name of the release. + +**OLD** In Ansible 2.2 (and earlier): + + +.. code-block:: yaml + + "ansible_distribution": "OpenBSD" + "ansible_distribution_release": "6.0", + "ansible_distribution_version": "release", + +**NEW** In Ansible 2.3: + + +.. code-block:: yaml + + "ansible_distribution": "OpenBSD", + "ansible_distribution_release": "release", + "ansible_distribution_version": "6.0", + + +Named Blocks +------------ + +Blocks can now have names; this allows you to avoid the ugly `# this block is for...` comments. + + +**NEW** In Ansible 2.3: + + +..
code-block:: yaml + + - name: Block test case + hosts: localhost + tasks: + - name: Attempt to setup foo + block: + - debug: msg='I execute normally' + - command: /bin/false + - debug: msg='I never execute, cause ERROR!' + rescue: + - debug: msg='I caught an error' + - command: /bin/false + - debug: msg='I also never execute :-(' + always: + - debug: msg="this always executes" + + +Use of multiple tags +-------------------- + +Specifying ``--tags`` (or ``--skip-tags``) multiple times on the command line currently leads to the last specified tag overriding all the other specified tags. This behaviour is deprecated. In the future, if you specify --tags multiple times the tags will be merged together. From now on, using ``--tags`` multiple times on one command line will emit a deprecation warning. Setting the ``merge_multiple_cli_tags`` option to True in the ``ansible.cfg`` file will enable the new behaviour. + +In 2.4, the default will be to merge the tags. You can enable the old overwriting behavior via the config option. +In 2.5, multiple ``--tags`` options will be merged with no way to go back to the old behaviour. + + +Other caveats +------------- + +Here are some rare cases that might be encountered when updating. These are mostly caused by the more stringent parser validation and the capture of errors that were previously ignored. + + +* Made ``any_errors_fatal`` inheritable from play to task and all other objects in between. + +Modules +======= + +No major changes in this version. + +Modules removed +--------------- + +No major changes in this version. + +Deprecation notices +------------------- + +The following modules will be removed in Ansible 2.5. Please update your playbooks accordingly. + +* :ref:`ec2_vpc ` +* :ref:`cl_bond ` +* :ref:`cl_bridge ` +* :ref:`cl_img_install ` +* :ref:`cl_interface ` +* :ref:`cl_interface_policy ` +* :ref:`cl_license ` +* :ref:`cl_ports ` +* :ref:`nxos_mtu `, use :ref:`nxos_system ` instead + +Noteworthy module changes +------------------------- + +AWS lambda +^^^^^^^^^^ +The module previously ignored changes that only affected one parameter. Existing deployments may have outstanding changes that this bug fix will apply. + + +Mount +^^^^^ + +Some fixes so bind mounts are not mounted each time the playbook runs. + + +Plugins +======= + +No major changes in this version. + +Porting custom scripts +====================== + +No major changes in this version. + +Networking +========== + +There have been a number of changes to how Networking Modules operate. + +Playbooks should still use ``connection: local``. + +The following changes apply to: + +* dellos6 +* dellos9 +* dellos10 +* eos +* ios +* iosxr +* junos +* sros +* vyos + +Deprecation of top-level connection arguments +--------------------------------------------- + +**OLD** In Ansible 2.2: + +.. code-block:: yaml + + - name: example of using top-level options for connection properties + ios_command: + commands: show version + host: "{{ inventory_hostname }}" + username: cisco + password: cisco + authorize: yes + auth_pass: cisco + +Will result in: + +.. code-block:: yaml + + [WARNING]: argument username has been deprecated and will be removed in a future version + [WARNING]: argument host has been deprecated and will be removed in a future version + [WARNING]: argument password has been deprecated and will be removed in a future version + + +**NEW** In Ansible 2.3: + + +..
code-block:: yaml + + - name: Gather facts + eos_facts: + gather_subset: all + provider: + username: myuser + password: "{{ networkpassword }}" + transport: cli + host: "{{ ansible_host }}" + +delegate_to vs ProxyCommand +--------------------------- + +The new connection framework for Network Modules in Ansible 2.3 no longer supports the use of the +``delegate_to`` directive. In order to use a bastion or intermediate jump host +to connect to network devices, network modules now support the use of +``ProxyCommand``. + +To use ``ProxyCommand`` configure the proxy settings in the Ansible inventory +file to specify the proxy host. + +.. code-block:: ini + + [nxos] + nxos01 + nxos02 + + [nxos:vars] + ansible_ssh_common_args='-o ProxyCommand="ssh -W %h:%p -q bastion01"' + + +With the configuration above, simply build and run the playbook as normal with +no additional changes necessary. The network module will now connect to the +network device by first connecting to the host specified in +``ansible_ssh_common_args`` which is ``bastion01`` in the above example. + + +.. notes: Using ``ProxyCommand`` with passwords via variables + + It is a feature that SSH doesn't support providing passwords via environment variables. + This is done to prevent secrets from leaking out, for example in ``ps`` output. + + We recommend using SSH Keys, and if needed and ssh-agent, rather than passwords, where ever possible. + diff -Nru ansible-2.3.2.0/docs/docsite/rst/porting_guide_2.4.rst ansible-2.4.0.0/docs/docsite/rst/porting_guide_2.4.rst --- ansible-2.3.2.0/docs/docsite/rst/porting_guide_2.4.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/porting_guide_2.4.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,120 @@ +.. _porting_2.4_guide: + +************************* +Ansible 2.4 Porting Guide +************************* + +This section discusses the behavioral changes between Ansible 2.3 and Ansible 2.4. + +It is intended to assist in updating your playbooks, plugins and other parts of your Ansible infrastructure so they will work with this version of Ansible. + + +We suggest you read this page along with `Ansible Changelog `_ to understand what updates you may need to make. + +This document is part of a collection on porting. The complete list of porting guides can be found at :ref:`porting guides `. + +.. contents:: Topics + +Python version +============== + +Ansible will not support Python 2.4 nor 2.5 on the target hosts anymore. Going forward, Python 2.6+ will be required on targets, as already is the case on the controller. + +Deprecated +========== + +Specifying Inventory sources +----------------------------- + +Use of ``--inventory-file`` on the command line is now deprecated. Use ``--inventory`` or ``-i``. +The associated ini configuration key, ``hostfile``, and environment variable, :envvar:`ANSIBLE_HOSTS`, +are also deprecated. Replace them with the configuration key ``inventory`` and environment variable :envvar: `ANSIBLE_INVENTORY`. + +Use of multiple tags +-------------------- + +Specifying ``--tags`` (or ``--skip-tags``) multiple times on the command line currently leads to the last one overriding all the previous ones. This behavior is deprecated. In the future, if you specify --tags multiple times the tags will be merged together. From now on, using ``--tags`` multiple times on one command line will emit a deprecation warning. Setting the ``merge_multiple_cli_tags`` option to True in the ``ansible.cfg`` file will enable the new behavior. 
+ +In 2.4, the default has changed to merge the tags. You can enable the old overwriting behavior via the config option. + +In 2.5, multiple ``--tags`` options will be merged with no way to go back to the old behavior. + + +Other caveats +------------- + +No major changes in this version. + +Modules +======= + +Major changes in popular modules are detailed here: + +* The :ref:`win_shell ` and :ref:`win_command ` modules now properly preserve quoted arguments on the command line. Tasks that attempted to work around the issue by adding extra quotes/escaping may need to be reworked to remove the superfluous escaping. See `Issue 23019 `_ for additional detail. + +Modules removed +--------------- + +The following modules no longer exist: + +* None + +Deprecation notices +------------------- + +The following modules will be removed in Ansible 2.8. Please update your playbooks accordingly. + +* :ref:`azure `, use :ref:`azure_rm_virtualmachine `, which uses the new Resource Manager SDK. +* :ref:`win_msi `, use :ref:`win_package ` instead. + +Noteworthy module changes +------------------------- + +* The :ref:`win_get_url ` module has the dictionary ``win_get_url`` in its results deprecated; its content is now also available directly in the resulting output, like other modules. This dictionary will be removed in Ansible 2.8. +* The :ref:`win_unzip ` module no longer includes the dictionary ``win_unzip`` in its results; the contents are now included directly in the resulting output, like other modules. +* The :ref:`win_package ` module return values ``exit_code`` and ``restart_required`` have been deprecated in favour of ``rc`` and ``reboot_required`` respectively. The deprecated return values will be removed in Ansible 2.6. + + +Plugins +======= + +A new way to configure and document plugins has been introduced. This does not require changes to existing setups but developers should start adapting to the new infrastructure now. More details will be available in the developer documentation for each plugin type. + +Vars plugin changes +------------------- + +There have been many changes to the implementation of vars plugins, but both users and developers should not need to change anything to keep current setups working. Developers should consider changing their plugins to take advantage of new features. + +The most notable difference to users is that vars plugins now get invoked on demand instead of at inventory build time. This should make them more efficient for large inventories, especially when using a subset of the hosts. + +Inventory plugins +----------------- + +Developers should start migrating from hardcoded inventory with dynamic inventory scripts to the new Inventory Plugins. The scripts will still work via the ``script`` inventory plugin but Ansible development efforts will now concentrate on writing plugins rather than enhancing existing scripts. + +Both users and developers should look into the new plugins because they are intended to alleviate the need for many of the hacks and workarounds found in the dynamic inventory scripts. + + +Networking +========== + +There have been a number of changes to how Networking Modules operate. + +Playbooks should still use ``connection: local``. + +Persistent Connection +--------------------- + +The configuration variables ``connection_retries`` and ``connect_interval``, which were added in Ansible 2.3, are now deprecated. For Ansible 2.4 and later, use ``connection_retry_timeout``.
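A hedged sketch of where the replacement setting can live in ``ansible.cfg``; the ``[persistent_connection]`` section name and the value shown are assumptions rather than something stated in the text above, so confirm both against the ``examples/ansible.cfg`` shipped with your release:

.. code-block:: ini

    # ansible.cfg -- replaces the deprecated connection_retries / connect_interval pair
    # (assumed [persistent_connection] section and illustrative timeout value in seconds)
    [persistent_connection]
    connection_retry_timeout = 15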
+ +To control timeouts use ``command_timeout`` rather than the previous top level ``timeout`` variable under ``[default]`` + +See :ref:`Ansible Network debug guide ` for more information. + + +Configuration API +================= + +The configuration system has had some major changes, but users should be unaffected. Developers that were working directly with the previous API should revisit their usage as some methods (for example, ``get_config``) were kept for backwards compatibility but will warn users that the function has been deprecated. + +The new configuration has been designed to minimize the need for code changes in core for new plugins. The plugins just need to document their settings and the configuration system will use the documentation to provide what they need. This is still a work in progress; currently only 'callback' and 'connection' plugins support this. More details will be added to the specific plugin developer guides. diff -Nru ansible-2.3.2.0/docs/docsite/rst/porting_guides.rst ansible-2.4.0.0/docs/docsite/rst/porting_guides.rst --- ansible-2.3.2.0/docs/docsite/rst/porting_guides.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/porting_guides.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,12 @@ +.. _porting_guides: + +********************** +Ansible Porting Guides +********************** + +This section lists porting guides that can help you playbooks, plugins and other parts of your Ansible infrastructure from one version of Ansible to the next. Please note that this is not a complete list. If you believe any extra information would be useful in these pages, you can edit by clicking `Edit on GitHub` on the top right, or raising an issue. + + +* :ref:`Ansible 1.x to 2.0 porting guide ` +* :ref:`Ansible 2.2 to 2.3 porting guide ` +* :ref:`Ansible 2.3 to 2.4 porting guide ` diff -Nru ansible-2.3.2.0/docs/docsite/rst/python_3_support.rst ansible-2.4.0.0/docs/docsite/rst/python_3_support.rst --- ansible-2.3.2.0/docs/docsite/rst/python_3_support.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/python_3_support.rst 2017-09-19 17:10:47.000000000 +0000 @@ -4,6 +4,8 @@ Ansible 2.2 features a tech preview of Python 3 support. This topic discusses how you can test to make sure your modules and playbooks work with Python 3. +.. note:: Ansible supports Python version 3.5 and above only. + .. note:: Technology preview features provide early access to upcoming product innovations, enabling you to test functionality and provide feedback during the development process. @@ -11,30 +13,77 @@ intended for production use. To report a Python 3 bug, please see `Community Information & Contributing `_. Testing Python 3 with commands and playbooks ----------------------------------------------------- +-------------------------------------------- -* `Install Ansible 2.2+ `_ +* `Run Ansible 2.2+ :ref:`from_source` * To test Python 3 on the controller, run your ansible command via - `python3`. For example:: + ``python3``. For example: + +.. code-block:: shell + + python3 /usr/bin/ansible localhost -m ping + python3 /usr/bin/ansible-playbook sample-playbook.yml + +You can also install Ansible using :program:`pip` for Python3 which will make the default +:command:`/usr/bin/ansible` run with Python3: +.. 
code-block:: shell - $ python3 /usr/bin/ansible localhost -m ping - $ python3 /usr/bin/ansible-playbook sample-playbook.yml + $ virtualenv py3-ansible + $ source ./bin/activate + $ pip3 install ansible + $ ansible --version|grep python + python version = 3.5.3 (default, May 10 2017, 15:05:55) [GCC 6.3.1 20161221 (Red Hat 6.3.1-1)] + +.. note:: Individual Linux distribution packages may be packaged for Python2 or Python3. When running from + distro packages you'll only be able to use Ansible with the Python version for which it was + installed. Sometimes distros will provide a means of installing for several Python versions + (via a separate package or via some commands that are run after install). You'll need to check + with your distro to see if that applies in your case. Testing Python 3 module support -------------------------------- * Set the ansible_python_interpreter configuration option to - `/usr/bin/python3`. The `ansible_python_interpreter` configuration option is usually set per-host as inventory variable associated with a host or set of hosts. See the `inventory documentation `_ for more information. -* Run your command or playbook.:: + :command:`/usr/bin/python3`. The ``ansible_python_interpreter`` configuration option is + usually set per-host as an inventory variable associated with a host or group of hosts: + +.. code-block:: ini + + # Example inventory that makes an alias for localhost that uses python3 + [py3-hosts] + localhost-py3 ansible_host=localhost ansible_connection=local + + [py3-hosts:vars] + ansible_python_interpreter=/usr/bin/python3 + + See the :ref:`inventory documentation ` for more information. + +* Run your command or playbook: + +.. code-block:: shell + + ansible localhost-py3 -m ping + ansible-playbook sample-playbook.yml + + +Note that you can also use the :option:`-e` command line option to manually +set the python interpreter when you run a command. For example: + +.. code-block:: shell - $ ansible localhost -m ping - $ ansible-playbook sample-playbook.yml + ansible localhost -m ping -e 'ansible_python_interpreter=/usr/bin/python3' + ansible-playbook sample-playbook.yml -e 'ansible_python_interpreter=/usr/bin/python3' +What to do if an incompatibility is found +----------------------------------------- -Note that you can also use the `-e` command line option to manually set the python interpreter when you run a command. For example:: - - $ ansible localhost -m ping -e 'ansible_python_interpreter=/usr/bin/python3' - $ ansible-playbook sample-playbook.yml -e 'ansible_python_interpreter=/usr/bin/python3' +If you find a bug while testing modules with Python3 you can submit a bug +report on `Ansible's GitHub project +`_. Be sure to mention Python3 in +the bug report so that the right people look at it. +If you would like to fix the code and submit a pull request on github, you can +refer to :doc:`dev_guide/developing_python3` for information on how we fix +common Python3 compatibility issues in the Ansible codebase. diff -Nru ansible-2.3.2.0/docs/docsite/rst/release_and_maintenance.rst ansible-2.4.0.0/docs/docsite/rst/release_and_maintenance.rst --- ansible-2.3.2.0/docs/docsite/rst/release_and_maintenance.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/release_and_maintenance.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,137 @@ +Release and maintenance +======================= + +.. contents:: Topics + :local: + +.. 
_schedule: + +Release cycle +````````````` + +Ansible is developed and released on a flexible 4 months release cycle. +This cycle can be extended in order to allow for larger changes to be properly +implemented and tested before a new release is made available. + +Ansible supports the two most recent major stable releases. +For more information, read about the +`development and stable version maintenance workflow`_. + +If you are using a release of Ansible that is no longer supported, we strongly +encourage you to upgrade as soon as possible in order to benefit from the +latest features and security fixes. + +Older unsupported versions of Ansible can contain unfixed security +vulnerabilities (*CVE*). + +You can refer to the `porting guide`_ for tips on updating your Ansible +playbooks to run on newer versions. + +.. _porting guide: https://docs.ansible.com/ansible/porting_guide_2.0.html + +Release status +`````````````` + ++-----------------+----------------------------+----------------------------------------+ +| Ansible release | Latest version | Status | ++=================+============================+========================================+ +| devel | `2.4`_ (unreleased, trunk) | In development | ++-----------------+----------------------------+----------------------------------------+ +| 2.3 | `2.3.2`_ (2017-08-08) | Supported (bug **and** security fixes) | ++-----------------+----------------------------+----------------------------------------+ +| 2.2   | `2.2.3`_ (2017-05-09) | Supported (**only** security fixes) | ++-----------------+----------------------------+----------------------------------------+ +| 2.1 | `2.1.6`_ (2017-06-01) | Unsupported (end of life) | ++-----------------+----------------------------+----------------------------------------+ +| 2.0 | `2.0.2`_ (2016-04-19) | Unsupported (end of life) | ++-----------------+----------------------------+----------------------------------------+ +| 1.9 | `1.9.6`_ (2016-04-15) | Unsupported (end of life) | ++-----------------+----------------------------+----------------------------------------+ +| <1.9 | n/a | Unsupported (end of life) | ++-----------------+----------------------------+----------------------------------------+ + +.. _2.4: https://github.com/ansible/ansible/blob/devel/CHANGELOG.md +.. _2.3.2: https://github.com/ansible/ansible/blob/stable-2.3/CHANGELOG.md +.. _2.2.3: https://github.com/ansible/ansible/blob/stable-2.2/CHANGELOG.md +.. _2.1.6: https://github.com/ansible/ansible/blob/stable-2.1/CHANGELOG.md +.. _2.0.2: https://github.com/ansible/ansible/blob/stable-2.0/CHANGELOG.md +.. _1.9.6: https://github.com/ansible/ansible/blob/stable-1.9/CHANGELOG.md + +.. _support_life: +.. _methods: + +Development and stable version maintenance workflow +``````````````````````````````````````````````````` + +The Ansible community develops and maintains Ansible on GitHub_. + +New modules, plugins, features and bugfixes will always be integrated in what +will become the next major version of Ansible. +This work is tracked on the ``devel`` git branch. + +Ansible provides bugfixes and security improvements for the most recent major +release while the previous major release will only receive security patches. +This work is tracked on the ``stable-`` git branches. + +The fixes that land in supported stable branches will eventually be released +as a new version when necessary. + +For more information on the changes included in each new version, you can refer +to the changelog_, available on GitHub. 
+ +Note that while there are no guarantees for providing fixes for unsupported +releases of Ansible, there can sometimes be exceptions for critical issues. + +.. _GitHub: https://github.com/ansible/ansible +.. _changelog: https://github.com/ansible/ansible/blob/devel/CHANGELOG.md + +Release candidates +~~~~~~~~~~~~~~~~~~ + +Before a new release or version of Ansible can be done, it will typically go +through a release candidate process. + +This provides the Ansible community the opportunity to test Ansible and report +bugs or issues they might come across. + +Ansible tags the first release candidate (``RC1``) which is usually scheduled +to last five business days. The final release is done if no major bugs or +issues are identified during this period. + +If there are major problems with the first candidate, a second candidate will +be tagged (``RC2``) once the necessary fixes have landed. +This second candidate lasts for a shorter duration than the first. +If no problems have been reported after two business days, the final release is +done. + +More release candidates can be tagged as required, so long as there are +bugs that the Ansible core maintainers consider should be fixed before the +final release. + +.. _freezing: + +Feature freeze +~~~~~~~~~~~~~~ + +While there is a pending release candidate, the focus of core developers and +maintainers will on fixes towards the release candidate. + +Merging new features or fixes that are not related to the release candidate may +be delayed in order to allow the new release to be shipped as soon as possible. + +.. seealso:: + + :doc:`committer_guidelines` + Guidelines for Ansible core contributors and maintainers + :doc:`test_strategies` + Testing strategies + :doc:`community` + Community information and contributing + `Ansible Changelog `_ + Documentation of the improvements for each version of Ansible + `Ansible release tarballs `_ + Ansible release tarballs + `Development Mailing List `_ + Mailing list for development topics + `irc.freenode.net `_ + #ansible IRC chat channel diff -Nru ansible-2.3.2.0/docs/docsite/rst/roadmap/ROADMAP_2_1.rst ansible-2.4.0.0/docs/docsite/rst/roadmap/ROADMAP_2_1.rst --- ansible-2.3.2.0/docs/docsite/rst/roadmap/ROADMAP_2_1.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/roadmap/ROADMAP_2_1.rst 2017-09-19 17:10:47.000000000 +0000 @@ -1,71 +1,105 @@ -**************** +================ Ansible Core 2.1 -**************** -************* -Target: April -************* - -- Windows, General - - Figuring out privilege escalation (runas w/ username/password) - - Implement kerberos encryption over http - - pywinrm conversion to requests (Some mess here on pywinrm/requests. will need docs etc.) - - NTLM support +================ +**Target: April** + +.. contents:: Topics + +Windows +------- +- General + + - Figuring out privilege escalation (runas w/ username/password) + - Implement kerberos encryption over http + - pywinrm conversion to requests (Some mess here on pywinrm/requests. will need docs etc.) + - NTLM support - Modules - - Windows - - Finish cleaning up tests and support for post-beta release - - Strict mode cleanup (one module in core) - - Domain user/group management - - Finish win_host and win_rm in the domain/workgroup modules. 
- - Close 2 existing PRs (These were deemed insufficient) - - Replicate python module API in PS/C# (deprecate hodgepodge of stuff from module_utils/powershell.ps1) - -- Network - - Cisco modules (ios, iosxr, nxos, iosxe) - - Arista modules (eos) - - Juniper modules (junos) - - OpenSwitch - - Cumulus - - Dell (os10) - At risk - - Netconf shared module - - Hooks for supporting Tower credentials -- VMware (This one is a little at risk due to staffing. We're investigating some community maintainers and shifting some people at Ansible around, but it is a VERY high priority). - - vsphere\_guest brought to parity with other vmware modules (vs Viasat and 'whereismyjetpack' provided modules) - - VMware modules moved to official pyvmomi bindings - - VMware inventory script updates for pyvmomi, adding tagging support -- Azure (Notes: This is on hold until Microsoft swaps out the code generator on the Azure Python SDK, which may introduce breaking changes. We have basic modules working against all of these resources at this time. Could ship it against current SDK, but may break. Or should the version be pinned?) - - Minimal Azure coverage using new ARM api - - Resource Group - - Virtual Network - - Subnet - - Public IP - - Network Interface - - Storage Account - - Security Group - - Virtual Machine - - Update of inventory script to use new API, adding tagging support -- Docker: - - Start Docker module refactor - - Update to match current docker CLI capabilities - - Docker exec support -- Upgrade other cloud modules or work with community maintainers to upgrade. (In order) - - AWS (Community maintainers) - - Openstack (Community maintainers) - - Google (Google/Community) - - Digital Ocean (Community) -- Ansiballz (renamed from Ziploader): - - Write code to create the zipfile that gets passed across the wire to be run on the remote python - - Port most of the functionality in module\_utils to be usage in ansiballz instead - - Port a few essential modules to use ansiballz instead of module-replacer as proof of concept - - New modules will be able to use ansiballz. Old modules will need to be ported in future releases (Some modules will not need porting but others will) - - Better testing of modules, caching of modules clientside(Have not yet arrived at an architecture for this that we like), better code sharing between ansible/ansible and modules - - ansiballz is a helpful building block for: python3 porting(high priority), better code sharing between modules(medium priority) - - ansiballz is a good idea before: enabling users to have custom module_utils directories -- Expand module diff support (already in progress in devel) - - Framework done. Need to add to modules, test etc. - - Coordinate with community to update their modules -- Things being kicking down the road that we said we’d do - - NOT remerging core with ansible/ansible this release cycle -- Community stuff - - Define the process/ETA for reviewing PR’s from community - - Publish better docs and how-tos for submitting code/features/fixes + + - Finish cleaning up tests and support for post-beta release + - Strict mode cleanup (one module in core) + - Domain user/group management + - Finish win_host and win_rm in the domain/workgroup modules. 
+ + - Close 2 existing PRs (These were deemed insufficient) + + - Replicate python module API in PS/C# (deprecate hodgepodge of stuff from module_utils/powershell.ps1) + +Network +------- +- Cisco modules (ios, iosxr, nxos, iosxe) +- Arista modules (eos) +- Juniper modules (junos) +- OpenSwitch +- Cumulus +- Dell (os10) - At risk +- Netconf shared module +- Hooks for supporting Tower credentials + +VMware +------ +This one is a little at risk due to staffing. We're investigating some community maintainers and shifting some people at Ansible around, but it is a VERY high priority. + +- vsphere\_guest brought to parity with other vmware modules (vs Viasat and 'whereismyjetpack' provided modules) +- VMware modules moved to official pyvmomi bindings +- VMware inventory script updates for pyvmomi, adding tagging support + +Azure +----- +This is on hold until Microsoft swaps out the code generator on the Azure Python SDK, which may introduce breaking changes. We have basic modules working against all of these resources at this time. Could ship it against current SDK, but may break. Or should the version be pinned?) +- Minimal Azure coverage using new ARM api +- Resource Group +- Virtual Network +- Subnet +- Public IP +- Network Interface +- Storage Account +- Security Group +- Virtual Machine +- Update of inventory script to use new API, adding tagging support + + +Docker +------ +- Start Docker module refactor +- Update to match current docker CLI capabilities +- Docker exec support + +Cloud +----- +Upgrade other cloud modules or work with community maintainers to upgrade. (In order) + +- AWS (Community maintainers) +- Openstack (Community maintainers) +- Google (Google/Community) +- Digital Ocean (Community) + +Ansiballz +--------- +Renamed from Ziploader + +- Write code to create the zipfile that gets passed across the wire to be run on the remote python +- Port most of the functionality in module\_utils to be usage in ansiballz instead +- Port a few essential modules to use ansiballz instead of module-replacer as proof of concept +- New modules will be able to use ansiballz. Old modules will need to be ported in future releases (Some modules will not need porting but others will) +- Better testing of modules, caching of modules clientside(Have not yet arrived at an architecture for this that we like), better code sharing between ansible/ansible and modules +- ansiballz is a helpful building block for: python3 porting(high priority), better code sharing between modules(medium priority) +- ansiballz is a good idea before: enabling users to have custom module_utils directories + +Diff-support +------------ +Expand module diff support (already in progress in devel) + +- Framework done. Need to add to modules, test etc. 
+- Coordinate with community to update their modules + +Other +----- +Things being kicking down the road that we said we’d do + +- NOT remerging core with ansible/ansible this release cycle + +Community +--------- +- Define the process/ETA for reviewing PR’s from community +- Publish better docs and how-tos for submitting code/features/fixes diff -Nru ansible-2.3.2.0/docs/docsite/rst/roadmap/ROADMAP_2_2.rst ansible-2.4.0.0/docs/docsite/rst/roadmap/ROADMAP_2_2.rst --- ansible-2.3.2.0/docs/docsite/rst/roadmap/ROADMAP_2_2.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/roadmap/ROADMAP_2_2.rst 2017-09-19 17:10:47.000000000 +0000 @@ -1,67 +1,89 @@ -**************** +================ Ansible Core 2.2 -**************** -********************** -Target: September 2016 -********************** +================ +**Target: September 2016** -- **Docker** (lead by Chris Houseknecht) +.. contents:: Topics - - Docker_network: **done** - - Docker_volume: Not in this release - - Docker_file: Not in this release. - - Openshift:  oso_deployment, oso_route, oso_service, oso_login (...and possibly others. These are modules being developed to support `ansible-container `_.): Deferred for later release - - Kubernetes: kube_deployment, kube_service, kube_login (...and possibly others. These too are modules being developed to support `ansible-container `_): Deferred for later release +Docker +------ +Lead by Chris Houseknecht + +- Docker_network: **done** +- Docker_volume: Not in this release +- Docker_file: Not in this release. +- Openshift:  oso_deployment, oso_route, oso_service, oso_login (...and possibly others. These are modules being developed to support `ansible-container `_.): Deferred for later release +- Kubernetes: kube_deployment, kube_service, kube_login (...and possibly others. These too are modules being developed to support `ansible-container `_): Deferred for later release + +Extras split from Core +---------------------- +Lead by Jason M and Jimi-c (Targeting 2.2, could move into 2.3). + +Targeted towards the 2.2 release or shortly after, we are planning on splitting Extras out of the “Ansible Core†project.  That means that modules that are shipped with Ansible by default are **only** the modules in ansibl-modules-core.  Ansible extras will become a separate project, managed by the community standard.  Over the next few months we’re going to have a lot of work to do on getting all of the modules in the right places for this to work. + +- Create proposal (Jason or Jimi) +- Review modules for correct location (extras v core) +- Extras is a completely different package (does not install with ansible) +- Library dependencies +- Decide and implement release schedules between Ansible Core and Extras to ensure compatibility and longevity for modules and versions of Ansible. + +Tweaks/Fixes +------------ +- Connection handling stuff. (Toshio K. and Brian C.): This is a stretch goal for 2.2.  
**This work got pushed out** + + - Change connection polling to avoid resource limitations, see ``_ + - ``_ + - Code: https://github.com/kai11/ansible/blob/fix/select_fd_out_of_range_wip/lib/ansible/plugins/connection/ssh.py + + +AWS +--- +Lead by Ryan Brown + +- Pagination for all AWS modules (generic pagination exists, but isn’t used everywhere) (bumped to 2.3) +- Refactoring ec2.py to be more digestible (bumped to 2.3) +- Fix inconsistencies with different authentication methods (STS, environment creds, ~/.aws/credentials) (done) +- AWS Lambda modules (lambda_execute done, others pending) +- Ryan Brown and Robyn Bergeron work on bug/PR triage to reduce backlog (reduced - continuing to work on it) + +Google +------ +Lead by Ryan Brown and Tom Melendez + +- Add support for Google Cloud DNS +- Add support for Google Cloud managed instance groups (done) +- Support restoring instances from snapshots +- Improved handling of scratch disks on instances (done) + +OpenStack +--------- +Lead by Ryan Brown + +Stretch goal for this release + +- Ryan with some help from David Shrewsbury (Zuul/Openstack at RedHat). +- Support Heat stack resources (done) +- Support LBaaS load balancers + +Azure load balancer +------------------- +- Feature parity for AWS ELB (Stretch Goal) + +VMware +------ +Lead by Brian, Jtanner + +- *module/inventory script: port to pyvmomi (jtanner, bcoca)* + **done:** https://github.com/ansible/ansible/pull/15967 +- *inventory script: allow filtering ala ec2 (jtanner) (undergoing PR process)* + **done:** https://github.com/ansible/ansible/pull/15967 +- vsphere: feature parity with whereismyjetpack and viasat modules  + +Windows +------- +Lead by Matt D -- **Extras split from Core** (Team, Community, lead by Jason M and Jimi-c) (Targeting 2.2, could move into 2.3). - Targeted towards the 2.2 release or shortly after, we are planning on splitting Extras out of the “Ansible Core†project.  That means that modules that are shipped with Ansible by default are **only** the modules in ansibl-modules-core.  Ansible extras will become a separate project, managed by the community standard.  Over the next few months we’re going to have a lot of work to do on getting all of the modules in the right places for this to work. - - - Create proposal (Jason or Jimi) - - Review modules for correct location (extras v core) - - Extras is a completely different package (does not install with ansible) - - Library dependencies - - Decide and implement release schedules between Ansible Core and Extras to ensure compatibility and longevity for modules and versions of Ansible. - -- **Tweaks/Fixes** - - - Connection handling stuff. (Toshio K. and Brian C.): This is a stretch goal for 2.2.  
**This work got pushed out** - - - Change connection polling to avoid resource limitations, see ``_ - - ``_ - - Code: https://github.com/kai11/ansible/blob/fix/select_fd_out_of_range_wip/lib/ansible/plugins/connection/ssh.py - -- **Cloud Modules** (Ryan Brown) - - - AWS - - - Pagination for all AWS modules (generic pagination exists, but isn’t used everywhere) (bumped to 2.3) - - Refactoring ec2.py to be more digestible (bumped to 2.3) - - Fix inconsistencies with different authentication methods (STS, environment creds, ~/.aws/credentials) (done) - - AWS Lambda modules (lambda_execute done, others pending) - - Ryan Brown and Robyn Bergeron work on bug/PR triage to reduce backlog (reduced - continuing to work on it) - - Google (Ryan Brown and Tom Melendez) - - - Add support for Google Cloud DNS - - Add support for Google Cloud managed instance groups (done) - - Support restoring instances from snapshots - - Improved handling of scratch disks on instances (done) - - External OpenStack (Stretch goal for this release) - - - Ryan with some help from David Shrewsbury (Zuul/Openstack at RedHat). - - Support Heat stack resources (done) - - Support LBaaS load balancers - - Azure load balancer: Feature parity for AWS ELB (Stretch Goal) - -- **VMware** (Brian, Jtanner) - - - *module/inventory script: port to pyvmomi (jtanner, bcoca)* - **done:** https://github.com/ansible/ansible/pull/15967 - - *inventory script: allow filtering ala ec2 (jtanner) (undergoing PR process)* - **done:** https://github.com/ansible/ansible/pull/15967 - - - vsphere: feature parity with whereismyjetpack and viasat modules  - -- **Windows platform feature parity** (Matt D) +- Feature parity - PS module API (mirror Python module API where appropriate). Note: We don’t necessarily like the current python module API (AnsibleModule is a huge class with many unrelated utility functions.  Maybe we should redesign both at the same time?) 
(bumped to 2.3+ due to "moving target" uncertainty) - Environment keyword support (done) @@ -69,131 +91,149 @@ - Async support (done) - (stretch goal) Pipelining (bumped to 2.3+) -- **Windows-specific enhancements** (Matt D) +- Windows-specific enhancements - Multiple Kerberos credential support (done) - Server 2016 testing/fixes (done, awaiting next TP/RTM) - (stretch goal) Nano Server connection + module_utils working (bumped to 2.3) - (stretch goal) Encrypted kerberos support in pywinrm (bumped to 2.3) -- **Network** (Nate C/Peter S) +Network +------- +Lead by Nate C, Peter S + +- **Done:** Unify NetworkModules (module_utils/network.py) as much as possible  +- **Done:** Add support for config diff and replace on supported platforms (2 weeks) +- **Done:** Support for VyOS network operating system +- **Done:** Add support for RestConf for IOS/XE +- **Done:** Support for Dell Networking OS10 +- **Done:** Add support for Nokia SR OS modules +- **Done:** Network facts modules (dellos, eos, ios, iosxr, junos, nxos, openswitch, vyos) +- **Deferred:** Network facts modules (cumulus, netvisor, sros) +- **Deferred:** Add support for NetConf for IOS/XE +- **Deferred:** (stretch goal) Quagga modules +- **Deferred:** (stretch goal) Bird modules +- **Deferred:** (stretch goal) GoBGP modules + +Role revamp +----------- +- Implement ‘role revamp’ proposal to give users more control on role/task execution (Brian) - - **Done:** Unify NetworkModules (module_utils/network.py) as much as possible  - - **Done:** Add support for config diff and replace on supported platforms (2 weeks) - - **Done:** Support for VyOS network operating system - - **Done:** Add support for RestConf for IOS/XE - - **Done:** Support for Dell Networking OS10 - - **Done:** Add support for Nokia SR OS modules - - **Done:** Network facts modules (dellos, eos, ios, iosxr, junos, nxos, openswitch, vyos) - - **Deferred:** Network facts modules (cumulus, netvisor, sros) - - **Deferred:** Add support for NetConf for IOS/XE - - **Deferred:** (stretch goal) Quagga modules - - **Deferred:** (stretch goal) Bird modules - - **Deferred:** (stretch goal) GoBGP modules + - **https://github.com/ansible/proposals/blob/master/roles_revamp.md** -- **Implement ‘role revamp’ proposal to give users more control on role/task execution (Brian) ** +Vault +----- +Lead by Jtanner, Adrian + +- *Extend ‘transparent vault file usage’ to other action plugins other than 'copy'(https://github.com/ansible/ansible/issues/7298)* + **done:** https://github.com/ansible/ansible/pull/16957 +- Add ‘per variable’ vault support (!vault YAML directive, existing PR already) https://github.com/ansible/ansible/issues/13287 https://github.com/ansible/ansible/issues/14721 +- Add vault/unvault filters https://github.com/ansible/ansible/issues/12087 (deferred to 2.3) +- Add vault support to lookups (likely deferred to 2.3 or until lookup plugins are revamped) +- Allow for multiple vault secrets https://github.com/ansible/ansible/issues/13243 +- Config option to turn ‘unvaulting’ failures into warnings https://github.com/ansible/ansible/issues/13244 + +Python3 +------- +Lead by Toshio + +A note here from Jason M: Getting to complete, tested Python 3 is both +a critical task and one that has so much work and so many moving parts +that we don’t expect this to be complete by the 2.2 release.  Toshio will +lead this overall effort. + +- Motivation: + - Ubuntu LTS (16.04) already ships without python2.  RHEL8 is coming which is also expected to be python3 based. 
These considerations make this high priority. + - Ansible users are getting restless: https://groups.google.com/forum/#!topic/ansible-project/DUKzTho3OCI + - This is probably going to take multiple releases to complete; need to get started now + +- Baselines: + - We're targeting Python-3.5 and above. + +- Goals for 2.2: + + - Tech preview level of support + - Controller-side code can run on Python3 + - Update: Essential features have been shown to work on Python3. + Currently all unittests and all but three integration tests are + passing on Python3. Code has not been line-by-line audited so bugs + remain but can be treated as bugs, not as massive, invasive new features. + - Almost all of our deps have been ported: + + - The base deps in setup.py are ported: ['paramiko', 'jinja2', "PyYAML", 'setuptools', 'pycrypto >= 2.6'] + - python-six from the rpm spec file has been ported + - Python-keyczar from the rpm spec file is not. + - Strategy: removing keyczar when we drop accelerate for 2.3. Print deprecation in 2.1. + + - Module_utils ported to dual python3/python2(2.4 for much of it, python2.6 for specific things) + **Mostly done:** Also not line-by-line audited but the unittests + and integration tests do show that the most use functionality is working. + - Add module_utils files to help port + + - Update: copy of the six library (v1.4.1 for python2.4 compat) and unicode helpers are here (ansible.module_utils._text.{to_bytes,to_text,to_native}) + - A few basic modules ported to python3 - - **https://github.com/ansible/proposals/blob/master/roles_revamp.md** + - Stat module best example module since it’s essential. + - Update: -- **Vault** (Jtanner/Adrian) + - A handful of modules like stat have been line-by-line ported. They should work reliably with few python3-specific bugs. All but three integration tests pass which means that most essential modules are working to some extent on Python3. - - *Extend ‘transparent vault file usage’ to other action plugins other than 'copy'(https://github.com/ansible/ansible/issues/7298)* - **done:** https://github.com/ansible/ansible/pull/16957 - - Add ‘per variable’ vault support (!vault YAML directive, existing PR already) https://github.com/ansible/ansible/issues/13287 https://github.com/ansible/ansible/issues/14721 - - Add vault/unvault filters https://github.com/ansible/ansible/issues/12087 (deferred to 2.3) - - Add vault support to lookups (likely deferred to 2.3 or until lookup plugins are revamped) - - Allow for multiple vault secrets https://github.com/ansible/ansible/issues/13243 - - Config option to turn ‘unvaulting’ failures into warnings https://github.com/ansible/ansible/issues/13244 - -- **Python3** (Toshio) - A note here from Jason M: Getting to complete, tested Python 3 is both - a critical task and one that has so much work and so many moving parts - that we don’t expect this to be complete by the 2.2 release.  Toshio will - lead this overall effort. - - - Motivation: - - Ubuntu LTS (16.04) already ships without python2.  RHEL8 is coming which is also expected to be python3 based. These considerations make this high priority. - - Ansible users are getting restless: https://groups.google.com/forum/#!topic/ansible-project/DUKzTho3OCI - - This is probably going to take multiple releases to complete; need to get started now - - - Baselines: - - We're targeting Python-3.5 and above. 
- - - Goals for 2.2: - - - Tech preview level of support - - Controller-side code can run on Python3 - - Update: Essential features have been shown to work on Python3. - Currently all unittests and all but three integration tests are - passing on Python3. Code has not been line-by-line audited so bugs - remain but can be treated as bugs, not as massive, invasive new features. - - Almost all of our deps have been ported: - - The base deps in setup.py are ported: ['paramiko', 'jinja2', "PyYAML", 'setuptools', 'pycrypto >= 2.6'] - - python-six from the rpm spec file has been ported - - Python-keyczar from the rpm spec file is not. - - Strategy: removing keyczar when we drop accelerate for 2.3. Print deprecation in 2.1. - - - Module_utils ported to dual python3/python2(2.4 for much of it, python2.6 for specific things) - **Mostly done:** Also not line-by-line audited but the unittests - and integration tests do show that the most use functionality is working. - - Add module_utils files to help port - - Update: copy of the six library (v1.4.1 for python2.4 compat) and unicode helpers are here (ansible.module_utils._text.{to_bytes,to_text,to_native}) - - A few basic modules ported to python3 - - Stat module best example module since it’s essential. - - Update: - - - A handful of modules like stat have been line-by-line ported. They should work reliably with few python3-specific bugs. All but three integration tests pass which means that most essential modules are working to some extent on Python3. - - The three failing tests are: service, hg, and uri. - - Note, large swaths of the modules are not tested. The status of + - The three failing tests are: service, hg, and uri. + - Note, large swaths of the modules are not tested. The status of these is unknown - - All code should compile under Python3. - - lib/ansible/* and all modules now compile under Python-3.5 - - Side work to do: - - Figure out best ways to run unit-tests on modules.  Start unit-testing modules.  This is going to become important so we don’t regress python3 or python2.4 support in modules  (Going to largely punt on this for 2.2.  Matt Clay is working on building us a testing foundation for the first half of 2.2 development so we’ll re-evaluate towards the middle of the dev cycle). - - More unit tests of module_utils - - More integration tests. Currently integration tests are the best way to test ansible modules so we have to rely on those. + - All code should compile under Python3. + - lib/ansible/* and all modules now compile under Python-3.5 + + - Side work to do: + - Figure out best ways to run unit-tests on modules.  Start unit-testing modules.  This is going to become important so we don’t regress python3 or python2.4 support in modules  (Going to largely punt on this for 2.2.  Matt Clay is working on building us a testing foundation for the first half of 2.2 development so we’ll re-evaluate towards the middle of the dev cycle). + - More unit tests of module_utils + - More integration tests. Currently integration tests are the best way to test ansible modules so we have to rely on those. - Goals for 2.3: - - Bugfixing, bugfixing, bugfixing. We need community members to test, - submit bugs, and add new unit and integration tests. I'll have some - time allocated both to review any Python3 bugfixes that they submit - and to work on bug reports without PRs. The overall goal is to make - the things that people do in production with Ansible work on Python 3. 
-- **Infrastructure Buildout and Changes** (Matt Clay) - Another note from Jason M: A lot of this work is to ease the burden of CI, CI performance, increase our testing coverage and all of that sort of thing.  It’s not necessarily feature work, but it’s \*\*critical\*\* to growing our product and our ability to get community changes in more securely and quickly. + - Bugfixing, bugfixing, bugfixing. We need community members to test, + submit bugs, and add new unit and integration tests. I'll have some + time allocated both to review any Python3 bugfixes that they submit + and to work on bug reports without PRs. The overall goal is to make + the things that people do in production with Ansible work on Python 3. + +Infrastructure Buildout and Changes +----------------------------------- +Lead by Matt Clay + +Another note from Jason M: A lot of this work is to ease the burden of CI, CI performance, increase our testing coverage and all of that sort of thing.  It’s not necessarily feature work, but it’s \*\*critical\*\* to growing our product and our ability to get community changes in more securely and quickly. - - **CI Performance** - Reduce time spent waiting on CI for PRs. Combination of optimizing existing Travis setup and offloading work to other services. Will be impacted by available budget. +- **CI Performance** + Reduce time spent waiting on CI for PRs. Combination of optimizing existing Travis setup and offloading work to other services. Will be impacted by available budget. - **Done:** Most tests have been migrated from Travis to Shippable. + **Done:** Most tests have been migrated from Travis to Shippable. - - **Core Module Test Organization** - Relocate core module tests to ansible-modules-core to encourage inclusion of tests in core module PRs. +- **Core Module Test Organization** + Relocate core module tests to ansible-modules-core to encourage inclusion of tests in core module PRs. - **Deferred:** Relocation of core module tests has been deferred due to proposed changes in `modules management `_. + **Deferred:** Relocation of core module tests has been deferred due to proposed changes in `modules management `_. - - **Documentation** - Expand documentation on setting up a development and test environment, as well as writing tests. The goal is to ease development for new contributors and encourage more testing, particularly with module contributions. - - **Test Coverage** +- **Documentation** + Expand documentation on setting up a development and test environment, as well as writing tests. The goal is to ease development for new contributors and encourage more testing, particularly with module contributions. +- **Test Coverage** - - Expand test coverage, particularly for CI. Being testing, this is open ended. Will be impacted by available budget. + - Expand test coverage, particularly for CI. Being testing, this is open ended. Will be impacted by available budget. - **Done:** Module PRs now run integration tests for the module(s) being changed. + **Done:** Module PRs now run integration tests for the module(s) being changed. - - Python 3 - Run integration tests using Python 3 on CI with tagging for those which should pass, so we can track progress and detect regressions. + - Python 3 - Run integration tests using Python 3 on CI with tagging for those which should pass, so we can track progress and detect regressions. - **Done:** Integration tests now run on Shippable using a Ubuntu 16.04 docker image with only Python 3 installed. 
+ **Done:** Integration tests now run on Shippable using a Ubuntu 16.04 docker image with only Python 3 installed. - - Windows - Create framework for running Windows integration tests, ideally both locally and on CI. + - Windows - Create framework for running Windows integration tests, ideally both locally and on CI. - **Done:** Windows integration tests now run on Shippable. + **Done:** Windows integration tests now run on Shippable. - - FreeBSD - Include FreeBSD in CI coverage. Not originally on the roadmap, this is an intermediary step for CI coverage for OS X. + - FreeBSD - Include FreeBSD in CI coverage. Not originally on the roadmap, this is an intermediary step for CI coverage for OS X. - **Done:** FreeBSD integration tests now run on Shippable. + **Done:** FreeBSD integration tests now run on Shippable. - - OS X - Include OS X in CI coverage. + - OS X - Include OS X in CI coverage. - **Done:** OS X integration tests now run on Shippable. + **Done:** OS X integration tests now run on Shippable. diff -Nru ansible-2.3.2.0/docs/docsite/rst/roadmap/ROADMAP_2_3.rst ansible-2.4.0.0/docs/docsite/rst/roadmap/ROADMAP_2_3.rst --- ansible-2.3.2.0/docs/docsite/rst/roadmap/ROADMAP_2_3.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/roadmap/ROADMAP_2_3.rst 2017-09-19 17:10:47.000000000 +0000 @@ -1,42 +1,46 @@ -**************************** +============================ Ansible by Red Hat, Core 2.3 -**************************** -*************************** -Target: Mid April 2017 -*************************** +============================ +**Target: Mid April 2017** -- **General Comments from the Core Team** +.. contents:: Topics - - The 2.3 Ansible Core is just a little different than the past two major releases we've done. In addition to feature work, we're using part of the time for this release to reduce some of our backlog in other areas than pure development. - - *Administration:* Clean up our GitHub repos and move to one repo so that contributions, tickets, submissions, etc are centralized and easier for both the community and the Core Team to manage. - - *Metadata:* Move to a Metadata based system for modules. This has been discussed here: https://github.com/ansible/proposals/blob/master/modules-management.md - - *Documentation:* We're aware that Docs have issues. Scott Butler, aka Dharmabumstead will be leading the charge on how he and we as a community can clean them up. - - *Backlog & Stability:* We're spending some of the cycles for 2.3 trying to reduce our ticket/PR backlog, and clean up some particular areas of the project that the community has expressed particular frustrations about. - - *Python 3:* The community and Toshio have done TONS of work getting Python 3 working. Still more to go... - - *Features:* We still have some cool stuff coming. Check it out below. For people on the Networking side of the world, the Persistent Connection Manager will be a *huge* feature and performance gain. +General Comments from the Core Team +----------------------------------- +- The 2.3 Ansible Core is just a little different than the past two major releases we've done. In addition to feature work, we're using part of the time for this release to reduce some of our backlog in other areas than pure development. +- *Administration:* Clean up our GitHub repos and move to one repo so that contributions, tickets, submissions, etc are centralized and easier for both the community and the Core Team to manage. +- *Metadata:* Move to a Metadata based system for modules. 
This has been discussed here: https://github.com/ansible/proposals/blob/master/modules-management.md +- *Documentation:* We're aware that Docs have issues. Scott Butler, aka Dharmabumstead will be leading the charge on how he and we as a community can clean them up. +- *Backlog & Stability:* We're spending some of the cycles for 2.3 trying to reduce our ticket/PR backlog, and clean up some particular areas of the project that the community has expressed particular frustrations about. +- *Python 3:* The community and Toshio have done TONS of work getting Python 3 working. Still more to go... +- *Features:* We still have some cool stuff coming. Check it out below. For people on the Networking side of the world, the Persistent Connection Manager will be a *huge* feature and performance gain. + +Repo Merge +---------- +- Script that a submitter can run to migrate their PR **(done)** +- Script that a committer can run to fork a PR and then merge to ansible/ansible **(mostly done)** +- Move all the issues (remove old ones that can be removed) **(done)** +- Enhance ansibullbot to accommodate the changes (jctanner) **(in progress, going well)** + +Metadata +-------- +- Add metadata to the modules we ship **(done)** +- Write code to use metadata in docs **(done)** +- If needed for python2/3 write code to use metadata in module_common or pluginloader **(not needed)** + +Documentation +------------- +- Update developing_modules **(in progress, will continue in 2.4)** +- Set up rst skeleton for module_utils docs. +- Plugin development docs +- Speed up `make webdocs` https://github.com/ansible/ansible/issues/17406 **(done)** + +Windows +------- +Lead by nitzmahone -- **Repo Merge** - - - Script that a submitter can run to migrate their PR **(done)** - - Script that a committer can run to fork a PR and then merge to ansible/ansible **(mostly done)** - - Move all the issues (remove old ones that can be removed) **(done)** - - Enhance ansibullbot to accommodate the changes (jctanner) **(in progress, going well)** - -- **Metadata** - - - Add metadata to the modules we ship **(done)** - - Write code to use metadata in docs **(done)** - - If needed for python2/3 write code to use metadata in module_common or pluginloader **(not needed)** - -- **Documentation** - - - Update developing_modules **(in progress, will continue in 2.4)** - - Set up rst skeleton for module_utils docs. 
- - Plugin development docs - - Speed up `make webdocs` https://github.com/ansible/ansible/issues/17406 **(done)** - -- **Windows platform** (nitzmahone) +- **Platform** - Pipelining support **(done)** - Become support **(done/experimental)** @@ -48,7 +52,7 @@ - Kerberos encryption (via notting, pywinrm/requests_kerberos/pykerberos) **(in progress, available in pywinrm post 2.3 release)** - Fix plugin-specific connection var lookup/delegation (either registered explicitly by plugins or ansible_(plugin)_*) **(bumped to 2.4)** -- **Windows modules** (nitzmahone) +- **Modules** - win_domain module **(done)** - win_domain_membership module **(done)** @@ -61,93 +65,102 @@ - Updates to win_updates, adopt to core (stretch) **(bump to 2.4)** - Updates to win_package, adopt to core (+ deprecate win_msi) (stretch) **(bump to 2.4)** -- **Azure modules** (nitzmahone/mattclay) - - - Ensure Azure SDK rc6/RTM work **(done)** - - Move tests from ansible/azure_rm repo to ansible/ansible **(bump to 2.4, no CI resources)** - - Update/enhance tests **(bump to 2.4, no CI resources)** - - Expose endpoint overrides (support AzureChinaCloud, Azure Stack) **(bump to 2.4)** - - Get Azure tests running in CI (stretch, depends on availability of sponsored account) **(bump to 2.4, no CI resources)** - - azure_rm_loadbalancer module (stretch) **(bump to 2.4)** +Azure +----- +Lead by nitzmahone, mattclay -- **Networking** +- Ensure Azure SDK rc6/RTM work **(done)** +- Move tests from ansible/azure_rm repo to ansible/ansible **(bump to 2.4, no CI resources)** +- Update/enhance tests **(bump to 2.4, no CI resources)** +- Expose endpoint overrides (support AzureChinaCloud, Azure Stack) **(bump to 2.4)** +- Get Azure tests running in CI (stretch, depends on availability of sponsored account) **(bump to 2.4, no CI resources)** +- azure_rm_loadbalancer module (stretch) **(bump to 2.4)** - - Code stability and tidy up **(done)** - - Extend testing **(done)** - - User facing documentation - - Persistent connection manager **(done)** - - Netconf/YANG implementation (only feature) **(done)** - - Deferred from 2.2: Network facts modules (sros) +Networking +---------- +- Code stability and tidy up **(done)** +- Extend testing **(done)** +- User facing documentation +- Persistent connection manager **(done)** +- Netconf/YANG implementation (only feature) **(done)** +- Deferred from 2.2: Network facts modules (sros) -- **Python3** +Python3 +------- - - For 2.3: +- For 2.3: - - We want all tests to pass + - We want all tests to pass - - Just the mercurial tests left because we haven't created an image with - both python2 and python3 to test it on yet. - - Check by doing ``grep skip/python3 test/integration/targets/*/aliases`` - - If users report bugs on python3, these should be fixed and will prioritize our work on porting other modules. - - Still have to solve the python3-only and python2-only modules. Thinking of doing this via metadata. Will mean we have to use metadata at the module_common level. Will also mean we don’t support py2-only or py3-only old style python modules. - - Note: Most of the currently tested ansible features now run. But there’s still a lot of code that’s untested. + - Just the mercurial tests left because we haven't created an image with + both python2 and python3 to test it on yet. + - Check by doing ``grep skip/python3 test/integration/targets/*/aliases`` -- **Testing and CI** (mattclay) + - If users report bugs on python3, these should be fixed and will prioritize our work on porting other modules. 
- - *Static Code Analysis:* Create custom pylint extensions to automate detection of common Ansible specific issues reported during code review. Automate feedback on PRs for new code only to avoid noise from existing code which does not pass. +- Still have to solve the python3-only and python2-only modules. Thinking of doing this via metadata. Will mean we have to use metadata at the module_common level. Will also mean we don’t support py2-only or py3-only old style python modules. +- Note: Most of the currently tested ansible features now run. But there’s still a lot of code that’s untested. - **Ongoing:** Some static code analysis is now part of the CI process: +Testing and CI +-------------- +Lead by mattclay - - pep8 is now being run by CI, although not all PEP 8 rules are being enforced. - - pylint is now being run by CI, but currently only on the ansible-test portion of codebase. +- *Static Code Analysis:* Create custom pylint extensions to automate detection of common Ansible specific issues reported during code review. Automate feedback on PRs for new code only to avoid noise from existing code which does not pass. - - *Test Reliability:* Eliminate transient test failures by fixing unreliable tests. Reduce network dependencies by moving network resources into httptester. + **Ongoing:** Some static code analysis is now part of the CI process: - **Ongoing:** Many of the frequent sources of test instability have been resolved. However, more work still remains. + - pep8 is now being run by CI, although not all PEP 8 rules are being enforced. + - pylint is now being run by CI, but currently only on the ansible-test portion of codebase. - Some new issues have also appeared, which are currently being worked on. +- *Test Reliability:* Eliminate transient test failures by fixing unreliable tests. Reduce network dependencies by moving network resources into httptester. - - *Enable Remaining Tests:* Implement fixes for OS X, FreeBSD and Python 3 to enable the remaining blacklisted tests for CI. + **Ongoing:** Many of the frequent sources of test instability have been resolved. However, more work still remains. - **Ongoing:** More tests have been enabled for OS X, FreeBSD and Python 3. However, work still remains to enable more tests. + Some new issues have also appeared, which are currently being worked on. - - *Windows Server 2016:* Add Windows Server 2016 to CI when official AMIs become available. +- *Enable Remaining Tests:* Implement fixes for OS X, FreeBSD and Python 3 to enable the remaining blacklisted tests for CI. - **Delayed:** Integration tests pass on Windows Server 2016. However, due to intermittent WinRM issues, the tests have been disabled. + **Ongoing:** More tests have been enabled for OS X, FreeBSD and Python 3. However, work still remains to enable more tests. - Once the issues with WinRM have been resolved, the tests will be re-enabled. +- *Windows Server 2016:* Add Windows Server 2016 to CI when official AMIs become available. - - *Repository Consolidation:* Update CI to maintain and improve upon existing functionality after repository consolidation. + **Delayed:** Integration tests pass on Windows Server 2016. However, due to intermittent WinRM issues, the tests have been disabled. - **Done:** A new test runner, ansible-test, has been deployed to manage CI jobs on Shippable. + Once the issues with WinRM have been resolved, the tests will be re-enabled. 
- Tests executed on PRs are based on the changes made in the PR, for example: +- *Repository Consolidation:* Update CI to maintain and improve upon existing functionality after repository consolidation. - - Changes to a module will only run tests appropriate for that module. - - Changes to Windows modules or the Windows connection plugin run tests on Windows. - - Changes to network modules run tests on the appropriate virtual network device (currently supporting VyOS and IOS). + **Done:** A new test runner, ansible-test, has been deployed to manage CI jobs on Shippable. - Tests executed on merges are based on changes since the last successful merge test. + Tests executed on PRs are based on the changes made in the PR, for example: -- **Amazon resources** (ryansb) + - Changes to a module will only run tests appropriate for that module. + - Changes to Windows modules or the Windows connection plugin run tests on Windows. + - Changes to network modules run tests on the appropriate virtual network device (currently supporting VyOS and IOS). - - Improve ec2.py integration tests **(partial, more to do in 2.4)** - - ELB version 2 **(pushed - needs_revision [PR](https://github.com/ansible/ansible/pull/19491))** - - CloudFormation YAML, cross-stack reference, and roles support **(done)** - - ECS module refactor **(done)** - - AWS module unit testing w/ placebo (boto3 only) **(pushed 2.4)** + Tests executed on merges are based on changes since the last successful merge test. -- **Plugin Loader** +Amazon +------ +Lead by ryansb - - Add module_utils to the plugin loader (feature) [done] - - Split plugin loader: Plugin_search, plugin_loader (modules only use first) [pushed to 2.4] +- Improve ec2.py integration tests **(partial, more to do in 2.4)** +- ELB version 2 **(pushed - needs_revision [PR](https://github.com/ansible/ansible/pull/19491))** +- CloudFormation YAML, cross-stack reference, and roles support **(done)** +- ECS module refactor **(done)** +- AWS module unit testing w/ placebo (boto3 only) **(pushed 2.4)** -- **ansible-ssh** +Plugin Loader +------------- +- Add module_utils to the plugin loader (feature) [done] +- Split plugin loader: Plugin_search, plugin_loader (modules only use first) [pushed to 2.4] - - Add a ‘ansible-ssh’ convenience and debugging tool (will slip to 2.4) - - Tool to invoke an interactive ssh to a host with the same args/env/config that ansible would. - - There are at least three external versions +ansible-ssh +----------- +- Add a ‘ansible-ssh’ convenience and debugging tool (will slip to 2.4) +- Tool to invoke an interactive ssh to a host with the same args/env/config that ansible would. 
+- There are at least three external versions - - https://github.com/2ndQuadrant/ansible-ssh - - https://github.com/haad/ansible-ssh - - https://github.com/mlvnd/ansible-ssh + - https://github.com/2ndQuadrant/ansible-ssh + - https://github.com/haad/ansible-ssh + - https://github.com/mlvnd/ansible-ssh diff -Nru ansible-2.3.2.0/docs/docsite/rst/roadmap/ROADMAP_2_4.rst ansible-2.4.0.0/docs/docsite/rst/roadmap/ROADMAP_2_4.rst --- ansible-2.3.2.0/docs/docsite/rst/roadmap/ROADMAP_2_4.rst 2017-08-08 17:08:31.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/roadmap/ROADMAP_2_4.rst 2017-09-19 17:10:47.000000000 +0000 @@ -1,65 +1,249 @@ -**************************** +============================ Ansible by Red Hat, Core 2.4 -**************************** -********************** -Target: June/July 2017 -********************** +============================ +**Core Engine Freeze and Module Freeze: 15 August 2017** -This is meant to be a living document, and is **DRAFT** until -stated otherwise in the document. +**Core and Curated Module Freeze: 15 August 2017** -- **Python 2.4 and 2.5 support discontinuation** - - Ansible will not support Python 2.4 nor 2.5 on the target hosts anymore. Going forward, Python 2.6+ will be required on targets, as already is the case on the controller. +**Community Module Freeze: 29 August 2017** -- **Ansible-Config** - - New yaml format for config - - Extend the ability of the current config system by adding creating an ansible-config command and add the following: +**Release Candidate 1 will be 06 September, 2017** - - Dump existing config settings +**Target: Mid-September 2017** + +.. contents:: Topics + +Administrivia and Process +------------------------- +- Starting with 2.4, all items that are deprecated will be removed in 4 major releases unless otherwise stated. + + - For example: A module that is deprecated in 2.4 will be removed in 2.8 + +Python 2.4 and 2.5 support discontinuation +------------------------------------------ +- Ansible will not support Python 2.4 nor 2.5 on the target hosts anymore. + Going forward, Python 2.6+ will be required on targets, as already is the case on the controller. + +Python 3 +-------- +- Ansible Core Engine and Core modules will be tested on Python 3 + + - All Core modules now have at least a smoketest integration test. + Additional coverage is welcomed to find more bugs and prevent regressions. + +- Communicate with Linux distros to provide Ansible running on Python 3 + + - Python3 based Ansible packages are now available to run on Fedora Linux + +Ansible-Config +-------------- +- Proposal found in ansible/proposals issue `#35 `_. +- Initial PR of code found in ansible/ansible PR `#12797 `_. **(done)** +- Per plugin configuration (depends on plugin docs below). **(WIP)** +- New yaml format for config **(possibly pushed to future roadmap)** +- Extend the ability of the current config system by adding an ``ansible-config`` command and add the following: + + - Dump existing config settings **(working, fine tuning)** + - Update / write a config entry **(pushed to future roadmap)** + - Show available options (ini entry, yaml, env var, etc) **(working, fine tuning)** + + +Inventory +--------- +**(done, needs docs)** +- Proposal found in ansible/proposals issue `#41 `_. +- Current inventory is overly complex, non modular and mostly still a legacy from inception. 
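To make the inventory item above a little more concrete, here is a short sketch (editorial, not part of the diff) of driving the refactored inventory from Python as the public API looked around 2.4; ``hosts`` is a placeholder inventory path and exact signatures may differ slightly::

    from ansible.parsing.dataloader import DataLoader
    from ansible.inventory.manager import InventoryManager

    loader = DataLoader()                                           # YAML/JSON parsing plus vault handling
    inventory = InventoryManager(loader=loader, sources=['hosts'])  # 'hosts' is a placeholder path
    for host in inventory.get_hosts(pattern='all'):                 # resolve a pattern against all sources
        print(host.name, [group.name for group in host.get_groups()])

Because inventory sources are resolved behind ``InventoryManager``, several sources (INI files, scripts, directories) can be mixed in the ``sources`` list without the caller needing to know which parser handles each one.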
+ +Facts +----- +- Configurable list of ‘fact modules’ for ``gather_facts`` **(done)** +- Fact gathering policy finer grained **(done)** +- Make ``setup.py``/``facts`` more pluggable **(done)** +- Improve testing of ``setup.py``/``facts.py`` **(done)** +- Namespacing fact variables (via a config option) implemented in ansible/ansible PR `#18445 `_. **(done)** + Proposal found in ansible/proposals issue `#17 `_. + +PluginLoader +------------ +**(pushed out to future release)** +- Over the past couple releases we've had some thoughts about how PluginLoader might be better structured + + - Load the loaders via an initialization function(), not when importing + the module. (stretch goal, doesn't impact the CLI) + - Separate duties of ``PluginLoader`` from ``PluginFinder``. Most plugins need + both but Modules and Module_utils only need a PluginFinder + - Write different ``PluginFinder`` subclasses for module_utils and perhaps + Modules. Most Plugin types have a flattened namespace and are single + python files. Modules include code that is not written in python. + Module_utils are vastly different from the other Plugins as they + maintain a hierarchical namespace and are multi-file. + - Potentially split module_utils loader for python from module_utils + loader for powershell. Currently we only support generic module_utils + for python modules. The powershell modules always include a single, + hardcoded powershell module_utils file. If we add generic module_utils + for powershell, we'll need to decide how to organize the code. + +Static Loop Keyword +------------------- +- **Pushed to 2.5** +- Deprecate (not on standard deprecation cycle) ``with_`` in favor of ``loop:`` +- This ``loop:`` will take only a list +- Remove complexity from loops, lookups are still available to users +- Less confusing having a static directive vs a one that is dynamic depending on plugins loaded. + +Vault +----- +- Support for multiple vault passwords. **(done)** + + - Each decrypted item should know which secret to request **(done)** + - Support requesting credentials (password prompt) as callbacks + +- Ability to open and edit file with encrypted vars deencrypted, and encrypt/format on save + +Globalize Callbacks +------------------- +**(pushed out to future release)** +- Make send_callback available to other code that cannot use it. +- Would allow for ‘full formatting’ of output (see JSON callback) +- Fixes static ‘include’ display problem + +Plugins +------- +- Allow plugins to have embedded docs (like modules) **(done)** +- Update ansible-doc and website to generate docs from these ansible/ansible PR `#22796 `_. **(ansible-doc working, todo:website)** + +Group Priorities +---------------- +**(done)** +- Start using existing group priority variable to sort/merge group vars +- Implementation for this in ansible/ansible PR `#22580 `_. 
+- Documentation of group priority variable + +Runtime Check on Modules for Blacklisting +----------------------------------------- +**(pushed out to future release)** +- Filter on things like "supported_by" in module metadata +- Provide users with an option of "warning, error or allow/ignore" +- Configurable via ansible.cfg and environment variable + +Disambiguate Includes +--------------------- +- Create import_x for ‘static includes’ (import_task, import_play, import_role) + + - Any directives are applied to the ‘imported’ tasks + +- Create include_x for ‘dynamic includes’ (include_task, include_role) + + - Any directives apply to the ‘include’ itself + +Windows +------- +- New PS/.NET module API **(in progress)** +- Windows Nano Server support +- Windows module_utils pluginloader **(done)** +- Refactor duplicated module code into new module_utils files **(in progress)** +- Evaluate #Requires directives (existing and new: PS version, OS version, etc) +- Improve module debug support/persistence **(done)** +- Explore official DSC support **(done)** +- Explore module intermediate output +- Explore Powershell module unit testing **(in progress)** +- Explore JEA support (stretch) +- Extended become support with network/service/batch logon types +- Module updates + + - Split "Windows" category into multiple subs + - Domain user/group management modules **(done)** + - win_mapped_drive module **(done)** + - win_hotfix **(done)** + - win_updates rewrite to require become + - win_package changes required to deprecate win_msi **(done)** + - win_copy re-write **(done)** + +AWS +--- +- Focus on pull requests for various modules +- Triage existing merges for modules +- Module work + + - elb-target-groups `#19492 `_, `#24583 `_. **(done)** + - alb* `#19491 `_, `#24584 `_. **(done)** + - ecs `#20618 `_. **(in review process)** + - Data Pipelines `#22878 `_. **(in review process)** + - VPN `#24385 `_. **(in review process)** + - DirectConnect `#26152 `_. **(connection module in review process, several more to come)** + +Azure +----- +- Expose endpoint overrides **(done)** +- Reformat/document module output to collapse internal API structures and surface important data (eg, public IPs, NICs, data disks) **(pushed to future)** +- Add load balancer module **(in progress)** +- Add Azure Functions module **(in progress)** + +Google Cloud Platform +--------------------- +- New Module: DataProc +- Support for Cross-Region HTTP Load Balancing +- New Module: GKE + +Network Roadmap +--------------- +- Removal of ``*_template`` modules **(done)** +- Distributed Continuous Integration Infrastructure **(done)** +- RPC Connection Plugin **(done)** +- Module Work + + - Declarative intent modules **(done)** + - OpenVSwitch **(done)** + - Minimal Viable Platform Agnostic Modules **(done)** + +Contributor Quality of Life +--------------------------- +- All Core and Curated modules will work towards having unit testing. **(edit: integration and/or unit tests)** +- More bot improvements! + + - Bot comments on PRs with details of test failures. **(done)** + +- Test Infrastructure changes + + - Shippable + Bot Integration + + - Provide verified test results to the bot from Shippable so the bot can comment on PRs with CI failures. **(done, compile and sanity tests only)** + - Enable the bot to mark PRs with ``ci_verified`` if all CI failures are verified. **(done)** + + - Windows Server 2016 Integration Tests + + - Restore Windows Server 2016 integration tests on Shippable. 
+ + - Originally enabled during the 2.3 release cycle, but later disabled due to intermittent WinRM issues. + - Depends on resolution of WinRM connection issues. + + - Windows Server Nano Integration Tests **(pushed to future roadmap)** + + - Add support to ansible-core-ci for Windows Server 2016 Nano and enable on Shippable. + - This will use a subset of the existing Windows integration tests. + - Depends on resolution of WinRM connection issues. + + - Windows + Python 3 Tests + + - Run basic Windows tests using Python 3 as the controller. **(partially done, not all planned tests running yet)** + - Depends on resolution of WinRM Python 3 issues. + + - Cloud Integration Tests + + - Run existing cloud integration tests as part of CI for: + + - AWS **(done)** + - Azure **(done)** + - GCP **(pushed to future roadmap)** + + - Tests to be run only on cloud module (and module_utils) PRs and merges for the relevant cloud provider. **(done)** + + - Test Reliability - - Update / write a config entry + - Further improve test reliability to reduce false positives on Shippable. **(ongoing)** + - This continues work from the 2.3 release cycle. - - Show available options (ini entry, yaml, env var, etc) + - Static Code Analysis - - Proposal found in ansible/proposals issue `#35 `_. - - Initial PR of code found in ansible/ansible PR `#12797 `_. - -- **Inventory Overhaul** - - - Current inventory is overtly complex, non modular and mostly still a legacy from inception. We also want to add a common set of features to most inventory sources but are hampered by the current code base. - - Proposal found in ansible/proposals issue `#41 `_. - -- **PluginLoader Refactor** - - - Over the past couple releases we've had some thoughts about how - PluginLoader might be better structured - - - Load the loaders via an initialization function(), not when importing - the module. (stretch goal, doesn't impact the CLI) - - Separate duties of PluginLoader from PluginFinder. Most plugins need - both but Modules and Module_utils only need a PluginFinder - - Write different PluginFinder subclasses for Module_utils and perhaps - Modules. Most Plugin types have a flattened namespace and are single - python files. Modules include code that is not written in python. - Module_utils are vastly different from the other Plugins as they - maintain a hierarchical namespace and are multi-file. - - Potentially split module_utils loader for python from module_utils - loader for powershell. Currently we only support generic module_utils - for python modules. The powershell modules always include a single, - hardcoded powershell module_utils file. If we add generic module_utils - for powershell, we'll need to decide how to organize the code. - -- **Facts Refreshening** - - - Make setup.py/facts more pluggable - - Fact gathering policy finer grained - - Improve testing of setup.py/facts.py - -- **Cloud Provider Support** - - - Focus on pull requests for various modules - - Triage existing merges - -- **Contributor Quality of Life** - - - More bot improvements! + - Further expand the scope and coverage of static analysis. **(ongoing)** + - This continues work from the 2.3 release cycle. diff -Nru ansible-2.3.2.0/docs/docsite/rst/vault.rst ansible-2.4.0.0/docs/docsite/rst/vault.rst --- ansible-2.3.2.0/docs/docsite/rst/vault.rst 1970-01-01 00:00:00.000000000 +0000 +++ ansible-2.4.0.0/docs/docsite/rst/vault.rst 2017-09-19 17:10:47.000000000 +0000 @@ -0,0 +1,414 @@ +Ansible Vault +============= + +.. 
contents:: Topics + +New in Ansible 1.5, "Vault" is a feature of ansible that allows keeping sensitive data such as passwords or keys in encrypted files, rather than as plaintext in your playbooks or roles. These vault files can then be distributed or placed in source control. + +To enable this feature, a command line tool - :ref:`ansible-vault` - is used to edit files, and a command line flag (:option:`--ask-vault-pass ` or :option:`--vault-password-file `) is used. Alternately, you may specify the location of a password file or command Ansible to always prompt for the password in your ansible.cfg file. These options require no command line flag usage. + +For best practices advice, refer to :ref:`best_practices_for_variables_and_vaults`. + +.. _what_can_be_encrypted_with_vault: + +What Can Be Encrypted With Vault +```````````````````````````````` + +The vault feature can encrypt any structured data file used by Ansible. This can include "group_vars/" or "host_vars/" inventory variables, variables loaded by "include_vars" or "vars_files", or variable files passed on the ansible-playbook command line with "-e @file.yml" or "-e @file.json". Role variables and defaults are also included! + +Ansible tasks, handlers, and so on are also data so these can be encrypted with vault as well. To hide the names of variables that you're using, you can encrypt the task files in their entirety. However, that might be a little too much and could annoy your coworkers :) + +The vault feature can also encrypt arbitrary files, even binary files. If a vault-encrypted file is +given as the :ref:`src ` argument to the :ref:`copy `, :ref:`template