Remove `docs` and `examples` directories (#81011)

* Remove docs dir

* Updates to reflect docs removal

* Fix integration test

* Remove examples dir

* Updates to reflect examples removal

* Remove build_library and build-ansible.py

* Remove refs to build_library and build-ansible.py

* Remove obsolete template

* Remove obsolete template reference

* Remove the now obsolete rstcheck sanity test
Matt Clay 2023-07-11 12:40:06 -07:00 committed by GitHub
parent 38e50c9f81
commit 72e038e823
665 changed files with 4 additions and 126820 deletions

@@ -46,7 +46,6 @@ stages:
targets:
- test: 1
- test: 2
- test: 3
- stage: Units
dependsOn: []
jobs:

@@ -16,14 +16,8 @@ group2_include=(
validate-modules
)
group3_include=(
docs-build
sanity-docs
)
group1_exclude=(
"${group2_include[@]}"
"${group3_include[@]}"
)
options=()
@@ -39,11 +33,6 @@ case "${group}" in
options+=(--test "${name}")
done
;;
3)
for name in "${group3_include[@]}"; do
options+=(--test "${name}")
done
;;
esac
# shellcheck disable=SC2086

.github/BOTMETA.yml
@@ -59,20 +59,6 @@ files:
.github/BOTMETA.yml:
labels: botmeta
support: core
docs/:
maintainers:
- acozine
docs/docsite/rst/community/:
maintainers:
- gundalow
docs/docsite/rst/dev_guide/:
maintainers:
- gundalow
docs/docsite/rst/network/:
labels: networking
maintainers:
- samccann
docs/docsite/rst/user_guide/windows: *id001
hacking/report.py:
notified: mattclay
hacking/shippable/:

@@ -2,18 +2,11 @@ include COPYING
include bin/*
include changelogs/CHANGELOG*.rst
include changelogs/changelog.yaml
include examples/ansible.cfg
include examples/hosts
include examples/scripts/ConfigureRemotingForAnsible.ps1
include examples/scripts/upgrade_to_ps3.ps1
include hacking/build-ansible.py
include hacking/templates/*.j2
include hacking/test-module.py
include hacking/update-sanity-requirements.py
include licenses/*.txt
include requirements.txt
recursive-include docs *
recursive-include hacking/build_library *.py
recursive-include packaging *.py *.j2
recursive-include test/integration *
recursive-include test/sanity *.in *.json *.py *.txt

@@ -1,85 +0,0 @@
#!/usr/bin/env python
# To run this script, first make webdocs in the toplevel of the checkout. This will generate all
# rst files from their sources. Then run this script ./docs/bin/find-plugin-refs.py
#
# No output means that there are no longer any bare module and plugin names referenced via :ref:
#
# For my listing of what needs to be changed after running this script, see the comment at the end
# of the file
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import glob
import os
import re
from ansible.module_utils.common.text.converters import to_text
TOPDIR = os.path.join(os.path.dirname(__file__), '..', 'docsite', 'rst')
def plugin_names(topdir):
plugins = set()
# Modules are in a separate directory
for module_filename in glob.glob(os.path.join(topdir, 'modules', '*_module.rst')):
module_filename = os.path.basename(module_filename)
module_name = module_filename[:module_filename.index('_module.rst')]
plugins.add(module_name)
for plugin_filename in glob.glob(os.path.join(topdir, 'plugins', '*', '*.rst')):
plugin_filename = os.path.basename(plugin_filename)
plugin_name = plugin_filename[:plugin_filename.index('.rst')]
plugins.add(plugin_name)
return plugins
def process_refs(topdir, plugin_names):
REF_RE = re.compile(':ref:`([^`]*)`')
LABEL_RE = re.compile('<([^>]*)>$')
# Walk the whole docs tree looking for :ref:. Anywhere those are found, search for `([^`]*)`
for dirpath, dirnames, filenames in os.walk(topdir):
for filename in filenames:
with open(os.path.join(dirpath, filename), 'rb') as f:
data = f.read()
data = to_text(data)
for ref_match in re.finditer(REF_RE, data):
label = ref_match.group(1)
# If the ref label includes "<", then search for the label inside of the "<>"
label_match = re.search(LABEL_RE, label)
if label_match:
label = label_match.group(1)
# If the ref label is listed in plugins, then print that the file contains an unported ref
if label in plugin_names:
print(':ref:`{0}` matching plugin {1} was found in {2}'.format(ref_match.group(1), label, os.path.join(dirpath, filename)))
if __name__ == '__main__':
plugins = plugin_names(TOPDIR)
process_refs(TOPDIR, plugins)
# Fixes needed: docs/bin/plugin_formatter.py
# - t = _MODULE.sub(r":ref:`\1 <\1>`", t)
# + t = _MODULE.sub(r":ref:`\1 <module_\1>`", t)
#
# These have @{module}@ in the template and need to have something like module_@{module}@
# If any of these list plugins as well as modules, they will need to have a conditional or extra
# data passed in to handle that in a generic fashion:
#
# docs/templates/list_of_CATEGORY_modules.rst.j2
# docs/templates/list_of_CATEGORY_plugins.rst.j2
# docs/templates/modules_by_support.rst.j2
#
# These are just a simple manual fix:
# :ref:`command` matching plugin command was found in ./../docsite/rst/user_guide/intro_adhoc.rst
# :ref:`shell` matching plugin shell was found in ./../docsite/rst/user_guide/intro_adhoc.rst
# :ref:`config` matching plugin config was found in ./../docsite/rst/installation_guide/intro_configuration.rst

@@ -1,42 +0,0 @@
#!/bin/sh
set -eux
FILENAME=../docsite/rst/dev_guide/testing/sanity/index.rst
cat <<- EOF >$FILENAME.new
.. _all_sanity_tests:
Sanity Tests
============
The following sanity tests are available as \`\`--test\`\` options for \`\`ansible-test sanity\`\`.
This list is also available using \`\`ansible-test sanity --list-tests --allow-disabled\`\`.
For information on how to run these tests, see :ref:\`sanity testing guide <testing_sanity>\`.
.. toctree::
:maxdepth: 1
$(for test in $(../../bin/ansible-test sanity --list-tests --allow-disabled); do echo " ${test}"; done)
EOF
# By default use sha1sum which exists on Linux, if not present select the correct binary
# based on platform defaults
SHA_CMD="sha1sum"
if ! command -v ${SHA_CMD} > /dev/null 2>&1; then
if command -v sha1 > /dev/null 2>&1; then
SHA_CMD="sha1"
elif command -v shasum > /dev/null 2>&1; then
SHA_CMD="shasum"
else
# exit early with an error if no hashing binary can be found since it is required later
exit 1
fi
fi
# Put file into place if it has changed
if [ ! -f "${FILENAME}" ] || [ "$(${SHA_CMD} <$FILENAME)" != "$(${SHA_CMD} <$FILENAME.new)" ]; then
mv -f $FILENAME.new $FILENAME
fi

@@ -1,19 +0,0 @@
# Old compiled python stuff
*.py[co]
# package building stuff
build
# Emacs backup files...
*~
.\#*
.doctrees
# Generated docs stuff
ansible*.xml
.buildinfo
objects.inv
.doctrees
rst/dev_guide/testing/sanity/index.rst
rst/modules/*.rst
rst/playbooks_keywords.rst
rst/collections/
*.min.css

@@ -1,69 +0,0 @@
{% if is_eol %}
{# Creates a banner at the top of the page for EOL versions. #}
<div id='banner' class='Admonition caution'>
<p>You are reading an unmaintained version of the Ansible documentation. Unmaintained Ansible versions can contain unfixed security vulnerabilities (CVE). Please upgrade to a maintained version. See <a href="/ansible/latest/">the latest Ansible documentation</a>.</p>
</div>
{% else %}
<script>
function startsWith(str, needle) {
return str.slice(0, needle.length) == needle
}
function startsWithOneOf(str, needles) {
return needles.some(function (needle) {
return startsWith(str, needle);
});
}
var banner = '';
var extra_banner = '';
/*use extra_banner for when marketing wants something extra, like a survey or AnsibleFest notice */
var extra_banner =
'<div id="latest_extra_banner_id" class="admonition important">' +
'<br>' +
'<p>' +
'We\'re updating the Ansible community mission statement! Participate in our survey and let us know - <a href="https://www.surveymonkey.co.uk/r/DLG9FJN" target="_blank">What does Ansible mean to you?</a> ' +
'</p>' +
'<br>' +
'</div>';
// Create a banner if we're not on the official docs site
if (location.host == "docs.testing.ansible.com") {
document.write('<div id="testing_banner_id" class="admonition important">' +
'<p>This is the testing site for Ansible Documentation. Unless you are reviewing pre-production changes, please visit the <a href="https://docs.ansible.com/ansible/latest/">official documentation website</a>.</p> <p></p>' +
'</div>');
}
{% if available_versions is defined %}
// Create a banner
current_url_path = window.location.pathname;
var important = false;
var msg = '<p>';
if (startsWith(current_url_path, "/ansible-core/")) {
msg += 'You are reading documentation for Ansible Core, which contains no plugins except for those in ansible.builtin. For documentation of the Ansible package, go to <a href="/ansible/latest">the latest documentation</a>.';
} else if (startsWithOneOf(current_url_path, ["/ansible/latest/", "/ansible/{{ latest_version }}/"])) {
/* temp extra banner to advertise AnsibleFest2021 */
banner += extra_banner;
msg += 'You are reading the <b>latest</b> (stable) community version of the Ansible documentation. If you are a Red Hat customer, refer to the <a href="https://access.redhat.com/support/policy/updates/ansible-automation-platform">Ansible Automation Platform Life Cycle</a> page for subscription details.';
} else if (startsWith(current_url_path, "/ansible/2.9/")) {
msg += 'You are reading the latest Red Hat released version of the Ansible documentation. Community users can use this version, or select <b>latest</b> from the version selector to the left for the most recent community version.';
} else if (startsWith(current_url_path, "/ansible/devel/")) {
/* temp extra banner to advertise AnsibleFest2021 */
banner += extra_banner;
msg += 'You are reading the <b>devel</b> version of the Ansible documentation - this version is not guaranteed stable. Use the version selection to the left if you want the <b>latest</b> (stable) released version.';
} else {
msg += 'You are reading an older version of the Ansible documentation. Use the version selection to the left if you want the <b>latest</b> (stable) released version.';
}
msg += '</p>';
banner += '<div id="banner_id" class="admonition ';
banner += important ? 'important' : 'caution';
banner += '">';
banner += important ? '<br>' : '';
banner += msg;
banner += important ? '<br>' : '';
banner += '</div>';
document.write(banner);
{% endif %}
</script>
{% endif %}

@@ -1,52 +0,0 @@
{%- extends "!breadcrumbs.html" %}
{%- block breadcrumbs_aside %}
<li class="wy-breadcrumbs-aside">
{%- if hasdoc(pagename) and display_vcs_links %}
{%- if display_github %}
{%- if check_meta and 'github_url' in meta %}
<!-- User defined GitHub URL -->
<a href="{{ meta['github_url'] }}" class="fa fa-github"> {{ _('Edit on GitHub') }}</a>
{%- else %}
<!-- Ansible-specific additions for modules etc -->
{% if check_meta and pagename.endswith((
'_module', '_become', '_cache', '_callback',
'_connection', '_inventory', '_lookup',
'_shell', '_strategy', '_vars',
)) %}
{# <a href="https://{{ github_host|default("github.com") }}/{{ github_user }}/{{ github_repo }}/{{ theme_vcs_pageview_mode or "blob" }}/{{ github_module_version }}{{ meta.get('source', '') }}?description=%23%23%23%23%23%20SUMMARY%0A%3C!---%20Your%20description%20here%20--%3E%0A%0A%0A%23%23%23%23%23%20ISSUE%20TYPE%0A-%20Docs%20Pull%20Request%0A%0A%2Blabel:%20docsite_pr" class="fa fa-github"> {{ _('Edit on GitHub') }}</a> #}
<br>
<!-- Remove main index page as it is no longer editable -->
{% elif pagename == 'index' %}
<br>
<!-- Remove all pages under collections/ as no longer editable -->
{% elif pagename.startswith('collections/') %}
<br>
{% elif check_meta and pagename.startswith('cli') and meta.get('source', None) %}
<a href="https://{{ github_host|default("github.com") }}/{{ github_user }}/{{ github_repo }}/{{ theme_vcs_pageview_mode or "blob" }}/{{ github_cli_version }}{{ meta.get('source', '') }}?description=%23%23%23%23%23%20SUMMARY%0A%3C!---%20Your%20description%20here%20--%3E%0A%0A%0A%23%23%23%23%23%20ISSUE%20TYPE%0A-%20Docs%20Pull%20Request%0A%0A%2Blabel:%20docsite_pr" class="fa fa-github"> {{ _('Edit on GitHub') }}</a>
{% elif (not 'list_of' in pagename) and (not 'category' in pagename) %}
<a href="https://{{ github_host|default("github.com") }}/{{ github_user }}/{{ github_repo }}/{{ theme_vcs_pageview_mode or "blob" }}/{{ github_version }}{{ conf_py_path }}{{ pagename }}{{ page_source_suffix }}?description=%23%23%23%23%23%20SUMMARY%0A%3C!---%20Your%20description%20here%20--%3E%0A%0A%0A%23%23%23%23%23%20ISSUE%20TYPE%0A-%20Docs%20Pull%20Request%0A%0A%2Blabel:%20docsite_pr" class="fa fa-github"> {{ _('Edit on GitHub') }}</a>
{% endif %}
{%- endif %}
{%- elif display_bitbucket %}
{%- if check_meta and 'bitbucket_url' in meta %}
<!-- User defined Bitbucket URL -->
<a href="{{ meta['bitbucket_url'] }}" class="fa fa-bitbucket"> {{ _('Edit on Bitbucket') }}</a>
{%- else %}
<a href="https://bitbucket.org/{{ bitbucket_user }}/{{ bitbucket_repo }}/src/{{ bitbucket_version}}{{ conf_py_path }}{{ pagename }}{{ page_source_suffix }}?mode={{ theme_vcs_pageview_mode or "view" }}" class="fa fa-bitbucket"> {{ _('Edit on Bitbucket') }}</a>
{%- endif %}
{%- elif display_gitlab %}
{%- if check_meta and 'gitlab_url' in meta %}
<!-- User defined GitLab URL -->
<a href="{{ meta['gitlab_url'] }}" class="fa fa-gitlab"> {{ _('Edit on GitLab') }}</a>
{%- else %}
<a href="https://{{ gitlab_host|default("gitlab.com") }}/{{ gitlab_user }}/{{ gitlab_repo }}/{{ theme_vcs_pageview_mode or "blob" }}/{{ gitlab_version }}{{ conf_py_path }}{{ pagename }}{{ page_source_suffix }}" class="fa fa-gitlab"> {{ _('Edit on GitLab') }}</a>
{%- endif %}
{%- elif show_source and source_url_prefix %}
<a href="{{ source_url_prefix }}{{ pagename }}{{ page_source_suffix }}">{{ _('View page source') }}</a>
{%- elif show_source and has_source and sourcename %}
<a href="{{ pathto('_sources/' + sourcename, true)|e }}" rel="nofollow"> {{ _('View page source') }}</a>
{%- endif %}
{%- endif %}
</li>
{%- endblock %}

@@ -1,4 +0,0 @@
{# https://jinja.palletsprojects.com/en/3.0.x/tricks/#null-default-fallback #}
{%- if not is_eol %}
{%- extends "!version_chooser.html" %}
{%- endif %}

@@ -1,220 +0,0 @@
OS := $(shell uname -s)
PLUGIN_FORMATTER=../../hacking/build-ansible.py docs-build
TESTING_FORMATTER=../bin/testing_formatter.sh
KEYWORD_DUMPER=../../hacking/build-ansible.py document-keywords
CONFIG_DUMPER=../../hacking/build-ansible.py document-config
GENERATE_CLI=../../hacking/build-ansible.py generate-man
COLLECTION_DUMPER=../../hacking/build-ansible.py collection-meta
ifeq ($(shell echo $(OS) | egrep -ic 'Darwin|FreeBSD|OpenBSD|DragonFly'),1)
CPUS ?= $(shell sysctl hw.ncpu|awk '{print $$2}')
else
CPUS ?= $(shell nproc)
endif
# Internationalisation and Localisation
LANGUAGES ?=
# Sets the build output directory for the main docsite if it's not already specified
ifndef BUILDDIR
BUILDDIR = _build
endif
ifndef POTDIR
POTDIR = $(BUILDDIR)/gettext
endif
# Backwards compat for separate VARS
PLUGIN_ARGS=
ifdef MODULES
ifndef PLUGINS
PLUGIN_ARGS = -l $(MODULES)
else
PLUGIN_ARGS = -l $(MODULES),$(PLUGINS)
endif
else
ifdef PLUGINS
PLUGIN_ARGS = -l $(PLUGINS)
endif
endif
ANSIBLE_VERSION_ARGS=
ifdef ANSIBLE_VERSION
ANSIBLE_VERSION_ARGS=--ansible-version=$(ANSIBLE_VERSION)
endif
DOC_PLUGINS ?= become cache callback cliconf connection httpapi inventory lookup netconf shell strategy vars
PYTHON ?= python
# fetch version from project release.py as single source-of-truth
VERSION := $(shell $(PYTHON) ./version_helper.py --raw || echo error)
ifeq ($(findstring error,$(VERSION)), error)
$(error "version_helper failed")
endif
MAJOR_VERSION := $(shell $(PYTHON) ./version_helper.py --majorversion || echo error)
ifeq ($(findstring error,$(MAJOR_VERSION)), error)
$(error "version_helper failed to determine major version")
endif
assertrst:
ifndef rst
$(error specify document or pattern with rst=somefile.rst)
endif
all: docs
docs: htmldocs
coredocs: core_htmldocs
generate_rst: collections_meta config cli keywords plugins testing
core_generate_rst: collections_meta config cli keywords core_plugins testing
# At the moment localizing the plugins and collections is not required for the ongoing
# localisation effort. It will come at a later time.
gettext_generate_rst: collections_meta config cli keywords testing
# The following symlinks are necessary to produce two different docsets
# from the same set of rst files (Ansible the package docs, and core docs).
# Symlink the relevant index into place for building Ansible docs
ansible_structure:
# We must have python and python-packaging for the version_helper
# script so use it for version comparison
if $(PYTHON) -c "import sys, packaging.version as p; sys.exit(not p.Version('$(MAJOR_VERSION)') > p.Version('2.10'))" ; then \
echo "Creating symlinks in ansible_structure"; \
ln -sf ../rst/ansible_index.rst rst/index.rst; \
ln -sf ../dev_guide/ansible_index.rst rst/dev_guide/index.rst; \
ln -sf ../sphinx_conf/ansible_conf.py rst/conf.py; \
else \
echo 'Creating symlinks for older ansible in ansible_structure'; \
ln -sf ../rst/2.10_index.rst rst/index.rst; \
ln -sf ../sphinx_conf/2.10_conf.py rst/conf.py; \
fi
# Symlink the relevant index into place for building core docs
core_structure:
@echo "Creating symlinks in core_structure"
-ln -sf ../rst/core_index.rst rst/index.rst
-ln -sf ../dev_guide/core_index.rst rst/dev_guide/index.rst
# set up the correct core conf.py to use for English vs a translated language
ifdef LANGOPTS
-ln -sf ../sphinx_conf/core_lang_conf.py rst/conf.py
else
-ln -sf ../sphinx_conf/core_conf.py rst/conf.py
endif
# Symlink the relevant index into place for building core translated docs
gettext_structure:
@echo "Creating symlinks in gettext_structure"
-ln -sf ../rst/core_index.rst rst/index.rst
-ln -sf ../rst/dev_guide/core_index.rst rst/dev_guide/index.rst
-ln -sf ../sphinx_conf/all_conf.py rst/conf.py
gettext: gettext_structure gettext_generate_rst
CPUS=$(CPUS) $(MAKE) -f Makefile.sphinx gettext
# if msgcat is installed handle all indexes, otherwise use the index from gettext_structure.
-msgcat "$(POTDIR)/core_index.pot" "$(POTDIR)/ansible_index.pot" "$(POTDIR)/2.10_index.pot" > "$(POTDIR)/tmp_index.pot" && mv "$(POTDIR)/tmp_index.pot" "$(POTDIR)/index.pot"
rm "$(POTDIR)/core_index.pot" "$(POTDIR)/ansible_index.pot" "$(POTDIR)/2.10_index.pot"
generate-po:
ifeq ($(LANGUAGES),)
@echo 'LANGUAGES is not defined. It is mandatory. LANGUAGES should be a comma-separated list of languages to support. (Example: fr,es)'
else
(cd docs/docsite/; sphinx-intl update -w 0 -d rst/locales -p "$(POTDIR)" -l $(LANGUAGES))
endif
needs-translation:
ifeq ($(LANGUAGES),)
@echo 'LANGUAGES is not defined. It is mandatory. LANGUAGES should be a comma-separated list of languages to support. (Example: fr,es)'
else
(cd docs/docsite/; sphinx-intl stat -d rst/locales -l $(LANGUAGES) | grep -E ' [1-9][0-9]* (fuzzy|untranslated)' | sort)
endif
htmldocs: ansible_structure generate_rst
CPUS=$(CPUS) $(MAKE) -f Makefile.sphinx html
core_htmldocs: core_structure core_generate_rst
CPUS=$(CPUS) $(MAKE) -f Makefile.sphinx html
singlehtmldocs: ansible_structure generate_rst
CPUS=$(CPUS) $(MAKE) -f Makefile.sphinx singlehtml
core_singlehtmldocs: core_structure core_generate_rst
CPUS=$(CPUS) $(MAKE) -f Makefile.sphinx singlehtml
# Note: The linkcheckdocs and htmlsingle targets depend on gettext_structure
# because that one does not exclude any rst files in its conf.py.
linkcheckdocs: gettext_structure generate_rst
CPUS=$(CPUS) $(MAKE) -f Makefile.sphinx linkcheck
htmlsingle: assertrst gettext_structure
sphinx-build -j $(CPUS) -b html -d $(BUILDDIR)/doctrees ./rst $(BUILDDIR)/html rst/$(rst)
@echo "Output is in $(BUILDDIR)/html/$(rst:.rst=.html)"
webdocs: docs
#TODO: leaving htmlout removal for those having older versions, should eventually be removed also
clean:
@echo "Cleaning $(BUILDDIR)"
-rm -rf $(BUILDDIR)/doctrees
-rm -rf $(BUILDDIR)/html
-rm -rf htmlout
-rm -rf module_docs
-rm -rf $(BUILDDIR)
-rm -f .buildinfo
-rm -f objects.inv
-rm -rf *.doctrees
@echo "Cleaning up minified css files"
find . -type f -name "*.min.css" -delete
@echo "Cleaning up byte compiled python stuff"
find . -regex ".*\.py[co]$$" -delete
@echo "Cleaning up editor backup files"
find . -type f \( -name "*~" -or -name "#*" \) -delete
find . -type f \( -name "*.swp" \) -delete
@echo "Cleaning up generated rst"
rm -f rst/playbooks_directives.rst
rm -f rst/reference_appendices/config.rst
rm -f rst/reference_appendices/playbooks_keywords.rst
rm -f rst/dev_guide/collections_galaxy_meta.rst
rm -f rst/cli/*.rst
for filename in `ls rst/collections/` ; do \
if test x"$$filename" != x'all_plugins.rst' ; then \
rm -rf "rst/collections/$$filename"; \
fi \
done
@echo "Cleaning up generated ansible_structure"
find . -type l -delete
@echo "Cleaning up legacy generated rst locations"
rm -rf rst/modules
rm -f rst/plugins/*/*.rst
.PHONY: docs clean
collections_meta: ../templates/collections_galaxy_meta.rst.j2
$(COLLECTION_DUMPER) --template-file=../templates/collections_galaxy_meta.rst.j2 --output-dir=rst/dev_guide/ $(EXTRA_COLLECTION_META_ARGS) ../../lib/ansible/galaxy/data/collections_galaxy_meta.yml
# TODO: make generate_man output dir cli option
cli:
mkdir -p rst/cli
$(GENERATE_CLI) --template-file=../templates/cli_rst.j2 --output-dir=rst/cli/ --output-format rst $(EXTRA_CLI_DUMPER_ARGS) ../../lib/ansible/cli/*.py
keywords: ../templates/playbooks_keywords.rst.j2
$(KEYWORD_DUMPER) --template-dir=../templates --output-dir=rst/reference_appendices/ ../../lib/ansible/keyword_desc.yml $(EXTRA_KEYWORD_DUMPER_ARGS)
config: ../templates/config.rst.j2
$(CONFIG_DUMPER) --template-file=../templates/config.rst.j2 --output-dir=rst/reference_appendices/ $(EXTRA_CONFIG_DUMPER_ARGS) ../../lib/ansible/config/base.yml
plugins:
$(PLUGIN_FORMATTER) full -o rst $(ANSIBLE_VERSION_ARGS) $(EXTRA_PLUGIN_FORMATTER_ARGS) $(PLUGIN_ARGS)
# This only builds the plugin docs included with ansible-core
core_plugins:
$(PLUGIN_FORMATTER) core -o rst $(EXTRA_PLUGIN_FORMATTER_ARGS) $(PLUGIN_ARGS)
testing:
$(TESTING_FORMATTER)
epub:
(CPUS=$(CPUS) $(MAKE) -f Makefile.sphinx epub)

@@ -1,26 +0,0 @@
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line.
SPHINXCONFDIR = rst
LANGOPTS ?=
SPHINXOPTS ?= -j $(CPUS) -n -w rst_warnings -c "$(SPHINXCONFDIR)" $(LANGOPTS)
SPHINXBUILD = sphinx-build
SPHINXPROJ = sdfsdf
SOURCEDIR = rst
# Sets the build output directory if it's not specified on the command line
ifndef BUILDDIR
BUILDDIR = _build
endif
# Put it first so that "make" without argument is like "make help".
help:
$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile.sphinx
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile.sphinx
$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

@@ -1,26 +0,0 @@
Ansible documentation
=====================
This project hosts the source behind the general pages of [docs.ansible.com](https://docs.ansible.com/). Module-specific documentation is hosted in the various collections repositories. See [Ansible Galaxy](https://galaxy.ansible.com/), the list of [Ansible-maintained collections](https://docs.ansible.com/ansible/devel/community/contributing_maintained_collections.html), and the [ansible-collections organization](https://github.com/ansible-collections) for collections sources.
To create clear, concise, and consistent contributions to Ansible documentation, please refer to the following information.
Contributions
=============
Contributions to the documentation are welcome.
The Ansible community produces guidance on contributions, building documentation, and submitting pull requests, which you can find in [Contributing to the Ansible Documentation](https://docs.ansible.com/ansible/latest/community/documentation_contributions.html).
You can also join the [Docs Working Group](https://github.com/ansible/community/wiki/Docs) and/or the ``#ansible-docs`` IRC channel on [irc.libera.chat](https://libera.chat/)
Ansible style guide
===================
Ansible documentation is written in ReStructuredText(RST). The [Ansible style guide](https://docs.ansible.com/ansible/latest/dev_guide/style_guide/index.html#linguistic-guidelines) provides linguistic direction and technical guidelines for working with reStructuredText, in addition to other resources.
Tools
=====
The Ansible community uses a range of tools and programs for working with Ansible documentation. Learn more about [Other Tools and Programs](https://docs.ansible.com/ansible/latest/community/other_tools_and_programs.html#popular-editors) in the Ansible Community Guide.
GitHub
======
[Ansible documentation](https://github.com/ansible/ansible/tree/devel/docs/docsite) is hosted on the Ansible GitHub project and various collection repositories, especially those in the [ansible-collections organization](https://github.com/ansible-collections). For general GitHub workflows and other information, see the [GitHub Guides](https://guides.github.com/).

@@ -1,17 +0,0 @@
.DocSiteProduct-header--core {
background-color: #161b1f;
border-color: #161b1f;
}
.wy-nav-top, .wy-side-nav-search {
background-color: #161b1f;
}
.wy-nav-side {
background-color: #999999;
}
.wy-menu-vertical header, .wy-menu-vertical p.caption {
color: #161b1f;
}
.ansibleNav ul li a:hover {
color: #161b1f;
}

@@ -1,17 +0,0 @@
# We also need an example of modules hosted in Automation Hub
# We'll likely move to data hosted in botmeta instead of a standalone file but
# we'll need all of these same details.
module:
purefa_user:
source: 'https://galaxy.ansible.com/'
fqcn: 'purestorage.flasharray'
purefa_vg:
source: 'https://galaxy.ansible.com/'
fqcn: 'purestorage.flasharray'
gcp_compute_firewall_info:
source: 'https://galaxy.ansible.com/'
fqcn: 'google.cloud'
module_utils:
purefa:
source: 'https://galaxy.ansible.com/'
fqcn: 'purestorage.flasharray'

@@ -1,5 +0,0 @@
function AnsibleModules($scope) {
$scope.modules = [];
$scope.orderProp = "module";
}

@@ -1,17 +0,0 @@
# pip packages required to build docsite
# these requirements are as loosely defined as possible
# if you want known good versions of these dependencies
# use test/sanity/code-smell/docs-build.requirements.txt
# instead
antsibull-docs >= 1.0.0, < 2.0.0
docutils
jinja2
pygments >= 2.10.0
pyyaml
rstcheck
sphinx-notfound-page >= 0.6
sphinx-intl
sphinx-ansible-theme >= 0.10.2
sphinx
resolvelib

@@ -1,106 +0,0 @@
.. _ansible_documentation:
Ansible Documentation
=====================
About Ansible
`````````````
Ansible is an IT automation tool. It can configure systems, deploy software, and orchestrate more advanced IT tasks such as continuous deployments or zero downtime rolling updates.
Ansible's main goals are simplicity and ease-of-use. It also has a strong focus on security and reliability, featuring a minimum of moving parts, usage of OpenSSH for transport (with other transports and pull modes as alternatives), and a language that is designed around auditability by humans--even those not familiar with the program.
We believe simplicity is relevant to all sizes of environments, so we design for busy users of all types: developers, sysadmins, release engineers, IT managers, and everyone in between. Ansible is appropriate for managing all environments, from small setups with a handful of instances to enterprise environments with many thousands of instances.
You can learn more at `AnsibleFest <https://www.ansible.com/ansiblefest>`_, the annual event for all Ansible contributors, users, and customers hosted by Red Hat. AnsibleFest is the place to connect with others, learn new skills, and find a new friend to automate with.
Ansible manages machines in an agent-less manner. There is never a question of how to upgrade remote daemons or the problem of not being able to manage systems because daemons are uninstalled. Because OpenSSH is one of the most peer-reviewed open source components, security exposure is greatly reduced. Ansible is decentralized--it relies on your existing OS credentials to control access to remote machines. If needed, Ansible can easily connect with Kerberos, LDAP, and other centralized authentication management systems.
This documentation covers the version of Ansible noted in the upper left corner of this page. We maintain multiple versions of Ansible and of the documentation, so please be sure you are using the version of the documentation that covers the version of Ansible you're using. For recent features, we note the version of Ansible where the feature was added.
Ansible releases a new major release approximately twice a year. The core application evolves somewhat conservatively, valuing simplicity in language design and setup. Contributors develop and change modules and plugins, hosted in collections since version 2.10, much more quickly.
.. toctree::
:maxdepth: 2
:caption: Installation, Upgrade & Configuration
installation_guide/index
porting_guides/porting_guides
.. toctree::
:maxdepth: 2
:caption: Using Ansible
user_guide/index
.. toctree::
:maxdepth: 2
:caption: Contributing to Ansible
community/index
.. toctree::
:maxdepth: 2
:caption: Extending Ansible
dev_guide/index
.. toctree::
:glob:
:maxdepth: 1
:caption: Common Ansible Scenarios
scenario_guides/cloud_guides
scenario_guides/network_guides
scenario_guides/virt_guides
.. toctree::
:maxdepth: 2
:caption: Network Automation
network/getting_started/index
network/user_guide/index
network/dev_guide/index
.. toctree::
:maxdepth: 2
:caption: Ansible Galaxy
galaxy/user_guide.rst
galaxy/dev_guide.rst
.. toctree::
:maxdepth: 1
:caption: Reference & Appendices
collections/index
collections/all_plugins
reference_appendices/playbooks_keywords
reference_appendices/common_return_values
reference_appendices/config
reference_appendices/general_precedence
reference_appendices/YAMLSyntax
reference_appendices/python_3_support
reference_appendices/interpreter_discovery
reference_appendices/release_and_maintenance
reference_appendices/test_strategies
dev_guide/testing/sanity/index
reference_appendices/faq
reference_appendices/glossary
reference_appendices/module_utils
reference_appendices/special_variables
reference_appendices/tower
reference_appendices/automationhub
reference_appendices/logging
.. toctree::
:maxdepth: 2
:caption: Release Notes
.. toctree::
:maxdepth: 2
:caption: Roadmaps
roadmap/index.rst

@@ -1,12 +0,0 @@
:orphan:
*****
Oops!
*****
The version of the Ansible documentation you were looking at doesn't contain that page.
.. image:: images/cow.png
:alt: Cowsay 404
Use the back button to return to the version you were browsing, or use the navigation at left to explore our latest release. Once you're on a non-404 page, you can use the version-changer to select a version.

@@ -1,124 +0,0 @@
.. _ansible_documentation:
..
This is the index file for Ansible the package. It gets symlinked to index.rst by the Makefile
Ansible Documentation
=====================
About Ansible
`````````````
Ansible is an IT automation tool. It can configure systems, deploy software, and orchestrate more advanced IT tasks such as continuous deployments or zero downtime rolling updates.
Ansible's main goals are simplicity and ease-of-use. It also has a strong focus on security and reliability, featuring a minimum of moving parts, usage of OpenSSH for transport (with other transports and pull modes as alternatives), and a language that is designed around auditability by humans--even those not familiar with the program.
We believe simplicity is relevant to all sizes of environments, so we design for busy users of all types: developers, sysadmins, release engineers, IT managers, and everyone in between. Ansible is appropriate for managing all environments, from small setups with a handful of instances to enterprise environments with many thousands of instances.
You can learn more at `AnsibleFest <https://www.ansible.com/ansiblefest>`_, the annual event for all Ansible contributors, users, and customers hosted by Red Hat. AnsibleFest is the place to connect with others, learn new skills, and find a new friend to automate with.
Ansible manages machines in an agent-less manner. There is never a question of how to upgrade remote daemons or the problem of not being able to manage systems because daemons are uninstalled. Also, security exposure is greatly reduced because Ansible uses OpenSSH — the open source connectivity tool for remote login with the SSH (Secure Shell) protocol.
Ansible is decentralized--it relies on your existing OS credentials to control access to remote machines. And if needed, Ansible can easily connect with Kerberos, LDAP, and other centralized authentication management systems.
This documentation covers the version of Ansible noted in the upper left corner of this page. We maintain multiple versions of Ansible and the Ansible documentation, so please be sure you are using the documentation version that covers the version of Ansible you are using. For recent features, we note the version of Ansible where the feature was added.
Ansible releases a new major release approximately twice a year. The core application evolves somewhat conservatively, valuing simplicity in language design and setup. Contributors develop and change modules and plugins hosted in collections since version 2.10 much more quickly.
.. toctree::
:maxdepth: 2
:caption: Ansible getting started
getting_started/index
.. toctree::
:maxdepth: 2
:caption: Installation, Upgrade & Configuration
installation_guide/index
porting_guides/porting_guides
.. toctree::
:maxdepth: 2
:caption: Using Ansible
inventory_guide/index
command_guide/index
playbook_guide/index
vault_guide/index
module_plugin_guide/index
collections_guide/index
os_guide/index
tips_tricks/index
.. toctree::
:maxdepth: 2
:caption: Contributing to Ansible
community/index
community/contributions_collections
community/contributions
community/advanced_index
dev_guide/style_guide/index
.. toctree::
:maxdepth: 2
:caption: Extending Ansible
dev_guide/index
.. toctree::
:glob:
:maxdepth: 1
:caption: Common Ansible Scenarios
scenario_guides/cloud_guides
scenario_guides/network_guides
scenario_guides/virt_guides
.. toctree::
:maxdepth: 2
:caption: Network Automation
network/getting_started/index
network/user_guide/index
network/dev_guide/index
.. toctree::
:maxdepth: 2
:caption: Ansible Galaxy
galaxy/user_guide.rst
galaxy/dev_guide.rst
.. toctree::
:maxdepth: 1
:caption: Reference & Appendices
collections/index
collections/all_plugins
reference_appendices/playbooks_keywords
reference_appendices/common_return_values
reference_appendices/config
reference_appendices/general_precedence
reference_appendices/YAMLSyntax
reference_appendices/python_3_support
reference_appendices/interpreter_discovery
reference_appendices/release_and_maintenance
reference_appendices/test_strategies
dev_guide/testing/sanity/index
reference_appendices/faq
reference_appendices/glossary
reference_appendices/module_utils
reference_appendices/special_variables
reference_appendices/tower
reference_appendices/automationhub
reference_appendices/logging
.. toctree::
:maxdepth: 2
:caption: Roadmaps
roadmap/ansible_roadmap_index.rst
roadmap/ansible_core_roadmap_index.rst

@@ -1,107 +0,0 @@
:orphan:
*************************
Ansible API Documentation
*************************
The Ansible API is under construction. These stub references for attributes, classes, functions, methods, and modules will be documented in the future.
The :ref:`module utilities <ansible.module_utils>` included in ``ansible.module_utils.basic`` and ``AnsibleModule`` are documented under Reference & Appendices.
.. contents::
:local:
Attributes
==========
.. py:attribute:: AnsibleModule.params
The parameters accepted by the module.
.. py:attribute:: ansible.module_utils.basic.ANSIBLE_VERSION
.. py:attribute:: ansible.module_utils.basic.SELINUX_SPECIAL_FS
Deprecated in favor of AnsibleModule._selinux_special_fs.
.. py:attribute:: AnsibleModule.ansible_version
.. py:attribute:: AnsibleModule._debug
.. py:attribute:: AnsibleModule._diff
.. py:attribute:: AnsibleModule.no_log
.. py:attribute:: AnsibleModule._selinux_special_fs
(formerly ansible.module_utils.basic.SELINUX_SPECIAL_FS)
.. py:attribute:: AnsibleModule._syslog_facility
.. py:attribute:: self.playbook
.. py:attribute:: self.play
.. py:attribute:: self.task
.. py:attribute:: sys.path
Classes
=======
.. py:class:: ``ansible.module_utils.basic.AnsibleModule``
:noindex:
The basic utilities for AnsibleModule.
.. py:class:: AnsibleModule
The main class for an Ansible module.
Functions
=========
.. py:function:: ansible.module_utils.basic._load_params()
Load parameters.
Methods
=======
.. py:method:: AnsibleModule.log()
Logs the output of Ansible.
.. py:method:: AnsibleModule.debug()
Debugs Ansible.
.. py:method:: AnsibleModule.get_bin_path()
Retrieves the path for executables.
.. py:method:: AnsibleModule.run_command()
Runs a command within an Ansible module.
.. py:method:: module.fail_json()
Exits and returns a failure.
.. py:method:: module.exit_json()
Exits and returns output.
Modules
=======
.. py:module:: ansible.module_utils
.. py:module:: ansible.module_utils.basic
:noindex:
.. py:module:: ansible.module_utils.url

@@ -1,11 +0,0 @@
.. _all_modules_and_plugins:
Indexes of all modules and plugins
----------------------------------
.. toctree::
:maxdepth: 1
:caption: Plugin indexes
:glob:
index_*

@@ -1,77 +0,0 @@
.. _collections_downloading:
Downloading collections
=======================
To download a collection and its dependencies for an offline install, run ``ansible-galaxy collection download``. This
downloads the collections specified and their dependencies to the specified folder and creates a ``requirements.yml``
file which can be used to install those collections on a host without access to a Galaxy server. All the collections
are downloaded by default to the ``./collections`` folder.
Just like the ``install`` command, the collections are sourced based on the
:ref:`configured galaxy server config <galaxy_server_config>`. Even if a collection to download was specified by a URL
or path to a tarball, the collection will be redownloaded from the configured Galaxy server.
Collections can be specified as one or multiple collections or with a ``requirements.yml`` file just like
``ansible-galaxy collection install``.
To download a single collection and its dependencies:
.. code-block:: bash
ansible-galaxy collection download my_namespace.my_collection
To download a single collection at a specific version:
.. code-block:: bash
ansible-galaxy collection download my_namespace.my_collection:1.0.0
To download multiple collections either specify multiple collections as command line arguments as shown above or use a
requirements file in the format documented with :ref:`collection_requirements_file`.
.. code-block:: bash
ansible-galaxy collection download -r requirements.yml
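As an illustrative sketch only (the namespace, collection names, and version specifiers below are hypothetical placeholders), a requirements file in that format might look like this:

.. code-block:: yaml

   # requirements.yml -- hypothetical collections to download for offline installation
   collections:
     - name: my_namespace.my_collection        # placeholder FQCN
       version: 1.0.0
     - name: my_namespace.other_collection
       version: ">=2.0.0,<3.0.0"               # a version range is also accepted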
You can also download a source collection directory. The collection is built with the mandatory ``galaxy.yml`` file.
.. code-block:: bash
ansible-galaxy collection download /path/to/collection
ansible-galaxy collection download git+file:///path/to/collection/.git
You can download multiple source collections from a single namespace by providing the path to the namespace.
.. code-block:: text
ns/
├── collection1/
│   ├── galaxy.yml
│   └── plugins/
└── collection2/
├── galaxy.yml
└── plugins/
.. code-block:: bash
ansible-galaxy collection download /path/to/ns
All the collections are downloaded by default to the ``./collections`` folder but you can use ``-p`` or
``--download-path`` to specify another path:
.. code-block:: bash
ansible-galaxy collection download my_namespace.my_collection -p ~/offline-collections
Once you have downloaded the collections, the folder contains the collections specified, their dependencies, and a
``requirements.yml`` file. You can use this folder as is with ``ansible-galaxy collection install`` to install the
collections on a host without access to a Galaxy server.
.. code-block:: bash
# This must be run from the folder that contains the offline collections and requirements.yml file downloaded
# by the internet-connected host
cd ~/offline-collections
ansible-galaxy collection install -r requirements.yml

@@ -1,6 +0,0 @@
.. _index_collections:
Collections index
=================
You can find an index of collections at :ref:`list_of_collections`.

@@ -1,126 +0,0 @@
.. _collections_installing:
Installing collections
======================
.. note::
If you install a collection manually as described in this paragraph, the collection will not be upgraded automatically when you upgrade the ``ansible`` package or ``ansible-core``.
Installing collections with ``ansible-galaxy``
----------------------------------------------
.. include:: ../shared_snippets/installing_collections.txt
.. _installing_signed_collections:
Installing collections with signature verification
---------------------------------------------------
If a collection has been signed by a :term:`distribution server`, the server will provide ASCII armored, detached signatures to verify the authenticity of the ``MANIFEST.json`` before using it to verify the collection's contents. This option is not available on all distribution servers. See :ref:`distributing_collections` for a table listing which servers support collection signing.
To use signature verification for signed collections:
1. :ref:`Configure a GnuPG keyring <galaxy_gpg_keyring>` for ``ansible-galaxy``, or provide the path to the keyring with the ``--keyring`` option when you install the signed collection.
2. Import the public key from the distribution server into that keyring.
.. code-block:: bash
gpg --import --no-default-keyring --keyring ~/.ansible/pubring.kbx my-public-key.asc
3. Verify the signature when you install the collection.
.. code-block:: bash
ansible-galaxy collection install my_namespace.my_collection --keyring ~/.ansible/pubring.kbx
The ``--keyring`` option is not necessary if you have :ref:`configured a GnuPG keyring <galaxy_gpg_keyring>`.
4. Optionally, verify the signature at any point after installation to prove the collection has not been tampered with. See :ref:`verify_signed_collections` for details.
You can also include signatures in addition to those provided by the distribution server. Use the ``--signature`` option to verify the collection's ``MANIFEST.json`` with these additional signatures. Supplemental signatures should be provided as URIs.
.. code-block:: bash
ansible-galaxy collection install my_namespace.my_collection --signature https://examplehost.com/detached_signature.asc --keyring ~/.ansible/pubring.kbx
GnuPG verification only occurs for collections installed from a distribution server. User-provided signatures are not used to verify collections installed from git repositories, source directories, or URLs/paths to tar.gz files.
You can also include additional signatures in the collection ``requirements.yml`` file under the ``signatures`` key.
.. code-block:: yaml
# requirements.yml
collections:
- name: ns.coll
version: 1.0.0
signatures:
- https://examplehost.com/detached_signature.asc
- file:///path/to/local/detached_signature.asc
See :ref:`collection requirements file <collection_requirements_file>` for details on how to install collections with this file.
By default, verification is considered successful if a minimum of 1 signature successfully verifies the collection. The number of required signatures can be configured with ``--required-valid-signature-count`` or :ref:`GALAXY_REQUIRED_VALID_SIGNATURE_COUNT`. All signatures can be required by setting the option to ``all``. To fail signature verification if no valid signatures are found, prepend the value with ``+``, such as ``+all`` or ``+1``.
.. code-block:: bash
export ANSIBLE_GALAXY_GPG_KEYRING=~/.ansible/pubring.kbx
export ANSIBLE_GALAXY_REQUIRED_VALID_SIGNATURE_COUNT=2
ansible-galaxy collection install my_namespace.my_collection --signature https://examplehost.com/detached_signature.asc --signature file:///path/to/local/detached_signature.asc
Certain GnuPG errors can be ignored with ``--ignore-signature-status-code`` or :ref:`GALAXY_IGNORE_SIGNATURE_STATUS_CODES`. :ref:`GALAXY_IGNORE_SIGNATURE_STATUS_CODES` should be a list, and ``--ignore-signature-status-code`` can be provided multiple times to ignore multiple additional error status codes.
This example requires any signatures provided by the distribution server to verify the collection except if they fail due to NO_PUBKEY:
.. code-block:: bash
export ANSIBLE_GALAXY_GPG_KEYRING=~/.ansible/pubring.kbx
export ANSIBLE_GALAXY_REQUIRED_VALID_SIGNATURE_COUNT=all
ansible-galaxy collection install my_namespace.my_collection --ignore-signature-status-code NO_PUBKEY
If verification fails for the example above, only errors other than NO_PUBKEY will be displayed.
If verification is unsuccessful, the collection will not be installed. GnuPG signature verification can be disabled with ``--disable-gpg-verify`` or by configuring :ref:`GALAXY_DISABLE_GPG_VERIFY`.
.. _collections_older_version:
Installing an older version of a collection
-------------------------------------------
.. include:: ../shared_snippets/installing_older_collection.txt
.. _collection_requirements_file:
Install multiple collections with a requirements file
-----------------------------------------------------
.. include:: ../shared_snippets/installing_multiple_collections.txt
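A minimal sketch of such a requirements file follows; the collection names, git URL, and versions are hypothetical placeholders:

.. code-block:: yaml

   # requirements.yml -- hypothetical example installing several collections at once
   collections:
     - name: my_namespace.my_collection             # from the configured Galaxy server
       version: ">=1.0.0,<2.0.0"
     - name: https://github.com/my_org/my_repo.git  # from a git repository
       type: git
       version: main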
.. _collection_offline_download:
Downloading a collection for offline use
-----------------------------------------
.. include:: ../shared_snippets/download_tarball_collections.txt
Installing a collection from source files
-----------------------------------------
.. include:: ../shared_snippets/installing_collections_file.rst
Installing a collection from a git repository
---------------------------------------------
.. include:: ../shared_snippets/installing_collections_git_repo.txt
.. _galaxy_server_config:
Configuring the ``ansible-galaxy`` client
------------------------------------------
.. include:: ../shared_snippets/galaxy_server_list.txt

@@ -1,74 +0,0 @@
.. _collections_listing:
Listing collections
===================
To list installed collections, run ``ansible-galaxy collection list``. This shows all of the installed collections found in the configured collections search paths. It also shows collections under development that contain a ``galaxy.yml`` file instead of a ``MANIFEST.json``. The path where each collection is located is displayed, along with version information. If no version information is available, a ``*`` is displayed for the version number.
.. code-block:: shell
# /home/astark/.ansible/collections/ansible_collections
Collection Version
-------------------------- -------
cisco.aci 0.0.5
cisco.mso 0.0.4
sandwiches.ham *
splunk.es 0.0.5
# /usr/share/ansible/collections/ansible_collections
Collection Version
----------------- -------
fortinet.fortios 1.0.6
pureport.pureport 0.0.8
sensu.sensu_go 1.3.0
Run with ``-vvv`` to display more detailed information.
You may see additional collections here that were added as dependencies of your installed collections. Only use collections in your playbooks that you have directly installed.
To list a specific collection, pass a valid fully qualified collection name (FQCN) to the command ``ansible-galaxy collection list``. All instances of the collection will be listed.
.. code-block:: shell
> ansible-galaxy collection list fortinet.fortios
# /home/astark/.ansible/collections/ansible_collections
Collection Version
---------------- -------
fortinet.fortios 1.0.1
# /usr/share/ansible/collections/ansible_collections
Collection Version
---------------- -------
fortinet.fortios 1.0.6
To search other paths for collections, use the ``-p`` option. Specify multiple search paths by separating them with a ``:``. The list of paths specified on the command line will be added to the beginning of the configured collections search paths.
.. code-block:: shell
> ansible-galaxy collection list -p '/opt/ansible/collections:/etc/ansible/collections'
# /opt/ansible/collections/ansible_collections
Collection Version
--------------- -------
sandwiches.club 1.7.2
# /etc/ansible/collections/ansible_collections
Collection Version
-------------- -------
sandwiches.pbj 1.2.0
# /home/astark/.ansible/collections/ansible_collections
Collection Version
-------------------------- -------
cisco.aci 0.0.5
cisco.mso 0.0.4
fortinet.fortios 1.0.1
sandwiches.ham *
splunk.es 0.0.5
# /usr/share/ansible/collections/ansible_collections
Collection Version
----------------- -------
fortinet.fortios 1.0.6
pureport.pureport 0.0.8
sensu.sensu_go 1.3.0

@@ -1,119 +0,0 @@
.. _using_collections:
.. _collections_using_playbook:
Using collections in a playbook
===============================
Once installed, you can reference a collection content by its fully qualified collection name (FQCN):
.. code-block:: yaml
- hosts: all
tasks:
- my_namespace.my_collection.mymodule:
option1: value
This works for roles or any type of plugin distributed within the collection:
.. code-block:: yaml
- hosts: all
tasks:
- import_role:
name: my_namespace.my_collection.role1
- my_namespace.my_collection.mymodule:
option1: value
- debug:
msg: '{{ lookup("my_namespace.my_collection.lookup1", "param1") | my_namespace.my_collection.filter1 }}'
Simplifying module names with the ``collections`` keyword
---------------------------------------------------------
The ``collections`` keyword lets you define a list of collections that your role or playbook should search for unqualified module and action names. So you can use the ``collections`` keyword, then simply refer to modules and action plugins by their short-form names throughout that role or playbook.
.. warning::
If your playbook uses both the ``collections`` keyword and one or more roles, the roles do not inherit the collections set by the playbook. This is one of the reasons we recommend you always use FQCN. See below for roles details.
Using ``collections`` in roles
------------------------------
Within a role, you can control which collections Ansible searches for the tasks inside the role using the ``collections`` keyword in the role's ``meta/main.yml``. Ansible will use the collections list defined inside the role even if the playbook that calls the role defines different collections in a separate ``collections`` keyword entry. Roles defined inside a collection always implicitly search their own collection first, so you don't need to use the ``collections`` keyword to access modules, actions, or other roles contained in the same collection.
.. code-block:: yaml
# myrole/meta/main.yml
collections:
- my_namespace.first_collection
- my_namespace.second_collection
- other_namespace.other_collection
Using ``collections`` in playbooks
----------------------------------
In a playbook, you can control the collections Ansible searches for modules and action plugins to execute. However, any roles you call in your playbook define their own collections search order; they do not inherit the calling playbook's settings. This is true even if the role does not define its own ``collections`` keyword.
.. code-block:: yaml
- hosts: all
collections:
- my_namespace.my_collection
tasks:
- import_role:
name: role1
- mymodule:
option1: value
- debug:
msg: '{{ lookup("my_namespace.my_collection.lookup1", "param1")| my_namespace.my_collection.filter1 }}'
The ``collections`` keyword merely creates an ordered 'search path' for non-namespaced plugin and role references. It does not install content or otherwise change Ansible's behavior around the loading of plugins or roles. Note that an FQCN is still required for plugins other than modules and action plugins (for example, lookups, filters, tests).
When using the ``collections`` keyword, it is not necessary to add in ``ansible.builtin`` as part of the search list. When left omitted, the following content is available by default:
1. Standard ansible modules and plugins available through ``ansible-base``/``ansible-core``
2. Support for older 3rd party plugin paths
In general, it is preferable to use a module or plugin's FQCN over the ``collections`` keyword and the short name for all content in ``ansible-core``
Using a playbook from a collection
----------------------------------
.. versionadded:: 2.11
You can also distribute playbooks in your collection and invoke them using the same semantics you use for plugins:
.. code-block:: shell
ansible-playbook my_namespace.my_collection.playbook1 -i ./myinventory
From inside a playbook:
.. code-block:: yaml
- import_playbook: my_namespace.my_collection.playbookX
A few recommendations when creating such playbooks: ``hosts:`` should be generic or at least take a variable input.
.. code-block:: yaml
- hosts: all # Use --limit or customized inventory to restrict hosts targeted
- hosts: localhost # For things you want to restrict to the controller
- hosts: '{{target|default("webservers")}}' # Assumes inventory provides a 'webservers' group, but can also use ``-e 'target=host1,host2'``
This will have an implied entry in the ``collections:`` keyword of ``my_namespace.my_collection`` just as with roles.
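Tying these recommendations together, a minimal sketch of such a collection playbook (the file layout, module, and variable names are hypothetical) could look like this:

.. code-block:: yaml

   # playbooks/playbook1.yml inside my_namespace.my_collection (hypothetical layout)
   - hosts: '{{ target | default("webservers") }}'   # override with -e 'target=host1,host2'
     tasks:
       - mymodule:          # short name resolves through the implied collections entry
           option1: value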
.. note::
* Playbook names, like other collection resources, have a restricted set of valid characters.
Names can contain only lowercase alphanumeric characters plus ``_``, and must start with an alphabetic character. The dash ``-`` character is not valid for playbook names in collections.
Playbooks whose names contain invalid characters are not addressable: this is a limitation of the Python importer that is used to load collection resources.
* Playbooks in collections do not support 'adjacent' plugins; all plugins must be in the collection-specific directories.

@@ -1,80 +0,0 @@
.. _collections_verifying:
Verifying collections
=====================
Verifying collections with ``ansible-galaxy``
---------------------------------------------
Once installed, you can verify that the content of the installed collection matches the content of the collection on the server. This feature expects that the collection is installed in one of the configured collection paths and that the collection exists on one of the configured galaxy servers.
.. code-block:: bash
ansible-galaxy collection verify my_namespace.my_collection
The output of the ``ansible-galaxy collection verify`` command is quiet if it is successful. If a collection has been modified, the altered files are listed under the collection name.
.. code-block:: bash
ansible-galaxy collection verify my_namespace.my_collection
Collection my_namespace.my_collection contains modified content in the following files:
my_namespace.my_collection
plugins/inventory/my_inventory.py
plugins/modules/my_module.py
You can use the ``-vvv`` flag to display additional information, such as the version and path of the installed collection, the URL of the remote collection used for validation, and successful verification output.
.. code-block:: bash
ansible-galaxy collection verify my_namespace.my_collection -vvv
...
Verifying 'my_namespace.my_collection:1.0.0'.
Installed collection found at '/path/to/ansible_collections/my_namespace/my_collection/'
Remote collection found at 'https://galaxy.ansible.com/download/my_namespace-my_collection-1.0.0.tar.gz'
Successfully verified that checksums for 'my_namespace.my_collection:1.0.0' match the remote collection
If you have a pre-release or non-latest version of a collection installed you should include the specific version to verify. If the version is omitted, the installed collection is verified against the latest version available on the server.
.. code-block:: bash
ansible-galaxy collection verify my_namespace.my_collection:1.0.0
In addition to the ``namespace.collection_name:version`` format, you can provide the collections to verify in a ``requirements.yml`` file. Dependencies listed in ``requirements.yml`` are not included in the verify process and should be verified separately.
.. code-block:: bash
ansible-galaxy collection verify -r requirements.yml
Verifying against ``tar.gz`` files is not supported. If your ``requirements.yml`` contains paths to tar files or URLs for installation, you can use the ``--ignore-errors`` flag to ensure that all collections using the ``namespace.name`` format in the file are processed.
.. _verify_signed_collections:
Verifying signed collections
-----------------------------
If a collection has been signed by a :term:`distribution server`, the server will provide ASCII armored, detached signatures to verify the authenticity of the MANIFEST.json before using it to verify the collection's contents. This option is not available on all distribution servers. See :ref:`distributing_collections` for a table listing which servers support collection signing. See :ref:`installing_signed_collections` for how to verify a signed collection when you install it.
To verify a signed installed collection:
.. code-block:: bash
ansible-galaxy collection verify my_namespace.my_collection --keyring ~/.ansible/pubring.kbx
Use the ``--signature`` option to verify collection name(s) provided on the CLI with an additional signature. This option can be used multiple times to provide multiple signatures.
.. code-block:: bash
ansible-galaxy collection verify my_namespace.my_collection --signature https://examplehost.com/detached_signature.asc --signature file:///path/to/local/detached_signature.asc --keyring ~/.ansible/pubring.kbx
Optionally, you can verify a collection signature with a ``requirements.yml`` file.
.. code-block:: bash
ansible-galaxy collection verify -r requirements.yml --keyring ~/.ansible/pubring.kbx
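A sketch of what such a ``requirements.yml`` could contain, assuming your ``ansible-core`` version supports the ``signatures`` key for collection entries (the collection name and signature locations are placeholders):

.. code-block:: yaml

   collections:
     - name: my_namespace.my_collection
       version: 1.0.0
       signatures:
         - https://examplehost.com/detached_signature.asc
         - file:///path/to/local/detached_signature.asc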
When a collection is installed from a distribution server, the signatures provided by the server to verify the collection's authenticity are saved alongside the installed collections. This data is used to verify the internal consistency of the collection without querying the distribution server again when the ``--offline`` option is provided.
.. code-block:: bash
ansible-galaxy collection verify my_namespace.my_collection --offline --keyring ~/.ansible/pubring.kbx

View File

@ -1,27 +0,0 @@
.. _collections_index:
.. _collections:
#########################
Using Ansible collections
#########################
.. note::
**Making Open Source More Inclusive**
Red Hat is committed to replacing problematic language in our code, documentation, and web properties. We are beginning with these four terms: master, slave, blacklist, and whitelist. We ask that you open an issue or pull request if you come upon a term that we have missed. For more details, see `our CTO Chris Wright's message <https://www.redhat.com/en/blog/making-open-source-more-inclusive-eradicating-problematic-language>`_.
Welcome to the Ansible guide for working with collections.
Collections are a distribution format for Ansible content that can include playbooks, roles, modules, and plugins.
You can install and use collections through a distribution server, such as Ansible Galaxy, or a Pulp 3 Galaxy server.
.. toctree::
:maxdepth: 2
collections_installing
collections_downloading
collections_listing
collections_verifying
collections_using_playbooks
collections_index

View File

@ -1,33 +0,0 @@
.. _cheatsheet:
**********************
Ansible CLI cheatsheet
**********************
This page shows one or more examples of each Ansible command line utility with some common flags added and a link to the full documentation for the command.
This page offers a quick reminder of some common use cases only - it may be out of date or incomplete or both.
For canonical documentation, follow the links to the CLI pages.
.. contents::
:local:
ansible-playbook
================
.. code-block:: bash
ansible-playbook -i /path/to/my_inventory_file -u my_connection_user -k -f 3 -T 30 -t my_tag -M /path/to/my_modules -b -K my_playbook.yml
Loads ``my_playbook.yml`` from the current working directory and:
- ``-i`` - uses ``my_inventory_file`` in the path provided for :ref:`inventory <intro_inventory>` to match the :ref:`pattern <intro_patterns>`.
- ``-u`` - connects :ref:`over SSH <connections>` as ``my_connection_user``.
- ``-k`` - asks for password which is then provided to SSH authentication.
- ``-f`` - allocates 3 :ref:`forks <playbooks_strategies>`.
- ``-T`` - sets a 30-second timeout.
- ``-t`` - runs only tasks marked with the :ref:`tag <tags>` ``my_tag``.
- ``-M`` - loads :ref:`local modules <developing_locally>` from ``/path/to/my_modules``.
- ``-b`` - executes with elevated privileges (uses :ref:`become <become>`).
- ``-K`` - prompts the user for the become password.
See :ref:`ansible-playbook` for detailed documentation.

View File

@ -1,23 +0,0 @@
.. _command_line_tools:
Working with command line tools
===============================
Most users are familiar with `ansible` and `ansible-playbook`, but those are not the only utilities Ansible provides.
Below is a complete list of Ansible utilities. Each page contains a description of the utility and a listing of supported parameters.
.. note::
You should not run most Ansible CLI tools in parallel against the same targets.
.. toctree::
:maxdepth: 1
../cli/ansible.rst
../cli/ansible-config.rst
../cli/ansible-console.rst
../cli/ansible-doc.rst
../cli/ansible-galaxy.rst
../cli/ansible-inventory.rst
../cli/ansible-playbook.rst
../cli/ansible-pull.rst
../cli/ansible-vault.rst

View File

@ -1,21 +0,0 @@
.. _command_guide_index:
################################
Using Ansible command line tools
################################
.. note::
**Making Open Source More Inclusive**
Red Hat is committed to replacing problematic language in our code, documentation, and web properties. We are beginning with these four terms: master, slave, blacklist, and whitelist. We ask that you open an issue or pull request if you come upon a term that we have missed. For more details, see `our CTO Chris Wright's message <https://www.redhat.com/en/blog/making-open-source-more-inclusive-eradicating-problematic-language>`_.
Welcome to the guide for using Ansible command line tools.
Ansible provides ad hoc commands and several utilities for performing various operations and automation tasks.
.. toctree::
:maxdepth: 2
intro_adhoc
command_line_tools
cheatsheet

View File

@ -1,220 +0,0 @@
.. _intro_adhoc:
*******************************
Introduction to ad hoc commands
*******************************
An Ansible ad hoc command uses the `/usr/bin/ansible` command-line tool to automate a single task on one or more managed nodes.
ad hoc commands are quick and easy, but they are not reusable.
So why learn about ad hoc commands?
ad hoc commands demonstrate the simplicity and power of Ansible.
The concepts you learn here will port over directly to the playbook language.
Before reading and executing these examples, please read :ref:`intro_inventory`.
.. contents::
:local:
Why use ad hoc commands?
========================
ad hoc commands are great for tasks you repeat rarely. For example, if you want to power off all the machines in your lab for Christmas vacation, you could execute a quick one-liner in Ansible without writing a playbook. An ad hoc command looks like this:
.. code-block:: bash
$ ansible [pattern] -m [module] -a "[module options]"
The ``-a`` option accepts options either through the ``key=value`` syntax or a JSON string starting with ``{`` and ending with ``}`` for more complex option structure.
You can learn more about :ref:`patterns<intro_patterns>` and :ref:`modules<working_with_modules>` on other pages.
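For example, both of the following ad hoc commands pass the same options to the ``ansible.builtin.copy`` module, first in ``key=value`` form and then as a JSON string (the host pattern and paths are only illustrative):

.. code-block:: bash

   $ ansible webservers -m ansible.builtin.copy -a "src=/etc/hosts dest=/tmp/hosts"
   $ ansible webservers -m ansible.builtin.copy -a '{"src": "/etc/hosts", "dest": "/tmp/hosts"}'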
Use cases for ad hoc tasks
==========================
ad hoc tasks can be used to reboot servers, copy files, manage packages and users, and much more. You can use any Ansible module in an ad hoc task. ad hoc tasks, like playbooks, use a declarative model,
calculating and executing the actions required to reach a specified final state. They
achieve a form of idempotence by checking the current state before they begin and doing nothing unless the current state is different from the specified final state.
Rebooting servers
-----------------
The default module for the ``ansible`` command-line utility is the :ref:`ansible.builtin.command module<command_module>`. You can use an ad hoc task to call the command module and reboot all web servers in Atlanta, 10 at a time. Before Ansible can do this, you must have all servers in Atlanta listed in a group called [atlanta] in your inventory, and you must have working SSH credentials for each machine in that group. To reboot all the servers in the [atlanta] group:
.. code-block:: bash
$ ansible atlanta -a "/sbin/reboot"
By default Ansible uses only 5 simultaneous processes. If you have more hosts than the value set for the fork count, Ansible will talk to them, but it will take a little longer. To reboot the [atlanta] servers with 10 parallel forks:
.. code-block:: bash
$ ansible atlanta -a "/sbin/reboot" -f 10
/usr/bin/ansible will default to running from your user account. To connect as a different user:
.. code-block:: bash
$ ansible atlanta -a "/sbin/reboot" -f 10 -u username
Rebooting probably requires privilege escalation. You can connect to the server as ``username`` and run the command as the ``root`` user by using the :ref:`become <become>` keyword:
.. code-block:: bash
$ ansible atlanta -a "/sbin/reboot" -f 10 -u username --become [--ask-become-pass]
If you add ``--ask-become-pass`` or ``-K``, Ansible prompts you for the password to use for privilege escalation (sudo/su/pfexec/doas/etc).
.. note::
The :ref:`command module <command_module>` does not support extended shell syntax like piping and
redirects (although shell variables will always work). If your command requires shell-specific
syntax, use the `shell` module instead. Read more about the differences on the
:ref:`working_with_modules` page.
So far all our examples have used the default 'command' module. To use a different module, pass ``-m`` for module name. For example, to use the :ref:`ansible.builtin.shell module <shell_module>`:
.. code-block:: bash
$ ansible raleigh -m ansible.builtin.shell -a 'echo $TERM'
When running any command with the Ansible *ad hoc* CLI (as opposed to
:ref:`Playbooks <working_with_playbooks>`), pay particular attention to shell quoting rules, so
the local shell retains the variable and passes it to Ansible.
For example, using double rather than single quotes in the above example would
evaluate the variable on the box you were on.
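A minimal illustration of this quoting rule:

.. code-block:: bash

   # Single quotes: $TERM is expanded on the managed node
   $ ansible raleigh -m ansible.builtin.shell -a 'echo $TERM'

   # Double quotes: your local shell expands $TERM before Ansible sees it
   $ ansible raleigh -m ansible.builtin.shell -a "echo $TERM"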
.. _file_transfer:
Managing files
--------------
An ad hoc task can harness the power of Ansible and SCP to transfer many files to multiple machines in parallel. To transfer a file directly to all servers in the [atlanta] group:
.. code-block:: bash
$ ansible atlanta -m ansible.builtin.copy -a "src=/etc/hosts dest=/tmp/hosts"
If you plan to repeat a task like this, use the :ref:`ansible.builtin.template<template_module>` module in a playbook.
The :ref:`ansible.builtin.file<file_module>` module allows changing ownership and permissions on files. These
same options can be passed directly to the ``copy`` module as well:
.. code-block:: bash
$ ansible webservers -m ansible.builtin.file -a "dest=/srv/foo/a.txt mode=600"
$ ansible webservers -m ansible.builtin.file -a "dest=/srv/foo/b.txt mode=600 owner=mdehaan group=mdehaan"
The ``file`` module can also create directories, similar to ``mkdir -p``:
.. code-block:: bash
$ ansible webservers -m ansible.builtin.file -a "dest=/path/to/c mode=755 owner=mdehaan group=mdehaan state=directory"
As well as delete directories (recursively) and delete files:
.. code-block:: bash
$ ansible webservers -m ansible.builtin.file -a "dest=/path/to/c state=absent"
.. _managing_packages:
Managing packages
-----------------
You might also use an ad hoc task to install, update, or remove packages on managed nodes using a package management module such as ``yum``. Package management modules support common functions to install, remove, and generally manage packages. Some specific functions for a package manager might not be present in the Ansible module since they are not part of general package management.
To ensure a package is installed without updating it:
.. code-block:: bash
$ ansible webservers -m ansible.builtin.yum -a "name=acme state=present"
To ensure a specific version of a package is installed:
.. code-block:: bash
$ ansible webservers -m ansible.builtin.yum -a "name=acme-1.5 state=present"
To ensure a package is at the latest version:
.. code-block:: bash
$ ansible webservers -m ansible.builtin.yum -a "name=acme state=latest"
To ensure a package is not installed:
.. code-block:: bash
$ ansible webservers -m ansible.builtin.yum -a "name=acme state=absent"
Ansible has modules for managing packages under many platforms. If there is no module for your package manager, you can install packages using the command module or create a module for your package manager.
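If you prefer a distribution-agnostic task, the generic ``ansible.builtin.package`` module delegates to the platform's package manager (the package name is illustrative):

.. code-block:: bash

   $ ansible webservers -m ansible.builtin.package -a "name=acme state=present"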
.. _users_and_groups:
Managing users and groups
-------------------------
You can create, manage, and remove user accounts on your managed nodes with ad hoc tasks:
.. code-block:: bash
$ ansible all -m ansible.builtin.user -a "name=foo password=<crypted password here>"
$ ansible all -m ansible.builtin.user -a "name=foo state=absent"
See the :ref:`ansible.builtin.user <user_module>` module documentation for details on all of the available options, including
how to manipulate groups and group membership.
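For example, a short sketch that creates a group and adds an existing user to it (the group and user names are placeholders):

.. code-block:: bash

   $ ansible all -m ansible.builtin.group -a "name=developers state=present"
   $ ansible all -m ansible.builtin.user -a "name=foo groups=developers append=yes"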
.. _managing_services:
Managing services
-----------------
Ensure a service is started on all webservers:
.. code-block:: bash
$ ansible webservers -m ansible.builtin.service -a "name=httpd state=started"
Alternatively, restart a service on all webservers:
.. code-block:: bash
$ ansible webservers -m ansible.builtin.service -a "name=httpd state=restarted"
Ensure a service is stopped:
.. code-block:: bash
$ ansible webservers -m ansible.builtin.service -a "name=httpd state=stopped"
.. _gathering_facts:
Gathering facts
---------------
Facts represent discovered variables about a system. You can use facts to implement conditional execution of tasks but also just to get ad hoc information about your systems. To see all facts:
.. code-block:: bash
$ ansible all -m ansible.builtin.setup
You can also filter this output to display only certain facts; see the :ref:`ansible.builtin.setup <setup_module>` module documentation for details.
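For example, to display only facts whose names match a shell-style wildcard, pass the module's ``filter`` option:

.. code-block:: bash

   $ ansible all -m ansible.builtin.setup -a "filter=ansible_distribution*"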
Patterns and ad hoc commands
----------------------------
See the :ref:`patterns <intro_patterns>` documentation for details on all of the available options, including
how to limit using patterns in ad hoc commands.
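As a quick illustration (the group names are placeholders):

.. code-block:: bash

   # Target hosts that are in the webservers group but not in the atlanta group
   $ ansible 'webservers:!atlanta' -m ansible.builtin.ping

   # Restrict an already supplied pattern further with --limit
   $ ansible all -m ansible.builtin.ping --limit "atlanta"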
Now that you understand the basic elements of Ansible execution, you are ready to learn to automate repetitive tasks using :ref:`Ansible Playbooks <playbooks_intro>`.
.. seealso::
:ref:`intro_configuration`
All about the Ansible config file
:ref:`list_of_collections`
Browse existing collections, modules, and plugins
:ref:`working_with_playbooks`
Using Ansible for configuration management & deployment
`Mailing List <https://groups.google.com/group/ansible-project>`_
Questions? Help? Ideas? Stop by the list on Google Groups
:ref:`communication_irc`
How to join Ansible chat channels

View File

@ -1,14 +0,0 @@
.. _advanced_community_guide:
**********************************************
Advanced Contributor Guide
**********************************************
This guide focuses on contributors who are committers, GitHub admins, or release managers.
.. toctree::
:maxdepth: 1
committer_guidelines
release_managers
github_admins

View File

@ -1,145 +0,0 @@
.. _code_of_conduct:
*************************
Community Code of Conduct
*************************
.. contents:: Topics
Every community can be strengthened by a diverse variety of viewpoints, insights,
opinions, skillsets, and skill levels. However, with diversity comes the potential for
disagreement and miscommunication. The purpose of this Code of Conduct is to ensure that
disagreements and differences of opinion are conducted respectfully and on their own
merits, without personal attacks or other behavior that might create an unsafe or
unwelcoming environment.
These policies are not designed to be a comprehensive set of Things You Cannot Do. We ask
that you treat your fellow community members with respect and courtesy, and in general,
Don't Be A Jerk. This Code of Conduct is meant to be followed in spirit as much as in
letter and is not exhaustive.
All Ansible events and participants therein are governed by this Code of Conduct and
anti-harassment policy. We expect organizers to enforce these guidelines throughout all events,
and we expect attendees, speakers, sponsors, and volunteers to help ensure a safe
environment for our whole community. Specifically, this Code of Conduct covers
participation in all Ansible-related forums and mailing lists, code and documentation
contributions, public chat (Matrix, IRC), private correspondence, and public meetings.
Ansible community members are...
**Considerate**
Contributions of every kind have far-ranging consequences. Just as your work depends on
the work of others, decisions you make surrounding your contributions to the Ansible
community will affect your fellow community members. You are strongly encouraged to take
those consequences into account while making decisions.
**Patient**
Asynchronous communication can come with its own frustrations, even in the most responsive
of communities. Please remember that our community is largely built on volunteered time,
and that questions, contributions, and requests for support may take some time to receive
a response. Repeated "bumps" or "reminders" in rapid succession are not good displays of
patience. Additionally, it is considered poor manners to ping a specific person with
general questions. Pose your question to the community as a whole, and wait patiently for
a response.
**Respectful**
Every community inevitably has disagreements, but remember that it is
possible to disagree respectfully and courteously. Disagreements are never an excuse for
rudeness, hostility, threatening behavior, abuse (verbal or physical), or personal attacks.
**Kind**
Everyone should feel welcome in the Ansible community, regardless of their background.
Please be courteous, respectful and polite to fellow community members. Do not make or
post offensive comments related to skill level, gender, gender identity or expression,
sexual orientation, disability, physical appearance, body size, race, or religion.
Sexualized images or imagery, real or implied violence, intimidation, oppression,
stalking, sustained disruption of activities, publishing the personal information of
others without explicit permission to do so, unwanted physical contact, and unwelcome
sexual attention are all strictly prohibited. Additionally, you are encouraged not to
make assumptions about the background or identity of your fellow community members.
**Inquisitive**
The only stupid question is the one that does not get asked. We
encourage our users to ask early and ask often. Rather than asking whether you can ask a
question (the answer is always yes!), instead, simply ask your question. You are
encouraged to provide as many specifics as possible. Code snippets in the form of Gists or
other paste site links are almost always needed in order to get the most helpful answers.
Refrain from pasting multiple lines of code directly into the chat channels - instead use
gist.github.com or another paste site to provide code snippets.
**Helpful**
The Ansible community is committed to being a welcoming environment for all users,
regardless of skill level. We were all beginners once upon a time, and our community
cannot grow without an environment where new users feel safe and comfortable asking questions.
It can become frustrating to answer the same questions repeatedly; however, community
members are expected to remain courteous and helpful to all users equally, regardless of
skill or knowledge level. Avoid providing responses that prioritize snideness and snark over
useful information. At the same time, everyone is expected to read the provided
documentation thoroughly. We are happy to answer questions, provide strategic guidance,
and suggest effective workflows, but we are not here to do your job for you.
Anti-harassment policy
======================
Harassment includes (but is not limited to) all of the following behaviors:
- Offensive comments related to gender (including gender expression and identity), age, sexual orientation, disability, physical appearance, body size, race, and religion
- Derogatory terminology including words commonly known to be slurs
- Posting sexualized images or imagery in public spaces
- Deliberate intimidation
- Stalking
- Posting others' personal information without explicit permission
- Sustained disruption of talks or other events
- Inappropriate physical contact
- Unwelcome sexual attention
Participants asked to stop any harassing behavior are expected to comply immediately.
Sponsors are also subject to the anti-harassment policy. In particular, sponsors should
not use sexualized images, activities, or other material. Meetup organizing staff and
other volunteer organizers should not use sexualized attire or otherwise create a
sexualized environment at community events.
In addition to the behaviors outlined above, continuing to behave a certain way after you
have been asked to stop also constitutes harassment, even if that behavior is not
specifically outlined in this policy. It is considerate and respectful to stop doing
something after you have been asked to stop, and all community members are expected to
comply with such requests immediately.
Policy violations
=================
Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by
contacting `codeofconduct@ansible.com <mailto:codeofconduct@ansible.com>`_, to anyone with administrative power in community chat (Admins or Moderators on Matrix, ops on IRC), or to the local organizers of an event. Meetup
organizers are encouraged to prominently display points of contact for reporting unacceptable
behavior at local events.
If a participant engages in harassing behavior, the meetup organizers may take any action
they deem appropriate. These actions may include but are not limited to warning the
offender, expelling the offender from the event, and barring the offender from future
community events.
Organizers will be happy to help participants contact security or local law enforcement,
provide escorts to an alternate location, or otherwise assist those experiencing
harassment to feel safe for the duration of the meetup. We value the safety and well-being
of our community members and want everyone to feel welcome at our events, both online and
offline.
We expect all participants, organizers, speakers, and attendees to follow these policies at
all of our event venues and event-related social events.
The Ansible Community Code of Conduct is licensed under the Creative Commons
Attribution-Share Alike 3.0 license. Our Code of Conduct was adapted from Codes of Conduct
of other open source projects, including:
* Contributor Covenant
* Elastic
* The Fedora Project
* OpenStack
* Puppet Labs
* Ubuntu

View File

@ -1,155 +0,0 @@
.. _collection_integration_tests_about:
Understanding integration tests
=================================
.. note::
Some collections do not have integration tests.
Integration tests are functional tests of modules and plugins.
With integration tests, we check if a module or plugin satisfies its functional requirements. Simply put, we check that features work as expected and users get the outcome described in the module or plugin documentation.
There are :ref:`two kinds of integration tests <collections_adding_integration_test>` used in collections:
* integration tests that use Ansible roles
* integration tests that use ``runme.sh``.
This section focuses on integration tests that use Ansible roles.
Integration tests check modules with playbooks that invoke those modules. The tests pass standalone parameters and their combinations, check what the module or plugin reports with the :ref:`assert <ansible_collections.ansible.builtin.assert_module>` module, and verify the actual state of the system after each task.
Integration test example
-------------------------
Let's say we want to test the ``postgresql_user`` module invoked with the ``name`` parameter. We expect that the module will both create a user based on the provided value of the ``name`` parameter and will report that the system state has changed. We cannot rely on only what the module reports. To be sure that the user has been created, we query our database with another module to see if the user exists.
.. code-block:: yaml
- name: Create PostgreSQL user and store module's output to the result variable
community.postgresql.postgresql_user:
name: test_user
register: result
- name: Check the module returns what we expect
assert:
that:
- result is changed
- name: Check actual system state with another module, in other words, that the user exists
community.postgresql.postgresql_query:
query: SELECT * FROM pg_authid WHERE rolname = 'test_user'
register: query_result
- name: We expect it returns one row, check it
assert:
that:
- query_result.rowcount == 1
Details about integration tests
--------------------------------
The basic entity of an Ansible integration test is a ``target``. The target is an :ref:`Ansible role <playbooks_reuse_roles>` stored in the ``tests/integration/targets`` directory of the collection repository. The target role contains everything that is needed to test a module.
The names of targets contain the module or plugin name that they test. Target names that start with ``setup_`` are usually executed as dependencies before module and plugin targets start execution. See :ref:`collection_creating_integration_tests` for details.
To run integration tests, we use the ``ansible-test`` utility that is included in the ``ansible-core`` and ``ansible`` packages. See :ref:`collection_run_integration_tests` for details. After you finish your integration tests, see :ref:`collection_quickstart` to learn how to submit a pull request.
.. _collection_integration_prepare:
Preparing for integration tests for collections
=================================================
To prepare for developing integration tests:
#. :ref:`Set up your local environment <collection_prepare_environment>`.
#. Determine if integration tests already exist.
.. code-block:: bash
ansible-test integration --list-targets
If a collection already has integration tests, they are stored in ``tests/integration/targets/*`` subdirectories of the collection repository.
If you use ``bash`` and the ``argcomplete`` package is installed with ``pip`` on your system, you can also get a full target list.
.. code-block:: shell
ansible-test integration <tab><tab>
Alternately, you can check if the ``tests/integration/targets`` directory contains a corresponding directory with the same name as the module. For example, the tests for the ``postgresql_user`` module of the ``community.postgresql`` collection are stored in the ``tests/integration/targets/postgresql_user`` directory of the collection repository. If there is no corresponding target there, then that module does not have integration tests. In this case, consider adding integration tests for the module. See :ref:`collection_creating_integration_tests` for details.
.. _collection_integration_recommendations:
Recommendations on coverage
===========================
Bugfixes
--------
Before fixing code, create a test case in an :ref:`appropriate test target<collection_integration_prepare>` that reproduces the bug provided by the issue reporter and described in the ``Steps to Reproduce`` issue section. :ref:`Run <collection_run_integration_tests>` the tests.
If you failed to reproduce the bug, ask the reporter to provide additional information. The issue may be related to environment settings. Sometimes specific environment issues cannot be reproduced in integration tests, in that case, manual testing by issue reporter or other interested users is required.
Refactoring code
----------------
When refactoring code, always check that related options are covered in a :ref:`corresponding test target<collection_integration_prepare>`. Do not assume that everything is covered just because the test target exists.
.. _collections_recommendation_modules:
Covering modules / new features
-------------------------------
When covering a module, cover all its options separately and their meaningful combinations. Every possible use of the module should be tested against:
- Idempotency - Does rerunning a task report no changes?
- Check-mode - Does dry-running a task behave the same as a real run? Does it not make any changes?
- Return values - Does the module return values consistently under different conditions?
Each test action has to be checked at least in the following sequence:
- Perform the action in check-mode, if supported. This should indicate a change.
- Check with another module that the changes have ``not`` actually been made.
- Perform the action for real. This should indicate a change.
- Check with another module that the changes have actually been made.
- Perform the action again in check-mode. This should indicate ``no`` change.
- Perform the action again for real. This should indicate ``no`` change.
To check a task:
1. Register the outcome of the task as a variable, for example, ``register: result``. Using the :ref:`assert <ansible_collections.ansible.builtin.assert_module>` module, check:
#. If ``- result is changed`` or not.
#. Expected return values.
2. If the module changes the system state, check the actual system state using at least one other module. For example, if the module changes a file, we can check that the file has been changed by checking its checksum with the :ref:`stat <ansible_collections.ansible.builtin.stat_module>` module before and after the test tasks.
3. Run the same task with ``check_mode: true`` if check-mode is supported by the module. Check with other modules that the actual system state has not been changed.
4. Cover cases when the module must fail. Use the ``ignore_errors: true`` option and check the returned message with the ``assert`` module.
Example:
.. code-block:: yaml
- name: Task to fail
abstract_module:
...
register: result
- name: Check the task fails and its error message
assert:
that:
- result is failed
- result.msg == 'Message we expect'
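For completeness, here is a minimal sketch of the check-mode cycle described above, again using a hypothetical ``abstract_module`` and a hypothetical ``abstract_module_info`` module for the state check:

.. code-block:: yaml

   - name: Perform the action in check mode
     abstract_module:
       name: foo
       state: present
     check_mode: true
     register: result

   - name: The task must report a change, but nothing must actually change
     assert:
       that:
         - result is changed

   - name: Verify with another module that the system state is unchanged
     abstract_module_info:
       name: foo
     register: info

   - name: Check the object still does not exist
     assert:
       that:
         - not info.exists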
Here is a summary:
- Cover options and their sensible combinations.
- Check returned values.
- Cover check-mode if supported.
- Check a system state using other modules.
- Check when a module must fail and error messages.

View File

@ -1,250 +0,0 @@
.. _collection_creating_integration_tests:
Creating new integration tests
=================================
This section covers the following cases:
- There are no integration tests for a collection or group of modules in a collection at all.
- You are adding a new module and you want to include integration tests.
- You want to add integration tests for a module that already exists without integration tests.
In other words, there are currently no tests for a module regardless of whether the module exists or not.
If the module already has tests, see :ref:`collection_updating_integration_tests`.
Simplified example
--------------------
Here is a simplified abstract example.
Let's say we are going to add integration tests to a new module in the ``community.abstract`` collection which interacts with some service.
We :ref:`checked<collection_integration_prepare>` and determined that there are no integration tests at all.
We should basically do the following:
1. Install and run the service with a ``setup`` target.
2. Create a test target.
3. Add integration tests for the module.
4. :ref:`Run the tests<collection_run_integration_tests>`.
5. Fix the code and tests as needed, run the tests again, and repeat the cycle until they pass.
.. note::
You can reuse the ``setup`` target when implementing other targets that also use the same service.
1. Clone the collection to the ``~/ansible_collections/community.abstract`` directory on your local machine.
2. From the ``~/ansible_collections/community.abstract`` directory, create directories for the ``setup`` target:
.. code-block:: bash
mkdir -p tests/integration/targets/setup_abstract_service/tasks
3. Write all the tasks needed to prepare the environment, install, and run the service.
For simplicity, let's imagine that the service is available in the native distribution repositories and no sophisticated environment configuration is required.
Add the following tasks to the ``tests/integration/targets/setup_abstract_service/tasks/main.yml`` file to install and run the service:
.. code-block:: yaml
- name: Install abstract service
package:
name: abstract_service
- name: Run the service
systemd:
name: abstract_service
state: started
This is a very simplified example.
4. Add the target for the module you are testing.
Let's say the module is called ``abstract_service_info``. Create the following directory structure in the target:
.. code-block:: bash
mkdir -p tests/integration/targets/abstract_service_info/tasks
mkdir -p tests/integration/targets/abstract_service_info/meta
Add all of the needed subdirectories. For example, if you are going to use defaults and files, add the ``defaults`` and ``files`` directories, and so on. The approach is the same as when you are creating a role.
5. To make the ``setup_abstract_service`` target run before the module's target, add the following lines to the ``tests/integration/targets/abstract_service_info/meta/main.yml`` file.
.. code-block:: yaml
dependencies:
- setup_abstract_service
6. Start with writing a single stand-alone task to check that your module can interact with the service.
We assume that the ``abstract_service_info`` module fetches some information from the ``abstract_service`` and that it has two connection parameters.
Among other fields, it returns a field called ``version`` containing a service version.
Add the following to ``tests/integration/targets/abstract_service_info/tasks/main.yml``:
.. code-block:: yaml
- name: Fetch info from abstract service
abstract_service_info:
host: 127.0.0.1 # We assume the service accepts local connection by default
port: 1234 # We assume that the service is listening to this port by default
register: result # This variable will contain the returned JSON including the server version
- name: Test the output
assert:
that:
- result.version == '1.0.0' # Check version field contains what we expect
7. :ref:`Run the tests<collection_run_integration_tests>` with the ``-vvv`` argument.
If there are any issues with connectivity (for example, the service is not accepting connections) or with the code, the play will fail.
Examine the output to see at which step the failure occurred. Investigate the reason, fix it, and run again. Repeat the cycle until the test passes.
8. If the test succeeds, write more tests. Refer to the :ref:`Recommendations on coverage<collection_integration_recommendations>` section for details.
``community.postgresql`` example
--------------------------------
Here is a real example of writing integration tests from scratch for the ``community.postgresql.postgresql_info`` module.
For the sake of simplicity, we will create very basic tests which we will run using the Ubuntu 20.04 test container.
We use ``Linux`` as a work environment and have ``git`` and ``docker`` installed and running.
We also installed ``ansible-core``.
1. Create the following directories in your home directory:
.. code-block:: bash
mkdir -p ~/ansible_collections/community
2. Fork the `collection repository <https://github.com/ansible-collections/community.postgresql>`_ through the GitHub web interface.
3. Clone the forked repository from your profile to the created path:
.. code-block:: bash
git clone https://github.com/YOURACC/community.postgresql.git ~/ansible_collections/community/postgresql
If you prefer to use the SSH protocol:
.. code-block:: bash
git clone git@github.com:YOURACC/community.postgresql.git ~/ansible_collections/community/postgresql
4. Go to the cloned repository:
.. code-block:: bash
cd ~/ansible_collections/community/postgresql
5. Be sure you are in the default branch:
.. code-block:: bash
git status
6. Checkout a test branch:
.. code-block:: bash
git checkout -b postgresql_info_tests
7. Since we already have tests for the ``postgresql_info`` module, we will run the following command:
.. code-block:: bash
rm -rf tests/integration/targets/*
With all of the targets now removed, the current state is as if we do not have any integration tests for the ``community.postgresql`` collection at all. We can now start writing integration tests from scratch.
8. We will start with creating a ``setup`` target that will install all required packages and will launch PostgreSQL. Create the following directories:
.. code-block:: bash
mkdir -p tests/integration/targets/setup_postgresql_db/tasks
9. Create the ``tests/integration/targets/setup_postgresql_db/tasks/main.yml`` file and add the following tasks to it:
.. code-block:: yaml
- name: Install required packages
package:
name:
- apt-utils
- postgresql
- postgresql-common
- python3-psycopg2
- name: Initialize PostgreSQL
shell: . /usr/share/postgresql-common/maintscripts-functions && set_system_locale && /usr/bin/pg_createcluster -u postgres 12 main
args:
creates: /etc/postgresql/12/
- name: Start PostgreSQL service
ansible.builtin.service:
name: postgresql
state: started
That is enough for our very basic example.
10. Then, create the following directories for the ``postgresql_info`` target:
.. code-block:: bash
mkdir -p tests/integration/targets/postgresql_info/tasks tests/integration/targets/postgresql_info/meta
11. To make the ``setup_postgresql_db`` target run before the ``postgresql_info`` target as a dependency, create the ``tests/integration/targets/postgresql_info/meta/main.yml`` file and add the following code to it:
.. code-block:: yaml
dependencies:
- setup_postgresql_db
12. Now we are ready to add our first test task for the ``postgresql_info`` module. Create the ``tests/integration/targets/postgresql_info/tasks/main.yml`` file and add the following code to it:
.. code-block:: yaml
- name: Test postgresql_info module
become: true
become_user: postgres
community.postgresql.postgresql_info:
login_user: postgres
login_db: postgres
register: result
- name: Check the module returns what we expect
assert:
that:
- result is not changed
- result.version.major == 12
- result.version.minor == 8
In the first task, we run the ``postgresql_info`` module to fetch information from the database we installed and launched with the ``setup_postgresql_db`` target. We are saving the values returned by the module into the ``result`` variable.
In the second task, we check the ``result`` variable, which is what the first task returned, with the ``assert`` module. We expect that, among other things, the result has the version and reports that the system state has not been changed.
13. Run the tests in the Ubuntu 20.04 docker container:
.. code-block:: bash
ansible-test integration postgresql_info --docker ubuntu2004 -vvv
The tests should pass. If we look at the output, we should see something like the following:
.. code-block:: shell
TASK [postgresql_info : Check the module returns what we expect] ***************
ok: [testhost] => {
"changed": false,
"msg": "All assertions passed"
}
If your tests fail when you are working on your project, examine the output to see at which step the failure occurred. Investigate the reason, fix it, and run again. Repeat the cycle until the test passes. If the test succeeds, write more tests. Refer to the :ref:`Recommendations on coverage<collection_integration_recommendations>` section for details.

View File

@ -1,32 +0,0 @@
.. _collection_run_integration_tests:
Running integration tests
============================
In the following examples, we will use ``Docker`` to run integration tests locally. Ensure you :ref:`prepare your environment <collection_prepare_environment>` first.
We assume that you are in the ``~/ansible_collections/NAMESPACE/COLLECTION`` directory.
After you change the tests, you can run them with the following command:
.. code-block:: text
ansible-test integration <target_name> --docker <distro>
The ``target_name`` is a test role directory containing the tests. For example, if the test files you changed are stored in the ``tests/integration/targets/postgresql_info/`` directory and you want to use the ``fedora34`` container image, then the command will be:
.. code-block:: bash
ansible-test integration postgresql_info --docker fedora34
You can use the ``-vv`` or ``-vvv`` argument if you need more detailed output.
In the examples above, the ``fedora34`` test image will be automatically downloaded and used to create and run a test container.
See the :ref:`list of supported container images <test_container_images>`.
In some cases, for example, for platform-independent tests, the ``default`` test image is required. Use the ``--docker default`` or just ``--docker`` option without specifying a distribution in this case.
.. note::
If you have any difficulties with writing or running integration tests or you are not sure if the case can be covered, submit your pull request without the tests. Other contributors can help you with them later if needed.

View File

@ -1,36 +0,0 @@
.. _collection_integration_tests:
*****************************************
Adding integration tests to a collection
*****************************************
This section describes the steps to add integration tests to a collection and how to run them locally using the ``ansible-test`` command.
.. toctree::
:maxdepth: 1
collection_integration_about
collection_integration_updating
collection_integration_running
collection_integration_add
.. seealso::
:ref:`testing_units_modules`
Unit testing Ansible modules
`pytest <https://docs.pytest.org/en/latest/>`_
Pytest framework documentation
:ref:`developing_testing`
Ansible Testing Guide
:ref:`collection_unit_tests`
Unit testing for collections
:ref:`testing_integration`
Integration tests guide
:ref:`testing_collections`
Testing collections
:ref:`testing_resource_modules`
Resource module integration tests
:ref:`collection_pr_test`
How to test a pull request locally

View File

@ -1,169 +0,0 @@
.. _collection_updating_integration_tests:
Adding to an existing integration test
=======================================
The test tasks are stored in the ``tests/integration/targets/<target_name>/tasks`` directory.
The ``main.yml`` file holds test tasks and includes other test files.
Look for a suitable test file to integrate your tests or create and include or import a separate test file.
You can use one of the existing test files as a draft.
When fixing a bug
-----------------
When fixing a bug:
1. :ref:`Determine if integration tests for the module exist<collection_integration_prepare>`. If they do not, see :ref:`collection_creating_integration_tests` section.
2. Add a task that reproduces the bug to an appropriate file within the ``tests/integration/targets/<target_name>/tasks`` directory.
3. :ref:`Run the tests<collection_run_integration_tests>`. The newly added task should fail.
4. If they do not fail, re-check if your environment or test task satisfies the conditions described in the ``Steps to Reproduce`` section of the issue.
5. If you reproduce the bug and tests fail, change the code.
6. :ref:`Run the tests<collection_run_integration_tests>` again.
7. If they fail, repeat steps 5-6 until the tests pass.
Here is an example.
Let's say someone reported an issue in the ``community.postgresql`` collection that when users pass a name containing underscores to the ``postgresql_user`` module, the module fails.
We cloned the collection repository to the ``~/ansible_collections/community/postgresql`` directory and :ref:`prepared our environment <collection_prepare_environment>`. From the collection's root directory, we run ``ansible-test integration --list-targets`` and it shows a target called ``postgresql_user``. It means that we already have tests for the module.
We start with reproducing the bug.
First, we look into the ``tests/integration/targets/postgresql_user/tasks/main.yml`` file. In this particular case, the file imports other files from the ``tasks`` directory. The ``postgresql_user_general.yml`` looks like an appropriate one to add our tests.
.. code-block:: yaml
# General tests:
- import_tasks: postgresql_user_general.yml
when: postgres_version_resp.stdout is version('9.4', '>=')
We will add the following code to the file.
.. code-block:: yaml
# https://github.com/ansible-collections/community.postgresql/issues/NUM
- name: Test user name containing underscore
community.postgresql.postgresql_user:
name: underscored_user
register: result
- name: Check the module returns what we expect
assert:
that:
- result is changed
- name: Query the database if the user exists
community.postgresql.postgresql_query:
query: SELECT * FROM pg_authid WHERE rolname = 'underscored_user'
register: result
- name: Check the database returns one row
assert:
that:
- result.rowcount == 1
When we :ref:`run the tests<collection_run_integration_tests>` with ``postgresql_user`` as a test target, this task must fail.
Now that we have our failing test, we will fix the bug and run the same tests again. Once the tests pass, we will consider the bug fixed and will submit a pull request.
When adding a new feature
-------------------------
.. note::
The process described in this section also applies when you want to add integration tests to a feature that already exists, but is missing integration tests.
If you have not already implemented the new feature, you can start by writing the integration tests for it. They will not work as the code does not yet exist, but they can help you improve your implementation design before you start writing any code.
When adding new features, the process of adding tests consists of the following steps:
1. :ref:`Determine if integration tests for the module exists<collection_integration_prepare>`. If they do not, see :ref:`collection_creating_integration_tests`.
2. Find an appropriate file for your tests within the ``tests/integration/targets/<target_name>/tasks`` directory.
3. Cover your feature with tests. Refer to the :ref:`Recommendations on coverage<collection_integration_recommendations>` section for details.
4. :ref:`Run the tests<collection_run_integration_tests>`.
5. If they fail, see the test output for details. Fix your code or tests and run the tests again.
6. Repeat steps 4-5 until the tests pass.
Here is an example.
Let's say we decided to add a new option called ``add_attribute`` to the ``postgresql_user`` module of the ``community.postgresql`` collection.
The option is boolean. If set to ``yes``, it adds an additional attribute to a database user.
We cloned the collection repository to the ``~/ansible_collections/community/postgresql`` directory and :ref:`prepared our environment<collection_integration_prepare>`. From the collection's root directory, we run ``ansible-test integration --list-targets`` and it shows a target called ``postgresql_user``. Therefore, we already have some tests for the module.
First, we look at the ``tests/integration/targets/<target_name>/tasks/main.yml`` file. In this particular case, the file imports other files from the ``tasks`` directory. The ``postgresql_user_general.yml`` file looks like an appropriate one to add our tests.
.. code-block:: yaml
# General tests:
- import_tasks: postgresql_user_general.yml
when: postgres_version_resp.stdout is version('9.4', '>=')
We will add the following code to the file.
.. code-block:: yaml
# https://github.com/ansible-collections/community.postgresql/issues/NUM
# We should also run the same tasks with check_mode: true. We omit it here for simplicity.
- name: Test for new_option, create new user WITHOUT the attribute
community.postgresql.postgresql_user:
name: test_user
register: result
- name: Check the module returns what we expect
assert:
that:
- result is changed
- name: Query the database if the user exists but does not have the attribute (it is NULL)
community.postgresql.postgresql_query:
query: SELECT * FROM pg_authid WHERE rolname = 'test_user' AND attribute IS NULL
register: result
- name: Check the database returns one row
assert:
that:
- result.rowcount == 1
- name: Test for new_option, create new user WITH the attribute
community.postgresql.postgresql_user:
name: test_user
register: result
- name: Check the module returns what we expect
assert:
that:
- result is changed
- name: Query the database if the user has the attribute (it is TRUE)
community.postgresql.postgresql_query:
query: SELECT * FROM pg_authid WHERE rolname = 'test_user' AND attribute = 't'
register: result
- name: Check the database returns one row
assert:
that:
- result.rowcount == 1
Then we :ref:`run the tests<collection_run_integration_tests>` with ``postgresql_user`` passed as a test target.
In reality, we would alternate the tasks above with the same tasks run with the ``check_mode: true`` option to be sure our option works as expected in check-mode as well. See :ref:`Recommendations on coverage<collection_integration_recommendations>` for details.
If we expect a task to fail, we use the ``ignore_errors: true`` option and check that the task actually failed and returned the message we expect:
.. code-block:: yaml
- name: Test for fail_when_true option
community.postgresql.postgresql_user:
name: test_user
fail_when_true: true
register: result
ignore_errors: true
- name: Check the module fails and returns message we expect
assert:
that:
- result is failed
- result.msg == 'The message we expect'

View File

@ -1,341 +0,0 @@
.. _collection_release_with_branches:
Releasing collections with release branches
============================================
Collections MUST follow the `semantic versioning <https://semver.org/>`_ rules. See :ref:`releasing_collections` for high-level details.
.. contents::
:local:
Release planning and announcement
----------------------------------
#. Announce your intention to release the collection in a corresponding pinned release issue/community pinboard of the collection and in the ``#ansible-community`` `Matrix/IRC channel <https://docs.ansible.com/ansible/devel/community/communication.html#real-time-chat>`_. Repeat the announcement in any other dedicated channels if they exist.
#. Ensure all the other repository maintainers are informed about the time of the following release.
Releasing major collection versions
-------------------------------------
The new version is assumed to be ``X.0.0``.
1. Make sure that ``galaxy.yml`` contains the correct version number ``X.0.0``. If that is not the case, create a PR to update it. This will make sanity tests fail for all deprecations that have to be removed in ``X.0.0``, so this is potentially a lot of work and should have been done weeks before the major release.
2. Check the collection for deprecations that are planned for removal in the major release that were not reported by the sanity tests. Use past changelogs or run ``grep -r X.0.0 plugins/`` in the repository.
3. If you are going to release the ``community.general`` and ``community.network`` collections, create a new ``backport-X`` label in the corresponding repositories. Copy the styles and descriptions from the corresponding existing labels.
4. Ensure you are in a default branch in your local fork. These examples use ``main``.
.. code-block:: bash
git status
git checkout main # if needed
5. Update your local fork:
.. code-block:: bash
git pull --rebase upstream main
Creating the release branch
^^^^^^^^^^^^^^^^^^^^^^^^^^^^
1. Create a branch ``stable-X``. Replace ``X`` with the correct number and push it to the **upstream** repository, NOT to ``origin``:
.. code-block:: bash
git branch stable-X main
git push upstream stable-X
2. Create and check out another branch from the ``main`` branch:
.. code-block:: bash
git checkout -b update_repo
3. Update the version in ``galaxy.yml`` in the branch to the next **expected** version, for example, ``X.1.0``.
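For example, the relevant part of ``galaxy.yml`` could then read (the namespace and name are placeholders, and ``X`` stands for the actual major version):

.. code-block:: yaml

   namespace: community
   name: foo
   version: X.1.0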
Creating the changelogs
^^^^^^^^^^^^^^^^^^^^^^^^
1. Replace ``changelogs/changelog.yml`` with:
.. code-block:: yaml
ancestor: X.0.0
releases: {}
2. Remove all changelog fragments from ``changelogs/fragments/``. Removing the changelog fragments ensures that every major release has a changelog describing changes since the last major release.
3. Add and commit all the changes made. Push the branch to the ``origin`` repository.
4. Create a pull request in the collection repository. If CI tests pass, merge the pull request since the ``main`` branch is expecting changes for the next minor/major versions.
5. Switch to the ``stable-X`` branch.
6. In the ``stable-X`` branch, verify that ``galaxy.yml`` contains the correct version number ``X.0.0``.
7. In the ``stable-X`` branch, ensure that ``changelogs/changelog.yml`` contains a correct ancestor's version:
.. code-block:: yaml
ancestor: X-1.0.0
releases: {}
8. In the ``stable-X`` branch, add a changelog fragment ``changelogs/fragments/X.0.0.yml`` with the content:
.. code-block:: yaml
release_summary: |-
Write some text here that should appear as the release summary for this version.
The format is reStructuredText, but not a list as for regular changelog fragments.
This text will be inserted into the changelog.
For example:
.. code-block:: yaml
release_summary: This is release 2.0.0 of ``community.foo``, released on YYYY-MM-DD.
9. In the ``stable-X`` branch, generate the changelogs:
.. code-block:: bash
antsibull-changelog release --cummulative-release
10. In the ``stable-X`` branch, verify that the ``CHANGELOG.rst`` looks as expected.
11. In the ``stable-X`` branch, update ``README.md`` so that the changelog link points to ``/tree/stable-X/`` and no longer to ``/tree/main/``, and change badges respectively, for example, in case of AZP, add ``?branchName=stable-X`` to the AZP CI badge (https://dev.azure.com/ansible/community.xxx/_apis/build/status/CI?branchName=stable-X).
12. In the ``stable-X`` branch, add, commit, and push changes to ``README.md``, ``CHANGELOG.rst`` and ``changelogs/changelog.yaml``, and potentially deleted/archived fragments to the **upstream** repository, NOT to the ``origin``.
Publishing the collection
^^^^^^^^^^^^^^^^^^^^^^^^^^
1. In the ``stable-X`` branch, add an annotated tag to the last commit with the collection version ``X.0.0``. Pushing this tag to the ``upstream`` repository will make Zuul publish the collection on `Ansible Galaxy <https://galaxy.ansible.com/>`_.
.. code-block:: bash
git tag -n # see current tags and their comments
git tag -a NEW_VERSION -m "comment here" # the comment can be, for example, "community.foo: 2.0.0"
git push upstream NEW_VERSION
2. If the collection uses `Zuul <https://github.com/ansible/zuul-config/blob/master/README.rst>`_ for publishing its releases, wait until the new version is published on the collection's `Ansible Galaxy <https://galaxy.ansible.com/>`_ page. It will appear in a list of tarballs available to download.
3. If the release tarball did not appear within several hours after pushing the tag, try to re-tag the release commit and push the tag again. In the ``stable-X`` branch, at the release commit:
.. code-block:: bash
git tag --delete NEW_VERSION
git push upstream :NEW_VERSION
git tag -a NEW_VERSION -m "comment here" # the comment can be, for example, "community.foo: 2.0.0"
git push upstream NEW_VERSION
4. Add a GitHub release for the new tag. The title should be the version, and the content can be, for example, ``See https://github.com/ansible-collections/community.xxx/blob/stable-X/CHANGELOG.rst for all changes``.
5. Announce the release through the `Bullhorn Newsletter <https://github.com/ansible/community/wiki/News#the-bullhorn>`_.
6. Announce the release in the pinned release issue/community pinboard of the collection and in the ``#ansible-community`` `Matrix/Libera.Chat IRC channel <https://docs.ansible.com/ansible/devel/community/communication.html#real-time-chat>`_.
7. In the ``stable-X`` branch, update the version in ``galaxy.yml`` to the next **expected** version, for example, ``X.1.0``. Add, commit and push to the **upstream** repository.
Releasing minor collection versions
-------------------------------------
The new version is assumed to be ``X.Y.0``. All changes that should go into it are expected to be previously backported from the default branch to the ``stable-X`` branch.
Creating the changelogs
^^^^^^^^^^^^^^^^^^^^^^^^
1. In the ``stable-X`` branch, make sure that ``galaxy.yml`` contains the correct version number ``X.Y.0``. If not, update it.
2. In the ``stable-X`` branch, add a changelog fragment ``changelogs/fragments/X.Y.0.yml`` with content:
.. code-block:: yaml
release_summary: |-
Write some text here that should appear as the release summary for this version.
The format is reStructuredText, but not a list as for regular changelog fragments.
This text will be inserted into the changelog.
3. In the ``stable-X`` branch, run:
.. code-block:: bash
antsibull-changelog release
4. In the ``stable-X`` branch, verify that ``CHANGELOG.rst`` looks as expected.
5. In the ``stable-X`` branch, add, commit, and push changes to ``CHANGELOG.rst`` and ``changelogs/changelog.yaml``, and potentially deleted/archived fragments to the **upstream** repository, NOT to the origin.
Publishing the collection
^^^^^^^^^^^^^^^^^^^^^^^^^^
1. In the ``stable-X`` branch, add an annotated tag to the last commit with the collection version ``X.Y.0``. Pushing this tag to the ``upstream`` repository will make Zuul publish the collection on `Ansible Galaxy <https://galaxy.ansible.com/>`_.
.. code-block:: bash
git tag -n # see current tags and their comments
git tag -a NEW_VERSION -m "comment here" # the comment can be, for example, "community.foo: 2.1.0"
git push upstream NEW_VERSION
2. Wait until the new version is published on the collection's `Ansible Galaxy <https://galaxy.ansible.com/>`_ page. The published version will appear in a list of tarballs available to download.
3. Add a GitHub release for the new tag. The title should be the version; the content can be, for example, ``See https://github.com/ansible-collections/community.xxx/blob/stable-X/CHANGELOG.rst for all changes``.
4. Announce the release through the `Bullhorn Newsletter <https://github.com/ansible/community/wiki/News#the-bullhorn>`_.
5. Announce the release in the pinned release issue/community pinboard of the collection and in the ``#ansible-community`` `Matrix/IRC channel <https://docs.ansible.com/ansible/devel/community/communication.html#real-time-chat>`_. Additionally, you can announce it using GitHub's Releases system.
6. In the ``stable-X`` branch, update the version in ``galaxy.yml`` to the next **expected** version, for example, if you have released ``X.1.0``, the next expected version could be ``X.2.0``. Add, commit and push to the **upstream** repository.
7. Checkout to the ``main`` branch.
8. In the ``main`` branch:
#. If more minor versions are released before the next major version, update the version in ``galaxy.yml`` to ``X.(Y+1).0`` as well. Create a dedicated pull request and merge.
#. If the next version will be a new major version, create a pull request where you update the version in ``galaxy.yml`` to ``(X+1).0.0``. Note that the sanity tests will most likely fail since there will be deprecations with removal scheduled for ``(X+1).0.0``, which are flagged by the tests.
For every such deprecation, decide:
* Whether to remove them now. For example you remove the complete ``modules/plugins`` or you remove redirects.
* Whether to add ignore entries to the corresponding ``tests/sanity/ignore-*.txt`` file and create issues, for example for removed features in ``modules/plugins``.
Once the CI tests pass, merge the pull request. Make sure this pull request is merged soon after the release so that the ``version_added`` sanity tests do not expect the wrong version for new feature pull requests.
.. note::
It makes sense to do some of these removals in the days before the release. These removals must happen in the ``main`` branch and must not be backported.
Releasing patch versions
-------------------------
The new version is assumed to be ``X.Y.Z``, and the previous patch version is assumed to be ``X.Y.z`` with ``z < Z``. ``z`` is frequently ``0``, since patch releases are uncommon.
Releasing when more minor versions are expected
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
1. Checkout the ``X.Y.z`` tag.
2. Update ``galaxy.yml`` so that the version is ``X.Y.Z``. Add and commit.
3. Cherry-pick all changes from ``stable-X`` that were added after ``X.Y.z`` and should go into ``X.Y.Z``.
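For example (``X.Y.z`` and ``COMMIT_HASH`` below are placeholders):

.. code-block:: bash

   git log --oneline X.Y.z..stable-X   # review what landed in stable-X after the last patch release
   git cherry-pick -x COMMIT_HASH      # repeat for every change that should go into X.Y.Z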
4. Add a changelog fragment ``changelogs/fragments/X.Y.Z.yml`` with content:
.. code-block:: yaml
release_summary: |-
Write some text here that should appear as the release summary for this version.
The format is reStructuredText but not a list as for regular changelog fragments.
This text will be inserted into the changelog.
Add to git and commit.
5. Generate the changelogs.
.. code-block:: bash
antsibull-changelog release
6. Verify that ``CHANGELOG.rst`` looks as expected.
7. Add and commit changes to ``CHANGELOG.rst`` and ``changelogs/changelog.yaml``, and potentially deleted/archived fragments.
**Publishing the collection**
1. Add an annotated tag to the last commit with the collection version ``X.Y.Z``. Pushing this tag to the ``upstream`` repository will make Zuul publish the collection on `Ansible Galaxy <https://galaxy.ansible.com/>`_.
.. code-block:: bash
git tag -n # see current tags and their comments
git tag -a NEW_VERSION -m "comment here" # the comment can be, for example, "community.foo: 2.1.1"
git push upstream NEW_VERSION
2. Wait until the new version is published on the collection's `Ansible Galaxy <https://galaxy.ansible.com/>`_ page. It will appear in a list of tarballs available to download.
3. Add a GitHub release for the new tag. The title should be the version; the content can be, for example, ``See https://github.com/ansible-collections/community.xxx/blob/stable-X/CHANGELOG.rst for all changes``.
.. note::
The data for this release is only contained in a tag, and not in a branch, in particular not in ``stable-X``.
This is deliberate, since the next minor release ``X.(Y+1).0`` already contains the changes for ``X.Y.Z`` as well, since these were cherry-picked from ``stable-X``.
4. Announce the release through the `Bullhorn Newsletter <https://github.com/ansible/community/wiki/News#the-bullhorn>`_.
5. Announce the release in the pinned release issue/community pinboard of the collection and in the ``#ansible-community`` `Matrix/IRC channel <https://docs.ansible.com/ansible/devel/community/communication.html#real-time-chat>`_.
Releasing when no more minor versions are expected
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
1. In the ``stable-X`` branch, make sure that ``galaxy.yml`` contains the correct version number ``X.Y.Z``. If not, update it!
2. In the ``stable-X`` branch, add a changelog fragment ``changelogs/fragments/X.Y.Z.yml`` with content:
.. code-block:: yaml
release_summary: |-
Write some text here that should appear as the release summary for this version.
The format is reStructuredText, but not a list as for regular changelog fragments.
This text will be inserted into the changelog.
3. Generate the changelogs in the ``stable-X`` branch.
.. code-block:: bash
antsibull-changelog release
4. In the ``stable-X`` branch, verify that ``CHANGELOG.rst`` looks as expected.
5. In the ``stable-X`` branch, add, commit, and push changes to ``CHANGELOG.rst`` and ``changelogs/changelog.yaml``, and potentially deleted/archived fragments to the **upstream** repository, NOT to the origin.
**Publishing the collection**
1. In the ``stable-X`` branch, add an annotated tag to the last commit with the collection version ``X.Y.Z``. Pushing this tag to the ``upstream`` repository will make Zuul publish the collection on `Ansible Galaxy <https://galaxy.ansible.com/>`_.
.. code-block:: bash
git tag -n # see current tags and their comments
git tag -a NEW_VERSION -m "comment here" # the comment can be, for example, "community.foo: 2.1.1"
git push upstream NEW_VERSION
2. Wait until the new version is published on the collection's `Ansible Galaxy <https://galaxy.ansible.com/>`_ page. It will appear in a list of tarballs available to download.
3. Add a GitHub release for the new tag. The title should be the version; the content can be, for example, ``See https://github.com/ansible-collections/community.xxx/blob/stable-X/CHANGELOG.rst for all changes``.
4. Announce the release through the `Bullhorn Newsletter <https://github.com/ansible/community/wiki/News#the-bullhorn>`_.
5. Announce the release in the pinned issue/community pinboard of the collection and in the ``#ansible-community`` `Matrix/IRC channel <https://docs.ansible.com/ansible/devel/community/communication.html#real-time-chat>`_.

View File

@ -1,115 +0,0 @@
.. _collection_release_without_branches:
Releasing collections without release branches
===============================================
Since no release branches are used, this section does not distinguish between releasing a major, minor, or patch version.
.. contents::
:local:
Release planning and announcement
----------------------------------
#. Examine the collection to determine if there are merged changes to release.
#. According to the changes made, choose an appropriate release version number. Keep in mind that the collections must follow the `semantic versioning <https://semver.org/>`_ rules. See :ref:`collection_versioning_and_deprecation` for details.
#. Announce your intention to release the collection in a corresponding pinned release issue or community pinboard of the collection and in the ``community`` :ref:`Matrix/IRC channel <communication_irc>`.
Creating the release branch
----------------------------
1. Ensure you are in a default branch in your local fork. We use ``main`` in the following examples.
.. code:: bash
git status
git checkout main # if needed
2. Update your local fork:
.. code:: bash
git pull --rebase upstream main
3. Checkout a new release branch from the default branch:
.. code:: bash
git checkout -b release_branch
4. Ensure the ``galaxy.yml`` contains the correct release version number.
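For example, the relevant part of ``galaxy.yml`` could look as follows (namespace, name, and version are illustrative):

.. code:: yaml

   namespace: community
   name: foo
   version: 1.2.0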
Generating the changelog
-------------------------
1. Add a changelog fragment ``changelogs/fragments/X.Y.Z.yml`` with content:
.. code:: yaml
release_summary: |-
Write some text here that should appear as the release summary for this version.
The format is reStructuredText, but not a list as for regular changelog fragments.
This text will be inserted into the changelog.
For example:
.. code:: yaml
release_summary: |-
This is the minor release of the ``community.mysql`` collection.
This changelog contains all changes to the modules and plugins in this collection
that have been made after the previous release.
2. If the content was recently moved from another collection (for example, migrating a module from one collection to another), ensure you have all related changelog fragments in the ``changelogs/fragments`` directory. If not, copy them there first.
3. Run ``antsibull-changelog release --reload-plugins``. This package should be installed beforehand with ``pip install antsibull-changelog``.
4. Verify that the ``CHANGELOG.rst`` looks as expected.
5. Commit and push changes to the ``CHANGELOG.rst`` and ``changelogs/changelog.yaml``, and potentially deleted/archived fragments to the ``origin`` repository's ``release_branch``.
.. code:: bash
git commit -a -m "Release VERSION commit"
git push origin release_branch
6. Create a pull request in the collection repository. If CI tests pass, merge it.
7. Checkout the default branch and pull the changes:
.. code:: bash
git checkout main
git pull --rebase upstream main
Publish the collection
-----------------------------------
1. Add an annotated tag to the release commit with the collection version. Pushing this tag to the ``upstream`` repository will make Zuul publish the collection on `Ansible Galaxy <https://galaxy.ansible.com/>`_.
.. code:: bash
git tag -n # see current tags and their comments
git tag -a NEW_VERSION -m "comment here" # the comment can be, for example, "community.postgresql: 1.2.0"
git push upstream NEW_VERSION
2. Wait until the new version is published on the collection's `Ansible Galaxy <https://galaxy.ansible.com/>`_ page. It will appear in a list of tarballs available to download.
3. Update the version in the ``galaxy.yml`` file to the next **expected** version. Add, commit, and push to the ``upstream``'s default branch.
4. Add a GitHub release for the new tag. The title should be the version; the content can be, for example, ``See https://github.com/ansible-collections/community.xxx/blob/main/CHANGELOG.rst for all changes``.
5. Announce the release through the `Bullhorn Newsletter issue <https://github.com/ansible/community/wiki/News#the-bullhorn>`_.
6. Announce the release in the pinned release issue/community pinboard of the collection mentioned in step 3 and in the ``community`` :ref:`Matrix/IRC channel <communication_irc>`.

View File

@ -1,106 +0,0 @@
.. _releasing_collections:
.. _Releasing:
Releasing collections
======================
Collection maintainers release all supported stable versions of the collections regularly,
provided that there have been enough changes merged to release.
.. contents::
:local:
Preparing to release a collection
--------------------------------------------
The collections under the `ansible-collections organization <https://github.com/ansible-collections>`_ follow `semantic versioning <https://semver.org/>`_ when releasing. See :ref:`collection_versioning_and_deprecation` for details.
To prepare for a release, a collection must have:
* A publicly available policy of releasing, versioning, and deprecation. This can be, for example, written in its README or in a dedicated pinned issue.
* A pinned issue when its release managers inform the community about planned or completed releases. This can be combined with the release policy issue mentioned above.
* A :ref:`changelog <collection_changelogs>`.
* Releases of the collection tagged in the collection's repository.
* CI pipelines up and running. This can be implemented by using GitHub Actions, Azure Pipelines, Zuul.
* All CI tests running against a commit that releases the collection. If they do not pass, the collection MUST NOT be released.
See :ref:`including_collection_ansible` if you plan on adding a new collection to the Ansible package.
.. note::
Your collection must pass ``ansible-test sanity`` tests. See :ref:`testing_collections` for details.
.. _collection_versioning_and_deprecation:
Collection versioning and deprecation
--------------------------------------
.. note::
Collections MUST adhere to `semantic versioning <https://semver.org/>`_.
To preserve backward compatibility for users, every Ansible minor version series (5.1.x, 5.2.x, and so on) will keep the major version of a collection constant. For example, if Ansible 5.0.0 includes ``community.general`` 4.0.2, then each Ansible 5.X.x release will include the latest ``community.general`` 4.y.z release available at build time. Ansible 5.x.x will **never** include a ``community.general`` 5.y.x release, even if it is available. Major collection version changes will be included in the next Ansible major release (6.0.0 in this case).
Ensure that the current major release of your collection included in 6.0.0 receives at least bugfixes as long as new Ansible 6.X.X releases are produced.
Since new minor releases are included, you can include new features, modules and plugins. You must make sure that you do not break backwards compatibility. See `semantic versioning <https://semver.org/>`_ for more details. This means in particular:
* You can fix bugs in **patch** releases but not add new features or deprecate things.
* You can add new features and deprecate things in **minor** releases, but not remove things or change behavior of existing features.
* You can only remove things or make breaking changes in **major** releases.
Ensure that if a deprecation is added in a collection version that is included in 5.x.y, the removal itself will only happen in a collection version included in 7.0.0 or later.
Ensure that the policy of releasing, versioning, and deprecation is announced to contributors and users in some way. For an example of how to do this, see `the announcement in community.general <https://github.com/ansible-collections/community.general/issues/582>`_. You could also do this in the collection README file.
.. _collection_changelog:
Collection changelogs
----------------------
Collections MUST include a changelog. To give a consistent feel for changelogs across collections and ensure changelogs exist for collections included in the ``ansible`` package, we suggest you use `antsibull-changelog <https://github.com/ansible-community/antsibull-changelog>`_ to maintain and generate this.
Before releasing, verify the following for your changelogs:
* All merged pull requests since the last release, except ones related to documentation and new modules/plugins, have :ref:`changelog fragments <collection_changelog_fragments>`.
* New module and plugin pull requests, except jinja2 test and filter plugins, do **not** need a changelog fragment; they are auto-detected by the changelog generator through their ``version_added`` value.
* All the fragments follow the :ref:`changelog entry format <collection_changelogs_how_to_format>`.
Options for releasing a collection
-----------------------------------
There are several approaches to releasing a collection. If you are not sure which approach to use, ask in the ``#ansible-community`` IRC channel or the ``community`` Matrix channel.
This section assumes that publishing the collection is done with `Zuul <https://github.com/ansible/project-config>`_ and that `antsibull-changelog <https://github.com/ansible-community/antsibull-changelog>`_ is used for the changelog.
Releasing without release branches
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Use releasing without release branches when:
* There are no prior major releases of the collection.
* There are no breaking changes introduced since the ``1.0.0`` release of the collection.
See :ref:`collection_release_without_branches` for details.
When there is a need to introduce breaking changes, you can switch to the next approach.
Hybrid approach
^^^^^^^^^^^^^^^^^^^^^
In this approach, releases for the current major version are made from the ``main`` branch, while new releases for older major versions are made from release branches for these versions.
Releasing with release branches
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Use releasing with release branches when breaking changes have been introduced. This approach is usually only used by the large community collections, ``community.general`` and ``community.network``.
See :ref:`collection_release_with_branches` for details.
.. toctree::
:maxdepth: 1
collection_release_without_branches
collection_release_with_branches

View File

@ -1,499 +0,0 @@
.. _collections_requirements:
**************************************************
Ansible community package collections requirements
**************************************************
This section describes the requirements for maintainers of Ansible community collections in the `ansible-collections <https://github.com/ansible-collections>`_ repository or included in the Ansible community package.
.. contents::
:local:
Overview
========
This section provides help, advice, and guidance on making sure your collections are correct and ready for inclusion in the Ansible community package.
.. note::
`Inclusion of a new collection <https://github.com/ansible-collections/ansible-inclusion>`_ in the Ansible package is ultimately at the discretion of the :ref:`community_steering_committee`. Every rejected candidate will get feedback. Differences of opinion should be taken to a dedicated `Community Topic <https://github.com/ansible-community/community-topics/issues>`_ for discussion and a final vote.
Feedback and communications
==============================
As with any project it is very important that we get feedback from users, contributors, and maintainers. You can get feedback and help as follows:
* Discussing in the `#community:ansible.com Matrix room <https://matrix.to/#/#community:ansible.com>`_, which is bridged with the ``#ansible-community`` channel on Libera.Chat IRC. See the :ref:`Ansible Communication Guide <communication_irc>` for details.
* Discussing in the `Community Working Group meeting <https://github.com/ansible/community/blob/main/meetings/README.md#wednesdays>`_.
* Creating `GitHub Issues <https://github.com/ansible-collections/overview/issues>`_ in the ``ansible-collections`` repository.
Keeping informed
================
You should subscribe to:
* The `news-for-maintainers repository <https://github.com/ansible-collections/news-for-maintainers>`_ to track changes that collection maintainers should be aware of. Subscribe only to issues if you want less traffic.
* The `Bullhorn <https://github.com/ansible/community/wiki/News#the-bullhorn>`_ Ansible contributor newsletter.
.. _coll_infrastructure_reqs:
Collection infrastructure
=========================
The following guidelines describe the required structure for your collection:
* MUST have a publicly available issue tracker that does not require a paid level of service to create an account or view issues.
* MUST have a Code of Conduct (CoC).
* The collection's CoC MUST be compatible with the :ref:`code_of_conduct`.
* The collections SHOULD consider using the Ansible CoC if they do not have a CoC that they consider better.
* The :ref:`Diversity and Inclusion working group <working_group_list>` may evaluate all CoCs and object to a collection's inclusion based on the CoCs contents.
* The CoC MUST be linked from the ``README.md`` file, or MUST be present or linked from the ``CODE_OF_CONDUCT.md`` file in the collection root.
* MUST be published to `Ansible Galaxy <https://galaxy.ansible.com>`_.
* SHOULD NOT contain any large objects (binaries) relative to the current Galaxy tarball size limit of 20 MB. For example, do not include package installers for testing purposes.
* SHOULD NOT contain any unnecessary files such as temporary files.
* MUST only contain objects that follow the :ref:`Licensing rules <coll_licensing_req>`.
.. _coll_python_compatibility:
Python Compatibility
====================
A collection MUST be developed and tested using the below Python requirements as Ansible supports a wide variety of machines.
The collection should adhere to the tips at :ref:`ansible-and-python-3`.
.. _coll_python_reqs:
Python Requirements
-------------------
Python requirements for a collection vary between **controller environment** and **other environment**. On the controller-environment, the Python versions required may be higher than what is required on the other-environment. While developing a collection, you need to understand the definitions of both the controller-environment and other-environment to help you choose Python versions accordingly:
* controller environment: The plugins/modules always run in the same environment (Python interpreter, venv, host, and so on) as ansible-core itself.
* other environment: It is possible, even if uncommon in practice, for the plugins/modules to run in a different environment than ansible-core itself.
One example scenario where the "even if" clause comes into play is when using cloud modules. These modules mostly run on the controller node but in some environments, the controller might run on one machine inside a demilitarized zone which cannot directly access the cloud machines. The user has to have the cloud modules run on a bastion host/jump server which has access to the cloud machines.
.. _coll_controller_req:
Controller environment
~~~~~~~~~~~~~~~~~~~~~~
Review the :ref:`support_life` for the versions of ``ansible-core`` that the collection supports. Collections MUST document the supported Python versions for plugins which cannot support all controller Python versions (for example, if required libraries do not support them).
Other environment
~~~~~~~~~~~~~~~~~
Review the :ref:`support_life` for the versions of ``ansible-core`` that the collection supports. Collections MUST document the supported Python versions for modules which cannot support all target Python versions (for example, if required libraries do not support them).
.. note::
If the collection does not support Python 2.6 and/or Python 3.5 explicitly then take the below points into consideration:
- Dropping support for Python 2.6 in the other environment means that you are dropping support for RHEL6. RHEL6 ended full support in November, 2020, but some users are still using RHEL6 under extended support contracts (ELS) until 2024. ELS is not full support; not all CVEs of the python-2.6 interpreter are fixed, for instance.
- Dropping support for Python 3.5 means that Python 2.7 has to be installed on Ubuntu Xenial (16.04) and that you have to support Python 2.7.
Also, note that dropping support for a Python version for an existing module/plugin is a breaking change, and thus requires a major release. A collection MUST announce dropping support for Python versions in their changelog, if possible in advance (for example, in previous versions before support is dropped).
.. _coll_python_docs_req:
Python documentation requirements
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
* If everything in your collection supports the same Python versions as the collection-supported versions of ansible-core, you do not need to document Python versions.
* If your collection does not support those Python versions, you MUST document which versions it supports in the README.
* If most of your collection supports the same Python versions as ansible-core, but some modules and plugins do not, you MUST include the supported Python versions in the documentation for those modules and plugins.
For example, if your collection supports Ansible 2.9 to ansible-core 2.13, the Python versions supported for modules are 2.6, 2.7, and 3.5 and newer (until at least 3.10), while the Python versions supported for plugins are 2.7 and 3.5 and newer (until at least 3.10). So if the modules in your collection do not support Python 2.6, you have to document this in the README, for example ``The content in this collection supports Python 2.7, Python 3.5 and newer.``.
.. _coll_plugin_standards:
Standards for developing module and plugin utilities
====================================================
* ``module_utils`` and ``plugin_utils`` can be marked for only internal use in the collection, but they MUST document this and MUST use a leading underscore for filenames.
* It is a breaking change when you make an existing ``module_utils`` private and in that case the collection requires a major version bump.
* Below are some recommendations for ``module_utils`` documentation (see the sketch after this list):
* No docstring: everything we recommend for ``other-environment`` is supported.
* The docstring ``'Python versions supported: same as for controller-environment'``: everything we recommend for ``controller-environment`` is supported.
* The docstring with specific versions otherwise: ``'Python versions supported: '``.
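For illustration only, an internal ``module_utils`` file following these recommendations could start like this (the file name, helper function, and version list are made up):

.. code-block:: python

   # plugins/module_utils/_helpers.py - the leading underscore marks it as internal
   """Internal helpers for this collection, not a public interface.

   Python versions supported: 2.7, 3.5+
   """


   def normalize_name(name):
       """Return a stripped, lower-cased name."""
       return name.strip().lower()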
.. _coll_repo_structure:
Repository structure requirements
==================================
galaxy.yml
----------
* The ``tags`` field MUST be set.
* Collection dependencies must meet a set of rules. See the :ref:`Collection Dependencies <coll_dependencies>` section for details.
* The ``ansible`` package MUST NOT depend on collections not shipped in the package.
* If you plan to split up your collection, the new collection MUST be approved for inclusion before the smaller collections replace the larger in Ansible.
* If you plan to add other collections as dependencies, they MUST run through the formal application process.
.. _coll_readme_req:
README.md
---------
Your collection repository MUST have a ``README.md`` in the root of the collection, see `collection_template/README.md <https://github.com/ansible-collections/collection_template/blob/main/README.md>`_ for an example.
meta/runtime.yml
----------------
Example: `meta/runtime.yml <https://github.com/ansible-collections/collection_template/blob/main/meta/runtime.yml>`_
* The ``meta/runtime.yml`` MUST define the minimum version of Ansible which this collection works with.
* If the collection works with Ansible 2.9, then this should be set to ``>=2.9.10``.
* It is usually better to avoid adding ``<2.11`` as a restriction, since this, for example, makes it impossible to use the collection with the current ansible-base devel branch (which has version 2.11.0.dev0).
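A minimal sketch of such a file (the version value is illustrative):

.. code-block:: yaml

   # meta/runtime.yml
   requires_ansible: '>=2.9.10'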
.. _coll_module-reqs:
Modules & Plugins
------------------
* Collections MUST only use the directories specified below in the ``plugins/`` directory and
only for the purposes listed:
:Those recognized by ansible-core: ``doc_fragments``, ``modules``, ``module_utils``, ``terminal``, and those listed in :ref:`working_with_plugins`. This list can be verified by looking at the last element of the package argument of each ``*_loader`` in https://github.com/ansible/ansible/blob/devel/lib/ansible/plugins/loader.py#L1126
:plugin_utils: For shared code which is only used controller-side, not in modules.
:sub_plugins: For other plugins which are managed by plugins inside of collections instead of ansible-core. We use a subfolder so there aren't conflicts when ansible-core adds new plugin types.
The core team (which maintains ansible-core) has committed not to use these directories for
anything which would conflict with the uses specified here.
Other directories
-----------------
Collections MUST not use files outside ``meta/``, ``plugins/``, ``roles/`` and ``playbooks/`` in any plugin, role, or playbook that can be called by FQCN, used from other collections, or used from user playbooks and roles. A collection must work if every file or directory is deleted from the installed collection except those four directories and their contents.
Internal plugins, roles and playbooks (artifacts used only in testing, or only to release the collection, or only for some other internal purpose and not used externally) are exempt from this rule and may rely on files in other directories.
.. _coll_docs_structure_reqs:
Documentation requirements
~~~~~~~~~~~~~~~~~~~~~~~~~~~
All modules and plugins MUST:
* Include a :ref:`DOCUMENTATION <documentation-block>` block.
* Include an :ref:`EXAMPLES <examples-block>` block (except where not relevant for the plugin type).
* Use FQCNs when referring to modules, plugins and documentation fragments inside and outside the collection (including ``ansible.builtin`` for the listed entities from ansible-core).
When using ``version_added`` in the documentation (see the example after this list):
* Declare the version of the collection in which the options were added -- NOT the version of Ansible.
* If you for some reason really have to specify version numbers of Ansible or of another collection, you also have to provide ``version_added_collection: collection_name``. We strongly recommend NOT doing this.
* Include ``version_added`` when you add new content (modules, plugins, options) to an existing collection. The values are shown in the documentation, and can be useful, but you do not need to add ``version_added`` to every option, module, and plugin when creating a new collection.
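For example, an option introduced in a hypothetical collection version 2.3.0 could be documented like this inside the module's ``DOCUMENTATION`` block:

.. code-block:: yaml

   options:
     new_option:
       description:
         - A newly added option.
       type: str
       version_added: 2.3.0  # the collection version, not the Ansible version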
Other items:
* The ``CONTRIBUTING.md`` (or ``README.md``) file MUST state what types of contributions (pull requests, feature requests, and so on) are accepted and any relevant contributor guidance. Issues (bugs and feature request) reports must always be accepted.
* Collections are encouraged to use :ref:`links and formatting macros <linking-and-other-format-macros-within-module-documentation>`.
* Including a :ref:`RETURN <return-block>` block for modules is strongly encouraged but not required.
.. _coll_workflow:
Contributor Workflow
====================
.. _coll_changlogs_req:
Changelogs
----------
Collections are required to include a changelog. To give a consistent feel for changelogs across collections and ensure changelogs exist for collections included in the ``ansible`` package we suggest you use `antsibull-changelog <https://github.com/ansible-community/antsibull-changelog>`_ to maintain and generate this but other options exist. Preferred (in descending order):
#. Use antsibull-changelog (preferred).
#. Provide ``changelogs/changelog.yaml`` in the `correct format <https://github.com/ansible-community/antsibull-changelog/blob/main/docs/changelog.yaml-format.md>`_. (You can use ``antsibull-lint changelog-yaml /path/to/changelog.yaml`` to validate the format.)
#. Provide a link to the changelog file (self-hosted) (not recommended).
Note that the porting guide is compiled from ``changelogs/changelog.yaml`` (sections ``breaking_changes``, ``major_changes``, ``deprecated_features``, ``removed_features``). So if you use option 3, you will not be able to add something to the porting guide.
.. _coll_versioning_req:
Versioning and deprecation
~~~~~~~~~~~~~~~~~~~~~~~~~~
* Collections MUST adhere to `semantic versioning <https://semver.org/>`_.
* To preserve backward compatibility for users, every Ansible minor version series (x.Y.z) will keep the major version of a collection constant. If ansible 3.0.0 includes ``community.general`` 2.2.0, then each 3.Y.z (3.1.z, 3.2.z, and so on) release will include the latest ``community.general`` 2.y.z release available at build time. Ansible 3.y.z will **never** include a ``community.general`` 3.y.z release, even if it is available. Major collection version changes will be included in the next Ansible major release (4.0.0 in this example).
* Therefore, ensure that the current major release of your collection included in 3.0.0 receives at least bugfixes as long as new 3.Y.Z releases are produced.
* Since new minor releases are included, you can include new features, modules and plugins. You must make sure that you do not break backwards compatibility! (See `semantic versioning <https://semver.org/>`_.) This means in particular:
* You can fix bugs in patch releases, but not add new features or deprecate things.
* You can add new features and deprecate things in minor releases, but not remove things or change behavior of existing features.
* You can only remove things or make breaking changes in major releases.
* We recommend that you ensure that if a deprecation is added in a collection version that is included in Ansible 3.y.z, the removal itself will only happen in a collection version included in Ansible 5.0.0 or later, but not in a collection version included in Ansible 4.0.0.
* Content moved from ansible/ansible that was scheduled for removal in 2.11 or later MUST NOT be removed in the current major release available when ansible 2.10.0 is released. Otherwise it would already be removed in 2.10, unexpectedly for users! Deprecation cycles can be shortened (since they are now uncoupled from ansible or ansible-base versions), but existing ones must not be unexpectedly terminated.
* We recommend you announce your policy of releasing, versioning and deprecation to contributors and users in some way. For an example of how to do this, see `the announcement in community.general <https://github.com/ansible-collections/community.general/issues/582>`_. You could also do this in the README.
.. _coll_naming_req:
Naming
======
Collection naming
-----------------
For collections under ansible-collections the repository SHOULD be named ``NAMESPACE.COLLECTION``.
To create a new collection and corresponding repository, first, a new namespace in Galaxy has to be created by submitting `Request a namespace <https://github.com/ansible/galaxy/issues/new/choose>`_.
`Namespace limitations <https://galaxy.ansible.com/docs/contributing/namespaces.html#galaxy-namespace-limitations>`_ lists requirements for namespaces in Galaxy.
For collections created for working with a particular entity, they should contain the entity name, for example ``community.mysql``.
For corporate maintained collections, the repository can be named ``COMPANY_NAME.PRODUCT_NAME``, for example ``ibm.db2``.
We should avoid FQCN / repository names that:
* are unnecessarily long; try to make the name compact but clear.
* contain the same words / collocations in the ``NAMESPACE`` and ``COLLECTION`` parts, for example ``my_system.my_system``.
If your collection is planned to be certified on **Red Hat Automation Hub**, please consult with Red Hat Partner Engineering through ``ansiblepartners@redhat.com`` to ensure naming compatibility between the community collection on **Galaxy** and the certified collection.
.. _coll_module_name_req:
Module naming
-------------
Modules that only gather information MUST be named ``<something>_info``. Modules that return ``ansible_facts`` are named ``<something>_facts`` and do not return non-facts.
For more information, refer to the :ref:`Developing modules guidelines <creating-an-info-or-a-facts-module>`.
.. _coll_licensing_req:
Collection licensing requirements
===================================
.. note::
The guidelines below are more restrictive than strictly necessary. We will try to add a larger list of acceptable licenses once we have approval from Red Hat Legal.
There are four types of content in collections that licensing has to address in different ways:
:modules: must be licensed with a free software license that is compatible with the
`GPL-3.0-or-later <https://www.gnu.org/licenses/gpl-3.0-standalone.html>`_
:module_utils: must be licensed with a free software license that is compatible with the
`GPL-3.0-or-later <https://www.gnu.org/licenses/gpl-3.0-standalone.html>`_. Ansible
itself typically uses the `BSD-2-clause
<https://opensource.org/licenses/BSD-2-Clause>`_ license to make it possible for
third-party modules which are licensed incompatibly with the GPLv3 to use them.
Please consider this use case when licensing your own ``module_utils``.
:All other code in ``plugins/``: All other code in ``plugins/`` must be under the `GPL-3.0-or-later
<https://www.gnu.org/licenses/gpl-3.0-standalone.html>`_. These plugins
are run inside of the Ansible controller process which is licensed under
the ``GPL-3.0-or-later`` and often must import code from the controller.
For these reasons, ``GPL-3.0-or-later`` must be used.
:All other code: Code outside ``plugins/`` may be licensed under another free software license that is compatible
with the `GPL-3.0-or-later <https://www.gnu.org/licenses/gpl-3.0-standalone.html>`_,
provided that such code does not import any other code that is licensed under
the ``GPL-3.0-or-later``. If the file does import other ``GPL-3.0-or-later`` code,
then it must similarly be licensed under ``GPL-3.0-or-later``. Note that this applies in
particular to unit tests; these often import code from ansible-core, plugins, module utils,
or modules, and such code is often licensed under ``GPL-3.0-or-later``.
:Non code content: At the moment, these must also be under the `GPL-3.0-or-later
<https://www.gnu.org/licenses/gpl-3.0-standalone.html>`_.
Use `this table of licenses from the Fedora Project
<https://fedoraproject.org/wiki/Licensing:Main#Software_License_List>`_ to find which licenses are
compatible with the GPLv3+. The license must be considered open source on both the Fedora License
table and the `Debian Free Software Guidelines <https://wiki.debian.org/DFSGLicenses>`_ to be
allowed.
These guidelines are the policy for inclusion in the Ansible package and are in addition to any
licensing and legal concerns that may otherwise affect your code.
.. _coll_repo_management:
Repository management
=====================
Every collection MUST have a public git repository. Releases of the collection MUST be tagged in said repository. This means that releases MUST be ``git tag``\ ed and that the tag name MUST exactly match the Galaxy version number. Tag names MAY have a ``v`` prefix, but a collection's tag names MUST have a consistent format from release to release.
Additionally, collection artifacts released to Galaxy MUST be built from the sources that are tagged in the collection's git repository as that release. Any changes made during the build process MUST be clearly documented so the collection artifact can be reproduced.
We are open to allowing other SCM software once our tooling supports them.
.. _coll_branch_config:
Branch name and configuration
-----------------------------
This subsection is **only** for repositories under `ansible-collections <https://github.com/ansible-collections>`_! Other collection repositories can also follow these guidelines, but do not have to.
All new repositories MUST have ``main`` as the default branch.
Existing repositories SHOULD be converted to use ``main``.
Repository Protections:
* Allow merge commits: disallowed
Branch protections MUST be enforced:
* Require linear history
* Include administrators
.. _coll_ci_tests:
CI Testing
===========
.. note::
You can copy the free-to-use `GitHub action workflow file <https://github.com/ansible-collections/collection_template/blob/main/.github/workflows/ansible-test.yml>`_ from the `Collection Template repository <https://github.com/ansible-collections/collection_template/>`_ to the `.github/workflows` directory in your collection to set up testing through GitHub actions. The workflow covers all the requirements below.
* You MUST run the ``ansible-test sanity`` command from the `latest stable ansible-base/ansible-core branch <https://github.com/ansible/ansible/branches/all?query=stable->`_.
* Collections MUST run an equivalent of the ``ansible-test sanity --docker`` command.
* If they do not use ``--docker``, they must make sure that all tests run, in particular the compile and import tests (which should run for all :ref:`supported Python versions <ansible-and-python-3>`).
* Collections can choose to skip certain Python versions that they explicitly do not support; this needs to be documented in ``README.md`` and in every module and plugin (hint: use a docs fragment). However we strongly recommend you follow the :ref:`Ansible Python Compatibility <ansible-and-python-3>` section for more details.
* You SHOULD suggest to *additionally* run ``ansible-test sanity`` from the ansible/ansible ``devel`` branch so that you find out about new linting requirements earlier.
* The sanity tests MUST pass.
* Adding entries to the ``tests/sanity/ignore-*.txt`` files is an allowed way of getting the tests to pass, except for the cases listed below.
* You SHOULD NOT have ignored test entries. A reviewer can manually evaluate and approve your collection if they deem an ignored entry to be valid.
* You MUST NOT ignore the following validations. They must be fixed before approval:
* ``validate-modules:doc-choices-do-not-match-spec``
* ``validate-modules:doc-default-does-not-match-spec``
* ``validate-modules:doc-missing-type``
* ``validate-modules:doc-required-mismatch``
* ``validate-modules:mutually_exclusive-unknown``
* ``validate-modules:no-log-needed`` (use ``no_log=False`` in the argument spec to flag false positives!)
* ``validate-modules:nonexistent-parameter-documented``
* ``validate-modules:parameter-list-no-elements``
* ``validate-modules:parameter-type-not-in-doc``
* ``validate-modules:undocumented-parameter``
* Every entry in the ``ignore-*.txt`` files MUST have a justification in a comment, for example ``plugins/modules/docker_container.py use-argspec-type-path # uses colon-separated paths, can't use type=path``.
* Reviewers can block acceptance of a new collection if they do not agree with the ignore entries.
* You MUST run CI against each of the "major versions" (2.10, 2.11, 2.12, etc) of ``ansible-base``/``ansible-core`` that the collection supports. (Usually the ``HEAD`` of the stable-xxx branches.)
* All CI tests MUST run against every pull request and SHOULD pass before merge.
* At least sanity tests MUST run against a commit that releases the collection; if they do not pass, the collection will NOT be released.
- If the collection has integration/unit tests, they SHOULD run too; if they do not pass, the errors SHOULD be analyzed to decide whether they should block the release or not.
* All CI tests MUST run regularly (nightly, or at least once per week) to ensure that repositories without regular commits are tested against the latest version of ansible-test from each ansible-base/ansible-core version tested. The results from the regular CI runs MUST be checked regularly.
All of the above can be achieved by using the `GitHub Action template <https://github.com/ansible-collections/collection_template/tree/main/.github/workflows>`_.
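If you do not copy the template, a hand-rolled workflow that covers the sanity requirements could be sketched roughly like this (the collection path, core versions, Python version, and schedule are illustrative; the official template is more complete and remains the recommended starting point):

.. code-block:: yaml

   # .github/workflows/ansible-test.yml (sketch only)
   name: CI
   on:
     pull_request:
     schedule:
       - cron: '0 6 * * *'  # regular run against the latest ansible-test

   jobs:
     sanity:
       runs-on: ubuntu-latest
       strategy:
         matrix:
           ansible-core: [stable-2.14, stable-2.15, devel]
       steps:
         - uses: actions/checkout@v3
           with:
             path: ansible_collections/community/foo
         - uses: actions/setup-python@v4
           with:
             python-version: '3.10'
         - name: Install ansible-core
           run: pip install https://github.com/ansible/ansible/archive/${{ matrix.ansible-core }}.tar.gz
         - name: Run sanity tests
           run: ansible-test sanity --docker -v --color
           working-directory: ansible_collections/community/foo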
To learn how to add tests to your collection, see:
* :ref:`collection_integration_tests`
* :ref:`collection_unit_tests`
.. _coll_wg_reqs:
Collections and Working Groups
==============================
The collections have:
* Working group page(s) on a corresponding wiki, if needed. This makes sense when there is a group of modules for working with one common entity, for example postgresql, zabbix, grafana, and so on.
* Issue for agenda (or pinboard if there are not regular meetings) as a pinned issue in the repository.
.. _coll_migrating_reqs:
When moving modules between collections
=======================================
All related entities must be moved/copied including:
* Related plugins and module_utils files (when moving, be sure it is not used by other modules, otherwise copy).
* CI and unit tests.
* Corresponding documentation fragments from ``plugins/doc_fragments``.
Also:
* Change ``M()``, examples, ``seealso``, and ``extends_documentation_fragment`` entries to use actual FQCNs, both in the moved content and in other collections that have references to the content.
* Move all related issues, pull requests, and wiki pages.
* Look through ``docs/docsite`` directory of `ansible-base GitHub repository <https://github.com/ansible/ansible>`_ (for example, using the ``grep`` command-line utility) to check if there are examples using the moved modules and plugins to update their FQCNs.
See :ref:`Migrating content to a different collection <migrating-ansible-content-to-a-different-collection>` for complete details.
.. _coll_development_conventions:
Development conventions
=======================
Besides all the requirements listed in the :ref:`module_dev_conventions`, be sure:
* Your modules satisfy the concept of :ref:`idempotency <term-Idempotency>`: if a module repeatedly runs with the same set of inputs, it will not make any changes on the system.
* Your modules do not query information using special ``state`` option values like ``get``, ``list``, ``query``, or ``info`` -
create new ``_info`` or ``_facts`` modules instead (for more information, refer to the :ref:`Developing modules guidelines <creating-an-info-or-a-facts-module>`).
* ``check_mode`` is supported in all ``*_info`` and ``*_facts`` modules (for more information, refer to the :ref:`Development conventions <developing_modules_best_practices>`); see the sketch below.
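A minimal sketch of an ``*_info`` module that honors these conventions (the module name, option, and return value are made up):

.. code-block:: python

   from ansible.module_utils.basic import AnsibleModule


   def main():
       module = AnsibleModule(
           argument_spec=dict(
               name=dict(type='str'),
           ),
           supports_check_mode=True,  # *_info/*_facts modules must support check mode
       )
       # Only gather information; never change the system state here.
       module.exit_json(changed=False, foo_info={'name': module.params['name']})


   if __name__ == '__main__':
       main()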
.. _coll_dependencies:
Collection Dependencies
=======================
**Notation:** if foo.bar has a dependency on baz.bam, we say that baz.bam is the collection *depended on*, and foo.bar is the *dependent collection*.
* Collection dependencies must have a lower bound on the version which is at least 1.0.0.
* This means that all collection dependencies have to specify lower bounds on the versions, and these lower bounds should be stable releases, and not versions of the form 0.x.y.
* When creating new collections where collection dependencies are also under development, you need to watch out since Galaxy checks whether dependencies exist in the required versions:
#. Assume that ``foo.bar`` depends on ``foo.baz``.
#. First release ``foo.baz`` as 1.0.0.
#. Then modify ``foo.bar``'s ``galaxy.yml`` to specify ``'>=1.0.0'`` for ``foo.baz``.
#. Finally release ``foo.bar`` as 1.0.0.
* The dependencies between collections included in Ansible must be valid. If a dependency is violated, the involved collections must be pinned so that all dependencies are valid again. This means that the version numbers from the previous release are kept or only partially incremented so that the resulting set of versions has no invalid dependencies.
* If a collection keeps a too strict dependency for a longer time, forcing another collection it depends on to be held back, the dependent collection will be removed from the next major Ansible release. What "longer time" means depends on when the next major Ansible release happens. If a dependent collection prevents a new major version of a collection it depends on from being included in the next major Ansible release, the dependent collection will be removed from that major release to avoid blocking the collection being depended on.
* We strongly suggest that collections also test against the ``main`` branches of their dependencies to ensure that incompatibilities with future releases of these are detected as early as possible and can be resolved in time to avoid such problems. Collections depending on other collections must understand that they bear the risk of being removed when they do not ensure compatibility with the latest releases of their dependencies.
* Collections included in Ansible must not depend on other collections except if they satisfy one of the following cases:
#. They have a loose dependency on one (or more) major versions of other collections included in Ansible. For example, ``ansible.netcommon: >=1.0.0``, or ``ansible.netcommon: >=2.0.0, <3.0.0``. In case the collection depended on releases a new major version outside of this version range that will be included in the next major Ansible release, the dependent collection will be removed from the next major Ansible release. The cut-off date for this is feature freeze.
#. They are explicitly being allowed to do so by the Steering Committee.
Examples
--------
#. ``community.foo 1.2.0`` has a dependency on ``community.bar >= 1.0.0, < 1.3.0``.
* Now ``community.bar`` creates a new release ``1.3.0``. When ``community.foo`` does not create a new release with a relaxed dependency, we have to include ``community.bar 1.2.x`` in the next Ansible release despite ``1.3.0`` being available.
* If ``community.foo`` does not relax its dependency on ``community.bar`` for some time, ``community.foo`` will be removed from the next Ansible major release.
* Unfortunately, ``community.bar`` has to stay at ``1.2.x`` until either ``community.foo`` is removed (in the next major release) or it loosens its requirements so that newer ``community.bar`` ``1.3.z`` releases can be included.
#. ``community.foonetwork`` depends on ``ansible.netcommon >= 2.0.0, <3.0.0``.
* ``ansible.netcommon 4.0.0`` is released during this major Ansible release cycle.
* ``community.foonetwork`` either releases a new version before feature freeze of the next major Ansible release that allows depending on all ``ansible.netcommon 4.x.y`` releases, or it will be removed from the next major Ansible release.
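Expressed in ``galaxy.yml``, dependencies that satisfy these rules could look like this (the names and version ranges are illustrative):

.. code-block:: yaml

   dependencies:
     ansible.netcommon: '>=2.0.0,<3.0.0'
     community.bar: '>=1.0.0'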
.. _coll_inclusion_reqs:
Requirements for collections to be included in the Ansible Package
==================================================================
To be included in the `ansible` package, collections must meet the following criteria:
* :ref:`Development conventions <module_dev_conventions>`.
* `Collection requirements <https://github.com/ansible-collections/overview/blob/main/collection_requirements.rst>`_ (this document).
* The `Collection Inclusion Criteria Checklist <https://github.com/ansible-collections/overview/blob/main/collection_checklist.md>`_ covers most of the criteria from this document.
* :ref:`Ansible documentation format <module_documenting>` and the :ref:`style guide <style-guide>`.
* To pass the Ansible :ref:`sanity tests <testing-sanity>`.
* To have :ref:`unit <unit-tests>` and/or :ref:`integration tests <integration-tests>` according to the corresponding sections of this document.
Other requirements
===================
* After content is moved out of another currently included collection such as ``community.general`` or ``community.network`` OR a new collection satisfies all the requirements, add the collection to the ``ansible.in`` file in a corresponding directory of the `ansible-build-data repository <https://github.com/ansible-community/ansible-build-data/>`_.

View File

@ -1,74 +0,0 @@
.. _review_checklist:
Review checklist for collection PRs
====================================
Use this section as a checklist reminder of items to review when you review a collection PR.
Reviewing bug reports
----------------------
When users report bugs, verify the behavior reported. Remember always to be kind with your feedback.
* Did the user make a mistake in the code they put in the Steps to Reproduce issue section? We often see user errors reported as bugs.
* Did the user assume an unexpected behavior? Ensure that the related documentation is clear. If not, the issue is useful to help us improve documentation.
* Is there a minimal reproducer? If not, ask the reporter to reduce the complexity to help pinpoint the issue.
* Is the issue a consequence of a misconfigured environment?
* If it seems to be a real bug, does the behavior still exist in the most recent release or the development branch?
* Reproduce the bug, or if you do not have a suitable infrastructure, ask other contributors to reproduce the bug.
Reviewing suggested changes
---------------------------
When reviewing PRs, verify that the suggested changes do not:
* Unnecessarily break backward compatibility.
* Bring more harm than value.
* Introduce non-idempotent solutions.
* Duplicate already existing features (inside or outside the collection).
* Violate the :ref:`Ansible development conventions <module_conventions>`.
Other standards to check for in a PR include:
* A pull request MUST NOT contain a mix of bug fixes and new features that are not tightly related. If yes, ask the author to split the pull request into separate PRs.
* If the pull request is not a documentation fix, it must include a :ref:`changelog fragment <collection_changelog_fragments>`. Check the format carefully as follows:
* New modules and plugins (that are not jinja2 filter and test plugins) do not need changelog fragments.
* For jinja2 filter and test plugins, check out the `special syntax for changelog fragments <https://github.com/ansible-community/antsibull-changelog/blob/main/docs/changelogs.rst#adding-new-roles-playbooks-test-and-filter-plugins>`_.
* The changelog content contains useful information for end users of the collection.
* If new files are added with the pull request, they follow the :ref:`coll_licensing_req`.
* The changes follow the :ref:`Ansible documentation standards <developing_modules_documenting>` and the :ref:`style_guide`.
* The changes follow the :ref:`Development conventions <developing_modules_best_practices>`.
* If a new plugin is added, it is one of the :ref:`allowed plugin types <coll_module-reqs>`.
* Documentation, examples, and return sections use FQCNs for the ``M(..)`` :ref:`format macros <module_documents_linking>` when referring to modules.
* Modules and plugins from ansible-core use ``ansible.builtin.`` as an FQCN prefix when mentioned.
* When a new option, module, plugin, or return value is added, the corresponding documentation or return sections use ``version_added:`` containing the *collection* version in which they will be first released.
* This is typically the next minor release, sometimes the next major release. For example: if 2.7.5 is the current release, the next minor release will be 2.8.0, and the next major release will be 3.0.0.
* FQCNs are used for ``extends_documentation_fragment:``, unless the author is referring to doc_fragments from ansible-core.
* New features have corresponding examples in the :ref:`examples_block`.
* Return values are documented in the :ref:`return_block`.
Review tests in the PR
----------------------
Review the following if tests are applicable and possible to implement for the changes included in the PR:
* Where applicable, the pull request has :ref:`testing_integration` and :ref:`testing_units`.
* All changes are covered. For example, a bug case or a new option separately and in sensible combinations with other options.
* Integration tests cover ``check_mode`` if supported.
* Integration tests check the actual state of the system, not only what the module reports. For example, if the module actually changes a file, check that the file was changed by using the ``ansible.builtin.stat`` module, as in the sketch after this list.
* Integration tests check return values, if applicable.
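A sketch of what such checks could look like in an integration test target (the module and file names are made up):

.. code-block:: yaml

   - name: Create the configuration file
     community.foo.my_module:
       path: /etc/foo.conf
       state: present
     register: result

   - name: Stat the file on the managed host
     ansible.builtin.stat:
       path: /etc/foo.conf
     register: conf_stat

   - name: Assert both the reported and the actual state
     ansible.builtin.assert:
       that:
         - result is changed
         - conf_stat.stat.exists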
Review for merge commits and breaking changes
---------------------------------------------
* The pull request does not contain merge commits. See the GitHub warnings at the bottom of the pull request. If merge commits are present, ask the author to rebase the pull request branch.
* If the pull request contains breaking changes, ask the author and the collection maintainers if it really is needed, and if there is a way not to introduce breaking changes. If breaking changes are present, they MUST only appear in the next major release and MUST NOT appear in a minor or patch release. The only exception is breaking changes caused by security fixes that are absolutely necessary to fix the security issue.

View File

@ -1,67 +0,0 @@
.. _collection_pr_test:
****************************
How to test a collection PR
****************************
Reviewers and issue authors can verify a PR fixes the reported bug by testing the PR locally.
.. contents::
:local:
.. _collection_prepare_environment:
Prepare your environment
========================
We assume that you use Linux as a work environment (you can use a virtual machine as well) and have ``git`` installed.
1. :ref:`Install Ansible <installation_guide>` or ansible-core.
2. Create the following directories in your home directory:
.. code:: bash
mkdir -p ~/ansible_collections/NAMESPACE/COLLECTION_NAME
For example, if the collection is ``community.general``:
.. code:: bash
mkdir -p ~/ansible_collections/community/general
If the collection is ``ansible.posix``:
.. code:: bash
mkdir -p ~/ansible_collections/ansible/posix
3. Clone the forked repository from the author profile to the created path:
.. code:: bash
git clone https://github.com/AUTHOR_ACC/COLLECTION_REPO.git ~/ansible_collections/NAMESPACE/COLLECTION_NAME
4. Go to the cloned repository.
.. code:: bash
cd ~/ansible_collections/NAMESPACE/COLLECTION_NAME
5. Check out the PR branch (it can be retrieved from the PR's page):
.. code:: bash
git checkout pr_branch
Test the Pull Request
=====================
1. Include ``~/ansible_collections`` in ``COLLECTIONS_PATHS``. See :ref:`COLLECTIONS_PATHS` for details.
2. Run your playbook using the PR branch and verify the PR fixed the bug; a minimal example follows this list.
3. Give feedback on the pull request or the linked issue(s).
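A minimal playbook for step 2 could look like the following sketch; the module name and its options are placeholders for whatever the PR actually changes:
.. code:: yaml
# test_pr.yml
- name: Verify the behavior changed by the PR
  hosts: localhost
  gather_facts: false
  tasks:
    - name: Call the module touched by the PR with options that reproduce the bug
      NAMESPACE.COLLECTION_NAME.my_module:
        state: present
      register: result
    - name: Show the result so it can be compared with the expected behavior
      ansible.builtin.debug:
        var: result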

View File

@ -1,162 +0,0 @@
.. _collection_unit_tests:
******************************
Add unit tests to a collection
******************************
This section describes all of the steps needed to add unit tests to a collection and how to run them locally using the ``ansible-test`` command.
See :ref:`testing_units_modules` for more details.
.. contents::
:local:
Understanding the purpose of unit tests
========================================
Unit tests ensure that a section of code (known as a ``unit``) meets its design requirements and behaves as intended. Some collections do not have unit tests, but that does not mean they are not needed.
A ``unit`` is a function or method of a class used in a module or plugin. Unit tests verify that a function with a certain input returns the expected output.
Unit tests should also verify when a function raises or handles exceptions.
Ansible uses `pytest <https://docs.pytest.org/en/latest/>`_ as a testing framework.
See :ref:`testing_units_modules` for complete details.
Inclusion in the Ansible package :ref:`requires integration and/or unit tests <coll_ci_tests>`. You should have tests for your collection as well as for individual modules and plugins to make your code more reliable. To learn how to get started with integration tests, see :ref:`collection_integration_tests`.
See :ref:`collection_prepare_local` to prepare your environment.
.. _collection_unit_test_required:
Determine if unit tests exist
=============================
Ansible collection unit tests are located in the ``tests/unit`` directory.
The structure of the unit tests matches the structure of the code base, so the tests can reside in the ``tests/unit/plugins/modules/`` and ``tests/unit/plugins/module_utils`` directories. There can be sub-directories if modules are organized by module groups.
If you are adding unit tests for ``my_module`` for example, check to see if the tests already exist in the collection source tree with the path ``tests/unit/plugins/modules/test_my_module.py``.
Example of unit tests
=====================
Let's assume that the following function is in ``my_module``:
.. code:: python
def convert_to_supported(val):
"""Convert unsupported types to appropriate."""
if isinstance(val, decimal.Decimal):
return float(val)
if isinstance(val, datetime.timedelta):
return str(val)
if val == 42:
raise ValueError("This number is just too cool for us ;)")
return val
Unit tests for this function should, at a minimum, check the following:
* If the function gets a ``Decimal`` argument, it returns a corresponding ``float`` value.
* If the function gets a ``timedelta`` argument, it returns a corresponding ``str`` value.
* If the function gets ``42`` as an argument, it raises a ``ValueError``.
* If the function gets an argument of any other type, it does nothing and returns the same value.
To write these unit tests for a collection called ``community.mycollection``:
1. If you already have your local environment :ref:`prepared <collection_prepare_local>`, go to the collection root directory.
.. code:: bash
cd ~/ansible_collections/community/mycollection
2. Create a test file for ``my_module``. If the path does not exist, create it.
.. code:: bash
touch tests/unit/plugins/modules/test_my_module.py
3. Add the following code to the file:
.. code:: python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from datetime import timedelta
from decimal import Decimal
import pytest
from ansible_collections.community.mycollection.plugins.modules.my_module import (
    convert_to_supported,
)
# We use the @pytest.mark.parametrize decorator to parametrize the function
# https://docs.pytest.org/en/latest/how-to/parametrize.html
# Simply put, the first element of each tuple will be passed to
# the test_convert_to_supported function as the test_input argument
# and the second element of each tuple will be passed as
# the expected argument.
# In the function's body, we use the assert statement to check
# if the convert_to_supported function given the test_input,
# returns what we expect.
@pytest.mark.parametrize('test_input, expected', [
    (timedelta(0, 43200), '12:00:00'),
    (Decimal('1.01'), 1.01),
    ('string', 'string'),
    (None, None),
    (1, 1),
])
def test_convert_to_supported(test_input, expected):
    assert convert_to_supported(test_input) == expected
def test_convert_to_supported_exception():
    with pytest.raises(ValueError, match=r"too cool"):
        convert_to_supported(42)
See :ref:`testing_units_modules` for examples on how to mock ``AnsibleModule`` objects, monkeypatch methods (``module.fail_json``, ``module.exit_json``), emulate API responses, and more.
4. Run the tests using docker:
.. code:: bash
ansible-test units tests/unit/plugins/modules/test_my_module.py --docker
.. _collection_recommendation_unit:
Recommendations on coverage
===========================
Use the following tips to organize your code and test coverage:
* Make your functions simple. Small functions that do one thing with no or minimal side effects are easier to test.
* Test all possible behaviors of a function including exception related ones such as raising, catching and handling exceptions.
* When a function invokes the ``module.fail_json`` method, passed messages should also be checked.
.. seealso::
:ref:`testing_units_modules`
Unit testing Ansible modules
:ref:`developing_testing`
Ansible Testing Guide
:ref:`collection_integration_tests`
Integration testing for collections
:ref:`testing_integration`
Integration tests guide
:ref:`testing_collections`
Testing collections
:ref:`testing_resource_modules`
Resource module integration tests
:ref:`collection_pr_test`
How to test a pull request locally

View File

@ -1,14 +0,0 @@
.. _testing_collections_guide:
**********************************************
Testing Collection Contributions
**********************************************
This section focuses on the different tests a contributor should run on their collection PR.
.. toctree::
:maxdepth: 1
collection_test_pr_locally
collection_unit_tests
collection_integration_tests

View File

@ -1,255 +0,0 @@
.. _collection_development_process:
******************************************
The Ansible Collections Development Cycle
******************************************
Ansible developers (including community contributors) add new features, fix bugs, and update code in many different repositories. These repositories contain plugins and modules that enable Ansible to execute specific tasks, like adding a user to a particular database or configuring a particular network device. These repositories contain the source code for collections.
Development on collections occurs at the macro and micro levels. Each collection has its own macro development cycle. For more information on the collections development cycle, see :ref:`contributing_maintained_collections`. The micro-level lifecycle of a PR is similar in collections and in ``ansible-core``.
.. contents::
:local:
Macro development: roadmaps, releases, and projects
=====================================================================
If you want to follow the conversation about what features will be added to the Ansible package for upcoming releases and what bugs are being fixed, you can watch these resources:
* the :ref:`roadmaps`
* the :ref:`Ansible Release Schedule <release_and_maintenance>`
* the `Ansible Community Working Group <https://github.com/ansible/community/wiki/Community>`_.
Micro development: the lifecycle of a PR
========================================
If you want to contribute a feature or fix a bug in a collection, you must open a **pull request** ("PR" for short). GitHub provides a great overview of `how the pull request process works <https://help.github.com/articles/about-pull-requests/>`_ in general. The ultimate goal of any pull request is to get merged and become part of a collection. Each collection has its own contributor guidelines so please check there for specific details.
Here's an overview of the PR lifecycle:
* :ref:`Contributor opens a PR <collection_quickstart>`
* CI runs the test suite
* Developers, maintainers, community review the PR
* Contributor addresses any feedback from reviewers
* Developers, maintainers, community re-review
* PR merged or closed
Making your PR merge-worthy
===========================
We do not merge every PR. See :ref:`collection_quickstart` for tips to make your PR useful, attractive, and merge-worthy.
.. _collection_changelog_fragments:
Creating changelog fragments
-----------------------------
Most changelogs should emphasize the impact of the change on the end user of the feature or collection, unless the change impacts developers directly. Consider what the user needs to know about this change and write the changelog to convey that detail.
Changelogs help users and developers keep up with changes to Ansible collections. Many collections build changelogs for each release from fragments. For collections that use this model, you **must** add a changelog fragment to any PR that changes functionality or fixes a bug.
You do not need a changelog fragment for PRs that:
* add new modules and plugins, because Ansible tooling does that automatically;
* contain only documentation changes.
.. note::
Some collections require a changelog fragment for every pull request. They use the ``trivial:`` section for entries mentioned above that will be skipped when building a release changelog.
More precisely:
* Every bugfix PR must have a changelog fragment. The only exception is fixes to a change that has not yet been included in a release.
* Every feature PR must have a changelog fragment.
* New modules and plugins (including jinja2 filter and test plugins) must have ``version_added`` entries set correctly in their documentation, and do not need a changelog fragment. The tooling detects new modules and plugins by their ``version_added`` values and announces them in the next release's changelog automatically.
We build short summary changelogs for minor releases as well as for major releases. If you backport a bugfix, include a changelog fragment with the backport PR.
.. _collection_changelogs_how_to_format:
Creating a changelog fragment
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
A basic changelog fragment is a ``.yaml`` or ``.yml`` file placed in the ``changelogs/fragments/`` directory. Each file contains a yaml dict with keys like ``bugfixes`` or ``major_changes`` followed by a list of changelog entries of bugfixes or features. Each changelog entry is rst embedded inside the yaml file, which means that certain constructs need to be escaped so they are interpreted by rst and not by yaml (or escaped for both yaml and rst if you prefer). Each PR **must** use a new fragment file rather than adding to an existing one, so we can trace the change back to the PR that introduced it.
PRs which add a new module or plugin do not necessarily need a changelog fragment. See :ref:`community_changelogs`. Also see :ref:`changelogs_how_to_format` for the precise format changelog fragments should have.
To create a changelog entry, create a new file with a unique name in the ``changelogs/fragments/`` directory of the corresponding repository. The file name should include the PR number and a description of the change. It must end with the file extension ``.yaml`` or ``.yml``. For example: ``40696-user-backup-shadow-file.yaml``
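Putting the file name and contents together, a complete minimal fragment could look like the following sketch (the module name, PR number, and repository are placeholders, not a real change):
.. code-block:: yaml
# changelogs/fragments/12345-my-module-fix-timeout.yml
bugfixes:
  - my_module - fix crash when the remote host does not answer within the configured timeout (https://github.com/ansible-collections/COLLECTION_REPO/pull/12345).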
A single changelog fragment may contain multiple sections but most will only contain one section. The toplevel keys (bugfixes, major_changes, and so on) are defined in the `config file <https://github.com/ansible/ansible/blob/devel/changelogs/config.yaml>`_ for our `release note tool <https://github.com/ansible-community/antsibull-changelog/blob/main/docs/changelogs.rst>`_. Here are the valid sections and a description of each:
**breaking_changes**
MUST include changes that break existing playbooks or roles. This includes any change to existing behavior that forces users to update tasks. Breaking changes means the user MUST make a change when they update. Breaking changes MUST only happen in a major release of the collection. Write in present tense and clearly describe the new behavior that the end user must now follow. Displayed in both the changelogs and the :ref:`Porting Guides <porting_guides>`.
.. code-block:: yaml
breaking_changes:
- ec2_instance - instance wait for state behavior no longer waits for the instance monitoring status to become OK when launching a new instance. If plays require the old behavior, the action will need to specify ``state: started`` (https://github.com/ansible-collections/amazon.aws/pull/481).
**major_changes**
Major changes to ansible-core or a collection. SHOULD NOT include individual module or plugin changes. MUST include non-breaking changes that impact all or most of a collection (for example, updates to support a new SDK version across the collection). Major changes mean the user can CHOOSE to make a change when they update but do not have to. Could be used to announce an important upcoming EOL or breaking change in a future release (ideally 6 months in advance, if known; see `this example <https://github.com/ansible-collections/community.general/blob/stable-1/CHANGELOG.rst#v1313>`_). Write in present tense and describe what is new. Optionally, include a 'Previously...' sentence to help the user identify where old behavior should now change. Displayed in both the changelogs and the :ref:`Porting Guides <porting_guides>`.
.. code-block:: yaml
major_changes:
- bitbucket_* modules - client_id is no longer marked as ``no_log=true``. If you relied on its value not showing up in logs and output, mark the whole tasks with ``no_log: true`` (https://github.com/ansible-collections/community.general/pull/2045).
**minor_changes**
Minor changes to ansible-core, modules, or plugins. This includes new parameters added to modules, or non-breaking behavior changes to existing parameters, such as adding new values to choices[]. Minor changes are enhancements, not bug fixes. Write in present tense.
.. code-block:: yaml
minor_changes:
- nmcli - adds ``routes6`` and ``route_metric6`` parameters for supporting IPv6 routes (https://github.com/ansible-collections/community.general/issues/4059).
**deprecated_features**
Features that have been deprecated and are scheduled for removal in a future release. Write in past tense. Include an alternative, where available, for the feature being deprecated. Displayed in both the changelogs and the :ref:`Porting Guides <porting_guides>`.
.. code-block:: yaml
deprecated_features:
- mail callback plugin - not specifying ``sender`` is deprecated and will be disallowed in ``community.general`` 6.0.0 (https://github.com/ansible-collections/community.general/pull/4140).
**removed_features**
Features that were previously deprecated and are now removed. Write in past tense. Include an alternative, where available, for the removed feature. Displayed in both the changelogs and the :ref:`Porting Guides <porting_guides>`.
.. code-block:: yaml
removed_features:
- acme_account_facts - the deprecated redirect has been removed. Use ``community.crypto.acme_account_info`` instead (https://github.com/ansible-collections/community.crypto/pull/290).
**security_fixes**
Fixes that address CVEs or resolve security concerns. MUST use security_fixes for any CVEs. Write in present tense. Include links to CVE information.
.. code-block:: yaml
security_fixes:
- win_psexec - ensure password is masked in ``psexec_command`` return result (https://github.com/ansible-collections/community.windows/issues/43).
**bugfixes**
Fixes that resolve issues. SHOULD NOT be used for minor enhancements (use ``minor_change`` instead). Write in past tense to describe the problem and present tense to describe the fix.
.. code-block:: yaml
bugfixes:
- apt_repository - fix crash caused by a timeout. The ``cache.update()`` was raising an ``IOError`` because of a timeout in ``apt update`` (https://github.com/ansible/ansible/issues/51995).
**known_issues**
Known issues that are currently not fixed or will not be fixed. Write in present tense to describe the problem and in the imperative mood to describe any available workaround.
.. code-block:: yaml
known_issues:
- idrac_user - module may error out with the message ``unable to perform the import or export operation`` because there are pending attribute changes or a configuration job is in progress. Wait for the job to complete and run the task again (https://github.com/dell/dellemc-openmanage-ansible-modules/pull/303).
**trivial**
Changes where a formal release changelog entry isn't required. ``trivial`` changelog fragments are excluded from the published changelog output and may be used for changes such as housekeeping, documentation, and test-only changes.
You can use ``trivial`` for collections that require a changelog fragment for each pull request.
.. code-block:: yaml
trivial:
- aws_ec2 - fix broken integration test (https://github.com/ansible-collections/amazon.aws/pull/1269).
Each changelog entry must contain a link to its issue between parentheses at the end. If there is no corresponding issue, the entry must contain a link to the PR itself.
Most changelog entries are ``bugfixes`` or ``minor_changes``.
Changelog fragment entry format
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
When writing a changelog entry, use the following format:
.. code-block:: yaml
- scope - description starting with a lowercase letter and ending with a period at the very end. Multiple sentences are allowed (https://github.com/reference/to/an/issue or, if there is no issue, reference to a pull request itself).
The scope is usually a module or plugin name or group of modules or plugins, for example, ``lookup plugins``. While module names can (and should) be mentioned directly (``foo_module``), plugin names should always be followed by the type (``foo inventory plugin``).
For changes that are not really scoped (for example, which affect a whole collection), use the following format:
.. code-block:: yaml
- Description starting with an uppercase letter and ending with a dot at the very end. Multiple sentences are allowed (https://github.com/reference/to/an/issue or, if there is no issue, reference to a pull request itself).
Here are some examples:
.. code-block:: yaml
bugfixes:
- apt_repository - fix crash caused by ``cache.update()`` raising an ``IOError``
  due to a timeout in ``apt update`` (https://github.com/ansible/ansible/issues/51995).
.. code-block:: yaml
minor_changes:
- lineinfile - add warning when using an empty regexp (https://github.com/ansible/ansible/issues/29443).
.. code-block:: yaml
bugfixes:
- copy - the module was attempting to change the mode of files for
  remote_src=True even if mode was not set as a parameter. This failed on
  filesystems which do not have permission bits (https://github.com/ansible/ansible/issues/29444).
You can find more example changelog fragments in the `changelog directory <https://github.com/ansible-collections/community.general/tree/main/changelogs/fragments>`_ for the community.general development branch.
After you have written the changelog fragment for your PR, commit the file and include it with the pull request.
Changelog fragment entry format for new jinja2 plugins, roles, and playbooks
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
While new modules and plugins that are not jinja2 filter or test plugins are mentioned automatically in the generated changelog, jinja2 filter and test plugins, roles, and playbooks are not. To make sure they are mentioned, a changelog fragment in a specific format is needed:
.. code-block:: yaml
# A new jinja2 filter plugin:
add plugin.filter:
  - # The following needs to be the name of the filter itself, not of the file
    # the filter is included in!
    name: to_time_unit
    # The description should be in the same format as short_description for
    # other plugins and modules: it should start with an upper-case letter and
    # not have a period at the end.
    description: Converts a time expression to a given unit

# A new jinja2 test plugin:
add plugin.test:
  - # The following needs to be the name of the test itself, not of the file
    # the test is included in!
    name: asn1time
    # The description should be in the same format as short_description for
    # other plugins and modules: it should start with an upper-case letter and
    # not have a period at the end.
    description: Check whether the given string is an ASN.1 time

# A new role:
add object.role:
  - # This should be the short (non-FQCN) name of the role.
    name: nginx
    # The description should be in the same format as short_description for
    # plugins and modules: it should start with an upper-case letter and
    # not have a period at the end.
    description: A nginx installation role

# A new playbook:
add object.playbook:
  - # This should be the short (non-FQCN) name of the playbook.
    name: wipe_server
    # The description should be in the same format as short_description for
    # plugins and modules: it should start with an upper-case letter and
    # not have a period at the end.
    description: Wipes a server

View File

@ -1,79 +0,0 @@
.. _community_committer_guidelines:
*********************
Committers Guidelines
*********************
These are the guidelines for people with commit privileges on the repositories in the ansible and ansible-collections GitHub organizations.
Committers of `Ansible-core <https://github.com/ansible/ansible>`_ are necessarily Red Hat employees acting as members of the Ansible Core team. Committers of `Ansible collections <https://github.com/ansible-collections/>`_ are members of the community or Ansible Engineering. Please read the guidelines before you commit.
These guidelines apply to everyone. At the same time, this is NOT a process document. So just use good judgment. You have been given commit access because we trust your judgment.
That said, use the trust wisely.
If you abuse the trust and break components and builds, and so on, the trust level falls and you may be asked not to commit or you may lose your commit privileges.
Features, high-level design, and roadmap of ansible-core
========================================================
As a core team member, you are an integral part of the team that develops the :ref:`roadmap <roadmaps>`. Please be engaged, and push for the features and fixes that you want to see. Also keep in mind that Red Hat, as a company, will commit to certain features, fixes, APIs, and so on, for various releases. Red Hat, the company, and the Ansible team must get these changes completed and released as scheduled. Obligations to users, the community, and customers must come first. Because of these commitments, a feature you want to develop yourself may not get into a release if it affects a lot of other parts within Ansible.
Any other new features and changes to high level design should go through the proposal process (TBD), to ensure the community and core team have had a chance to review the idea and approve it. The core team has sole responsibility for merging new features based on proposals to `Ansible-core <https://github.com/ansible/ansible>`_.
Features, high-level design, and roadmap of Ansible collections
===============================================================
Collections maintainers define features, high-level design, and roadmap of the collections themselves and are responsible for merging new features to `Ansible collections <https://github.com/ansible-collections/>`_ based on proposals discussed with their communities.
Our workflow on GitHub
======================
As a committer, you may already know this, but our workflow forms a lot of our team policies. Please ensure you are aware of the following workflow steps:
* Fork the repository upon which you want to do some work to your own personal repository
* Work on the specific branch upon which you need to commit
* Create a pull request back to the upstream repository and tag the people you would like to review; assign someone as the primary "owner" of your pull request
* Adjust code as necessary based on the comments provided
* Ask someone from the repository committers to do a final review and merge
Addendum to workflow for committers:
------------------------------------
The Core Team is aware that this can be a difficult process at times. Sometimes, the team breaks the rules by making direct commits or merging their own pull requests. This section is a set of guidelines. If you are changing a comma in documentation, or making a very minor change, you can use your best judgement. This is another trust thing. The process is critical for any major change, but for little things or getting something done quickly, use your best judgement and make sure people on the team are aware of your work.
Roles on Core
=============
* Core committers: Fine to do pull requests for most things, but we should have a timebox. Hanging pull requests may be merged at the judgement of these developers.
* :ref:`Module maintainers <maintainers>`: Module maintainers own specific modules and have indirect commit access through the current module pull request mechanisms.
* :ref:`Collection maintainers <maintainers>`: Collection maintainers own specific collections and have commit access to them. Each collection can set its own rules for contributions.
.. _committer_general_rules:
General rules
=============
Individuals with direct commit access are entrusted with powers that allow them to do a broad variety of things--probably more than we can write down. Rather than rules, treat these as general *guidelines*; individuals with this power are expected to use their best judgement.
* Do NOT
- Commit directly.
- Merge your own pull requests. Someone else should have a chance to review and approve the pull request merge. If you are a Core Committer, you have a small amount of leeway here for very minor changes.
- Forget about alternate environments. Consider the alternatives--yes, people have bad environments, but they are the ones who need us the most.
- Drag your community team members down. Discuss the technical merits of any pull requests you review. Avoid negativity and personal comments. For more guidance on being a good community member, read our :ref:`code_of_conduct`.
- Forget about the maintenance burden. High-maintenance features may not be worth adding.
- Break playbooks. Always keep backwards compatibility in mind.
- Forget to keep it simple. Complexity breeds all kinds of problems.
* Do
- Squash, avoid merges whenever possible, use GitHub's squash commits or cherry pick if needed (bisect thanks you).
- Be active. Committers who have no activity on the project (through merges, triage, commits, and so on) will have their permissions suspended.
- Consider backwards compatibility (goes back to "do not break existing playbooks").
- Write :ref:`tests<developing_testing>` and be sure that others' pull requests you are reviewing are well covered. Pull requests with tests are given higher priority than pull requests that should include tests but do not. While not all changes require tests, be sure to add them for new features, bug fixes, and functionality changes.
- Discuss with other committers, especially when you are unsure of something.
- Document! If your pull request is a new feature or a change to behavior, make sure you have updated all associated documentation or have notified the right people to do so. It also helps to add the version of ``ansible-core`` or ``collection`` against which this documentation is compatible (to avoid confusion between stable and devel docs, for backwards compatibility, and so on).
- Consider scope; sometimes a fix can be generalized.
- Keep it simple, then things are maintainable, debuggable, and intelligible.
Committers are expected to continue to follow the same community and contribution guidelines followed by the rest of the Ansible community.

View File

@ -1,180 +0,0 @@
.. _communication:
*****************************************
Communicating with the Ansible community
*****************************************
.. contents::
:local:
Code of Conduct
===============
All communication and interactions in the Ansible Community are governed by our :ref:`code_of_conduct`. Please read and understand it!
Asking questions over email
===========================
If you want to keep up with Ansible news, need help, or have a question, you can use one of the Ansible mailing lists. Each list covers a particular topic. Read the descriptions here to find the best list for your question.
Your first post to the mailing list will be moderated (to reduce spam), so please allow up to a day or so for your first post to appear.
* `Ansible Announce list <https://groups.google.com/forum/#!forum/ansible-announce>`_ is a read-only list that shares information about new releases of Ansible, and also occasional event information, such as announcements about an upcoming AnsibleFest, which is our official conference series. Worth subscribing to!
* `Ansible AWX List <https://groups.google.com/forum/#!forum/awx-project>`_ is for `Ansible AWX <https://github.com/ansible/awx>`_
* `Ansible Development List <https://groups.google.com/forum/#!forum/ansible-devel>`_ is for questions about developing Ansible modules (mostly in Python), fixing bugs in the Ansible core code, asking about prospective feature design, or discussions about extending Ansible or features in progress.
* `Ansible Outreach List <https://groups.google.com/forum/#!forum/ansible-outreach>`_ is for help with promoting Ansible and `Ansible Meetups <https://www.meetup.com/topics/ansible/>`_
* `Ansible Project List <https://groups.google.com/forum/#!forum/ansible-project>`_ is for sharing Ansible tips, answering questions about playbooks and roles, and general user discussion.
* `Molecule Discussions <https://github.com/ansible-community/molecule/discussions>`_ is designed to aid with the development and testing of Ansible roles with Molecule.
The Ansible mailing lists are hosted on Google, but you do not need a Google account to subscribe. To subscribe to a group from a non-Google account, send an email to the subscription address requesting the subscription. For example: ``ansible-devel+subscribe@googlegroups.com``.
.. _communication_irc:
Real-time chat
==============
For real-time interactions, conversations in the Ansible community happen over two chat protocols: Matrix and IRC. We maintain a bridge between Matrix and IRC, so you can choose whichever protocol you prefer. All channels exist in both places. Join a channel any time to ask questions, participate in a Working Group meeting, or just say hello.
Ansible community on Matrix
---------------------------
To join the community using Matrix, you need two things:
* a Matrix account (from `Matrix.org <https://app.element.io/#/register>`_ or any other Matrix homeserver)
* a `Matrix client <https://matrix.org/clients/>`_ (we recommend `Element Webchat <https://app.element.io>`_)
The Ansible community maintains its own Matrix homeserver at ``ansible.im``; however, public registration is currently unavailable.
Matrix chat supports:
* persistence (when you log on, you see all messages since you last logged off)
* edits (Lets you fix typos and so on. **NOTE** Each edit you make on Matrix re-sends the message to IRC. Please try to avoid multiple edits!)
* replies to individual users
* reactions/emojis
* bridging to IRC
* no line limits
* images
The room links in the :ref:`general_channels` or in the :ref:`working_group_list` list will take you directly to the relevant rooms.
If there is no appropriate room for your community, please create it.
For more information, see the community-hosted `Matrix FAQ <https://hackmd.io/@ansible-community/community-matrix-faq>`_.
You can add Matrix shields to your repository's ``README.md`` using the shield in the `community-topics <https://github.com/ansible-community/community-topics#community-topics>`_ repository as a template.
Ansible community on IRC
------------------------
The Ansible community maintains several IRC channels on `irc.libera.chat <https://libera.chat/>`_. To join the community using IRC, you need one thing:
* an IRC client
IRC chat supports:
* no persistence (you only see messages when you are logged on unless you add a bouncer)
* simple text interface
* bridging from Matrix
Our IRC channels may require you to register your IRC nickname. If you receive an error when you connect or when posting a message, see `libera.chat's Nickname Registration guide <https://libera.chat/guides/registration>`_ for instructions. To find all ``ansible`` specific channels on the libera.chat network, use the following command in your IRC client:
.. code-block:: text
/msg alis LIST #ansible* -min 5
as described in the `libera.chat docs <https://libera.chat/guides/findingchannels>`_.
Our channels record history on the Matrix side. The channel history can be viewed in a browser - all channels will report an appropriate link to ``chat.ansible.im`` in their Chanserv entrymsg upon joining the room. Alternatively, a URL of the form ``https://chat.ansible.im/#/room/#{IRC channel name}:libera.chat`` will also work, for example - for the #ansible-docs channel it would be ``https://app.element.io/#/room/#ansible-docs:libera.chat``.
.. _general_channels:
General channels
----------------
The clickable links will take you directly to the relevant Matrix room in your browser; room/channel information is also given for use in other clients:
- `Community social room and posting news for the Bullhorn newsletter <https://matrix.to:/#/#social:ansible.com>`_ - ``Matrix: #social:ansible.com | IRC: #ansible-social``
- `General usage and support questions <https://matrix.to:/#/#users:ansible.com>`_ - ``Matrix: #users:ansible.com | IRC: #ansible``
- `Discussions on developer topics and code related to features or bugs <https://matrix.to/#/#devel:ansible.com>`_ - ``Matrix: #devel:ansible.com | IRC: #ansible-devel``
- `Discussions on community and collections related topics <https://matrix.to:/#/#community:ansible.com>`_ - ``Matrix: #community:ansible.com | IRC: #ansible-community``
- `For public community meetings <https://matrix.to/#/#meeting:ansible.im>`_ - ``Matrix: #meeting:ansible.im | IRC: #ansible-meeting``
- We will generally announce these on one or more of the above mailing lists. See the `meeting schedule and agenda page <https://github.com/ansible/community/blob/main/meetings/README.md>`_
.. _working_group_list:
Working groups
--------------
Many of our community `Working Groups <https://github.com/ansible/community/wiki#working-groups>`_ meet in chat. If you want to get involved in a working group, join the Matrix room or IRC channel where it meets or comment on the agenda.
- `AAP Configuration as Code <https://github.com/redhat-cop/controller_configuration/wiki/AAP-Configuration-as-Code>`_ - Matrix: `#aap_config_as_code:ansible.com <https://matrix.to/#/#aap_config_as_code:ansible.com>`_
- `Amazon (AWS) Working Group <https://github.com/ansible/community/wiki/AWS>`_ - Matrix: `#aws:ansible.com <https://matrix.to:/#/#aws:ansible.com>`_ | IRC: ``#ansible-aws``
- `AWX Working Group <https://github.com/ansible/awx>`_ - Matrix: `#awx:ansible.com <https://matrix.to:/#/#awx:ansible.com>`_ | IRC: ``#ansible-awx``
- `Azure Working Group <https://github.com/ansible/community/wiki/Azure>`_ - Matrix: `#azure:ansible.com <https://matrix.to:/#/#azure:ansible.com>`_ | IRC: ``#ansible-azure``
- `Community Working Group <https://github.com/ansible/community/wiki/Community>`_ (including Meetups) - Matrix: `#community:ansible.com <https://matrix.to:/#/#community:ansible.com>`_ | IRC: ``#ansible-community``
- `Container Working Group <https://github.com/ansible/community/wiki/Container>`_ - Matrix: `#container:ansible.com <https://matrix.to:/#/#container:ansible.com>`_ | IRC: ``#ansible-container``
- `Contributor Experience Working Group <https://github.com/ansible/community/wiki/Contributor-Experience>`_ - Matrix: `#community:ansible.com <https://matrix.to:/#/#community:ansible.com>`_ | IRC: ``#ansible-community``
- `DigitalOcean Working Group <https://github.com/ansible/community/wiki/Digital-Ocean>`_ - Matrix: `#digitalocean:ansible.im <https://matrix.to:/#/#digitalocean:ansible.im>`_ | IRC: ``#ansible-digitalocean``
- `Diversity Working Group <https://github.com/ansible/community/wiki/Diversity>`_ - Matrix: `#diversity:ansible.com <https://matrix.to:/#/#diversity:ansible.com>`_ | IRC: ``#ansible-diversity``
- `Docker Working Group <https://github.com/ansible/community/wiki/Docker>`_ - Matrix: `#devel:ansible.com <https://matrix.to:/#/#devel:ansible.com>`_ | IRC: ``#ansible-devel``
- `Documentation Working Group <https://github.com/ansible/community/wiki/Docs>`_ - Matrix: `#docs:ansible.com <https://matrix.to:/#/#docs:ansible.com>`_ | IRC: ``#ansible-docs``
- `Galaxy Working Group <https://github.com/ansible/community/wiki/Galaxy>`_ - Matrix: `#galaxy:ansible.com <https://matrix.to:/#/#galaxy:ansible.com>`_ | IRC: ``#ansible-galaxy``
- `JBoss Working Group <https://github.com/ansible/community/wiki/JBoss>`_ - Matrix: `#jboss:ansible.com <https://matrix.to:/#/#jboss:ansible.com>`_ | IRC: ``#ansible-jboss``
- `Kubernetes Working Group <https://github.com/ansible/community/wiki/Kubernetes>`_ - Matrix: `#kubernetes:ansible.com <https://matrix.to:/#/#kubernetes:ansible.com>`_ | IRC: ``#ansible-kubernetes``
- `Linode Working Group <https://github.com/ansible/community/wiki/Linode>`_ - Matrix: `#linode:ansible.com <https://matrix.to:/#/#linode:ansible.com>`_ | IRC: ``#ansible-linode``
- `Molecule Working Group <https://github.com/ansible/community/wiki/Molecule>`_ (`testing platform for Ansible playbooks and roles <https://molecule.readthedocs.io>`_) - Matrix: `#molecule:ansible.im <https://matrix.to:/#/#molecule:ansible.im>`_ | IRC: ``#ansible-molecule``
- `MySQL Working Group <https://github.com/ansible-collections/community.mysql/wiki/MySQL-Working-Group>`_ - Matrix: `#mysql:ansible.com <https://matrix.to:/#/#mysql:ansible.com>`_
- `Network Working Group <https://github.com/ansible/community/wiki/Network>`_ - Matrix: `#network:ansible.com <https://matrix.to:/#/#network:ansible.com>`_ | IRC: ``#ansible-network``
- `PostgreSQL Working Group <https://github.com/ansible-collections/community.postgresql/wiki/PostgreSQL-Working-Group>`_ - Matrix: `#postgresql:ansible.com <https://matrix.to:/#/#postgresql:ansible.com>`_
- `Remote Management Working Group <https://github.com/ansible/community/issues/409>`_ - Matrix: `#devel:ansible.com <https://matrix.to:/#/#devel:ansible.com>`_ | IRC: ``#ansible-devel``
- `Security Automation Working Group <https://github.com/ansible/community/wiki/Security-Automation>`_ - Matrix: `#security-automation:ansible.com <https://matrix.to/#/#security-automation:ansible.com>`_ | IRC: ``#ansible-security``
- `Storage Working Group <https://github.com/ansible/community/wiki/Storage>`_ - Matrix: `#storage:ansible.com <https://matrix.to/#/#storage:ansible.com>`_ | IRC: ``#ansible-storage``
- `VMware Working Group <https://github.com/ansible/community/wiki/VMware>`_ - Matrix: `#vmware:ansible.com <https://matrix.to:/#/#vmware:ansible.com>`_ | IRC: ``#ansible-vmware``
- `Windows Working Group <https://github.com/ansible/community/wiki/Windows>`_ - Matrix: `#windows:ansible.com <https://matrix.to:/#/#windows:ansible.com>`_ | IRC: ``#ansible-windows``
- `Ansible developer tools Group <https://github.com/ansible/community/wiki/Ansible-developer-tools>`_ - Matrix: `#devtools:ansible.com <https://matrix.to/#/#devtools:ansible.com>`_ | IRC: ``#ansible-devtools``
Want to `form a new Working Group <https://github.com/ansible/community/blob/main/WORKING-GROUPS.md>`_?
Regional and Language-specific channels
---------------------------------------
- Comunidad Ansible en español - Matrix: `#espanol:ansible.im <https://matrix.to:/#/#espanol:ansible.im>`_ | IRC: ``#ansible-es``
- Communauté française d'Ansible - Matrix: `#francais:ansible.im <https://matrix.to:/#/#francais:ansible.im>`_ | IRC: ``#ansible-fr``
- Communauté suisse d'Ansible - Matrix: `#suisse:ansible.im <https://matrix.to:/#/#suisse:ansible.im>`_ | IRC: ``#ansible-zh``
- European Ansible Community - Matrix: `#europe:ansible.im <https://matrix.to:/#/#europe:ansible.im>`_ | IRC: ``#ansible-eu``
Meetings on chat
----------------
The Ansible community holds regular meetings on various topics on Matrix/IRC, and anyone who is interested is invited to participate. For more information about Ansible meetings, consult the `meeting schedule and agenda page <https://github.com/ansible/community/blob/main/meetings/README.md>`_.
Ansible Community Topics
========================
The `Ansible Community Steering Committee <https://docs.ansible.com/ansible/devel/community/steering/community_steering_committee.html>`_ uses the `community-topics repository <https://github.com/ansible-community/community-topics/issues>`_ to asynchronously discuss with the Community and vote on Community topics in corresponding issues.
Create a new issue in the `repository <https://github.com/ansible-community/community-topics/issues>`_ if you want to discuss an idea that impacts any of the following:
* Ansible Community
* Community collection best practices and `requirements <https://github.com/ansible-collections/overview/blob/main/collection_requirements.rst>`_
* `Community collection inclusion policy <https://github.com/ansible-collections/ansible-inclusion/blob/main/README.md>`_
* `The Community governance <https://docs.ansible.com/ansible/devel/community/steering/community_steering_committee.html>`_
* Other proposals of importance that need the Committee or overall Ansible community attention
Ansible Automation Platform support questions
=============================================
Red Hat Ansible `Automation Platform <https://www.ansible.com/products/automation-platform>`_ is a subscription that contains support, certified content, and tooling for Ansible including content management, a controller, UI and REST API.
If you have a question about Ansible Automation Platform, visit `Red Hat support <https://access.redhat.com/products/red-hat-ansible-automation-platform/>`_ rather than using a chat channel or the general project mailing list.
The Bullhorn
============
**The Bullhorn** is our newsletter for the Ansible contributor community. Please `subscribe <https://eepurl.com/gZmiEP>`_ to receive it.
If you have any content you would like to share, please `contribute/suggest it <https://github.com/ansible/community/wiki/News#the-bullhorn>`_ for upcoming releases.
If you have any questions, please reach out to us at ``the-bullhorn@redhat.com``.
Read past issues on the official Bullhorn's `wiki page <https://github.com/ansible/community/wiki/News#the-bullhorn>`_.

View File

@ -1,304 +0,0 @@
.. _contributing_maintained_collections:
***********************************************
Contributing to Ansible-maintained Collections
***********************************************
The Ansible team welcomes community contributions to the collections maintained by Red Hat Ansible Engineering. This section describes how you can open issues and create PRs with the required testing before your PR can be merged.
.. contents::
:local:
Ansible-maintained collections
=================================
The following table shows:
* **Ansible-maintained collection** - Click the link to the collection on Galaxy, then click the ``repo`` button in Galaxy to find the GitHub repository for this collection.
* **Related community collection** - Collection that holds community-created content (modules, roles, and so on) that may also be of interest to a user of the Ansible-maintained collection. You can, for example, add new modules to the community collection as a technical preview before the content is moved to the Ansible-maintained collection.
* **Sponsor** - Working group that manages the collections. You can join the meetings to discuss important proposed changes and enhancements to the collections.
* **Test requirements** - Testing required for any new or changed content for the Ansible-maintained collection.
* **Developer details** - Describes whether the Ansible-maintained collection accepts direct community issues and PRs for existing collection content, as well as more specific developer guidelines based on the collection type.
.. _ansible-collection-table:
.. raw:: html
<style>
/* Style for this single table. Add delimiters between header columns */
table#ansible-collection-table th {
border-width: 1px;
border-color: #dddddd /*rgb(225, 228, 229)*/;
border-style: solid;
text-align: center;
padding: 5px;
background-color: #eeeeee;
}
tr, td {
border-width: 1px;
border-color: rgb(225, 228, 229);
border-style: solid;
text-align: center;
padding: 5px;
}
</style>
<table id="ansible-collection-table">
<tr>
<th colspan="3">Collection details</th>
<th colspan="4">Test requirements: Ansible collections</th>
<th colspan="2">Developer details</th>
</tr>
<tr>
<th>Ansible collection</th>
<th>Related community collection</th>
<th>Sponsor</th>
<th>Sanity</th>
<th>Unit</th>
<th>Integration</th>
<th>CI Platform</th>
<th>Open to PRs*</th>
<th>Guidelines</th>
</tr>
<tr>
<td><a href="https://galaxy.ansible.com/amazon/aws">amazon.aws</a></td>
<td><a href="https://galaxy.ansible.com/community/aws">community.aws</a></td>
<td><a href="https://github.com/ansible/community/tree/main/group-aws">AWS</a></td>
<td>✓**</td>
<td>**</td>
<td>✓</td>
<td>Zuul</td>
<td>✓</td>
<td><a href="https://docs.ansible.com/ansible/devel/collections/amazon/aws/docsite/dev_guidelines.html">AWS guide</a></td>
</tr>
<tr>
<td><a href="https://galaxy.ansible.com/ansible/netcommon">ansible.netcommon***</a></td>
<td><a href="https://galaxy.ansible.com/community/network">community.network</a></td>
<td><a href="https://github.com/ansible/community/wiki/Network">Network</a></td>
<td>✓</td>
<td>✓</td>
<td>✓</td>
<td>Zuul</td>
<td>✓</td>
<td><a href="https://docs.ansible.com/ansible/devel/network/dev_guide/index.html">Network guide</a></td>
</tr>
<tr>
<td><a href="https://galaxy.ansible.com/ansible/posix">ansible.posix</a></td>
<td><a href="https://galaxy.ansible.com/community/general">community.general</a></td>
<td>Linux</td>
<td>✓</td>
<td></td>
<td></td>
<td>Zuul</td>
<td>✓</td>
<td><a href="https://docs.ansible.com/ansible/latest/dev_guide/index.html">Developer guide</a></td>
</tr>
<tr>
<td><a href="https://galaxy.ansible.com/ansible/windows">ansible.windows</a></td>
<td><a href="https://galaxy.ansible.com/community/windows">community.windows</a></td>
<td><a href="https://github.com/ansible/community/wiki/Windows">Windows</a></td>
<td>✓</td>
<td>✓****</td>
<td>✓</td>
<td>Azure Pipelines and Zuul</td>
<td>✓</td>
<td><a href="https://docs.ansible.com/ansible/devel/dev_guide/developing_modules_general_windows.html#developing-modules-general-windows">Windows guide</a></td>
</tr>
<tr>
<td><a href="https://galaxy.ansible.com/arista/eos">arista.eos</a></td>
<td><a href="https://galaxy.ansible.com/community/network">community.network</a></td>
<td><a href="https://github.com/ansible/community/wiki/Network">Network</a></td>
<td>✓</td>
<td>✓</td>
<td>✓</td>
<td>Zuul</td>
<td>✓</td>
<td><a href="https://docs.ansible.com/ansible/devel/network/dev_guide/index.html">Network guide</a></td>
</tr>
<tr>
<td><a href="https://galaxy.ansible.com/cisco/asa">cisco.asa</a></td>
<td><a href="https://github.com/ansible-collections/community.asa">community.asa</a></td>
<td><a href="https://github.com/ansible/community/wiki/Security-Automation">Security</a></td>
<td>✓</td>
<td>✓</td>
<td>✓</td>
<td>Zuul</td>
<td>✓</td>
<td><a href="https://docs.ansible.com/ansible/latest/dev_guide/index.html">Developer guide</a></td>
</tr>
<tr>
<td><a href="https://galaxy.ansible.com/cisco/ios">cisco.ios</a></td>
<td><a href="https://galaxy.ansible.com/community/network">community.network</a></td>
<td><a href="https://github.com/ansible/community/wiki/Network">Network</a></td>
<td>✓</td>
<td>✓</td>
<td>✓</td>
<td>Zuul</td>
<td>✓</td>
<td><a href="https://docs.ansible.com/ansible/devel/network/dev_guide/index.html">Network guide</a></td>
</tr>
<tr>
<td><a href="https://galaxy.ansible.com/cisco/iosxr">cisco.iosxr</a></td>
<td><a href="https://galaxy.ansible.com/community/network">community.network</a></td>
<td><a href="https://github.com/ansible/community/wiki/Network">Network</a></td>
<td>✓</td>
<td>✓</td>
<td>✓</td>
<td>Zuul</td>
<td>✓</td>
<td><a href="https://docs.ansible.com/ansible/devel/network/dev_guide/index.html">Network guide</a></td>
</tr>
<tr>
<td><a href="https://galaxy.ansible.com/cisco/nxos">cisco.nxos</a></td>
<td><a href="https://galaxy.ansible.com/community/network">community.network</a></td>
<td><a href="https://github.com/ansible/community/wiki/Network">Network</a></td>
<td>✓</td>
<td>✓</td>
<td>✓</td>
<td>Zuul</td>
<td>✓</td>
<td><a href="https://docs.ansible.com/ansible/devel/network/dev_guide/index.html">Network guide</a></td>
</tr>
<tr>
<td><a href="https://galaxy.ansible.com/ibm/qradar">ibm.qradar</a></td>
<td><a href="https://github.com/ansible-collections/community.qradar">community.qradar</a></td>
<td><a href="https://github.com/ansible/community/wiki/Security-Automation">Security</a></td>
<td>✓</td>
<td></td>
<td>✓</td>
<td>Zuul</td>
<td>✓</td>
<td><a href="https://docs.ansible.com/ansible/latest/dev_guide/index.html">Developer guide</a></td>
</tr>
<tr>
<td><a href="https://galaxy.ansible.com/junipernetworks/junos">junipernetworks.junos</a></td>
<td><a href="https://galaxy.ansible.com/community/network">community.network</a></td>
<td><a href="https://github.com/ansible/community/wiki/Network">Network</a></td>
<td>✓</td>
<td>✓</td>
<td>✓</td>
<td>Zuul</td>
<td>✓</td>
<td><a href="https://docs.ansible.com/ansible/devel/network/dev_guide/index.html">Network guide</a></td>
</tr>
<tr>
<td><a href="https://galaxy.ansible.com/kubernetes/core">kubernetes.core</a></td>
<td><a href="https://galaxy.ansible.com/kubernetes/core">kubernetes.core</a></td>
<td><a href="https://github.com/ansible/community/wiki/Kubernetes">Kubernetes</a></td>
<td>✓</td>
<td>✓</td>
<td>✓</td>
<td>GitHub Actions</td>
<td>✓</td>
<td></td>
</tr>
<tr>
<td><a href="https://cloud.redhat.com/ansible/automation-hub/redhat/openshift">redhat.openshift</a></td>
<td><a href="https://galaxy.ansible.com/community/okd">community.okd</a></td>
<td><a href="https://github.com/ansible/community/wiki/Kubernetes">Kubernetes</a></td>
<td>✓</td>
<td>✓</td>
<td>✓</td>
<td>GitHub Actions</td>
<td>✓</td>
<td></td>
</tr>
<tr>
<td><a href="https://galaxy.ansible.com/openvswitch/openvswitch">openvswitch.openvswitch</a></td>
<td><a href="https://galaxy.ansible.com/community/network">community.network</a></td>
<td><a href="https://github.com/ansible/community/wiki/Network">Network</a></td>
<td>✓</td>
<td>✓</td>
<td>✓</td>
<td>Zuul</td>
<td>✓</td>
<td><a href="https://docs.ansible.com/ansible/devel/network/dev_guide/index.html">Network guide</a></td>
</tr>
<tr>
<td><a href="https://github.com/ansible-collections/splunk.es">splunk.es</a></td>
<td><a href="https://github.com/ansible-collections/community.es">community.es</a></td>
<td><a href="https://github.com/ansible/community/wiki/Security-Automation">Security</a></td>
<td>✓</td>
<td></td>
<td>✓</td>
<td>Zuul</td>
<td>✓</td>
<td><a href="https://docs.ansible.com/ansible/latest/dev_guide/index.html">Developer guide</a></td>
</tr>
<tr>
<td><a href="https://galaxy.ansible.com/vyos/vyos">vyos.vyos</a></td>
<td><a href="https://galaxy.ansible.com/community/network">community.network</a></td>
<td><a href="https://github.com/ansible/community/wiki/Network">Network</a></td>
<td>✓</td>
<td>✓</td>
<td>✓</td>
<td>Zuul</td>
<td>✓</td>
<td><a href="https://docs.ansible.com/ansible/devel/network/dev_guide/index.html">Network guide</a></td>
</tr>
<tr>
<td><a href="https://galaxy.ansible.com/vmware/vmware_rest">vmware.vmware_rest</a></td>
<td><a href="https://galaxy.ansible.com/vmware/vmware_rest">vmware.vmware_rest</a></td>
<td><a href="https://github.com/ansible/community/wiki/VMware">VMware</a></td>
<td>✓</td>
<td>✓</td>
<td>✓</td>
<td>Zuul</td>
<td>✓</td>
<td><a href="https://docs.ansible.com/ansible/devel/collections/vmware/vmware_rest/docsite/dev_guide.html">VMware REST guide</a></td>
</tr>
</table>
.. note::
\* A ✓ under **Open to PRs** means the collection welcomes GitHub issues and PRs for any changes to existing collection content (plugins, roles, and so on).
\*\* Integration tests are required and unit tests are welcomed but not required for the AWS collections. An exception to this is made in cases where integration tests are logistically not feasible due to external requirements. An example of this is AWS Direct Connect, as this service can not be functionally tested without the establishment of network peering connections. Unit tests are therefore required for modules that interact with AWS Direct Connect. Exceptions to ``amazon.aws`` must be approved by Red Hat, and exceptions to ``community.aws`` must be approved by the AWS community.
\*\*\* ``ansible.netcommon`` contains all foundational components for enabling many network and security :ref:`platform <platform_options>` collections. It contains all connection and filter plugins required, and installs as a dependency when you install the platform collection.
\*\*\*\* Unit tests for Windows PowerShell modules are an exception to testing, but unit tests are valid and required for the remainder of the collection, including Ansible-side plugins.
.. _which_collection:
Deciding where your contribution belongs
=========================================
We welcome contributions to Ansible-maintained collections. Because these collections are part of a downstream supported Red Hat product, the criteria for contribution, testing, and release may be higher than other community collections. The related community collections (such as ``community.general`` and ``community.network``) have less-stringent requirements and are a great place for new functionality that may become part of the Ansible-maintained collection in a future release.
The following scenarios use the ``arista.eos`` collection to help explain when to contribute to the Ansible-maintained collection, and when to propose your change or idea to the related community collection:
1. You want to fix a problem in the ``arista.eos`` Ansible-maintained collection. Create the PR directly in the `arista.eos collection GitHub repository <https://github.com/ansible-collections/arista.eos>`_. Apply all the :ref:`merge requirements <ansible_collection_merge_requirements>`.
2. You want to add a new Ansible module for Arista. Your options are one of the following:
* Propose a new module in the ``arista.eos`` collection (requires approval from Arista and Red Hat).
* Propose a new collection in the ``arista`` namespace (requires approval from Arista and Red Hat).
* Propose a new module in the ``community.network`` collection (requires network community approval).
* Place your new module in a collection in your own namespace (no approvals required).
Most new content should go into either a related community collection or your own collection first so that it is well established in the community before you propose adding it to the ``arista`` namespace, where inclusion and maintenance criteria are much higher.
.. _ansible_collection_merge_requirements:
Requirements to merge your PR
==============================
Your PR must meet the following requirements before it can merge into an Ansible-maintained collection:
#. The PR is in the intended scope of the collection. Communicate with the appropriate Ansible sponsor listed in the :ref:`Ansible-maintained collection table <ansible-collection-table>` for help.
#. For network and security domains, the PR follows the :ref:`resource module development principles <developing_resource_modules>`.
#. Passes :ref:`sanity tests and tox <tox_resource_modules>`.
#. Passes unit and integration tests, as listed in the :ref:`Ansible-maintained collection table <ansible-collection-table>` and described in :ref:`testing_resource_modules`.
#. Follows Ansible guidelines. See :ref:`developing_modules` and :ref:`developing_collections`.
#. Addresses all review comments.
#. Includes an appropriate :ref:`changelog <community_changelogs>`.

View File

@ -1,29 +0,0 @@
.. _community_contributions:
********************************
ansible-core Contributors Guide
********************************
.. toctree::
:maxdepth: 2
reporting_bugs_and_features
documentation_contributions
development_process
other_tools_and_programs
If you have a specific Ansible interest or expertise (for example, VMware, Linode, and so on), consider joining a :ref:`working group <working_group_list>`.
Working with the Ansible repo
=============================
* I want to make my first code changes to a collection or to ``ansible-core``. How do I :ref:`set up my Python development environment <environment_setup>`?
* I would like to get more efficient as a developer. How can I find :ref:`editors, linters, and other tools <other_tools_and_programs>` that will support my Ansible development efforts?
* I want my code to meet Ansible's guidelines. Where can I find guidance on :ref:`coding in Ansible <developer_guide>`?
* I would like to connect Ansible to a new API or other resource. How do I :ref:`create a collection <developing_modules_in_groups>`?
* My pull request is marked ``needs_rebase``. How do I :ref:`rebase my PR <rebase_guide>`?
* I am using an older version of Ansible and want a bug fixed in my version that has already been fixed on the ``devel`` branch. How do I :ref:`backport a bugfix PR <backport_process>`?
* I have an open pull request with a failing test. How do I learn about Ansible's :ref:`testing (CI) process <developing_testing>`?
* I am ready to step up as a collection maintainer. What are the :ref:`guidelines for maintainers <maintainers>`?
* A module in a collection I maintain is obsolete. How do I :ref:`deprecate a module <deprecating_modules>`?

View File

@ -1,35 +0,0 @@
.. _collections_contributions:
*************************************
Ansible Collections Contributor Guide
*************************************
.. toctree::
:maxdepth: 2
collection_development_process
reporting_collections
create_pr_quick_start
collection_contributors/test_index
collection_contributors/collection_reviewing
collection_contributors/collection_requirements
maintainers
contributing_maintained_collections
steering/steering_index
documentation_contributions
other_tools_and_programs
If you have a specific Ansible interest or expertise (for example, VMware, Linode, and so on), consider joining a :ref:`working group <working_group_list>`.
Working with the Ansible collection repositories
=================================================
* How can I find :ref:`editors, linters, and other tools <other_tools_and_programs>` that will support my Ansible development efforts?
* Where can I find guidance on :ref:`coding in Ansible <developer_guide>`?
* How do I :ref:`create a collection <developing_modules_in_groups>`?
* How do I :ref:`rebase my PR <rebase_guide>`?
* How do I learn about Ansible's :ref:`testing (CI) process <developing_testing>`?
* How do I :ref:`deprecate a module <deprecating_modules>`?
* See `Collection developer tutorials <https://www.ansible.com/products/ansible-community-training>`_ for a quick introduction on how to develop and test your collection contributions.

View File

@ -1,7 +0,0 @@
.. _contributor_license_agreement:
******************************
Contributors License Agreement
******************************
By contributing you agree that these contributions are your own (or approved by your employer) and you grant a full, complete, irrevocable copyright license to all users and developers of the project, present and future, pursuant to the license of the project.

View File

@ -1,114 +0,0 @@
****************
Contributor path
****************
This section describes the contributor's journey from the beginning to becoming a leader who helps shape the future of Ansible. You can use this path as a roadmap for your long-term participation.
Any contribution to the project, even a small one, is very welcome and valuable. Any contribution counts, whether it's feedback on an issue, a pull request, a topic or documentation change, or a coding contribution. When you contribute regularly, your proficiency and judgment in the related area increase and, along with this, the importance of your presence in the project.
.. contents::
:local:
Determine your area of interest
=================================
First, determine areas that are interesting to you. Consider your current experience and what you'd like to gain. For example, if you use a specific collection, have a look there. See :ref:`how_can_i_help` for more ideas on how to help.
Find the corresponding project
====================================
There are multiple community projects in the Ansible ecosystem that you can contribute to:
- `Ansible Core <https://docs.ansible.com/ansible-core/devel/index.html>`_
- `Collections <https://docs.ansible.com/ansible/latest/user_guide/collections_using.html>`_
- `AWX <https://github.com/ansible/awx>`_
- `Galaxy <https://galaxy.ansible.com/>`_
- `ansible-lint <https://ansible-lint.readthedocs.io/en/latest/>`_
- `Molecule <https://molecule.readthedocs.io/en/latest/>`_
Learn
=====
The required skillset depends on the area of interest and the project you'll be working on. Remember that the best way to learn is by doing.
Specific knowledge for code developers
----------------------------------------
Code development requires the most technical knowledge. Let's sort out what an Ansible developer should learn.
You should understand at least the *basics* of the following tools:
- `Python programming language <https://docs.python.org/3/tutorial/>`_
- `Git <https://git-scm.com/docs/gittutorial>`_
- `GitHub collaborative development model through forks and pull requests <https://docs.github.com/en/github/collaborating-with-pull-requests/getting-started/about-collaborative-development-models>`_
You can learn these tools more in-depth when working on your first contributions.
Each Ansible project has its own set of contributor guidelines. Familiarize yourself with these as you prepare your first contributions.
* :ref:`Ansible Core development <developer_guide>`.
* :ref:`Ansible collection development <developing_collections>` and the collection-level contributor guidelines in the collection repository.
Making your first contribution
==============================
You can find some ideas on how you can contribute in :ref:`how_can_i_help`.
If you are interested in contributing to collections, take a look at :ref:`collection contributions<collections_contributions>` and the `collection repository <https://github.com/ansible-collections/>`_'s ``README`` and ``CONTRIBUTING`` files. To make your first experience as smooth as possible, read the repository documentation carefully, then ask the repository maintainers for guidance if you have any questions.
Take a look at GitHub issues labeled with the ``easyfix`` and ``good_first_issue`` labels for:
- `Ansible collections repositories <https://github.com/search?q=user%3Aansible-collections+label%3Aeasyfix%2C%22good+first+issue%22+state%3Aopen&type=Issues>`_
- `All other Ansible projects <https://github.com/search?q=user%3Aansible+user%3Aansible-community+label%3Aeasyfix%2C%22good+first+issue%22+state%3Aopen&type=Issues>`_
Issues labeled with the ``docs`` label in `Ansible collections <https://github.com/search?q=user%3Aansible-collections+label%3Adocs+state%3Aopen+type%3Aissue&type=Issues>`_ and `other <https://github.com/search?q=user%3Aansible+user%3Aansible-community+label%3Adocs+state%3Aopen+type%3Aissue&type=Issues>`_ Ansible projects can also be a good place to start.
When you choose an issue to work on, add a comment directly on the GitHub issue to say you are looking at it and let others know to avoid conflicting work.
You can also ask for help in a comment if you need it.
Continue to contribute
======================
We don't expect everybody to know everything. Start small, think big. When you contribute regularly, your proficiency and judgment in the related area will improve quickly and, along with this, the importance of your presence in the project.
See :ref:`communication` for ways to communicate and engage with the Ansible community, including working group meetings, accessing the Bullhorn news bulletin, and upcoming contributor summits.
Teach others
============
Share your experience with other contributors through :ref:`improving documentation<community_documentation_contributions>`, answering questions from other contributors and users on :ref:`Matrix/Libera.Chat IRC<communication>`, giving advice on issues and pull requests, and discussing `Community Topics <https://github.com/ansible-community/community-topics/issues>`_.
Become a collection maintainer
===============================
If you are a code contributor to a collection, you can get extended permissions in the repository and become a maintainer. A collection maintainer is a contributor trusted by the community who makes significant and regular contributions to the project and has shown themselves to be a specialist in the related area. See :ref:`maintainers` for details.
For some collections that use the `collection bot <https://github.com/ansible-community/collection_bot>`_, such as `community.general <https://github.com/ansible-collections/community.general>`_ and `community.network <https://github.com/ansible-collections/community.network>`_, you can have different levels of access and permissions.
* :ref:`module_maintainers` - the stage prior to becoming a collection maintainer. The maintained file is usually a module or plugin. File maintainers have indirect commit rights.
* supershipit permissions - similar to being a file maintainer, but the scope of the indirect commit rights is the whole repository.
* ``triage`` - Access to the repository that allows contributors to manage issues and pull requests.
* ``write`` access to the repository, also known as ``commit`` access. In other words, becoming a committer. This access level allows contributors to merge pull requests to the development branch as well as perform all the other activities listed in :ref:`maintainers`.
For information about permission levels, see the `GitHub official documentation <https://docs.github.com/en/organizations/managing-access-to-your-organizations-repositories/repository-permission-levels-for-an-organization>`_.
Become a steering committee member
==================================
.. note::
You do NOT have to be a programmer to become a steering committee member.
The :ref:`Steering Committee <community_steering_committee>` member status reflects the highest level of trust, which allows contributors to lead the project by making very important `decisions <https://github.com/ansible-community/community-topics/issues>`_ for the Ansible project. The Committee members are the community leaders who shape the project's future and the future of automation in the IT world in general.
To reach this status, in addition to the things mentioned in this document, you should do what the current Committee members did before getting it:
* Subscribe to, comment on, and vote on the `Community Topics <https://github.com/ansible-community/community-topics/issues>`_.
* Propose your topics.
* If time permits, join the `Community meetings <https://github.com/ansible/community/blob/main/meetings/README.md#schedule>`_. Note this is **NOT** a requirement.

View File

@ -1,272 +0,0 @@
.. _collection_quickstart:
********************************************
Creating your first collection pull request
********************************************
This section describes all steps needed to create your first patch and submit a pull request for a collection.
.. _collection_prepare_local:
Prepare your environment
========================
.. note::
These steps assume a Linux work environment with ``git`` installed.
1. Install and start ``docker`` or ``podman``. This ensures tests run properly isolated and in the same environment as in CI.
2. :ref:`Install Ansible or ansible-core <installation_guide>`. You need the ``ansible-test`` utility which is provided by either of these packages.
3. Create the following directories in your home directory:
.. code-block:: bash
$ mkdir -p ~/ansible_collections/NAMESPACE/COLLECTION_NAME
For example, if the collection is ``community.mysql``, it would be:
.. code-block:: bash
$ mkdir -p ~/ansible_collections/community/mysql
4. Fork the collection repository through the GitHub web interface.
5. Clone the forked repository from your profile to the created path:
.. code-block:: bash
$ git clone https://github.com/YOURACC/COLLECTION_REPO.git ~/ansible_collections/NAMESPACE/COLLECTION_NAME
If you prefer to use the SSH protocol:
.. code-block:: bash
$ git clone git@github.com:YOURACC/COLLECTION_REPO.git ~/ansible_collections/NAMESPACE/COLLECTION_NAME
6. Go to your newly cloned repository.
.. code-block:: bash
$ cd ~/ansible_collections/NAMESPACE/COLLECTION_NAME
7. Ensure you are on the default branch (it is usually ``main``):
.. code-block:: bash
$ git status
8. Show remotes. There should be the ``origin`` repository only:
.. code-block:: bash
$ git remote -v
9. Add the ``upstream`` repository:
.. code-block:: bash
$ git remote add upstream https://github.com/ansible-collections/COLLECTION_REPO.git
This is the repository you forked from.
10. Update your local default branch. Assuming that it is ``main``:
.. code-block:: bash
$ git fetch upstream
$ git rebase upstream/main
11. Create a branch for your changes:
.. code-block:: bash
$ git checkout -b name_of_my_branch
Change the code
===============
.. note::
Do NOT mix several bug fixes or features that are not tightly related in one pull request. Use separate pull requests for different changes.
You should start with writing integration and unit tests if applicable. These can verify that the bug exists (prior to your code fix) and that your code fixed the bug once the tests pass.
.. note::
If there are any difficulties with writing or running the tests or you are not sure if the case can be covered, you can skip this step. Other contributors can help you with tests later if needed.
.. note::
Some collections do not have integration tests. In this case, unit tests are required.
All integration tests are stored in ``tests/integration/targets`` subdirectories.
Go to the subdirectory containing the name of the module you are going to change.
For example, if you are fixing the ``mysql_user`` module in the ``community.mysql`` collection,
its tests are in ``tests/integration/targets/test_mysql_user/tasks``.
The ``main.yml`` file holds test tasks and includes other test files.
Look for a suitable test file to integrate your tests or create and include a dedicated test file.
You can use one of the existing test files as a draft.
When fixing a bug, write a task that reproduces the bug from the issue.
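For example, a reproducing task for a hypothetical ``mysql_user`` bug might look like the following sketch (the option values and the assertion are illustrative only, not taken from a real issue):

.. code-block:: yaml

   - name: Try to create a user with the privilege string from the bug report
     community.mysql.mysql_user:
       name: bug_reproducer        # placeholder user name
       priv: '*.*:SELECT'          # placeholder privilege string from the report
       state: present
     register: result

   - name: Assert that the module reports a change
     assert:
       that:
         - result is changed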
Put the reported case in the tests, then run integration tests with the following command:
.. code-block:: bash
$ ansible-test integration name_of_test_subdirectory --docker -v
For example, if the test files you changed are stored in ``tests/integration/targets/test_mysql_user/``, the command is as follows:
.. code-block:: bash
$ ansible-test integration test_mysql_user --docker -v
You can use the ``-vv`` or ``-vvv`` argument if you need more detailed output.
In the examples above, the default test image is automatically downloaded and used to create and run a test container.
Use the default test image for platform-independent integration tests such as those for cloud modules.
If you need to run the tests against a specific distribution, see the :ref:`list of supported container images <test_container_images>`. For example:
.. code-block:: bash
$ ansible-test integration name_of_test_subdirectory --docker fedora35 -v
.. note::
If you are not sure whether you should use the default image for testing or a specific one, skip the entire step - the community can help you later. You can also try to use the collection repository's CI to figure out which containers are used.
After the test run completes, there are usually two possible outcomes:
- If the bug has not appeared and the tests have passed successfully, ask the reporter to provide more details. It may not be a bug or can relate to a particular software version used or specifics of the reporter's local environment configuration.
- The bug has appeared and the tests have failed as expected showing the reported symptoms.
Fix the bug
=============
See :ref:`module_contribution` for some general guidelines about Ansible module development that may help you craft a good code fix for the bug.
Test your changes
=================
1. Install ``flake8`` (``pip install flake8``, or install the corresponding package on your operating system).
2. Run ``flake8`` against a changed file:
.. code-block:: bash
$ flake8 path/to/changed_file.py
This shows unused imports, which are not shown by sanity tests, as well as other common issues.
Optionally, you can use the ``--max-line-length=160`` command-line argument.
3. Run sanity tests:
.. code-block:: bash
$ ansible-test sanity path/to/changed_file.py --docker -v
If they fail, look at the output carefully - it is informative and helps to identify the problem line quickly.
Sanity failures usually relate to incorrect code and documentation formatting.
4. Run integration tests:
.. code-block:: bash
$ ansible-test integration name_of_test_subdirectory --docker -v
For example, if the test files you changed are stored in ``tests/integration/targets/test_mysql_user/``, the command is:
.. code-block:: bash
$ ansible-test integration test_mysql_user --docker -v
You can use the ``-vv`` or ``-vvv`` argument if you need more detailed output.
There are two possible outcomes:
- They have failed. Look at the output of the command. Fix the problem in the code and run again. Repeat the cycle until the tests pass.
- They have passed. Remember they failed originally? Our congratulations! You have fixed the bug.
In addition to the integration tests, you can also cover your changes with unit tests. This is often required when integration tests are not applicable to the collection.
We use `pytest <https://docs.pytest.org/en/latest/>`_ as a testing framework.
Files with unit tests are stored in the ``tests/unit/plugins/`` directory. If you want to run unit tests, say, for ``tests/unit/plugins/test_myclass.py``, the command is:
.. code-block:: bash
$ ansible-test units tests/unit/plugins/test_myclass.py --docker
If you want to run all unit tests available in the collection, run:
.. code-block:: bash
$ ansible-test units --docker
Submit a pull request
=====================
1. Commit your changes with an informative but short commit message:
.. code-block:: bash
$ git add /path/to/changed/file
$ git commit -m "module_name_you_fixed: fix crash when ..."
2. Push the branch to ``origin`` (your fork):
.. code-block:: bash
$ git push origin name_of_my_branch
3. In a browser, navigate to the ``upstream`` repository (https://github.com/ansible-collections/COLLECTION_REPO).
4. Click the :guilabel:`Pull requests` tab.
GitHub is tracking your fork, so it should see the new branch in it and automatically offer to create a pull request. Sometimes GitHub does not do it, and you should click the :guilabel:`New pull request` button yourself. Then choose :guilabel:`compare across forks` under the :guilabel:`Compare changes` title.
5. Choose your repository and the new branch you pushed in the right drop-down list. Confirm.
a. Fill out the pull request template with all information you want to mention.
b. Put ``Fixes + link to the issue`` in the pull request's description.
c. Put ``[WIP] + short description`` in the pull request's title. Mention the name of the module/plugin you are modifying at the beginning of the description.
d. Click :guilabel:`Create pull request`.
6. Add a :ref:`changelog fragment <collection_changelog_fragments>` to the ``changelogs/fragments`` directory. It will be published in release notes, so users will know about the fix. A minimal example fragment is sketched after this step.
a. Run the sanity test for the fragment:
.. code-block:: bash
$ ansible-test sanity changelogs/fragments/ --docker -v
b. If the tests passed, commit and push the changes:
.. code-block:: bash
$ git add changelogs/fragments/myfragment.yml
$ git commit -m "Add changelog fragment"
$ git push origin name_of_my_branch
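As a reference, the fragment added in step 6 could be as small as the following sketch (the file name, module name, and issue link are placeholders, not real ones):

.. code-block:: yaml

   # changelogs/fragments/123-mysql_user-fix-crash.yml
   bugfixes:
     - mysql_user - fix the crash described in the linked issue (https://github.com/ansible-collections/community.mysql/issues/123).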
7. Verify that the CI tests, which run automatically on Red Hat infrastructure after every commit, have passed.
You will see the CI status at the bottom of your pull request. If the tests are green and you do not want to add more commits before someone takes a closer look at it, remove ``[WIP]`` from the title. Mention the issue reporter in a comment and let contributors know that the pull request is "Ready for review".
8. Wait for reviews. You can also ask for a review in the ``#ansible-community`` :ref:`Matrix/Libera.Chat IRC channel <communication_irc>`.
9. If the pull request looks good to the community, committers will merge it.
For more in-depth details on this process, see the :ref:`Ansible developer guide <developer_guide>`.

View File

@ -1,368 +0,0 @@
.. _community_development_process:
*****************************
The Ansible Development Cycle
*****************************
Ansible developers (including community contributors) add new features, fix bugs, and update code in many different repositories. The `ansible/ansible repository <https://github.com/ansible/ansible>`_ contains the code for basic features and functions, such as copying module code to managed nodes. This code is also known as ``ansible-core``. Other repositories contain plugins and modules that enable Ansible to execute specific tasks, like adding a user to a particular database or configuring a particular network device. These repositories contain the source code for collections.
Development on ``ansible-core`` occurs on two levels. At the macro level, the ``ansible-core`` developers and maintainers plan releases and track progress with roadmaps and projects. At the micro level, each PR has its own lifecycle.
Development on collections also occurs at the macro and micro levels. Each collection has its own macro development cycle. For more information on the collections development cycle, see :ref:`contributing_maintained_collections`. The micro-level lifecycle of a PR is similar in collections and in ``ansible-core``.
.. contents::
:local:
Macro development: ``ansible-core`` roadmaps, releases, and projects
=====================================================================
If you want to follow the conversation about what features will be added to ``ansible-core`` for upcoming releases and what bugs are being fixed, you can watch these resources:
* the :ref:`roadmaps`
* the :ref:`Ansible Release Schedule <release_and_maintenance>`
* the :ref:`ansible-core project branches and tags <core_branches_and_tags>`
* various GitHub `projects <https://github.com/ansible/ansible/projects>`_ - for example:
* the `2.16 release project <https://github.com/ansible/ansible/projects/47>`_
* the `core documentation project <https://github.com/orgs/ansible/projects/94/views/1>`_
.. _community_pull_requests:
Micro development: the lifecycle of a PR
========================================
If you want to contribute a feature or fix a bug in ``ansible-core`` or in a collection, you must open a **pull request** ("PR" for short). GitHub provides a great overview of `how the pull request process works <https://help.github.com/articles/about-pull-requests/>`_ in general. The ultimate goal of any pull request is to get merged and become part of a collection or ``ansible-core``.
Here's an overview of the PR lifecycle:
* Contributor opens a PR (always against the ``devel`` branch)
* Ansibot reviews the PR
* Ansibot assigns labels
* Ansibot pings maintainers
* Azure Pipelines runs the test suite
* Developers, maintainers, community review the PR
* Contributor addresses any feedback from reviewers
* Developers, maintainers, community re-review
* PR merged or closed
* PR :ref:`backported <backport_process>` to one or more ``stable-X.Y`` branches (optional, bugfixes only)
Automated PR review: ansibullbot
--------------------------------
Because Ansible receives many pull requests, and because we love automating things, we have automated several steps of the process of reviewing and merging pull requests with a tool called Ansibullbot, or Ansibot for short.
`Ansibullbot <https://github.com/ansible/ansibullbot/blob/master/ISSUE_HELP.md>`_ serves many functions:
- Responds quickly to PR submitters to thank them for submitting their PR
- Identifies the community maintainer responsible for reviewing PRs for any files affected
- Tracks the current status of PRs
- Pings responsible parties to remind them of any PR actions for which they may be responsible
- Provides maintainers with the ability to move PRs through the workflow
- Identifies PRs abandoned by their submitters so that we can close them
- Identifies modules abandoned by their maintainers so that we can find new maintainers
Ansibot workflow
^^^^^^^^^^^^^^^^
Ansibullbot runs continuously. You can generally expect to see changes to your issue or pull request within thirty minutes. Ansibullbot examines every open pull request in the repositories, and enforces state roughly according to the following workflow:
- If a pull request has no workflow labels, it's considered **new**. Files in the pull request are identified, and the maintainers of those files are pinged by the bot, along with instructions on how to review the pull request. (Note: sometimes we strip labels from a pull request to "reboot" this process.)
- If the module maintainer is not ``$team_ansible``, the pull request then goes into the **community_review** state.
- If the module maintainer is ``$team_ansible``, the pull request then goes into the **core_review** state (and probably sits for a while).
- If the pull request is in **community_review** and has received comments from the maintainer:
- If the maintainer says ``shipit``, the pull request is labeled **shipit**, whereupon the Core team assesses it for final merge.
- If the maintainer says ``needs_info``, the pull request is labeled **needs_info** and the submitter is asked for more info.
- If the maintainer says ``needs_revision``, the pull request is labeled **needs_revision** and the submitter is asked to fix some things.
- If the submitter says ``ready_for_review``, the pull request is put back into **community_review** or **core_review** and the maintainer is notified that the pull request is ready to be reviewed again.
- If the pull request is labeled **needs_revision** or **needs_info** and the submitter has not responded lately:
- The submitter is first politely pinged after two weeks, pinged again after two more weeks and labeled **pending_action**, and the issue or pull request will be closed two weeks after that.
- If the submitter responds at all, the clock is reset.
- If the pull request is labeled **community_review** and the reviewer has not responded lately:
- The reviewer is first politely pinged after two weeks, pinged again after two more weeks and labeled **pending_action**, and then may be reassigned to ``$team_ansible`` or labeled **core_review**, or often the submitter of the pull request is asked to step up as a maintainer.
- If Azure Pipelines tests fail, or if the code cannot be merged, the pull request is automatically put into **needs_revision** along with a message to the submitter explaining why.
There are corner cases and frequent refinements, but this is the workflow in general.
PR labels
^^^^^^^^^
Generally, there are two types of PR labels: **workflow** labels and **information** labels.
Workflow labels
"""""""""""""""
- **community_review**: Pull requests for modules that are currently awaiting review by their maintainers in the Ansible community.
- **core_review**: Pull requests for modules that are currently awaiting review by their maintainers on the Ansible Core team.
- **needs_info**: Waiting on info from the submitter.
- **needs_rebase**: Waiting on the submitter to rebase.
- **needs_revision**: Waiting on the submitter to make changes.
- **shipit**: Waiting for final review by the core team for potential merge.
Information labels
""""""""""""""""""
- **backport**: this is applied automatically if the PR is requested against any branch that is not devel. The bot immediately assigns the labels ``backport`` and ``core_review``.
- **bugfix_pull_request**: applied by the bot based on the templatized description of the PR.
- **cloud**: applied by the bot based on the paths of the modified files.
- **docs_pull_request**: applied by the bot based on the templatized description of the PR.
- **easyfix**: applied manually, inconsistently used but sometimes useful.
- **feature_pull_request**: applied by the bot based on the templatized description of the PR.
- **networking**: applied by the bot based on the paths of the modified files.
- **owner_pr**: largely deprecated. Formerly workflow, now informational. Originally, PRs submitted by the maintainer would automatically go to **shipit** based on this label. If the submitter is also a maintainer, we notify the other maintainers and still require one of the maintainers (including the submitter) to give a **shipit**.
- **pending_action**: applied by the bot to PRs that are not moving. Reviewed every couple of weeks by the community team, who tries to figure out the appropriate action (closure, asking for new maintainers, and so on).
Special Labels
""""""""""""""
- **new_plugin**: this is for new modules or plugins that are not yet in Ansible.
**Note:** ``new_plugin`` kicks off a completely separate process, and frankly it doesn't work very well at present. We're doing our best to improve this process.
Human PR review
---------------
After Ansibot reviews the PR and applies labels, the PR is ready for human review. The most likely reviewers for any PR are the maintainers for the module that PR modifies.
Each module has at least one assigned :ref:`maintainer <maintainers>`, listed in the `BOTMETA.yml <https://github.com/ansible/ansible/blob/devel/.github/BOTMETA.yml>`_ file.
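For illustration, a hypothetical ``BOTMETA.yml`` entry mapping a module file to its maintainers might look like this sketch (the path and GitHub handles are placeholders):

.. code-block:: yaml

   files:
     lib/ansible/modules/foo_module.py:
       maintainers: githubuser1 githubuser2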
The maintainer's job is to review PRs that affect that module and decide whether they should be merged (``shipit``) or revised (``needs_revision``). We'd like to have at least one community maintainer for every module. If a module has no community maintainers assigned, the maintainer is listed as ``$team_ansible``.
Once a human applies the ``shipit`` label, the :ref:`committers <community_committer_guidelines>` decide whether the PR is ready to be merged. Not every PR that gets the ``shipit`` label is actually ready to be merged, but the better our reviewers are, and the better our guidelines are, the more likely it will be that a PR that reaches **shipit** will be mergeable.
Making your PR merge-worthy
===========================
We do not merge every PR. Here are some tips for making your PR useful, attractive, and merge-worthy.
.. _community_changelogs:
Creating changelog fragments
------------------------------
Changelogs help users and developers keep up with changes to ansible-core and Ansible collections. Ansible and many collections build changelogs for each release from fragments. For ansible-core and collections using this model, you **must** add a changelog fragment to any PR that changes functionality or fixes a bug.
You do not need a changelog fragment for PRs that:
* add new modules and plugins, because Ansible tooling does that automatically;
* contain only documentation changes.
.. note::
Some collections require a changelog fragment for every pull request. They use the ``trivial:`` section for entries mentioned above that will be skipped when building a release changelog.
More precisely:
* Every bugfix PR must have a changelog fragment. The only exception is fixes to a change that has not yet been included in a release.
* Every feature PR must have a changelog fragment.
* New modules and plugins (including jinja2 filter and test plugins) must have ``version_added`` entries set correctly in their documentation, and do not need a changelog fragment. The tooling detects new modules and plugins by their ``version_added`` values and announces them in the next release's changelog automatically.
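For example, a minimal sketch of the ``DOCUMENTATION`` header for a new module in a collection whose next release is 1.3.0 could look like this (the module name, description, and version are placeholders):

.. code-block:: yaml

   module: foo_info
   short_description: Gather information about foo resources
   version_added: 1.3.0
   description:
     - Gathers information about foo resources from the bar service.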
We build short summary changelogs for minor releases as well as for major releases. If you backport a bugfix, include a changelog fragment with the backport PR.
.. _changelogs_how_to:
Creating a changelog fragment
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
A basic changelog fragment is a ``.yaml`` or ``.yml`` file placed in the ``changelogs/fragments/`` directory. Each file contains a yaml dict with keys like ``bugfixes`` or ``major_changes`` followed by a list of changelog entries of bugfixes or features. Each changelog entry is rST embedded inside the yaml file, which means that certain constructs need to be escaped so they can be interpreted by rST and not by yaml (or escaped for both yaml and rST if you prefer). Each PR **must** use a new fragment file rather than adding to an existing one, so we can trace the change back to the PR that introduced it.
PRs which add a new module or plugin do not necessarily need a changelog fragment. See the previous section :ref:`community_changelogs`. Also see the next section :ref:`changelogs_how_to_format` for the precise format changelog fragments should have.
To create a changelog entry, create a new file with a unique name in the ``changelogs/fragments/`` directory of the corresponding repository. The file name should include the PR number and a description of the change. It must end with the file extension ``.yaml`` or ``.yml``. For example: ``40696-user-backup-shadow-file.yaml``
A single changelog fragment may contain multiple sections but most will only contain one section. The toplevel keys (bugfixes, major_changes, and so on) are defined in the `config file <https://github.com/ansible/ansible/blob/devel/changelogs/config.yaml>`_ for our `release note tool <https://github.com/ansible-community/antsibull-changelog/blob/main/docs/changelogs.rst>`_. Here are the valid sections and a description of each:
**breaking_changes**
MUST include changes that break existing playbooks or roles. This includes any change to existing behavior that forces users to update tasks. Breaking changes means the user MUST make a change when they update. Breaking changes MUST only happen in a major release of the collection. Write in present tense and clearly describe the new behavior that the end user must now follow. Displayed in both the changelogs and the :ref:`Porting Guides <porting_guides>`.
.. code-block:: yaml
breaking_changes:
- ansible-test - automatic installation of requirements for cloud test plugins no longer occurs. The affected test plugins are ``aws``, ``azure``, ``cs``, ``hcloud``, ``nios``, ``opennebula``, ``openshift`` and ``vcenter``. Collections should instead use one of the supported integration test requirements files, such as the ``tests/integration/requirements.txt`` file (https://github.com/ansible/ansible/pull/75605).
**major_changes**
Major changes to ansible-core or a collection. SHOULD NOT include individual module or plugin changes. MUST include non-breaking changes that impact all or most of a collection (for example, updates to support a new SDK version across the collection). Major changes mean the user can CHOOSE to make a change when they update but do not have to. Could be used to announce an important upcoming EOL or breaking change in a future release (ideally 6 months in advance, if known; see `this example <https://github.com/ansible-collections/community.general/blob/stable-1/CHANGELOG.rst#v1313>`_). Write in present tense and describe what is new. Optionally, include a "Previously..." sentence to help the user identify where old behavior should now change. Displayed in both the changelogs and the :ref:`Porting Guides <porting_guides>`.
.. code-block:: yaml
major_changes:
- ansible-test - all cloud plugins which use containers can now be used with all POSIX and Windows hosts. Previously the plugins did not work with Windows at all, and support for hosts created with the ``--remote`` option was inconsistent (https://github.com/ansible/ansible/pull/74216).
**minor_changes**
Minor changes to ansible-core, modules, or plugins. This includes new parameters added to modules, or non-breaking behavior changes to existing parameters, such as adding additional values to choices[]. Minor changes are enhancements, not bug fixes. Write in present tense.
.. code-block:: yaml
minor_changes:
- lineinfile - add warning when using an empty regexp (https://github.com/ansible/ansible/issues/29443).
**deprecated_features**
Features that have been deprecated and are scheduled for removal in a future release. Use past tense and include an alternative, where available, for what is being deprecated. Displayed in both the changelogs and the :ref:`Porting Guides <porting_guides>`.
.. code-block:: yaml
deprecated_features:
- include action - is deprecated in favor of ``include_tasks``, ``import_tasks`` and ``import_playbook`` (https://github.com/ansible/ansible/pull/71262).
**removed_features**
Features that were previously deprecated and are now removed. Use past tense and include an alternative, where available, for what was removed. Displayed in both the changelogs and the :ref:`Porting Guides <porting_guides>`.
.. code-block:: yaml
removed_features:
- _get_item() alias - removed from callback plugin base class which had been deprecated in favor of ``_get_item_label()`` (https://github.com/ansible/ansible/pull/70233).
**security_fixes**
Fixes that address CVEs or resolve security concerns. MUST use security_fixes for any CVEs. Use present tense. Include links to CVE information.
.. code-block:: yaml
security_fixes:
- set_options - do not include params in exception when a call to ``set_options`` fails. Additionally, block the exception that is returned from being displayed to stdout. (CVE-2021-3620).
**bugfixes**
Fixes that resolve issues. SHOULD NOT be used for minor enhancements (use ``minor_changes`` instead). Use past tense to describe the problem and present tense to describe the fix.
.. code-block:: yaml
bugfixes:
- ansible_play_batch - variable included unreachable hosts. Fix now saves unreachable hosts between plays by adding them to the PlayIterator's ``_play._removed_hosts`` (https://github.com/ansible/ansible/issues/66945).
**known_issues**
Known issues that are currently not fixed or will not be fixed. Use present tense and where available, use imperative tense for a workaround.
.. code-block:: yaml
known_issues:
- ansible-test - tab completion anywhere other than the end of the command with the new composite options provides incorrect results (https://github.com/kislyuk/argcomplete/issues/351).
Each changelog entry must contain a link to its issue between parentheses at the end. If there is no corresponding issue, the entry must contain a link to the PR itself.
Most changelog entries are ``bugfixes`` or ``minor_changes``. The changelog tool also supports ``trivial``, which are not listed in the actual changelog output but are used by collections repositories that require a changelog fragment for each PR.
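For collections that require a fragment for every PR, a purely internal change could use a ``trivial`` entry such as the following sketch (the scope and link are placeholders):

.. code-block:: yaml

   trivial:
     - mysql_user - refactor an internal helper, no user-visible change (https://github.com/ansible-collections/community.mysql/pull/456).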
.. _changelogs_how_to_format:
Changelog fragment entry format
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
When writing a changelog entry, use the following format:
.. code-block:: yaml
- scope - description starting with a lowercase letter and ending with a period at the very end. Multiple sentences are allowed (https://github.com/reference/to/an/issue or, if there is no issue, reference to a pull request itself).
The scope is usually a module or plugin name or group of modules or plugins, for example, ``lookup plugins``. While module names can (and should) be mentioned directly (``foo_module``), plugin names should always be followed by the type (``foo inventory plugin``).
For changes that are not really scoped (for example, which affect a whole collection), use the following format:
.. code-block:: yaml
- Description starting with an uppercase letter and ending with a dot at the very end. Multiple sentences are allowed (https://github.com/reference/to/an/issue or, if there is no issue, reference to a pull request itself).
Here are some examples:
.. code-block:: yaml
bugfixes:
- apt_repository - fix crash caused by ``cache.update()`` raising an ``IOError``
due to a timeout in ``apt update`` (https://github.com/ansible/ansible/issues/51995).
.. code-block:: yaml
minor_changes:
- lineinfile - add warning when using an empty regexp (https://github.com/ansible/ansible/issues/29443).
.. code-block:: yaml
bugfixes:
- copy - the module was attempting to change the mode of files for
remote_src=True even if mode was not set as a parameter. This failed on
filesystems which do not have permission bits (https://github.com/ansible/ansible/issues/29444).
You can find more example changelog fragments in the `changelog directory <https://github.com/ansible/ansible/tree/stable-2.12/changelogs/fragments>`_ for the 2.12 release.
After you have written the changelog fragment for your PR, commit the file and include it with the pull request.
.. _changelogs_how_to_format_playbooks:
Changelog fragment entry format for new playbooks
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
While new modules, plugins, and roles are mentioned automatically in the generated changelog, playbooks are not. To make sure they are mentioned, a changelog fragment in a specific format is needed:
.. code-block:: yaml
# A new playbook:
add object.playbook:
- # This should be the short (non-FQCN) name of the playbook.
name: wipe_server
# The description should be in the same format as short_description for
# plugins and modules: it should start with an upper-case letter and
# not have a period at the end.
description: Wipes a server
.. _backport_process:
Backporting merged PRs in ``ansible-core``
===========================================
All ``ansible-core`` PRs must be merged to the ``devel`` branch first. After a pull request has been accepted and merged to the ``devel`` branch, the following instructions will help you create a pull request to backport the change to a previous stable branch.
We do **not** backport features.
.. note::
These instructions assume that:
* ``stable-2.15`` is the targeted release branch for the backport
* ``https://github.com/ansible/ansible.git`` is configured as a ``git remote`` named ``upstream``. If you do not use a ``git remote`` named ``upstream``, adjust the instructions accordingly.
* ``https://github.com/<yourgithubaccount>/ansible.git`` is configured as a ``git remote`` named ``origin``. If you do not use a ``git remote`` named ``origin``, adjust the instructions accordingly.
#. Prepare your devel, stable, and feature branches:
.. code-block:: shell
git fetch upstream
git checkout -b backport/2.15/[PR_NUMBER_FROM_DEVEL] upstream/stable-2.15
#. Cherry pick the relevant commit SHA from the devel branch into your feature branch, handling merge conflicts as necessary:
.. code-block:: shell
git cherry-pick -x [SHA_FROM_DEVEL]
#. Add a :ref:`changelog fragment <changelogs_how_to>` for the change, and commit it.
#. Push your feature branch to your fork on GitHub:
.. code-block:: shell
git push origin backport/2.15/[PR_NUMBER_FROM_DEVEL]
#. Submit the pull request for ``backport/2.15/[PR_NUMBER_FROM_DEVEL]`` against the ``stable-2.15`` branch
#. The Release Manager will decide whether to merge the backport PR before the next minor release. There isn't any need to follow up. Just ensure that the automated tests (CI) are green.
.. note::
The branch name ``backport/2.15/[PR_NUMBER_FROM_DEVEL]`` is somewhat arbitrary but conveys meaning about the purpose of the branch. This branch name format is not required, but it can be helpful, especially when making multiple backport PRs for multiple stable branches.
.. note::
If you prefer, you can use CPython's cherry-picker tool (``pip install --user 'cherry-picker >= 1.3.2'``) to backport commits from devel to stable branches in Ansible. Take a look at the `cherry-picker documentation <https://pypi.org/p/cherry-picker#cherry-picking>`_ for details on installing, configuring, and using it.

View File

@ -1,243 +0,0 @@
.. _community_documentation_contributions:
*****************************************
Contributing to the Ansible Documentation
*****************************************
Ansible has a lot of documentation and a small team of writers. Community support helps us keep up with new features, fixes, and changes.
Improving the documentation is an easy way to make your first contribution to the Ansible project. You do not have to be a programmer, since most of our documentation is written in YAML (module documentation) or `reStructuredText <https://docutils.sourceforge.io/rst.html>`_ (rST). Some collection-level documentation is written in a subset of `Markdown <https://github.com/ansible/ansible/issues/68119#issuecomment-596723053>`_. If you are using Ansible, you already use YAML in your playbooks. rST and Markdown are mostly just text. You do not even need git experience, if you use the ``Edit on GitHub`` option.
If you find a typo, a broken example, a missing topic, or any other error or omission on this documentation website, let us know. Here are some ways to support Ansible documentation:
.. contents::
:local:
Editing docs directly on GitHub
===============================
For typos and other quick fixes, you can edit most of the documentation right from the site. Look at the top right corner of this page. That ``Edit on GitHub`` link is available on all the guide pages in the documentation. If you have a GitHub account, you can submit a quick and easy pull request this way.
.. note::
The source files for individual collection plugins exist in their respective repositories. Follow the link to the collection on Galaxy to find where the repository is located and any guidelines on how to contribute to that collection.
To submit a documentation PR from docs.ansible.com with ``Edit on GitHub``:
#. Click on ``Edit on GitHub``.
#. If you don't already have a fork of the ansible repo on your GitHub account, you'll be prompted to create one.
#. Fix the typo, update the example, or make whatever other change you have in mind.
#. Enter a commit message in the first rectangle under the heading ``Propose file change`` at the bottom of the GitHub page. The more specific, the better. For example, "fixes typo in my_module description". You can put more detail in the second rectangle if you like. Leave the ``+label: docsite_pr`` there.
#. Submit the suggested change by clicking on the green "Propose file change" button. GitHub will handle branching and committing for you, and open a page with the heading "Comparing Changes".
#. Click on ``Create pull request`` to open the PR template.
#. Fill out the PR template, including as much detail as appropriate for your change. You can change the title of your PR if you like (by default it's the same as your commit message). In the ``Issue Type`` section, delete all lines except the ``Docs Pull Request`` line.
#. Submit your change by clicking on ``Create pull request`` button.
#. Be patient while Ansibot, our automated script, adds labels, pings the docs maintainers, and kicks off a CI testing run.
#. Keep an eye on your PR - the docs team may ask you for changes.
Reviewing or solving open issues
================================
Review or solve open documentation issues for:
- `Ansible projects <https://github.com/search?q=user%3Aansible+user%3Aansible-community+label%3Adocs+state%3Aopen+type%3Aissue&type=Issues>`_
- `Ansible collections <https://github.com/search?q=user%3Aansible-collections+label%3Adocs+state%3Aopen+type%3Aissue&type=Issues>`_
Reviewing open PRs
==================
Review open documentation pull requests for:
- Ansible `projects <https://github.com/search?q=user%3Aansible+user%3Aansible-community+label%3Adocs+state%3Aopen+type%3Apr>`_
- Ansible `collections <https://github.com/search?q=user%3Aansible-collections+label%3Adocs+state%3Aopen+type%3Apr>`_
To add a helpful review, please:
- Test the change if applicable.
- Think if it can be made better (including wording, structure, fixing typos and so on).
- Suggest improvements.
- Approve the change with the ``looks good to me`` comment.
Opening a new issue and/or PR
=============================
If the problem you have noticed is too complex to fix with the ``Edit on GitHub`` option, and no open issue or PR already documents the problem, please open an issue and/or a PR on the correct underlying repo - ``ansible/ansible`` for most pages that are not plugin or module documentation. If the documentation page has no ``Edit on GitHub`` option, check if the page is for a module within a collection. If so, follow the link to the collection on Galaxy and select the ``repo`` button in the upper right corner to find the source repository for that collection and module. The Collection README file should contain information on how to contribute to that collection, or report issues.
A great documentation GitHub issue or PR includes:
- a specific title
- a detailed description of the problem (even for a PR - it's hard to evaluate a suggested change unless we know what problem it's meant to solve)
- links to other information (related issues/PRs, external documentation, pages on docs.ansible.com, and so on)
Verifying your documentation PR
================================
If you make multiple changes to the documentation on ``ansible/ansible``, or add more than a line to it, before you open a pull request, please:
#. Check that your text follows our :ref:`style_guide`.
#. Test your changes for rST errors.
#. Build the page, and preferably the entire documentation site, locally.
.. note::
The following sections apply to documentation sourced from the ``ansible/ansible`` repo and do not apply to documentation from an individual collection. See the collection README file for details on how to contribute to that collection.
Setting up your environment to build documentation locally
----------------------------------------------------------
To build documentation locally, ensure you have a working :ref:`development environment <environment_setup>`.
To work with documentation on your local machine, you need to have Python 3.9 or greater and install the `Ansible dependencies`_ and `documentation dependencies`_, which are listed in two files to make installation easier:
.. _Ansible dependencies: https://github.com/ansible/ansible/blob/devel/requirements.txt
.. _documentation dependencies: https://github.com/ansible/ansible/blob/devel/docs/docsite/requirements.txt
Drop the ``--user`` option in the following commands if you use a virtual environment (venv/virtualenv).
#. Upgrade pip before installing dependencies (recommended).
.. code-block:: bash
pip install --user --upgrade pip
#. Install Ansible dependencies.
.. code-block:: bash
pip install --user -r requirements.txt
#. Install either the unpinned or tested documentation dependencies.
.. code-block:: bash
pip install --user -r docs/docsite/requirements.txt # This file installs unpinned versions that can cause problems with the Ansible docs build.
pip install --user -r test/sanity/code-smell/docs-build.requirements.txt # This file installs tested dependency versions that are used by CI.
.. note::
You may need to install these general pre-requisites separately on some systems:
- ``gcc``
- ``libyaml``
- ``make``
- ``pyparsing``
- ``wheel``
- ``six``
On macOS with Xcode, you may need to install ``six`` and ``pyparsing`` with ``--ignore-installed`` to get versions that work with ``sphinx``.
.. note::
After checking out ``ansible/ansible``, make sure the ``docs/docsite/rst`` directory has strict enough permissions. It should only be writable by the owner's account. If your default ``umask`` is not 022, you can use ``chmod go-w docs/docsite/rst`` to set the permissions correctly in your new branch. Optionally, you can set your ``umask`` to 022 to make all newly created files on your system (including those created by ``git clone``) have the correct permissions.
.. _testing_documentation_locally:
Testing the documentation locally
---------------------------------
To test an individual file for rST errors:
.. code-block:: bash
rstcheck changed_file.rst
Building the documentation locally
----------------------------------
Building the documentation is the best way to check for errors and review your changes. Once ``rstcheck`` runs with no errors, navigate to ``ansible/docs/docsite`` and then build the page(s) you want to review.
.. note::
If building on macOS with Python 3.8 or later, you must use Sphinx >= 2.2.2. See `#6803 <https://github.com/sphinx-doc/sphinx/pull/6879>`_ for details.
Building a single rST page
^^^^^^^^^^^^^^^^^^^^^^^^^^
To build a single rST file with the make utility:
.. code-block:: bash
make htmlsingle rst=path/to/your_file.rst
For example:
.. code-block:: bash
make htmlsingle rst=community/documentation_contributions.rst
This process compiles all the links but provides minimal log output. If you're writing a new page or want more detailed log output, refer to the instructions on :ref:`build_with_sphinx-build`
.. note::
``make htmlsingle`` adds ``rst/`` to the beginning of the path you provide in ``rst=``, so you can't type the filename with autocomplete. Here are the error messages you will see if you get this wrong:
- If you run ``make htmlsingle`` from the ``docs/docsite/rst/`` directory: ``make: *** No rule to make target `htmlsingle'. Stop.``
- If you run ``make htmlsingle`` from the ``docs/docsite/`` directory with the full path to your rST document: ``sphinx-build: error: cannot find files ['rst/rst/community/documentation_contributions.rst']``.
Building all the rST pages
^^^^^^^^^^^^^^^^^^^^^^^^^^
To build all the rST files without any module documentation:
.. code-block:: bash
MODULES=none make webdocs
Building module docs and rST pages
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
To build documentation for a few modules included in ``ansible/ansible`` plus all the rST files, use a comma-separated list:
.. code-block:: bash
MODULES=one_module,another_module make webdocs
To build all the module documentation plus all the rST files:
.. code-block:: bash
make webdocs
.. _build_with_sphinx-build:
Building rST files with ``sphinx-build``
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Advanced users can build one or more rST files with the sphinx utility directly. ``sphinx-build`` returns misleading ``undefined label`` warnings if you only build a single page, because it does not create internal links. However, ``sphinx-build`` returns more extensive syntax feedback, including warnings about indentation errors and ``x-string without end-string`` warnings. This can be useful, especially if you're creating a new page from scratch. To build a page or pages with ``sphinx-build``:
.. code-block:: bash
sphinx-build [options] sourcedir outdir [filenames...]
You can specify filenames, or ``-a`` for all files, or omit both to compile only new/changed files.
For example:
.. code-block:: bash
sphinx-build -b html -c rst/ rst/dev_guide/ _build/html/dev_guide/ rst/dev_guide/developing_modules_documenting.rst
Running the final tests
^^^^^^^^^^^^^^^^^^^^^^^
When you submit a documentation pull request, automated tests are run. Those same tests can be run locally. To do so, navigate to the repository's top directory and run:
.. code-block:: bash
make clean &&
bin/ansible-test sanity --test docs-build &&
bin/ansible-test sanity --test rstcheck
Unfortunately, leftover rST files from previous documentation builds can occasionally confuse these tests. It is therefore safest to run them on a clean copy of the repository, which is the purpose of ``make clean``. If you type these three lines one at a time and manually check the success of each, you do not need the ``&&``.
Joining the documentation working group
=======================================
The Documentation Working Group (DaWGs) meets weekly on Tuesdays in the Docs chat (using `Matrix <https://matrix.to/#/#docs:ansible.im>`_ or using IRC at `irc.libera.chat <https://libera.chat/>`_). For more information, including links to our agenda and a calendar invite, please visit the `working group page in the community repo <https://github.com/ansible/community/wiki/Docs>`_.
.. seealso::
:ref:`More about testing module documentation <testing_module_documentation>`
:ref:`More about documenting modules <module_documenting>`

View File

@ -1,33 +0,0 @@
.. _community_getting_started:
****************
Getting started
****************
Welcome and thank you for getting more involved with the Ansible community. Here are some ways you can get started.
.. toctree::
:maxdepth: 2
code_of_conduct
contributor_license_agreement
communication
how_can_I_help
Other ways to get involved
==========================
Here are some other ways to connect with the Ansible community:
* Find an `Ansible Meetup near me <https://www.meetup.com/topics/ansible/>`_
* Learn more about Ansible:
* `Read books <https://www.ansible.com/resources/ebooks>`_.
* `Get certified <https://www.ansible.com/products/training-certification>`_.
* `Attend events <https://www.ansible.com/community/events>`_.
* `Review getting started guides <https://www.ansible.com/resources/get-started>`_.
* `Watch videos <https://www.ansible.com/resources/videos>`_ - includes Ansible Automates, AnsibleFest & webinar recordings.
* See where `new releases are announced <https://groups.google.com/forum/#!forum/ansible-announce>`_
View File
@ -1,32 +0,0 @@
.. _github_admins:
*************
GitHub Admins
*************
.. contents:: Topics
GitHub Admins have more permissions on GitHub than normal contributors or even committers. There are
a few responsibilities that come with that increased power.
Adding and removing committers
==============================
The Ansible Team will periodically review who is actively contributing to Ansible to grant or revoke
contributors' ability to commit on their own. GitHub Admins are the people who have the power to
actually manage the GitHub permissions.
Changing branch permissions for releases
========================================
When we make releases, we require contributors to go through a :ref:`release manager <release_managers>` to push commits to that branch. The GitHub Admins are responsible for restricting the branch so that only the Release Manager can commit to it when the release process reaches that stage, and for reopening the branch once the release has been made. The Release Manager will let the GitHub Admin know when this needs to be done.
.. seealso:: The `GitHub Admin Process Docs
<https://docs.google.com/document/d/1gWPtxNX4J39uIzwqQWLIsTZ1dY_AwEZzAd9bJ4XtZso/edit#heading=h.2wezayw9xsqz>`_ for instructions
on how to change branch permissions.
View File
@ -1,97 +0,0 @@
.. _how_can_i_help:
***************
How can I help?
***************
.. contents::
:local:
Thanks for being interested in helping the Ansible project!
There are many ways to help the Ansible project...but first, please read and understand the :ref:`code_of_conduct`.
Become a power user
===================
A great way to help the Ansible project is to become a power user:
* Use Ansible everywhere you can
* Take tutorials and classes
* Read the :ref:`official documentation <ansible_documentation>`
* Study some of the `many excellent books <https://www.amazon.com/s/ref=nb_sb_ss_c_2_7?url=search-alias%3Dstripbooks&field-keywords=ansible&sprefix=ansible%2Caps%2C260>`_ about Ansible
* `Get certified <https://www.ansible.com/products/training-certification>`_.
When you become a power user, your ability and opportunities to help the Ansible project in other ways will multiply quickly.
Ask and answer questions online
===============================
There are many forums online where Ansible users ask and answer questions. Reach out and communicate with your fellow Ansible users.
You can find the official :ref:`Ansible communication channels <communication>`.
Review, fix, and maintain the documentation
===========================================
Typos are everywhere, even in the Ansible documentation. We work hard to keep the documentation up-to-date, but you may also find outdated examples. We offer easy ways to :ref:`report and/or fix documentation errors <community_documentation_contributions>`.
.. _ansible_community_meetup:
Participate in your local meetup
================================
There are Ansible meetups `all over the world <https://www.meetup.com/topics/ansible/>`_. Join your local meetup. Attend regularly. Ask good questions. Volunteer to give a presentation about how you use Ansible.
If there is no meetup near you, we are happy to help you `start one <https://www.ansible.com/community/events/ansible-meetups>`_.
File and verify issues
======================
All software has bugs, and Ansible is no exception. When you find a bug, you can help tremendously by telling us about it:
* Filing :ref:`issues for ansible-core <reporting_bugs_and_features>`.
* Filing :ref:`issues for collections <reporting_bugs_in_collections>`.
If the bug you found already exists in an issue, you can help by verifying the behavior of the reported bug with a comment in that issue, or by reporting any additional information.
Review and submit pull requests
===============================
As you become more familiar with how Ansible works, you may be able to fix issues or develop new features yourself. If you think you have a fix for a bug in Ansible, or if you have a new feature that you would like to share with millions of Ansible users, read all about the :ref:`development process <community_development_process>` to learn how to get your code accepted into Ansible.
You can also get started with solving GitHub issues labeled with the ``easyfix`` and ``good_first_issue`` labels for:
- `Ansible collections <https://github.com/search?q=user%3Aansible-collections+label%3Aeasyfix%2C%22good+first+issue%22+state%3Aopen&type=Issues>`_
- `All other Ansible projects <https://github.com/search?q=user%3Aansible+user%3Aansible-community+label%3Aeasyfix%2C%22good+first+issue%22+state%3Aopen&type=Issues>`_
When you choose an issue to work on, add a comment directly on the GitHub issue to say you are looking at it and let others know to avoid conflicting work.
You can also ask for help in a comment if you need it.
Another good way to help is to review pull requests that other Ansible users have submitted. Ansible core keeps a full list of `open pull requests by file <https://ansible.sivel.net/pr/byfile.html>`_, so if a particular module or plugin interests you, you can easily keep track of all the relevant new pull requests and provide testing or feedback. Alternatively, you can review the pull requests for any collections that interest you. Click :guilabel:`Issue tracker` on the collection documentation page to find the issues and PRs for that collection.
Become a collection maintainer
==============================
Once you have learned about the development process and have contributed code to a collection, we encourage you to become a maintainer of that collection. There are hundreds of modules in dozens of Ansible collections, and the vast majority of them are written and maintained entirely by members of the Ansible community.
See :ref:`collection maintainer guidelines <maintainers>` to learn more about the responsibilities of being an Ansible collection maintainer.
.. _community_working_groups:
Join a working group
====================
Working groups are a way for Ansible community members to self-organize around particular topics of interest. We have working groups around various topics. To join or create a working group, please read the :ref:`Ansible Working Groups<working_group_list>`.
Teach Ansible to others
=======================
We are working on a standardized `Ansible workshop <https://ansible.github.io/workshops/>`_ that can provide a good hands-on introduction to Ansible usage and concepts.
Social media
============
If you like Ansible and just want to spread the good word, feel free to share on your social media platform of choice, and let us know by using ``@ansible`` or ``#ansible``. We'll be looking for you.
View File
@ -1,24 +0,0 @@
.. _ansible_community_guide:
***********************
Ansible Community Guide
***********************
.. note::
**Making Open Source More Inclusive**
Red Hat is committed to replacing problematic language in our code, documentation, and web properties. We are beginning with these four terms: master, slave, blacklist, and whitelist. We ask that you open an issue or pull request if you come upon a term that we have missed. For more details, see `our CTO Chris Wright's message <https://www.redhat.com/en/blog/making-open-source-more-inclusive-eradicating-problematic-language>`_.
Welcome to the Ansible Community Guide!
The purpose of this guide is to teach you everything you need to know about being a contributing member of the Ansible community. All types of contributions are welcome and necessary for Ansible's continued success.
.. _community_toc:
.. toctree::
:maxdepth: 2
getting_started
contributor_path
View File
@ -1,19 +0,0 @@
.. _maintainers:
***************************************
Guidelines for collection maintainers
***************************************
Thank you for being a community collection maintainer. This guide offers an overview of your responsibilities as a maintainer along with resources for additional information. The Ansible community hopes that you will find that maintaining a collection is as rewarding for you as having the collection content is for the wider community.
.. toctree::
:maxdepth: 1
maintainers_guidelines
maintainers_workflow
collection_contributors/collection_releasing
In addition to the information here, module maintainers should be familiar with:
* :ref:`General Ansible community development practices <ansible_community_guide>`
* Documentation on :ref:`module development <developing_modules>`
View File
@ -1,162 +0,0 @@
.. _maintainer_requirements:
Maintainer responsibilities
===========================
.. contents::
:depth: 1
:local:
An Ansible collection maintainer is a contributor trusted by the community who makes significant and regular contributions to the project and who has shown themselves as a specialist in the related area.
Collection maintainers have :ref:`extended permissions<collection_maintainers>` in the collection scope.
Ansible collection maintainers provide feedback, responses, or actions on pull requests or issues to the collection(s) they maintain in a reasonably timely manner. They can also update the contributor guidelines for that collection, in collaboration with the Ansible community team and the other maintainers of that collection.
In general, collection maintainers:
- Act in accordance with the :ref:`code_of_conduct`.
- Subscribe to the collection repository they maintain (click :guilabel:`Watch > All activity` in GitHub).
- Keep README, development guidelines, and other general collections :ref:`maintainer_documentation` relevant.
- Review and commit changes made by other contributors.
- :ref:`Backport <Backporting>` changes to stable branches.
- Address or assign issues to appropriate contributors.
- :ref:`Release collections <Releasing>`.
- Ensure that collections adhere to the :ref:`collections_requirements`.
- Track changes announced in `News for collection contributors and maintainers <https://github.com/ansible-collections/news-for-maintainers>`_ and update a collection in accordance with these changes.
- Subscribe and submit news to the `Bullhorn newsletter <https://github.com/ansible/community/wiki/News#the-bullhorn>`_.
- :ref:`Build a healthy community <expanding_community>` to increase the number of active contributors and maintainers around collections.
- Revise these guidelines to improve the maintainer experience for yourself and others.
Multiple maintainers can divide responsibilities among each other.
How to become a maintainer
--------------------------
A person interested in becoming a maintainer and satisfying the :ref:`requirements<maintainer_requirements>` may either self-nominate or be nominated by another maintainer.
To nominate a candidate, create a GitHub issue in the relevant collection repository. If there is no response, the repository is not actively maintained, or the current maintainers do not have permissions to add the candidate, please create the issue in the `ansible/community <https://github.com/ansible/community>`_ repository.
Communicating as a collection maintainer
-----------------------------------------
Maintainers MUST subscribe to the `"Changes impacting collection contributors and maintainers" GitHub repo <https://github.com/ansible-collections/news-for-maintainers>`_ and the `Bullhorn newsletter <https://github.com/ansible/community/wiki/News#the-bullhorn>`_. If you have something important to announce through the newsletter (for example, recent releases), see the `Bullhorn's wiki page <https://github.com/ansible/community/wiki/News#the-bullhorn>`_ to learn how.
Collection contributors and maintainers should also communicate through:
* :ref:`communication_irc` appropriate to their collection, or if none exists, the general community and developer chat channels
* Mailing lists such as `ansible-announce <https://groups.google.com/d/forum/ansible-announce>`_ and `ansible-devel <https://groups.google.com/d/forum/ansible-devel>`_
* Collection project boards, issues, and GitHub discussions in corresponding repositories
* Quarterly Contributor Summits.
* Ansiblefest and local meetups.
See :ref:`communication` for more details on these communication channels.
.. _wg_and_real_time_chat:
Establishing working group communication
----------------------------------------------------------------
Working groups depend on efficient, real-time communication.
Project maintainers can use the following techniques to establish communication for working groups:
* Find an existing :ref:`working_group_list` that is similar to your project and join the conversation.
* `Request <https://github.com/ansible/community/blob/main/WORKING-GROUPS.md>`_ a new working group for your project.
* `Create <https://hackmd.io/@ansible-community/community-matrix-faq#How-do-I-create-a-public-community-room>`_ a public chat for your working group or `ask <https://github.com/ansible/community/issues/new>`_ the community team.
* Provide working group details and links to chat rooms in the contributor section of your project ``README.md``.
* Encourage contributors to join the chats and add themselves to the working group.
See the :ref:`Communication guide <communication_irc>` to learn more about real-time chat.
Community Topics
----------------
The Community and the `Steering Committee <https://docs.ansible.com/ansible/devel/community/steering/community_steering_committee.html>`_ asynchronously discuss and vote on the `Community Topics <https://github.com/ansible-community/community-topics/issues>`_ which impact the whole project or its parts including collections and packaging.
Share your opinion and vote on the topics to help the community make the best decisions.
.. _expanding_community:
Contributor Summits
-------------------
The quarterly Ansible Contributor Summit is a global event that provides our contributors a great opportunity to meet each other, communicate, share ideas, and see that there are other real people behind the messages on Matrix, Libera Chat IRC, or GitHub. This gives a sense of community. Watch the `Bullhorn newsletter <https://github.com/ansible/community/wiki/News#the-bullhorn>`_ for information about when the next Contributor Summit will happen, invite contributors you know, and take part in the event together.
Weekly community Matrix/IRC meetings
------------------------------------
The Community and the Steering Committee come together at weekly meetings in the ``#ansible-community`` `Libera.Chat IRC <https://docs.ansible.com/ansible/devel/community/communication.html#ansible-community-on-irc>`_ channel or in the bridged `#community:ansible.com <https://matrix.to/#/#community:ansible.com>`_ room on `Matrix <https://docs.ansible.com/ansible/devel/community/communication.html#ansible-community-on-matrix>`_ to discuss important project questions. Join us! Here is our `schedule <https://github.com/ansible/community/blob/main/meetings/README.md#schedule>`_.
Expanding the collection community
===================================
.. note::
If you discover good ways to expand a community or make it more robust, edit this section with your ideas to share with other collection maintainers.
Here are some ways you can expand the community around your collection:
* Give :ref:`newcomers a positive first experience <collection_new_contributors>`.
* Invite contributors to join :ref:`real-time chats <wg_and_real_time_chat>` related to your project.
* Have :ref:`good documentation <maintainer_documentation>` with guidelines for new contributors.
* Make people feel welcome personally and individually.
* Use labels to show easy fixes and leave non-critical easy fixes to newcomers and offer to mentor them.
* Be responsive in issues, PRs and other communication.
* Conduct PR days regularly.
* Maintain a zero-tolerance policy towards behavior violating the :ref:`code_of_conduct`.
* Put information about how people can register code of conduct violations in your ``README`` and ``CONTRIBUTING`` files.
* Include quick ways contributors can help and other documentation in your ``README``.
* Add and keep updated the ``CONTRIBUTORS`` and ``MAINTAINERS`` files.
* Create a pinned issue to announce that the collection welcomes new maintainers and contributors.
* Look for new maintainers among active contributors.
* Announce that your collection welcomes new maintainers.
* Take part and congratulate new maintainers in Contributor Summits.
.. _collection_new_contributors:
Encouraging new contributors
-----------------------------
Easy-fix items are the best way to attract and mentor new contributors. You should triage incoming issues to mark them with labels such as ``easyfix``, ``waiting_on_contributor``, and ``docs`` where appropriate. Do not fix these trivial, non-critical bugs yourself. Instead, mentor a person who wants to contribute.
For some easy-fix issues, you could ask the issue reporter whether they want to fix the issue themselves, providing a link to a quick start guide for creating PRs.
Conduct pull request days regularly. You could plan PR days, for example, on the last Friday of every month when you and other maintainers go through all open issues and pull requests focusing on old ones, asking people if you can help, and so on. If there are pull requests that look abandoned (for example, there is no response on your help offers since the previous PR day), announce that anyone else interested can complete the pull request.
Promote active contributors satisfying :ref:`requirements<maintainer_requirements>` to maintainers. Revise contributors' activity regularly.
If your collection has found new maintainers, announce that fact in the `Bullhorn newsletter <https://github.com/ansible/community/wiki/News#the-bullhorn>`_ and during the next Contributor Summit, congratulating and thanking them for the work done. You can mention all the people promoted since the previous summit. Remember to invite the other maintainers to the Summit in advance.
Some other general guidelines to encourage contributors:
* Welcome the author and thank them for the issue or pull request.
* If there is a non-crucial easy-fix bug reported, politely ask the author to fix it themselves providing a link to :ref:`collection_quickstart`.
* When suggesting changes, try to use questions, not statements.
* When suggesting mandatory changes, do it as politely as possible providing documentation references.
* If your suggestion is optional or a matter of personal preference, please say it explicitly.
* When asking for adding tests or for complex code refactoring, say that the author is welcome to ask for clarifications and help if they need it.
* If somebody suggests a good idea, mention it or put a thumbs up.
* After merging, thank the author and reviewers for their time and effort.
See the :ref:`review_checklist` for a list of items to check before you merge a PR.
.. _maintainer_documentation:
Maintaining good collection documentation
==========================================
Maintainers look after the collection documentation to ensure it matches the :ref:`style_guide`. This includes keeping the following documents accurate and updated regularly:
* Collection module and plugin documentation that adheres to the :ref:`Ansible documentation format <module_documenting>`.
* Collection user guides that follow the :ref:`Collection documentation format <collections_doc_dir>`.
* Repository files that include at least a ``README`` and ``CONTRIBUTING`` file.
A good ``README`` includes a description of the collection, a link to the :ref:`code_of_conduct`, and details on how to contribute or a pointer to the ``CONTRIBUTING`` file. If your collection is part of Ansible (that is, shipped with the Ansible package), highlight that fact at the top of the collection's ``README``.
The ``CONTRIBUTING`` file includes all the details or links to the details on how a new or continuing contributor can contribute to this collection. The ``CONTRIBUTING`` file should include:
* Information or links to new contributor guidelines, such as a quick start on opening PRs.
* Information or links to contributor requirements, such as unit and integration test requirements.
You can optionally include a ``CONTRIBUTORS`` and ``MAINTAINERS`` file to list the collection contributors and maintainers.
View File
@ -1,95 +0,0 @@
.. _maintainers_workflow:
Backporting and Ansible inclusion
==================================
Each collection community can set its own rules and workflow for managing pull requests, bug reports, documentation issues, and feature requests, as well as adding and replacing maintainers. Maintainers review and merge pull requests following the:
* :ref:`code_of_conduct`
* :ref:`maintainer_requirements`
* :ref:`Committer guidelines <committer_general_rules>`
* :ref:`PR review checklist<review_checklist>`
There can be two kinds of maintainers: :ref:`collection_maintainers` and :ref:`module_maintainers`.
.. _collection_maintainers:
Collection maintainers
----------------------
Collection-scope maintainers are contributors who have the ``write`` or higher access level in a collection. They have commit rights and can merge pull requests, among other permissions.
When a collection maintainer considers a contribution to a file significant enough
(for example, fixing a complex bug, adding a feature, providing regular reviews, and so on),
they can invite the author to become a module maintainer.
.. _module_maintainers:
Module maintainers
------------------
Module-scope maintainers exist in collections that have the `collection bot <https://github.com/ansible-community/collection_bot>`_,
for example, `community.general <https://github.com/ansible-collections/community.general>`_
and `community.network <https://github.com/ansible-collections/community.network>`_.
Being a module maintainer is the stage prior to becoming a collection maintainer. Module maintainers are contributors who are listed in ``.github/BOTMETA.yml``. The scope can be any file (for example, a module or plugin), directory, or repository. Because in most cases the scope is a module or group of modules, we call these contributors module maintainers. The collection bot notifies module maintainers when issues/pull requests related to files they maintain are created.
Module maintainers have indirect commit rights implemented through the `collection bot <https://github.com/ansible-community/collection_bot>`_.
When two module maintainers comment with the keywords ``shipit``, ``LGTM``, or ``+1`` on a pull request
which changes a module they maintain, the collection bot merges the pull request automatically.
For more information about the collection bot and its interface,
see the `Collection bot overview <https://github.com/ansible-community/collection_bot/blob/main/ISSUE_HELP.md>`_.
Releasing a collection
----------------------
Collection maintainers are responsible for releasing new versions of a collection. Generally, releasing a collection consists of:
#. Planning and announcement.
#. Generating a changelog.
#. Creating a release git tag and pushing it.
#. Automatically publishing the release tarball on `Ansible Galaxy <https://galaxy.ansible.com/>`_ through the `Zuul dashboard <https://dashboard.zuul.ansible.com/t/ansible/builds?pipeline=release>`_.
#. Final announcement.
#. Optionally, `file a request to include a new collection into the Ansible package <https://github.com/ansible-collections/ansible-inclusion>`_.
See :ref:`releasing_collections` for details.
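As a minimal sketch of the changelog and tagging steps, assuming the collection uses ``antsibull-changelog``, a ``main`` branch, and a remote named ``origin`` (the version number is only an example):

.. code-block:: bash

   # Generate CHANGELOG.rst from the collected changelog fragments
   antsibull-changelog release --version 1.2.0

   # Commit the changelog, then create and push the release tag
   git commit -am "Release 1.2.0"
   git tag -a 1.2.0 -m "Release 1.2.0"
   git push origin main 1.2.0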
.. _Backporting:
Backporting
------------
Collection maintainers backport merged pull requests to stable branches
following the `semantic versioning <https://semver.org/>`_ and release policies of the collections.
The manual backport process is similar to the :ref:`ansible-core backporting guidelines <backport_process>`.
For convenience, backporting can be implemented automatically using GitHub bots (for example, with the `Patchback app <https://github.com/apps/patchback>`_) and labeling as it is done in `community.general <https://github.com/ansible-collections/community.general>`_ and `community.network <https://github.com/ansible-collections/community.network>`_.
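As a rough sketch, a manual backport of an already merged commit could look like the following, assuming a ``stable-1`` branch, an ``upstream`` remote pointing at the collection repository, and a placeholder commit hash:

.. code-block:: bash

   git fetch upstream
   git checkout -b backport/stable-1/pr-1234 upstream/stable-1   # branch name is an example
   git cherry-pick -x <commit-hash-from-main>                    # placeholder hash
   git push origin backport/stable-1/pr-1234                     # then open a PR against stable-1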
.. _including_collection_ansible:
Including a collection in Ansible
-----------------------------------
If a collection is not included in Ansible (not shipped with Ansible package), maintainers can submit the collection for inclusion by creating a discussion under the `ansible-collections/ansible-inclusion repository <https://github.com/ansible-collections/ansible-inclusion>`_. For more information, see the `repository's README <https://github.com/ansible-collections/ansible-inclusion/blob/main/README.md>`_, and the :ref:`Ansible community package collections requirements <collections_requirements>`.
Stepping down as a collection maintainer
===========================================
Times change, and so may your ability to continue as a collection maintainer. We ask that you do not step down silently.
If you feel you don't have time to maintain your collection anymore, you should:
- Inform other maintainers about it.
- If the collection is under the ``ansible-collections`` organization, also announce it through the relevant :ref:`communication_irc`, the ``community`` chat channels on IRC or Matrix, or by email to ``ansible-community@redhat.com``.
- Look at active contributors in the collection to find new maintainers among them. Discuss the potential candidates with other maintainers or with the community team.
- If you failed to find a replacement, create a pinned issue in the collection, announcing that the collection needs new maintainers.
- Make the same announcement through the `Bullhorn newsletter <https://github.com/ansible/community/wiki/News#the-bullhorn>`_.
- Please be around to discuss potential candidates found by other maintainers or by the community team.
Remember, this is a community, so you can come back at any time in the future.
View File
@ -1,109 +0,0 @@
.. _other_tools_and_programs:
************************
Other Tools and Programs
************************
.. contents::
:local:
The Ansible community uses a range of tools for working with the Ansible project. This is a list of some of the most popular of these tools.
If you know of any other tools that should be added, this list can be updated by clicking "Edit on GitHub" on the top right of this page.
Popular editors
===============
Emacs
-----
A free, open-source text editor and IDE that supports auto-indentation, syntax highlighting, and a built-in terminal shell (among other things).
* `yaml-mode <https://github.com/yoshiki/yaml-mode>`_ - YAML highlighting and syntax checking.
* `jinja2-mode <https://github.com/paradoxxxzero/jinja2-mode>`_ - Jinja2 highlighting and syntax checking.
* `magit-mode <https://github.com/magit/magit>`_ - Git porcelain within Emacs.
* `lsp-mode <https://emacs-lsp.github.io/lsp-mode/page/lsp-ansible/>`_ - Ansible syntax highlighting, auto-completion and diagnostics.
PyCharm
-------
A full IDE (integrated development environment) for Python software development. It ships with everything you need to write Python scripts and complete software, including support for YAML syntax highlighting. It is a little overkill for writing roles/playbooks, but it can be a very useful tool if you write modules and submit code for Ansible. It can also be used to debug ``ansible-core``. For more information, see `PyCharm <https://www.jetbrains.com/pycharm/>`_.
Sublime
-------
A closed-source, subscription GUI text editor. You can customize the GUI with themes and install packages for language highlighting and other refinements. You can install Sublime on Linux, macOS and Windows. Useful Sublime plugins include:
* `GitGutter <https://packagecontrol.io/packages/GitGutter>`_ - shows information about files in a git repository.
* `SideBarEnhancements <https://packagecontrol.io/packages/SideBarEnhancements>`_ - provides enhancements to the operations on the sidebar for files and folders.
* `Sublime Linter <https://packagecontrol.io/packages/SublimeLinter>`_ - a code-linting framework for Sublime Text 3.
* `Pretty YAML <https://packagecontrol.io/packages/Pretty%20YAML>`_ - prettifies YAML for Sublime Text 2 and 3.
* `Yamllint <https://packagecontrol.io/packages/SublimeLinter-contrib-yamllint>`_ - a Sublime wrapper around yamllint.
vim
---
An open-source, free command-line text editor. Useful vim plugins include:
* `Ansible vim <https://github.com/pearofducks/ansible-vim>`_ - a vim syntax plugin for Ansible 2.x that supports YAML playbooks, Jinja2 templates, and Ansible's hosts files.
* `Ansible vim and neovim plugin <https://www.npmjs.com/package/@yaegassy/coc-ansible>`_ - a vim plugin (LSP client) for Ansible that supports autocompletion, syntax highlighting, hover, diagnostics, and goto support.
Visual Studio Code
------------------
An open-source, free GUI text editor created and maintained by Microsoft. Useful Visual Studio Code plugins include:
* `Ansible extension by Red Hat <https://marketplace.visualstudio.com/items?itemName=redhat.ansible>`_ - provides autocompletion, syntax highlighting, hover, diagnostics, goto support, and commands to run the ansible-playbook and ansible-navigator tools for both local and execution-environment setups.
* `YAML Support by Red Hat <https://marketplace.visualstudio.com/items?itemName=redhat.vscode-yaml>`_ - provides YAML support through yaml-language-server with built-in Kubernetes and Kedge syntax support.
.. note::
The Visual Studio Code Ansible extension is maintained by the Ansible community and Red Hat.
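If you use the ``code`` command-line launcher, these extensions can also be installed from a terminal, for example:

.. code-block:: bash

   code --install-extension redhat.ansible      # Ansible extension by Red Hat
   code --install-extension redhat.vscode-yaml  # YAML support by Red Hat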
Development tools
=================
Finding related issues and PRs
------------------------------
There are various ways to find existing issues and pull requests (PRs):
- `jctanner's Ansible Tools <https://github.com/jctanner/ansible-tools>`_ - miscellaneous collection of useful helper scripts for Ansible development.
.. _validate-playbook-tools:
Tools for validating playbooks
==============================
- `Ansible Lint <https://docs.ansible.com/ansible-lint/index.html>`_ - a highly configurable linter for Ansible playbooks.
- `Ansible Review <https://github.com/willthames/ansible-review>`_ - an extension of Ansible Lint designed for code review.
- `Molecule <https://molecule.readthedocs.io/en/latest/>`_ - a testing framework for Ansible plays and roles.
- `yamllint <https://yamllint.readthedocs.io/en/stable/>`__ - a command-line utility to check syntax validity including key repetition and indentation issues.
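For example, a typical local check might look like the following, assuming the tools are installed and you run them from the root of your project (``site.yml`` is a placeholder playbook name):

.. code-block:: bash

   pip install ansible-lint yamllint   # install the linters
   ansible-lint site.yml               # lint a playbook and the roles it uses
   yamllint .                          # check YAML syntax and style across the project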
Other tools
===========
- `Ansible Inventory Grapher <https://github.com/willthames/ansible-inventory-grapher>`_ - visually displays inventory inheritance hierarchies and at what level a variable is defined in inventory.
- `Ansible Shell <https://github.com/dominis/ansible-shell>`_ - an interactive shell for Ansible with built-in tab completion for all the modules.
- `Ansible Silo <https://github.com/groupon/ansible-silo>`_ - a self-contained Ansible environment by Docker.
- `Ansigenome <https://github.com/nickjj/ansigenome>`_ - a command line tool designed to help you manage your Ansible roles.
- `antsibull-changelog <https://github.com/ansible-community/antsibull-changelog>`_ - a changelog generator for Ansible collections.
- `antsibull-docs <https://github.com/ansible-community/antsibull-docs>`_ - generates docsites for collections and can validate collection documentation.
- `ARA <https://github.com/ansible-community/ara>`_ - ARA Records Ansible playbooks and makes them easier to understand and troubleshoot with a reporting API, UI and CLI.
- `Awesome Ansible <https://github.com/ansible-community/awesome-ansible>`_ - a collaboratively curated list of awesome Ansible resources.
- `nanvault <https://github.com/marcobellaccini/nanvault>`_ - a standalone tool to encrypt and decrypt files in the Ansible Vault format, featuring UNIX-style composability.
- `OpsTools-ansible <https://github.com/centos-opstools/opstools-ansible>`_ - uses Ansible to configure an environment that provides the support of `OpsTools <https://wiki.centos.org/SpecialInterestGroup/OpsTools>`_, namely centralized logging and analysis, availability monitoring, and performance monitoring.
- `ansibledb <https://github.com/nbentoumi/ansibledb>`_ - Flask API server that uses MongoDB to store Ansible reports and facts. Along with different query capabilities, this tool provides a web UI for viewing Ansible reports, facts, and logs.
View File
@ -1,81 +0,0 @@
.. _release_managers:
**************************
Release Manager Guidelines
**************************
.. contents:: Topics
The release manager's purpose is to ensure a smooth release. To achieve that goal, they need to
coordinate between:
* Developers with commit privileges on the `Ansible GitHub repository <https://github.com/ansible/ansible/>`_
* Contributors without commit privileges
* The community
* Ansible documentation team
Pre-releases: what and why
==========================
Pre-releases exist to draw testers. They give people who don't feel comfortable running from source
control a means to get an early version of the code to test and give us feedback. To ensure we get
good feedback about a release, we need to make sure all major changes in a release are put into
a pre-release. Testers must be given time to test those changes before the final release. Ideally we
want there to be sufficient time between pre-releases for people to install and test one version for
a span of time. Then they can spend more time using the new code than installing the latest
version.
The right length of time for a tester is probably around two weeks. However, for our three-to-four month
development cycle to work, we compress this down to one week; any less runs the risk
of people spending more time installing the code instead of running it. However, if there's a time
crunch (with a release date that cannot slip), it is better to release with new changes than to hold
back those changes to give people time to test between. People cannot test what is not released, so
we have to get those tarballs out there even if people feel they have to install more frequently.
Beta releases
-------------
In a beta release, we know there are still bugs. We will continue to accept fixes for these.
Although we review these fixes, sometimes they can be invasive or potentially destabilize other
areas of the code.
During the beta, we will no longer accept feature submissions.
Release candidates
------------------
In a release candidate, we've fixed all known blockers. Any remaining bugfixes are
ones that we are willing to leave out of the release. At this point we need user testing to
determine if there are any other blocker bugs lurking.
Blocker bugs generally are those that cause significant problems for users. Regressions are
more likely to be considered blockers because they will break present users' usage of Ansible.
The Release Manager will cherry-pick fixes for new release blockers. The release manager will also
choose whether to accept bugfixes for isolated areas of the code or defer those to the next minor
release. By themselves, non-blocker bugs will not trigger a new release; they will only make it
into the next major release if blocker bugs require that a new release be made.
The last RC should be as close to the final as possible. The following things may be changed:
* Version numbers are changed automatically and will differ as the pre-release tags are removed from
the versions.
* Tests and :file:`docs/docsite/` can differ if really needed, as they do not break runtime behavior. However, the release manager may still reject such changes, as they have the potential to cause breakage that will be visible during the release process.
.. note:: We want to specifically emphasize that code (in :file:`bin/`, :file:`lib/ansible/`, and
:file:`setup.py`) must be the same unless there are extraordinary extenuating circumstances. If
there are extenuating circumstances, the Release Manager is responsible for notifying groups
which would want to test the code.
Ansible release process
=======================
The release process is kept in a `separate document
<https://docs.google.com/document/d/10EWLkMesi9s_CK_GmbZlE_ZLhuQr6TBrdMLKo5dnMAI/edit#heading=h.ooo3izcel3cz>`_
so that it can be easily updated during a release. If you need access to edit this, please ask one
of the current release managers to add you.
View File
@ -1,59 +0,0 @@
.. _reporting_bugs_and_features:
**************************************
Reporting bugs and requesting features
**************************************
.. contents::
:local:
.. _reporting_bugs:
Reporting a bug
===============
Security bugs
-------------
Ansible practices responsible disclosure. To report security-related bugs, send an email to `security@ansible.com <mailto:security@ansible.com>`_ for an immediate response. Do not submit a ticket or post to any public groups.
Bugs in ansible-core
--------------------
Before reporting a bug, search in GitHub for `already reported issues <https://github.com/ansible/ansible/issues>`_ and `open pull requests <https://github.com/ansible/ansible/pulls>`_ to see if someone has already addressed your issue. Unsure if you found a bug? Report the behavior on the :ref:`mailing list or community chat first <communication>`.
Also, use the mailing list or chat to discuss whether the problem is in ``ansible-core`` or a collection, and for "how do I do this" type questions.
You need a free GitHub account to `report bugs <https://github.com/ansible/ansible/issues>`_ that affect:
- multiple plugins
- a plugin that remained in the ansible/ansible repo
- the overall functioning of Ansible
How to write a good bug report
------------------------------
If you find a bug, open an issue using the `issue template <https://github.com/ansible/ansible/issues/new?assignees=&labels=&template=bug_report.yml>`_.
Fill out the issue template as completely and as accurately as possible. Include:
* your Ansible version
* the expected behavior and what you've tried, including the exact commands you were using or tasks you are running.
* the current behavior and why you think it is a bug
* the steps to reproduce the bug
* a minimal reproducible example and comments describing examples
* any relevant configurations and the components you used
* any relevant output plus ``ansible -vvvv`` (debugging) output
* the output of ``ansible-test env --show`` when filing bug reports involving ``ansible-test``
When sharing YAML in playbooks, ensure that you preserve formatting using `code blocks <https://help.github.com/articles/creating-and-highlighting-code-blocks/>`_. For multiple-file content, use gist.github.com, which is more durable than pastebin content.
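As a sketch of the kind of diagnostic output worth attaching (the inventory file and host pattern are placeholders):

.. code-block:: bash

   ansible --version                            # the Ansible version you are running
   ansible-test env --show                      # environment details for ansible-test bugs
   ansible -vvvv -i inventory.ini all -m ping   # reproduce the problem with debugging output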
.. _request_features:
Requesting a feature
====================
Before you request a feature, check what is :ref:`planned for future Ansible Releases <roadmaps>`. Check `existing pull requests tagged with feature <https://github.com/ansible/ansible/issues?q=is%3Aissue+is%3Aopen+label%3Afeature>`_.
To get your feature into Ansible, :ref:`submit a pull request <community_pull_requests>`, either against ansible-core or a collection. See also :ref:`ansible_collection_merge_requirements`. For ``ansible-core``, you can also open an issue in `ansible/ansible <https://github.com/ansible/ansible/issues>`_ or in a corresponding collection repository (To find the correct issue tracker, refer to :ref:`Bugs in collections<reporting_bugs_in_collections>` ).
View File
@ -1,36 +0,0 @@
.. _reporting_bugs_in_collections:
***********************************
Requesting changes to a collection
***********************************
.. contents::
:local:
Reporting a bug
===============
Security bugs
-------------
Ansible practices responsible disclosure - if this is a security-related bug, email `security@ansible.com <mailto:security@ansible.com>`_ instead of filing a ticket or posting to any public groups, and you will receive a prompt response.
Bugs in collections
-------------------
Many bugs only affect a single module or plugin. If you find a bug that affects a module or plugin hosted in a collection, file the bug in the repository of the :ref:`collection <collections>`:
#. Find the collection on `Galaxy <https://galaxy.ansible.com>`_.
#. Click on the Issue Tracker link for that collection.
#. Follow the contributor guidelines or instructions in the collection repo.
If you are not sure whether a bug is in ansible-core or in a collection, you can report the behavior on the :ref:`mailing list or community chat channel first <communication>`.
Requesting a feature
====================
Before you request a feature, check what is :ref:`planned for future Ansible Releases <roadmaps>`.
The best way to get a feature into an Ansible collection is to :ref:`submit a pull request <community_pull_requests>`, either against ansible-core or against a collection. See also the :ref:`ansible_collection_merge_requirements`.
You can also submit a feature request by opening an issue in the collection repository.
View File
@ -1,165 +0,0 @@
.. _steering_responsibilities:
Steering Committee mission and responsibilities
===============================================
The Steering Committee mission is to provide continuity, guidance, and suggestions to the Ansible community to ensure the delivery and high quality of the Ansible package. In addition, the committee helps decide the technical direction of the Ansible project. It is responsible for approving new proposals and policies in the community, package, and community collections world, new community collection-inclusion requests, and other technical aspects regarding inclusion and packaging.
The Committee should reflect the scope and breadth of the Ansible community.
Steering Committee responsibilities
------------------------------------
The Committee:
* Designs policies and procedures for the community collections world.
* Votes on approval changes to established policies and procedures.
* Reviews community collections for compliance with the policies.
* Helps create and define roadmaps for our deliverables such as the ``ansible`` package, major community collections, and documentation.
* Reviews community collections submitted for inclusion in the Ansible package and decides whether to include them or not.
* Reviews other proposals of importance that need the Committee's attention and provides feedback.
.. _steering_members:
Current Steering Committee members
-----------------------------------
The following table lists the current Steering Committee members. See :ref:`steering_past_members` for a list of past members.
.. table:: Current Steering committee members
+------------------+---------------+-------------+
| Name             | GitHub        | Start year  |
+==================+===============+=============+
| Alexei Znamensky | russoz        | 2022        |
+------------------+---------------+-------------+
| Alicia Cozine    | acozine       | 2021        |
+------------------+---------------+-------------+
| Andrew Klychkov  | Andersson007  | 2021        |
+------------------+---------------+-------------+
| Brad Thornton    | cidrblock     | 2021        |
+------------------+---------------+-------------+
| Brian Scholer    | briantist     | 2022        |
+------------------+---------------+-------------+
| Dylan Silva      | thaumos       | 2021        |
+------------------+---------------+-------------+
| Felix Fontein    | felixfontein  | 2021        |
+------------------+---------------+-------------+
| James Cassell    | jamescassell  | 2021        |
+------------------+---------------+-------------+
| John Barker      | gundalow      | 2021        |
+------------------+---------------+-------------+
| Mario Lenz       | mariolenz     | 2022        |
+------------------+---------------+-------------+
| Markus Bergholz  | markuman      | 2022        |
+------------------+---------------+-------------+
| Maxwell G        | gotmax23      | 2022        |
+------------------+---------------+-------------+
| Sorin Sbarnea    | ssbarnea      | 2021        |
+------------------+---------------+-------------+
John Barker (`gundalow <https://github.com/gundalow>`_) has been elected by the Committee as its :ref:`chairperson`.
Committee members are selected based on their active contribution to the Ansible Project and its community. See :ref:`community_steering_guidelines` to learn details.
Creating new policy proposals & inclusion requests
----------------------------------------------------
The Committee uses the `community-topics repository <https://github.com/ansible-community/community-topics/issues>`_ to asynchronously discuss with the Community and vote on Community topics in corresponding issues.
You can create a new issue in the `community-topics repository <https://github.com/ansible-community/community-topics/issues>`_ as a discussion topic if you want to discuss an idea that impacts any of the following:
* Ansible Community
* Community collection best practices and requirements
* Community collection inclusion policy
* The Community governance
* Other proposals of importance that need the Committee's or overall Ansible community attention
To request changes to the inclusion policy and collection requirements:
#. Submit a new pull request to the `ansible-collections/overview <https://github.com/ansible-collections/overview>`_ repository.
#. Create a corresponding issue containing the rationale behind these changes in the `community-topics <https://github.com/ansible-community/community-topics/issues>`_ repository.
To submit new collections for inclusion into the Ansible package:
* Submit the new collection inclusion requests through a new discussion in the `ansible-inclusion <https://github.com/ansible-collections/ansible-inclusion/discussions/new>`_ repository.
Depending on the topic you want to discuss with the Community and the Committee, as you prepare your proposal, please consider the requirements established by:
* :ref:`code_of_conduct`.
* :ref:`collections_requirements`.
* `Ansible Collection Inclusion Checklist <https://github.com/ansible-collections/overview/blob/main/collection_checklist.md>`_.
Community topics workflow
^^^^^^^^^^^^^^^^^^^^^^^^^
The Committee uses the `Community-topics workflow <https://github.com/ansible-community/community-topics/blob/main/community_topics_workflow.md>`_ to asynchronously discuss and vote on the `community-topics <https://github.com/ansible-community/community-topics/issues>`_.
The quorum, the minimum number of Committee members who must vote on a topic in order for a decision to be officially made, is half of the whole number of the Committee members. If the quorum number contains a fractional part, it is rounded up to the next whole number. For example, if there are thirteen members currently in the committee, the quorum will be seven.
Votes must always have "no change" as an option.
In case of equal numbers of votes for and against a topic, the chairperson's vote will break the tie. For example, if there are six votes for and six votes against a topic, and the chairperson's vote is among those six which are for the topic, the final decision will be positive. If the chairperson has not voted yet, other members ask them to vote.
For votes with more than two options, one choice must have at least half of the votes. If two choices happen to both have half of the votes, the chairperson's vote will break the tie. If no choice has at least half of the votes, the vote choices have to be adjusted so that a majority can be found for a choice in a new vote.
Community topics triage
^^^^^^^^^^^^^^^^^^^^^^^
The Committee conducts a triage of `community topics <https://github.com/ansible-community/community-topics/issues>`_ periodically (every three to six months).
The triage goals are:
* Sparking interest for forgotten topics.
* Identifying and closing irrelevant topics, for example, when the reason of the topic does not exist anymore or the topic is out of the Committee responsibilities scope.
* Identifying and closing topics that the Community is not interested in discussing. Indicators can include an absence of comments, or no activity in the comments, for at least the last six months.
* Identifying and closing topics that were solved and implemented but not closed (in this case, such a topic can be closed on the spot with a comment that it has been implemented).
* Identifying topics that have been in a pending state for a long time, for example, when a topic has been waiting on action from someone for several months, or when topics were solved but not implemented.
A person starting the triage:
#. Identifies the topics mentioned above.
#. Creates a special triage topic containing an enumerated list of the topics-candidates for closing.
#. Establishes a vote date, considering the number of topics, their complexity, and the size of their comment history, giving the Community sufficient time to go through and discuss them.
#. The Community and the Committee vote on each topic-candidate listed in the triage topic whether to close it or keep it open.
Collection inclusion requests workflow
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
When reviewing community collection `inclusion requests <https://github.com/ansible-collections/ansible-inclusion/discussions>`_, the Committee members check if a collection adheres to the :ref:`collections_requirements`.
#. A Committee member who conducts the inclusion review copies the `Ansible community collection checklist <https://github.com/ansible-collections/overview/blob/main/collection_checklist.md>`_ into a corresponding `discussion <https://github.com/ansible-collections/ansible-inclusion/discussions>`_.
#. In the course of the review, the Committee member marks items as completed or leaves a comment saying whether the reviewer expects an issue to be addressed or whether it is optional (for example, it could be **MUST FIX:** <what> or **SHOULD FIX:** <what> under an item).
#. For a collection to be included in the Ansible community package, the collection:
* MUST be reviewed and approved by at least two persons, where at least one person is a Steering Committee member.
* For a Non-Steering Committee review to be counted for inclusion, it MUST be checked and approved by *another* Steering Committee member.
* Reviewers must not be involved significantly in development of the collection. They must declare any potential conflict of interest (for example, being friends/relatives/coworkers of the maintainers/authors, being users of the collection, or having contributed to that collection recently or in the past).
#. After the collection gets two or more Committee member approvals, a Committee member creates a `community topic <https://github.com/ansible-community/community-topics/issues>`_ linked to the corresponding inclusion request. The issue's description says that the collection has been approved by two or more Committee members and establishes a date (a week by default) when the inclusion decision will be considered made. This time period can be used to raise concerns.
#. If no objections are raised up to the established date, the inclusion request is considered successfully resolved. In this case, a Committee member:
#. Declares the decision in the topic and in the inclusion request.
#. Moves the request to the ``Resolved reviews`` category.
#. Adds the collection to the ``ansible.in`` file in a corresponding directory of the `ansible-build-data repository <https://github.com/ansible-community/ansible-build-data>`_.
#. Announces the inclusion through the `Bullhorn newsletter <https://github.com/ansible/community/wiki/News#the-bullhorn>`_.
#. Closes the topic.
Community Working Group meetings
---------------------------------
See the Community Working Group meeting `schedule <https://github.com/ansible/community/blob/main/meetings/README.md#wednesdays>`_. Meeting summaries are posted in the `Community Working Group Meeting Agenda <https://github.com/ansible/community/issues?q=is%3Aopen+label%3Ameeting_agenda+label%3Acommunity+>`_ issue.
.. note::
Participation in the Community Working Group meetings is optional for Committee members. Decisions on community topics are made asynchronously in the `community-topics <https://github.com/ansible-community/community-topics/issues>`_ repository.
The meeting minutes can be found at the `fedora meetbot site <https://meetbot.fedoraproject.org/sresults/?group_id=ansible-community&type=channel>`_ and the same is posted to `Ansible Devel Mailing List <https://groups.google.com/g/ansible-devel>`_ after every meeting.
View File
@ -1,139 +0,0 @@
.. _community_steering_guidelines:
Steering Committee membership guidelines
==========================================
This document describes the expectations and policies related to membership in the :ref:`Ansible Community Steering Committee <steering_responsibilities>` (hereinafter the Committee).
.. contents:: Topics:
.. _steering_expectations:
Expectations of a Steering Committee member
-------------------------------------------
As a Committee member, you agree to:
#. Abide by the :ref:`code_of_conduct` in all your interactions with the Community.
#. Be a Community ambassador by representing its needs within the Committee and throughout the decision making process.
#. Asynchronously participate in discussions and voting on the `Community Topics <https://github.com/ansible-community/community-topics/issues>`_.
#. Review other proposals of importance that need the Committee's attention and provide feedback.
#. Act for the sake of the Community by not promoting corporate or individual agenda during the decision making process.
#. Engage with the Community in a professional and positive manner, encourage community members to express their opinion.
.. _Joining the committee:
Joining the Steering Committee
-------------------------------
Eligibility
^^^^^^^^^^^
A person is eligible to become a Committee member if they have:
#. A wide knowledge of Ansible and/or its related projects.
#. Active contributions to Ansible and/or related projects in any form described in the :ref:`collections_contributions`.
#. A consent to follow the :ref:`steering_expectations`.
Process
^^^^^^^^
The process to join the Steering Committee consists of the following steps:
#. Any community member may nominate someone or themselves for Committee membership by contacting one of the :ref:`current Committee members <steering_members>` or by sending an email to ``ansible-community@redhat.com``.
#. A Committee member who receives the nomination must inform the Committee about it by forwarding the full message.
#. The vote is conducted by email. Nominees must receive a majority of votes from the present Committee members to be added to the Committee.
#. Provided that the vote result is positive, it is announced via the `Bullhorn <https://github.com/ansible/community/wiki/News#the-bullhorn>`_ newsletter and the new member is added to the :ref:`Committee member list <steering_members>`.
Leaving the Steering Committee
-------------------------------
Steering Committee members can resign voluntarily or be removed by the
rest of the Steering Committee under certain circumstances. See the details
below.
.. _Voluntarily leaving process:
Voluntarily leaving the Steering Committee
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
A Committee member can voluntarily leave the Committee.
In this case, they notify the other members, create an issue in the `Community Topics <https://github.com/ansible-community/community-topics/issues>`_ repository announcing the resignation, and after that they are no longer considered Committee members.
Committee members who resign and later change their mind can
rejoin the Committee by following the :ref:`Process for joining the Steering Committee<Joining the committee>`.
Involuntarily leaving the Steering Committee
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
A Committee member will be removed from the Committee if they:
#. Do not participate in asynchronous discussions and voting on the `Community Topics <https://github.com/ansible-community/community-topics/issues>`_ for more than 3 months in a row.
#. Participate unreasonably irregularly (for example, once a month for several months). What counts as unreasonable is determined by the other Committee members, considering the circumstances of each particular case.
#. Violate the :ref:`code_of_conduct`.
.. _Absence or irregular participation removal process:
Absence or irregular participation in discussing topics and votes
..................................................................
In case of absence or irregular participation, the involuntary removal process consists of the following steps:
#. Another Committee member (hereinafter the initiator) contacts the person by email, asking whether they are still interested in fulfilling their Committee duties.
#. If they respond that they are not interested, the initiator asks the person to step down on their own following the :ref:`Voluntarily leaving process<Voluntarily leaving process>`.
#. If, within a reasonable time, the person neither responds nor creates a stepping-down issue, the initiator notifies the other Committee members about the situation.
#. If the Committee agrees that removal is needed, the initiator provides a draft of the corresponding topic's description to the Committee by email for discussion and approval.
* The topic's title is ``Steering Committee member audit``. It must not contain the person's name or other identifying information.
* The description must not contain or imply any forms of condemnation.
* It must mention that the person has been inactive for an unknown reason for the last N months and that, in accordance with the Steering Committee policies, their seat should be freed for another person who can continue the work.
* The description must mention the person's achievements, thank them for the time and effort they spent serving the Community, the Committee, and the project, and express the hope that they will come back one day.
#. The initiator creates the topic in the `Community Topics <https://github.com/ansible-community/community-topics/issues>`_ repository containing the description and the title from the draft.
#. The Committee members vote on the topic.
Ansible Community Code of Conduct violations
.............................................
In case of `Ansible Community Code of Conduct <https://docs.ansible.com/ansible/latest/community/code_of_conduct.html>`_ violations, the process is the same as above except for steps 1-2. Instead:
#. The initiator reports the case to the Committee via email.
#. The Committee discusses the case internally and evaluates its severity and possible solutions.
#. If the Committee concludes that the violation is not severe, it develops a proposal to the person on how the situation can be corrected and further interactions with the Community improved.
#. A Committee representative reaches out to the person with the proposal.
#. The removal process starts if:
* The Committee decides that the severity of the violation rules out the possibility of further membership.
* The person does not respond to the proposal.
* The person explicitly rejects the proposal.
If the removal process starts, the reason given in the topic's description changes correspondingly.
.. _chairperson:
Chairperson
------------
The chairperson election will happen once a year around the time of
AnsibleFest. If the current chairperson has to step down early, the election happens immediately.
The election process consists of the following steps:
#. Members interested in being the chairperson inform the person responsible for arranging the election.
#. An anonymous vote is conducted.
#. The elected candidate is announced internally and publicly.
Unlike regular members, the chairperson has the following additional power:
* The chairperson's vote breaks ties to resolve deadlocks when equal numbers of steering committee members vote for and against a `community topic <https://github.com/ansible-community/community-topics/issues>`_.

View File

@ -1,31 +0,0 @@
.. _steering_past_members:
Steering Committee past members
================================
The Ansible Community is very grateful to these amazing, **irreplaceable**
people for their great service to the Community and the project!
.. table:: Steering Committee past members
+------------------+-----------+-------------------+
| Name             | GitHub    | Years of service  |
+==================+===========+===================+
| Jill Rouleau     | jillr     | 2021-2022         |
+------------------+-----------+-------------------+
| Tadej Borovšak   | tadeboro  | 2021-2022         |
+------------------+-----------+-------------------+
| Toshio Kuratomi  | abadger   | 2021              |
+------------------+-----------+-------------------+
We'd also like to thank our past chairpersons for their contributions to Ansible.
.. table:: Steering Committee past chairpersons
+------------------+-----------+-------------------+
| Name             | GitHub    | Years of service  |
+==================+===========+===================+
| Tadej Borovšak   | tadeboro  | 2021-2022         |
+------------------+-----------+-------------------+

View File

@ -1,14 +0,0 @@
.. _community_steering_committee:
************************************
Ansible Community Steering Committee
************************************
This section focuses on the guidelines and membership of the Ansible Community Steering Committee.
.. toctree::
:maxdepth: 1
community_steering_committee
steering_committee_membership
steering_committee_past_members

View File

@ -1,110 +0,0 @@
.. _ansible_core_documentation:
..
This is the index file for ansible-core. It gets symlinked to index.rst by the Makefile
**************************
Ansible Core Documentation
**************************
About ansible-core
===================
Ansible is an IT automation tool. It can configure systems, deploy software, and orchestrate more advanced IT tasks such as continuous deployments or zero downtime rolling updates.
Ansible core, or ``ansible-core``, is the main building block and architecture for Ansible, and includes:
* CLI tools such as ``ansible-playbook``, ``ansible-doc``, and others for driving and interacting with automation.
* The Ansible language that uses YAML to create a set of rules for developing Ansible Playbooks and includes functions such as conditionals, blocks, includes, loops, and other Ansible imperatives.
* An architectural framework that allows extensions through Ansible collections.
Ansible's main goals are simplicity and ease-of-use. It also has a strong focus on security and reliability, featuring a minimum of moving parts, usage of OpenSSH for transport (with other transports and pull modes as alternatives), and a language that is designed around auditability by humans--even those not familiar with the program.
We believe simplicity is relevant to all sizes of environments, so we design for busy users of all types: developers, sysadmins, release engineers, IT managers, and everyone in between. Ansible is appropriate for managing all environments, from small setups with a handful of instances to enterprise environments with many thousands of instances.
You can learn more at `AnsibleFest <https://www.ansible.com/ansiblefest>`_, the annual event for all Ansible contributors, users, and customers hosted by Red Hat. AnsibleFest is the place to connect with others, learn new skills, and find a new friend to automate with.
Ansible manages machines in an agent-less manner. There is never a question of how to upgrade remote daemons or the problem of not being able to manage systems because daemons are uninstalled. Because OpenSSH is one of the most peer-reviewed open source components, security exposure is greatly reduced. Ansible is decentralized--it relies on your existing OS credentials to control access to remote machines. If needed, Ansible can easily connect with Kerberos, LDAP, and other centralized authentication management systems.
This documentation covers the version of ``ansible-core`` noted in the upper left corner of this page. We maintain multiple versions of ``ansible-core`` and of the documentation, so please be sure you are using the version of the documentation that covers the version of Ansible you're using. For recent features, we note the version of Ansible where the feature was added.
A new major version of ``ansible-core`` is released approximately twice a year. The core application evolves somewhat conservatively, valuing simplicity in language design and setup. Contributors develop and change modules and plugins, hosted in collections since version 2.10, much more quickly.
.. toctree::
:maxdepth: 2
:caption: Ansible getting started
getting_started/index
.. toctree::
:maxdepth: 2
:caption: Installation, Upgrade & Configuration
installation_guide/index
porting_guides/core_porting_guides
.. toctree::
:maxdepth: 2
:caption: Using Ansible Core
inventory_guide/index
command_guide/index
playbook_guide/index
vault_guide/index
module_plugin_guide/index
collections_guide/index
os_guide/index
tips_tricks/index
.. toctree::
:maxdepth: 2
:caption: Contributing to Ansible Core
community/index
community/contributions
community/advanced_index
dev_guide/style_guide/index
.. toctree::
:maxdepth: 2
:caption: Extending Ansible
dev_guide/index
.. toctree::
:maxdepth: 2
:caption: Ansible Galaxy
galaxy/user_guide.rst
galaxy/dev_guide.rst
.. toctree::
:maxdepth: 1
:caption: Reference & Appendices
collections/index
collections/all_plugins
reference_appendices/playbooks_keywords
reference_appendices/common_return_values
reference_appendices/config
reference_appendices/general_precedence
reference_appendices/YAMLSyntax
reference_appendices/python_3_support
reference_appendices/interpreter_discovery
reference_appendices/release_and_maintenance
reference_appendices/test_strategies
dev_guide/testing/sanity/index
reference_appendices/faq
reference_appendices/glossary
reference_appendices/module_utils
reference_appendices/special_variables
reference_appendices/tower
reference_appendices/automationhub
reference_appendices/logging
.. toctree::
:maxdepth: 2
:caption: Roadmaps
roadmap/ansible_core_roadmap_index.rst

View File

@ -1,92 +0,0 @@
.. _developer_guide:
***************
Developer Guide
***************
.. note::
**Making Open Source More Inclusive**
Red Hat is committed to replacing problematic language in our code, documentation, and web properties. We are beginning with these four terms: master, slave, blacklist, and whitelist. We ask that you open an issue or pull request if you come upon a term that we have missed. For more details, see `our CTO Chris Wright's message <https://www.redhat.com/en/blog/making-open-source-more-inclusive-eradicating-problematic-language>`_.
Welcome to the Ansible Developer Guide!
**Who should use this guide?**
If you want to extend Ansible by using a custom module or plugin locally, creating a module or plugin, adding functionality to an existing module, or expanding test coverage, this guide is for you. We've included detailed information for developers on how to test and document modules, as well as the prerequisites for getting your module or plugin accepted into the main Ansible repository.
Find the task that best describes what you want to do:
* I'm looking for a way to address a use case:
* I want to :ref:`add a custom plugin or module locally <developing_locally>`.
* I want to figure out if :ref:`developing a module is the right approach <module_dev_should_you>` for my use case.
* I want to :ref:`develop a collection <developing_collections>`.
* I want to :ref:`contribute to an Ansible-maintained collection <contributing_maintained_collections>`.
* I want to :ref:`contribute to a community-maintained collection <hacking_collections>`.
* I want to :ref:`migrate a role to a collection <migrating_roles>`.
* I've read the info above, and I'm sure I want to develop a module:
* What do I need to know before I start coding?
* I want to :ref:`set up my Python development environment <environment_setup>`.
* I want to :ref:`get started writing a module <developing_modules_general>`.
* I want to write a specific kind of module:
* a :ref:`network module <developing_modules_network>`
* a :ref:`Windows module <developing_modules_general_windows>`.
* an :ref:`Amazon module <ansible_collections.amazon.aws.docsite.dev_guide_intro>`.
* an `oVirt/RHV module <https://github.com/oVirt/ovirt-ansible-collection/blob/master/README-developers.md>`_.
* a :ref:`VMware module <ansible_collections.community.vmware.docsite.vmware_ansible_devguide>`.
* I want to :ref:`write a series of related modules <developing_modules_in_groups>` that integrate Ansible with a new product (for example, a database, cloud provider, network platform, and so on).
* I want to refine my code:
* I want to :ref:`debug my module code <debugging_modules>`.
* I want to :ref:`add tests <developing_testing>`.
* I want to :ref:`document my module <module_documenting>`.
* I want to :ref:`document my set of modules for a network platform <documenting_modules_network>`.
* I want to follow :ref:`conventions and tips for clean, usable module code <developing_modules_best_practices>`.
* I want to :ref:`make sure my code runs on Python 2 and Python 3 <developing_python_3>`.
* I want to work on other development projects:
* I want to :ref:`write a plugin <developing_plugins>`.
* I want to :ref:`connect Ansible to a new source of inventory <developing_inventory>`.
* I want to :ref:`deprecate an outdated module <deprecating_modules>`.
* I want to contribute back to the Ansible project:
* I want to :ref:`understand how to contribute to Ansible <ansible_community_guide>`.
* I want to :ref:`contribute my module or plugin <developing_modules_checklist>`.
* I want to :ref:`understand the license agreement <contributor_license_agreement>` for contributions to Ansible.
If you prefer to read the entire guide, here's a list of the pages in order.
.. toctree::
:maxdepth: 2
developing_locally
developing_modules
developing_modules_general
developing_modules_checklist
developing_modules_best_practices
developing_python_3
debugging
developing_modules_documenting
sidecar
developing_modules_general_windows
developing_modules_in_groups
testing
module_lifecycle
developing_plugins
developing_inventory
developing_core
developing_program_flow_modules
developing_api
developing_rebasing
developing_module_utilities
developing_collections
migrating_roles
collections_galaxy_meta
overview_architecture

View File

@ -1,58 +0,0 @@
.. _core_branches_and_tags:
******************************************
``ansible-core`` project branches and tags
******************************************
``devel`` branch
================
All new development on the next version of ``ansible-core`` occurs exclusively in the ``devel`` branch,
and all bugfixes to prior releases must first be merged to ``devel`` before being backported to one or more stable branches
for inclusion in servicing releases. Around the Beta 1 milestone, a new ``stable-X.Y`` branch is cut from ``devel``,
which is then updated to host development of the ``X.Y+1`` release. External automated testing of Ansible content from
``devel`` is not generally recommended.
``stable-X.Y`` branches
=======================
All ``ansible-core`` ``X.Y.Z`` releases are created from a corresponding ``stable-X.Y`` branch. A
release's stable branch is typically cut from ``devel`` around ``X.Y.0 beta 1`` (when the release is feature complete).
All further bugfixes (no enhancements!) must be made against ``devel`` and backported to applicable stable branches.
``vX.Y.Z`` tags
===============
Each ``ansible-core vX.Y.Z`` release is tagged from the release commit in the corresponding ``stable-X.Y`` branch,
allowing access to the exact source used to create the release. As of ``ansible-core`` 2.13, the auto-generated GitHub
tarball of the tag contents is considered the official canonical release artifact.
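For example, you can fetch the auto-generated tarball for a release tag directly from GitHub (a sketch; the version number is illustrative):

.. code-block:: bash

   curl -LO https://github.com/ansible/ansible/archive/refs/tags/v2.15.0.tar.gz
   tar -tzf v2.15.0.tar.gz | head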
.. _milestone_branch:
``milestone`` branch
====================
A ``milestone`` branch is a slow-moving stream of the ``devel`` branch, intended for external testing of ``ansible-core``
features under active development. As described in the :ref:`ansible_core_roadmaps` for a given release, development is
typically split into three phases of decreasing duration, with larger and more invasive changes targeted to be merged to
``devel`` in earlier phases. The ``milestone`` branch is updated to the contents of ``devel`` at the end of each
development phase. This allows testing of semi-stable unreleased features on a predictable schedule without the exposure
to the potential instability of the daily commit "fire hose" from ``devel``. When a release reaches the Beta 1 milestone,
the ``milestone`` branch will be updated to the first ``devel`` commit after the version number has been increased.
Further testing of the same release should be done from the new ``stable-X.Y`` branch that was created. If a severe issue
that significantly affects community testing or stability is discovered in the ``milestone`` branch, the branch contents
may require unscheduled adjustment, but not in a way that prevents fast-forward updates (for example, ``milestone``-only
commits will not be created or cherry-picked from ``devel``).
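For example, a minimal way to track the ``milestone`` branch for external testing might look like this (a sketch; use whichever installation method you prefer):

.. code-block:: bash

   git clone https://github.com/ansible/ansible.git
   cd ansible
   git checkout milestone
   source hacking/env-setup    # or install from source in another way
   # Later, fast-forward to the next development-phase snapshot:
   git pull --ff-only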
The following example is for illustrative purposes only. See the :ref:`ansible_core_roadmaps` for accurate dates. For example, the ``milestone`` branch in the 2.13 ``ansible-core`` roadmap was updated as follows:
* 27-Sep-2021: 2.13 Development Phase 1 begins; ``milestone`` contents are updated to 2.12.0b1 with version number set to
``2.13.0.dev0``. Automated content testing that includes version-specific ignore files (e.g., ``ignore-2.12.txt``)
should copy them for the current version (e.g., ``ignore-2.13.txt``) before this point to ensure that automated sanity
testing against the ``milestone`` branch will continue to pass.
* 13-Dec-2021: 2.13 Development Phase 2 begins; ``milestone`` contents are updated to the final commit from Development Phase 1
* 14-Feb-2022: 2.13 Development Phase 3 begins; ``milestone`` contents are updated to the final commit from Development Phase 2
* 11-Apr-2022: ``stable-2.13`` branch created with results from Development Phase 3 and freeze. ``2.13.0b1`` is released from
``stable-2.13``. Automated content testing should continue 2.13 series testing against the new branch. The ``devel``
version number is updated to ``2.14.0.dev0``, and ``milestone`` is updated to that point.

View File

@ -1,89 +0,0 @@
.. _developer_guide:
***************
Developer Guide
***************
.. note::
**Making Open Source More Inclusive**
Red Hat is committed to replacing problematic language in our code, documentation, and web properties. We are beginning with these four terms: master, slave, blacklist, and whitelist. We ask that you open an issue or pull request if you come upon a term that we have missed. For more details, see `our CTO Chris Wright's message <https://www.redhat.com/en/blog/making-open-source-more-inclusive-eradicating-problematic-language>`_.
Welcome to the Ansible Developer Guide!
**Who should use this guide?**
If you want to extend Ansible by using a custom module or plugin locally, creating a module or plugin, adding functionality to an existing module, or expanding test coverage, this guide is for you. We've included detailed information for developers on how to test and document modules, as well as the prerequisites for getting your module or plugin accepted into the main Ansible repository.
Find the task that best describes what you want to do:
* I'm looking for a way to address a use case:
* I want to :ref:`add a custom plugin or module locally <developing_locally>`.
* I want to figure out if :ref:`developing a module is the right approach <module_dev_should_you>` for my use case.
* I want to :ref:`develop a collection <developing_collections>`.
* I want to :ref:`contribute to an Ansible-maintained collection <contributing_maintained_collections>`.
* I want to :ref:`contribute to a community-maintained collection <hacking_collections>`.
* I want to :ref:`migrate a role to a collection <migrating_roles>`.
* I've read the info above, and I'm sure I want to develop a module:
* What do I need to know before I start coding?
* I want to :ref:`set up my Python development environment <environment_setup>`.
* I want to :ref:`get started writing a module <developing_modules_general>`.
* I want to write a specific kind of module:
* a :ref:`network module <developing_modules_network>`
* a :ref:`Windows module <developing_modules_general_windows>`.
* I want to :ref:`write a series of related modules <developing_modules_in_groups>` that integrate Ansible with a new product (for example, a database, cloud provider, network platform, and so on).
* I want to refine my code:
* I want to :ref:`debug my module code <debugging_modules>`.
* I want to :ref:`add tests <developing_testing>`.
* I want to :ref:`document my module <module_documenting>`.
* I want to :ref:`document my set of modules for a network platform <documenting_modules_network>`.
* I want to follow :ref:`conventions and tips for clean, usable module code <developing_modules_best_practices>`.
* I want to :ref:`make sure my code runs on Python 2 and Python 3 <developing_python_3>`.
* I want to work on other development projects:
* I want to :ref:`write a plugin <developing_plugins>`.
* I want to :ref:`connect Ansible to a new source of inventory <developing_inventory>`.
* I want to :ref:`deprecate an outdated module <deprecating_modules>`.
* I want to contribute back to the Ansible project:
* I want to :ref:`understand how to contribute to Ansible <ansible_community_guide>`.
* I want to :ref:`contribute my module or plugin <developing_modules_checklist>`.
* I want to :ref:`understand the license agreement <contributor_license_agreement>` for contributions to Ansible.
If you prefer to read the entire guide, here's a list of the pages in order.
.. toctree::
:maxdepth: 2
developing_locally
developing_modules
developing_modules_general
developing_modules_checklist
developing_modules_best_practices
developing_python_3
debugging
developing_modules_documenting
sidecar
developing_modules_general_windows
developing_modules_in_groups
testing
module_lifecycle
developing_plugins
developing_inventory
developing_core
developing_program_flow_modules
developing_api
developing_rebasing
developing_module_utilities
developing_collections
migrating_roles
collections_galaxy_meta
overview_architecture

View File

@ -1,112 +0,0 @@
.. _debugging_modules:
*****************
Debugging modules
*****************
.. contents::
:local:
.. _detailed_debugging:
Detailed debugging steps
========================
Ansible modules are put together as a zip file consisting of the module file and the various Python module boilerplate inside a wrapper script. To see what is actually happening in the module, you need to extract the file from the wrapper. The wrapper script provides helper methods that let you do that.
The following steps use ``localhost`` as the target host, but you can use the same steps to debug against remote hosts as well. For a simpler approach to debugging without using the temporary files, see :ref:`simple debugging <simple_debugging>`.
#. Set :envvar:`ANSIBLE_KEEP_REMOTE_FILES` to ``1`` on the control host so Ansible will keep the remote module files instead of deleting them after the module finishes executing. Use the ``-vvv`` option to make Ansible more verbose. This will display the file name of the temporary module file.
.. code-block:: shell-session
$ ANSIBLE_KEEP_REMOTE_FILES=1 ansible localhost -m ping -a 'data=debugging_session' -vvv
<127.0.0.1> ESTABLISH LOCAL CONNECTION FOR USER: badger
<127.0.0.1> EXEC /bin/sh -c '( umask 77 && mkdir -p "` echo $HOME/.ansible/tmp/ansible-tmp-1461434734.35-235318071810595 `" && echo "` echo $HOME/.ansible/tmp/ansible-tmp-1461434734.35-235318071810595 `" )'
<127.0.0.1> PUT /var/tmp/tmpjdbJ1w TO /home/badger/.ansible/tmp/ansible-tmp-1461434734.35-235318071810595/AnsiballZ_ping.py
<127.0.0.1> EXEC /bin/sh -c 'LANG=en_US.UTF-8 LC_ALL=en_US.UTF-8 LC_MESSAGES=en_US.UTF-8 /usr/bin/python /home/badger/.ansible/tmp/ansible-tmp-1461434734.35-235318071810595/AnsiballZ_ping.py && sleep 0'
localhost | SUCCESS => {
"changed": false,
"invocation": {
"module_args": {
"data": "debugging_session"
},
"module_name": "ping"
},
"ping": "debugging_session"
}
#. Navigate to the temporary directory from the previous step. If the previous command was run against a remote host, connect to that host first before trying to navigate to the temporary directory.
.. code-block:: shell-session
$ ssh remotehost # only if not debugging against localhost
$ cd /home/badger/.ansible/tmp/ansible-tmp-1461434734.35-235318071810595
#. Run the wrapper's ``explode`` command to turn the string into some Python files that you can work with.
.. code-block:: shell-session
$ python AnsiballZ_ping.py explode
Module expanded into:
/home/badger/.ansible/tmp/ansible-tmp-1461434734.35-235318071810595/debug_dir
If you want, you can examine the wrapper file. It is a small Python script with a large base64-encoded string that contains the module to execute.
#. When you look into the temporary directory you'll see a structure like this:
.. code-block:: shell-session
├── AnsiballZ_ping.py
└── debug_dir
├── ansible
│   ├── __init__.py
│   ├── module_utils
│   │   ├── __init__.py
│   │   ├── _text.py
│   │   ├── basic.py
│   │   ├── common
│   │   ├── compat
│   │   ├── distro
│   │   ├── parsing
│   │   ├── pycompat24.py
│   │   └── six
│   └── modules
│   ├── __init__.py
│   └── ping.py
└── args
* ``AnsiballZ_ping.py`` is the Python script with the module code stored in a base64 encoded string. It contains various helper functions for executing the module.
* ``ping.py`` is the code for the module itself. You can modify this code to see what effect it would have on your module, or for debugging purposes.
* The ``args`` file contains a JSON string. The string is a dictionary containing the module arguments and other variables that Ansible passes into the module to change its behavior. Modify this file to change the parameters passed to the module.
* The ``ansible`` directory contains the module code in ``modules`` as well as code from :mod:`ansible.module_utils` that is used by the module. Ansible includes files for any :mod:`ansible.module_utils` imports in the module but not any files from any other module. If your module uses :mod:`ansible.module_utils.urls`, Ansible will include it for you. But if your module includes `requests <https://requests.readthedocs.io/en/latest/api/>`_, then you'll have to make sure that the Python `requests library <https://pypi.org/project/requests/>`_ is installed on the system before running the module.
You can modify files in this directory if you suspect that the module is having a problem in some of this boilerplate code rather than in the module code you have written.
#. Once you edit the code or arguments in the exploded tree, use the ``execute`` subcommand to run it:
.. code-block:: shell-session
$ python AnsiballZ_ping.py execute
{"invocation": {"module_args": {"data": "debugging_session"}}, "changed": false, "ping": "debugging_session"}
This subcommand inserts the absolute path to ``debug_dir`` as the first item in ``sys.path`` and invokes the script using the arguments in the ``args`` file. You can continue to run the module like this until you understand the problem. Then you can copy the changes back into your real module file and test that the real module works by using ``ansible`` or ``ansible-playbook``.
.. _simple_debugging:
Simple debugging
================
The easiest way to run a debugger in a module, either local or remote, is to use `epdb <https://pypi.org/project/epdb/>`_. Add ``import epdb; epdb.serve()`` in the module code on the control node at the desired break point. To connect to the debugger, run ``epdb.connect()``. See the `epdb documentation <https://pypi.org/project/epdb/>`_ for how to specify the ``host`` and ``port``. If connecting to a remote node, make sure to use a port that is allowed by any firewall between the control node and the remote node.
This technique should work with any remote debugger, but we do not guarantee any particular remote debugging tool will work.
The `q <https://pypi.org/project/q/>`_ library is another very useful debugging tool.
Since ``print()`` statements do not work inside modules, raising an exception is a good approach if you just want to see some specific data. Put ``raise Exception(some_value)`` somewhere in the module and run it normally. Ansible will handle this exception, pass the message back to the control node, and display it.

View File

@ -1,47 +0,0 @@
.. _developing_api:
**********
Python API
**********
.. contents:: Topics
.. note:: This API is intended for internal Ansible use. Ansible may make changes to this API at any time that could break backward compatibility with older versions of the API. Because of this, external use is not supported by Ansible. If you want to use the Python API only for executing playbooks or modules, consider `ansible-runner <https://ansible-runner.readthedocs.io/en/latest/>`_ first.
There are several ways to use Ansible from an API perspective. You can use
the Ansible Python API to control nodes, you can extend Ansible to respond to various Python events, you can
write plugins, and you can plug in inventory data from external data sources. This document
gives a basic overview and examples of the Ansible execution and playbook API.
If you would like to use Ansible programmatically from a language other than Python, trigger events asynchronously,
or have access control and logging demands, please see the `AWX project <https://github.com/ansible/awx/>`_.
.. note:: Because Ansible relies on forking processes, this API is not thread safe.
.. _python_api_example:
Python API example
==================
This example is a simple demonstration that shows how to minimally run a couple of tasks:
.. literalinclude:: ../../../../examples/scripts/uptime.py
:language: python
.. note:: Ansible emits warnings and errors through the display object, which prints directly to stdout, stderr and the Ansible log.
The source code for the ``ansible``
command line tools (``lib/ansible/cli/``) is `available on GitHub <https://github.com/ansible/ansible/tree/devel/lib/ansible/cli>`_.
.. seealso::
:ref:`developing_inventory`
Developing dynamic inventory integrations
:ref:`developing_modules_general`
Getting started on developing a module
:ref:`developing_plugins`
How to develop plugins
`Development Mailing List <https://groups.google.com/group/ansible-devel>`_
Mailing list for development topics
:ref:`communication_irc`
How to join Ansible chat channels

View File

@ -1,49 +0,0 @@
.. _developing_collections:
**********************
Developing collections
**********************
Collections are a distribution format for Ansible content. You can package and distribute playbooks, roles, modules, and plugins using collections. A typical collection addresses a set of related use cases. For example, the ``cisco.ios`` collection automates management of Cisco IOS devices.
You can create a collection and publish it to `Ansible Galaxy <https://galaxy.ansible.com>`_ or to a private Automation Hub instance. You can publish certified collections to the Red Hat Automation Hub, part of the Red Hat Ansible Automation Platform.
.. toctree::
:maxdepth: 2
:caption: Developing new collections
developing_collections_creating
developing_collections_shared
developing_collections_testing
developing_collections_distributing
developing_collections_documenting
.. toctree::
:maxdepth: 2
:caption: Working with existing collections
developing_collections_migrating
developing_collections_contributing
developing_collections_changelogs
.. toctree::
:maxdepth: 2
:caption: Collections references
developing_collections_structure
collections_galaxy_meta
For instructions on developing modules, see :ref:`developing_modules_general`.
.. seealso::
:ref:`collections`
Learn how to install and use collections in playbooks and roles
:ref:`contributing_maintained_collections`
Guidelines for contributing to selected collections
`Ansible Collections Overview and FAQ <https://github.com/ansible-collections/overview/blob/main/README.rst>`_
Current development status of community collections and FAQ
`Mailing List <https://groups.google.com/group/ansible-devel>`_
The development mailing list
:ref:`communication_irc`
How to join Ansible chat channels

View File

@ -1,82 +0,0 @@
.. _collection_changelogs:
***************************************************************
Generating changelogs and porting guide entries in a collection
***************************************************************
You can create and share changelog and porting guide entries for your collection. If your collection is part of the Ansible Community package, we recommend that you use the `antsibull-changelog <https://github.com/ansible-community/antsibull-changelog>`_ tool to generate Ansible-compatible changelogs. The Ansible changelog uses the output of this tool to collate all the collections included in an Ansible release into one combined changelog for the release.
.. note::
Ansible here refers to the Ansible 2.10 or later release that includes a curated set of collections.
.. contents::
:local:
:depth: 2
Understanding antsibull-changelog
=================================
The ``antsibull-changelog`` tool allows you to create and update changelogs for Ansible collections that are compatible with the combined Ansible changelogs. This is an update to the changelog generator used in prior Ansible releases. The tool adds three new changelog fragment categories: ``breaking_changes``, ``security_fixes`` and ``trivial``. The tool also generates the ``changelog.yaml`` file that Ansible uses to create the combined ``CHANGELOG.rst`` file and Porting Guide for the release.
See :ref:`changelogs_how_to` and the `antsibull-changelog documentation <https://github.com/ansible-community/antsibull-changelog/tree/main/docs>`_ for complete details.
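For example, a changelog fragment is a small YAML file in the collection's ``changelogs/fragments/`` directory that uses one of these categories (a sketch; the filename, module name, and text are illustrative):

.. code-block:: bash

   mkdir -p changelogs/fragments
   cat > changelogs/fragments/redact-api-token.yml <<'EOF'
   security_fixes:
     - my_module - no longer logs the API token when run with high verbosity.
   EOF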
.. note::
The collection maintainers set the changelog policy for their collections. See the individual collection contributing guidelines for complete details.
Generating changelogs
---------------------
To initialize changelog generation:
#. Install ``antsibull-changelog``: :code:`pip install antsibull-changelog`.
#. Initialize changelogs for your repository: :code:`antsibull-changelog init <path/to/your/collection>`.
#. Optionally, edit the ``changelogs/config.yaml`` file to customize the location of the generated changelog ``.rst`` file or other options. See `Bootstrapping changelogs for collections <https://github.com/ansible-community/antsibull-changelog/blob/main/docs/changelogs.rst#bootstrapping-changelogs-for-collections>`_ for details.
To generate changelogs from the changelog fragments you created:
#. Optionally, validate your changelog fragments: :code:`antsibull-changelog lint`.
#. Generate the changelog for your release: :code:`antsibull-changelog release [--version version_number]` (a combined sketch of these commands follows the note below).
.. note::
Add the ``--reload-plugins`` option if you ran the ``antsibull-changelog release`` command previously and the version of the collection has not changed. ``antsibull-changelog`` caches the information on all plugins and does not update its cache until the collection version changes.
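Taken together, the commands above might look like this when run from the root of a collection checkout (a sketch; the version number is illustrative):

.. code-block:: bash

   pip install antsibull-changelog
   antsibull-changelog init .
   # Add changelog fragments under changelogs/fragments/, then:
   antsibull-changelog lint
   antsibull-changelog release --version 1.2.0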
Porting Guide entries from changelog fragments
----------------------------------------------
The Ansible changelog generator automatically adds several changelog fragment categories to the Ansible Porting Guide:
* ``major_changes``
* ``breaking_changes``
* ``deprecated_features``
* ``removed_features``
Including collection changelogs into Ansible
=============================================
If your collection is part of Ansible, use one of the following three options to include your changelog into the Ansible release changelog:
* Use the ``antsibull-changelog`` tool.
* If you are not using this tool, include a properly formatted ``changelog.yaml`` file in your collection. See the `changelog.yaml format <https://github.com/ansible-community/antsibull-changelog/blob/main/docs/changelog.yaml-format.md>`_ for details.
* Add a link to your own changelogs or release notes in any format by opening an issue at https://github.com/ansible-community/ansible-build-data/ with the HTML link to that information.
.. note::
For the first two options, Ansible pulls the changelog details from Galaxy so your changelogs must be included in the collection version on Galaxy that is included in the upcoming Ansible release.
.. seealso::
:ref:`collection_changelogs`
Learn how to create good changelog fragments.
:ref:`collections`
Learn how to install and use collections.
:ref:`contributing_maintained_collections`
Guidelines for contributing to selected collections
`Mailing List <https://groups.google.com/group/ansible-devel>`_
The development mailing list
:ref:`communication_irc`
How to join Ansible chat channels

View File

@ -1,65 +0,0 @@
.. _hacking_collections:
***************************
Contributing to collections
***************************
If you want to add functionality to an existing collection, modify a collection you are using to fix a bug, or change the behavior of a module in a collection, clone the git repository for that collection and make changes on a branch. You can combine changes to a collection with a local checkout of Ansible (``source hacking/env-setup``).
You should first check the collection repository to see if it has specific contribution guidelines. These are typically listed in the README.md or CONTRIBUTING.md files within the repository.
Contributing to a collection: community.general
===============================================
These instructions apply to collections hosted in the `ansible_collections GitHub organization <https://github.com/ansible-collections>`_. For other collections, especially for collections not hosted on GitHub, check the ``README.md`` of the collection for information on contributing to it.
This example uses the `community.general collection <https://github.com/ansible-collections/community.general/>`_. To contribute to other collections in the same GitHub org, replace the folder names ``community`` and ``general`` with the namespace and collection name of a different collection.
Prerequisites
-------------
* Include ``~/dev/ansible/collections/`` in :ref:`COLLECTIONS_PATHS` (see the sketch after this list).
* If that path mentions multiple directories, make sure that no other directory earlier in the search path contains a copy of ``community.general``.
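For example, you can satisfy both prerequisites by putting ``~/dev/ansible/collections`` first in the environment variable that backs :ref:`COLLECTIONS_PATHS` (a sketch, assuming a POSIX shell; the equivalent ``ansible.cfg`` setting works as well):

.. code-block:: bash

   export ANSIBLE_COLLECTIONS_PATH=~/dev/ansible/collections:~/.ansible/collections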
Creating a PR
-------------
* Create the directory ``~/dev/ansible/collections/ansible_collections/community``:
.. code-block:: shell
mkdir -p ~/dev/ansible/collections/ansible_collections/community
* Clone `the community.general Git repository <https://github.com/ansible-collections/community.general/>`_ or a fork of it into the directory ``general``:
.. code-block:: shell
cd ~/dev/ansible/collections/ansible_collections/community
git clone git@github.com:ansible-collections/community.general.git general
* If you clone from a fork, add the original repository as a remote ``upstream``:
.. code-block:: shell
cd ~/dev/ansible/collections/ansible_collections/community/general
git remote add upstream git@github.com:ansible-collections/community.general.git
* Create a branch and commit your changes on the branch.
* Remember to add tests for your changes, see :ref:`testing_collections`.
* Push your changes to your fork of the collection and create a Pull Request.
You can test your changes by using this checkout of ``community.general`` in playbooks and roles with whichever version of Ansible you have installed locally, including a local checkout of ``ansible/ansible``'s ``devel`` branch.
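Putting the steps above together, a typical local pass might look like this (a sketch; the branch name is illustrative, and ``ansible-test`` is provided by ``ansible-core``):

.. code-block:: bash

   cd ~/dev/ansible/collections/ansible_collections/community/general
   git checkout -b my-bugfix
   # ... edit code and add or update tests ...
   ansible-test sanity --docker -v
   git push origin my-bugfix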
.. seealso::
:ref:`collections`
Learn how to install and use collections.
:ref:`contributing_maintained_collections`
Guidelines for contributing to selected collections
`Mailing List <https://groups.google.com/group/ansible-devel>`_
The development mailing list
:ref:`communication_irc`
How to join Ansible chat channels

View File

@ -1,61 +0,0 @@
.. _creating_collections:
********************
Creating collections
********************
To create a collection:
#. Create a :ref:`collection skeleton<creating_collections_skeleton>` with the ``collection init`` command.
#. Add modules and other content to the collection.
#. Build the collection into a collection artifact with :ref:`ansible-galaxy collection build<building_collections>`.
#. Publish the collection artifact to Galaxy with :ref:`ansible-galaxy collection publish<publishing_collections>`.
A user can then install your collection on their systems.
.. contents::
:local:
:depth: 2
.. _creating_collections_skeleton:
Creating a collection skeleton
==============================
To start a new collection, run the following command in your collections directory:
.. code-block:: bash
ansible_collections#> ansible-galaxy collection init my_namespace.my_collection
.. note::
Both the namespace and collection names use the same strict set of requirements. See `Galaxy namespaces <https://galaxy.ansible.com/docs/contributing/namespaces.html#galaxy-namespaces>`_ on the Galaxy docsite for those requirements.
It will create the structure ``[my_namespace]/[my_collection]/[collection skeleton]``.
.. hint:: If Git is used for version control, the corresponding repository should be initialized in the collection directory.
Once the skeleton exists, you can populate the directories with the content you want inside the collection. See `ansible-collections <https://github.com/ansible-collections/>`_ GitHub Org to get a better idea of what you can place inside a collection.
Reference: the ``ansible-galaxy collection`` command
Currently the ``ansible-galaxy collection`` command implements the following subcommands (a combined sketch follows this list):
* ``init``: Create a basic collection skeleton based on the default template included with Ansible or your own template.
* ``build``: Create a collection artifact that can be uploaded to Galaxy or your own repository.
* ``publish``: Publish a built collection artifact to Galaxy.
* ``install``: Install one or more collections.
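For example, a complete pass through these subcommands might look like the following (a sketch; the namespace, collection name, and version are illustrative):

.. code-block:: bash

   ansible-galaxy collection init my_namespace.my_collection
   cd my_namespace/my_collection
   # ... add plugins, roles, and other content ...
   ansible-galaxy collection build
   ansible-galaxy collection publish my_namespace-my_collection-1.0.0.tar.gz
   ansible-galaxy collection install my_namespace.my_collection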
To learn more about the ``ansible-galaxy`` command-line tool, see the :ref:`ansible-galaxy` man page.
.. seealso::
:ref:`collections`
Learn how to install and use collections.
:ref:`collection_structure`
Directories and files included in the collection skeleton
`Mailing List <https://groups.google.com/group/ansible-devel>`_
The development mailing list
:ref:`communication_irc`
How to join Ansible chat channels

View File

@ -1,406 +0,0 @@
.. _distributing_collections:
************************
Distributing collections
************************
A collection is a distribution format for Ansible content. A typical collection contains modules and other plugins that address a set of related use cases. For example, a collection might automate administering a particular database. A collection can also contain roles and playbooks.
To distribute your collection and allow others to use it, you can publish your collection on one or more :term:`distribution server`. Distribution servers include:
================================= ===================================================================
Distribution server               Collections accepted
================================= ===================================================================
Ansible Galaxy                    All collections
:term:`Pulp 3 Galaxy`             All collections, supports signed collections
Red Hat Automation Hub            Only collections certified by Red Hat, supports signed collections
Privately hosted Automation Hub   Collections authorized by the owners
================================= ===================================================================
Distributing collections involves four major steps:
#. Initial configuration of your distribution server or servers
#. Building your collection tarball
#. Preparing to publish your collection
#. Publishing your collection
.. contents::
:local:
:depth: 2
.. _config_distribution_server:
Initial configuration of your distribution server or servers
============================================================
Configure a connection to one or more distribution servers so you can publish collections there. You only need to configure each distribution server once. You must repeat the other steps (building your collection tarball, preparing to publish, and publishing your collection) every time you publish a new collection or a new version of an existing collection.
1. Create a namespace on each distribution server you want to use.
2. Get an API token for each distribution server you want to use.
3. Specify the API token for each distribution server you want to use.
.. _get_namespace:
Creating a namespace
--------------------
You must upload your collection into a namespace on each distribution server. If you have a login for Ansible Galaxy, your Ansible Galaxy username is usually also an Ansible Galaxy namespace.
.. warning::
Namespaces on Ansible Galaxy cannot include hyphens. If you have a login for Ansible Galaxy that includes a hyphen, your Galaxy username is not also a Galaxy namespace. For example, ``awesome-user`` is a valid username for Ansible Galaxy, but it is not a valid namespace.
You can create additional namespaces on Ansible Galaxy if you choose. For Red Hat Automation Hub and private Automation Hub you must create a namespace before you can upload your collection. To create a namespace:
* To create a namespace on Galaxy, see `Galaxy namespaces <https://galaxy.ansible.com/docs/contributing/namespaces.html#galaxy-namespaces>`_ on the Galaxy docsite for details.
* To create a namespace on Red Hat Automation Hub, see the `Ansible Certified Content FAQ <https://access.redhat.com/articles/4916901>`_.
Specify the namespace in the :file:`galaxy.yml` file for each collection. For more information on the :file:`galaxy.yml` file, see :ref:`collections_galaxy_meta`.
.. _galaxy_get_token:
Getting your API token
----------------------
An API token authenticates your connection to each distribution server. You need a separate API token for each distribution server. Use the correct API token to connect to each distribution server securely and protect your content.
To get your API token:
* To get an API token for Galaxy, go to the `Galaxy profile preferences <https://galaxy.ansible.com/me/preferences>`_ page and click :guilabel:`API Key`.
* To get an API token for Automation Hub, go to `the token page <https://cloud.redhat.com/ansible/automation-hub/token/>`_ and click :guilabel:`Load token`.
.. _galaxy_specify_token:
Specifying your API token and distribution server
-------------------------------------------------
Each time you publish a collection, you must specify the API token and the distribution server to create a secure connection. You have two options for specifying the token and distribution server:
* You can configure the token in configuration, as part of a ``galaxy_server_list`` entry in your :file:`ansible.cfg` file. Using configuration is the most secure option.
* You can pass the token at the command line as an argument to the ``ansible-galaxy`` command. If you pass the token at the command line, you can specify the server at the command line, by using the default setting, or by setting the server in configuration. Passing the token at the command line is insecure, because typing secrets at the command line may expose them to other users on the system.
.. _galaxy_token_ansible_cfg:
Specifying the token and distribution server in configuration
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
By default, Ansible Galaxy is configured as the only distribution server. You can add other distribution servers and specify your API token or tokens in configuration by editing the ``galaxy_server_list`` section of your :file:`ansible.cfg` file. This is the most secure way to manage authentication for distribution servers. Specify a URL and token for each server. For example:
.. code-block:: ini
[galaxy]
server_list = release_galaxy
[galaxy_server.release_galaxy]
url=https://galaxy.ansible.com/
token=abcdefghijklmnopqrtuvwxyz
You cannot use the ``--api-key`` command-line option with any servers defined in your :ref:`galaxy_server_list <galaxy_server_config>`. See :ref:`galaxy_server_config` for complete details.
.. _galaxy_use_token_arg:
Specifying the token at the command line
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
You can specify the API token at the command line using the ``--token`` argument of the :ref:`ansible-galaxy` command. There are three ways to specify the distribution server when passing the token at the command line:
* using the ``--server`` argument of the :ref:`ansible-galaxy` command
* relying on the default (https://galaxy.ansible.com)
* setting a server in configuration by creating a :ref:`GALAXY_SERVER` setting in your :file:`ansible.cfg` file
For example:
.. code-block:: bash
ansible-galaxy collection publish path/to/my_namespace-my_collection-1.0.0.tar.gz --token abcdefghijklmnopqrtuvwxyz
.. warning::
Using the ``--token`` argument is insecure. Passing secrets at the command line may expose them to others on the system.
.. _building_collections:
Building your collection tarball
================================
After configuring one or more distribution servers, build a collection tarball. The collection tarball is the published artifact, the object that you upload and other users download to install your collection. To build a collection tarball:
#. Review the version number in your :file:`galaxy.yml` file. Each time you publish your collection, it must have a new version number. You cannot make changes to existing versions of your collection on a distribution server. If you try to upload the same collection version more than once, the distribution server returns the error ``Code: conflict.collection_exists``. Collections follow semantic versioning rules. For more information on versions, see :ref:`collection_versions`. For more information on the :file:`galaxy.yml` file, see :ref:`collections_galaxy_meta`.
#. Run ``ansible-galaxy collection build`` from inside the top-level directory of the collection. For example:
.. code-block:: bash
collection_dir#> ansible-galaxy collection build
This command builds a tarball of the collection in the current directory, which you can upload to your selected distribution server:
.. code-block:: shell
my_collection/
├── galaxy.yml
├── ...
├── my_namespace-my_collection-1.0.0.tar.gz
└── ...
.. note::
* To reduce the size of collections, certain files and folders are excluded from the collection tarball by default. See :ref:`ignoring_files_and_folders_collections` if your collection directory contains other files you want to exclude.
* The current Galaxy maximum tarball size is 2 MB.
You can upload your tarball to one or more distribution servers. You can also distribute your collection locally by copying the tarball to install your collection directly on target systems.
.. _ignoring_files_and_folders_collections:
Ignoring files and folders
--------------------------
You can exclude files from your collection with either :ref:`build_ignore <build_ignore>` or :ref:`manifest_directives`. For more information on the :file:`galaxy.yml` file, see :ref:`collections_galaxy_meta`.
.. _build_ignore:
Include all, with explicit ignores
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
By default the build step includes all the files in the collection directory in the tarball except for the following:
* ``galaxy.yml``
* ``*.pyc``
* ``*.retry``
* ``tests/output``
* previously built tarballs in the root directory
* various version control directories such as ``.git/``
To exclude other files and folders from your collection tarball, set a list of file glob-like patterns in the ``build_ignore`` key in the collection's ``galaxy.yml`` file. These patterns use the following special characters for wildcard matching:
* ``*``: Matches everything
* ``?``: Matches any single character
* ``[seq]``: Matches any character in sequence
* ``[!seq]``: Matches any character not in sequence
For example, to exclude the :file:`sensitive` folder within the ``playbooks`` folder as well any ``.tar.gz`` archives, set the following in your ``galaxy.yml`` file:
.. code-block:: yaml
build_ignore:
- playbooks/sensitive
- '*.tar.gz'
.. note::
The ``build_ignore`` feature is only supported with ``ansible-galaxy collection build`` in Ansible 2.10 or newer.
.. _manifest_directives:
Manifest Directives
^^^^^^^^^^^^^^^^^^^
.. versionadded:: 2.14
The :file:`galaxy.yml` file supports manifest directives that are historically used in Python packaging, as described in `MANIFEST.in commands <https://packaging.python.org/en/latest/guides/using-manifest-in/#manifest-in-commands>`_.
.. note::
The use of ``manifest`` requires installing the optional ``distlib`` Python dependency.
.. note::
The ``manifest`` feature is only supported with ``ansible-galaxy collection build`` in ``ansible-core`` 2.14 or newer, and is mutually exclusive with ``build_ignore``.
For example, to exclude the :file:`sensitive` folder within the ``playbooks`` folder as well as any ``.tar.gz`` archives, set the following in your :file:`galaxy.yml` file:
.. code-block:: yaml
manifest:
directives:
- recursive-exclude playbooks/sensitive **
- global-exclude *.tar.gz
On their own, ``MANIFEST.in`` style directives would exclude all files, but a set of default directives is in place. Those default directives are described below. To see the directives in use during a build, pass ``-vvv`` to the ``ansible-galaxy collection build`` command.
.. code-block::
include meta/*.yml
include *.txt *.md *.rst COPYING LICENSE
recursive-include tests **
recursive-include docs **.rst **.yml **.yaml **.json **.j2 **.txt
recursive-include roles **.yml **.yaml **.json **.j2
recursive-include playbooks **.yml **.yaml **.json
recursive-include changelogs **.yml **.yaml
recursive-include plugins */**.py
recursive-include plugins/become **.yml **.yaml
recursive-include plugins/cache **.yml **.yaml
recursive-include plugins/callback **.yml **.yaml
recursive-include plugins/cliconf **.yml **.yaml
recursive-include plugins/connection **.yml **.yaml
recursive-include plugins/filter **.yml **.yaml
recursive-include plugins/httpapi **.yml **.yaml
recursive-include plugins/inventory **.yml **.yaml
recursive-include plugins/lookup **.yml **.yaml
recursive-include plugins/netconf **.yml **.yaml
recursive-include plugins/shell **.yml **.yaml
recursive-include plugins/strategy **.yml **.yaml
recursive-include plugins/test **.yml **.yaml
recursive-include plugins/vars **.yml **.yaml
recursive-include plugins/modules **.ps1 **.yml **.yaml
recursive-include plugins/module_utils **.ps1 **.psm1 **.cs
# manifest.directives from galaxy.yml inserted here
exclude galaxy.yml galaxy.yaml MANIFEST.json FILES.json <namespace>-<name>-*.tar.gz
recursive-exclude tests/output **
global-exclude /.* /__pycache__
.. note::
``<namespace>-<name>-*.tar.gz`` is expanded with the actual ``namespace`` and ``name``.
The ``manifest.directives`` supplied in :file:`galaxy.yml` are inserted after the default includes and before the default excludes.
To enable the use of manifest directives without supplying your own, insert either ``manifest: {}`` or ``manifest: null`` in the :file:`galaxy.yml` file and remove any use of ``build_ignore``.
If the default manifest directives do not meet your needs, you can set ``manifest.omit_default_directives`` to a value of ``true`` in :file:`galaxy.yml`. You must then specify a full complement of manifest directives in :file:`galaxy.yml`. The defaults documented above are a good starting point.
Below is an example where the default directives are not included.
.. code-block:: yaml
manifest:
directives:
- include meta/runtime.yml
- include README.md LICENSE
- recursive-include plugins */**.py
- exclude galaxy.yml MANIFEST.json FILES.json <namespace>-<name>-*.tar.gz
- recursive-exclude tests/output **
omit_default_directives: true
.. _signing_collections:
Signing a collection
--------------------------
You can include a GnuPG signature with your collection on a :term:`Pulp 3 Galaxy` server. See `Enabling collection signing <https://galaxyng.netlify.app/config/collection_signing/>`_ for details.
You can manually generate a detached signature for a collection with the ``gpg`` CLI by using the following steps. These steps assume that you have already generated a GPG private key; creating the key is not covered here.
.. code-block:: bash
ansible-galaxy collection build
tar -Oxzf namespace-name-1.0.0.tar.gz MANIFEST.json | gpg --output namespace-name-1.0.0.asc --detach-sign --armor --local-user email@example.com -
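To check the signature locally before uploading, you can verify it against the same ``MANIFEST.json`` stream (a sketch; this verification is optional and not part of the required workflow):

.. code-block:: bash

   tar -Oxzf namespace-name-1.0.0.tar.gz MANIFEST.json | gpg --verify namespace-name-1.0.0.asc -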
.. _trying_collection_locally:
Preparing to publish your collection
====================================
Each time you publish your collection, you must create a :ref:`new version <collection_versions>` on the distribution server. After you publish a version of a collection, you cannot delete or modify that version. To avoid unnecessary extra versions, check your collection for bugs, typos, and other issues locally before publishing:
#. Install the collection locally.
#. Review the locally installed collection before publishing a new version.
Installing your collection locally
----------------------------------
You have two options for installing your collection locally:
* Install your collection locally from the tarball.
* Install your collection locally from your git repository.
Installing your collection locally from the tarball
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
To install your collection locally from the tarball, run ``ansible-galaxy collection install`` and specify the collection tarball. You can optionally specify a location using the ``-p`` flag. For example:
.. code-block:: bash
collection_dir#> ansible-galaxy collection install my_namespace-my_collection-1.0.0.tar.gz -p ./collections
Install the tarball into a directory configured in :ref:`COLLECTIONS_PATHS` so Ansible can easily find and load the collection. If you do not specify a path value, ``ansible-galaxy collection install`` installs the collection in the first path defined in :ref:`COLLECTIONS_PATHS`.
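For example, to make Ansible search the ``./collections`` directory used above, you can point :ref:`COLLECTIONS_PATHS` at it in :file:`ansible.cfg` (a sketch; adjust the paths to your environment):

.. code-block:: ini

    [defaults]
    collections_path = ./collections:~/.ansible/collections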
.. _collections_scm_install:
Installing your collection locally from a git repository
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
To install your collection locally from a git repository, specify the repository and the branch you want to install:
.. code-block:: bash
collection_dir#> ansible-galaxy collection install git+https://github.com/org/repo.git,devel
.. include:: ../shared_snippets/installing_collections_git_repo.txt
Reviewing your collection
-------------------------
Review the collection:
* Run a playbook that uses the modules and plugins in your collection. Verify that new features and functionality work as expected. For examples and more details see :ref:`Using collections <using_collections>`. A minimal sketch of such a playbook follows this list.
* Check the documentation for typos.
* Check that the version number of your tarball is higher than the latest published version on the distribution server or servers.
* If you find any issues, fix them and rebuild the collection tarball.
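As a minimal sketch of a review playbook (the collection, module, and option names are placeholders for your own content):

.. code-block:: yaml

    - name: Exercise my_namespace.my_collection before publishing
      hosts: localhost
      gather_facts: false
      tasks:
        - name: Call a module from the collection
          my_namespace.my_collection.my_module:
            name: test
          register: result

        - name: Show the module output
          ansible.builtin.debug:
            var: result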
.. _collection_versions:
Understanding collection versioning
-----------------------------------
The only way to change a collection is to release a new version. The latest version of a collection (by highest version number) is the version displayed everywhere in Galaxy and Automation Hub. Users can still download older versions.
Follow semantic versioning when setting the version for your collection. In summary:
* Increment the major version number, ``x`` of ``x.y.z``, for an incompatible API change.
* Increment the minor version number, ``y`` of ``x.y.z``, for new functionality in a backwards compatible manner (for example new modules/plugins, parameters, return values).
* Increment the patch version number, ``z`` of ``x.y.z``, for backwards compatible bug fixes.
Read the official `Semantic Versioning <https://semver.org/>`_ documentation for details and examples.
.. _publish_collection:
Publishing your collection
==========================
The last step in distributing your collection is publishing the tarball to Ansible Galaxy, Red Hat Automation Hub, or a privately hosted Automation Hub instance. You can publish your collection in two ways:
* from the command line using the ``ansible-galaxy collection publish`` command
* from the website of the distribution server (Galaxy, Automation Hub) itself
.. _upload_collection_ansible_galaxy:
.. _publish_collection_galaxy_cmd:
Publishing a collection from the command line
---------------------------------------------
To upload the collection tarball from the command line using ``ansible-galaxy``:
.. code-block:: bash
ansible-galaxy collection publish path/to/my_namespace-my_collection-1.0.0.tar.gz
.. note::
This ansible-galaxy command assumes you have retrieved and stored your API token in configuration. See :ref:`galaxy_specify_token` for details.
The ``ansible-galaxy collection publish`` command triggers an import process, just as if you uploaded the collection through the Galaxy website. The command waits until the import process completes before reporting the status back. If you want to continue without waiting for the import result, use the ``--no-wait`` argument and manually look at the import progress in your `My Imports <https://galaxy.ansible.com/my-imports/>`_ page.
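For example, to publish the tarball without waiting for the import to finish (the path and file name follow the example above):

.. code-block:: bash

    ansible-galaxy collection publish path/to/my_namespace-my_collection-1.0.0.tar.gz --no-wait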
.. _upload_collection_galaxy:
Publishing a collection from the website
----------------------------------------
To publish your collection directly on the Galaxy website:
#. Go to the `My Content <https://galaxy.ansible.com/my-content/namespaces>`_ page, and click the **Add Content** button on one of your namespaces.
#. From the **Add Content** dialogue, click **Upload New Collection**, and select the collection archive file from your local filesystem.
When you upload a collection, Ansible always uploads the tarball to the namespace specified in the collection metadata in the ``galaxy.yml`` file, no matter which namespace you select on the website. If you are not an owner of the namespace specified in your collection metadata, the upload request fails.
After Galaxy uploads and accepts a collection, the website shows you the **My Imports** page. This page shows import process information. You can review any errors or warnings about your upload there.
.. seealso::
:ref:`collections`
Learn how to install and use collections.
:ref:`collections_galaxy_meta`
Table of fields used in the :file:`galaxy.yml` file
`Mailing List <https://groups.google.com/group/ansible-devel>`_
The development mailing list
:ref:`communication_irc`
How to join Ansible chat channels

View File

@ -1,30 +0,0 @@
.. _documenting_collections:
***********************
Documenting collections
***********************
Documenting modules and plugins
===============================
Documenting modules is thoroughly documented in :ref:`module_documenting`. Plugins can be documented the same way as modules, that is with ``DOCUMENTATION``, ``EXAMPLES``, and ``RETURN`` blocks.
Documenting roles
=================
To document a role, you have to add a role argument spec by creating a file ``meta/argument_specs.yml`` in your role. See :ref:`role_argument_spec` for details. As an example, you can look at `the argument specs file <https://github.com/sensu/sensu-go-ansible/blob/master/roles/install/meta/argument_specs.yml>`_ of the :ref:`sensu.sensu_go.install role <ansible_collections.sensu.sensu_go.install_role>` on GitHub.
.. _build_collection_docsite:
Build a docsite with antsibull-docs
===================================
You can use `antsibull-docs <https://pypi.org/project/antsibull-docs>`_ to build a Sphinx-based docsite for your collection:
#. Create your collection and make sure you can use it with ansible-core by adding it to your :ref:`COLLECTIONS_PATHS`.
#. Create a directory ``dest`` and run ``antsibull-docs sphinx-init --use-current --dest-dir dest namespace.name``, where ``namespace.name`` is the name of your collection.
#. Go into ``dest`` and run ``pip install -r requirements.txt``. You might want to create a venv and activate it first to avoid installing this globally.
#. Then run ``./build.sh``.
#. Open ``build/html/index.html`` in a browser of your choice.
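Taken together, and assuming your collection is called ``my_namespace.my_collection``, the steps above look roughly like this:

.. code-block:: bash

    antsibull-docs sphinx-init --use-current --dest-dir dest my_namespace.my_collection
    cd dest
    python3 -m venv venv && source venv/bin/activate  # optional, avoids installing globally
    pip install -r requirements.txt
    ./build.sh
    # open build/html/index.html in your browser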
If you want to add additional documentation to your collection next to the plugin, module, and role documentation, see :ref:`collections_doc_dir`.

View File

@ -1,136 +0,0 @@
.. _migrate_to_collection:
***************************************************
Migrating Ansible content to a different collection
***************************************************
When you move content from one collection to another, for example to extract a set of related modules out of ``community.general`` to create a more focused collection, you must make sure the transition is easy for users to follow.
.. contents::
:local:
:depth: 2
Migrating content
=================
Before you start migrating content from one collection to another, look at `Ansible Collection Checklist <https://github.com/ansible-collections/overview/blob/main/collection_requirements.rst>`_.
To migrate content from one collection to another, if the collections are part of the `Ansible distribution <https://github.com/ansible-community/ansible-build-data/blob/main/2.10/ansible.in>`_:
#. Copy content from the source (old) collection to the target (new) collection.
#. Deprecate the module/plugin with ``removal_version`` scheduled for the next major version in ``meta/runtime.yml`` of the old collection. The deprecation must be released after the copied content has been included in a release of the new collection.
#. When the next major release of the old collection is prepared:
* remove the module/plugin from the old collection
* remove the symlink stored in the ``plugins/modules`` directory if appropriate (mainly when removing from ``community.general`` and ``community.network``)
* remove related unit and integration tests
* remove specific module utils
* remove specific documentation fragments if there are any in the old collection
* add a changelog fragment containing entries for ``removed_features`` and ``breaking_changes``; you can see an example of a changelog fragment in this `pull request <https://github.com/ansible-collections/community.general/pull/1304>`_
* change ``meta/runtime.yml`` in the old collection:
* add ``redirect`` to the corresponding module/plugin's entry
* in particular, add ``redirect`` for the removed module utils and documentation fragments if applicable
* remove ``removal_version`` from there
* remove related entries from ``tests/sanity/ignore.txt`` files if they exist
* remove changelog fragments for removed content that are not yet part of the changelog (in other words, do not modify ``changelogs/changelog.yaml`` and do not delete files mentioned in it)
* remove requirements that are no longer required in ``tests/unit/requirements.txt``, ``tests/requirements.yml`` and ``galaxy.yml``
To implement these changes, you need to create at least three PRs:
#. Create a PR against the new collection to copy the content.
#. Deprecate the module/plugin in the old collection.
#. Later create a PR against the old collection to remove the content according to the schedule.
Adding the content to the new collection
----------------------------------------
Create a PR in the new collection to:
#. Copy ALL the related files from the old collection.
#. If it is an action plugin, include the corresponding module with documentation.
#. If it is a module, check if it has a corresponding action plugin that should move with it.
#. Check ``meta/`` for relevant updates to ``runtime.yml`` if it exists.
#. Carefully check the moved ``tests/integration`` and ``tests/units`` and update for FQCN.
#. Review ``tests/sanity/ignore-*.txt`` entries in the old collection.
#. Update ``meta/runtime.yml`` in the old collection.
Removing the content from the old collection
--------------------------------------------
Create a PR against the source collection repository to remove the modules, module_utils, plugins, and docs_fragments related to this migration:
#. If you are removing an action plugin, remove the corresponding module that contains the documentation.
#. If you are removing a module, remove any corresponding action plugin that should stay with it.
#. Remove any entries about removed plugins from ``meta/runtime.yml``. Ensure they are added into the new repo.
#. Remove sanity ignore lines from ``tests/sanity/ignore-*.txt``.
#. Remove associated integration tests from ``tests/integration/targets/`` and unit tests from ``tests/unit/plugins/``.
#. If you are removing content from ``community.general`` or ``community.network``, remove entries from ``.github/BOTMETA.yml``.
#. Carefully review ``meta/runtime.yml`` for any entries you may need to remove or update, in particular deprecated entries.
#. Update ``meta/runtime.yml`` to contain redirects for EVERY PLUGIN, pointing to the new collection name.
.. warning::
Maintainers for the old collection have to make sure that the PR is merged in a way that it does not break user experience and semantic versioning:
#. A new version containing the merged PR must not be released before the collection the content has been moved to has been released again, with that content contained in it. Otherwise the redirects cannot work and users relying on that content will experience breakage.
#. Once 1.0.0 of the collection from which the content has been removed has been released, such PRs can only be merged for a new **major** version (in other words, 2.0.0, 3.0.0, and so on).
Updating BOTMETA.yml
--------------------
The ``BOTMETA.yml``, for example in `community.general collection repository <https://github.com/ansible-collections/community.general/blob/main/.github/BOTMETA.yml>`_, is the source of truth for:
* ansibullbot
If the old and/or new collection has ``ansibullbot``, its ``BOTMETA.yml`` must be updated correspondingly.
Ansibullbot will know how to redirect existing issues and PRs to the new repo. The build process for docs.ansible.com will know where to find the module docs.
.. code-block:: yaml
$modules/monitoring/grafana/grafana_plugin.py:
  migrated_to: community.grafana
$modules/monitoring/grafana/grafana_dashboard.py:
  migrated_to: community.grafana
$modules/monitoring/grafana/grafana_datasource.py:
  migrated_to: community.grafana
$plugins/callback/grafana_annotations.py:
  maintainers: $team_grafana
  labels: monitoring grafana
  migrated_to: community.grafana
$plugins/doc_fragments/grafana.py:
  maintainers: $team_grafana
  labels: monitoring grafana
  migrated_to: community.grafana
`Example PR <https://github.com/ansible/ansible/pull/66981/files>`_
* The ``migrated_to:`` key must be added explicitly for every *file*. You cannot add ``migrated_to`` at the directory level. This is to allow module and plugin webdocs to be redirected to the new collection docs.
* ``migrated_to:`` MUST be added for every:
* module
* plugin
* module_utils
* contrib/inventory script
* You do NOT need to add ``migrated_to`` for:
* Unit tests
* Integration tests
* ReStructured Text docs (anything under ``docs/docsite/rst/``)
* Files that never existed in ``ansible/ansible:devel``
.. seealso::
:ref:`collections`
Learn how to install and use collections.
:ref:`contributing_maintained_collections`
Guidelines for contributing to selected collections
`Mailing List <https://groups.google.com/group/ansible-devel>`_
The development mailing list
:ref:`communication_irc`
How to join Ansible chat channels

View File

@ -1,94 +0,0 @@
.. _collections_shared_resources:
*************************************
Using shared resources in collections
*************************************
Although developing Ansible modules contained in collections is similar to developing standalone Ansible modules, you use shared resources like documentation fragments and module utilities differently in collections. You can use documentation fragments within and across collections. You can use optional module utilities to support multiple versions of ansible-core in your collection. Collections can also depend on other collections.
.. contents::
:local:
:depth: 2
.. _docfragments_collections:
Using documentation fragments in collections
============================================
To include documentation fragments in your collection:
#. Create the documentation fragment: ``plugins/doc_fragments/fragment_name``.
#. Refer to the documentation fragment with its FQCN.
.. code-block:: yaml
extends_documentation_fragment:
- kubernetes.core.k8s_name_options
- kubernetes.core.k8s_auth_options
- kubernetes.core.k8s_resource_options
- kubernetes.core.k8s_scale_options
:ref:`module_docs_fragments` covers the basics for documentation fragments. The `kubernetes.core <https://github.com/ansible-collections/kubernetes.core>`_ collection includes a complete example.
If you use FQCN, you can use documentation fragments from one collection in another collection.
.. _optional_module_utils:
Leveraging optional module utilities in collections
===================================================
Optional module utilities let you adopt the latest features from the most recent ansible-core release in your collection-based modules without breaking your collection on older Ansible versions. With optional module utilities, you can use the latest features when running against the latest versions, while still providing fallback behaviors when running against older versions.
This implementation, widely used in Python programming, wraps optional imports in conditionals or defensive ``try``/``except`` blocks, and implements fallback behaviors for missing imports. Ansible's module payload builder supports these patterns by treating any module_utils import nested in a block (for example, ``if`` or ``try``) as optional. If the requested import cannot be found during the payload build, it is simply omitted from the target payload and the importing code is assumed to handle its absence at runtime. Missing top-level imports of module_utils packages (imports that are not wrapped in a block statement of any kind) will fail the module payload build, and will not execute on the target.
For example, the `ansible.module_utils.common.respawn` package is only available in Ansible 2.11 and higher. The following module code would fail during the payload build on Ansible 2.10 or earlier (as the requested Python module does not exist, and is not wrapped in a block to signal to the payload builder that it can be omitted from the module payload):
.. code-block:: python
from ansible.module_utils.common.respawn import respawn_module
By wrapping the import statement in a ``try`` block, the payload builder will omit the Python module if it cannot be located, and assume that the Ansible module will handle it at runtime:
.. code-block:: python
try:
    from ansible.module_utils.common.respawn import respawn_module
except ImportError:
    respawn_module = None

...

if needs_respawn:
    if respawn_module:
        respawn_module(target)
    else:
        module.fail_json('respawn is not available in Ansible < 2.11, ensure that foopkg is installed')
The optional import behavior also applies to module_utils imported from collections.
.. _collection_dependencies:
Listing collection dependencies
===============================
We recommend that collections work as standalone, independent units, depending only on ansible-core. However, if your collection must depend on features and functionality from another collection, list the other collection or collections under ``dependencies`` in your collection's :file:`galaxy.yml` file. Use the :file:`meta/runtime.yml` file to set the ansible-core version that your collection depends on. For more information on the :file:`galaxy.yml` file, see :ref:`collections_galaxy_meta`.
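For example, a ``dependencies`` entry in :file:`galaxy.yml` that depends on two published collections might look like this (the collection names and version ranges are only illustrative):

.. code-block:: yaml

    dependencies:
      ansible.utils: '>=2.0.0'
      community.general: '>=5.0.0'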
You can use git repositories for collection dependencies during local development and testing. For example:
.. code-block:: yaml
dependencies: {'git@github.com:organization/repo_name.git': 'devel'}
.. warning::
Do not use git repositories as dependencies for published collections. Dependencies for published collections must be other published collections.
.. seealso::
:ref:`collections`
Learn how to install and use collections.
:ref:`contributing_maintained_collections`
Guidelines for contributing to selected collections
`Mailing List <https://groups.google.com/group/ansible-devel>`_
The development mailing list
:ref:`communication_irc`
How to join Ansible chat channels

View File

@ -1,293 +0,0 @@
.. _collection_structure:
********************
Collection structure
********************
A collection is a simple data structure. None of the directories are required unless you have specific content that belongs in one of them. A collection does require a ``galaxy.yml`` file at the root level of the collection. This file contains all of the metadata that Galaxy and other tools need in order to package, build and publish the collection.
.. contents::
:local:
:depth: 2
Collection directories and files
================================
A collection can contain these directories and files:
.. code-block:: shell-session
collection/
├── docs/
├── galaxy.yml
├── meta/
│   └── runtime.yml
├── plugins/
│   ├── modules/
│   │   └── module1.py
│   ├── inventory/
│   └── .../
├── README.md
├── roles/
│   ├── role1/
│   ├── role2/
│   └── .../
├── playbooks/
│   ├── files/
│   ├── vars/
│   ├── templates/
│   └── tasks/
└── tests/
.. note::
* Ansible only accepts ``.md`` extensions for the :file:`README` file and any files in the :file:`/docs` folder.
* See the `ansible-collections <https://github.com/ansible-collections/>`_ GitHub Org for examples of collection structure.
* Not all directories are currently in use. Those are placeholders for future features.
.. _galaxy_yml:
galaxy.yml
----------
A collection must have a ``galaxy.yml`` file that contains the necessary information to build a collection artifact. See :ref:`collections_galaxy_meta` for details.
.. _collections_doc_dir:
docs directory
--------------
Use the ``docs`` folder to describe how to use the roles and plugins the collection provides, role requirements, and so on.
For certified collections, Automation Hub displays documents written in markdown in the main ``docs`` directory with no subdirectories. This will not display on https://docs.ansible.com.
For community collections included in the Ansible PyPI package, docs.ansible.com displays documents written in reStructuredText (.rst) in a ``docsite/rst/`` subdirectory. Define the structure of your extra documentation in ``docs/docsite/extra-docs.yml``:
.. code-block:: yaml
---
sections:
  - title: Scenario Guide
    toctree:
      - scenario_guide
The index page of the documentation for your collection displays the title you define in ``docs/docsite/extra-docs.yml`` with a link to your extra documentation. For an example, see the `community.docker collection repo <https://github.com/ansible-collections/community.docker/tree/main/docs/docsite>`_ and the `community.docker collection documentation <https://docs.ansible.com/ansible/latest/collections/community/docker/index.html>`_.
You can add extra links to your collection index page and plugin pages with the ``docs/docsite/links.yml`` file. This populates the links under `Description and Communications <https://docs.ansible.com/ansible/devel/collections/community/dns/index.html#plugins-in-community-dns>`_ headings as well as links at the end of the individual plugin pages. See the `collection_template links.yml file <https://github.com/ansible-collections/collection_template/blob/main/docs/docsite/links.yml>`_ for a complete description of the structure and use of this file to create links.
Plugin and module documentation
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Keep the specific documentation for plugins and modules embedded as Python docstrings. Use ``ansible-doc`` to view documentation for plugins inside a collection:
.. code-block:: bash
ansible-doc -t lookup my_namespace.my_collection.lookup1
The ``ansible-doc`` command requires the fully qualified collection name (FQCN) to display specific plugin documentation. In this example, ``my_namespace`` is the Galaxy namespace and ``my_collection`` is the collection name within that namespace.
.. note:: The Galaxy namespace of an Ansible collection is defined in the ``galaxy.yml`` file. It can be different from the GitHub organization or repository name.
.. _collections_plugin_dir:
plugins directory
-----------------
Add a 'per plugin type' specific subdirectory here, including ``module_utils`` which is usable not only by modules, but by most plugins by using their FQCN. This is a way to distribute modules, lookups, filters, and so on without having to import a role in every play.
Vars plugins in collections are not loaded automatically, and always require being explicitly enabled by using their fully qualified collection name. See :ref:`enable_vars` for details.
Cache plugins in collections may be used for fact caching, but are not supported for inventory plugins.
.. _collection_module_utils:
module_utils
^^^^^^^^^^^^
When coding with ``module_utils`` in a collection, the Python ``import`` statement needs to take into account the FQCN along with the ``ansible_collections`` convention. The resulting Python import will look like ``from ansible_collections.{namespace}.{collection}.plugins.module_utils.{util} import {something}``
The following example snippets show a Python and PowerShell module using both default Ansible ``module_utils`` and
those provided by a collection. In this example the namespace is ``community``, the collection is ``test_collection``.
In the Python example the ``module_util`` in question is called ``qradar`` such that the FQCN is
``community.test_collection.plugins.module_utils.qradar``:
.. code-block:: python
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.common.text.converters import to_text
from ansible.module_utils.six.moves.urllib.parse import urlencode, quote_plus
from ansible.module_utils.six.moves.urllib.error import HTTPError
from ansible_collections.community.test_collection.plugins.module_utils.qradar import QRadarRequest
argspec = dict(
    name=dict(required=True, type='str'),
    state=dict(choices=['present', 'absent'], required=True),
)

module = AnsibleModule(
    argument_spec=argspec,
    supports_check_mode=True
)

qradar_request = QRadarRequest(
    module,
    headers={"Content-Type": "application/json"},
    not_rest_data_keys=['state']
)
Note that importing something from an ``__init__.py`` file requires using the file name:
.. code-block:: python
from ansible_collections.namespace.collection_name.plugins.callback.__init__ import CustomBaseClass
In the PowerShell example the ``module_util`` in question is called ``hyperv`` such that the FQCN is
``community.test_collection.plugins.module_utils.hyperv``:
.. code-block:: powershell
#!powershell
#AnsibleRequires -CSharpUtil Ansible.Basic
#AnsibleRequires -PowerShell ansible_collections.community.test_collection.plugins.module_utils.hyperv
$spec = @{
    name = @{ required = $true; type = "str" }
    state = @{ required = $true; choices = @("present", "absent") }
}
$module = [Ansible.Basic.AnsibleModule]::Create($args, $spec)
Invoke-HyperVFunction -Name $module.Params.name
$module.ExitJson()
.. _collections_roles_dir:
roles directory
----------------
Collection roles are mostly the same as existing roles, but with a couple of limitations:
- Role names are now limited to contain only lowercase alphanumeric characters, plus ``_``, and must start with an alphabetic character.
- Roles in a collection cannot contain plugins any more. Plugins must live in the collection ``plugins`` directory tree. Each plugin is accessible to all roles in the collection.
The directory name of the role is used as the role name. Therefore, the directory name must comply with the above role name rules. The collection import into Galaxy will fail if a role name does not comply with these rules.
You can migrate 'traditional roles' into a collection but they must follow the rules above. You may need to rename roles if they don't conform. You will have to move or link any role-based plugins to the collection specific directories.
.. note::
For roles imported into Galaxy directly from a GitHub repository, setting the ``role_name`` value in the role's metadata overrides the role name used by Galaxy. For collections, that value is ignored. When importing a collection, Galaxy uses the role directory as the name of the role and ignores the ``role_name`` metadata value.
playbooks directory
--------------------
In prior releases, you could reference playbooks in this directory using the full path to the playbook file from the command line.
In ansible-core 2.11 and later, you can use the FQCN, ``namespace.collection.playbook`` (with or without extension), to reference the playbooks from the command line or from ``import_playbook``.
This will keep the playbook in 'collection context', as if you had added ``collections: [ namespace.collection ]`` to it.
You can have most of the subdirectories you would expect, such as ``files/``, ``vars/`` or ``templates/``, but no ``roles/`` since those are handled already in the collection.
Also, playbooks within a collection follow the same guidelines as any playbooks except for these few adjustments:
- Directory: It must be in the ``playbooks/`` directory.
- Hosts: The host should be defined as a variable so the users of a playbook do not mistakenly run the plays against their entire inventory (if the host is set to all). For example - ``hosts: '{{target|default("all")}}'``.
To run the plays, users can now use such commands as ``ansible-playbook -e 'target=webservers'`` or ``ansible-playbook --limit webservers``. Either way, the collection owner should document their playbooks and how to use them in the ``docs/`` folder or ``README`` file.
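As a minimal sketch of a distributable playbook that follows these adjustments (the collection, module, and file names are placeholders), which a user could run with ``ansible-playbook my_namespace.my_collection.configure -e 'target=webservers'``:

.. code-block:: yaml

    # playbooks/configure.yml inside my_namespace.my_collection
    - name: Configure the selected hosts
      hosts: '{{ target | default("all") }}'
      gather_facts: false
      tasks:
        - name: Use a module shipped in the same collection
          my_namespace.my_collection.my_module:
            name: example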
.. _developing_collections_tests_directory:
tests directory
----------------
Ansible Collections are tested much like Ansible itself, using the ``ansible-test`` utility which is released as part of Ansible, version 2.9.0 and newer. Because Ansible Collections are tested with the same tooling as Ansible itself, all Ansible developer documentation for testing applies to authoring collection tests, with one key concept to keep in mind.
See :ref:`testing_collections` for specific information on how to test collections with ``ansible-test``.
The :ref:`developing_testing` documentation includes content that applies to running Ansible from source code through a git clone, which is typical of an Ansible developer. However, an Ansible Collection author typically runs Ansible from a stable release rather than from source, and running Ansible from source is not necessary to create collections. Therefore, when the :ref:`developing_testing` documentation refers to ``ansible-test`` binary paths, command completion, or environment variables, keep in mind that this is not needed for Ansible Collection testing; installing the stable release of Ansible that contains ``ansible-test`` is expected to set those things up for you.
.. _meta_runtime_yml:
meta directory and runtime.yml
------------------------------
A collection can store some additional metadata in a ``runtime.yml`` file in the collection's ``meta`` directory. The ``runtime.yml`` file supports the top level keys:
- *requires_ansible*:
The version of Ansible Core (ansible-core) required to use the collection. Multiple versions can be separated with a comma.
.. code:: yaml
requires_ansible: ">=2.10,<2.11"
.. note:: although the version is a `PEP440 Version Specifier <https://www.python.org/dev/peps/pep-0440/#version-specifiers>`_ under the hood, Ansible deviates from PEP440 behavior by truncating prerelease segments from the Ansible version. This means that Ansible 2.11.0b1 is compatible with something that ``requires_ansible: ">=2.11"``.
- *plugin_routing*:
Content in a collection that Ansible needs to load from another location or that has been deprecated/removed.
The top level keys of ``plugin_routing`` are types of plugins, with individual plugin names as subkeys.
To define a new location for a plugin, set the ``redirect`` field to another name.
To deprecate a plugin, use the ``deprecation`` field to provide a custom warning message and the removal version or date. If the plugin has been renamed or moved to a new location, the ``redirect`` field should also be provided. If a plugin is being removed entirely, ``tombstone`` can be used for the fatal error message and removal version or date.
.. code:: yaml
plugin_routing:
  inventory:
    kubevirt:
      redirect: community.general.kubevirt
    my_inventory:
      tombstone:
        removal_version: "2.0.0"
        warning_text: my_inventory has been removed. Please use other_inventory instead.
  modules:
    my_module:
      deprecation:
        removal_date: "2021-11-30"
        warning_text: my_module will be removed in a future release of this collection. Use another.collection.new_module instead.
      redirect: another.collection.new_module
    podman_image:
      redirect: containers.podman.podman_image
  module_utils:
    ec2:
      redirect: amazon.aws.ec2
    util_dir.subdir.my_util:
      redirect: namespace.name.my_util
- *import_redirection*
A mapping of names for Python import statements and their redirected locations.
.. code:: yaml
import_redirection:
  ansible.module_utils.old_utility:
    redirect: ansible_collections.namespace_name.collection_name.plugins.module_utils.new_location
- *action_groups*
A mapping of groups and the list of action plugin and module names they contain. They may also have a special 'metadata' dictionary in the list, which can be used to include actions from other groups.
.. code:: yaml
action_groups:
  groupname:
    # The special metadata dictionary. All action/module names should be strings.
    - metadata:
        extend_group:
          - another.collection.groupname
          - another_group
    - my_action
  another_group:
    - my_module
    - another.collection.another_module
.. seealso::
:ref:`distributing_collections`
Learn how to package and publish your collection
:ref:`contributing_maintained_collections`
Guidelines for contributing to selected collections
`Mailing List <https://groups.google.com/group/ansible-devel>`_
The development mailing list
:ref:`communication_irc`
How to join Ansible chat channels

View File

@ -1,99 +0,0 @@
.. _testing_collections:
*******************
Testing collections
*******************
Testing your collection ensures that your code works well and integrates well with the rest of the Ansible ecosystem. Your collection should pass the sanity tests for Ansible code. You should also add unit tests to cover the code in your collection and integration tests to cover the interactions between your collection and ansible-core.
.. contents::
:local:
:depth: 2
Testing tools
=============
The main tool for testing collections is ``ansible-test``, Ansible's testing tool described in :ref:`developing_testing` and provided by both the ``ansible`` and ``ansible-core`` packages.
You can run several sanity tests, as well as run unit and integration tests for plugins using ``ansible-test``. When you test collections, test against the ansible-core version(s) you are targeting.
You must always execute ``ansible-test`` from the root directory of a collection. You can run ``ansible-test`` in Docker containers without installing any special requirements. The Ansible team uses this approach in Azure Pipelines both in the ansible/ansible GitHub repository and in the large community collections such as `community.general <https://github.com/ansible-collections/community.general/>`_ and `community.network <https://github.com/ansible-collections/community.network/>`_. The examples below demonstrate running tests in Docker containers.
Sanity tests
------------
To run all sanity tests:
.. code-block:: shell-session
ansible-test sanity --docker default -v
See :ref:`testing_sanity` for more information. See the :ref:`full list of sanity tests <all_sanity_tests>` for details on the sanity tests and how to fix identified issues.
Adding unit tests
-----------------
You must place unit tests in the appropriate ``tests/unit/plugins/`` directory. For example, you would place tests for ``plugins/module_utils/foo/bar.py`` in ``tests/unit/plugins/module_utils/foo/test_bar.py`` or ``tests/unit/plugins/module_utils/foo/bar/test_bar.py``. For examples, see the `unit tests in community.general <https://github.com/ansible-collections/community.general/tree/main/tests/unit/>`_.
To run all unit tests for all supported Python versions:
.. code-block:: shell-session
ansible-test units --docker default -v
To run all unit tests only for a specific Python version:
.. code-block:: shell-session
ansible-test units --docker default -v --python 3.6
To run only a specific unit test:
.. code-block:: shell-session
ansible-test units --docker default -v --python 3.6 tests/unit/plugins/module_utils/foo/test_bar.py
You can specify Python requirements in the ``tests/unit/requirements.txt`` file. See :ref:`testing_units` for more information, especially on fixture files.
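As a minimal sketch of such a unit test (the collection name and the ``normalize_name`` helper are hypothetical; replace them with your own code):

.. code-block:: python

    # tests/unit/plugins/module_utils/foo/test_bar.py
    from ansible_collections.my_namespace.my_collection.plugins.module_utils.foo import bar


    def test_normalize_name_strips_whitespace():
        # assumes bar exposes a normalize_name() helper
        assert bar.normalize_name('  web01  ') == 'web01'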
.. _collections_adding_integration_test:
Adding integration tests
------------------------
You must place integration tests in the appropriate ``tests/integration/targets/`` directory. For module integration tests, you can use the module name alone. For example, you would place integration tests for ``plugins/modules/foo.py`` in a directory called ``tests/integration/targets/foo/``. For non-module plugin integration tests, you must add the plugin type to the directory name. For example, you would place integration tests for ``plugins/connections/bar.py`` in a directory called ``tests/integration/targets/connection_bar/``. For lookup plugins, the directory must be called ``lookup_foo``, for inventory plugins, ``inventory_foo``, and so on.
You can write two different kinds of integration tests:
* Ansible role tests run with ``ansible-playbook`` and validate various aspects of the module. They can depend on other integration tests (usually named ``prepare_bar`` or ``setup_bar``, which prepare a service or install a requirement named ``bar`` in order to test module ``foo``) to set up required resources, such as installing required libraries or setting up server services.
* ``runme.sh`` tests run directly as scripts. They can set up inventory files, and execute ``ansible-playbook`` or ``ansible-inventory`` with various settings.
For examples, see the `integration tests in community.general <https://github.com/ansible-collections/community.general/tree/main/tests/integration/targets/>`_. See also :ref:`testing_integration` for more details.
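As a minimal sketch of the first kind of test, a role-based target for a hypothetical ``my_namespace.my_collection.foo`` module might contain:

.. code-block:: yaml

    # tests/integration/targets/foo/tasks/main.yml
    - name: Create a test resource
      my_namespace.my_collection.foo:
        name: ansible-test-resource
        state: present
      register: result

    - name: Check that the module reported a change
      assert:
        that:
          - result is changed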
Since integration tests can install requirements, and set up, start and stop services, we recommend running them in Docker containers or otherwise restricted environments whenever possible. By default, ``ansible-test`` supports Docker images for several operating systems. See the `list of supported docker images <https://github.com/ansible/ansible/blob/devel/test/lib/ansible_test/_data/completion/docker.txt>`_ for all options. Use the ``default`` image mainly for platform-independent integration tests, such as those for cloud modules. The following examples use the ``fedora35`` image.
To execute all integration tests for a collection:
.. code-block:: shell-session
ansible-test integration --docker fedora35 -v
If you want more detailed output, run the command with ``-vvv`` instead of ``-v``. Alternatively, specify ``--retry-on-error`` to automatically re-run failed tests with higher verbosity levels.
To execute only the integration tests in a specific directory:
.. code-block:: shell-session
ansible-test integration --docker fedora35 -v connection_bar
You can specify multiple target names. Each target name is the name of a directory in ``tests/integration/targets/``.
.. seealso::
:ref:`developing_testing`
More resources on testing Ansible
:ref:`contributing_maintained_collections`
Guidelines for contributing to selected collections
`Mailing List <https://groups.google.com/group/ansible-devel>`_
The development mailing list
:ref:`communication_irc`
How to join Ansible chat channels

View File

@ -1,22 +0,0 @@
***************************
Developing ``ansible-core``
***************************
Although ``ansible-core`` (the code hosted in the `ansible/ansible repository <https://github.com/ansible/ansible>`_ on GitHub) includes a few plugins that can be swapped out by the playbook directives or configuration, much of the code there is not modular. The documents here give insight into how the parts of ``ansible-core`` work together.
.. toctree::
:maxdepth: 1
core_branches_and_tags
developing_program_flow_modules
.. seealso::
:ref:`developing_api`
Learn about the Python API for task execution
:ref:`developing_plugins`
Learn about developing plugins
`Mailing List <https://groups.google.com/group/ansible-devel>`_
The development mailing list
`irc.libera.chat <https://libera.chat>`_
#ansible-devel IRC chat channel

View File

@ -1,514 +0,0 @@
.. _developing_inventory:
****************************
Developing dynamic inventory
****************************
Ansible can pull inventory information from dynamic sources, including cloud sources, by using the supplied :ref:`inventory plugins <inventory_plugins>`. For details about how to pull inventory information, see :ref:`dynamic_inventory`. If the source you want is not currently covered by existing plugins, you can create your own inventory plugin as with any other plugin type.
In previous versions, you had to create a script or program that could output JSON in the correct format when invoked with the proper arguments.
You can still use and write inventory scripts, as we ensured backwards compatibility through the :ref:`script inventory plugin <script_inventory>`
and there is no restriction on the programming language used.
If you choose to write a script, however, you will need to implement some features yourself such as caching, configuration management, dynamic variable and group composition, and so on.
If you use :ref:`inventory plugins <inventory_plugins>` instead, you can use the Ansible codebase and add these common features automatically.
.. contents:: Topics
:local:
.. _inventory_sources:
Inventory sources
=================
Inventory sources are the input strings that inventory plugins work with.
An inventory source can be a path to a file or to a script, or it can be raw data that the plugin can interpret.
The table below shows some examples of inventory plugins and the source types that you can pass to them with ``-i`` on the command line.
+--------------------------------------------+-----------------------------------------+
| Plugin | Source |
+--------------------------------------------+-----------------------------------------+
| :ref:`host list <host_list_inventory>` | A comma-separated list of hosts |
+--------------------------------------------+-----------------------------------------+
| :ref:`yaml <yaml_inventory>` | Path to a YAML format data file |
+--------------------------------------------+-----------------------------------------+
| :ref:`constructed <constructed_inventory>` | Path to a YAML configuration file |
+--------------------------------------------+-----------------------------------------+
| :ref:`ini <ini_inventory>` | Path to an INI formatted data file |
+--------------------------------------------+-----------------------------------------+
| :ref:`virtualbox <virtualbox_inventory>` | Path to a YAML configuration file |
+--------------------------------------------+-----------------------------------------+
| :ref:`script plugin <script_inventory>` | Path to an executable that outputs JSON |
+--------------------------------------------+-----------------------------------------+
.. _developing_inventory_inventory_plugins:
Inventory plugins
=================
Like most plugin types (except modules), inventory plugins must be developed in Python. They execute on the controller and should therefore adhere to the :ref:`control_node_requirements`.
Most of the documentation in :ref:`developing_plugins` also applies here. You should read that document first for a general understanding and then come back to this document for specifics on inventory plugins.
Normally, inventory plugins are executed at the start of a run, and before the playbooks, plays, or roles are loaded.
However, you can use the ``meta: refresh_inventory`` task to clear the current inventory and execute the inventory plugins again, and this task will generate a new inventory.
If you use the persistent cache, inventory plugins can also use the configured cache plugin to store and retrieve data. Caching inventory avoids making repeated and costly external calls.
.. _developing_an_inventory_plugin:
Developing an inventory plugin
------------------------------
The first thing you want to do is use the base class:
.. code-block:: python
from ansible.plugins.inventory import BaseInventoryPlugin
class InventoryModule(BaseInventoryPlugin):

    NAME = 'myplugin'  # used internally by Ansible; it should match the file name but this is not required
If the inventory plugin is in a collection, the NAME should be in the 'namespace.collection_name.myplugin' format. The base class has a couple of methods that each plugin should implement and a few helpers for parsing the inventory source and updating the inventory.
After you have the basic plugin working, you can incorporate other features by adding more base classes:
.. code-block:: python
from ansible.plugins.inventory import BaseInventoryPlugin, Constructable, Cacheable
class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):

    NAME = 'myplugin'
For the bulk of the work in a plugin, we mostly want to deal with two methods: ``verify_file`` and ``parse``.
.. _inventory_plugin_verify_file:
verify_file method
^^^^^^^^^^^^^^^^^^
Ansible uses this method to quickly determine if the inventory source is usable by the plugin. The determination does not need to be 100% accurate, as there might be an overlap in what plugins can handle and by default Ansible will try the enabled plugins as per their sequence.
.. code-block:: python
def verify_file(self, path):
    ''' return true/false if this is possibly a valid file for this plugin to consume '''
    valid = False
    if super(InventoryModule, self).verify_file(path):
        # base class verifies that file exists and is readable by current user
        if path.endswith(('virtualbox.yaml', 'virtualbox.yml', 'vbox.yaml', 'vbox.yml')):
            valid = True
    return valid
In the above example, from the :ref:`virtualbox inventory plugin <virtualbox_inventory>`, we screen for specific file name patterns to avoid attempting to consume any valid YAML file. You can add any type of condition here, but the most common one is 'extension matching'. If you implement extension matching for YAML configuration files, the path suffix <plugin_name>.<yml|yaml> should be accepted. All valid extensions should be documented in the plugin description.
The following is another example that does not use a 'file' but the inventory source string itself,
from the :ref:`host list <host_list_inventory>` plugin:
.. code-block:: python
def verify_file(self, path):
    ''' don't call base class as we don't expect a path, but a host list '''
    host_list = path
    valid = False
    b_path = to_bytes(host_list, errors='surrogate_or_strict')
    if not os.path.exists(b_path) and ',' in host_list:
        # the path does NOT exist and there is a comma to indicate this is a 'host list'
        valid = True
    return valid
This method is just to expedite the inventory process and avoid unnecessary parsing of sources that are easy to filter out before causing a parse error.
.. _inventory_plugin_parse:
parse method
^^^^^^^^^^^^
This method does the bulk of the work in the plugin.
It takes the following parameters:
* inventory: inventory object with existing data and the methods to add hosts/groups/variables to inventory
* loader: Ansible's DataLoader. The DataLoader can read files, auto load JSON/YAML and decrypt vaulted data, and cache read files.
* path: string with inventory source (this is usually a path, but is not required)
* cache: indicates whether the plugin should use or avoid caches (cache plugin and/or loader)
The base class does some minimal assignment for reuse in other methods.
.. code-block:: python
def parse(self, inventory, loader, path, cache=True):

    self.loader = loader
    self.inventory = inventory
    self.templar = Templar(loader=loader)
It is up to the plugin now to parse the provided inventory source and translate it into Ansible inventory.
To facilitate this, the example below uses a few helper functions:
.. code-block:: python
NAME = 'myplugin'

def parse(self, inventory, loader, path, cache=True):

    # call base method to ensure properties are available for use with other helper methods
    super(InventoryModule, self).parse(inventory, loader, path, cache)

    # this method will parse 'common format' inventory sources and
    # update any options declared in DOCUMENTATION as needed
    config = self._read_config_data(path)

    # if NOT using _read_config_data you should call set_options directly,
    # to process any defined configuration for this plugin,
    # if you don't define any options you can skip
    #self.set_options()

    # example consuming options from inventory source
    mysession = apilib.session(user=self.get_option('api_user'),
                               password=self.get_option('api_pass'),
                               server=self.get_option('api_server'))

    # make requests to get data to feed into inventory
    mydata = mysession.getitall()

    # parse data and create inventory objects:
    for colo in mydata:
        for server in mydata[colo]['servers']:
            self.inventory.add_host(server['name'])
            self.inventory.set_variable(server['name'], 'ansible_host', server['external_ip'])
The specifics will vary depending on API and structure returned. Remember that if you get an inventory source error or any other issue, you should ``raise AnsibleParserError`` to let Ansible know that the source was invalid or the process failed.
For examples on how to implement an inventory plugin, see the source code here:
`lib/ansible/plugins/inventory <https://github.com/ansible/ansible/tree/devel/lib/ansible/plugins/inventory>`_.
.. _inventory_object:
inventory object
^^^^^^^^^^^^^^^^
The ``inventory`` object passed to ``parse`` has helpful methods for populating inventory.
``add_group`` adds a group to inventory if it doesn't already exist. It takes the group name as the only positional argument.
``add_child`` adds a group or host that exists in inventory to a parent group in inventory. It takes two positional arguments, the name of the parent group and the name of the child group or host.
``add_host`` adds a host to inventory if it doesn't already exist, optionally to a specific group. It takes the host name as the first argument and accepts two optional keyword arguments, ``group`` and ``port``. ``group`` is the name of a group in inventory, and ``port`` is an integer.
``set_variable`` adds a variable to a group or host in inventory. It takes three positional arguments: the name of the group or host, the name of the variable, and the value of the variable.
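A minimal sketch that combines these methods inside ``parse`` (the group, host, and address values are invented):

.. code-block:: python

    # assumes this runs inside parse(), after calling the base class parse()
    self.inventory.add_group('webservers')
    self.inventory.add_child('all', 'webservers')
    self.inventory.add_host('web01.example.com', group='webservers', port=22)
    self.inventory.set_variable('web01.example.com', 'ansible_host', '192.0.2.10')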
To create groups and variables using Jinja2 expressions, see the section on implementing ``constructed`` features below.
To see other inventory object methods, see the source code here:
`lib/ansible/inventory/data.py <https://github.com/ansible/ansible/tree/devel/lib/ansible/inventory/data.py>`_.
.. _inventory_plugin_caching:
inventory cache
^^^^^^^^^^^^^^^
To cache the inventory, extend the inventory plugin documentation with the inventory_cache documentation fragment and use the Cacheable base class.
.. code-block:: yaml
extends_documentation_fragment:
- inventory_cache
.. code-block:: python
class InventoryModule(BaseInventoryPlugin, Constructable, Cacheable):

    NAME = 'myplugin'
Next, load the cache plugin specified by the user to read from and update the cache. If your inventory plugin uses YAML-based configuration files and the ``_read_config_data`` method, the cache plugin is loaded within that method. If your inventory plugin does not use ``_read_config_data``, you must load the cache explicitly with ``load_cache_plugin``.
.. code-block:: python
NAME = 'myplugin'

def parse(self, inventory, loader, path, cache=True):
    super(InventoryModule, self).parse(inventory, loader, path)
    self.load_cache_plugin()
Before using the cache plugin, you must retrieve a unique cache key by using the ``get_cache_key`` method. This task needs to be done by all inventory modules using the cache, so that you don't use/overwrite other parts of the cache.
.. code-block:: python
def parse(self, inventory, loader, path, cache=True):
    super(InventoryModule, self).parse(inventory, loader, path)
    self.load_cache_plugin()
    cache_key = self.get_cache_key(path)
Now that you've enabled caching, loaded the correct plugin, and retrieved a unique cache key, you can set up the flow of data between the cache and your inventory using the ``cache`` parameter of the ``parse`` method. This value comes from the inventory manager and indicates whether the inventory is being refreshed (such as by the ``--flush-cache`` or the meta task ``refresh_inventory``). Although the cache shouldn't be used to populate the inventory when being refreshed, the cache should be updated with the new inventory if the user has enabled caching. You can use ``self._cache`` like a dictionary. The following pattern allows refreshing the inventory to work in conjunction with caching.
.. code-block:: python
def parse(self, inventory, loader, path, cache=True):
    super(InventoryModule, self).parse(inventory, loader, path)
    self.load_cache_plugin()
    cache_key = self.get_cache_key(path)

    # cache may be True or False at this point to indicate if the inventory is being refreshed
    # get the user's cache option too to see if we should save the cache if it is changing
    user_cache_setting = self.get_option('cache')

    # read if the user has caching enabled and the cache isn't being refreshed
    attempt_to_read_cache = user_cache_setting and cache
    # update if the user has caching enabled and the cache is being refreshed; update this value to True if the cache has expired below
    cache_needs_update = user_cache_setting and not cache

    # attempt to read the cache if inventory isn't being refreshed and the user has caching enabled
    if attempt_to_read_cache:
        try:
            results = self._cache[cache_key]
        except KeyError:
            # This occurs if the cache_key is not in the cache or if the cache_key expired, so the cache needs to be updated
            cache_needs_update = True
    if not attempt_to_read_cache or cache_needs_update:
        # parse the provided inventory source
        results = self.get_inventory()
    if cache_needs_update:
        self._cache[cache_key] = results

    # submit the parsed data to the inventory object (add_host, set_variable, etc)
    self.populate(results)
After the ``parse`` method is complete, the contents of ``self._cache`` are used to set the cache plugin if the contents of the cache have changed.
You have three other cache methods available:
- ``set_cache_plugin`` forces the cache plugin to be set with the contents of ``self._cache``, before the ``parse`` method completes
- ``update_cache_if_changed`` sets the cache plugin only if ``self._cache`` has been modified, before the ``parse`` method completes
- ``clear_cache`` flushes the cache, ultimately by calling the cache plugin's ``flush()`` method, whose implementation is dependent upon the particular cache plugin in use. Note that if the user is using the same cache backend for facts and inventory, both will get flushed. To avoid this, the user can specify a distinct cache backend in their inventory plugin configuration.
constructed features
^^^^^^^^^^^^^^^^^^^^
Inventory plugins can create host variables and groups from Jinja2 expressions and variables by using features from the ``constructed`` inventory plugin. To do this, use the ``Constructable`` base class and extend the inventory plugin's documentation with the ``constructed`` documentation fragment.
.. code-block:: yaml
extends_documentation_fragment:
- constructed
.. code-block:: python
class InventoryModule(BaseInventoryPlugin, Constructable):

    NAME = 'ns.coll.myplugin'
There are three main options in the ``constructed`` documentation fragment:
``compose`` creates variables using Jinja2 expressions. This is implemented by calling the ``_set_composite_vars`` method.
``keyed_groups`` creates groups of hosts based on variable values. This is implemented by calling the ``_add_host_to_keyed_groups`` method.
``groups`` creates groups based on Jinja2 conditionals. This is implemented by calling the ``_add_host_to_composed_groups`` method.
Each method should be called for every host added to inventory. Three positional arguments are required: the constructed option, a dictionary of variables, and a host name. Calling the method ``_set_composite_vars`` first will allow ``keyed_groups`` and ``groups`` to use the composed variables.
By default, undefined variables are ignored. This is permitted by default for ``compose`` so you can make the variable definitions depend on variables that will be populated later in a play from other sources. For groups, it allows using variables that are not always present without having to use the ``default`` filter. To support configuring undefined variables to be an error, pass the constructed option ``strict`` to each of the methods as a keyword argument.
``keyed_groups`` and ``groups`` use any variables already associated with the host (for example, from an earlier inventory source). ``_add_host_to_keyed_groups`` and ``_add_host_to_composed_groups`` can turn this off by passing the keyword argument ``fetch_hostvars``.
Here is an example using all three methods:
.. code-block:: python
def add_host(self, hostname, host_vars):
    self.inventory.add_host(hostname, group='all')

    for var_name, var_value in host_vars.items():
        self.inventory.set_variable(hostname, var_name, var_value)

    strict = self.get_option('strict')

    # Add variables created by the user's Jinja2 expressions to the host
    self._set_composite_vars(self.get_option('compose'), host_vars, hostname, strict=True)

    # Create user-defined groups using variables and Jinja2 conditionals
    self._add_host_to_composed_groups(self.get_option('groups'), host_vars, hostname, strict=strict)
    self._add_host_to_keyed_groups(self.get_option('keyed_groups'), host_vars, hostname, strict=strict)
By default, group names created with ``_add_host_to_composed_groups()`` and ``_add_host_to_keyed_groups()`` are valid Python identifiers. Invalid characters are replaced with an underscore ``_``. A plugin can change the sanitization used for the constructed features by setting ``self._sanitize_group_name`` to a new function. The core engine also does sanitization, so if the custom function is less strict it should be used in conjunction with the configuration setting ``TRANSFORM_INVALID_GROUP_CHARS``.
.. code-block:: python
from ansible.inventory.group import to_safe_group_name
class InventoryModule(BaseInventoryPlugin, Constructable):
NAME = 'ns.coll.myplugin'
@staticmethod
def custom_sanitizer(name):
return to_safe_group_name(name, replacer='')
def parse(self, inventory, loader, path, cache=True):
super(InventoryModule, self).parse(inventory, loader, path)
# Use the custom sanitizer for group names created by the constructed features
self._sanitize_group_name = self.custom_sanitizer
.. _inventory_source_common_format:
Common format for inventory sources
-----------------------------------
To simplify development, most plugins use a standard YAML-based configuration file as the inventory source. The file has only one required field, ``plugin``, which should contain the name of the plugin that is expected to consume the file.
Depending on other common features used, you might need other fields, and you can add custom options in each plugin as required.
For example, if you use the integrated caching, ``cache_plugin``, ``cache_timeout`` and other cache-related fields could be present.
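To keep a plugin from claiming files that belong to other plugins, it is common to override ``verify_file`` and check for a conventional file name suffix in addition to relying on the ``plugin`` field. The following is a minimal sketch; the ``myplugin.yml`` and ``myplugin.yaml`` suffixes are an assumed convention for this example, not a requirement.

.. code-block:: python

   def verify_file(self, path):
       # Only claim inventory sources that look like this plugin's configuration file.
       if super(InventoryModule, self).verify_file(path):
           return path.endswith(('myplugin.yml', 'myplugin.yaml'))
       return False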
.. _inventory_development_auto:
The 'auto' plugin
-----------------
From Ansible 2.5 onwards, we include the :ref:`auto inventory plugin <auto_inventory>` and enable it by default. If the ``plugin`` field in your standard configuration file matches the name of your inventory plugin, the ``auto`` inventory plugin will load your plugin. The 'auto' plugin makes it easier to use your plugin without having to update configurations.
.. _inventory_scripts:
.. _developing_inventory_scripts:
Inventory scripts
=================
Even though we now have inventory plugins, we still support inventory scripts, not only for backwards compatibility but also to let users write them in other programming languages.
.. _inventory_script_conventions:
Inventory script conventions
----------------------------
Inventory scripts must accept the ``--list`` and ``--host <hostname>`` arguments. Although other arguments are allowed, Ansible will not use them.
Such arguments might still be useful for executing the scripts directly.
When the script is called with the single argument ``--list``, the script must output to stdout a JSON object that contains all the groups to be managed. Each group's value should be either an object containing a list of hosts, any child groups, and optional group variables, or simply a list of hosts:
.. code-block:: json
{
"group001": {
"hosts": ["host001", "host002"],
"vars": {
"var1": true
},
"children": ["group002"]
},
"group002": {
"hosts": ["host003","host004"],
"vars": {
"var2": 500
},
"children":[]
}
}
If any of the elements of a group are empty, they may be omitted from the output.
When called with the argument ``--host <hostname>`` (where <hostname> is a host from above), the script must print a JSON object, either empty or containing variables to make them available to templates and playbooks. For example:
.. code-block:: json
{
"VAR001": "VALUE",
"VAR002": "VALUE"
}
Printing variables is optional. If the script does not print variables, it should print an empty JSON object.
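Putting these conventions together, a minimal inventory script might look like the sketch below. It is deliberately simplistic, returning a hard-coded group; a real script would query an external system instead.

.. code-block:: python

   #!/usr/bin/env python
   import argparse
   import json


   def main():
       parser = argparse.ArgumentParser()
       parser.add_argument('--list', action='store_true')
       parser.add_argument('--host')
       args = parser.parse_args()

       if args.host:
           # This example has no per-host variables, so return an empty object.
           print(json.dumps({}))
       else:
           # --list: return every group to be managed.
           print(json.dumps({
               'group001': {
                   'hosts': ['host001', 'host002'],
                   'vars': {'var1': True},
               },
           }))


   if __name__ == '__main__':
       main()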
.. _inventory_script_tuning:
Tuning the external inventory script
------------------------------------
.. versionadded:: 1.3
The stock inventory script system mentioned above works for all versions of Ansible, but calling ``--host`` for every host can be rather inefficient, especially if it involves API calls to a remote subsystem.
To avoid this inefficiency, if the inventory script returns a top-level element called "_meta", it is possible to return all the host variables in a single script execution. When this meta element contains a value for "hostvars", the inventory script will not be invoked with ``--host`` for each host. This behavior results in a significant performance increase for large numbers of hosts.
The data to be added to the top-level JSON object looks like this:
.. code-block:: text
{
# results of inventory script as above go here
# ...
"_meta": {
"hostvars": {
"host001": {
"var001" : "value"
},
"host002": {
"var002": "value"
}
}
}
}
To satisfy the requirements of using ``_meta`` and prevent Ansible from calling your inventory script with ``--host``, you must at least populate ``_meta`` with an empty ``hostvars`` object.
For example:
.. code-block:: text
{
# results of inventory script as above go here
# ...
"_meta": {
"hostvars": {}
}
}
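As a sketch, the ``--list`` branch of the minimal script shown earlier could return ``_meta`` alongside the groups so that Ansible never calls the script with ``--host``. The host variables here are placeholders.

.. code-block:: python

   import json


   def list_inventory():
       # Returning hostvars under _meta avoids one --host invocation per host.
       return {
           'group001': {'hosts': ['host001', 'host002']},
           '_meta': {
               'hostvars': {
                   'host001': {'var001': 'value'},
                   'host002': {'var002': 'value'},
               },
           },
       }


   print(json.dumps(list_inventory()))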
.. _replacing_inventory_ini_with_dynamic_provider:
If you intend to replace an existing static inventory file with an inventory script, it must return a JSON object which contains an 'all' group that includes every host in the inventory as a member and every group in the inventory as a child. It should also include an 'ungrouped' group which contains all hosts which are not members of any other group.
A skeleton example of this JSON object is:
.. code-block:: json
{
"_meta": {
"hostvars": {}
},
"all": {
"children": [
"ungrouped"
]
},
"ungrouped": {
"children": [
]
}
}
An easy way to see how this should look is to use :ref:`ansible-inventory`, which also supports the ``--list`` and ``--host`` parameters, just as an inventory script would.
.. seealso::
:ref:`developing_api`
Python API to Playbooks and Ad Hoc Task Execution
:ref:`developing_modules_general`
Get started with developing a module
:ref:`developing_plugins`
How to develop plugins
`AWX <https://github.com/ansible/awx>`_
REST API endpoint and GUI for Ansible, syncs with dynamic inventory
`Development Mailing List <https://groups.google.com/group/ansible-devel>`_
Mailing list for development topics
:ref:`communication_irc`
How to join Ansible chat channels

View File

@ -1,145 +0,0 @@
.. _using_local_modules_and_plugins:
.. _developing_locally:
**********************************
Adding modules and plugins locally
**********************************
You can extend Ansible by adding custom modules or plugins. You can create them from scratch or copy existing ones for local use. You can store a local module or plugin on your Ansible control node and share it with your team or organization. You can also share plugins and modules by including them in a collection, then publishing the collection on Ansible Galaxy.
If you are using a local module or plugin but Ansible cannot find it, this page is all you need.
If you want to create a plugin or a module, see :ref:`developing_plugins`, :ref:`developing_modules_general` and :ref:`developing_collections`.
Extending Ansible with local modules and plugins offers shortcuts such as:
* You can copy other people's modules and plugins.
* When writing a new module, you can choose any programming language you like.
* You do not have to clone any repositories.
* You do not have to open a pull request.
* You do not have to add tests (though we recommend that you do!).
.. contents::
:local:
.. _modules_vs_plugins:
Modules and plugins: what is the difference?
============================================
If you are looking to add functionality to Ansible, you might wonder whether you need a module or a plugin. Here is a quick overview to help you understand what you need:
* :ref:`Plugins <working_with_plugins>` extend Ansible's core functionality. Most plugin types execute on the control node within the ``/usr/bin/ansible`` process. Plugins offer options and extensions for the core features of Ansible: transforming data, logging output, connecting to inventory, and more.
* Modules are a type of plugin that execute automation tasks on a 'target' (usually a remote system). Modules work as standalone scripts that Ansible executes in their own process outside of the controller. Modules interface with Ansible mostly via JSON, accepting arguments and returning information by printing a JSON string to stdout before exiting. Unlike the other plugins (which must be written in Python), modules can be written in any language, although Ansible itself ships modules only in Python and PowerShell.
.. _use_collections:
Adding modules and plugins in collections
=========================================
You can add modules and plugins by :ref:`creating a collection <developing_collections>`. With a collection, you can use custom modules and plugins in any playbook or role. You can share your collection easily at any time through Ansible Galaxy.
The rest of this page describes other methods of using local, standalone modules or plugins.
.. _local_modules:
Adding a module or plugin outside of a collection
==================================================
You can configure Ansible to load standalone local modules or plugins in specific locations and make them available to all playbooks and roles (using configured paths). Alternatively, you can make a non-collection local module or plugin available only to certain playbooks or roles (via adjacent paths).
Adding standalone local modules for all playbooks and roles
-----------------------------------------------------------
To load standalone local modules automatically and make them available to all playbooks and roles, use the :ref:`DEFAULT_MODULE_PATH` configuration setting or the ``ANSIBLE_LIBRARY`` environment variable. The configuration setting and environment variable take a colon-separated list, similar to ``$PATH``. You have two options:
* Add your standalone local module to one of the default configured locations. See the :ref:`DEFAULT_MODULE_PATH` configuration setting for details. Default locations may change without notice.
* Add the location of your standalone local module to an environment variable or configuration:
* the ``ANSIBLE_LIBRARY`` environment variable
* the :ref:`DEFAULT_MODULE_PATH` configuration setting
To view your current configuration settings for modules:
.. code-block:: text
ansible-config dump | grep DEFAULT_MODULE_PATH
After you save your module file in one of these locations, Ansible loads it and you can use it in any local task, playbook, or role.
To confirm that ``my_local_module`` is available:
* type ``ansible localhost -m my_local_module`` to see the output for that module, or
* type ``ansible-doc -t module my_local_module`` to see the documentation for that module
.. note:: This applies to all plugin types but requires specific configuration and/or adjacent directories for each plugin type; see below.
.. note::
The ``ansible-doc`` command can parse module documentation from modules written in Python or an adjacent YAML file. If you have a module written in a programming language other than Python, you should write the documentation in a Python or YAML file adjacent to the module file. See :ref:`adjacent_yaml_doc` for details.
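As an illustration of what such a standalone module might look like, the sketch below defines a trivial ``my_local_module`` that simply echoes a message back. The module name and its ``msg`` option are made up for this example, and real modules should also ship ``DOCUMENTATION``, ``EXAMPLES``, and ``RETURN`` blocks as described in :ref:`developing_modules_general`.

.. code-block:: python

   #!/usr/bin/python
   from ansible.module_utils.basic import AnsibleModule


   def main():
       module = AnsibleModule(
           argument_spec=dict(
               msg=dict(type='str', default='hello'),
           ),
       )
       # This module never changes anything on the target, so changed is always False.
       module.exit_json(changed=False, msg=module.params['msg'])


   if __name__ == '__main__':
       main()

Saved in one of the configured module locations, the module can then be invoked with ``ansible localhost -m my_local_module -a msg=hi``.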
Adding standalone local modules for selected playbooks or a single role
-----------------------------------------------------------------------
Ansible automatically loads all executable files in certain directories adjacent to your playbook or role and treats them as modules. Standalone modules in these locations are available only to the specific playbook, playbooks, or role in the parent directory.
* To use a standalone module only in a selected playbook or playbooks, store the module in a subdirectory called ``library`` in the directory that contains the playbook or playbooks.
* To use a standalone module only in a single role, store the module in a subdirectory called ``library`` within that role.
.. note:: This applies to all plugin types but requires specific configuration and/or adjacent directories for each plugin type; see below.
.. warning::
Roles contained in collections cannot contain any modules or other plugins. All plugins in a collection must live in the collection ``plugins`` directory tree. All plugins in that tree are accessible to all roles in the collection. If you are developing new modules, we recommend distributing them in :ref:`collections <developing_collections>`, not in roles.
.. _distributing_plugins:
.. _local_plugins:
Adding a non-module plugin locally outside of a collection
==========================================================
You can configure Ansible to load standalone local plugins in a specified location or locations and make them available to all playbooks and roles. Alternatively, you can make a standalone local plugin available only to specific playbooks or roles.
.. note::
Although modules are plugins, the naming patterns for directory names and environment variables that apply to other plugin types do not apply to modules. See :ref:`local_modules`.
Adding local non-module plugins for all playbooks and roles
-----------------------------------------------------------
To load standalone local plugins automatically and make them available to all playbooks and roles, use the configuration setting or environment variable for the type of plugin you are adding. These configuration settings and environment variables take a colon-separated list, similar to ``$PATH``. You have two options:
* Add your local plugin to one of the default configured locations. See :ref:`configuration settings <ansible_configuration_settings>` for details on the correct configuration setting for the plugin type. Default locations may change without notice.
* Add the location of your local plugin to an environment variable or configuration:
* the relevant ``ANSIBLE_plugin_type_PLUGINS`` environment variable - for example, ``$ANSIBLE_INVENTORY_PLUGINS`` or ``$ANSIBLE_VARS_PLUGINS``
* the relevant ``plugin_type_PATH`` configuration setting, most of which begin with ``DEFAULT_`` - for example, ``DEFAULT_CALLBACK_PLUGIN_PATH`` or ``DEFAULT_FILTER_PLUGIN_PATH`` or ``BECOME_PLUGIN_PATH``
To view your current configuration settings for non-module plugins:
.. code-block:: text
ansible-config dump | grep plugin_type_PATH
After your plugin file is added to one of these locations, Ansible loads it and you can use it in any local module, task, playbook, or role. For more information on environment variables and configuration settings, see :ref:`ansible_configuration_settings`.
To confirm that ``plugins/plugin_type/my_local_plugin`` is available:
* type ``ansible-doc -t <plugin_type> my_local_plugin`` to see the documentation for that plugin - for example, ``ansible-doc -t lookup my_local_plugin`` for a lookup plugin
The ``ansible-doc`` command works for most plugin types, but not for action, filter, or test plugins. See :ref:`ansible-doc` for more details.
Adding standalone local plugins for selected playbooks or a single role
-----------------------------------------------------------------------
Ansible automatically loads all plugins from certain directories adjacent to your playbook or role, loading each type of plugin separately from a directory named for the type of plugin. Standalone plugins in these locations are available only to the specific playbook, playbooks, or role in the parent directory.
* To use a standalone plugin only in a selected playbook or playbooks, store the plugin in a subdirectory for the correct ``plugin_type`` (for example, ``callback_plugins`` or ``inventory_plugins``) in the directory that contains the playbooks. These directories must use the ``_plugins`` suffix. For a full list of plugin types, see :ref:`working_with_plugins`.
* To use a standalone plugin only in a single role, store the plugin in a subdirectory for the correct ``plugin_type`` (for example, ``cache_plugins`` or ``strategy_plugins``) within that role. When shipped as part of a role, the plugin is available as soon as the role is executed. These directories must use the ``_plugins`` suffix. For a full list of plugin types, see :ref:`working_with_plugins`.
.. warning::
Roles contained in collections cannot contain any plugins. All plugins in a collection must live in the collection ``plugins`` directory tree. All plugins in that tree are accessible to all roles in the collection. If you are developing new plugins, we recommend distributing them in :ref:`collections <developing_collections>`, not in roles.
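For instance, a small standalone filter plugin stored as ``filter_plugins/my_filters.py`` next to a playbook could look like the following sketch. The file name and the ``reverse_upper`` filter are arbitrary choices for this example; only the ``FilterModule`` class with a ``filters()`` method returning a dictionary is required.

.. code-block:: python

   # filter_plugins/my_filters.py
   def reverse_upper(value):
       return str(value)[::-1].upper()


   class FilterModule(object):
       def filters(self):
           # Map each Jinja2 filter name to the callable that implements it.
           return {
               'reverse_upper': reverse_upper,
           }

A playbook adjacent to that directory could then use ``{{ 'ansible' | reverse_upper }}`` in its templates and tasks.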
.. _ansible.legacy.custom:
Using ``ansible.legacy`` to access custom versions of an ``ansible.builtin`` module
===================================================================================
If you need to override one of the ``ansible.builtin`` modules and are using FQCN, you need to use ``ansible.legacy`` as part of the fully-qualified collection name (FQCN). For example, if you had your own ``copy`` module, you would access it as ``ansible.legacy.copy``. See :ref:`using_ansible_legacy` for details on how to use custom modules with roles within a collection.

Some files were not shown because too many files have changed in this diff.