Python 3.9 min for controller (#77566)
This commit is contained in:
parent
90ef914f77
commit
884244f1b2
|
@@ -0,0 +1,4 @@
|
|||
major_changes:
|
||||
- ansible - Increase minimum Python requirement to Python 3.9 for CLI utilities and controller code
|
||||
breaking_changes:
|
||||
- ansible - Increase minimum Python requirement to Python 3.9 for CLI utilities and controller code
|
|
@@ -10,7 +10,7 @@ from importlib.metadata import distribution
|
|||
|
||||
|
||||
def _short_name(name):
|
||||
return name.replace('ansible-', '').replace('ansible', 'adhoc')
|
||||
return name.removeprefix('ansible-').replace('ansible', 'adhoc')
|
||||
|
||||
|
||||
def main():
|
||||
|
|
|
@@ -13,9 +13,9 @@ import sys
|
|||
|
||||
# Used for determining if the system is running a new enough python version
|
||||
# and should only restrict on our documented minimum versions
|
||||
if sys.version_info < (3, 8):
|
||||
if sys.version_info < (3, 9):
|
||||
raise SystemExit(
|
||||
'ERROR: Ansible requires Python 3.8 or newer on the controller. '
|
||||
'ERROR: Ansible requires Python 3.9 or newer on the controller. '
|
||||
'Current version: %s' % ''.join(sys.version.splitlines())
|
||||
)
|
||||
|
||||
|
|
|
@@ -418,7 +418,7 @@ class TaskQueueManager:
|
|||
for possible in [method_name, 'v2_on_any']:
|
||||
gotit = getattr(callback_plugin, possible, None)
|
||||
if gotit is None:
|
||||
gotit = getattr(callback_plugin, possible.replace('v2_', ''), None)
|
||||
gotit = getattr(callback_plugin, possible.removeprefix('v2_'), None)
|
||||
if gotit is not None:
|
||||
methods.append(gotit)
|
||||
|
||||
|
|
|
@@ -82,9 +82,9 @@ class Block(Base, Conditional, CollectionSearch, Taggable):
|
|||
all_vars = {}
|
||||
|
||||
if self._parent:
|
||||
all_vars.update(self._parent.get_vars())
|
||||
all_vars |= self._parent.get_vars()
|
||||
|
||||
all_vars.update(self.vars.copy())
|
||||
all_vars |= self.vars.copy()
|
||||
|
||||
return all_vars
|
||||
|
||||
|
|
|
@@ -190,7 +190,7 @@ class IncludedFile:
|
|||
new_task._role_name = role_name
|
||||
for from_arg in new_task.FROM_ARGS:
|
||||
if from_arg in include_args:
|
||||
from_key = from_arg.replace('_from', '')
|
||||
from_key = from_arg.removesuffix('_from')
|
||||
new_task._from_files[from_key] = templar.template(include_args.pop(from_arg))
|
||||
|
||||
omit_token = task_vars.get('omit')
|
||||
|
|
|
@@ -65,7 +65,7 @@ class PlaybookInclude(Base, Conditional, Taggable):
|
|||
|
||||
all_vars = self.vars.copy()
|
||||
if variable_manager:
|
||||
all_vars.update(variable_manager.get_vars())
|
||||
all_vars |= variable_manager.get_vars()
|
||||
|
||||
templar = Templar(loader=loader, variables=all_vars)
|
||||
|
||||
|
@@ -105,8 +105,7 @@ class PlaybookInclude(Base, Conditional, Taggable):
|
|||
if new_obj.when and isinstance(entry, Play):
|
||||
entry._included_conditional = new_obj.when[:]
|
||||
|
||||
temp_vars = entry.vars.copy()
|
||||
temp_vars.update(new_obj.vars)
|
||||
temp_vars = entry.vars | new_obj.vars
|
||||
param_tags = temp_vars.pop('tags', None)
|
||||
if param_tags is not None:
|
||||
entry.tags.extend(param_tags.split(','))
|
||||
|
|
|
@@ -99,7 +99,7 @@ class RoleDefinition(Base, Conditional, Taggable, CollectionSearch):
|
|||
# result and the role name
|
||||
if isinstance(ds, dict):
|
||||
(new_role_def, role_params) = self._split_role_params(ds)
|
||||
new_ds.update(new_role_def)
|
||||
new_ds |= new_role_def
|
||||
self._role_params = role_params
|
||||
|
||||
# set the role name in the new ds
|
||||
|
|
|
@@ -78,7 +78,7 @@ class IncludeRole(TaskInclude):
|
|||
myplay = play
|
||||
|
||||
ri = RoleInclude.load(self._role_name, play=myplay, variable_manager=variable_manager, loader=loader, collection_list=self.collections)
|
||||
ri.vars.update(self.vars)
|
||||
ri.vars |= self.vars
|
||||
|
||||
if variable_manager is not None:
|
||||
available_variables = variable_manager.get_vars(play=myplay, task=self)
|
||||
|
@@ -147,7 +147,7 @@ class IncludeRole(TaskInclude):
|
|||
|
||||
# build options for role includes
|
||||
for key in my_arg_names.intersection(IncludeRole.FROM_ARGS):
|
||||
from_key = key.replace('_from', '')
|
||||
from_key = key.removesuffix('_from')
|
||||
args_value = ir.args.get(key)
|
||||
if not isinstance(args_value, string_types):
|
||||
raise AnsibleParserError('Expected a string for %s but got %s instead' % (key, type(args_value)))
|
||||
|
@@ -179,7 +179,7 @@ class IncludeRole(TaskInclude):
|
|||
def get_include_params(self):
|
||||
v = super(IncludeRole, self).get_include_params()
|
||||
if self._parent_role:
|
||||
v.update(self._parent_role.get_role_params())
|
||||
v |= self._parent_role.get_role_params()
|
||||
v.setdefault('ansible_parent_role_names', []).insert(0, self._parent_role.get_name())
|
||||
v.setdefault('ansible_parent_role_paths', []).insert(0, self._parent_role._role_path)
|
||||
return v
|
||||
|
|
|
@@ -146,7 +146,7 @@ class Task(Base, Conditional, Taggable, CollectionSearch):
|
|||
def _preprocess_with_loop(self, ds, new_ds, k, v):
|
||||
''' take a lookup plugin name and store it correctly '''
|
||||
|
||||
loop_name = k.replace("with_", "")
|
||||
loop_name = k.removeprefix("with_")
|
||||
if new_ds.get('loop') is not None or new_ds.get('loop_with') is not None:
|
||||
raise AnsibleError("duplicate loop in task: %s" % loop_name, obj=ds)
|
||||
if v is None:
|
||||
|
@@ -241,7 +241,7 @@ class Task(Base, Conditional, Taggable, CollectionSearch):
|
|||
if k in ('action', 'local_action', 'args', 'delegate_to') or k == action or k == 'shell':
|
||||
# we don't want to re-assign these values, which were determined by the ModuleArgsParser() above
|
||||
continue
|
||||
elif k.startswith('with_') and k.replace("with_", "") in lookup_loader:
|
||||
elif k.startswith('with_') and k.removeprefix("with_") in lookup_loader:
|
||||
# transform into loop property
|
||||
self._preprocess_with_loop(ds, new_ds, k, v)
|
||||
elif C.INVALID_TASK_ATTRIBUTE_FAILED or k in self._valid_attrs:
|
||||
|
@@ -323,7 +323,7 @@ class Task(Base, Conditional, Taggable, CollectionSearch):
|
|||
else:
|
||||
isdict = templar.template(env_item, convert_bare=False)
|
||||
if isinstance(isdict, dict):
|
||||
env.update(isdict)
|
||||
env |= isdict
|
||||
else:
|
||||
display.warning("could not parse environment value, skipping: %s" % value)
|
||||
|
||||
|
@@ -362,9 +362,9 @@ class Task(Base, Conditional, Taggable, CollectionSearch):
|
|||
def get_vars(self):
|
||||
all_vars = dict()
|
||||
if self._parent:
|
||||
all_vars.update(self._parent.get_vars())
|
||||
all_vars |= self._parent.get_vars()
|
||||
|
||||
all_vars.update(self.vars)
|
||||
all_vars |= self.vars
|
||||
|
||||
if 'tags' in all_vars:
|
||||
del all_vars['tags']
|
||||
|
@@ -376,9 +376,9 @@ class Task(Base, Conditional, Taggable, CollectionSearch):
|
|||
def get_include_params(self):
|
||||
all_vars = dict()
|
||||
if self._parent:
|
||||
all_vars.update(self._parent.get_include_params())
|
||||
all_vars |= self._parent.get_include_params()
|
||||
if self.action in C._ACTION_ALL_INCLUDES:
|
||||
all_vars.update(self.vars)
|
||||
all_vars |= self.vars
|
||||
return all_vars
|
||||
|
||||
def copy(self, exclude_parent=False, exclude_tasks=False):
|
||||
|
|
|
@@ -116,10 +116,10 @@ class TaskInclude(Task):
|
|||
else:
|
||||
all_vars = dict()
|
||||
if self._parent:
|
||||
all_vars.update(self._parent.get_vars())
|
||||
all_vars |= self._parent.get_vars()
|
||||
|
||||
all_vars.update(self.vars)
|
||||
all_vars.update(self.args)
|
||||
all_vars |= self.vars
|
||||
all_vars |= self.args
|
||||
|
||||
if 'tags' in all_vars:
|
||||
del all_vars['tags']
|
||||
|
|
|
@@ -25,7 +25,7 @@ class ActionModule(ActionBase):
|
|||
# TODO: remove in favor of controller side argspec detecing valid arguments
|
||||
# network facts modules must support gather_subset
|
||||
try:
|
||||
name = self._connection.redirected_names[-1].replace('ansible.netcommon.', '', 1)
|
||||
name = self._connection.redirected_names[-1].removeprefix('ansible.netcommon.')
|
||||
except (IndexError, AttributeError):
|
||||
name = self._connection._load_name.split('.')[-1]
|
||||
if name not in ('network_cli', 'httpapi', 'netconf'):
|
||||
|
|
|
@@ -118,8 +118,7 @@ class ActionModule(ActionBase):
|
|||
searchpath = newsearchpath
|
||||
|
||||
# add ansible 'template' vars
|
||||
temp_vars = task_vars.copy()
|
||||
temp_vars.update(generate_ansible_template_vars(self._task.args.get('src', None), source, dest))
|
||||
temp_vars = task_vars | generate_ansible_template_vars(self._task.args.get('src', None), source, dest)
|
||||
|
||||
# force templar to use AnsibleEnvironment to prevent issues with native types
|
||||
# https://github.com/ansible/ansible/issues/46169
|
||||
|
|
|
@@ -82,8 +82,7 @@ class ActionModule(ActionBase):
|
|||
self._fixup_perms2((self._connection._shell.tmpdir, tmp_src))
|
||||
kwargs['body'] = body
|
||||
|
||||
new_module_args = self._task.args.copy()
|
||||
new_module_args.update(kwargs)
|
||||
new_module_args = self._task.args | kwargs
|
||||
|
||||
# call with ansible.legacy prefix to prevent collections collisions while allowing local override
|
||||
result.update(self._execute_module('ansible.legacy.uri', module_args=new_module_args, task_vars=task_vars, wrap_async=self._task.async_val))
|
||||
|
|
|
@@ -628,7 +628,7 @@ class PluginLoader:
|
|||
if candidate_name.startswith('ansible.legacy'):
|
||||
# 'ansible.legacy' refers to the plugin finding behavior used before collections existed.
|
||||
# They need to search 'library' and the various '*_plugins' directories in order to find the file.
|
||||
plugin_load_context = self._find_plugin_legacy(name.replace('ansible.legacy.', '', 1),
|
||||
plugin_load_context = self._find_plugin_legacy(name.removeprefix('ansible.legacy.'),
|
||||
plugin_load_context, ignore_deprecated, check_aliases, suffix)
|
||||
else:
|
||||
# 'ansible.builtin' should be handled here. This means only internal, or builtin, paths are searched.
|
||||
|
|
|
@@ -883,8 +883,7 @@ class StrategyBase:
|
|||
ti_copy = included_file._task.copy(exclude_parent=True)
|
||||
ti_copy._parent = included_file._task._parent
|
||||
|
||||
temp_vars = ti_copy.vars.copy()
|
||||
temp_vars.update(included_file._vars)
|
||||
temp_vars = ti_copy.vars | included_file._vars
|
||||
|
||||
ti_copy.vars = temp_vars
|
||||
|
||||
|
|
|
@@ -123,7 +123,6 @@ class AnsibleJ2Vars(Mapping):
|
|||
|
||||
# prior to version 2.9, locals contained all of the vars and not just the current
|
||||
# local vars so this was not necessary for locals to propagate down to nested includes
|
||||
new_locals = self._locals.copy()
|
||||
new_locals.update(locals)
|
||||
new_locals = self._locals | locals
|
||||
|
||||
return AnsibleJ2Vars(self._templar, self._globals, locals=new_locals)
|
||||
|
|
|
@@ -914,7 +914,7 @@ class AnsibleCollectionRef:
|
|||
"""
|
||||
legacy_plugin_dir_name = to_text(legacy_plugin_dir_name)
|
||||
|
||||
plugin_type = legacy_plugin_dir_name.replace(u'_plugins', u'')
|
||||
plugin_type = legacy_plugin_dir_name.removesuffix(u'_plugins')
|
||||
|
||||
if plugin_type == u'library':
|
||||
plugin_type = u'modules'
|
||||
|
|
|
@@ -88,8 +88,7 @@ def combine_vars(a, b, merge=None):
|
|||
else:
|
||||
# HASH_BEHAVIOUR == 'replace'
|
||||
_validate_mutable_mappings(a, b)
|
||||
result = a.copy()
|
||||
result.update(b)
|
||||
result = a | b
|
||||
return result
|
||||
|
||||
|
||||
|
|
|
@@ -232,9 +232,9 @@ class VariableManager:
|
|||
try:
|
||||
for entity in entities:
|
||||
if isinstance(entity, Host):
|
||||
data.update(plugin.get_host_vars(entity.name))
|
||||
data |= plugin.get_host_vars(entity.name)
|
||||
else:
|
||||
data.update(plugin.get_group_vars(entity.name))
|
||||
data |= plugin.get_group_vars(entity.name)
|
||||
except AttributeError:
|
||||
if hasattr(plugin, 'run'):
|
||||
raise AnsibleError("Cannot use v1 type vars plugin %s from %s" % (plugin._load_name, plugin._original_path))
|
||||
|
@@ -305,7 +305,7 @@ class VariableManager:
|
|||
# TODO: cleaning of facts should eventually become part of taskresults instead of vars
|
||||
try:
|
||||
facts = wrap_var(self._fact_cache.get(host.name, {}))
|
||||
all_vars.update(namespace_facts(facts))
|
||||
all_vars |= namespace_facts(facts)
|
||||
|
||||
# push facts to main namespace
|
||||
if C.INJECT_FACTS_AS_VARS:
|
||||
|
@@ -670,7 +670,7 @@ class VariableManager:
|
|||
raise TypeError('The object retrieved for {0} must be a MutableMapping but was'
|
||||
' a {1}'.format(host, type(host_cache)))
|
||||
# Update the existing facts
|
||||
host_cache.update(facts)
|
||||
host_cache |= facts
|
||||
|
||||
# Save the facts back to the backing store
|
||||
self._fact_cache[host] = host_cache
|
||||
|
@@ -684,7 +684,7 @@ class VariableManager:
|
|||
raise AnsibleAssertionError("the type of 'facts' to set for nonpersistent_facts should be a Mapping but is a %s" % type(facts))
|
||||
|
||||
try:
|
||||
self._nonpersistent_fact_cache[host].update(facts)
|
||||
self._nonpersistent_fact_cache[host] |= facts
|
||||
except KeyError:
|
||||
self._nonpersistent_fact_cache[host] = facts
|
||||
|
||||
|
|
|
@@ -28,9 +28,9 @@ def get_plugin_vars(loader, plugin, path, entities):
|
|||
try:
|
||||
for entity in entities:
|
||||
if isinstance(entity, Host):
|
||||
data.update(plugin.get_host_vars(entity.name))
|
||||
data |= plugin.get_host_vars(entity.name)
|
||||
else:
|
||||
data.update(plugin.get_group_vars(entity.name))
|
||||
data |= plugin.get_group_vars(entity.name)
|
||||
except AttributeError:
|
||||
if hasattr(plugin, 'run'):
|
||||
raise AnsibleError("Cannot use v1 type vars plugin %s from %s" % (plugin._load_name, plugin._original_path))
|
||||
|
|
|
@@ -26,7 +26,6 @@ classifiers =
|
|||
Natural Language :: English
|
||||
Operating System :: POSIX
|
||||
Programming Language :: Python :: 3
|
||||
Programming Language :: Python :: 3.8
|
||||
Programming Language :: Python :: 3.9
|
||||
Programming Language :: Python :: 3.10
|
||||
Programming Language :: Python :: 3.11
|
||||
|
@@ -37,7 +36,7 @@ classifiers =
|
|||
|
||||
[options]
|
||||
zip_safe = False
|
||||
python_requires = >=3.8
|
||||
python_requires = >=3.9
|
||||
include_package_data = True
|
||||
# keep ansible-test as a verbatim script to work with editable installs, since it needs to do its
|
||||
# own package redirection magic that's beyond the scope of the normal `ansible` path redirection
|
||||
|
|
Loading…
Reference in New Issue